importerutils

package v4.20.7
Published: Jul 10, 2024 License: Apache-2.0 Imports: 18 Imported by: 0

Documentation

Overview

Example (CombineNormalDwellSpectra_Mismatch)
s1 := dataConvertModels.DetectorSampleByPMC{
	3:  []dataConvertModels.DetectorSample{},
	44: []dataConvertModels.DetectorSample{},
}
s2 := dataConvertModels.DetectorSampleByPMC{
	82: []dataConvertModels.DetectorSample{},
}

_, err := combineNormalDwellSpectra(s1, s2)
fmt.Printf("%v\n", err)
Output:

Found dwell spectrum PMC: 82 which has no corresponding normal spectrum
Example (CombineNormalDwellSpectra_OK)
s1 := dataConvertModels.DetectorSampleByPMC{
	3:  []dataConvertModels.DetectorSample{},
	44: []dataConvertModels.DetectorSample{},
}
s2 := dataConvertModels.DetectorSampleByPMC{
	44: []dataConvertModels.DetectorSample{},
}

comb, err := combineNormalDwellSpectra(s1, s2)
fmt.Printf("%v\n", err)

combPMCs := []int{}
for k := range comb {
	combPMCs = append(combPMCs, int(k))
}
sort.Ints(combPMCs)

for _, pmc := range combPMCs {
	fmt.Printf("%v\n", pmc)
}
Output:

<nil>
3
44
Example (ConvertHousekeepingData)
data := [][]string{
	{"13", "14", "34", "3.1415926", "44"},
	{"13", "13.33", "999", "55", "N/A"},
}

result, err := convertHousekeepingData(
	[]string{"ONE", "TWO", "THREE", "FOUR"},
	2,
	data,
	[]protos.Experiment_MetaDataType{protos.Experiment_MT_INT, protos.Experiment_MT_FLOAT, protos.Experiment_MT_INT, protos.Experiment_MT_FLOAT, protos.Experiment_MT_STRING},
)

fmt.Printf("%v|%v|%v\n", err, result.Header, len(result.Data))

// Print in increasing PMC order, map ordering is non-deterministic
keys := []int{}
for k := range result.Data {
	keys = append(keys, int(k))
}
sort.Ints(keys)

for _, pmc := range keys {
	hks := result.Data[int32(pmc)]
	fmt.Printf("%v: %v\n", pmc, hks)
}
Output:

<nil>|[ONE TWO THREE FOUR]|2
34: [{ 13 0 MT_INT} { 0 14 MT_FLOAT} { 0 3.1415925 MT_FLOAT} {44 0 0 MT_STRING}]
999: [{ 13 0 MT_INT} { 0 13.33 MT_FLOAT} { 0 55 MT_FLOAT} {N/A 0 0 MT_STRING}]
Example (ParseBeamLocationHeaders)
cols, geom_corr, err := parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_i", "PMC_3027_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

// With optional geom_corr
cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "geom_corr", "PMC_777_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_i", "PMC_3027_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

// With optional geom_corr mixed amongst ijs (SHOULD FAIL)
cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j", "geom_corr", "PMC_3027_MCC_i", "PMC_3027_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

// With optional geom_corr and another mixed amongst ijs (SHOULD FAIL)
cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j", "geom_corr", "PMC_3027_MCC_i", "SCLK", "PMC_3027_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_i", "PMC_3027_MCC_j"}, false, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "image_i", "image_j"}, false, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "image_i", "image_j", "SCLK"}, false, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "image_i", "SCLK", "image_j"}, false, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_3027_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_i"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "x", "y", "z", "SCLK", "PMC_777_MCC_i", "PMC_777_MCC_j", "PMC_3027_MCC_i"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)

cols, geom_corr, err = parseBeamLocationHeaders([]string{"PMC", "SCLK", "y", "z", "PMC_777_MCC_i", "PMC_777_MCC_j"}, true, 444)
fmt.Printf("%v|%v|%v\n", cols, geom_corr, err)
Output:

[{777 4 5} {3027 6 7}]|-1|<nil>
[{777 5 6} {3027 7 8}]|4|<nil>
[]|-1|Unexpected count of i/j columns
[]|-1|Unexpected column: geom_corr
[]|-1|Expected column image_i, got: PMC_777_MCC_i
[{444 4 5}]|-1|<nil>
[{444 4 5}]|-1|<nil>
[]|-1|Expected column image_j, got: SCLK
[]|-1|Unexpected column header PMC_3027_MCC_i after PMC_777_MCC_i
[]|-1|Unexpected count of i/j columns
[]|-1|Unexpected column: SCLK
[]|-1|Expected column x, got: SCLK
Example (ParseBeamLocationRow)
pmc, beam, err := parseBeamLocationRow([]string{"33", "1.1", "1.2", "1.3", "55.1", "55.2"}, []pmcColIdxs{{20, 4, 5}}, -1)
fmt.Printf("%v,%v|%v\n", pmc, beam, err)

pmc, beam, err = parseBeamLocationRow([]string{"33", "1.1", "1.2", "1.3", "55.1", "55.2", "444", "8.1", "lala", "8.2"}, []pmcColIdxs{{777, 4, 5}, {320, 7, 9}}, -1)
fmt.Printf("%v,%v|%v\n", pmc, beam, err)

pmc, beam, err = parseBeamLocationRow([]string{"33", "1.1", "1.2", "1.3", "0.983", "55.1", "55.2", "444", "8.1", "lala", "8.2"}, []pmcColIdxs{{777, 5, 6}, {320, 8, 10}}, 4)
fmt.Printf("%v,%v|%v\n", pmc, beam, err)
Output:

33,{1.1 1.2 1.3 0 map[20:{55.1 55.2}]}|<nil>
33,{1.1 1.2 1.3 0 map[320:{8.1 8.2} 777:{55.1 55.2}]}|<nil>
33,{1.1 1.2 1.3 0.983 map[320:{8.1 8.2} 777:{55.1 55.2}]}|<nil>
Example (ParseBeamLocations)
data, err := parseBeamLocations([][]string{[]string{"PMC", "x", "y", "z", "image_i", "image_j"}, []string{"33", "1.1", "1.2", "1.3", "55.1", "55.2"}}, false, 222)
fmt.Printf("%v|%v\n", data, err)

data, err = parseBeamLocations([][]string{
	[]string{"PMC", "x", "y", "z", "PMC_22_MCC_i", "PMC_22_MCC_j", "PMC_62_MCC_i", "PMC_62_MCC_j"},
	[]string{"33", "31.1", "31.2", "31.3", "355.1", "355.2", "3121.4", "3121.5"},
	[]string{"66", "91.1", "91.2", "91.3", "955.1", "955.2", "9121.4", "9121.5"},
}, true, 333)
fmt.Printf("%v|%v\n", data, err)

data, err = parseBeamLocations([][]string{
	[]string{"PMC", "x", "y", "z", "geom_corr", "PMC_22_MCC_i", "PMC_22_MCC_j", "PMC_62_MCC_i", "PMC_62_MCC_j"},
	[]string{"33", "31.1", "31.2", "31.3", "1.03", "355.1", "355.2", "3121.4", "3121.5"},
	[]string{"66", "91.1", "91.2", "91.3", "0.99", "955.1", "955.2", "9121.4", "9121.5"},
}, true, 333)
fmt.Printf("%v|%v\n", data, err)
Output:

map[33:{1.1 1.2 1.3 0 map[222:{55.1 55.2}]}]|<nil>
map[33:{31.1 31.2 31.3 0 map[22:{355.1 355.2} 62:{3121.4 3121.5}]} 66:{91.1 91.2 91.3 0 map[22:{955.1 955.2} 62:{9121.4 9121.5}]}]|<nil>
map[33:{31.1 31.2 31.3 1.03 map[22:{355.1 355.2} 62:{3121.4 3121.5}]} 66:{91.1 91.2 91.3 0.99 map[22:{955.1 955.2} 62:{9121.4 9121.5}]}]|<nil>
Example (ParseMSAMetadataLine)
lines := []string{
	"#LIVETIME    :  25.09,  25.08",
	"#OFFSET      :  0.3,   0.1    eV of first channel",
	"#XPERCHAN    :  10.0, 10.0    eV per channel",
	"#NCOLUMNS    : 2     Number of data columns",
	"123",
	"Some:Thing",
	"#SOME TEXT HERE",
	"#FIELD:1234",
	"##THE FIELD:12.34",
	"#ANOTHER FIELD  :  999",
	"#NCOLUMNS    : 2 ",
	"#DATE        :       Date in the format DD-MMM-YYYY, for example 07-JUL-2010",
	"#LIVETIME    :   9.87332058 ",
	"#XPERCHAN    : 7.9226, 7.9273   eV per channel",
}

for _, line := range lines {
	k, v, err := parseMSAMetadataLine(line)
	fmt.Printf("%v|%v|%v\n", k, v, err)
}
Output:

LIVETIME|25.09, 25.08|<nil>
OFFSET|0.3, 0.1|<nil>
XPERCHAN|10.0, 10.0|<nil>
NCOLUMNS|2|<nil>
||Expected # at start of metadata: 123
||Expected # at start of metadata: Some:Thing
||Failed to parse metadata line: #SOME TEXT HERE
FIELD|1234|<nil>
THE FIELD|12.34|<nil>
ANOTHER FIELD|999|<nil>
NCOLUMNS|2|<nil>
DATE||<nil>
LIVETIME|9.87332058|<nil>
XPERCHAN|7.9226, 7.9273|<nil>
Example (ParseMSASpectraLine)
testData := []parseMSASpectraLineTestItem{
	// 1 column
	parseMSASpectraLineTestItem{"1983", 7, 1},
	parseMSASpectraLineTestItem{"1", 8, 1},
	parseMSASpectraLineTestItem{"0", 9, 1},

	// 2 columns
	parseMSASpectraLineTestItem{"1983, 44", 7, 2},
	parseMSASpectraLineTestItem{"1, 0", 8, 2},
	parseMSASpectraLineTestItem{"2321,32342", 9, 2},

	// 3 columns (it doesn't care)
	parseMSASpectraLineTestItem{"11, 22, 33", 9, 3},

	// 0 columns (sanity)
	parseMSASpectraLineTestItem{"11, 22, 33", 9, 0},

	// Wrong column counts
	parseMSASpectraLineTestItem{"1983, 44", 7, 1},
	parseMSASpectraLineTestItem{"1983", 7, 2},
	parseMSASpectraLineTestItem{"1983,", 7, 2},
	parseMSASpectraLineTestItem{"", 7, 1},
	parseMSASpectraLineTestItem{"", 7, 2},
	parseMSASpectraLineTestItem{",", 7, 1},
	parseMSASpectraLineTestItem{",", 7, 2},

	// Issues with parsing values
	parseMSASpectraLineTestItem{"#SOMETHING", 1, 1},
	parseMSASpectraLineTestItem{"#SOMETHING,#ELSE", 1, 2},
	parseMSASpectraLineTestItem{"1,#Number", 1, 2},
	parseMSASpectraLineTestItem{"Waffles", 2, 1},
	parseMSASpectraLineTestItem{"1.6", 4, 1},
	parseMSASpectraLineTestItem{"1.6, 3.1415926", 4, 2},
	parseMSASpectraLineTestItem{"16,3.1415926", 4, 2},
	parseMSASpectraLineTestItem{"-34, 10", 6, 2},
	parseMSASpectraLineTestItem{"34, -10", 6, 2},
	parseMSASpectraLineTestItem{"5, Waffles", 6, 2},
	parseMSASpectraLineTestItem{"Waffles, 5", 6, 2},
}

for _, t := range testData {
	v, e := parseMSASpectraLine(t.line, t.lc, t.ncols)
	fmt.Printf("%v|%v\n", v, e)
}
Output:

[1983]|<nil>
[1]|<nil>
[0]|<nil>
[1983 44]|<nil>
[1 0]|<nil>
[2321 32342]|<nil>
[11 22 33]|<nil>
[]|Expected 0 spectrum columns, got 3 on line [9]:11, 22, 33
[]|Expected 1 spectrum columns, got 2 on line [7]:1983, 44
[]|Expected 2 spectrum columns, got 1 on line [7]:1983
[]|Failed to read spectra "" on line [7]:1983,
[]|Failed to read spectra "" on line [7]:
[]|Expected 2 spectrum columns, got 1 on line [7]:
[]|Expected 1 spectrum columns, got 2 on line [7]:,
[]|Failed to read spectra "" on line [7]:,
[]|Failed to read spectra "#SOMETHING" on line [1]:#SOMETHING
[]|Failed to read spectra "#SOMETHING" on line [1]:#SOMETHING,#ELSE
[]|Failed to read spectra "#Number" on line [1]:1,#Number
[]|Failed to read spectra "Waffles" on line [2]:Waffles
[]|Failed to read spectra "1.6" on line [4]:1.6
[]|Failed to read spectra "1.6" on line [4]:1.6, 3.1415926
[]|Failed to read spectra "3.1415926" on line [4]:16,3.1415926
[]|Spectra expected non-negative value "-34" on line [6]:-34, 10
[]|Spectra expected non-negative value "-10" on line [6]:34, -10
[]|Failed to read spectra "Waffles" on line [6]:5, Waffles
[]|Failed to read spectra "Waffles" on line [6]:Waffles, 5
Example (ParsePsuedoIntensityData)
pmcTableHeader := []string{"PMC", "x", "y", "z"}
pmcTableData1 := []string{"77", "1", "2", "3"}
pmcTableData2 := []string{"78", "4", "3", "2"}

psHeader := []string{"pi1", "pi2", "pi3", "pi4", "pi5", "pi6"}
psTableData1 := []string{"0.1", "0.2", "0.3", "0.4", "0.5", "0.6"}
psTableData2 := []string{"10.1", "10.2", "10.3", "10.4", "10.5", "10.6"}

csvData := [][]string{pmcTableHeader, pmcTableData1, pmcTableData2, psHeader, psTableData1, psTableData2}
data, err := parsePsuedoIntensityData(csvData)
fmt.Printf("%v|%v\n", err, len(data))
fmt.Printf("%v\n", data[77])
fmt.Printf("%v\n", data[78])

csvData = [][]string{pmcTableData1, pmcTableData2, psHeader, psTableData1, psTableData2}
data, err = parsePsuedoIntensityData(csvData)
fmt.Printf("%v|%v\n", err, data)

csvData = [][]string{pmcTableHeader, pmcTableData1, []string{"oops", "1", "2", "3"}, psHeader, psTableData1, psTableData2}
data, err = parsePsuedoIntensityData(csvData)
fmt.Printf("%v|%v\n", err, data)

csvData = [][]string{pmcTableHeader, pmcTableData1, pmcTableData1, psHeader, psTableData1, []string{"10.1", "10.2", "wtf", "10.4", "10.5", "10.6"}}
data, err = parsePsuedoIntensityData(csvData)
fmt.Printf("%v|%v\n", err, data)
Output:

<nil>|2
[0.1 0.2 0.3 0.4 0.5 0.6]
[10.1 10.2 10.3 10.4 10.5 10.6]
expected first table to contain PMCs in first column, found: 77|map[]
row 2 - expected PMC, got: oops|map[]
row 5, col 3 - expected pseudointensity value, got: wtf|map[]
Example (ParseRanges)
rangeHeader := []string{"Name", "StartChannel", "EndChannel"}
range1 := []string{"ps1", "100", "120"}
range2 := []string{"ps2", "144", "173"}

csvData := [][]string{rangeHeader, range1, range2}
data, err := parseRanges(csvData)
fmt.Printf("%v|%v\n", err, len(data))
fmt.Printf("%+v\n", data[0])
fmt.Printf("%+v\n", data[1])

csvData = [][]string{[]string{"Date", "StartChannel", "EndChannel"}, range1, range2}
data, err = parseRanges(csvData)
fmt.Printf("%v|%v\n", err, data)
Output:

<nil>|2
{Name:ps1 Start:100 End:120}
{Name:ps2 Start:144 End:173}
Pseudo-intensity ranges has unexpected headers|[]
Example (ParseSpectraCSVData_OK)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"13", "14", "34", "18.7", "18.8", "18.1", "18.2", "18.5", "18.6", "18.3", "18.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"34", "11", "21", "31"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "23", "24", "25", "26"},
	[]string{"121", "122", "123", "124", "125", "126"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"41", "42", "43", "44", "45", "46"},
	[]string{"141", "142", "143", "144", "145", "146"},
}
data, err := parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})

fmt.Printf("%v\n", err)

combPMCs := []int{}
for k := range data {
	combPMCs = append(combPMCs, int(k))
}
sort.Ints(combPMCs)

for _, pmc := range combPMCs {
	s := data[int32(pmc)]
	fmt.Printf("pmc[%v]\n", pmc)
	for detIdx := range s {
		fmt.Printf(" det[%v]\n  %v\n", detIdx, s[detIdx].ToString())
	}
}
Output:

<nil>
pmc[33]
 det[0]
  meta [DETECTOR_ID:A/s LIVETIME:17.1/f OFFSET:17.3/f PMC:33/i READTYPE:Normal/s REALTIME:17.7/f SCLK:11/i XPERCHAN:17.5/f] spectrum [21 22 23 24 25 26]
 det[1]
  meta [DETECTOR_ID:B/s LIVETIME:17.2/f OFFSET:17.4/f PMC:33/i READTYPE:Normal/s REALTIME:17.8/f SCLK:12/i XPERCHAN:17.6/f] spectrum [41 42 43 44 45 46]
pmc[34]
 det[0]
  meta [DETECTOR_ID:A/s LIVETIME:18.1/f OFFSET:18.3/f PMC:34/i READTYPE:Normal/s REALTIME:18.7/f SCLK:13/i XPERCHAN:18.5/f] spectrum [121 122 123 124 125 126]
 det[1]
  meta [DETECTOR_ID:B/s LIVETIME:18.2/f OFFSET:18.4/f PMC:34/i READTYPE:Normal/s REALTIME:18.8/f SCLK:14/i XPERCHAN:18.6/f] spectrum [141 142 143 144 145 146]
Example (ParseSpectraCSVData_SpectrumTableBadData)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"13", "14", "34", "18.7", "18.8", "18.1", "18.2", "18.5", "18.6", "18.3", "18.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"34", "11", "21", "31"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "something", "24", "25", "26"},
	[]string{"121", "122", "123", "124", "125", "126"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"41", "42", "43", "44", "45", "46"},
	[]string{"141", "142", "143", "144", "145", "146"},
}

data, err := parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)
Output:

map[]|row 7, col 3 - failed to read value, got: something
Example (ParseSpectraCSVData_SpectrumTableDiffColCounts)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"13", "14", "34", "18.7", "18.8", "18.1", "18.2", "18.5", "18.6", "18.3", "18.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"34", "11", "21", "31"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "23", "24", "25", "26"},
	[]string{"121", "122", "123", "124", "125", "126"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6", "B_7"},
	[]string{"41", "42", "43", "44", "45", "46", "47"},
	[]string{"141", "142", "143", "144", "145", "146", "147"},
}

data, err := parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)
Output:

map[]|row 9 - differing channel count found, A was 6, B is 7
Example (ParseSpectraCSVData_SpectrumTablesDifferingRows)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"13", "14", "34", "18.7", "18.8", "18.1", "18.2", "18.5", "18.6", "18.3", "18.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"34", "11", "21", "31"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "23", "24", "25", "26"},
	[]string{"121", "122", "123", "124", "125", "126"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"41", "42", "43", "44", "45", "46"},
}

data, err := parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)
Output:

map[]|A table had 2 rows, B had 1
Example (ParseSpectraCSVData_TopTableErrors)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "XPERCHAN_A", "XPERCHAN_B"},
}
data, err := parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)

lines = [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3"},
}
data, err = parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)

lines = [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4", "666"},
}
data, err = parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)

lines = [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "something", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
}
data, err = parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)

lines = [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "something", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
}
data, err = parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)

lines = [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "something", "17.5", "17.6", "17.3", "17.4"},
}
data, err = parseSpectraCSVData(lines, "Normal", &logger.NullLogger{})
fmt.Printf("%v|%v\n", data, err)
Output:

map[]|Unexpected columns in metadata table: [SCLK_A SCLK_B PMC XPERCHAN_A XPERCHAN_B]
map[]|row 1 - expected 11 metadata items in row, got: 10
map[]|row 1 - expected 11 metadata items in row, got: 12
map[]|row 1 - expected SCLK, got: something
map[]|row 1 - expected PMC, got: something
map[]|row 1 - live_time_B expected float, got: something
Example (ReadMSAFileLines)
data := strings.Split(`
	#TITLE       AMASE_23-G23A
	#OWNER       : Stony Brook
	#DATE        : 04-18-2024
	#TIME        : 04:05:02
	#NPOINTS     : 4096
	#NCOLUMNS     : 2
	#XUNITS      :  eV
	#YUNITS      :  COUNTS
	#DATATYPE    :  YY
	#XPERCHAN    :  9.9799, 10.1633    eV per channel
	#OFFSET      :  ,   -8.5912, -9.6341    eV of first channel
	#SIGNALTYPE  :  XRF
	#COMMENT     : 20 min purge, 175 microA, 28 kV, He trickle purge, 5 x 14 mm scan, 0.1 step size, 10s dwell 
	#COMMENT     : 
	#XPOSITION   :    4.599
	#YPOSITION   :    1.900
	#ZPOSITION   :    0.000
	#LIVETIME    :    1.9,   11.0
	#REALTIME    :    2.0,   12.0
	#SPECTRUM    :
	0, 0
	0, 0
	0, 0
	0, 0
	0, 0
	0, 0
	0, 0`, "\n")

items, err := ReadMSAFileLines(data, false, true, false)
fmt.Println(err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 2", "#NPOINTS : 3", "#SPECTRUM", "0", "23", "991231"}
items, err = ReadMSAFileLines(data, false, true, false)
fmt.Println(err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: YY", "#NCOLUMNS: 1", "#NPOINTS : 3", "#SPECTRUM", "0", "23", "991231"}
items, err = ReadMSAFileLines(data, false, true, false)
fmt.Println(err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: YY", "#NCOLUMNS: 2", "#DETECTOR_ID: A", "#NPOINTS : 3", "#SPECTRUM", "0, 0", "23, 0", "48, 991231"}
items, err = ReadMSAFileLines(data, false, true, false)
fmt.Println(err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: YY", "#NCOLUMNS: 2", "#NPOINTS : 3", "#SPECTRUM", "0, 0", "23, 0", "48, 991231"}
items, err = ReadMSAFileLines(data, false, true, false)
fmt.Println(err)

fmt.Println("A")
fmt.Printf(" %v\n", items[0].ToString())

fmt.Println("B")
fmt.Printf(" %v\n", items[1].ToString())
Output:

Failed to parse metadata line: #TITLE       AMASE_23-G23A
Expected DATATYPE "YY" in MSA metadata
Expected NCOLUMNS "2" in MSA metadata
Unexpected DETECTOR_ID in multi-detector MSA
<nil>
A
 meta [DATATYPE:YY/s DETECTOR_ID:A/s NCOLUMNS:2/s NPOINTS:3/s PMC:3001/i SOMETHING:123/s] spectrum [0 23 48]
B
 meta [DATATYPE:YY/s DETECTOR_ID:B/s NCOLUMNS:2/s NPOINTS:3/s PMC:3001/i SOMETHING:123/s] spectrum [0 0 991231]
Example (ReadMSAFileLines_Single)
data := []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: A", "#NPOINTS : 3", "#SPECTRUM", "0", "23", "991231"}
items, err := ReadMSAFileLines(data, true, true, false)
fmt.Printf("A|%v|%v\n", items[0].ToString(), err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}
items, err = ReadMSAFileLines(data, true, true, false)
fmt.Printf("B|%v|%v\n", items[0].ToString(), err)

data = []string{"#SOMETHING:123", "#PMC: 3001", "#COMMENT: one", "#COMMENT: two", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}
items, err = ReadMSAFileLines(data, true, true, false)
fmt.Printf("C|%v|%v\n", items[0].ToString(), err)

// Duplicate non-comment field
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: YY", "#NCOLUMNS: 1", "#DATATYPE: YY", "#DETECTOR_ID: A", "#NPOINTS : 3", "#SPECTRUM", "0", "23", "991231"}, true, true, false)
fmt.Printf("Dup|%v\n", err)

// Wrong DATATYPE
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: YY", "#NCOLUMNS: 1", "#DETECTOR_ID: A", "#NPOINTS : 3", "#SPECTRUM", "0", "23", "991231"}, true, true, false)
fmt.Printf("WrongDT|%v\n", err)

// Not expecting PMC
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}, true, false, false)
fmt.Printf("NoExpPMC|%v\n", err)

// Wrong point count
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 4", "#SPECTRUM", "0", "23", "991231"}, true, true, false)
fmt.Printf("Wrong#Pts|%v\n", err)

// Missing SPECTRUM
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 3", "99", "23", "991231"}, true, true, false)
fmt.Printf("MissingSPECTRUM|%v\n", err)

// Missing PMC
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}, true, true, false)
fmt.Printf("MissingPMC|%v\n", err)

// Missing DETECTOR_ID
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}, true, true, false)
fmt.Printf("MissingDETECTOR_ID|%v\n", err)

// Missing NPOINTS
items, err = ReadMSAFileLines([]string{"#SOMETHING:123", "#PMC: 3001", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here"}, true, true, false)
fmt.Printf("MissingNPOINTS|%v\n", err)

// No metadata
items, err = ReadMSAFileLines([]string{"50", "23", "991231"}, true, true, false)
fmt.Printf("NoMeta|%v\n", err)

// Data after end of data is ignored
data = []string{"#SOMETHING:123", "#PMC: 3001", "#COMMENT: one", "#COMMENT: two", "#DATATYPE: Y", "#NCOLUMNS: 1", "#DETECTOR_ID: B", "#NPOINTS : 5", "#SPECTRUM", "0", "23", "991231", "0", "44", "#ENDOFDATA here", "78", "#SOME COMMENT!"}
items, err = ReadMSAFileLines(data, true, true, false)
fmt.Printf("D|%v|%v\n", items[0].ToString(), err)

// Blank line
items, err = ReadMSAFileLines([]string{""}, true, true, false)
fmt.Printf("Blank|%v\n", err)

// Empty file
items, err = ReadMSAFileLines([]string{}, true, true, false)
fmt.Printf("Empty|%v\n", err)
Output:

A|meta [DATATYPE:Y/s DETECTOR_ID:A/s NCOLUMNS:1/s NPOINTS:3/s PMC:3001/i SOMETHING:123/s] spectrum [0 23 991231]|<nil>
B|meta [DATATYPE:Y/s DETECTOR_ID:B/s NCOLUMNS:1/s NPOINTS:5/s PMC:3001/i SOMETHING:123/s] spectrum [0 23 991231 0 44]|<nil>
C|meta [COMMENT:one two/s DATATYPE:Y/s DETECTOR_ID:B/s NCOLUMNS:1/s NPOINTS:5/s PMC:3001/i SOMETHING:123/s] spectrum [0 23 991231 0 44]|<nil>
Dup|Duplicate meta data lines found for: DATATYPE
WrongDT|Expected DATATYPE "Y" in MSA metadata
NoExpPMC|PMC NOT expected, but was found in MSA
Wrong#Pts|Expected 4 spectra, got 3
MissingSPECTRUM|Unexpected potential spectra found at 5: 99
MissingPMC|PMC expected, but not found in MSA
MissingDETECTOR_ID|Failed to find DETECTOR_ID in metadata
MissingNPOINTS|Failed to find NPOINTS in metadata
NoMeta|Unexpected potential spectra found at 0: 50
D|meta [COMMENT:one two/s DATATYPE:Y/s DETECTOR_ID:B/s NCOLUMNS:1/s NPOINTS:5/s PMC:3001/i SOMETHING:123/s] spectrum [0 23 991231 0 44]|<nil>
Blank|No spectra data found to be read
Empty|No spectra data found to be read
Example (ScanHousekeepingData)
data := [][]string{
	{"ONE", "TWO", "PMC", "THREE", "FOUR"},
	{"13", "14", "34", "3.1415926", "44"},
	{"13", "13.33", "999", "55", "N/A"},
	{"Some other header"},
	{"TABLE", "TWO", "GOES", "HERE", "DUDE"},
	{"1", "2", "3", "4", "5"},
}

headers, pmcCol, dataTypes, rowCount := scanHousekeepingData(data)
fmt.Printf("%v|%v|%v|%v\n", headers, pmcCol, dataTypes, rowCount)

data = [][]string{
	{"ONE", "TWO", "PMC", "THREE"},
	{"13", "14", "34", "3.1415926"},
	{"13", "11", "999", "Fifty-Five"},
	{"1", "2", "3", "4"},
}

headers, pmcCol, dataTypes, rowCount = scanHousekeepingData(data)
fmt.Printf("%v|%v|%v|%v\n", headers, pmcCol, dataTypes, rowCount)
Output:

[ONE TWO THREE FOUR]|2|[MT_INT MT_FLOAT MT_INT MT_FLOAT MT_STRING]|3
[ONE TWO THREE]|2|[MT_INT MT_INT MT_INT MT_STRING]|4
Example (SplitColumnHeader)
pmc, data, ij, err := splitColumnHeader("PMC_333_corr_i")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("PMC_777_MCC_j")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("PMC_777_MC_j")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("PMC_bob_MCC_j")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("PMC_777_MCC_k")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("PMc_777_MCC_j")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)

pmc, data, ij, err = splitColumnHeader("nan")
fmt.Printf("%v,%v,%v|%v\n", pmc, data, ij, err)
Output:

333,corr,i|<nil>
777,MCC,j|<nil>
0,,|Unexpected column: PMC_777_MC_j
0,,|Unexpected column: PMC_bob_MCC_j
0,,|Unexpected column: PMC_777_MCC_k
0,,|Unexpected column: PMc_777_MCC_j
0,,|Unexpected column: nan
Example (SplitMSAMetaFor2Detectors)
meta := dataConvertModels.MetaData{
	"COMMENT":    dataConvertModels.StringMetaValue("My Comment"),
	"XPERCHAN":   dataConvertModels.StringMetaValue("  10.30, 11.30 "),
	"OFFSET":     dataConvertModels.StringMetaValue("  3.30,   5.30 "),
	"SIGNALTYPE": dataConvertModels.StringMetaValue("  XRF"),
	"DATATYPE":   dataConvertModels.StringMetaValue("YY"),
	"PMC":        dataConvertModels.IntMetaValue(99),
	"SCLK":       dataConvertModels.IntMetaValue(399),
	"XPOSITION":  dataConvertModels.StringMetaValue("    1.0030"),
	"YPOSITION":  dataConvertModels.FloatMetaValue(2.0040),
	"ZPOSITION":  dataConvertModels.FloatMetaValue(2.4430),
	"LIVETIME":   dataConvertModels.StringMetaValue("  25.090,  25.080"),
	"REALTIME":   dataConvertModels.StringMetaValue("  25.110,  25.120"),
	"TRIGGERS":   dataConvertModels.StringMetaValue(" 45993, 43902"),
	"EVENTS":     dataConvertModels.StringMetaValue(" 44690, 42823"),
	"KETEK_ICR":  dataConvertModels.StringMetaValue(" 1833.1, 1750.7"),
	"KETEK_OCR":  dataConvertModels.StringMetaValue(" 1780.1, 1705.7"),
	"DATE":       dataConvertModels.StringMetaValue("03-20-2018"),
	"TIME":       dataConvertModels.StringMetaValue("13:10:30"),
	"NPOINTS":    dataConvertModels.StringMetaValue("4096"),
	"NCOLUMNS":   dataConvertModels.StringMetaValue("2"),
	"XUNITS":     dataConvertModels.StringMetaValue("eV"),
	"YUNITS":     dataConvertModels.StringMetaValue("COUNTS"),
}

a, b, e := splitMSAMetaFor2Detectors(meta, false)
fmt.Printf("%v\n", e)

fmt.Println("META A")
fmt.Printf("%v\n", a.ToString())

fmt.Println("META B")
fmt.Printf("%v\n", b.ToString())

meta = dataConvertModels.MetaData{
	"COMMENT":  dataConvertModels.StringMetaValue("My comment"),
	"LIVETIME": dataConvertModels.StringMetaValue("  25.09,  25.08, 30"),
}
a, b, e = splitMSAMetaFor2Detectors(meta, false)
fmt.Printf("%v\n", e)
Output:

<nil>
META A
[COMMENT:My Comment/s DATATYPE:YY/s DATE:03-20-2018/s DETECTOR_ID:A/s EVENTS:44690/s KETEK_ICR:1833.1/s KETEK_OCR:1780.1/s LIVETIME:25.09/f NCOLUMNS:2/s NPOINTS:4096/s OFFSET:3.3/f PMC:99/i REALTIME:25.11/f SCLK:399/i SIGNALTYPE:XRF/s TIME:13:10:30/s TRIGGERS:45993/s XPERCHAN:10.3/f XPOSITION:1.0030/s XUNITS:eV/s YPOSITION:2.004/f YUNITS:COUNTS/s ZPOSITION:2.443/f]
META B
[COMMENT:My Comment/s DATATYPE:YY/s DATE:03-20-2018/s DETECTOR_ID:B/s EVENTS:42823/s KETEK_ICR:1750.7/s KETEK_OCR:1705.7/s LIVETIME:25.08/f NCOLUMNS:2/s NPOINTS:4096/s OFFSET:5.3/f PMC:99/i REALTIME:25.12/f SCLK:399/i SIGNALTYPE:XRF/s TIME:13:10:30/s TRIGGERS:43902/s XPERCHAN:11.3/f XPOSITION:1.0030/s XUNITS:eV/s YPOSITION:2.004/f YUNITS:COUNTS/s ZPOSITION:2.443/f]
Metadata row cannot be split for 2 detectors due to commas
Example (SplitSpectraCSVTables_OneTable)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "23", "24", "25", "26"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"41", "42", "43", "44", "45", "46"},
}

data1, data2 := splitSpectraCSVTables(lines)
fmt.Printf("table1=%v, table2=%v\n", len(data1), len(data2))
fmt.Printf("%v\n", data1[0])
Output:

table1=8, table2=0
[SCLK_A SCLK_B PMC real_time_A real_time_B live_time_A live_time_B XPERCHAN_A XPERCHAN_B OFFSET_A OFFSET_B]
Example (SplitSpectraCSVTables_TwoTable)
lines := [][]string{
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"11", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"33", "10", "20", "30"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"21", "22", "23", "24", "25", "26"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"41", "42", "43", "44", "45", "46"},
	[]string{"SCLK_A", "SCLK_B", "PMC", "real_time_A", "real_time_B", "live_time_A", "live_time_B", "XPERCHAN_A", "XPERCHAN_B", "OFFSET_A", "OFFSET_B"},
	[]string{"21", "12", "33", "17.7", "17.8", "17.1", "17.2", "17.5", "17.6", "17.3", "17.4"},
	[]string{"13", "14", "34", "18.7", "18.8", "18.1", "18.2", "18.5", "18.6", "18.3", "18.4"},
	[]string{"PMC", "x", "y", "z"},
	[]string{"23", "10", "20", "30"},
	[]string{"34", "11", "21", "31"},
	[]string{"A_1", "A_2", "A_3", "A_4", "A_5", "A_6"},
	[]string{"31", "22", "23", "24", "25", "26"},
	[]string{"121", "122", "123", "124", "125", "126"},
	[]string{"B_1", "B_2", "B_3", "B_4", "B_5", "B_6"},
	[]string{"31", "42", "43", "44", "45", "46"},
	[]string{"141", "142", "143", "144", "145", "146"},
}

data1, data2 := splitSpectraCSVTables(lines)
fmt.Printf("table1=%v, table2=%v\n", len(data1), len(data2))
fmt.Printf("%v\n", data1[0])
fmt.Printf("%v\n", data2[0])
fmt.Printf("%v\n", data2[1])
Output:

table1=8, table2=12
[SCLK_A SCLK_B PMC real_time_A real_time_B live_time_A live_time_B XPERCHAN_A XPERCHAN_B OFFSET_A OFFSET_B]
[SCLK_A SCLK_B PMC real_time_A real_time_B live_time_A live_time_B XPERCHAN_A XPERCHAN_B OFFSET_A OFFSET_B]
[21 12 33 17.7 17.8 17.1 17.2 17.5 17.6 17.3 17.4]

Index

Examples

Constants

This section is empty.

Variables

This section is empty.

Functions

func LogIfMoreFoundMSA

func LogIfMoreFoundMSA(m dataConvertModels.DetectorSampleByPMC, typename string, morethan int, log logger.ILogger)

func MakeFMDatasetOutput

func MakeFMDatasetOutput(
	beamLookup dataConvertModels.BeamLocationByPMC,
	hkData dataConvertModels.HousekeepingData,
	locSpectraLookup dataConvertModels.DetectorSampleByPMC,
	bulkMaxSpectraLookup dataConvertModels.DetectorSampleByPMC,
	contextImgsPerPMC map[int32]string,
	pseudoIntensityData dataConvertModels.PseudoIntensities,
	pseudoIntensityRanges []dataConvertModels.PseudoIntensityRange,
	matchedAlignedImages []dataConvertModels.MatchedAlignedImageMeta,
	rgbuImages []dataConvertModels.ImageMeta,
	discoImages []dataConvertModels.ImageMeta,
	whiteDiscoImage string,
	datasetMeta gdsfilename.FileNameMeta,
	datasetIDExpected string,
	overrideInstrument protos.ScanInstrument,
	overrideDetector string,
	beamVersion uint32,
	log logger.ILogger,
) (*dataConvertModels.OutputData, error)

Given the data read from disk, this assembles it all into the output structure. This was previously hard-coded into the FM importer, but now that we have SOFF files, which need to work the same way, it has been pulled out into this function.
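
A rough usage sketch (not taken from the package itself), showing how the Read* functions below could supply the inputs; the file names, the main context image PMC (444), the header row count, the dataset ID and the beam version are placeholders, and the image lists, dataset metadata and overrides are left at zero values:

log := &logger.NullLogger{}

// Error handling omitted for brevity
beams, _ := ReadBeamLocationsFile("beamLocations.csv", true, 444, log)
hk, _ := ReadHousekeepingFile("housekeeping.csv", 1, log)
spectra, _ := ReadSpectraCSV("spectra.csv", log)
bulkMax, _ := ReadBulkMaxSpectra([]string{"bulk.msa", "max.msa"}, log)
pseudo, _ := ReadPseudoIntensityFile("pseudointensities.csv", true, log)
ranges, _ := ReadPseudoIntensityRangesFile("ranges.csv", log)

output, err := MakeFMDatasetOutput(beams, hk, spectra, bulkMax,
	map[int32]string{}, // contextImgsPerPMC - none in this sketch
	pseudo, ranges,
	nil, nil, nil, "", // no matched/RGBU/disco/white disco images
	gdsfilename.FileNameMeta{}, "expected-dataset-id",
	0, "", // no instrument/detector overrides
	2, // beamVersion (placeholder)
	log)
fmt.Printf("%v|%v\n", output, err)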

func ReadBeamLocationsFile

func ReadBeamLocationsFile(path string, expectMultipleIJ bool, mainImagePMC int32, jobLog logger.ILogger) (dataConvertModels.BeamLocationByPMC, error)

ReadBeamLocationsFile - Reads a beam location CSV, either the old style (expectMultipleIJ=false) or the newer multi-image IJ coordinate format.
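
For instance (hypothetical file path; the PMC passed is the one assigned to the main context image, matching the parseBeamLocations examples above):

// Old-style CSV with single image_i/image_j columns
beams, err := ReadBeamLocationsFile("beamLocations.csv", false, 222, &logger.NullLogger{})
fmt.Printf("%v|%v\n", len(beams), err)

// New-style CSV with one PMC_<pmc>_MCC_i/j column pair per context image
beams, err = ReadBeamLocationsFile("beamLocations.csv", true, 222, &logger.NullLogger{})
fmt.Printf("%v|%v\n", len(beams), err)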

func ReadBulkMaxSpectra

func ReadBulkMaxSpectra(filePaths []string, jobLog logger.ILogger) (dataConvertModels.DetectorSampleByPMC, error)

Expects the bulk file path and max file path in an array as inputs. Order does not matter because the file names can be used to determine which is being read.
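
For example (hypothetical file names):

// Order of the two paths does not matter; the names identify which is bulk and which is max
bulkMax, err := ReadBulkMaxSpectra([]string{"scan_max.msa", "scan_bulk.msa"}, &logger.NullLogger{})
fmt.Printf("%v|%v\n", len(bulkMax), err)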

func ReadCSV

func ReadCSV(filePath string, headerIdx int, sep rune, jobLog logger.ILogger) ([][]string, error)

func ReadHousekeepingFile

func ReadHousekeepingFile(path string, headerRowCount int, jobLog logger.ILogger) (dataConvertModels.HousekeepingData, error)

func ReadMSAFileLines

func ReadMSAFileLines(lines []string, singleDetectorMSA bool, expectPMC bool, detectorADuplicate bool) ([]dataConvertModels.DetectorSample, error)

func ReadPseudoIntensityFile

func ReadPseudoIntensityFile(path string, expectHeaderRow bool, jobLog logger.ILogger) (dataConvertModels.PseudoIntensities, error)

func ReadPseudoIntensityRangesFile

func ReadPseudoIntensityRangesFile(path string, jobLog logger.ILogger) ([]dataConvertModels.PseudoIntensityRange, error)

func ReadSpectraCSV

func ReadSpectraCSV(path string, jobLog logger.ILogger) (dataConvertModels.DetectorSampleByPMC, error)

Types

This section is empty.
