Documentation ¶
Overview ¶
Example (CalculateTotals_AB_NeedsCombined) ¶
q, err := readQuantificationFile("./testdata/AB.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{90, 91, 95}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[]|Quantification must be for Combined detectors
Example (CalculateTotals_NoPMC) ¶
q, err := readQuantificationFile("./testdata/combined.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{68590, 68591, 68595}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[]|Quantification had no valid data for ROI PMCs
Example (CalculateTotals_Success) ¶
q, err := readQuantificationFile("./testdata/combined.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{90, 91, 95}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[CaO_%:7.5057006 FeO-T_%:10.621034 SiO2_%:41.48377 TiO2_%:0.7424]|<nil>
Example (CleanLogName) ¶
// Don't fix it... fmt.Println(cleanLogName("node00001_data.log")) // Do fix it... fmt.Println(cleanLogName("node00001.pmcs_stdout.log")) // Do fix it... fmt.Println(cleanLogName("NODE00001.PMCS_stdout.log"))
Output: node00001_data.log node00001_stdout.log NODE00001_stdout.log
Example (CombineQuantOutputs_BadPMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT NaN, 7.1, 415, 7840 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv, invalid PMC NaN at line 3
Example (CombineQuantOutputs_DownloadError) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, nil, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv
Example (CombineQuantOutputs_DuplicatePMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node003.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 18, 7.1, 415, 7840 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row3 PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 30, 1.3, 451, 7833 40, 8.1, 455, 7870 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 12, 6.1, 405, 7800 18, 7.1, 415, 7840 30, 5.1, 400, 7890 30, 1.3, 451, 7833 40, 8.1, 455, 7870
Example (CombineQuantOutputs_LastLineCutOff) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 31 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv, no PMC at line 3
Example (CombineQuantOutputs_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node003.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 18, 7.1, 415, 7840 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row3 PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 40, 8.1, 455, 7870 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 12, 6.1, 405, 7800 18, 7.1, 415, 7840 30, 5.1, 400, 7890 40, 8.1, 455, 7870
Example (ConvertQuantificationData) ¶
data := csvData{ []string{"PMC", "Ca_%", "Ca_int", "SCLK", "Ti_%", "filename", "RTT"}, [][]string{ []string{"23", "1.5", "5", "11111", "4", "fileA.msa", "44"}, []string{"70", "3.4", "32", "12345", "4.21", "fileB.msa", "45"}, }, } result, err := convertQuantificationData(data, []string{"PMC", "RTT", "SCLK", "filename"}) fmt.Printf("%v|%v\n", result, err)
Output: {[Ca_% Ca_int Ti_%] [F I F] [{23 44 11111 fileA.msa [1.5 5 4]} {70 45 12345 fileB.msa [3.4 32 4.21]}]}|<nil>
Example (DecodeMapFileNameColumn) ¶
rt, det, err := decodeMapFileNameColumn("file.txt") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A_MyRoiID") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Dwell_B") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_C") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("LongRead_B") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Scotland_something_00012.msa") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Scotland_something_00012_10keV_33.msa") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A_0123456789_873495_455.msa") fmt.Printf("%v|%v|%v\n", rt, det, err)
Output: ||decodeMapFileNameColumn: Invalid READTYPE in filename: "file.txt" Normal|A|<nil> Normal|A|<nil> Dwell|B|<nil> ||decodeMapFileNameColumn: Invalid DETECTOR_ID in filename: "Normal_C" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "LongRead_B" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "Scotland_something_00012.msa" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "Scotland_something_00012_10keV_33.msa" Normal|A|<nil>
Example (FilterListItems) ¶
// Should just filter indexes that are valid idxToIgnoreMap := map[int]bool{ -9: true, 1: true, 2: true, 5: true, 6: true, } fmt.Println(filterListItems([]string{"snowboarding", "is", "awesome", "says", "Peter", "Nemere"}, idxToIgnoreMap))
Output: [snowboarding says Peter]
Example (GetElements) ¶
fmt.Printf("%v", getElements([]string{"PMC", "SCLK", "Ca_%", "Ti_%", "Ca_int", "Ti_int", "livetime", "Mg_%", "chisq"}))
Output: [Ca Ti Mg]
Example (GetInterestingColIndexes) ¶
header := []string{"PMC", "K_%", "Ca_%", "Fe_%", "K_int", "Ca_int", "Fe_int", "K_err", "Ca_err", "Fe_err", "total_counts", "livetime", "chisq", "eVstart", "eV/ch", "res", "iter", "filename", "Events", "Triggers", "SCLK", "RTT"} interesting, err := getInterestingColIndexes(header, []string{"PMC", "filename", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) interesting, err = getInterestingColIndexes(header, []string{"K_%", "total_counts"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) // Bad cases interesting, err = getInterestingColIndexes(header, []string{"PMC", "TheFileName", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) header[5] = "SCLK" interesting, err = getInterestingColIndexes(header, []string{"PMC", "TheFileName", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) // 22 header items...
Output: "map[PMC:0 RTT:21 SCLK:20 filename:17]" "<nil>" "map[K_%:1 total_counts:10]" "<nil>" "map[]" "CSV column missing: TheFileName" "map[]" "Duplicate CSV column: SCLK"
Example (MakeColumnTypeList) ¶
data := csvData{[]string{"a", "b", "c", "d", "e"}, [][]string{[]string{"1.11111", "2", "3.1415962", "5", "6"}}} result, err := makeColumnTypeList(data, map[int]bool{2: true, 3: true}) fmt.Printf("%v|%v\n", result, err) result, err = makeColumnTypeList(data, map[int]bool{}) fmt.Printf("%v|%v\n", result, err) // Bad type data = csvData{[]string{"a", "b", "c", "d", "e"}, [][]string{[]string{"1.11111", "Wanaka", "3.1415962", "5"}}} result, err = makeColumnTypeList(data, map[int]bool{2: true, 3: true}) fmt.Printf("%v|%v\n", result, err) // Skipping the string 1 should make it work... result, err = makeColumnTypeList(data, map[int]bool{1: true, 3: true}) fmt.Printf("%v|%v\n", result, err)
Output: [F I I]|<nil> [F I F I I]|<nil> [F]|Failed to parse "Wanaka" as float or int at col 1/row 0 [F F]|<nil>
Example (MakeIndividualPMCListFileContents_AB) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", false, false, map[int32]bool{}))
Output: 5x11dataset.bin 15|Normal|A 15|Normal|B 7|Normal|A 7|Normal|B 388|Normal|A 388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_AB_Dwell) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", false, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin 15|Normal|A,15|Dwell|A 15|Normal|B,15|Dwell|B 7|Normal|A 7|Normal|B 388|Normal|A 388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_Combined) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", true, false, map[int32]bool{}))
Output: 5x11dataset.bin 15|Normal|A,15|Normal|B 7|Normal|A,7|Normal|B 388|Normal|A,388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_Combined_Dwell) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", true, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin 15|Normal|A,15|Normal|B,15|Dwell|A,15|Dwell|B 7|Normal|A,7|Normal|B 388|Normal|A,388|Normal|B <nil>
Example (MakeQuantJobPMCLists) ¶
fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8}, 3)) fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8, 9}, 3)) fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 3))
Output: [[1 2 3] [4 5 6] [7 8]] [[1 2 3] [4 5 6] [7 8 9]] [[1 2 3] [4 5 6] [7 8 9] [10]]
Example (MakeQuantedLocation) ¶
// Should just filter indexes that are valid fmt.Println(makeQuantedLocation([]string{"Ca_%", "PMC", "Ti_%", "RTT", "filename", "Ca_int"}, []string{"1.11111", "2", "3.1415962", "5", "FileA.msa", "6"}, map[int]bool{1: true, 3: true, 4: true}))
Output: {2 5 0 FileA.msa [1.11111 3.1415962 6]} <nil>
Example (MakeROIPMCListFileContents_AB) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", false, false, map[int32]bool{}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,15|Normal|A,388|Normal|A roi1-id:7|Normal|B,15|Normal|B,388|Normal|B roi2-id:7|Normal|A,450|Normal|A roi2-id:7|Normal|B,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_AB_Dwells) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", false, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,15|Normal|A,15|Dwell|A,388|Normal|A roi1-id:7|Normal|B,15|Normal|B,15|Dwell|B,388|Normal|B roi2-id:7|Normal|A,450|Normal|A roi2-id:7|Normal|B,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_Combined) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", true, false, map[int32]bool{}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,7|Normal|B,15|Normal|A,15|Normal|B,388|Normal|A,388|Normal|B roi2-id:7|Normal|A,7|Normal|B,450|Normal|A,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_Combined_Dwells) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", true, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,7|Normal|B,15|Normal|A,15|Normal|B,15|Dwell|A,15|Dwell|B,388|Normal|A,388|Normal|B roi2-id:7|Normal|A,7|Normal|B,450|Normal|A,450|Normal|B <nil>
Example (MatchPMCsWithDataset) ¶
l := &logger.StdOutLogger{} data := csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "0.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_111.msa", "4.5"}}} exp, err := readDatasetFile("./testdata/LagunaSalinasdataset.bin") fmt.Printf("Test file read: %v\n", err) fmt.Printf("%v, header[%v]=%v, data[%v]=%v\n", matchPMCsWithDataset(&data, exp, true, l), len(data.header)-1, data.header[5], len(data.data[0])-1, data.data[0][5]) data = csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "930.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_111.msa", "4.5"}}} fmt.Println(matchPMCsWithDataset(&data, exp, true, l)) data = csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "0.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_116.msa", "4.5"}}} fmt.Printf("%v, header[%v]=%v, data[%v]=%v\n", matchPMCsWithDataset(&data, exp, false, l), len(data.header)-1, data.header[5], len(data.data[0])-1, data.data[0][5])
Output: Test file read: <nil> <nil>, header[5]=PMC, data[5]=111 matchPMCsWithDataset Failed to match 1.00,930.40,0.00 to a PMC in dataset file <nil>, header[5]=PMC, data[5]=116
Example (ParseFloatColumnValue) ¶
fVal, err := parseFloatColumnValue("3.1415926") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("-3.15") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("1.234e02") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("nan") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("-nan") fmt.Printf("%v|%v\n", fVal, err)
Output: 3.1415925|<nil> -3.15|<nil> 123.4|<nil> 0|strconv.ParseFloat: parsing "": invalid syntax NaN|<nil> NaN|<nil>
Example (ProcessQuantROIsToPMCs_Combined_CSVRowCountROICountMismatch) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT, filename 15, 5.1, 400, 7890, Normal_A_roi1-id 7, 6.1, 405, 7800, Normal_A_roi1-id 12, 6.7, 407, 7700, Normal_A_roi1-id `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: PMC 12 in CSV: JobData/abc123/output/node001.pmcs_result.csv doesn't exist in ROI: roi1
Example (ProcessQuantROIsToPMCs_Combined_DownloadError) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ nil, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Failed to read map CSV: JobData/abc123/output/node001.pmcs_result.csv
Example (ProcessQuantROIsToPMCs_Combined_InvalidPMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 Qwerty, 6.1, 405, Normal_A_roi1-id, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Failed to process map CSV: JobData/abc123/output/node001.pmcs_result.csv, invalid PMC Qwerty at line 4
Example (ProcessQuantROIsToPMCs_Combined_NoFileNameCol) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 15, 5.1, 400, 7890 7, 6.1, 405, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Map csv: JobData/abc123/output/node001.pmcs_result.csv, does not contain a filename column (used to match up ROIs)
Example (ProcessQuantROIsToPMCs_Combined_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, filename, CaO_int, RTT 15, 5.1, Normal_A_roi1-id, 400, 7890 7, 6.1, Normal_B_roi2-id, 405, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: <nil> The custom header PMC, CaO_%, filename, CaO_int, RTT 7, 5.1, Normal_A_roi1-id, 400, 7890 15, 5.1, Normal_A_roi1-id, 400, 7890 388, 5.1, Normal_A_roi1-id, 400, 7890 7, 6.1, Normal_B_roi2-id, 405, 7800 450, 6.1, Normal_B_roi2-id, 405, 7800
Example (ProcessQuantROIsToPMCs_SeparateAB_InvalidFileName) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B, 7890 7, 6.1, 405, Normal_A, 7800 7, 6.2, 406, Normal_B, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", false, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Invalid file name read: Normal_B from map CSV: JobData/abc123/output/node001.pmcs_result.csv, line 4
Example (ProcessQuantROIsToPMCs_SeparateAB_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B_roi1-id, 7890 7, 6.1, 405, Normal_A_roi2-id, 7800 7, 6.2, 406, Normal_B_roi2-id, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", false, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, filename, RTT 7, 5.1, 400, Normal_A_roi1-id, 7890 7, 5.2, 401, Normal_B_roi1-id, 7890 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B_roi1-id, 7890 388, 5.1, 400, Normal_A_roi1-id, 7890 388, 5.2, 401, Normal_B_roi1-id, 7890 7, 6.1, 405, Normal_A_roi2-id, 7800 7, 6.2, 406, Normal_B_roi2-id, 7800 450, 6.1, 405, Normal_A_roi2-id, 7800 450, 6.2, 406, Normal_B_roi2-id, 7800
Example (ReadCSV) ¶
csv := `something header more header col 1,"col, 2", col_3 "value one",123, 456 value two,444,555 ` d, err := readCSV(csv, 2) fmt.Printf("%v|%v", d, err)
Output: {[col 1 col, 2 col_3] [[value one 123 456] [value two 444 555]]}|<nil>
Example (ValidateParameters) ¶
fmt.Printf("%v\n", validateParameters("-b,0,50,2,10 -f")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10.55 -o \"filename.whatever\" -f -Fe,1")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10;ls -al;echo -f")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10&&rm -rf ~/; -f"))
Output: <nil> <nil> Invalid parameters passed: -b,0,50,2,10;ls -al;echo -f Invalid parameters passed: -b,0,50,2,10&&rm -rf ~/; -f
Index ¶
- Constants
- func ConvertQuantificationCSV(logger logger.ILogger, data string, expectMetaColumns []string, ...) ([]byte, []string, error)
- func CreateJob(createParams *protos.QuantCreateParams, requestorUserId string, ...) (*protos.JobStatus, error)
- func FormCombinedCSV(quantIDs []string, dataPerDetectorPerPMC map[string]map[int32]quantItem, ...) string
- func FormMultiQuantSummary(dataPerDetectorPerPMC map[string]map[int32]quantItem, ...) *protos.QuantCombineSummary
- func ImportQuantCSV(hctx wsHelpers.HandlerContext, scanId string, importUser *protos.UserInfo, ...) (string, error)
- func IsValidCreateParam(createParams *protos.QuantCreateParams, hctx wsHelpers.HandlerContext) error
- func ListUserQuants(searchParams *protos.SearchParams, hctx wsHelpers.HandlerContext) ([]*protos.QuantificationSummary, map[string]*protos.OwnershipItem, error)
- func MultiQuantCombinedCSV(name string, scanId string, roiZStack []*protos.QuantCombineItem, ...) (combinedQuantData, error)
- func MultiQuantCompare(reqRoiId string, roiPMCs []int32, quantIds []string, exprPB *protos.Experiment, ...) ([]*protos.QuantComparisonTable, error)
- func RunAutoQuantifications(scanId string, svcs *services.APIServices)
- type QuantJobUpdater
Examples ¶
- Package (CalculateTotals_AB_NeedsCombined)
- Package (CalculateTotals_NoPMC)
- Package (CalculateTotals_Success)
- Package (CleanLogName)
- Package (CombineQuantOutputs_BadPMC)
- Package (CombineQuantOutputs_DownloadError)
- Package (CombineQuantOutputs_DuplicatePMC)
- Package (CombineQuantOutputs_LastLineCutOff)
- Package (CombineQuantOutputs_OK)
- Package (ConvertQuantificationData)
- Package (DecodeMapFileNameColumn)
- Package (FilterListItems)
- Package (GetElements)
- Package (GetInterestingColIndexes)
- Package (MakeColumnTypeList)
- Package (MakeIndividualPMCListFileContents_AB)
- Package (MakeIndividualPMCListFileContents_AB_Dwell)
- Package (MakeIndividualPMCListFileContents_Combined)
- Package (MakeIndividualPMCListFileContents_Combined_Dwell)
- Package (MakeQuantJobPMCLists)
- Package (MakeQuantedLocation)
- Package (MakeROIPMCListFileContents_AB)
- Package (MakeROIPMCListFileContents_AB_Dwells)
- Package (MakeROIPMCListFileContents_Combined)
- Package (MakeROIPMCListFileContents_Combined_Dwells)
- Package (MatchPMCsWithDataset)
- Package (ParseFloatColumnValue)
- Package (ProcessQuantROIsToPMCs_Combined_CSVRowCountROICountMismatch)
- Package (ProcessQuantROIsToPMCs_Combined_DownloadError)
- Package (ProcessQuantROIsToPMCs_Combined_InvalidPMC)
- Package (ProcessQuantROIsToPMCs_Combined_NoFileNameCol)
- Package (ProcessQuantROIsToPMCs_Combined_OK)
- Package (ProcessQuantROIsToPMCs_SeparateAB_InvalidFileName)
- Package (ProcessQuantROIsToPMCs_SeparateAB_OK)
- Package (ReadCSV)
- Package (ValidateParameters)
Constants ¶
View Source
const JobParamsFileName = "params.json"
JobParamsFileName - File name of job params file
View Source
const QuantModeABManualUpload = "ABManual"
View Source
const QuantModeABMultiQuant = "ABMultiQuant"
View Source
const QuantModeCombinedManualUpload = "ABManual" // NOTE(review): value duplicates QuantModeABManualUpload — presumably intended to be "CombinedManual" (cf. QuantModeCombinedMultiQuant); verify against source
View Source
const QuantModeCombinedMultiQuant = "CombinedMultiQuant"
Variables ¶
This section is empty.
Functions ¶
func ConvertQuantificationCSV ¶
func ConvertQuantificationCSV(logger logger.ILogger, data string, expectMetaColumns []string, exprPB *protos.Experiment, matchPMCByCoord bool, detectorIDOverride string, detectorDuplicateAB bool) ([]byte, []string, error)
ConvertQuantificationCSV - converts incoming string CSV data to serialised binary data. If exprPB is nil, rows won't be matched to dataset PMCs. Returns the serialised quantification bytes and the list of elements that were quantified.
func CreateJob ¶
func CreateJob(createParams *protos.QuantCreateParams, requestorUserId string, svcs *services.APIServices, sessUser *wsHelpers.SessionUser, wg *sync.WaitGroup, sendUpdate func(*protos.JobStatus)) (*protos.JobStatus, error)
CreateJob - creates a new quantification job
func FormCombinedCSV ¶
func FormMultiQuantSummary ¶
func ImportQuantCSV ¶
func IsValidCreateParam ¶
func IsValidCreateParam(createParams *protos.QuantCreateParams, hctx wsHelpers.HandlerContext) error
Validates the create parameters. Side-effect of modifying PmcsEncoded to just be an array of decoded PMCs
func ListUserQuants ¶
func ListUserQuants(searchParams *protos.SearchParams, hctx wsHelpers.HandlerContext) ([]*protos.QuantificationSummary, map[string]*protos.OwnershipItem, error)
func MultiQuantCombinedCSV ¶
func MultiQuantCombinedCSV( name string, scanId string, roiZStack []*protos.QuantCombineItem, exprPB *protos.Experiment, hctx wsHelpers.HandlerContext) (combinedQuantData, error)
func MultiQuantCompare ¶
func MultiQuantCompare(reqRoiId string, roiPMCs []int32, quantIds []string, exprPB *protos.Experiment, hctx wsHelpers.HandlerContext) ([]*protos.QuantComparisonTable, error)
func RunAutoQuantifications ¶
func RunAutoQuantifications(scanId string, svcs *services.APIServices)
Types ¶
type QuantJobUpdater ¶
type QuantJobUpdater struct {
// contains filtered or unexported fields
}
func MakeQuantJobUpdater ¶
func MakeQuantJobUpdater( params *protos.QuantCreateParams, session *melody.Session, notifier services.INotifier, db *mongo.Database, ) QuantJobUpdater
func (*QuantJobUpdater) SendQuantJobUpdate ¶
func (i *QuantJobUpdater) SendQuantJobUpdate(status *protos.JobStatus)
Source Files ¶
Directories ¶
Path | Synopsis
---- | --------
(subdirectory) | Exposes interfaces and structures required to run PIQUANT in the Kubernetes cluster along with functions to access quantification files, logs, results and summaries of quant jobs.
Click to show internal directories.
Click to hide internal directories.