Documentation ¶
Overview ¶
Example (CalculateTotals_AB_NeedsCombined) ¶
q, err := readQuantificationFile("./testdata/AB.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{90, 91, 95}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[]|Quantification must be for Combined detectors
Example (CalculateTotals_NoPMC) ¶
q, err := readQuantificationFile("./testdata/combined.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{68590, 68591, 68595}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[]|Quantification had no valid data for ROI PMCs
Example (CalculateTotals_Success) ¶
q, err := readQuantificationFile("./testdata/combined.bin") fmt.Printf("%v\n", err) if err == nil { result, err := calculateTotals(q, []int{90, 91, 95}) fmt.Printf("%v|%v\n", result, err) }
Output: <nil> map[CaO_%:7.5057006 FeO-T_%:10.621034 SiO2_%:41.48377 TiO2_%:0.7424]|<nil>
Example (CleanLogName) ¶
// Don't fix it... fmt.Println(cleanLogName("node00001_data.log")) // Do fix it... fmt.Println(cleanLogName("node00001.pmcs_stdout.log")) // Do fix it... fmt.Println(cleanLogName("NODE00001.PMCS_stdout.log"))
Output: node00001_data.log node00001_stdout.log NODE00001_stdout.log
Example (CombineQuantOutputs_BadPMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT NaN, 7.1, 415, 7840 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv, invalid PMC NaN at line 3
Example (CombineQuantOutputs_DownloadError) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, nil, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv
Example (CombineQuantOutputs_DuplicatePMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node003.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 18, 7.1, 415, 7840 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row3 PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 30, 1.3, 451, 7833 40, 8.1, 455, 7870 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 12, 6.1, 405, 7800 18, 7.1, 415, 7840 30, 5.1, 400, 7890 30, 1.3, 451, 7833 40, 8.1, 455, 7870
Example (CombineQuantOutputs_LastLineCutOff) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 31 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: Failed to combine map segment: JobData/abc123/output/node002.pmcs_result.csv, no PMC at line 3
Example (CombineQuantOutputs_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node002.pmcs_result.csv"), }, { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node003.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 30, 5.1, 400, 7890 12, 6.1, 405, 7800 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row2 PMC, CaO_%, CaO_int, RTT 18, 7.1, 415, 7840 `))), }, { Body: io.NopCloser(bytes.NewReader([]byte(`Header row3 PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 40, 8.1, 455, 7870 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) combinedCSV, err := combineQuantOutputs(fs, jobsBucket, "JobData/abc123", "The custom header", []string{"node001.pmcs", "node002.pmcs", "node003.pmcs"}) fmt.Printf("%v\n", err) fmt.Println(combinedCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, RTT 3, 1.1, 450, 7830 12, 6.1, 405, 7800 18, 7.1, 415, 7840 30, 5.1, 400, 7890 40, 8.1, 455, 7870
Example (ConvertQuantificationData) ¶
data := csvData{ []string{"PMC", "Ca_%", "Ca_int", "SCLK", "Ti_%", "filename", "RTT"}, [][]string{ []string{"23", "1.5", "5", "11111", "4", "fileA.msa", "44"}, []string{"70", "3.4", "32", "12345", "4.21", "fileB.msa", "45"}, }, } result, err := convertQuantificationData(data, []string{"PMC", "RTT", "SCLK", "filename"}) fmt.Printf("%v|%v\n", result, err)
Output: {[Ca_% Ca_int Ti_%] [F I F] [{23 44 11111 fileA.msa [1.5 5 4]} {70 45 12345 fileB.msa [3.4 32 4.21]}]}|<nil>
Example (DecodeMapFileNameColumn) ¶
rt, det, err := decodeMapFileNameColumn("file.txt") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A_MyRoiID") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Dwell_B") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_C") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("LongRead_B") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Scotland_something_00012.msa") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Scotland_something_00012_10keV_33.msa") fmt.Printf("%v|%v|%v\n", rt, det, err) rt, det, err = decodeMapFileNameColumn("Normal_A_0123456789_873495_455.msa") fmt.Printf("%v|%v|%v\n", rt, det, err)
Output: ||decodeMapFileNameColumn: Invalid READTYPE in filename: "file.txt" Normal|A|<nil> Normal|A|<nil> Dwell|B|<nil> ||decodeMapFileNameColumn: Invalid DETECTOR_ID in filename: "Normal_C" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "LongRead_B" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "Scotland_something_00012.msa" ||decodeMapFileNameColumn: Invalid READTYPE in filename: "Scotland_something_00012_10keV_33.msa" Normal|A|<nil>
Example (FilterListItems) ¶
// Should just filter indexes that are valid idxToIgnoreMap := map[int]bool{ -9: true, 1: true, 2: true, 5: true, 6: true, } fmt.Println(filterListItems([]string{"snowboarding", "is", "awesome", "says", "Peter", "Nemere"}, idxToIgnoreMap))
Output: [snowboarding says Peter]
Example (GetElements) ¶
fmt.Printf("%v", getElements([]string{"PMC", "SCLK", "Ca_%", "Ti_%", "Ca_int", "Ti_int", "livetime", "Mg_%", "chisq"}))
Output: [Ca Ti Mg]
Example (GetExistingAutoQuants) ¶
db := wstestlib.GetDB() // Ensure none ctx := context.TODO() coll := db.Collection(dbCollections.QuantificationsName) fmt.Printf("Drop: %v\n", coll.Drop(ctx)) names := []string{"AutoQuant-PDS(AB)", "AutoQuant-PIXL(AB)", "AutoQuant-PDS(Combined)", "AutoQuant-PIXL(Combined)"} existing, err := getExistingAutoQuants("123", names, db) fmt.Println("Test missing") fmt.Printf("%v\n", err) fmt.Printf("Read:%v\n\n", strings.Join(existing, ",")) autoQuant := &protos.QuantificationSummary{ Id: "PIXLAB123", ScanId: "123", Params: &protos.QuantStartingParameters{ UserParams: &protos.QuantCreateParams{ Command: "map", Name: "AutoQuant-PIXL(AB)", ScanId: "123", Elements: []string{"Na", "Mg"}, DetectorConfig: "PIXL/PiquantConfigs/v7", Parameters: "-Fe,1", QuantMode: "Combined", }, PmcCount: 51, ScanFilePath: "Datasets/104202753/dataset.bin", }, Elements: []string{"Na2O", "MgO"}, Status: &protos.JobStatus{ JobId: "PIXLAB123", Status: 5, Message: "Nodes ran: 1", EndUnixTimeSec: 1670988052, OutputFilePath: "Quantifications/104202753/auth0|62eda29040fd995f305e2322", OtherLogFiles: []string{"node00001_piquant.log", "node00001_stdout.log"}, }, } _, err = coll.InsertOne(ctx, autoQuant, options.InsertOne()) fmt.Printf("Insert: %v\n", err) //names := []string{"AutoQuant-PIXL(AB)"} existing, err = getExistingAutoQuants("123", names, db) fmt.Println("Test exists") fmt.Printf("%v\n", err) fmt.Printf("%v\n", strings.Join(existing, ","))
Output: Drop: <nil> Test missing <nil> Read: Insert: <nil> Test exists <nil> AutoQuant-PIXL(AB) (id: PIXLAB123)
Example (GetInterestingColIndexes) ¶
header := []string{"PMC", "K_%", "Ca_%", "Fe_%", "K_int", "Ca_int", "Fe_int", "K_err", "Ca_err", "Fe_err", "total_counts", "livetime", "chisq", "eVstart", "eV/ch", "res", "iter", "filename", "Events", "Triggers", "SCLK", "RTT"} interesting, err := getInterestingColIndexes(header, []string{"PMC", "filename", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) interesting, err = getInterestingColIndexes(header, []string{"K_%", "total_counts"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) // Bad cases interesting, err = getInterestingColIndexes(header, []string{"PMC", "TheFileName", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) header[5] = "SCLK" interesting, err = getInterestingColIndexes(header, []string{"PMC", "TheFileName", "SCLK", "RTT"}) fmt.Printf("\"%v\" \"%v\"\n", interesting, err) // 22 header items...
Output: "map[PMC:0 RTT:21 SCLK:20 filename:17]" "<nil>" "map[K_%:1 total_counts:10]" "<nil>" "map[]" "CSV column missing: TheFileName" "map[]" "Duplicate CSV column: SCLK"
Example (MakeColumnTypeList) ¶
data := csvData{[]string{"a", "b", "c", "d", "e"}, [][]string{[]string{"1.11111", "2", "3.1415962", "5", "6"}}} result, err := makeColumnTypeList(data, map[int]bool{2: true, 3: true}) fmt.Printf("%v|%v\n", result, err) result, err = makeColumnTypeList(data, map[int]bool{}) fmt.Printf("%v|%v\n", result, err) // Bad type data = csvData{[]string{"a", "b", "c", "d", "e"}, [][]string{[]string{"1.11111", "Wanaka", "3.1415962", "5"}}} result, err = makeColumnTypeList(data, map[int]bool{2: true, 3: true}) fmt.Printf("%v|%v\n", result, err) // Skipping the string 1 should make it work... result, err = makeColumnTypeList(data, map[int]bool{1: true, 3: true}) fmt.Printf("%v|%v\n", result, err)
Output: [F I I]|<nil> [F I F I I]|<nil> [F]|Failed to parse "Wanaka" as float or int at col 1/row 0 [F F]|<nil>
Example (MakeIndividualPMCListFileContents_AB) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", false, false, map[int32]bool{}))
Output: 5x11dataset.bin 15|Normal|A 15|Normal|B 7|Normal|A 7|Normal|B 388|Normal|A 388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_AB_Dwell) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", false, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin 15|Normal|A,15|Dwell|A 15|Normal|B,15|Dwell|B 7|Normal|A 7|Normal|B 388|Normal|A 388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_Combined) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", true, false, map[int32]bool{}))
Output: 5x11dataset.bin 15|Normal|A,15|Normal|B 7|Normal|A,7|Normal|B 388|Normal|A,388|Normal|B <nil>
Example (MakeIndividualPMCListFileContents_Combined_Dwell) ¶
fmt.Println(makeIndividualPMCListFileContents([]int32{15, 7, 388}, "5x11dataset.bin", true, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin 15|Normal|A,15|Normal|B,15|Dwell|A,15|Dwell|B 7|Normal|A,7|Normal|B 388|Normal|A,388|Normal|B <nil>
Example (MakeQuantJobPMCLists) ¶
fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8}, 3)) fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8, 9}, 3)) fmt.Println(makeQuantJobPMCLists([]int32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, 3))
Output: [[1 2 3] [4 5 6] [7 8]] [[1 2 3] [4 5 6] [7 8 9]] [[1 2 3] [4 5 6] [7 8 9] [10]]
Example (MakeQuantedLocation) ¶
// Should just filter indexes that are valid fmt.Println(makeQuantedLocation([]string{"Ca_%", "PMC", "Ti_%", "RTT", "filename", "Ca_int"}, []string{"1.11111", "2", "3.1415962", "5", "FileA.msa", "6"}, map[int]bool{1: true, 3: true, 4: true}))
Output: {2 5 0 FileA.msa [1.11111 3.1415962 6]} <nil>
Example (MakeROIPMCListFileContents_AB) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", false, false, map[int32]bool{}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,15|Normal|A,388|Normal|A roi1-id:7|Normal|B,15|Normal|B,388|Normal|B roi2-id:7|Normal|A,450|Normal|A roi2-id:7|Normal|B,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_AB_Dwells) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", false, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,15|Normal|A,15|Dwell|A,388|Normal|A roi1-id:7|Normal|B,15|Normal|B,15|Dwell|B,388|Normal|B roi2-id:7|Normal|A,450|Normal|A roi2-id:7|Normal|B,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_Combined) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", true, false, map[int32]bool{}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,7|Normal|B,15|Normal|A,15|Normal|B,388|Normal|A,388|Normal|B roi2-id:7|Normal|A,7|Normal|B,450|Normal|A,450|Normal|B <nil>
Example (MakeROIPMCListFileContents_Combined_Dwells) ¶
fmt.Println(makeROIPMCListFileContents(testROIs, "5x11dataset.bin", true, true, map[int32]bool{15: true}))
Output: 5x11dataset.bin roi1-id:7|Normal|A,7|Normal|B,15|Normal|A,15|Normal|B,15|Dwell|A,15|Dwell|B,388|Normal|A,388|Normal|B roi2-id:7|Normal|A,7|Normal|B,450|Normal|A,450|Normal|B <nil>
Example (MatchPMCsWithDataset) ¶
l := &logger.StdOutLogger{} data := csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "0.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_111.msa", "4.5"}}} exp, err := readDatasetFile("./testdata/LagunaSalinasdataset.bin") fmt.Printf("Test file read: %v\n", err) fmt.Printf("%v, header[%v]=%v, data[%v]=%v\n", matchPMCsWithDataset(&data, exp, true, l), len(data.header)-1, data.header[5], len(data.data[0])-1, data.data[0][5]) data = csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "930.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_111.msa", "4.5"}}} fmt.Println(matchPMCsWithDataset(&data, exp, true, l)) data = csvData{[]string{"X", "Y", "Z", "filename", "Ca_%"}, [][]string{[]string{"1", "0.40", "0", "Roastt_Laguna_Salinas_28kV_230uA_03_03_2020_116.msa", "4.5"}}} fmt.Printf("%v, header[%v]=%v, data[%v]=%v\n", matchPMCsWithDataset(&data, exp, false, l), len(data.header)-1, data.header[5], len(data.data[0])-1, data.data[0][5])
Output: Test file read: <nil> <nil>, header[5]=PMC, data[5]=111 matchPMCsWithDataset Failed to match 1.00,930.40,0.00 to a PMC in dataset file <nil>, header[5]=PMC, data[5]=116
Example (ParseFloatColumnValue) ¶
fVal, err := parseFloatColumnValue("3.1415926") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("-3.15") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("1.234e02") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("nan") fmt.Printf("%v|%v\n", fVal, err) fVal, err = parseFloatColumnValue("-nan") fmt.Printf("%v|%v\n", fVal, err)
Output: 3.1415925|<nil> -3.15|<nil> 123.4|<nil> 0|strconv.ParseFloat: parsing "": invalid syntax NaN|<nil> NaN|<nil>
Example (ProcessQuantROIsToPMCs_Combined_CSVRowCountROICountMismatch) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT, filename 15, 5.1, 400, 7890, Normal_A_roi1-id 7, 6.1, 405, 7800, Normal_A_roi1-id 12, 6.7, 407, 7700, Normal_A_roi1-id `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: PMC 12 in CSV: JobData/abc123/output/node001.pmcs_result.csv doesn't exist in ROI: roi1
Example (ProcessQuantROIsToPMCs_Combined_DownloadError) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ nil, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Failed to read map CSV: JobData/abc123/output/node001.pmcs_result.csv
Example (ProcessQuantROIsToPMCs_Combined_InvalidPMC) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 Qwerty, 6.1, 405, Normal_A_roi1-id, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Failed to process map CSV: JobData/abc123/output/node001.pmcs_result.csv, invalid PMC Qwerty at line 4
Example (ProcessQuantROIsToPMCs_Combined_NoFileNameCol) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, RTT 15, 5.1, 400, 7890 7, 6.1, 405, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Map csv: JobData/abc123/output/node001.pmcs_result.csv, does not contain a filename column (used to match up ROIs)
Example (ProcessQuantROIsToPMCs_Combined_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, filename, CaO_int, RTT 15, 5.1, Normal_A_roi1-id, 400, 7890 7, 6.1, Normal_B_roi2-id, 405, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", true, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: <nil> The custom header PMC, CaO_%, filename, CaO_int, RTT 7, 5.1, Normal_A_roi1-id, 400, 7890 15, 5.1, Normal_A_roi1-id, 400, 7890 388, 5.1, Normal_A_roi1-id, 400, 7890 7, 6.1, Normal_B_roi2-id, 405, 7800 450, 6.1, Normal_B_roi2-id, 405, 7800
Example (ProcessQuantROIsToPMCs_SeparateAB_InvalidFileName) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B, 7890 7, 6.1, 405, Normal_A, 7800 7, 6.2, 406, Normal_B, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", false, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: Invalid file name read: Normal_B from map CSV: JobData/abc123/output/node001.pmcs_result.csv, line 4
Example (ProcessQuantROIsToPMCs_SeparateAB_OK) ¶
var mockS3 awsutil.MockS3Client defer mockS3.FinishTest() const jobsBucket = "jobs-bucket" // Some of our files are empty, not there, have content // and they're meant to end up combined into one response... mockS3.ExpGetObjectInput = []s3.GetObjectInput{ { Bucket: aws.String(jobsBucket), Key: aws.String("JobData/abc123/output/node001.pmcs_result.csv"), }, } mockS3.QueuedGetObjectOutput = []*s3.GetObjectOutput{ { Body: io.NopCloser(bytes.NewReader([]byte(`Header row1 PMC, CaO_%, CaO_int, filename, RTT 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B_roi1-id, 7890 7, 6.1, 405, Normal_A_roi2-id, 7800 7, 6.2, 406, Normal_B_roi2-id, 7800 `))), }, } fs := fileaccess.MakeS3Access(&mockS3) outputCSV, err := processQuantROIsToPMCs(fs, jobsBucket, "JobData/abc123", "The custom header", "node001.pmcs", false, testROIs) fmt.Printf("%v\n", err) fmt.Println(outputCSV)
Output: <nil> The custom header PMC, CaO_%, CaO_int, filename, RTT 7, 5.1, 400, Normal_A_roi1-id, 7890 7, 5.2, 401, Normal_B_roi1-id, 7890 15, 5.1, 400, Normal_A_roi1-id, 7890 15, 5.2, 401, Normal_B_roi1-id, 7890 388, 5.1, 400, Normal_A_roi1-id, 7890 388, 5.2, 401, Normal_B_roi1-id, 7890 7, 6.1, 405, Normal_A_roi2-id, 7800 7, 6.2, 406, Normal_B_roi2-id, 7800 450, 6.1, 405, Normal_A_roi2-id, 7800 450, 6.2, 406, Normal_B_roi2-id, 7800
Example (ReadCSV) ¶
csv := `something header more header col 1,"col, 2", col_3 "value one",123, 456 value two,444,555 ` d, err := readCSV(csv, 2) fmt.Printf("%v|%v", d, err)
Output: {[col 1 col, 2 col_3] [[value one 123 456] [value two 444 555]]}|<nil>
Example (ReadQuantifiablePMCs) ¶
expr, err := readDatasetFile("./testdata/LagunaSalinasdataset.bin") fmt.Printf("Read Laguna: %v\n", err) pmcs, err := readQuantifiablePMCs(expr, "123", &logger.StdOutLoggerForTest{}) fmt.Printf("PMCRead: %v\n", err) fmt.Printf("PMCs: %v\n", pmcs) expr, err = readDatasetFile("./testdata/Naltsosdataset.bin") fmt.Printf("Read Naltsos: %v\n", err) pmcs, err = readQuantifiablePMCs(expr, "123", &logger.StdOutLoggerForTest{}) fmt.Printf("PMCRead: %v\n", err) fmt.Printf("PMCs: %v\n", pmcs)
Output: Read Laguna: <nil> PMCRead: <nil> PMCs: [] Read Naltsos: <nil> PMCRead: <nil> PMCs: [93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 216]
Example (ValidateParameters) ¶
fmt.Printf("%v\n", validateParameters("-b,0,50,2,10 -f")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10.55 -o \"filename.whatever\" -f -Fe,1")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10;ls -al;echo -f")) fmt.Printf("%v\n", validateParameters("-b,0,50,2,10&&rm -rf ~/; -f"))
Output: <nil> <nil> Invalid parameters passed: -b,0,50,2,10;ls -al;echo -f Invalid parameters passed: -b,0,50,2,10&&rm -rf ~/; -f
Index ¶
- Constants
- func ConvertQuantificationCSV(logger logger.ILogger, data string, expectMetaColumns []string, ...) ([]byte, []string, error)
- func CreateJob(createParams *protos.QuantCreateParams, requestorUserId string, ...) (*protos.JobStatus, error)
- func FormCombinedCSV(quantIDs []string, dataPerDetectorPerPMC map[string]map[int32]quantItem, ...) string
- func FormMultiQuantSummary(dataPerDetectorPerPMC map[string]map[int32]quantItem, ...) *protos.QuantCombineSummary
- func ImportQuantCSV(hctx wsHelpers.HandlerContext, scanId string, importUser *protos.UserInfo, ...) (string, error)
- func IsValidCreateParam(createParams *protos.QuantCreateParams, hctx wsHelpers.HandlerContext) error
- func ListUserQuants(searchParams *protos.SearchParams, hctx wsHelpers.HandlerContext) ([]*protos.QuantificationSummary, map[string]*protos.OwnershipItem, error)
- func MultiQuantCombinedCSV(name string, scanId string, roiZStack []*protos.QuantCombineItem, ...) (combinedQuantData, error)
- func MultiQuantCompare(reqRoiId string, roiPMCs []int32, quantIds []string, exprPB *protos.Experiment, ...) ([]*protos.QuantComparisonTable, error)
- func RunAutoQuantifications(scanId string, svcs *services.APIServices, onlyIfNotExists bool)
- type QuantJobUpdater
Examples ¶
- Package (CalculateTotals_AB_NeedsCombined)
- Package (CalculateTotals_NoPMC)
- Package (CalculateTotals_Success)
- Package (CleanLogName)
- Package (CombineQuantOutputs_BadPMC)
- Package (CombineQuantOutputs_DownloadError)
- Package (CombineQuantOutputs_DuplicatePMC)
- Package (CombineQuantOutputs_LastLineCutOff)
- Package (CombineQuantOutputs_OK)
- Package (ConvertQuantificationData)
- Package (DecodeMapFileNameColumn)
- Package (FilterListItems)
- Package (GetElements)
- Package (GetExistingAutoQuants)
- Package (GetInterestingColIndexes)
- Package (MakeColumnTypeList)
- Package (MakeIndividualPMCListFileContents_AB)
- Package (MakeIndividualPMCListFileContents_AB_Dwell)
- Package (MakeIndividualPMCListFileContents_Combined)
- Package (MakeIndividualPMCListFileContents_Combined_Dwell)
- Package (MakeQuantJobPMCLists)
- Package (MakeQuantedLocation)
- Package (MakeROIPMCListFileContents_AB)
- Package (MakeROIPMCListFileContents_AB_Dwells)
- Package (MakeROIPMCListFileContents_Combined)
- Package (MakeROIPMCListFileContents_Combined_Dwells)
- Package (MatchPMCsWithDataset)
- Package (ParseFloatColumnValue)
- Package (ProcessQuantROIsToPMCs_Combined_CSVRowCountROICountMismatch)
- Package (ProcessQuantROIsToPMCs_Combined_DownloadError)
- Package (ProcessQuantROIsToPMCs_Combined_InvalidPMC)
- Package (ProcessQuantROIsToPMCs_Combined_NoFileNameCol)
- Package (ProcessQuantROIsToPMCs_Combined_OK)
- Package (ProcessQuantROIsToPMCs_SeparateAB_InvalidFileName)
- Package (ProcessQuantROIsToPMCs_SeparateAB_OK)
- Package (ReadCSV)
- Package (ReadQuantifiablePMCs)
- Package (ValidateParameters)
Constants ¶
View Source
const JobParamsFileName = "params.json"
JobParamsFileName - File name of job params file
View Source
const QuantModeABManualUpload = "ABManual"
View Source
const QuantModeABMultiQuant = "ABMultiQuant"
View Source
const QuantModeCombinedManualUpload = "ABManual" — NOTE(review): this has the same value as QuantModeABManualUpload above; the value was likely intended to be "CombinedManual". Verify against the source before relying on these constants to distinguish modes.
View Source
const QuantModeCombinedMultiQuant = "CombinedMultiQuant"
Variables ¶
This section is empty.
Functions ¶
func ConvertQuantificationCSV ¶
func ConvertQuantificationCSV(logger logger.ILogger, data string, expectMetaColumns []string, exprPB *protos.Experiment, matchPMCByCoord bool, detectorIDOverride string, detectorDuplicateAB bool) ([]byte, []string, error)
ConvertQuantificationCSV - converts from incoming string CSV data to serialised binary data. If exprPB is nil, we won't match to dataset PMCs. Returns the serialised quantification bytes and the elements that were quantified.
func CreateJob ¶
func CreateJob(createParams *protos.QuantCreateParams, requestorUserId string, svcs *services.APIServices, sessUser *wsHelpers.SessionUser, sendUpdate func(*protos.JobStatus)) (*protos.JobStatus, error)
CreateJob - creates a new quantification job
func FormCombinedCSV ¶
func FormMultiQuantSummary ¶
func ImportQuantCSV ¶
func IsValidCreateParam ¶
func IsValidCreateParam(createParams *protos.QuantCreateParams, hctx wsHelpers.HandlerContext) error
Validates the create parameters. Side-effect of modifying PmcsEncoded to just be an array of decoded PMCs
func ListUserQuants ¶
func ListUserQuants(searchParams *protos.SearchParams, hctx wsHelpers.HandlerContext) ([]*protos.QuantificationSummary, map[string]*protos.OwnershipItem, error)
func MultiQuantCombinedCSV ¶
func MultiQuantCombinedCSV( name string, scanId string, roiZStack []*protos.QuantCombineItem, exprPB *protos.Experiment, hctx wsHelpers.HandlerContext) (combinedQuantData, error)
func MultiQuantCompare ¶
func MultiQuantCompare(reqRoiId string, roiPMCs []int32, quantIds []string, exprPB *protos.Experiment, hctx wsHelpers.HandlerContext) ([]*protos.QuantComparisonTable, error)
func RunAutoQuantifications ¶
func RunAutoQuantifications(scanId string, svcs *services.APIServices, onlyIfNotExists bool)
Types ¶
type QuantJobUpdater ¶
type QuantJobUpdater struct {
// contains filtered or unexported fields
}
func MakeQuantJobUpdater ¶
func MakeQuantJobUpdater( params *protos.QuantCreateParams, session *melody.Session, notifier services.INotifier, db *mongo.Database, fs fileaccess.FileAccess, usersBucket string, ) QuantJobUpdater
func (*QuantJobUpdater) SendEphemeralQuantJobUpdate ¶ added in v4.31.4
func (i *QuantJobUpdater) SendEphemeralQuantJobUpdate(status *protos.JobStatus)
We send updates for ephemeral quant jobs (that are short running) this way...
func (*QuantJobUpdater) SendQuantJobUpdate ¶
func (i *QuantJobUpdater) SendQuantJobUpdate(status *protos.JobStatus)
We send updates for quant jobs (that are long running, have names, stick around) this way...
Source Files ¶
Directories ¶
Path | Synopsis
---|---
*(subdirectory path lost in extraction)* | Exposes interfaces and structures required to run PIQUANT in the Kubernetes cluster along with functions to access quantification files, logs, results and summaries of quant jobs.
Click to show internal directories.
Click to hide internal directories.