Add "data optimization" which treats empty fields in data.csv as equivalent to the previous entry.
This optimization takes data size down from 41 MB to 16MB which is ~40% which is huge!pull/11602/head
parent
424c954770
commit
419f2506e6
|
@ -49,12 +49,15 @@ async function loadTestData() {
|
|||
}
|
||||
|
||||
const testData = [];
|
||||
let lineData = ["", "", "", "", "", ""];
|
||||
for await (const line of lines) {
|
||||
const splitLine = line.split(",");
|
||||
let splitLine = line.split(",");
|
||||
if (splitLine.length != 6) {
|
||||
console.warn(`Found line with wrong number of fields. Actual: ${splitLine.length} Expected: 6. Line: "${line}"`);
|
||||
continue;
|
||||
}
|
||||
splitLine = splitLine.map((value, index) => value === "" ? lineData[index] : value);
|
||||
lineData = splitLine;
|
||||
if (!isValidEnumValue(testStatus, splitLine[4])) {
|
||||
console.warn(`Invalid test status provided. Actual: ${splitLine[4]} Expected: One of ${Object.values(testStatus).join(", ")}`);
|
||||
continue;
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
#!/bin/bash
# CSV "data optimization" filter (stdin -> stdout): for each column, blank out
# a field whose value equals the value emitted for that same column on the
# previous line, so downstream readers can treat an empty field as "same as
# the previous entry". j[i] remembers the last distinct value seen in column i.
# OFS = FS keeps the rewritten record comma-delimited when fields are reassigned.
awk -F, 'BEGIN {OFS = FS} { for(i=1; i<=NF; i++) { if($i == j[i]) { $i = ""; } else { j[i] = $i; } } printf "%s\n",$0 }'
|
Loading…
Reference in New Issue