From 63f0f8774b7903f46d18b80f6b3348490fa2e109 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Fri, 21 May 2021 16:11:00 -0700 Subject: [PATCH 01/66] Create script to collect log data. --- hack/test-flake-chart/collect_data.sh | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100755 hack/test-flake-chart/collect_data.sh diff --git a/hack/test-flake-chart/collect_data.sh b/hack/test-flake-chart/collect_data.sh new file mode 100755 index 0000000000..1707ab3028 --- /dev/null +++ b/hack/test-flake-chart/collect_data.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Create temp path for partial data (storing everything but the commit date.) +PARTIAL_DATA_PATH=$(mktemp) +# Write +echo "Partial path: $PARTIAL_DATA_PATH" 1>&2 + +# Print header. +printf "Commit Hash,Commit Date,Environment,Test,Status\n" + +# 1) "cat" together all summary files. +# 2) Turn each test in each summary file to a CSV line containing its commit hash, environment, test, and status. +# 3) Copy partial data to $PARTIAL_DATA_PATH to join with date later. +# 4) Extract only commit hash for each row +# 5) Make the commit hashes unique (we assume that gsutil cats files from the same hash next to each other). +# Also force buffering to occur per line so remainder of pipe can continue to process. +# 6) Execute git log for each commit to get the date of each. +# 7) Join dates with test data. +gsutil cat gs://minikube-builds/logs/master/*/*_summary.json \ +| jq -r '({commit: .Detail.Details, environment: .Detail.Name, test: .PassedTests[]?, status: "Passed"},{commit: .Detail.Details, environment: .Detail.Name, test: .FailedTests[]?, status: "Failed"},{commit: .Detail.Details, environment: .Detail.Name, test: .SkippedTests[]?, status: "Skipped"}) | .commit + "," + .environment + "," + .test + "," + .status' \ +| tee $PARTIAL_DATA_PATH \ +| sed -r -n 's/^([^,]+),.*/\1/p' \ +| stdbuf -oL -eL uniq \ +| xargs -I {} git log -1 --pretty=format:"{},%as%n" {} \ +| join -t "," - $PARTIAL_DATA_PATH From abdbfa63b60b5442a72ec1a15a12770f2d70c662 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 24 May 2021 11:15:56 -0700 Subject: [PATCH 02/66] Create initial HTML and JS for flake rate site. --- hack/test-flake-chart/flake_chart.html | 10 ++++++++++ hack/test-flake-chart/flake_chart.js | 18 ++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 hack/test-flake-chart/flake_chart.html create mode 100644 hack/test-flake-chart/flake_chart.js diff --git a/hack/test-flake-chart/flake_chart.html b/hack/test-flake-chart/flake_chart.html new file mode 100644 index 0000000000..333b61f4cf --- /dev/null +++ b/hack/test-flake-chart/flake_chart.html @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js new file mode 100644 index 0000000000..274a7429b9 --- /dev/null +++ b/hack/test-flake-chart/flake_chart.js @@ -0,0 +1,18 @@ + +function displayError(message) { + console.error(message); +} + +async function init() { + const response = await fetch("content.txt"); + if (!response.ok) { + const responseText = await response.text(); + displayError(`Failed to fetch data from GCS bucket. Error: ${responseText}`); + return; + } + + const responseText = await response.text(); + console.log(responseText); +} + +init(); From b33e27243501ae755298ed15cd28bf67460fc56a Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 24 May 2021 13:20:20 -0700 Subject: [PATCH 03/66] Create basic parsing of CSV test data. 
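The five columns consumed here are the ones collect_data.sh prints: Commit Hash, Commit Date, Environment, Test, Status. The JavaScript validates each line by splitting on commas, requiring exactly five fields, and rejecting unknown statuses. A rough Go sketch of the same checks (the sample row is invented for illustration; this snippet is not part of the change):

    package main

    import (
        "fmt"
        "strings"
    )

    // A sample row in the five-column format produced by collect_data.sh
    // (hash and values invented for illustration).
    const sampleRow = "abc123,2021-05-20,Docker_Linux,TestFunctional/parallel/LogsCmd,Passed"

    var validStatus = map[string]bool{"Passed": true, "Failed": true, "Skipped": true}

    // parseRow mirrors the checks flake_chart.js makes on each line.
    func parseRow(line string) ([]string, error) {
        fields := strings.Split(line, ",")
        if len(fields) != 5 {
            return nil, fmt.Errorf("expected 5 fields, got %d", len(fields))
        }
        if !validStatus[fields[4]] {
            return nil, fmt.Errorf("invalid status %q", fields[4])
        }
        return fields, nil
    }

    func main() {
        fields, err := parseRow(sampleRow)
        if err != nil {
            panic(err)
        }
        fmt.Println(fields[3], "=>", fields[4]) // TestFunctional/parallel/LogsCmd => Passed
    }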
--- hack/test-flake-chart/flake_chart.js | 68 ++++++++++++++++++++++++++-- 1 file changed, 65 insertions(+), 3 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 274a7429b9..f823394598 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -1,18 +1,80 @@ +// Displays an error message to the UI. Any previous message will be erased. function displayError(message) { console.error(message); } +// Creates a generator that reads the response body one line at a time. +async function* bodyByLinesIterator(response) { + // TODO: Replace this with something that actually reads the body line by line + // (since the file can be big). + const lines = (await response.text()).split("\n"); + for (let line of lines) { + // Skip any empty lines (most likely at the end). + if (line !== "") { + yield line; + } + } +} + +// Determines whether `str` matches at least one value in `enumObject`. +function isValidEnumValue(enumObject, str) { + for (const enumKey in enumObject) { + if (enumObject[enumKey] === str) { + return true; + } + } + return false; +} + +// Enum for test status. +const testStatus = { + PASSED: "Passed", + FAILED: "Failed", + SKIPPED: "Skipped" +} + async function init() { - const response = await fetch("content.txt"); + const response = await fetch("data.csv"); if (!response.ok) { const responseText = await response.text(); displayError(`Failed to fetch data from GCS bucket. Error: ${responseText}`); return; } - const responseText = await response.text(); - console.log(responseText); + const lines = bodyByLinesIterator(response); + // Consume the header to ensure the data has the right number of fields. + const header = (await lines.next()).value; + if (header.split(",").length != 5) { + displayError(`Fetched CSV data contains wrong number of fields. Expected: 5. Actual Header: "${header}"`); + return; + } + + const testData = []; + for await (const line of lines) { + const splitLine = line.split(","); + if (splitLine.length != 5) { + console.warn(`Found line with wrong number of fields. Actual: ${splitLine.length} Expected: 5. Line: "${line}"`); + continue; + } + if (!isValidEnumValue(testStatus, splitLine[4])) { + console.warn(`Invalid test status provided. Actual: ${splitLine[4]} Expected: One of ${Object.values(testStatus).join(", ")}`); + continue; + } + testData.push({ + commit: splitLine[0], + date: new Date(splitLine[1]), + environment: splitLine[2], + name: splitLine[3], + status: splitLine[4] + }); + } + if (testData.length == 0) { + displayError("Fetched CSV data is empty or poorly formatted."); + return; + } + + console.log(testData); } init(); From b45b4c9a0bdb6ffb2fe945a5557b2fac2f033678 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 25 May 2021 13:19:03 -0700 Subject: [PATCH 04/66] Include Google Charts. Refactor to wait for Google Charts and test data loading at the same time. 
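Promise.all lets the Google Charts load and the data fetch overlap instead of running back-to-back, and fails fast if either one fails. For readers more at home in Go, the same wait-for-both shape looks roughly like this (a loose analogy with placeholder functions, not code from this change):

    package main

    import "fmt"

    // loadCharts and fetchData stand in for the two async initializations
    // (hypothetical placeholders; the real implementation is JavaScript).
    func loadCharts(done chan<- error) { done <- nil }
    func fetchData(done chan<- error) { done <- nil }

    func main() {
        charts, data := make(chan error, 1), make(chan error, 1)
        go loadCharts(charts)
        go fetchData(data)
        // Proceed only once both have finished, surfacing the first failure,
        // which is the behavior init() gets from Promise.all.
        for _, ch := range []chan error{charts, data} {
            if err := <-ch; err != nil {
                fmt.Println("init failed:", err)
                return
            }
        }
        fmt.Println("both loaded; safe to draw")
    }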
--- hack/test-flake-chart/flake_chart.html | 1 + hack/test-flake-chart/flake_chart.js | 27 +++++++++++++++++++------- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.html b/hack/test-flake-chart/flake_chart.html index 333b61f4cf..5299da4a13 100644 --- a/hack/test-flake-chart/flake_chart.html +++ b/hack/test-flake-chart/flake_chart.html @@ -1,6 +1,7 @@ + diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index f823394598..08fc48ec0d 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -34,20 +34,18 @@ const testStatus = { SKIPPED: "Skipped" } -async function init() { +async function loadTestData() { const response = await fetch("data.csv"); if (!response.ok) { const responseText = await response.text(); - displayError(`Failed to fetch data from GCS bucket. Error: ${responseText}`); - return; + throw `Failed to fetch data from GCS bucket. Error: ${responseText}`; } const lines = bodyByLinesIterator(response); // Consume the header to ensure the data has the right number of fields. const header = (await lines.next()).value; if (header.split(",").length != 5) { - displayError(`Fetched CSV data contains wrong number of fields. Expected: 5. Actual Header: "${header}"`); - return; + throw `Fetched CSV data contains wrong number of fields. Expected: 5. Actual Header: "${header}"`; } const testData = []; @@ -70,10 +68,25 @@ async function init() { }); } if (testData.length == 0) { - displayError("Fetched CSV data is empty or poorly formatted."); + throw "Fetched CSV data is empty or poorly formatted."; + } + return testData; +} + +async function init() { + google.charts.load('current', {'packages': ['corechart']}); + let testData; + try { + // Wait for Google Charts to load, and for test data to load. + // Only store the test data (at index 1) into `testData`. + testData = (await Promise.all([ + new Promise(resolve => google.charts.setOnLoadCallback(resolve)), + loadTestData() + ]))[1]; + } catch(err) { + displayError(err); return; } - console.log(testData); } From 328d54ef639decece23e59da61116738d36526a7 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 25 May 2021 16:20:56 -0700 Subject: [PATCH 05/66] Create first flake rate chart. --- hack/test-flake-chart/flake_chart.html | 2 +- hack/test-flake-chart/flake_chart.js | 71 ++++++++++++++++++++++++-- 2 files changed, 69 insertions(+), 4 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.html b/hack/test-flake-chart/flake_chart.html index 5299da4a13..f39859daff 100644 --- a/hack/test-flake-chart/flake_chart.html +++ b/hack/test-flake-chart/flake_chart.html @@ -4,7 +4,7 @@ - +
diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 08fc48ec0d..f042a2fdd1 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -74,7 +74,7 @@ async function loadTestData() { } async function init() { - google.charts.load('current', {'packages': ['corechart']}); + google.charts.load('current', { 'packages': ['corechart'] }); let testData; try { // Wait for Google Charts to load, and for test data to load. @@ -83,11 +83,76 @@ async function init() { new Promise(resolve => google.charts.setOnLoadCallback(resolve)), loadTestData() ]))[1]; - } catch(err) { + } catch (err) { displayError(err); return; } - console.log(testData); + + const data = new google.visualization.DataTable(); + data.addColumn('date', 'Date'); + data.addColumn('number', 'Flake Percentage'); + data.addColumn({ type: 'string', label: 'Commit Hash', role: 'tooltip', 'p': { 'html': true } }); + + const desiredTest = "TestFunctional/parallel/LogsCmd", desiredEnvironment = "Docker_Linux_containerd"; + + const average = arr => { + return arr.length === 0 ? 0 : arr.reduce((sum, value) => sum + value, 0) / arr.length; + }; + + const groups = + Array.from(testData + // Filter to only contain unskipped runs of the requested test and requested environment. + .filter(test => test.name === desiredTest && test.environment === desiredEnvironment && test.status !== testStatus.SKIPPED) + // Group by run date. + .reduce((groups, test) => { + // Convert Date to time number since hashing by Date does not work. + const dateValue = test.date.getTime(); + if (groups.has(dateValue)) { + groups.get(dateValue).push(test); + } else { + groups.set(dateValue, [test]); + } + return groups + }, new Map()) + // Get all entries (type of [[time number, [test]]]). + .entries() + ) + // Turn time number back to the corresponding Date. + .map(([dateValue, tests]) => ({ date: new Date(dateValue), tests })); + + data.addRows( + groups + // Sort by run date, past to future. + .sort((a, b) => a.date - b.date) + // Map each group to all variables need to format the rows. + .map(({ date, tests }) => ({ + date, // Turn time number back to corresponding date. + flakeRate: average(tests.map(test => test.status === testStatus.FAILED ? 100 : 0)), // Compute average of runs where FAILED counts as 100%. + commitHashes: tests.map(test => ({ hash: test.commit, status: test.status })) // Take all hashes and status' of tests in this group. + })) + .map(groupData => [ + groupData.date, + groupData.flakeRate, + `
+          ${groupData.date.toString()}<br>
+          Flake Percentage: ${groupData.flakeRate.toFixed(2)}%<br>
+          Hashes:<br>
+          ${groupData.commitHashes.map(({ hash, status }) => ` - ${hash} (${status})`).join("<br>
+          ")}
` + ]) + ); + + const options = { + title: `Flake Rate by day of ${desiredTest} on ${desiredEnvironment}`, + width: 900, + height: 500, + pointSize: 10, + pointShape: "circle", + vAxis: { minValue: 0, maxValue: 1 }, + tooltip: { trigger: "selection", isHtml: true } + }; + const chart = new google.visualization.LineChart(document.getElementById('chart_div')); + chart.draw(data, options); } init(); From 2e5ea59774fb5864dbc6cf85f2a3612473f2c694 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 26 May 2021 09:36:44 -0700 Subject: [PATCH 06/66] Refactor data cleaning. --- hack/test-flake-chart/flake_chart.js | 54 ++++++++++++++-------------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index f042a2fdd1..58b119ea22 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -73,6 +73,24 @@ async function loadTestData() { return testData; } +// Computes the average of an array of numbers. +Array.prototype.average = function () { + return this.length === 0 ? 0 : this.reduce((sum, value) => sum + value, 0) / this.length; +}; + +// Groups array elements by keys obtained through `keyGetter`. +Array.prototype.groupBy = function (keyGetter) { + return Array.from(this.reduce((mapCollection, element) => { + const key = keyGetter(element); + if (mapCollection.has(key)) { + mapCollection.get(key).push(element); + } else { + mapCollection.set(key, [element]); + } + return mapCollection; + }, new Map()).values()); +}; + async function init() { google.charts.load('current', { 'packages': ['corechart'] }); let testData; @@ -95,39 +113,19 @@ async function init() { const desiredTest = "TestFunctional/parallel/LogsCmd", desiredEnvironment = "Docker_Linux_containerd"; - const average = arr => { - return arr.length === 0 ? 0 : arr.reduce((sum, value) => sum + value, 0) / arr.length; - }; - - const groups = - Array.from(testData - // Filter to only contain unskipped runs of the requested test and requested environment. - .filter(test => test.name === desiredTest && test.environment === desiredEnvironment && test.status !== testStatus.SKIPPED) - // Group by run date. - .reduce((groups, test) => { - // Convert Date to time number since hashing by Date does not work. - const dateValue = test.date.getTime(); - if (groups.has(dateValue)) { - groups.get(dateValue).push(test); - } else { - groups.set(dateValue, [test]); - } - return groups - }, new Map()) - // Get all entries (type of [[time number, [test]]]). - .entries() - ) - // Turn time number back to the corresponding Date. - .map(([dateValue, tests]) => ({ date: new Date(dateValue), tests })); + const groups = testData + // Filter to only contain unskipped runs of the requested test and requested environment. + .filter(test => test.name === desiredTest && test.environment === desiredEnvironment && test.status !== testStatus.SKIPPED) + .groupBy(test => test.date.getTime()); data.addRows( groups // Sort by run date, past to future. - .sort((a, b) => a.date - b.date) + .sort((a, b) => a[0].date - b[0].date) // Map each group to all variables need to format the rows. - .map(({ date, tests }) => ({ - date, // Turn time number back to corresponding date. - flakeRate: average(tests.map(test => test.status === testStatus.FAILED ? 100 : 0)), // Compute average of runs where FAILED counts as 100%. + .map(tests => ({ + date: tests[0].date, // Get one of the dates from the tests (which will all be the same). 
+ flakeRate: tests.map(test => test.status === testStatus.FAILED ? 100 : 0).average(), // Compute average of runs where FAILED counts as 100%. commitHashes: tests.map(test => ({ hash: test.commit, status: test.status })) // Take all hashes and status' of tests in this group. })) .map(groupData => [ From b5151a6d89029ff5ea132d91b60411a62e8ef160 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 09:46:14 -0700 Subject: [PATCH 07/66] Add duration to data collection. --- hack/test-flake-chart/collect_data.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/hack/test-flake-chart/collect_data.sh b/hack/test-flake-chart/collect_data.sh index 1707ab3028..392aeb7c9d 100755 --- a/hack/test-flake-chart/collect_data.sh +++ b/hack/test-flake-chart/collect_data.sh @@ -6,7 +6,7 @@ PARTIAL_DATA_PATH=$(mktemp) echo "Partial path: $PARTIAL_DATA_PATH" 1>&2 # Print header. -printf "Commit Hash,Commit Date,Environment,Test,Status\n" +printf "Commit Hash,Commit Date,Environment,Test,Status,Duration\n" # 1) "cat" together all summary files. # 2) Turn each test in each summary file to a CSV line containing its commit hash, environment, test, and status. @@ -17,7 +17,10 @@ printf "Commit Hash,Commit Date,Environment,Test,Status\n" # 6) Execute git log for each commit to get the date of each. # 7) Join dates with test data. gsutil cat gs://minikube-builds/logs/master/*/*_summary.json \ -| jq -r '({commit: .Detail.Details, environment: .Detail.Name, test: .PassedTests[]?, status: "Passed"},{commit: .Detail.Details, environment: .Detail.Name, test: .FailedTests[]?, status: "Failed"},{commit: .Detail.Details, environment: .Detail.Name, test: .SkippedTests[]?, status: "Skipped"}) | .commit + "," + .environment + "," + .test + "," + .status' \ +| jq -r '((.PassedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), + (.FailedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), + (.SkippedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"})) + | .commit + "," + .environment + "," + .test + "," + .status + "," + (.duration | tostring)' \ | tee $PARTIAL_DATA_PATH \ | sed -r -n 's/^([^,]+),.*/\1/p' \ | stdbuf -oL -eL uniq \ From ae62edbd181694d4b052fb7e93142c141f84a2a7 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 10:17:29 -0700 Subject: [PATCH 08/66] Update JS to accept duration data. --- hack/test-flake-chart/flake_chart.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 58b119ea22..462b8d8130 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -44,15 +44,15 @@ async function loadTestData() { const lines = bodyByLinesIterator(response); // Consume the header to ensure the data has the right number of fields. const header = (await lines.next()).value; - if (header.split(",").length != 5) { - throw `Fetched CSV data contains wrong number of fields. Expected: 5. Actual Header: "${header}"`; + if (header.split(",").length != 6) { + throw `Fetched CSV data contains wrong number of fields. Expected: 6. 
Actual Header: "${header}"`; } const testData = []; for await (const line of lines) { const splitLine = line.split(","); - if (splitLine.length != 5) { - console.warn(`Found line with wrong number of fields. Actual: ${splitLine.length} Expected: 5. Line: "${line}"`); + if (splitLine.length != 6) { + console.warn(`Found line with wrong number of fields. Actual: ${splitLine.length} Expected: 6. Line: "${line}"`); continue; } if (!isValidEnumValue(testStatus, splitLine[4])) { @@ -64,7 +64,8 @@ async function loadTestData() { date: new Date(splitLine[1]), environment: splitLine[2], name: splitLine[3], - status: splitLine[4] + status: splitLine[4], + duration: Number(splitLine[5]), }); } if (testData.length == 0) { From b55c6726ef5450090c76737c63628cbe019f49a3 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 10:18:26 -0700 Subject: [PATCH 09/66] Use URL search query to select test and environment. --- hack/test-flake-chart/flake_chart.js | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 462b8d8130..43f6865669 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -92,6 +92,17 @@ Array.prototype.groupBy = function (keyGetter) { }, new Map()).values()); }; +// Parse URL search `query` into [{key, value}]. +function parseUrlQuery(query) { + if (query[0] === '?') { + query = query.substring(1); + } + return Object.fromEntries((query === "" ? [] : query.split("&")).map(element => { + const keyValue = element.split("="); + return [unescape(keyValue[0]), unescape(keyValue[1])]; + })); +} + async function init() { google.charts.load('current', { 'packages': ['corechart'] }); let testData; @@ -112,7 +123,8 @@ async function init() { data.addColumn('number', 'Flake Percentage'); data.addColumn({ type: 'string', label: 'Commit Hash', role: 'tooltip', 'p': { 'html': true } }); - const desiredTest = "TestFunctional/parallel/LogsCmd", desiredEnvironment = "Docker_Linux_containerd"; + const query = parseUrlQuery(window.location.search); + const desiredTest = query.test || "", desiredEnvironment = query.env || ""; const groups = testData // Filter to only contain unskipped runs of the requested test and requested environment. From 424c954770215e82cd42040251b4ac103d162bb4 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 11:33:22 -0700 Subject: [PATCH 10/66] Add duration to existing graph with appropriate labels. 
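Each run date now contributes two series, the flake percentage and the mean duration, drawn against separate vertical axes. Per day the aggregation reduces to the arithmetic below (a Go sketch with invented sample runs, not code from this change):

    package main

    import "fmt"

    type run struct {
        failed   bool
        duration float64
    }

    // summarize reduces one day's runs to the two plotted values:
    // flake percentage (failures / runs * 100) and mean duration.
    func summarize(runs []run) (flakePct, avgDuration float64) {
        if len(runs) == 0 {
            return 0, 0
        }
        failures, total := 0, 0.0
        for _, r := range runs {
            if r.failed {
                failures++
            }
            total += r.duration
        }
        return float64(failures) / float64(len(runs)) * 100, total / float64(len(runs))
    }

    func main() {
        day := []run{{true, 90.5}, {false, 62.1}, {false, 58.4}}
        f, d := summarize(day)
        fmt.Printf("flake: %.2f%%, duration: %.2fs\n", f, d) // flake: 33.33%, duration: 70.33s
    }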
--- hack/test-flake-chart/flake_chart.js | 35 ++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 43f6865669..8d8f0cb524 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -121,7 +121,9 @@ async function init() { const data = new google.visualization.DataTable(); data.addColumn('date', 'Date'); data.addColumn('number', 'Flake Percentage'); - data.addColumn({ type: 'string', label: 'Commit Hash', role: 'tooltip', 'p': { 'html': true } }); + data.addColumn({ type: 'string', role: 'tooltip', 'p': { 'html': true } }); + data.addColumn('number', 'Duration'); + data.addColumn({ type: 'string', role: 'tooltip', 'p': { 'html': true } }); const query = parseUrlQuery(window.location.search); const desiredTest = query.test || "", desiredEnvironment = query.env || ""; @@ -139,7 +141,12 @@ async function init() { .map(tests => ({ date: tests[0].date, // Get one of the dates from the tests (which will all be the same). flakeRate: tests.map(test => test.status === testStatus.FAILED ? 100 : 0).average(), // Compute average of runs where FAILED counts as 100%. - commitHashes: tests.map(test => ({ hash: test.commit, status: test.status })) // Take all hashes and status' of tests in this group. + duration: tests.map(test => test.duration).average(), // Compute average duration of runs. + commitHashes: tests.map(test => ({ // Take all hashes, statuses, and durations of tests in this group. + hash: test.commit, + status: test.status, + duration: test.duration + })) })) .map(groupData => [ groupData.date, @@ -149,17 +156,31 @@ async function init() { Flake Percentage: ${groupData.flakeRate.toFixed(2)}%
          Hashes:<br>
          ${groupData.commitHashes.map(({ hash, status }) => ` - ${hash} (${status})`).join("<br>
          ")}
-         `
+         `,
+         groupData.duration,
+         `
+           ${groupData.date.toString()}<br>
+           Average Duration: ${groupData.duration.toFixed(2)}s<br>
+           Hashes:<br>
+           ${groupData.commitHashes.map(({ hash, duration }) => ` - ${hash} (${duration}s)`).join("<br>
+           ")}
`, ]) ); const options = { - title: `Flake Rate by day of ${desiredTest} on ${desiredEnvironment}`, - width: 900, - height: 500, + title: `Flake rate and duration by day of ${desiredTest} on ${desiredEnvironment}`, + width: window.innerWidth, + height: window.innerHeight, pointSize: 10, pointShape: "circle", - vAxis: { minValue: 0, maxValue: 1 }, + series: { + 0: { targetAxisIndex: 0 }, + 1: { targetAxisIndex: 1 }, + }, + vAxes: { + 0: { title: "Flake rate", minValue: 0, maxValue: 100 }, + 1: { title: "Duration (seconds)" }, + }, tooltip: { trigger: "selection", isHtml: true } }; const chart = new google.visualization.LineChart(document.getElementById('chart_div')); From 419f2506e697d65bbf493a6c92ba390ed248ebc8 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 12:15:16 -0700 Subject: [PATCH 11/66] Add "data optimization" which treats empty fields in data.csv as equivalent to the previous entry. This optimization takes data size down from 41 MB to 16MB which is ~40% which is huge! --- hack/test-flake-chart/flake_chart.js | 5 ++++- hack/test-flake-chart/optimize_data.sh | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100755 hack/test-flake-chart/optimize_data.sh diff --git a/hack/test-flake-chart/flake_chart.js b/hack/test-flake-chart/flake_chart.js index 8d8f0cb524..e18d5389a5 100644 --- a/hack/test-flake-chart/flake_chart.js +++ b/hack/test-flake-chart/flake_chart.js @@ -49,12 +49,15 @@ async function loadTestData() { } const testData = []; + let lineData = ["", "", "", "", "", ""]; for await (const line of lines) { - const splitLine = line.split(","); + let splitLine = line.split(","); if (splitLine.length != 6) { console.warn(`Found line with wrong number of fields. Actual: ${splitLine.length} Expected: 6. Line: "${line}"`); continue; } + splitLine = splitLine.map((value, index) => value === "" ? lineData[index] : value); + lineData = splitLine; if (!isValidEnumValue(testStatus, splitLine[4])) { console.warn(`Invalid test status provided. Actual: ${splitLine[4]} Expected: One of ${Object.values(testStatus).join(", ")}`); continue; diff --git a/hack/test-flake-chart/optimize_data.sh b/hack/test-flake-chart/optimize_data.sh new file mode 100755 index 0000000000..f62cb63f3e --- /dev/null +++ b/hack/test-flake-chart/optimize_data.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +awk -F, 'BEGIN {OFS = FS} { for(i=1; i<=NF; i++) { if($i == j[i]) { $i = ""; } else { j[i] = $i; } } printf "%s\n",$0 }' From 78e98382836c8552f178f72deb0ac26d7e793f55 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 14:01:41 -0700 Subject: [PATCH 12/66] Refactor collect_data into collect_data + process_data. This allows processing data coming from other sources. --- hack/test-flake-chart/collect_data.sh | 26 +++----------------------- hack/test-flake-chart/process_data.sh | 26 ++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 23 deletions(-) create mode 100755 hack/test-flake-chart/process_data.sh diff --git a/hack/test-flake-chart/collect_data.sh b/hack/test-flake-chart/collect_data.sh index 392aeb7c9d..644c7842de 100755 --- a/hack/test-flake-chart/collect_data.sh +++ b/hack/test-flake-chart/collect_data.sh @@ -1,28 +1,8 @@ #!/bin/bash -# Create temp path for partial data (storing everything but the commit date.) -PARTIAL_DATA_PATH=$(mktemp) -# Write -echo "Partial path: $PARTIAL_DATA_PATH" 1>&2 - -# Print header. 
-printf "Commit Hash,Commit Date,Environment,Test,Status,Duration\n" +DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) # 1) "cat" together all summary files. -# 2) Turn each test in each summary file to a CSV line containing its commit hash, environment, test, and status. -# 3) Copy partial data to $PARTIAL_DATA_PATH to join with date later. -# 4) Extract only commit hash for each row -# 5) Make the commit hashes unique (we assume that gsutil cats files from the same hash next to each other). -# Also force buffering to occur per line so remainder of pipe can continue to process. -# 6) Execute git log for each commit to get the date of each. -# 7) Join dates with test data. +# 2) Process all summary files. gsutil cat gs://minikube-builds/logs/master/*/*_summary.json \ -| jq -r '((.PassedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), - (.FailedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), - (.SkippedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"})) - | .commit + "," + .environment + "," + .test + "," + .status + "," + (.duration | tostring)' \ -| tee $PARTIAL_DATA_PATH \ -| sed -r -n 's/^([^,]+),.*/\1/p' \ -| stdbuf -oL -eL uniq \ -| xargs -I {} git log -1 --pretty=format:"{},%as%n" {} \ -| join -t "," - $PARTIAL_DATA_PATH +| $DIR/process_data.sh diff --git a/hack/test-flake-chart/process_data.sh b/hack/test-flake-chart/process_data.sh new file mode 100755 index 0000000000..7348c1b178 --- /dev/null +++ b/hack/test-flake-chart/process_data.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +# Create temp path for partial data (storing everything but the commit date.) +PARTIAL_DATA_PATH=$(mktemp) +# Print the partial path for debugging/convenience. +echo "Partial path: $PARTIAL_DATA_PATH" 1>&2 + +# Print header. +printf "Commit Hash,Commit Date,Environment,Test,Status,Duration\n" + +# 1) Turn each test in each summary file to a CSV line containing its commit hash, environment, test, and status. +# 2) Copy partial data to $PARTIAL_DATA_PATH to join with date later. +# 3) Extract only commit hash for each row +# 4) Make the commit hashes unique (we assume that gsutil cats files from the same hash next to each other). +# Also force buffering to occur per line so remainder of pipe can continue to process. +# 5) Execute git log for each commit to get the date of each. +# 6) Join dates with test data. +jq -r '((.PassedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), + (.FailedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), + (.SkippedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"})) + | .commit + "," + .environment + "," + .test + "," + .status + "," + (.duration | tostring)' \ +| tee $PARTIAL_DATA_PATH \ +| sed -r -n 's/^([^,]+),.*/\1/p' \ +| stdbuf -oL -eL uniq \ +| xargs -I {} git log -1 --pretty=format:"{},%as%n" {} \ +| join -t "," - $PARTIAL_DATA_PATH From fbf4e03eb98790d3d7dfe33610599f97a844c036 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 14:06:54 -0700 Subject: [PATCH 13/66] Add license to sh scripts. 
--- hack/test-flake-chart/collect_data.sh | 14 ++++++++++++++ hack/test-flake-chart/optimize_data.sh | 15 +++++++++++++++ hack/test-flake-chart/process_data.sh | 14 ++++++++++++++ 3 files changed, 43 insertions(+) diff --git a/hack/test-flake-chart/collect_data.sh b/hack/test-flake-chart/collect_data.sh index 644c7842de..44273f6d80 100755 --- a/hack/test-flake-chart/collect_data.sh +++ b/hack/test-flake-chart/collect_data.sh @@ -1,5 +1,19 @@ #!/bin/bash +# Copyright 2018 The Kubernetes Authors All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) # 1) "cat" together all summary files. diff --git a/hack/test-flake-chart/optimize_data.sh b/hack/test-flake-chart/optimize_data.sh index f62cb63f3e..1fd93f1901 100755 --- a/hack/test-flake-chart/optimize_data.sh +++ b/hack/test-flake-chart/optimize_data.sh @@ -1,3 +1,18 @@ #!/bin/bash +# Copyright 2018 The Kubernetes Authors All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Take input CSV. For each field, if it is the same as the previous row, replace it with an empty string. awk -F, 'BEGIN {OFS = FS} { for(i=1; i<=NF; i++) { if($i == j[i]) { $i = ""; } else { j[i] = $i; } } printf "%s\n",$0 }' diff --git a/hack/test-flake-chart/process_data.sh b/hack/test-flake-chart/process_data.sh index 7348c1b178..f1ed764e87 100755 --- a/hack/test-flake-chart/process_data.sh +++ b/hack/test-flake-chart/process_data.sh @@ -1,5 +1,19 @@ #!/bin/bash +# Copyright 2018 The Kubernetes Authors All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Create temp path for partial data (storing everything but the commit date.) PARTIAL_DATA_PATH=$(mktemp) # Print the partial path for debugging/convenience. From ef33b8661cdae8973558f2bd0d1c22e8fd45383a Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 16:12:27 -0700 Subject: [PATCH 14/66] Create new compute_flake_rate.go with basic CSV parsing. 
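With the flags defined here the tool runs as, for example, go run hack/test-flake-chart/compute_flake_rate.go --data-csv=<path> --date-range=5. Note that ReadData treats an empty field as "same as the previous row", undoing the optimize_data.sh encoding. A standalone sketch of that fill rule (sample rows invented for illustration; not part of the change):

    package main

    import "fmt"

    // fill replaces empty fields with the corresponding field of the previous
    // row, the inverse of the deduplication applied by optimize_data.sh.
    func fill(prev, cur []string) []string {
        out := make([]string, len(cur))
        for i, f := range cur {
            if f == "" {
                out[i] = prev[i]
            } else {
                out[i] = f
            }
        }
        return out
    }

    func main() {
        prev := []string{"abc123", "2021-06-01", "Docker_Linux", "TestStart", "Passed", "42.1"}
        cur := []string{"", "", "", "TestPause", "Failed", "12.9"}
        fmt.Println(fill(prev, cur)) // [abc123 2021-06-01 Docker_Linux TestPause Failed 12.9]
    }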
--- hack/test-flake-chart/compute_flake_rate.go | 109 ++++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 hack/test-flake-chart/compute_flake_rate.go diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go new file mode 100644 index 0000000000..ca62639e7b --- /dev/null +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -0,0 +1,109 @@ +/* +Copyright 2020 The Kubernetes Authors All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "bufio" + "flag" + "fmt" + "io" + "os" + "runtime/debug" + "strings" + "time" +) + +var ( + dataCsv = flag.String("data-csv", "", "Source data to compute flake rates on") + dateRange = flag.Uint("date-range", 5, "Number of test dates to consider when computing flake rate") +) + +func main() { + flag.Parse() + + file, err := os.Open(*dataCsv) + if err != nil { + exit("Unable to read data CSV", err) + } + + testEntries := ReadData(file) + for _, entry := range testEntries { + fmt.Printf("Name: \"%s\", Environment: \"%s\", Date: \"%v\", Status: \"%s\"\n", entry.name, entry.environment, entry.date, entry.status) + } +} + +type TestEntry struct { + name string + environment string + date time.Time + status string +} + +// Reads CSV `file` and consumes each line to be a single TestEntry. +func ReadData(file *os.File) []TestEntry { + testEntries := []TestEntry{} + + fileReader := bufio.NewReaderSize(file, 256) + previousLine := []string{"", "", "", "", "", ""} + firstLine := true + for { + lineBytes, _, err := fileReader.ReadLine() + if err != nil { + if err == io.EOF { + break + } + exit("Error reading data CSV", err) + } + line := string(lineBytes) + fields := strings.Split(line, ",") + if firstLine { + if len(fields) != 6 { + exit(fmt.Sprintf("Data CSV in incorrect format. Expected 6 columns, but got %d", len(fields)), fmt.Errorf("Bad CSV format")) + } + firstLine = false + } + for i, field := range fields { + if field == "" { + fields[i] = previousLine[i] + } + } + if len(fields) != 6 { + fmt.Printf("Found line with wrong number of columns. Expectd 6, but got %d - skipping\n", len(fields)) + continue + } + previousLine = fields + if fields[4] == "Passed" || fields[4] == "Failed" { + date, err := time.Parse("2006-01-02", fields[1]) + if err != nil { + fmt.Printf("Failed to parse date: %v\n", err) + } + testEntries = append(testEntries, TestEntry{ + name: fields[3], + environment: fields[2], + date: date, + status: fields[4], + }) + } + } + return testEntries +} + +// exit will exit and clean up minikube +func exit(msg string, err error) { + fmt.Printf("WithError(%s)=%v called from:\n%s", msg, err, debug.Stack()) + os.Exit(60) +} From de6cff23db3f753000941a7da2357557327c2e91 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 16:20:47 -0700 Subject: [PATCH 15/66] Split entries based on environment and test name. 
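The result is a two-level map, map[environment]map[test][]TestEntry, so later stages can walk a single environment's tests without rescanning every entry. The nesting in compact, generic form (an illustrative sketch, not this file's code):

    package main

    import "fmt"

    type entry struct{ env, test string }

    // group nests entries by environment, then by test, producing the same
    // shape as SplitData (map[string]map[string][]TestEntry).
    func group(entries []entry) map[string]map[string][]entry {
        out := map[string]map[string][]entry{}
        for _, e := range entries {
            if out[e.env] == nil {
                out[e.env] = map[string][]entry{}
            }
            out[e.env][e.test] = append(out[e.env][e.test], e)
        }
        return out
    }

    func main() {
        g := group([]entry{{"Docker_Linux", "TestA"}, {"Docker_Linux", "TestA"}, {"KVM_Linux", "TestB"}})
        fmt.Println(len(g["Docker_Linux"]["TestA"]), len(g["KVM_Linux"]["TestB"])) // 2 1
    }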
--- hack/test-flake-chart/compute_flake_rate.go | 44 ++++++++++++++++++++- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go index ca62639e7b..f62fef637d 100644 --- a/hack/test-flake-chart/compute_flake_rate.go +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -41,8 +41,17 @@ func main() { } testEntries := ReadData(file) - for _, entry := range testEntries { - fmt.Printf("Name: \"%s\", Environment: \"%s\", Date: \"%v\", Status: \"%s\"\n", entry.name, entry.environment, entry.date, entry.status) + splitEntries := SplitData(testEntries) + for environment, environmentSplit := range splitEntries { + fmt.Printf("%s {\n", environment) + for test, testSplit := range environmentSplit { + fmt.Printf(" %s {\n", test) + for _, entry := range testSplit { + fmt.Printf(" Date: %v, Status: %s\n", entry.date, entry.status) + } + fmt.Printf(" }\n") + } + fmt.Printf("}\n") } } @@ -102,6 +111,37 @@ func ReadData(file *os.File) []TestEntry { return testEntries } +// Splits `testEntries` up into maps indexed first by environment and then by test. +func SplitData(testEntries []TestEntry) map[string]map[string][]TestEntry { + splitEntries := make(map[string]map[string][]TestEntry) + + for _, entry := range testEntries { + AppendEntry(splitEntries, entry.environment, entry.name, entry) + } + + return splitEntries +} + +// Appends `entry` to `splitEntries` at the `environment` and `test`. +func AppendEntry(splitEntries map[string]map[string][]TestEntry, environment, test string, entry TestEntry) { + // Lookup the environment. + environmentSplit, ok := splitEntries[environment] + if !ok { + // If the environment map is missing, make a map for this environment and store it. + environmentSplit = make(map[string][]TestEntry) + splitEntries[environment] = environmentSplit + } + + // Lookup the test. + testSplit, ok := environmentSplit[test] + if !ok { + // If the test is missing, make a slice for this test. + testSplit = make([]TestEntry, 0) + // The slice is not inserted, since it will be replaced anyway. + } + environmentSplit[test] = append(testSplit, entry) +} + // exit will exit and clean up minikube func exit(msg string, err error) { fmt.Printf("WithError(%s)=%v called from:\n%s", msg, err, debug.Stack()) From e6a553f67977856eac4ef8ae68184a55b7029131 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 1 Jun 2021 16:27:03 -0700 Subject: [PATCH 16/66] Compute flake rate of all entries split by environment and test. 
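The rate is simply failures divided by total entries for that environment and test: a test whose recorded runs are Failed, Passed, Passed, Failed comes out at 2/4 = 0.5. Skipped runs never reach this computation, since ReadData only keeps Passed and Failed entries.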
--- hack/test-flake-chart/compute_flake_rate.go | 40 +++++++++++++++++---- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go index f62fef637d..628a6efe0b 100644 --- a/hack/test-flake-chart/compute_flake_rate.go +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -42,14 +42,11 @@ func main() { testEntries := ReadData(file) splitEntries := SplitData(testEntries) - for environment, environmentSplit := range splitEntries { + flakeRates := ComputeFlakeRates(splitEntries) + for environment, environmentSplit := range flakeRates { fmt.Printf("%s {\n", environment) - for test, testSplit := range environmentSplit { - fmt.Printf(" %s {\n", test) - for _, entry := range testSplit { - fmt.Printf(" Date: %v, Status: %s\n", entry.date, entry.status) - } - fmt.Printf(" }\n") + for test, flakeRate := range environmentSplit { + fmt.Printf(" %s: %f\n", test, flakeRate) } fmt.Printf("}\n") } @@ -142,6 +139,35 @@ func AppendEntry(splitEntries map[string]map[string][]TestEntry, environment, te environmentSplit[test] = append(testSplit, entry) } +// Computes the flake rates over each entry in `splitEntries`. +func ComputeFlakeRates(splitEntries map[string]map[string][]TestEntry) map[string]map[string]float32 { + flakeRates := make(map[string]map[string]float32) + for environment, environmentSplit := range splitEntries { + for test, testSplit := range environmentSplit { + failures := 0 + for _, entry := range testSplit { + if entry.status == "Failed" { + failures++ + } + } + SetValue(flakeRates, environment, test, float32(failures)/float32(len(testSplit))) + } + } + return flakeRates +} + +// Sets the `value` of keys `environment` and `test` in `flakeRates`. +func SetValue(flakeRates map[string]map[string]float32, environment, test string, value float32) { + // Lookup the environment. + environmentRates, ok := flakeRates[environment] + if !ok { + // If the environment map is missing, make a map for this environment and store it. + environmentRates = make(map[string]float32) + flakeRates[environment] = environmentRates + } + environmentRates[test] = value +} + // exit will exit and clean up minikube func exit(msg string, err error) { fmt.Printf("WithError(%s)=%v called from:\n%s", msg, err, debug.Stack()) From 60929009d6daae66405b8f5c5f2570d5b99121d6 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 10:56:02 -0700 Subject: [PATCH 17/66] Create FilterRecentEntries to only include the last N run dates. 
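For example, with date-range=2 and a test that ran on Jan 1, Jan 2, twice on Jan 3, and on Jan 4, the two most recent distinct dates are Jan 4 and Jan 3, so the Jan 4 entry and both Jan 3 entries are kept while the Jan 1 and Jan 2 entries are dropped. Sorting the distinct dates from newest to oldest and binary-searching each entry's date (sort.Search) keeps this O(n log n).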
--- hack/test-flake-chart/compute_flake_rate.go | 52 ++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go index 628a6efe0b..94eece1725 100644 --- a/hack/test-flake-chart/compute_flake_rate.go +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -23,6 +23,7 @@ import ( "io" "os" "runtime/debug" + "sort" "strings" "time" ) @@ -42,7 +43,8 @@ func main() { testEntries := ReadData(file) splitEntries := SplitData(testEntries) - flakeRates := ComputeFlakeRates(splitEntries) + filteredEntries := FilterRecentEntries(splitEntries, *dateRange) + flakeRates := ComputeFlakeRates(filteredEntries) for environment, environmentSplit := range flakeRates { fmt.Printf("%s {\n", environment) for test, flakeRate := range environmentSplit { @@ -139,6 +141,54 @@ func AppendEntry(splitEntries map[string]map[string][]TestEntry, environment, te environmentSplit[test] = append(testSplit, entry) } +// Filters `splitEntries` to include only the most recent `date_range` dates. +func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRange uint) map[string]map[string][]TestEntry { + filteredEntries := make(map[string]map[string][]TestEntry) + + for environment, environmentSplit := range splitEntries { + for test, testSplit := range environmentSplit { + dates := make([]time.Time, len(testSplit)) + for _, entry := range testSplit { + dates = append(dates, entry.date) + } + // Sort dates from future to past. + sort.Slice(dates, func(i, j int) bool { + return dates[j].Before(dates[i]) + }) + datesInRange := make([]time.Time, 0, dateRange) + var lastDate time.Time = time.Date(0, 0, 0, 0, 0, 0, 0, time.Local) + // Go through each date. + for _, date := range dates { + // If date is the same as last date, ignore it. + if date.Equal(lastDate) { + continue + } + + // Add the date. + datesInRange = append(datesInRange, date) + lastDate = date + // If the date_range has been hit, break out. + if uint(len(datesInRange)) == dateRange { + break + } + } + + for _, entry := range testSplit { + // Look for the first element <= entry.date + index := sort.Search(len(datesInRange), func(i int) bool { + return datesInRange[i].Before(entry.date) || datesInRange[i].Equal(entry.date) + }) + // If no date is <= entry.date, or the found date does not equal entry.date. + if index == len(datesInRange) || !datesInRange[index].Equal(entry.date) { + continue + } + AppendEntry(filteredEntries, environment, test, entry) + } + } + } + return filteredEntries +} + // Computes the flake rates over each entry in `splitEntries`. func ComputeFlakeRates(splitEntries map[string]map[string][]TestEntry) map[string]map[string]float32 { flakeRates := make(map[string]map[string]float32) From 9d4153f0abfc1bebecb53991e11d1dbcaa33c6e1 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 13:40:27 -0700 Subject: [PATCH 18/66] Create test for ReadData. --- hack/test-flake-chart/compute_flake_rate.go | 2 +- .../compute_flake_rate_test.go | 87 +++++++++++++++++++ 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 hack/test-flake-chart/compute_flake_rate_test.go diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go index 94eece1725..02151c6dec 100644 --- a/hack/test-flake-chart/compute_flake_rate.go +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -62,7 +62,7 @@ type TestEntry struct { } // Reads CSV `file` and consumes each line to be a single TestEntry. 
-func ReadData(file *os.File) []TestEntry { +func ReadData(file io.Reader) []TestEntry { testEntries := []TestEntry{} fileReader := bufio.NewReaderSize(file, 256) diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/test-flake-chart/compute_flake_rate_test.go new file mode 100644 index 0000000000..30210c9b88 --- /dev/null +++ b/hack/test-flake-chart/compute_flake_rate_test.go @@ -0,0 +1,87 @@ +/* +Copyright 2020 The Kubernetes Authors All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "strings" + "testing" + "time" +) + +func simpleDate(year int, month time.Month, day int) time.Time { + return time.Date(year, month, day, 0, 0, 0, 0, time.UTC) +} + +func TestReadData(t *testing.T) { + actualData := ReadData(strings.NewReader( + `A,B,C,D,E,F + hash,2000-01-01,env1,test1,Passed,1 + hash,2001-01-01,env2,test2,Failed,1 + hash,,,test1,,1 + hash,2002-01-01,,,Passed,1 + hash,2003-01-01,env3,test3,Passed,1`, + )) + expectedData := []TestEntry{ + { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 1), + status: "Passed", + }, + { + name: "test2", + environment: "env2", + date: simpleDate(2001, time.January, 1), + status: "Failed", + }, + { + name: "test1", + environment: "env2", + date: simpleDate(2001, time.January, 1), + status: "Failed", + }, + { + name: "test1", + environment: "env2", + date: simpleDate(2002, time.January, 1), + status: "Passed", + }, + { + name: "test3", + environment: "env3", + date: simpleDate(2003, time.January, 1), + status: "Passed", + }, + } + + for i, actual := range actualData { + if len(expectedData) <= i { + t.Errorf("Received unmatched actual element at index %d. Actual: %v", i, actual) + continue + } + expected := expectedData[i] + if actual != expected { + t.Errorf("Elements differ at index %d. Expected: %v, Actual: %v", i, expected, actual) + } + } + + if len(actualData) < len(expectedData) { + for i := len(actualData); i < len(expectedData); i++ { + t.Errorf("Missing unmatched expected element at index %d. Expected: %v", i, expectedData[i]) + } + } +} From 401bcbfe0a9dda2f1dbb0feb0a723cdbfba93426 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 14:12:43 -0700 Subject: [PATCH 19/66] Move comparison code to its own function. --- .../compute_flake_rate_test.go | 35 +++++++++++++------ 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/test-flake-chart/compute_flake_rate_test.go index 30210c9b88..8e175bfb0d 100644 --- a/hack/test-flake-chart/compute_flake_rate_test.go +++ b/hack/test-flake-chart/compute_flake_rate_test.go @@ -17,6 +17,7 @@ limitations under the License. 
package main import ( + "fmt" "strings" "testing" "time" @@ -26,6 +27,28 @@ func simpleDate(year int, month time.Month, day int) time.Time { return time.Date(year, month, day, 0, 0, 0, 0, time.UTC) } +func compareEntrySlices(t *testing.T, actualData, expectedData []TestEntry, extra string) { + if extra != "" { + extra = fmt.Sprintf(" (%s)", extra) + } + for i, actual := range actualData { + if len(expectedData) <= i { + t.Errorf("Received unmatched actual element at index %d%s. Actual: %v", i, extra, actual) + continue + } + expected := expectedData[i] + if actual != expected { + t.Errorf("Elements differ at index %d%s. Expected: %v, Actual: %v", i, extra, expected, actual) + } + } + + if len(actualData) < len(expectedData) { + for i := len(actualData); i < len(expectedData); i++ { + t.Errorf("Missing unmatched expected element at index %d%s. Expected: %v", i, extra, expectedData[i]) + } + } +} + func TestReadData(t *testing.T) { actualData := ReadData(strings.NewReader( `A,B,C,D,E,F @@ -68,16 +91,8 @@ func TestReadData(t *testing.T) { }, } - for i, actual := range actualData { - if len(expectedData) <= i { - t.Errorf("Received unmatched actual element at index %d. Actual: %v", i, actual) - continue - } - expected := expectedData[i] - if actual != expected { - t.Errorf("Elements differ at index %d. Expected: %v, Actual: %v", i, expected, actual) - } - } + compareEntrySlices(t, actualData, expectedData, "") +} if len(actualData) < len(expectedData) { for i := len(actualData); i < len(expectedData); i++ { From 4e9718a28b6cf68be53eaf33a9ab77cbf39c3e93 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 14:15:25 -0700 Subject: [PATCH 20/66] Add test for SplitData. --- .../compute_flake_rate_test.go | 78 ++++++++++++++++++- 1 file changed, 75 insertions(+), 3 deletions(-) diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/test-flake-chart/compute_flake_rate_test.go index 8e175bfb0d..13343a2995 100644 --- a/hack/test-flake-chart/compute_flake_rate_test.go +++ b/hack/test-flake-chart/compute_flake_rate_test.go @@ -94,9 +94,81 @@ func TestReadData(t *testing.T) { compareEntrySlices(t, actualData, expectedData, "") } - if len(actualData) < len(expectedData) { - for i := len(actualData); i < len(expectedData); i++ { - t.Errorf("Missing unmatched expected element at index %d. 
Expected: %v", i, expectedData[i]) +func compareSplitData(t *testing.T, actual, expected map[string]map[string][]TestEntry) { + for environment, actualTests := range actual { + expectedTests, environmentOk := expected[environment] + if !environmentOk { + t.Errorf("Unexpected environment %s in actual", environment) + continue + } + + for test, actualEntries := range actualTests { + expectedEntries, testOk := expectedTests[test] + if !testOk { + t.Errorf("Unexpected test %s (in environment %s) in actual", test, environment) + continue + } + + compareEntrySlices(t, actualEntries, expectedEntries, fmt.Sprintf("environment %s, test %s", environment, test)) + } + + for test := range expectedTests { + _, testOk := actualTests[test] + if !testOk { + t.Errorf("Missing expected test %s (in environment %s) in actual", test, environment) + } + } + } + + for environment := range expected { + _, environmentOk := actual[environment] + if !environmentOk { + t.Errorf("Missing expected environment %s in actual", environment) } } } + +func TestSplitData(t *testing.T) { + entry_e1_t1_1, entry_e1_t1_2 := TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 1), + status: "Passed", + }, TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 2), + status: "Passed", + } + entry_e1_t2 := TestEntry{ + name: "test2", + environment: "env1", + date: simpleDate(2000, time.January, 1), + status: "Passed", + } + entry_e2_t1 := TestEntry{ + name: "test1", + environment: "env2", + date: simpleDate(2000, time.January, 1), + status: "Passed", + } + entry_e2_t2 := TestEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2000, time.January, 1), + status: "Passed", + } + actual := SplitData([]TestEntry{entry_e1_t1_1, entry_e1_t1_2, entry_e1_t2, entry_e2_t1, entry_e2_t2}) + expected := map[string]map[string][]TestEntry{ + "env1": { + "test1": {entry_e1_t1_1, entry_e1_t1_2}, + "test2": {entry_e1_t2}, + }, + "env2": { + "test1": {entry_e2_t1}, + "test2": {entry_e2_t2}, + }, + } + + compareSplitData(t, actual, expected) +} From df6f7a8485ce00305fa8cc21ea4ff395b30b433a Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 15:48:12 -0700 Subject: [PATCH 21/66] Add test for FilterRecentEntries. --- hack/test-flake-chart/compute_flake_rate.go | 10 +- .../compute_flake_rate_test.go | 107 ++++++++++++++++++ 2 files changed, 112 insertions(+), 5 deletions(-) diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/test-flake-chart/compute_flake_rate.go index 02151c6dec..ea622a4a80 100644 --- a/hack/test-flake-chart/compute_flake_rate.go +++ b/hack/test-flake-chart/compute_flake_rate.go @@ -115,14 +115,14 @@ func SplitData(testEntries []TestEntry) map[string]map[string][]TestEntry { splitEntries := make(map[string]map[string][]TestEntry) for _, entry := range testEntries { - AppendEntry(splitEntries, entry.environment, entry.name, entry) + appendEntry(splitEntries, entry.environment, entry.name, entry) } return splitEntries } // Appends `entry` to `splitEntries` at the `environment` and `test`. -func AppendEntry(splitEntries map[string]map[string][]TestEntry, environment, test string, entry TestEntry) { +func appendEntry(splitEntries map[string]map[string][]TestEntry, environment, test string, entry TestEntry) { // Lookup the environment. 
environmentSplit, ok := splitEntries[environment] if !ok { @@ -182,7 +182,7 @@ func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRan if index == len(datesInRange) || !datesInRange[index].Equal(entry.date) { continue } - AppendEntry(filteredEntries, environment, test, entry) + appendEntry(filteredEntries, environment, test, entry) } } } @@ -200,14 +200,14 @@ func ComputeFlakeRates(splitEntries map[string]map[string][]TestEntry) map[strin failures++ } } - SetValue(flakeRates, environment, test, float32(failures)/float32(len(testSplit))) + setValue(flakeRates, environment, test, float32(failures)/float32(len(testSplit))) } } return flakeRates } // Sets the `value` of keys `environment` and `test` in `flakeRates`. -func SetValue(flakeRates map[string]map[string]float32, environment, test string, value float32) { +func setValue(flakeRates map[string]map[string]float32, environment, test string, value float32) { // Lookup the environment. environmentRates, ok := flakeRates[environment] if !ok { diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/test-flake-chart/compute_flake_rate_test.go index 13343a2995..d27fd176e9 100644 --- a/hack/test-flake-chart/compute_flake_rate_test.go +++ b/hack/test-flake-chart/compute_flake_rate_test.go @@ -172,3 +172,110 @@ func TestSplitData(t *testing.T) { compareSplitData(t, actual, expected) } + +func TestFilterRecentEntries(t *testing.T) { + entry_e1_t1_r1, entry_e1_t1_r2, entry_e1_t1_r3, entry_e1_t1_o1, entry_e1_t1_o2 := TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 4), + status: "Passed", + }, TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 3), + status: "Passed", + }, TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 3), + status: "Passed", + }, TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 2), + status: "Passed", + }, TestEntry{ + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 1), + status: "Passed", + } + entry_e1_t2_r1, entry_e1_t2_r2, entry_e1_t2_o1 := TestEntry{ + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 3), + status: "Passed", + }, TestEntry{ + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 2), + status: "Passed", + }, TestEntry{ + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 1), + status: "Passed", + } + entry_e2_t2_r1, entry_e2_t2_r2, entry_e2_t2_o1 := TestEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2003, time.January, 3), + status: "Passed", + }, TestEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2003, time.January, 2), + status: "Passed", + }, TestEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2003, time.January, 1), + status: "Passed", + } + + actualData := FilterRecentEntries(map[string]map[string][]TestEntry{ + "env1": { + "test1": { + entry_e1_t1_r1, + entry_e1_t1_r2, + entry_e1_t1_r3, + entry_e1_t1_o1, + entry_e1_t1_o2, + }, + "test2": { + entry_e1_t2_r1, + entry_e1_t2_r2, + entry_e1_t2_o1, + }, + }, + "env2": { + "test2": { + entry_e2_t2_r1, + entry_e2_t2_r2, + entry_e2_t2_o1, + }, + }, + }, 2) + + expectedData := map[string]map[string][]TestEntry{ + "env1": { + "test1": { + entry_e1_t1_r1, + entry_e1_t1_r2, + entry_e1_t1_r3, + }, + "test2": { + entry_e1_t2_r1, + entry_e1_t2_r2, + }, + }, + "env2": { + "test2": { + entry_e2_t2_r1, + entry_e2_t2_r2, 
+ }, + }, + } + + compareSplitData(t, actualData, expectedData) +} From 65be305aab6517c7c503f1627bbf94ddca7b7052 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 2 Jun 2021 16:17:39 -0700 Subject: [PATCH 22/66] Add test for ComputeFlakeRates. --- .../compute_flake_rate_test.go | 112 ++++++++++++++++++ 1 file changed, 112 insertions(+) diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/test-flake-chart/compute_flake_rate_test.go index d27fd176e9..22c1f6b476 100644 --- a/hack/test-flake-chart/compute_flake_rate_test.go +++ b/hack/test-flake-chart/compute_flake_rate_test.go @@ -279,3 +279,115 @@ func TestFilterRecentEntries(t *testing.T) { compareSplitData(t, actualData, expectedData) } + +func TestComputeFlakeRates(t *testing.T) { + actualData := ComputeFlakeRates(map[string]map[string][]TestEntry{ + "env1": { + "test1": { + { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 4), + status: "Passed", + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 3), + status: "Passed", + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 3), + status: "Passed", + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 2), + status: "Passed", + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, time.January, 1), + status: "Failed", + }, + }, + "test2": { + { + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 3), + status: "Failed", + }, { + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 2), + status: "Failed", + }, { + name: "test2", + environment: "env1", + date: simpleDate(2001, time.January, 1), + status: "Failed", + }, + }, + }, + "env2": { + "test2": { + { + name: "test2", + environment: "env2", + date: simpleDate(2003, time.January, 3), + status: "Passed", + }, TestEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2003, time.January, 2), + status: "Failed", + }, + }, + }, + }) + + expectedData := map[string]map[string]float32{ + "env1": { + "test1": 0.2, + "test2": 1, + }, + "env2": { + "test2": 0.5, + }, + } + + for environment, actualTests := range actualData { + expectedTests, environmentOk := expectedData[environment] + if !environmentOk { + t.Errorf("Unexpected environment %s in actual", environment) + continue + } + + for test, actualFlakeRate := range actualTests { + expectedFlakeRate, testOk := expectedTests[test] + if !testOk { + t.Errorf("Unexpected test %s (in environment %s) in actual", test, environment) + continue + } + + if actualFlakeRate != expectedFlakeRate { + t.Errorf("Wrong flake rate. Expected: %v, Actual: %v", expectedFlakeRate, actualFlakeRate) + } + } + + for test := range expectedTests { + _, testOk := actualTests[test] + if !testOk { + t.Errorf("Missing expected test %s (in environment %s) in actual", test, environment) + } + } + } + + for environment := range expectedData { + _, environmentOk := actualData[environment] + if !environmentOk { + t.Errorf("Missing expected environment %s in actual", environment) + } + } +} From 36a6f876009a37269223046f976f9196233d45d8 Mon Sep 17 00:00:00 2001 From: Vishal Jain Date: Sun, 16 May 2021 11:59:29 -0700 Subject: [PATCH 23/66] Added Mock of Minikube Delete Profiles Test portion. 
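The seam used below is a package-level function variable: production code calls
through the variable, and a test reassigns it to a stub so no real hosts or
directories are touched. A minimal sketch of the pattern (illustrative names
only; `deleter` and `TestDeleteUsesStub` are not the actual minikube symbols):

    package cmd

    import "testing"

    // deleter is the injection seam: callers invoke it indirectly,
    // so a test can swap in a side-effect-free stub.
    var deleter = func(profileName string) error {
        // Real host/directory cleanup would run here.
        return nil
    }

    func TestDeleteUsesStub(t *testing.T) {
        called := false
        deleter = func(profileName string) error {
            called = true
            return nil
        }
        if err := deleter("p1"); err != nil || !called {
            t.Fatalf("stub not invoked: err=%v, called=%v", err, called)
        }
    }

A function variable keeps the production call site unchanged while avoiding the
need to introduce an interface for a single behavior.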
--- cmd/minikube/cmd/delete.go | 33 +++++++++++++++++++-------------- cmd/minikube/cmd/delete_test.go | 15 +++++++++++++++ 2 files changed, 34 insertions(+), 14 deletions(-) diff --git a/cmd/minikube/cmd/delete.go b/cmd/minikube/cmd/delete.go index b7c86852d3..3104e57bfb 100644 --- a/cmd/minikube/cmd/delete.go +++ b/cmd/minikube/cmd/delete.go @@ -87,6 +87,24 @@ func (error DeletionError) Error() string { return error.Err.Error() } +var DeleteHostAndDirectoriesGetter = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { + if err := killMountProcess(); err != nil { + out.FailureT("Failed to kill mount process: {{.error}}", out.V{"error": err}) + } + + deleteHosts(api, cc) + + // In case DeleteHost didn't complete the job. + deleteProfileDirectory(profileName) + deleteMachineDirectories(cc) + + if err := deleteConfig(profileName); err != nil { + return err + } + + return deleteContext(profileName) +} + func init() { deleteCmd.Flags().BoolVar(&deleteAll, "all", false, "Set flag to delete all profiles") deleteCmd.Flags().BoolVar(&purge, "purge", false, "Set this flag to delete the '.minikube' folder from your user directory.") @@ -282,23 +300,10 @@ func deleteProfile(ctx context.Context, profile *config.Profile) error { } } - if err := killMountProcess(); err != nil { - out.FailureT("Failed to kill mount process: {{.error}}", out.V{"error": err}) - } - - deleteHosts(api, cc) - - // In case DeleteHost didn't complete the job. - deleteProfileDirectory(profile.Name) - deleteMachineDirectories(cc) - - if err := deleteConfig(profile.Name); err != nil { + if err := DeleteHostAndDirectoriesGetter(api, cc, profile.Name); err != nil { return err } - if err := deleteContext(profile.Name); err != nil { - return err - } out.Step(style.Deleted, `Removed all traces of the "{{.name}}" cluster.`, out.V{"name": profile.Name}) return nil } diff --git a/cmd/minikube/cmd/delete_test.go b/cmd/minikube/cmd/delete_test.go index 6c6a98939f..bff1652183 100644 --- a/cmd/minikube/cmd/delete_test.go +++ b/cmd/minikube/cmd/delete_test.go @@ -17,15 +17,18 @@ limitations under the License. package cmd import ( + "fmt" "io/ioutil" "os" "path/filepath" "testing" + "github.com/docker/machine/libmachine" "github.com/google/go-cmp/cmp" "github.com/otiai10/copy" "github.com/spf13/viper" + cmdcfg "k8s.io/minikube/cmd/minikube/cmd/config" "k8s.io/minikube/pkg/minikube/config" "k8s.io/minikube/pkg/minikube/localpath" ) @@ -154,6 +157,17 @@ func TestDeleteProfile(t *testing.T) { } } +var DeleteHostAndDirectoriesMock = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { + return deleteContextTest() +} + +func deleteContextTest() error { + if err := cmdcfg.Unset(config.ProfileName); err != nil { + return DeletionError{Err: fmt.Errorf("unset minikube profile: %v", err), Errtype: Fatal} + } + return nil +} + func TestDeleteAllProfiles(t *testing.T) { td, err := ioutil.TempDir("", "all") if err != nil { @@ -207,6 +221,7 @@ func TestDeleteAllProfiles(t *testing.T) { } profiles := append(validProfiles, inValidProfiles...) + DeleteHostAndDirectoriesGetter = DeleteHostAndDirectoriesMock errs := DeleteProfiles(profiles) if errs != nil { From ebe03d768760a0b1a3fde74de09a3d069ef8cd1b Mon Sep 17 00:00:00 2001 From: Vishal Jain Date: Sun, 30 May 2021 20:42:33 -0700 Subject: [PATCH 24/66] Added Mock to DeleteProfile Test. 
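One caveat with reassigning a package-level seam: the replacement leaks into any
test that runs afterwards in the same binary. A defensive variant (our
suggestion, not something this change does) saves the original and restores it
with defer:

    package cmd

    import "testing"

    var deleter = func(profileName string) error { return nil }

    func TestWithStub(t *testing.T) {
        orig := deleter
        defer func() { deleter = orig }() // restore for later tests
        deleter = func(profileName string) error { return nil }
        // ...exercise the code under test here...
    }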
--- cmd/minikube/cmd/delete_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/minikube/cmd/delete_test.go b/cmd/minikube/cmd/delete_test.go index bff1652183..c878a1a2ca 100644 --- a/cmd/minikube/cmd/delete_test.go +++ b/cmd/minikube/cmd/delete_test.go @@ -117,6 +117,7 @@ func TestDeleteProfile(t *testing.T) { t.Logf("load failure: %v", err) } + DeleteHostAndDirectoriesGetter = DeleteHostAndDirectoriesMock errs := DeleteProfiles([]*config.Profile{profile}) if len(errs) > 0 { HandleDeletionErrors(errs) From d7d3593a897eb4544bdcb6005e0ad19d7a04b95c Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 3 Jun 2021 13:33:58 -0700 Subject: [PATCH 25/66] Rewrite process_data.sh to no longer depend on git log. This now means we depend on the date being contained within the details. --- hack/test-flake-chart/process_data.sh | 28 ++++++--------------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/hack/test-flake-chart/process_data.sh b/hack/test-flake-chart/process_data.sh index f1ed764e87..b3a6e26a9e 100755 --- a/hack/test-flake-chart/process_data.sh +++ b/hack/test-flake-chart/process_data.sh @@ -14,27 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Create temp path for partial data (storing everything but the commit date.) -PARTIAL_DATA_PATH=$(mktemp) -# Print the partial path for debugging/convenience. -echo "Partial path: $PARTIAL_DATA_PATH" 1>&2 - # Print header. -printf "Commit Hash,Commit Date,Environment,Test,Status,Duration\n" +printf "Commit Hash,Test Date,Environment,Test,Status,Duration\n" -# 1) Turn each test in each summary file to a CSV line containing its commit hash, environment, test, and status. -# 2) Copy partial data to $PARTIAL_DATA_PATH to join with date later. -# 3) Extract only commit hash for each row -# 4) Make the commit hashes unique (we assume that gsutil cats files from the same hash next to each other). -# Also force buffering to occur per line so remainder of pipe can continue to process. -# 5) Execute git log for each commit to get the date of each. -# 6) Join dates with test data. -jq -r '((.PassedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), - (.FailedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), - (.SkippedTests[]? as $name | {commit: .Detail.Details, environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"})) - | .commit + "," + .environment + "," + .test + "," + .status + "," + (.duration | tostring)' \ -| tee $PARTIAL_DATA_PATH \ -| sed -r -n 's/^([^,]+),.*/\1/p' \ -| stdbuf -oL -eL uniq \ -| xargs -I {} git log -1 --pretty=format:"{},%as%n" {} \ -| join -t "," - $PARTIAL_DATA_PATH +# Turn each test in each summary file to a CSV line containing its commit hash, date, environment, test, and status. +jq -r '((.PassedTests[]? as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), + (.FailedTests[]? as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), + (.SkippedTests[]? 
as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"})) + | .commit + "," + .date + "," + .environment + "," + .test + "," + .status + "," + (.duration | tostring)' From 40fdbe61ae817b8ff618321461d74059ba9315a0 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 3 Jun 2021 14:01:06 -0700 Subject: [PATCH 26/66] Allow Jenkins to append to the flake rate data. --- hack/jenkins/common.sh | 5 +++- hack/jenkins/upload_integration_report.sh | 4 +++ hack/test-flake-chart/jenkins_upload_tests.sh | 25 +++++++++++++++++++ 3 files changed, 33 insertions(+), 1 deletion(-) create mode 100755 hack/test-flake-chart/jenkins_upload_tests.sh diff --git a/hack/jenkins/common.sh b/hack/jenkins/common.sh index beb58f7d08..0832b6b3e2 100755 --- a/hack/jenkins/common.sh +++ b/hack/jenkins/common.sh @@ -419,7 +419,7 @@ fi touch "${HTML_OUT}" touch "${SUMMARY_OUT}" -gopogh_status=$(gopogh -in "${JSON_OUT}" -out_html "${HTML_OUT}" -out_summary "${SUMMARY_OUT}" -name "${JOB_NAME}" -pr "${MINIKUBE_LOCATION}" -repo github.com/kubernetes/minikube/ -details "${COMMIT}") || true +gopogh_status=$(gopogh -in "${JSON_OUT}" -out_html "${HTML_OUT}" -out_summary "${SUMMARY_OUT}" -name "${JOB_NAME}" -pr "${MINIKUBE_LOCATION}" -repo github.com/kubernetes/minikube/ -details "${COMMIT}:$(date +%Y-%m-%d)") || true fail_num=$(echo $gopogh_status | jq '.NumberOfFail') test_num=$(echo $gopogh_status | jq '.NumberOfTests') pessimistic_status="${fail_num} / ${test_num} failures" @@ -441,6 +441,9 @@ if [ -z "${EXTERNAL}" ]; then gsutil -qm cp "${HTML_OUT}" "gs://${JOB_GCS_BUCKET}.html" || true echo ">> uploading ${SUMMARY_OUT}" gsutil -qm cp "${SUMMARY_OUT}" "gs://${JOB_GCS_BUCKET}_summary.json" || true + if [[ "${MINIKUBE_LOCATION}" == "master" ]]; then + ./test-flake-chart/jenkins_upload_tests.sh "${SUMMARY_OUT}" + fi else # Otherwise, put the results in a predictable spot so the upload job can find them REPORTS_PATH=test_reports diff --git a/hack/jenkins/upload_integration_report.sh b/hack/jenkins/upload_integration_report.sh index 04e24df09e..ddf9a6cee6 100644 --- a/hack/jenkins/upload_integration_report.sh +++ b/hack/jenkins/upload_integration_report.sh @@ -47,3 +47,7 @@ gsutil -qm cp "${HTML_OUT}" "gs://${JOB_GCS_BUCKET}.html" || true SUMMARY_OUT="$ARTIFACTS/summary.txt" echo ">> uploading ${SUMMARY_OUT}" gsutil -qm cp "${SUMMARY_OUT}" "gs://${JOB_GCS_BUCKET}_summary.json" || true + +if [[ "${MINIKUBE_LOCATION}" == "master" ]]; then + ./test-flake-chart/jenkins_upload_tests.sh "${SUMMARY_OUT}" +fi diff --git a/hack/test-flake-chart/jenkins_upload_tests.sh b/hack/test-flake-chart/jenkins_upload_tests.sh new file mode 100755 index 0000000000..e609893b80 --- /dev/null +++ b/hack/test-flake-chart/jenkins_upload_tests.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -x -o pipefail + +if [ "$#" -ne 1 ]; then + echo "Wrong number of arguments. Usage: jenkins_upload_tests.sh " 1>&2 + exit 1 +fi + +TMP_DATA=$(mktemp) + +# Use the gopogh summary, process it, optimize the data, remove the header, and store. +<"$1" ./test-flake-chart/process_data.sh \ + | ./test-flake-chart/optimize_data.sh \ + | sed "1d" > $TMP_DATA + +GCS_TMP="gs://minikube-flake-rate/$(basename "$TMP_DATA")" + +# Copy data to append to GCS +gsutil cp $TMP_DATA $GCS_TMP +# Append data to existing data. +gsutil compose gs://minikube-flake-rate/data.csv $GCS_TMP gs://minikube-flake-rate/data.csv +# Clear all the temp stuff. 
+rm $TMP_DATA +gsutil rm $GCS_TMP From d245cfcdf7e8d5238462557962dbf9630ed630e6 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 3 Jun 2021 14:12:03 -0700 Subject: [PATCH 27/66] Move all flake rate files to Jenkins to allow auto-upload. --- hack/{ => jenkins}/test-flake-chart/collect_data.sh | 0 hack/{ => jenkins}/test-flake-chart/compute_flake_rate.go | 0 hack/{ => jenkins}/test-flake-chart/compute_flake_rate_test.go | 0 hack/{ => jenkins}/test-flake-chart/flake_chart.html | 0 hack/{ => jenkins}/test-flake-chart/flake_chart.js | 0 hack/{ => jenkins}/test-flake-chart/jenkins_upload_tests.sh | 0 hack/{ => jenkins}/test-flake-chart/optimize_data.sh | 0 hack/{ => jenkins}/test-flake-chart/process_data.sh | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename hack/{ => jenkins}/test-flake-chart/collect_data.sh (100%) rename hack/{ => jenkins}/test-flake-chart/compute_flake_rate.go (100%) rename hack/{ => jenkins}/test-flake-chart/compute_flake_rate_test.go (100%) rename hack/{ => jenkins}/test-flake-chart/flake_chart.html (100%) rename hack/{ => jenkins}/test-flake-chart/flake_chart.js (100%) rename hack/{ => jenkins}/test-flake-chart/jenkins_upload_tests.sh (100%) rename hack/{ => jenkins}/test-flake-chart/optimize_data.sh (100%) rename hack/{ => jenkins}/test-flake-chart/process_data.sh (100%) diff --git a/hack/test-flake-chart/collect_data.sh b/hack/jenkins/test-flake-chart/collect_data.sh similarity index 100% rename from hack/test-flake-chart/collect_data.sh rename to hack/jenkins/test-flake-chart/collect_data.sh diff --git a/hack/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go similarity index 100% rename from hack/test-flake-chart/compute_flake_rate.go rename to hack/jenkins/test-flake-chart/compute_flake_rate.go diff --git a/hack/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go similarity index 100% rename from hack/test-flake-chart/compute_flake_rate_test.go rename to hack/jenkins/test-flake-chart/compute_flake_rate_test.go diff --git a/hack/test-flake-chart/flake_chart.html b/hack/jenkins/test-flake-chart/flake_chart.html similarity index 100% rename from hack/test-flake-chart/flake_chart.html rename to hack/jenkins/test-flake-chart/flake_chart.html diff --git a/hack/test-flake-chart/flake_chart.js b/hack/jenkins/test-flake-chart/flake_chart.js similarity index 100% rename from hack/test-flake-chart/flake_chart.js rename to hack/jenkins/test-flake-chart/flake_chart.js diff --git a/hack/test-flake-chart/jenkins_upload_tests.sh b/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh similarity index 100% rename from hack/test-flake-chart/jenkins_upload_tests.sh rename to hack/jenkins/test-flake-chart/jenkins_upload_tests.sh diff --git a/hack/test-flake-chart/optimize_data.sh b/hack/jenkins/test-flake-chart/optimize_data.sh similarity index 100% rename from hack/test-flake-chart/optimize_data.sh rename to hack/jenkins/test-flake-chart/optimize_data.sh diff --git a/hack/test-flake-chart/process_data.sh b/hack/jenkins/test-flake-chart/process_data.sh similarity index 100% rename from hack/test-flake-chart/process_data.sh rename to hack/jenkins/test-flake-chart/process_data.sh From cec82877d86a3299ba3258b75222121f9756e2a1 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 3 Jun 2021 14:25:04 -0700 Subject: [PATCH 28/66] Format flake rates into CSV containing environment, test, and flake rate. 
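With this change the tool emits one CSV row per (environment, test) pair instead
of the previous nested text blocks. Illustrative output only (made-up values;
at this point in the series the rate is still a 0-1 float, not a percentage):

    Environment,Test,Flake Rate
    Docker_Linux,TestFunctional/parallel/LogsCmd,0.125000
    Docker_Linux_containerd,TestFunctional/parallel/LogsCmd,0.500000

Note that iteration order over Go maps is randomized, so rows may appear in a
different order on each run.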
--- hack/jenkins/test-flake-chart/compute_flake_rate.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index ea622a4a80..55f81bae15 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -45,12 +45,11 @@ func main() { splitEntries := SplitData(testEntries) filteredEntries := FilterRecentEntries(splitEntries, *dateRange) flakeRates := ComputeFlakeRates(filteredEntries) + fmt.Println("Environment,Test,Flake Rate") for environment, environmentSplit := range flakeRates { - fmt.Printf("%s {\n", environment) for test, flakeRate := range environmentSplit { - fmt.Printf(" %s: %f\n", test, flakeRate) + fmt.Printf("%s,%s,%f\n", environment, test, flakeRate) } - fmt.Printf("}\n") } } From 03b793c7d040dbffa2a608d8b0f2e833e94d9137 Mon Sep 17 00:00:00 2001 From: Vishal Jain Date: Wed, 2 Jun 2021 20:00:45 -0700 Subject: [PATCH 29/66] Fix names. --- cmd/minikube/cmd/delete.go | 4 ++-- cmd/minikube/cmd/delete_test.go | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/cmd/minikube/cmd/delete.go b/cmd/minikube/cmd/delete.go index 3104e57bfb..ec657fe817 100644 --- a/cmd/minikube/cmd/delete.go +++ b/cmd/minikube/cmd/delete.go @@ -87,7 +87,7 @@ func (error DeletionError) Error() string { return error.Err.Error() } -var DeleteHostAndDirectoriesGetter = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { +var hostAndDirsDeleter = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { if err := killMountProcess(); err != nil { out.FailureT("Failed to kill mount process: {{.error}}", out.V{"error": err}) } @@ -300,7 +300,7 @@ func deleteProfile(ctx context.Context, profile *config.Profile) error { } } - if err := DeleteHostAndDirectoriesGetter(api, cc, profile.Name); err != nil { + if err := hostAndDirsDeleter(api, cc, profile.Name); err != nil { return err } diff --git a/cmd/minikube/cmd/delete_test.go b/cmd/minikube/cmd/delete_test.go index c878a1a2ca..fff2ffabf8 100644 --- a/cmd/minikube/cmd/delete_test.go +++ b/cmd/minikube/cmd/delete_test.go @@ -117,7 +117,7 @@ func TestDeleteProfile(t *testing.T) { t.Logf("load failure: %v", err) } - DeleteHostAndDirectoriesGetter = DeleteHostAndDirectoriesMock + hostAndDirsDeleter = hostAndDirsDeleterMock errs := DeleteProfiles([]*config.Profile{profile}) if len(errs) > 0 { HandleDeletionErrors(errs) @@ -158,7 +158,7 @@ func TestDeleteProfile(t *testing.T) { } } -var DeleteHostAndDirectoriesMock = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { +var hostAndDirsDeleterMock = func(api libmachine.API, cc *config.ClusterConfig, profileName string) error { return deleteContextTest() } @@ -222,7 +222,7 @@ func TestDeleteAllProfiles(t *testing.T) { } profiles := append(validProfiles, inValidProfiles...) - DeleteHostAndDirectoriesGetter = DeleteHostAndDirectoriesMock + hostAndDirsDeleter = hostAndDirsDeleterMock errs := DeleteProfiles(profiles) if errs != nil { From a80f3bc5aead4e45f19c94419c8ebb9ce6a48c3e Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 13:49:05 -0700 Subject: [PATCH 30/66] Add license to upload_tests script. 
--- .../test-flake-chart/jenkins_upload_tests.sh | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh b/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh index e609893b80..28db50692f 100755 --- a/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh +++ b/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh @@ -1,5 +1,19 @@ #!/bin/bash +# Copyright 2018 The Kubernetes Authors All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + set -x -o pipefail if [ "$#" -ne 1 ]; then From 9e7f1ebbf07bfcf8d9b2199cd7b8de6a2e5bb841 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 13:49:40 -0700 Subject: [PATCH 31/66] Make computing flake rates print out percentages (with fixed 2 decimal precision) rather than floats. --- hack/jenkins/test-flake-chart/compute_flake_rate.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index 55f81bae15..4f71aa3cf5 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -48,7 +48,7 @@ func main() { fmt.Println("Environment,Test,Flake Rate") for environment, environmentSplit := range flakeRates { for test, flakeRate := range environmentSplit { - fmt.Printf("%s,%s,%f\n", environment, test, flakeRate) + fmt.Printf("%s,%s,%.2f\n", environment, test, flakeRate*100) } } } From 501b238841278647eb940562838dc4dc90ce4c50 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 14:09:32 -0700 Subject: [PATCH 32/66] Use "set -eu -o pipefail" for all scripts. Previously failing commands in scripts wouldn't make them actually fail. Now it does! --- hack/jenkins/test-flake-chart/jenkins_upload_tests.sh | 2 +- hack/jenkins/test-flake-chart/optimize_data.sh | 2 ++ hack/jenkins/test-flake-chart/process_data.sh | 2 ++ 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh b/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh index 28db50692f..7d310f9486 100755 --- a/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh +++ b/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -x -o pipefail +set -eu -o pipefail if [ "$#" -ne 1 ]; then echo "Wrong number of arguments. Usage: jenkins_upload_tests.sh " 1>&2 diff --git a/hack/jenkins/test-flake-chart/optimize_data.sh b/hack/jenkins/test-flake-chart/optimize_data.sh index 1fd93f1901..67dae593e2 100755 --- a/hack/jenkins/test-flake-chart/optimize_data.sh +++ b/hack/jenkins/test-flake-chart/optimize_data.sh @@ -14,5 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +set -eu -o pipefail + # Take input CSV. For each field, if it is the same as the previous row, replace it with an empty string. 
awk -F, 'BEGIN {OFS = FS} { for(i=1; i<=NF; i++) { if($i == j[i]) { $i = ""; } else { j[i] = $i; } } printf "%s\n",$0 }' diff --git a/hack/jenkins/test-flake-chart/process_data.sh b/hack/jenkins/test-flake-chart/process_data.sh index b3a6e26a9e..7907e69673 100755 --- a/hack/jenkins/test-flake-chart/process_data.sh +++ b/hack/jenkins/test-flake-chart/process_data.sh @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +set -eu -o pipefail + # Print header. printf "Commit Hash,Test Date,Environment,Test,Status,Duration\n" From 7c4615460088198629851054473970ba3daa363c Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 14:12:31 -0700 Subject: [PATCH 33/66] Rename jenkins_upload_tests.sh to upload_tests.sh. Since these scripts are already in the jenkins folder, having the jenkins prefix is redundant. --- hack/jenkins/common.sh | 2 +- .../{jenkins_upload_tests.sh => upload_tests.sh} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename hack/jenkins/test-flake-chart/{jenkins_upload_tests.sh => upload_tests.sh} (100%) diff --git a/hack/jenkins/common.sh b/hack/jenkins/common.sh index 0832b6b3e2..9d080baa88 100755 --- a/hack/jenkins/common.sh +++ b/hack/jenkins/common.sh @@ -442,7 +442,7 @@ if [ -z "${EXTERNAL}" ]; then echo ">> uploading ${SUMMARY_OUT}" gsutil -qm cp "${SUMMARY_OUT}" "gs://${JOB_GCS_BUCKET}_summary.json" || true if [[ "${MINIKUBE_LOCATION}" == "master" ]]; then - ./test-flake-chart/jenkins_upload_tests.sh "${SUMMARY_OUT}" + ./test-flake-chart/upload_tests.sh "${SUMMARY_OUT}" fi else # Otherwise, put the results in a predictable spot so the upload job can find them diff --git a/hack/jenkins/test-flake-chart/jenkins_upload_tests.sh b/hack/jenkins/test-flake-chart/upload_tests.sh similarity index 100% rename from hack/jenkins/test-flake-chart/jenkins_upload_tests.sh rename to hack/jenkins/test-flake-chart/upload_tests.sh From 8f953781a2fc31ffed2ca5f8c38007de30717cc5 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 14:18:53 -0700 Subject: [PATCH 34/66] Create report_flakes script to comment on PRs about flake rates of failed tests. --- hack/jenkins/common.sh | 2 + .../jenkins/test-flake-chart/report_flakes.sh | 74 +++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100755 hack/jenkins/test-flake-chart/report_flakes.sh diff --git a/hack/jenkins/common.sh b/hack/jenkins/common.sh index 9d080baa88..b153185ee3 100755 --- a/hack/jenkins/common.sh +++ b/hack/jenkins/common.sh @@ -443,6 +443,8 @@ if [ -z "${EXTERNAL}" ]; then gsutil -qm cp "${SUMMARY_OUT}" "gs://${JOB_GCS_BUCKET}_summary.json" || true if [[ "${MINIKUBE_LOCATION}" == "master" ]]; then ./test-flake-chart/upload_tests.sh "${SUMMARY_OUT}" + elif [[ "${JOB_NAME}" == "Docker_Linux" ]]; then + ./test-flake-chart/report_flakes.sh "${MINIKUBE_LOCATION}" "${SUMMARY_OUT}" "${JOB_NAME}" fi else # Otherwise, put the results in a predictable spot so the upload job can find them diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh new file mode 100755 index 0000000000..86a4e35e8f --- /dev/null +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -0,0 +1,74 @@ +#!/bin/bash + +# Copyright 2018 The Kubernetes Authors All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eu -o pipefail + +if [ "$#" -ne 2 ]; then + echo "Wrong number of arguments. Usage: report_flakes.sh " 1>&2 + exit 1 +fi + +PR_NUMBER=$1 +SUMMARY_DATA=$2 +ENVIRONMENT=$3 + +# To prevent having a super-long comment, add a maximum number of tests to report. +MAX_REPORTED_TESTS=30 + +DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) + +TMP_DATA=$(mktemp) +# 1) Process the data in the gopogh summary. +# 2) Filter tests to only include failed tests on the environment (and only get their names). +# 3) Sort the names of the tests. +# 4) Store in file $TMP_DATA. +< "$SUMMARY_DATA" $DIR/process_data.sh \ + | sed -n -r -e "s/[0-9a-f]*,[0-9-]*,$ENVIRONMENT,([a-zA-Z\/_-]*),Failed,[.0-9]*/\1/p" \ + | sort \ + > "$TMP_DATA" + +# Download the precomputed flake rates from the GCS bucket into file $TMP_FLAKE_RATES. +TMP_FLAKE_RATES=$(mktemp) +gsutil cp gs://minikube-flake-rate/flake_rates.csv "$TMP_FLAKE_RATES" + +TMP_FAILED_RATES="$TMP_FLAKE_RATES\_filtered" +# 1) Parse/filter the flake rates to only include the test name and flake rates for environment. +# 2) Sort the flake rates based on test name. +# 3) Join the flake rates with the failing tests to only get flake rates of failing tests. +# 4) Sort failed test flake rates based on the flakiness of that test - stable tests should be first on the list. +# 5) Store in file $TMP_FAILED_RATES. +< "$TMP_FLAKE_RATES" sed -n -r -e "s/$ENVIRONMENT,([a-zA-Z\/_-]*),([.0-9]*)/\1,\2/p" \ + | sort -t, -k1,1 \ + | join -t , -j 1 "$TMP_DATA" - \ + | sort -g -t, -k2,2 \ + > "$TMP_FAILED_RATES" + +# Create the comment template. +TMP_COMMENT=$(mktemp) +printf "These are the flake rates of all failed tests on %s.\n|Failed Tests|Flake Rate (%%)|\n|---|---|\n" "$ENVIRONMENT" > "$TMP_COMMENT" +# 1) Get the first $MAX_REPORTED_TESTS lines. +# 2) Print a row in the table with the test name, flake rate, and a link to the flake chart for that test. +# 3) Append these rows to file $TMP_COMMENT. +< "$TMP_FAILED_RATES" head -n $MAX_REPORTED_TESTS \ + | sed -n -r -e "s/([a-zA-Z\/_-]*),([.0-9]*)/|\1|\2 ([chart](https:\/\/storage.googleapis.com\/minikube-flake-rate\/flake_chart.html?env=$ENVIRONMENT\&test=\1))|/p" \ + >> "$TMP_COMMENT" + +# If there are too many failing tests, add an extra row explaining this, and a message after the table. +if [[ $(wc -l < "$TMP_FAILED_RATES") -gt 30 ]]; then + printf "|More tests...|Continued...|\n\nToo many tests failed - See test logs for more details." >> "$TMP_COMMENT" +fi + +gh issue comment "https://github.com/kubernetes/minikube/pull/$PR_NUMBER" --body "$(cat $TMP_COMMENT)" From 139d7e37710ee729955ae2afd910a176a20251c9 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 16:32:20 -0700 Subject: [PATCH 35/66] Fix lints in compute_flake_rate.go and compute_flake_rate_test.go. 
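Two lint classes are addressed: error strings should start lowercase
(staticcheck ST1005), and identifiers should be camelCase rather than
snake_case. A compilable illustration of the error-string rule (a generic
sketch, not minikube code):

    package main

    import (
        "errors"
        "fmt"
    )

    func main() {
        // ST1005: error strings get wrapped mid-sentence, so they
        // should not be capitalized or end with punctuation.
        err := errors.New("bad CSV format") // not "Bad CSV format"
        fmt.Println(fmt.Errorf("reading data: %w", err))
    }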
--- .../test-flake-chart/compute_flake_rate.go | 2 +- .../compute_flake_rate_test.go | 126 +++++++++--------- 2 files changed, 64 insertions(+), 64 deletions(-) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index 4f71aa3cf5..c8c9806382 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -79,7 +79,7 @@ func ReadData(file io.Reader) []TestEntry { fields := strings.Split(line, ",") if firstLine { if len(fields) != 6 { - exit(fmt.Sprintf("Data CSV in incorrect format. Expected 6 columns, but got %d", len(fields)), fmt.Errorf("Bad CSV format")) + exit(fmt.Sprintf("Data CSV in incorrect format. Expected 6 columns, but got %d", len(fields)), fmt.Errorf("bad CSV format")) } firstLine = false } diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go index 22c1f6b476..0154ee3b02 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go @@ -23,8 +23,8 @@ import ( "time" ) -func simpleDate(year int, month time.Month, day int) time.Time { - return time.Date(year, month, day, 0, 0, 0, 0, time.UTC) +func simpleDate(year int, day int) time.Time { + return time.Date(year, time.January, day, 0, 0, 0, 0, time.UTC) } func compareEntrySlices(t *testing.T, actualData, expectedData []TestEntry, extra string) { @@ -62,31 +62,31 @@ func TestReadData(t *testing.T) { { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", }, { name: "test2", environment: "env2", - date: simpleDate(2001, time.January, 1), + date: simpleDate(2001, 1), status: "Failed", }, { name: "test1", environment: "env2", - date: simpleDate(2001, time.January, 1), + date: simpleDate(2001, 1), status: "Failed", }, { name: "test1", environment: "env2", - date: simpleDate(2002, time.January, 1), + date: simpleDate(2002, 1), status: "Passed", }, { name: "test3", environment: "env3", - date: simpleDate(2003, time.January, 1), + date: simpleDate(2003, 1), status: "Passed", }, } @@ -129,44 +129,44 @@ func compareSplitData(t *testing.T, actual, expected map[string]map[string][]Tes } func TestSplitData(t *testing.T) { - entry_e1_t1_1, entry_e1_t1_2 := TestEntry{ + entryE1T1_1, entryE1T1_2 := TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", }, TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 2), + date: simpleDate(2000, 2), status: "Passed", } - entry_e1_t2 := TestEntry{ + entryE1T2 := TestEntry{ name: "test2", environment: "env1", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", } - entry_e2_t1 := TestEntry{ + entryE2T1 := TestEntry{ name: "test1", environment: "env2", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", } - entry_e2_t2 := TestEntry{ + entryE2T2 := TestEntry{ name: "test2", environment: "env2", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", } - actual := SplitData([]TestEntry{entry_e1_t1_1, entry_e1_t1_2, entry_e1_t2, entry_e2_t1, entry_e2_t2}) + actual := SplitData([]TestEntry{entryE1T1_1, entryE1T1_2, entryE1T2, entryE2T1, entryE2T2}) expected := map[string]map[string][]TestEntry{ "env1": { - "test1": {entry_e1_t1_1, entry_e1_t1_2}, - 
"test2": {entry_e1_t2}, + "test1": {entryE1T1_1, entryE1T1_2}, + "test2": {entryE1T2}, }, "env2": { - "test1": {entry_e2_t1}, - "test2": {entry_e2_t2}, + "test1": {entryE2T1}, + "test2": {entryE2T2}, }, } @@ -174,85 +174,85 @@ func TestSplitData(t *testing.T) { } func TestFilterRecentEntries(t *testing.T) { - entry_e1_t1_r1, entry_e1_t1_r2, entry_e1_t1_r3, entry_e1_t1_o1, entry_e1_t1_o2 := TestEntry{ + entryE1T1R1, entryE1T1R2, entryE1T1R3, entryE1T1O1, entryE1T1O2 := TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 4), + date: simpleDate(2000, 4), status: "Passed", }, TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 3), + date: simpleDate(2000, 3), status: "Passed", }, TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 3), + date: simpleDate(2000, 3), status: "Passed", }, TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 2), + date: simpleDate(2000, 2), status: "Passed", }, TestEntry{ name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Passed", } - entry_e1_t2_r1, entry_e1_t2_r2, entry_e1_t2_o1 := TestEntry{ + entryE1T2R1, entryE1T2R2, entryE1T2O1 := TestEntry{ name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 3), + date: simpleDate(2001, 3), status: "Passed", }, TestEntry{ name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 2), + date: simpleDate(2001, 2), status: "Passed", }, TestEntry{ name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 1), + date: simpleDate(2001, 1), status: "Passed", } - entry_e2_t2_r1, entry_e2_t2_r2, entry_e2_t2_o1 := TestEntry{ + entryE2T2R1, entryE2T2R2, entryE2T2O1 := TestEntry{ name: "test2", environment: "env2", - date: simpleDate(2003, time.January, 3), + date: simpleDate(2003, 3), status: "Passed", }, TestEntry{ name: "test2", environment: "env2", - date: simpleDate(2003, time.January, 2), + date: simpleDate(2003, 2), status: "Passed", }, TestEntry{ name: "test2", environment: "env2", - date: simpleDate(2003, time.January, 1), + date: simpleDate(2003, 1), status: "Passed", } actualData := FilterRecentEntries(map[string]map[string][]TestEntry{ "env1": { "test1": { - entry_e1_t1_r1, - entry_e1_t1_r2, - entry_e1_t1_r3, - entry_e1_t1_o1, - entry_e1_t1_o2, + entryE1T1R1, + entryE1T1R2, + entryE1T1R3, + entryE1T1O1, + entryE1T1O2, }, "test2": { - entry_e1_t2_r1, - entry_e1_t2_r2, - entry_e1_t2_o1, + entryE1T2R1, + entryE1T2R2, + entryE1T2O1, }, }, "env2": { "test2": { - entry_e2_t2_r1, - entry_e2_t2_r2, - entry_e2_t2_o1, + entryE2T2R1, + entryE2T2R2, + entryE2T2O1, }, }, }, 2) @@ -260,19 +260,19 @@ func TestFilterRecentEntries(t *testing.T) { expectedData := map[string]map[string][]TestEntry{ "env1": { "test1": { - entry_e1_t1_r1, - entry_e1_t1_r2, - entry_e1_t1_r3, + entryE1T1R1, + entryE1T1R2, + entryE1T1R3, }, "test2": { - entry_e1_t2_r1, - entry_e1_t2_r2, + entryE1T2R1, + entryE1T2R2, }, }, "env2": { "test2": { - entry_e2_t2_r1, - entry_e2_t2_r2, + entryE2T2R1, + entryE2T2R2, }, }, } @@ -287,27 +287,27 @@ func TestComputeFlakeRates(t *testing.T) { { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 4), + date: simpleDate(2000, 4), status: "Passed", }, { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 3), + date: simpleDate(2000, 3), status: "Passed", }, { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 3), 
+ date: simpleDate(2000, 3), status: "Passed", }, { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 2), + date: simpleDate(2000, 2), status: "Passed", }, { name: "test1", environment: "env1", - date: simpleDate(2000, time.January, 1), + date: simpleDate(2000, 1), status: "Failed", }, }, @@ -315,17 +315,17 @@ func TestComputeFlakeRates(t *testing.T) { { name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 3), + date: simpleDate(2001, 3), status: "Failed", }, { name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 2), + date: simpleDate(2001, 2), status: "Failed", }, { name: "test2", environment: "env1", - date: simpleDate(2001, time.January, 1), + date: simpleDate(2001, 1), status: "Failed", }, }, @@ -335,12 +335,12 @@ func TestComputeFlakeRates(t *testing.T) { { name: "test2", environment: "env2", - date: simpleDate(2003, time.January, 3), + date: simpleDate(2003, 3), status: "Passed", }, TestEntry{ name: "test2", environment: "env2", - date: simpleDate(2003, time.January, 2), + date: simpleDate(2003, 2), status: "Failed", }, }, From 716f6901890ae3882f4710f0fcd7c8ffc080df47 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Mon, 7 Jun 2021 16:39:06 -0700 Subject: [PATCH 36/66] Change copyright to 2021. --- hack/jenkins/test-flake-chart/collect_data.sh | 2 +- hack/jenkins/test-flake-chart/compute_flake_rate.go | 2 +- hack/jenkins/test-flake-chart/compute_flake_rate_test.go | 2 +- hack/jenkins/test-flake-chart/optimize_data.sh | 2 +- hack/jenkins/test-flake-chart/process_data.sh | 2 +- hack/jenkins/test-flake-chart/report_flakes.sh | 2 +- hack/jenkins/test-flake-chart/upload_tests.sh | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/hack/jenkins/test-flake-chart/collect_data.sh b/hack/jenkins/test-flake-chart/collect_data.sh index 44273f6d80..a03b726825 100755 --- a/hack/jenkins/test-flake-chart/collect_data.sh +++ b/hack/jenkins/test-flake-chart/collect_data.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Kubernetes Authors All rights reserved. +# Copyright 2021 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index c8c9806382..b3e4c2013f 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -1,5 +1,5 @@ /* -Copyright 2020 The Kubernetes Authors All rights reserved. +Copyright 2021 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go index 0154ee3b02..c6407c16bd 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go @@ -1,5 +1,5 @@ /* -Copyright 2020 The Kubernetes Authors All rights reserved. +Copyright 2021 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/hack/jenkins/test-flake-chart/optimize_data.sh b/hack/jenkins/test-flake-chart/optimize_data.sh index 67dae593e2..2bc140fc28 100755 --- a/hack/jenkins/test-flake-chart/optimize_data.sh +++ b/hack/jenkins/test-flake-chart/optimize_data.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Kubernetes Authors All rights reserved. +# Copyright 2021 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/hack/jenkins/test-flake-chart/process_data.sh b/hack/jenkins/test-flake-chart/process_data.sh index 7907e69673..25c6ba5ec6 100755 --- a/hack/jenkins/test-flake-chart/process_data.sh +++ b/hack/jenkins/test-flake-chart/process_data.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Kubernetes Authors All rights reserved. +# Copyright 2021 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index 86a4e35e8f..1cd4490c84 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Kubernetes Authors All rights reserved. +# Copyright 2021 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/hack/jenkins/test-flake-chart/upload_tests.sh b/hack/jenkins/test-flake-chart/upload_tests.sh index 7d310f9486..508d76f9ad 100755 --- a/hack/jenkins/test-flake-chart/upload_tests.sh +++ b/hack/jenkins/test-flake-chart/upload_tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Kubernetes Authors All rights reserved. +# Copyright 2021 The Kubernetes Authors All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 3aa922813c86644510c2db7e6789dda4fa2447a7 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 8 Jun 2021 11:50:14 -0700 Subject: [PATCH 37/66] Fix wrong number of parameters for report_flakes.sh. --- hack/jenkins/test-flake-chart/report_flakes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index 1cd4490c84..951e929183 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -16,7 +16,7 @@ set -eu -o pipefail -if [ "$#" -ne 2 ]; then +if [ "$#" -ne 3 ]; then echo "Wrong number of arguments. Usage: report_flakes.sh " 1>&2 exit 1 fi From fcbae7eaa143ae6e1a330518a73f0191de132f50 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 8 Jun 2021 11:54:08 -0700 Subject: [PATCH 38/66] Make sure gh is present when running report_flakes.sh. 
---
 hack/jenkins/test-flake-chart/report_flakes.sh | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh
index 951e929183..b0933fa56a 100755
--- a/hack/jenkins/test-flake-chart/report_flakes.sh
+++ b/hack/jenkins/test-flake-chart/report_flakes.sh
@@ -71,4 +71,7 @@ if [[ $(wc -l < "$TMP_FAILED_RATES") -gt 30 ]]; then
   printf "|More tests...|Continued...|\n\nToo many tests failed - See test logs for more details." >> "$TMP_COMMENT"
 fi
 
+# install gh if not present
+sudo $DIR/../installers/check_install_gh.sh || true
+
 gh issue comment "https://github.com/kubernetes/minikube/pull/$PR_NUMBER" --body "$(cat $TMP_COMMENT)"

From fb8e4d982bce78c88183e4360e6843a81f60380a Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Tue, 8 Jun 2021 13:06:28 -0700
Subject: [PATCH 39/66] Clean up compute_flake_rate.go.

---
 hack/jenkins/test-flake-chart/compute_flake_rate.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go
index b3e4c2013f..69d40042f2 100644
--- a/hack/jenkins/test-flake-chart/compute_flake_rate.go
+++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go
@@ -155,7 +155,7 @@ func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRan
 		return dates[j].Before(dates[i])
 	})
 	datesInRange := make([]time.Time, 0, dateRange)
-	var lastDate time.Time = time.Date(0, 0, 0, 0, 0, 0, 0, time.Local)
+	var lastDate time.Time
 	// Go through each date.
 	for _, date := range dates {
 		// If date is the same as last date, ignore it.
@@ -175,7 +175,7 @@ func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRan
 	for _, entry := range testSplit {
 		// Look for the first element <= entry.date
 		index := sort.Search(len(datesInRange), func(i int) bool {
-			return datesInRange[i].Before(entry.date) || datesInRange[i].Equal(entry.date)
+			return !datesInRange[i].After(entry.date)
 		})
 		// If no date is <= entry.date, or the found date does not equal entry.date.
 		if index == len(datesInRange) || !datesInRange[index].Equal(entry.date) {

From e089973f6530a2f598a9ae041b8717523fd479cf Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Wed, 9 Jun 2021 15:21:43 -0700
Subject: [PATCH 40/66] Create SplitEntryMap type to simplify some definitions.

---
 .../test-flake-chart/compute_flake_rate.go      | 15 +++++++++------
 .../compute_flake_rate_test.go                  | 10 +++++-----
 2 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go
index 69d40042f2..4da2e48ab4 100644
--- a/hack/jenkins/test-flake-chart/compute_flake_rate.go
+++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go
@@ -60,6 +60,9 @@ type TestEntry struct {
 	status      string
 }
 
+// A map with keys of (environment, test_name) to values of slices of TestEntry.
+type SplitEntryMap map[string]map[string][]TestEntry
+
 // Reads CSV `file` and consumes each line to be a single TestEntry.
@@ -110,8 +113,8 @@ func ReadData(file io.Reader) []TestEntry {
 }
 
 // Splits `testEntries` up into maps indexed first by environment and then by test.
-func SplitData(testEntries []TestEntry) map[string]map[string][]TestEntry { - splitEntries := make(map[string]map[string][]TestEntry) +func SplitData(testEntries []TestEntry) SplitEntryMap { + splitEntries := make(SplitEntryMap) for _, entry := range testEntries { appendEntry(splitEntries, entry.environment, entry.name, entry) @@ -121,7 +124,7 @@ func SplitData(testEntries []TestEntry) map[string]map[string][]TestEntry { } // Appends `entry` to `splitEntries` at the `environment` and `test`. -func appendEntry(splitEntries map[string]map[string][]TestEntry, environment, test string, entry TestEntry) { +func appendEntry(splitEntries SplitEntryMap, environment, test string, entry TestEntry) { // Lookup the environment. environmentSplit, ok := splitEntries[environment] if !ok { @@ -141,8 +144,8 @@ func appendEntry(splitEntries map[string]map[string][]TestEntry, environment, te } // Filters `splitEntries` to include only the most recent `date_range` dates. -func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRange uint) map[string]map[string][]TestEntry { - filteredEntries := make(map[string]map[string][]TestEntry) +func FilterRecentEntries(splitEntries SplitEntryMap, dateRange uint) SplitEntryMap { + filteredEntries := make(SplitEntryMap) for environment, environmentSplit := range splitEntries { for test, testSplit := range environmentSplit { @@ -189,7 +192,7 @@ func FilterRecentEntries(splitEntries map[string]map[string][]TestEntry, dateRan } // Computes the flake rates over each entry in `splitEntries`. -func ComputeFlakeRates(splitEntries map[string]map[string][]TestEntry) map[string]map[string]float32 { +func ComputeFlakeRates(splitEntries SplitEntryMap) map[string]map[string]float32 { flakeRates := make(map[string]map[string]float32) for environment, environmentSplit := range splitEntries { for test, testSplit := range environmentSplit { diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go index c6407c16bd..897d32311a 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go @@ -94,7 +94,7 @@ func TestReadData(t *testing.T) { compareEntrySlices(t, actualData, expectedData, "") } -func compareSplitData(t *testing.T, actual, expected map[string]map[string][]TestEntry) { +func compareSplitData(t *testing.T, actual, expected SplitEntryMap) { for environment, actualTests := range actual { expectedTests, environmentOk := expected[environment] if !environmentOk { @@ -159,7 +159,7 @@ func TestSplitData(t *testing.T) { status: "Passed", } actual := SplitData([]TestEntry{entryE1T1_1, entryE1T1_2, entryE1T2, entryE2T1, entryE2T2}) - expected := map[string]map[string][]TestEntry{ + expected := SplitEntryMap{ "env1": { "test1": {entryE1T1_1, entryE1T1_2}, "test2": {entryE1T2}, @@ -233,7 +233,7 @@ func TestFilterRecentEntries(t *testing.T) { status: "Passed", } - actualData := FilterRecentEntries(map[string]map[string][]TestEntry{ + actualData := FilterRecentEntries(SplitEntryMap{ "env1": { "test1": { entryE1T1R1, @@ -257,7 +257,7 @@ func TestFilterRecentEntries(t *testing.T) { }, }, 2) - expectedData := map[string]map[string][]TestEntry{ + expectedData := SplitEntryMap{ "env1": { "test1": { entryE1T1R1, @@ -281,7 +281,7 @@ func TestFilterRecentEntries(t *testing.T) { } func TestComputeFlakeRates(t *testing.T) { - actualData := ComputeFlakeRates(map[string]map[string][]TestEntry{ + actualData := ComputeFlakeRates(SplitEntryMap{ 
"env1": { "test1": { { From e9e7b85e025878bc83279d9afbf1553b6de3f59d Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 11:08:44 -0700 Subject: [PATCH 41/66] Make types and functions private. --- .../test-flake-chart/compute_flake_rate.go | 34 ++++++------ .../compute_flake_rate_test.go | 52 +++++++++---------- 2 files changed, 43 insertions(+), 43 deletions(-) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index 4da2e48ab4..be741f5bb1 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -41,10 +41,10 @@ func main() { exit("Unable to read data CSV", err) } - testEntries := ReadData(file) - splitEntries := SplitData(testEntries) - filteredEntries := FilterRecentEntries(splitEntries, *dateRange) - flakeRates := ComputeFlakeRates(filteredEntries) + testEntries := readData(file) + splitEntries := splitData(testEntries) + filteredEntries := filterRecentEntries(splitEntries, *dateRange) + flakeRates := computeFlakeRates(filteredEntries) fmt.Println("Environment,Test,Flake Rate") for environment, environmentSplit := range flakeRates { for test, flakeRate := range environmentSplit { @@ -53,7 +53,7 @@ func main() { } } -type TestEntry struct { +type testEntry struct { name string environment string date time.Time @@ -61,11 +61,11 @@ type TestEntry struct { } // A map with keys of (environment, test_name) to values of slcies of TestEntry. -type SplitEntryMap map[string]map[string][]TestEntry +type splitEntryMap map[string]map[string][]testEntry // Reads CSV `file` and consumes each line to be a single TestEntry. -func ReadData(file io.Reader) []TestEntry { - testEntries := []TestEntry{} +func readData(file io.Reader) []testEntry { + testEntries := []testEntry{} fileReader := bufio.NewReaderSize(file, 256) previousLine := []string{"", "", "", "", "", ""} @@ -101,7 +101,7 @@ func ReadData(file io.Reader) []TestEntry { if err != nil { fmt.Printf("Failed to parse date: %v\n", err) } - testEntries = append(testEntries, TestEntry{ + testEntries = append(testEntries, testEntry{ name: fields[3], environment: fields[2], date: date, @@ -113,8 +113,8 @@ func ReadData(file io.Reader) []TestEntry { } // Splits `testEntries` up into maps indexed first by environment and then by test. -func SplitData(testEntries []TestEntry) SplitEntryMap { - splitEntries := make(SplitEntryMap) +func splitData(testEntries []testEntry) splitEntryMap { + splitEntries := make(splitEntryMap) for _, entry := range testEntries { appendEntry(splitEntries, entry.environment, entry.name, entry) @@ -124,12 +124,12 @@ func SplitData(testEntries []TestEntry) SplitEntryMap { } // Appends `entry` to `splitEntries` at the `environment` and `test`. -func appendEntry(splitEntries SplitEntryMap, environment, test string, entry TestEntry) { +func appendEntry(splitEntries splitEntryMap, environment, test string, entry testEntry) { // Lookup the environment. environmentSplit, ok := splitEntries[environment] if !ok { // If the environment map is missing, make a map for this environment and store it. - environmentSplit = make(map[string][]TestEntry) + environmentSplit = make(map[string][]testEntry) splitEntries[environment] = environmentSplit } @@ -137,15 +137,15 @@ func appendEntry(splitEntries SplitEntryMap, environment, test string, entry Tes testSplit, ok := environmentSplit[test] if !ok { // If the test is missing, make a slice for this test. 
- testSplit = make([]TestEntry, 0) + testSplit = make([]testEntry, 0) // The slice is not inserted, since it will be replaced anyway. } environmentSplit[test] = append(testSplit, entry) } // Filters `splitEntries` to include only the most recent `date_range` dates. -func FilterRecentEntries(splitEntries SplitEntryMap, dateRange uint) SplitEntryMap { - filteredEntries := make(SplitEntryMap) +func filterRecentEntries(splitEntries splitEntryMap, dateRange uint) splitEntryMap { + filteredEntries := make(splitEntryMap) for environment, environmentSplit := range splitEntries { for test, testSplit := range environmentSplit { @@ -192,7 +192,7 @@ func FilterRecentEntries(splitEntries SplitEntryMap, dateRange uint) SplitEntryM } // Computes the flake rates over each entry in `splitEntries`. -func ComputeFlakeRates(splitEntries SplitEntryMap) map[string]map[string]float32 { +func computeFlakeRates(splitEntries splitEntryMap) map[string]map[string]float32 { flakeRates := make(map[string]map[string]float32) for environment, environmentSplit := range splitEntries { for test, testSplit := range environmentSplit { diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go index 897d32311a..2f458daad9 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go @@ -27,7 +27,7 @@ func simpleDate(year int, day int) time.Time { return time.Date(year, time.January, day, 0, 0, 0, 0, time.UTC) } -func compareEntrySlices(t *testing.T, actualData, expectedData []TestEntry, extra string) { +func compareEntrySlices(t *testing.T, actualData, expectedData []testEntry, extra string) { if extra != "" { extra = fmt.Sprintf(" (%s)", extra) } @@ -50,7 +50,7 @@ func compareEntrySlices(t *testing.T, actualData, expectedData []TestEntry, extr } func TestReadData(t *testing.T) { - actualData := ReadData(strings.NewReader( + actualData := readData(strings.NewReader( `A,B,C,D,E,F hash,2000-01-01,env1,test1,Passed,1 hash,2001-01-01,env2,test2,Failed,1 @@ -58,7 +58,7 @@ func TestReadData(t *testing.T) { hash,2002-01-01,,,Passed,1 hash,2003-01-01,env3,test3,Passed,1`, )) - expectedData := []TestEntry{ + expectedData := []testEntry{ { name: "test1", environment: "env1", @@ -94,7 +94,7 @@ func TestReadData(t *testing.T) { compareEntrySlices(t, actualData, expectedData, "") } -func compareSplitData(t *testing.T, actual, expected SplitEntryMap) { +func compareSplitData(t *testing.T, actual, expected splitEntryMap) { for environment, actualTests := range actual { expectedTests, environmentOk := expected[environment] if !environmentOk { @@ -129,37 +129,37 @@ func compareSplitData(t *testing.T, actual, expected SplitEntryMap) { } func TestSplitData(t *testing.T) { - entryE1T1_1, entryE1T1_2 := TestEntry{ + entryE1T1_1, entryE1T1_2 := testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 1), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 2), status: "Passed", } - entryE1T2 := TestEntry{ + entryE1T2 := testEntry{ name: "test2", environment: "env1", date: simpleDate(2000, 1), status: "Passed", } - entryE2T1 := TestEntry{ + entryE2T1 := testEntry{ name: "test1", environment: "env2", date: simpleDate(2000, 1), status: "Passed", } - entryE2T2 := TestEntry{ + entryE2T2 := testEntry{ name: "test2", environment: "env2", date: simpleDate(2000, 1), status: "Passed", } - actual := SplitData([]TestEntry{entryE1T1_1, entryE1T1_2, 
entryE1T2, entryE2T1, entryE2T2}) - expected := SplitEntryMap{ + actual := splitData([]testEntry{entryE1T1_1, entryE1T1_2, entryE1T2, entryE2T1, entryE2T2}) + expected := splitEntryMap{ "env1": { "test1": {entryE1T1_1, entryE1T1_2}, "test2": {entryE1T2}, @@ -174,66 +174,66 @@ func TestSplitData(t *testing.T) { } func TestFilterRecentEntries(t *testing.T) { - entryE1T1R1, entryE1T1R2, entryE1T1R3, entryE1T1O1, entryE1T1O2 := TestEntry{ + entryE1T1R1, entryE1T1R2, entryE1T1R3, entryE1T1O1, entryE1T1O2 := testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 4), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 3), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 3), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 2), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test1", environment: "env1", date: simpleDate(2000, 1), status: "Passed", } - entryE1T2R1, entryE1T2R2, entryE1T2O1 := TestEntry{ + entryE1T2R1, entryE1T2R2, entryE1T2O1 := testEntry{ name: "test2", environment: "env1", date: simpleDate(2001, 3), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test2", environment: "env1", date: simpleDate(2001, 2), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test2", environment: "env1", date: simpleDate(2001, 1), status: "Passed", } - entryE2T2R1, entryE2T2R2, entryE2T2O1 := TestEntry{ + entryE2T2R1, entryE2T2R2, entryE2T2O1 := testEntry{ name: "test2", environment: "env2", date: simpleDate(2003, 3), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test2", environment: "env2", date: simpleDate(2003, 2), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test2", environment: "env2", date: simpleDate(2003, 1), status: "Passed", } - actualData := FilterRecentEntries(SplitEntryMap{ + actualData := filterRecentEntries(splitEntryMap{ "env1": { "test1": { entryE1T1R1, @@ -257,7 +257,7 @@ func TestFilterRecentEntries(t *testing.T) { }, }, 2) - expectedData := SplitEntryMap{ + expectedData := splitEntryMap{ "env1": { "test1": { entryE1T1R1, @@ -281,7 +281,7 @@ func TestFilterRecentEntries(t *testing.T) { } func TestComputeFlakeRates(t *testing.T) { - actualData := ComputeFlakeRates(SplitEntryMap{ + actualData := computeFlakeRates(splitEntryMap{ "env1": { "test1": { { @@ -337,7 +337,7 @@ func TestComputeFlakeRates(t *testing.T) { environment: "env2", date: simpleDate(2003, 3), status: "Passed", - }, TestEntry{ + }, testEntry{ name: "test2", environment: "env2", date: simpleDate(2003, 2), From 79f8de1bcbf8a7d260d5b4158930912230a5a660 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 11:11:34 -0700 Subject: [PATCH 42/66] Add comment for testEntry. --- hack/jenkins/test-flake-chart/compute_flake_rate.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index be741f5bb1..ccecf4f810 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -53,6 +53,13 @@ func main() { } } +// One entry of a test run. 
+// Example: testEntry {
+//	name: "TestFunctional/parallel/LogsCmd",
+//	environment: "Docker_Linux",
+//	date: time.Now(),
+//	status: "Passed",
+// }
 type testEntry struct {
 	name        string
 	environment string

From ecaee4d932cf64f7d3d45c1e1a82c0892f013178 Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Thu, 10 Jun 2021 11:16:44 -0700
Subject: [PATCH 43/66] Add better comments for optimize_data and process_data.

---
 hack/jenkins/test-flake-chart/optimize_data.sh | 8 ++++++++
 hack/jenkins/test-flake-chart/process_data.sh | 4 +++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/hack/jenkins/test-flake-chart/optimize_data.sh b/hack/jenkins/test-flake-chart/optimize_data.sh
index 2bc140fc28..e92f5e0df2 100755
--- a/hack/jenkins/test-flake-chart/optimize_data.sh
+++ b/hack/jenkins/test-flake-chart/optimize_data.sh
@@ -17,4 +17,12 @@ set -eu -o pipefail

 # Take input CSV. For each field, if it is the same as the previous row, replace it with an empty string.
+# This is to compress the input CSV. Example:
+# Input:
+# hash,2021-06-10,Docker_Linux,TestFunctional,Passed,0.5
+# hash,2021-06-10,Docker_Linux_containerd,TestFunctional,Failed,0.6
+#
+# Output:
+# hash,2021-06-10,Docker_Linux,TestFunctional,Passed,0.5
+# ,,Docker_Linux_containerd,,Failed,0.6
 awk -F, 'BEGIN {OFS = FS} { for(i=1; i<=NF; i++) { if($i == j[i]) { $i = ""; } else { j[i] = $i; } } printf "%s\n",$0 }'

diff --git a/hack/jenkins/test-flake-chart/process_data.sh b/hack/jenkins/test-flake-chart/process_data.sh
index 25c6ba5ec6..dc0e66e4b3 100755
--- a/hack/jenkins/test-flake-chart/process_data.sh
+++ b/hack/jenkins/test-flake-chart/process_data.sh
@@ -19,7 +19,9 @@ set -eu -o pipefail
 # Print header.
 printf "Commit Hash,Test Date,Environment,Test,Status,Duration\n"

-# Turn each test in each summary file to a CSV line containing its commit hash, date, environment, test, and status.
+# Turn each test in each summary file to a CSV line containing its commit hash, date, environment, test, status, and duration.
+# Example line:
+# 247982745892,2021-06-10,Docker_Linux,TestFunctional,Passed,0.5
 jq -r '((.PassedTests[]? as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Passed"}), (.FailedTests[]? as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: .Durations[$name], status: "Failed"}), (.SkippedTests[]? as $name | {commit: (.Detail.Details | split(":") | .[0]), date: (.Detail.Details | split(":") | .[1]), environment: .Detail.Name, test: $name, duration: 0, status: "Skipped"}))

From 2b3f7cedd77507aead2df8e0ce9dc2527a1096ee Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Thu, 10 Jun 2021 13:02:56 -0700
Subject: [PATCH 44/66] Remove bootstrap from flake_chart.html.

---
 hack/jenkins/test-flake-chart/flake_chart.html | 2 --
 1 file changed, 2 deletions(-)

diff --git a/hack/jenkins/test-flake-chart/flake_chart.html b/hack/jenkins/test-flake-chart/flake_chart.html
index f39859daff..beaf224c20 100644
--- a/hack/jenkins/test-flake-chart/flake_chart.html
+++ b/hack/jenkins/test-flake-chart/flake_chart.html
@@ -1,11 +1,9 @@
-
- \ No newline at end of file From a98db3511e4bfa091626d0c6c833db9cd6547ae8 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 13:16:16 -0700 Subject: [PATCH 45/66] Add description and example usage to all shell scripts. --- hack/jenkins/test-flake-chart/collect_data.sh | 4 ++++ hack/jenkins/test-flake-chart/optimize_data.sh | 3 +++ hack/jenkins/test-flake-chart/process_data.sh | 4 ++++ hack/jenkins/test-flake-chart/report_flakes.sh | 4 ++++ hack/jenkins/test-flake-chart/upload_tests.sh | 4 ++++ 5 files changed, 19 insertions(+) diff --git a/hack/jenkins/test-flake-chart/collect_data.sh b/hack/jenkins/test-flake-chart/collect_data.sh index a03b726825..160eecf18f 100755 --- a/hack/jenkins/test-flake-chart/collect_data.sh +++ b/hack/jenkins/test-flake-chart/collect_data.sh @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Collects all test data manually, processes it, and uploads to GCS. This will +# overwrite any existing data. +# Example usage: ./collect_data.sh + DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) # 1) "cat" together all summary files. diff --git a/hack/jenkins/test-flake-chart/optimize_data.sh b/hack/jenkins/test-flake-chart/optimize_data.sh index e92f5e0df2..641dd6905b 100755 --- a/hack/jenkins/test-flake-chart/optimize_data.sh +++ b/hack/jenkins/test-flake-chart/optimize_data.sh @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Takes a CSV file through stdin, compresses it and writes it to stdout. +# Example usage: < data.csv ./optimize_data.sh > data_optimized.csv + set -eu -o pipefail # Take input CSV. For each field, if it is the same as the previous row, replace it with an empty string. diff --git a/hack/jenkins/test-flake-chart/process_data.sh b/hack/jenkins/test-flake-chart/process_data.sh index dc0e66e4b3..b51e07a9e2 100755 --- a/hack/jenkins/test-flake-chart/process_data.sh +++ b/hack/jenkins/test-flake-chart/process_data.sh @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Takes a series of gopogh summary jsons, and formats them into a CSV file with +# a row for each test. +# Example usage: cat gopogh_1.json gopogh_2.json gopogh_3.json | ./process_data.sh + set -eu -o pipefail # Print header. diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index b0933fa56a..d4d5dc59b8 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Creates a comment on the provided PR number, using the provided gopogh summary +# to list out the flake rates of all failing tests. +# Example usage: ./report_flakes.sh 11602 gopogh.json Docker_Linux + set -eu -o pipefail if [ "$#" -ne 3 ]; then diff --git a/hack/jenkins/test-flake-chart/upload_tests.sh b/hack/jenkins/test-flake-chart/upload_tests.sh index 508d76f9ad..5906f73ae1 100755 --- a/hack/jenkins/test-flake-chart/upload_tests.sh +++ b/hack/jenkins/test-flake-chart/upload_tests.sh @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Takes a gopogh summary, extracts test data as a CSV and appends to the +# existing CSV data in the GCS bucket. 
+# Example usage: ./upload_tests.sh gopogh_summary.json
+
 set -eu -o pipefail

 if [ "$#" -ne 1 ]; then

From 7e785c1c1e426c1a5b53b06df23d487229656f7a Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Thu, 10 Jun 2021 13:19:05 -0700
Subject: [PATCH 46/66] Make collect_data.sh optimize and upload to GCS automatically.

---
 hack/jenkins/test-flake-chart/collect_data.sh | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/hack/jenkins/test-flake-chart/collect_data.sh b/hack/jenkins/test-flake-chart/collect_data.sh
index 160eecf18f..e62757a575 100755
--- a/hack/jenkins/test-flake-chart/collect_data.sh
+++ b/hack/jenkins/test-flake-chart/collect_data.sh
@@ -22,5 +22,9 @@ DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )

 # 1) "cat" together all summary files.
 # 2) Process all summary files.
+# 3) Optimize the resulting data.
+# 4) Store in GCS bucket.
 gsutil cat gs://minikube-builds/logs/master/*/*_summary.json \
 | $DIR/process_data.sh
+| $DIR/optimize_data.sh
+| gsutil cp - gs://minikube-flake-rate/data.csv

From 884216db9e28526b3cf948a1aaf2978f94b919fe Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Thu, 10 Jun 2021 13:27:00 -0700
Subject: [PATCH 47/66] Make report_flakes.sh abort if there are no failed tests.

---
 hack/jenkins/test-flake-chart/report_flakes.sh | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh
index d4d5dc59b8..85589babf8 100755
--- a/hack/jenkins/test-flake-chart/report_flakes.sh
+++ b/hack/jenkins/test-flake-chart/report_flakes.sh
@@ -60,6 +60,12 @@ TMP_FAILED_RATES="$TMP_FLAKE_RATES\_filtered"
 | sort -g -t, -k2,2 \
 > "$TMP_FAILED_RATES"

+FAILED_RATES_LINES=$(wc -l < "$TMP_FAILED_RATES")
+if [[ "$FAILED_RATES_LINES" -eq 0 ]]; then
+  echo "No failed tests! Aborting without commenting..." 1>&2
+  exit 0
+fi
+
 # Create the comment template.
 TMP_COMMENT=$(mktemp)
 printf "These are the flake rates of all failed tests on %s.\n|Failed Tests|Flake Rate (%%)|\n|---|---|\n" "$ENVIRONMENT" > "$TMP_COMMENT"
@@ -71,7 +77,7 @@ printf "These are the flake rates of all failed tests on %s.\n|Failed Tests|Flak
 >> "$TMP_COMMENT"

 # If there are too many failing tests, add an extra row explaining this, and a message after the table.
-if [[ $(wc -l < "$TMP_FAILED_RATES") -gt 30 ]]; then
+if [[ "$FAILED_RATES_LINES" -gt 30 ]]; then
   printf "|More tests...|Continued...|\n\nToo many tests failed - See test logs for more details." >> "$TMP_COMMENT"
 fi

From dc6cb0b671fd35b09f995259ef5949525dc7872f Mon Sep 17 00:00:00 2001
From: Andriy Dzikh
Date: Thu, 10 Jun 2021 13:30:35 -0700
Subject: [PATCH 48/66] Rename collect_data.sh to collect_data_manual.sh and make header comment more clear.

---
 .../{collect_data.sh => collect_data_manual.sh} | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
 rename hack/jenkins/test-flake-chart/{collect_data.sh => collect_data_manual.sh} (85%)

diff --git a/hack/jenkins/test-flake-chart/collect_data.sh b/hack/jenkins/test-flake-chart/collect_data_manual.sh
similarity index 85%
rename from hack/jenkins/test-flake-chart/collect_data.sh
rename to hack/jenkins/test-flake-chart/collect_data_manual.sh
index e62757a575..bed3d74679 100755
--- a/hack/jenkins/test-flake-chart/collect_data.sh
+++ b/hack/jenkins/test-flake-chart/collect_data_manual.sh
@@ -15,8 +15,9 @@
 # limitations under the License.

 # Collects all test data manually, processes it, and uploads to GCS. This will
-# overwrite any existing data.
-# Example usage: ./collect_data.sh +# overwrite any existing data. This should only be done for a dryrun, new data +# should be handled exclusively through upload_tests.sh. +# Example usage: ./collect_data_manual.sh DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) From 728229c719d3279f1cd88cdca06c7ab0c1171c38 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 13:36:23 -0700 Subject: [PATCH 49/66] Add more environments to the allowed environments for commenting. --- hack/jenkins/common.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/jenkins/common.sh b/hack/jenkins/common.sh index b153185ee3..7ae29a3329 100755 --- a/hack/jenkins/common.sh +++ b/hack/jenkins/common.sh @@ -443,7 +443,7 @@ if [ -z "${EXTERNAL}" ]; then gsutil -qm cp "${SUMMARY_OUT}" "gs://${JOB_GCS_BUCKET}_summary.json" || true if [[ "${MINIKUBE_LOCATION}" == "master" ]]; then ./test-flake-chart/upload_tests.sh "${SUMMARY_OUT}" - elif [[ "${JOB_NAME}" == "Docker_Linux" ]]; then + elif [[ "${JOB_NAME}" == "Docker_Linux" || "${JOB_NAME}" == "Docker_Linux_containerd" || "${JOB_NAME}" == "KVM_Linux" || "${JOB_NAME}" == "KVM_Linux_containerd" ]]; then ./test-flake-chart/report_flakes.sh "${MINIKUBE_LOCATION}" "${SUMMARY_OUT}" "${JOB_NAME}" fi else From 1c1fdbff42e0efcf3738640e31900bb421880dbf Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 14:00:47 -0700 Subject: [PATCH 50/66] Make compute_flake_rate also compute average test duration. --- .../test-flake-chart/compute_flake_rate.go | 42 ++++- .../compute_flake_rate_test.go | 173 ++++++++++++++---- .../jenkins/test-flake-chart/report_flakes.sh | 2 +- 3 files changed, 173 insertions(+), 44 deletions(-) diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate.go b/hack/jenkins/test-flake-chart/compute_flake_rate.go index ccecf4f810..0025df2fbe 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate.go @@ -24,6 +24,7 @@ import ( "os" "runtime/debug" "sort" + "strconv" "strings" "time" ) @@ -45,10 +46,12 @@ func main() { splitEntries := splitData(testEntries) filteredEntries := filterRecentEntries(splitEntries, *dateRange) flakeRates := computeFlakeRates(filteredEntries) - fmt.Println("Environment,Test,Flake Rate") + averageDurations := computeAverageDurations(filteredEntries) + fmt.Println("Environment,Test,Flake Rate,Duration") for environment, environmentSplit := range flakeRates { for test, flakeRate := range environmentSplit { - fmt.Printf("%s,%s,%.2f\n", environment, test, flakeRate*100) + duration := averageDurations[environment][test] + fmt.Printf("%s,%s,%.2f,%.3f\n", environment, test, flakeRate*100, duration) } } } @@ -59,12 +62,14 @@ func main() { // environment: "Docker_Linux", // date: time.Now, // status: "Passed", +// duration: 0.1, // } type testEntry struct { name string environment string date time.Time status string + duration float32 } // A map with keys of (environment, test_name) to values of slcies of TestEntry. 
@@ -107,12 +112,19 @@ func readData(file io.Reader) []testEntry { date, err := time.Parse("2006-01-02", fields[1]) if err != nil { fmt.Printf("Failed to parse date: %v\n", err) + continue + } + duration, err := strconv.ParseFloat(fields[5], 32) + if err != nil { + fmt.Printf("Failed to parse duration: %v\n", err) + continue } testEntries = append(testEntries, testEntry{ name: fields[3], environment: fields[2], date: date, status: fields[4], + duration: float32(duration), }) } } @@ -215,14 +227,32 @@ func computeFlakeRates(splitEntries splitEntryMap) map[string]map[string]float32 return flakeRates } -// Sets the `value` of keys `environment` and `test` in `flakeRates`. -func setValue(flakeRates map[string]map[string]float32, environment, test string, value float32) { +// Computes the average durations over each entry in `splitEntries`. +func computeAverageDurations(splitEntries splitEntryMap) map[string]map[string]float32 { + averageDurations := make(map[string]map[string]float32) + for environment, environmentSplit := range splitEntries { + for test, testSplit := range environmentSplit { + durationSum := float32(0) + for _, entry := range testSplit { + durationSum += entry.duration + } + if len(testSplit) != 0 { + durationSum /= float32(len(testSplit)) + } + setValue(averageDurations, environment, test, durationSum) + } + } + return averageDurations +} + +// Sets the `value` of keys `environment` and `test` in `mapEntries`. +func setValue(mapEntries map[string]map[string]float32, environment, test string, value float32) { // Lookup the environment. - environmentRates, ok := flakeRates[environment] + environmentRates, ok := mapEntries[environment] if !ok { // If the environment map is missing, make a map for this environment and store it. environmentRates = make(map[string]float32) - flakeRates[environment] = environmentRates + mapEntries[environment] = environmentRates } environmentRates[test] = value } diff --git a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go index 2f458daad9..d4013c0885 100644 --- a/hack/jenkins/test-flake-chart/compute_flake_rate_test.go +++ b/hack/jenkins/test-flake-chart/compute_flake_rate_test.go @@ -53,10 +53,10 @@ func TestReadData(t *testing.T) { actualData := readData(strings.NewReader( `A,B,C,D,E,F hash,2000-01-01,env1,test1,Passed,1 - hash,2001-01-01,env2,test2,Failed,1 - hash,,,test1,,1 - hash,2002-01-01,,,Passed,1 - hash,2003-01-01,env3,test3,Passed,1`, + hash,2001-01-01,env2,test2,Failed,0.5 + hash,,,test1,,0.6 + hash,2002-01-01,,,Passed,0.9 + hash,2003-01-01,env3,test3,Passed,2`, )) expectedData := []testEntry{ { @@ -64,30 +64,35 @@ func TestReadData(t *testing.T) { environment: "env1", date: simpleDate(2000, 1), status: "Passed", + duration: 1, }, { name: "test2", environment: "env2", date: simpleDate(2001, 1), status: "Failed", + duration: 0.5, }, { name: "test1", environment: "env2", date: simpleDate(2001, 1), status: "Failed", + duration: 0.6, }, { name: "test1", environment: "env2", date: simpleDate(2002, 1), status: "Passed", + duration: 0.9, }, { name: "test3", environment: "env3", date: simpleDate(2003, 1), status: "Passed", + duration: 2, }, } @@ -280,6 +285,42 @@ func TestFilterRecentEntries(t *testing.T) { compareSplitData(t, actualData, expectedData) } +func compareValues(t *testing.T, actualValues, expectedValues map[string]map[string]float32) { + for environment, actualTests := range actualValues { + expectedTests, environmentOk := expectedValues[environment] + if 
!environmentOk { + t.Errorf("Unexpected environment %s in actual", environment) + continue + } + + for test, actualValue := range actualTests { + expectedValue, testOk := expectedTests[test] + if !testOk { + t.Errorf("Unexpected test %s (in environment %s) in actual", test, environment) + continue + } + + if actualValue != expectedValue { + t.Errorf("Wrong value at environment %s and test %s. Expected: %v, Actual: %v", environment, test, expectedValue, actualValue) + } + } + + for test := range expectedTests { + _, testOk := actualTests[test] + if !testOk { + t.Errorf("Missing expected test %s (in environment %s) in actual", test, environment) + } + } + } + + for environment := range expectedValues { + _, environmentOk := actualValues[environment] + if !environmentOk { + t.Errorf("Missing expected environment %s in actual", environment) + } + } +} + func TestComputeFlakeRates(t *testing.T) { actualData := computeFlakeRates(splitEntryMap{ "env1": { @@ -357,37 +398,95 @@ func TestComputeFlakeRates(t *testing.T) { }, } - for environment, actualTests := range actualData { - expectedTests, environmentOk := expectedData[environment] - if !environmentOk { - t.Errorf("Unexpected environment %s in actual", environment) - continue - } - - for test, actualFlakeRate := range actualTests { - expectedFlakeRate, testOk := expectedTests[test] - if !testOk { - t.Errorf("Unexpected test %s (in environment %s) in actual", test, environment) - continue - } - - if actualFlakeRate != expectedFlakeRate { - t.Errorf("Wrong flake rate. Expected: %v, Actual: %v", expectedFlakeRate, actualFlakeRate) - } - } - - for test := range expectedTests { - _, testOk := actualTests[test] - if !testOk { - t.Errorf("Missing expected test %s (in environment %s) in actual", test, environment) - } - } - } - - for environment := range expectedData { - _, environmentOk := actualData[environment] - if !environmentOk { - t.Errorf("Missing expected environment %s in actual", environment) - } - } + compareValues(t, actualData, expectedData) +} + +func TestComputeAverageDurations(t *testing.T) { + actualData := computeAverageDurations(splitEntryMap{ + "env1": { + "test1": { + { + name: "test1", + environment: "env1", + date: simpleDate(2000, 4), + status: "Passed", + duration: 1, + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, 3), + status: "Passed", + duration: 2, + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, 3), + status: "Passed", + duration: 3, + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, 2), + status: "Passed", + duration: 3, + }, { + name: "test1", + environment: "env1", + date: simpleDate(2000, 1), + status: "Failed", + duration: 3, + }, + }, + "test2": { + { + name: "test2", + environment: "env1", + date: simpleDate(2001, 3), + status: "Failed", + duration: 1, + }, { + name: "test2", + environment: "env1", + date: simpleDate(2001, 2), + status: "Failed", + duration: 3, + }, { + name: "test2", + environment: "env1", + date: simpleDate(2001, 1), + status: "Failed", + duration: 3, + }, + }, + }, + "env2": { + "test2": { + { + name: "test2", + environment: "env2", + date: simpleDate(2003, 3), + status: "Passed", + duration: 0.5, + }, testEntry{ + name: "test2", + environment: "env2", + date: simpleDate(2003, 2), + status: "Failed", + duration: 1.5, + }, + }, + }, + }) + + expectedData := map[string]map[string]float32{ + "env1": { + "test1": float32(12) / float32(5), + "test2": float32(7) / float32(3), + }, + "env2": { + "test2": 1, + }, + } + + 
compareValues(t, actualData, expectedData) } diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index 85589babf8..a04c0d359c 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -54,7 +54,7 @@ TMP_FAILED_RATES="$TMP_FLAKE_RATES\_filtered" # 3) Join the flake rates with the failing tests to only get flake rates of failing tests. # 4) Sort failed test flake rates based on the flakiness of that test - stable tests should be first on the list. # 5) Store in file $TMP_FAILED_RATES. -< "$TMP_FLAKE_RATES" sed -n -r -e "s/$ENVIRONMENT,([a-zA-Z\/_-]*),([.0-9]*)/\1,\2/p" \ +< "$TMP_FLAKE_RATES" sed -n -r -e "s/$ENVIRONMENT,([a-zA-Z\/_-]*),([.0-9]*),[.0-9]*/\1,\2/p" \ | sort -t, -k1,1 \ | join -t , -j 1 "$TMP_DATA" - \ | sort -g -t, -k2,2 \ From 806d8999887efc40c849577f2d4177e8c41e2a78 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 10 Jun 2021 14:26:04 -0700 Subject: [PATCH 51/66] Add timeout to apiserver health check. --- pkg/minikube/bootstrapper/bsutil/kverify/api_server.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/minikube/bootstrapper/bsutil/kverify/api_server.go b/pkg/minikube/bootstrapper/bsutil/kverify/api_server.go index 39c29a1640..e5d275beca 100644 --- a/pkg/minikube/bootstrapper/bsutil/kverify/api_server.go +++ b/pkg/minikube/bootstrapper/bsutil/kverify/api_server.go @@ -232,7 +232,7 @@ func apiServerHealthzNow(hostname string, port int) (state.State, error) { Proxy: nil, // Avoid using a proxy to speak to a local host TLSClientConfig: &tls.Config{RootCAs: pool}, } - client := &http.Client{Transport: tr} + client := &http.Client{Transport: tr, Timeout: 5 * time.Second} resp, err := client.Get(url) // Connection refused, usually. if err != nil { From 4eb3c6332d7a242261a284ca04ee5afbaae3190e Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Mon, 14 Jun 2021 15:11:49 -0700 Subject: [PATCH 52/66] Fix a download link to use arm64 instead of amd64 --- cmd/minikube/cmd/root.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/minikube/cmd/root.go b/cmd/minikube/cmd/root.go index cfcd7f0276..5b81138d82 100644 --- a/cmd/minikube/cmd/root.go +++ b/cmd/minikube/cmd/root.go @@ -97,7 +97,7 @@ func Execute() { if runtime.GOOS == "darwin" && detect.IsAmd64M1Emulation() { exit.Message(reason.WrongBinaryM1, "You are trying to run amd64 binary on M1 system. 
Please use darwin/arm64 binary instead (Download at {{.url}}.)", - out.V{"url": notify.DownloadURL(version.GetVersion(), "darwin", "amd64")}) + out.V{"url": notify.DownloadURL(version.GetVersion(), "darwin", "arm64")}) } _, callingCmd := filepath.Split(os.Args[0]) From e7e52584c55bc301d3182e1b7f315885f8dc1e4a Mon Sep 17 00:00:00 2001 From: zhangdb-git Date: Tue, 15 Jun 2021 05:36:58 -0400 Subject: [PATCH 53/66] Remove duplicated translations --- site/content/en/docs/handbook/controls.md | 2 +- translations/ko.json | 5 ++--- translations/pl.json | 5 ++--- translations/zh-CN.json | 5 ++--- 4 files changed, 7 insertions(+), 10 deletions(-) diff --git a/site/content/en/docs/handbook/controls.md b/site/content/en/docs/handbook/controls.md index 0f1218ca63..0301da5e18 100644 --- a/site/content/en/docs/handbook/controls.md +++ b/site/content/en/docs/handbook/controls.md @@ -16,7 +16,7 @@ Start a cluster by running: minikube start ``` -Access the Kubernetes Dashboard running within the minikube cluster: +Access the Kubernetes dashboard running within the minikube cluster: ```shell minikube dashboard diff --git a/translations/ko.json b/translations/ko.json index 67b0f2a3fa..22a0818c5e 100644 --- a/translations/ko.json +++ b/translations/ko.json @@ -33,8 +33,7 @@ "A set of apiserver IP Addresses which are used in the generated certificate for kubernetes. This can be used if you want to make the apiserver available from outside the machine": "", "A set of apiserver names which are used in the generated certificate for kubernetes. This can be used if you want to make the apiserver available from outside the machine": "", "A set of key=value pairs that describe feature gates for alpha/experimental features.": "", - "Access the Kubernetes dashboard running within the minikube cluster": "", - "Access the kubernetes dashboard running within the minikube cluster": "minikube 클러스터 내의 쿠버네티스 대시보드에 접근합니다", + "Access the Kubernetes dashboard running within the minikube cluster": "minikube 클러스터 내의 쿠버네티스 대시보드에 접근합니다", "Access to ports below 1024 may fail on Windows with OpenSSH clients older than v8.1. For more information, see: https://minikube.sigs.k8s.io/docs/handbook/accessing/#access-to-ports-1024-on-windows-requires-root-permission": "", "Add SSH identity key to SSH authentication agent": "SSH 인증 에이전트에 SSH ID 키 추가합니다", "Add an image to local cache.": "로컬 캐시에 이미지를 추가합니다", @@ -962,4 +961,4 @@ "{{.profile}} profile is not valid: {{.err}}": "{{.profile}} 프로파일이 올바르지 않습니다: {{.err}}", "{{.type}} is not yet a supported filesystem. We will try anyways!": "", "{{.url}} is not accessible: {{.error}}": "{{.url}} 이 접근 불가능합니다: {{.error}}" -} \ No newline at end of file +} diff --git a/translations/pl.json b/translations/pl.json index 76bd872c81..8991b99ee0 100644 --- a/translations/pl.json +++ b/translations/pl.json @@ -32,8 +32,7 @@ "A set of apiserver IP Addresses which are used in the generated certificate for kubernetes. This can be used if you want to make the apiserver available from outside the machine": "", "A set of apiserver names which are used in the generated certificate for kubernetes. 
This can be used if you want to make the apiserver available from outside the machine": "", "A set of key=value pairs that describe feature gates for alpha/experimental features.": "", - "Access the Kubernetes dashboard running within the minikube cluster": "", - "Access the kubernetes dashboard running within the minikube cluster": "Dostęp do dashboardu uruchomionego w klastrze kubernetesa w minikube", + "Access the Kubernetes dashboard running within the minikube cluster": "Dostęp do dashboardu uruchomionego w klastrze kubernetesa w minikube", "Access to ports below 1024 may fail on Windows with OpenSSH clients older than v8.1. For more information, see: https://minikube.sigs.k8s.io/docs/handbook/accessing/#access-to-ports-1024-on-windows-requires-root-permission": "", "Add SSH identity key to SSH authentication agent": "", "Add an image to local cache.": "Dodaj obraz do lokalnego cache", @@ -962,4 +961,4 @@ "{{.profile}} profile is not valid: {{.err}}": "{{.profile}} profil nie jest poprawny: {{.err}}", "{{.type}} is not yet a supported filesystem. We will try anyways!": "{{.type}} nie jest wspierany przez system plików. I tak spróbujemy!", "{{.url}} is not accessible: {{.error}}": "{{.url}} nie jest osiągalny: {{.error}}" -} \ No newline at end of file +} diff --git a/translations/zh-CN.json b/translations/zh-CN.json index e586403774..62beb10000 100644 --- a/translations/zh-CN.json +++ b/translations/zh-CN.json @@ -39,8 +39,7 @@ "A set of apiserver names which are used in the generated certificate for kubernetes. This can be used if you want to make the apiserver available from outside the machine": "一组在为 kubernetes 生成的证书中使用的 apiserver 名称。如果您希望将此 apiserver 设置为可从机器外部访问,则可以使用这组 apiserver 名称", "A set of key=value pairs that describe configuration that may be passed to different components.\nThe key should be '.' separated, and the first part before the dot is the component to apply the configuration to.\nValid components are: kubelet, kubeadm, apiserver, controller-manager, etcd, proxy, scheduler\nValid kubeadm parameters:": "一组用于描述可传递给不同组件的配置的键值对。\n其中键应以英文句点“.”分隔,英文句点前面的第一个部分是应用该配置的组件。\n有效组件包括:kubelet、kubeadm、apiserver、controller-manager、etcd、proxy、scheduler\n有效 kubeadm 参数包括:", "A set of key=value pairs that describe feature gates for alpha/experimental features.": "一组用于描述 alpha 版功能/实验性功能的功能限制的键值对。", - "Access the Kubernetes dashboard running within the minikube cluster": "", - "Access the kubernetes dashboard running within the minikube cluster": "访问在 minikube 集群中运行的 kubernetes dashboard", + "Access the Kubernetes dashboard running within the minikube cluster": "访问在 minikube 集群中运行的 kubernetes dashboard", "Access to ports below 1024 may fail on Windows with OpenSSH clients older than v8.1. For more information, see: https://minikube.sigs.k8s.io/docs/handbook/accessing/#access-to-ports-1024-on-windows-requires-root-permission": "", "Add SSH identity key to SSH authentication agent": "", "Add an image to local cache.": "将 image 添加到本地缓存。", @@ -1072,4 +1071,4 @@ "{{.profile}} profile is not valid: {{.err}}": "", "{{.type}} is not yet a supported filesystem. We will try anyways!": "", "{{.url}} is not accessible: {{.error}}": "" -} \ No newline at end of file +} From 231fbee7b5a1a0c7ec2d095acc942b5719eb05c8 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Tue, 15 Jun 2021 09:55:32 -0700 Subject: [PATCH 54/66] Fix collect_data_manual not being formatted correctly. 
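
Without trailing backslashes on the middle stages, bash ends the
pipeline after `| $DIR/process_data.sh` and parses each following line
as a new command. A command line that begins with `|` is a shell syntax
error, so the optimize and upload stages never ran. A minimal
illustration of the failure mode (paths shortened for clarity):

    | ./optimize_data.sh
    bash: syntax error near unexpected token `|'

Adding ` \` to each stage keeps all four commands in a single pipeline.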
--- hack/jenkins/test-flake-chart/collect_data_manual.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hack/jenkins/test-flake-chart/collect_data_manual.sh b/hack/jenkins/test-flake-chart/collect_data_manual.sh index bed3d74679..287a1a63d5 100755 --- a/hack/jenkins/test-flake-chart/collect_data_manual.sh +++ b/hack/jenkins/test-flake-chart/collect_data_manual.sh @@ -26,6 +26,6 @@ DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) # 3) Optimize the resulting data. # 4) Store in GCS bucket. gsutil cat gs://minikube-builds/logs/master/*/*_summary.json \ -| $DIR/process_data.sh -| $DIR/optimize_data.sh +| $DIR/process_data.sh \ +| $DIR/optimize_data.sh \ | gsutil cp - gs://minikube-flake-rate/data.csv From 599e7c49e9e06e354a77960d827f5065e810fa03 Mon Sep 17 00:00:00 2001 From: Steven Powell Date: Tue, 15 Jun 2021 10:17:46 -0700 Subject: [PATCH 55/66] create automated time-to-k8s benchmarks on release --- .github/workflows/time-to-k8s.yml | 20 +++++++++ .gitmodules | 4 +- .../{time-to-k8s => time-to-k8s-repo} | 0 hack/benchmark/time-to-k8s/time-to-k8s.sh | 43 +++++++++++++++---- 4 files changed, 57 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/time-to-k8s.yml rename hack/benchmark/time-to-k8s/{time-to-k8s => time-to-k8s-repo} (100%) diff --git a/.github/workflows/time-to-k8s.yml b/.github/workflows/time-to-k8s.yml new file mode 100644 index 0000000000..d3f82b23d4 --- /dev/null +++ b/.github/workflows/time-to-k8s.yml @@ -0,0 +1,20 @@ +name: "time-to-k8s benchmark" +on: + pull_request: + types: [opened] + # release: + # types: [released] +jobs: + benchmark: + runs-on: ubuntu-18.04 + steps: + - uses: actions/checkout@v2 + - name: Checkout submodules + run: git submodule update --init + - uses: actions/setup-go@v2 + with: + go-version: 1.16.5 + stable: true + - name: Benchmark + run: | + ./hack/benchmark/time-to-k8s/time-to-k8s.sh ${{ secrets.MINIKUBE_BOT_PAT }} diff --git a/.gitmodules b/.gitmodules index 0e99693233..d398a94cf9 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,6 @@ [submodule "site/themes/docsy"] path = site/themes/docsy url = https://github.com/google/docsy.git -[submodule "hack/benchmark/time-to-k8s/time-to-k8s"] - path = hack/benchmark/time-to-k8s/time-to-k8s +[submodule "hack/benchmark/time-to-k8s/time-to-k8s-repo"] + path = hack/benchmark/time-to-k8s/time-to-k8s-repo url = https://github.com/tstromberg/time-to-k8s.git diff --git a/hack/benchmark/time-to-k8s/time-to-k8s b/hack/benchmark/time-to-k8s/time-to-k8s-repo similarity index 100% rename from hack/benchmark/time-to-k8s/time-to-k8s rename to hack/benchmark/time-to-k8s/time-to-k8s-repo diff --git a/hack/benchmark/time-to-k8s/time-to-k8s.sh b/hack/benchmark/time-to-k8s/time-to-k8s.sh index d999a6afc8..f0ea5aaf85 100755 --- a/hack/benchmark/time-to-k8s/time-to-k8s.sh +++ b/hack/benchmark/time-to-k8s/time-to-k8s.sh @@ -17,7 +17,7 @@ set -e install_kind() { - curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.11.0/kind-linux-amd64 + curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.11.0/kind-linux-amd64 chmod +x ./kind sudo mv ./kind /usr/local } @@ -31,31 +31,58 @@ install_minikube() { sudo install ./out/minikube /usr/local/bin/minikube } +install_gh() { + export access_token="$1" + + # Make sure gh is installed and configured + ./hack/jenkins/installers/check_install_gh.sh +} + +config_git() { + git config user.name "minikube-bot" + git config user.email "minikube-bot@google.com" +} + +create_branch() { + git checkout -b addTimeToK8s"$1" +} + run_benchmark() { - ( cd 
./hack/benchmark/time-to-k8s/time-to-k8s/ && + pwd + ( cd ./hack/benchmark/time-to-k8s/time-to-k8s-repo/ && git submodule update --init && - go run . --config local-kubernetes.yaml --iterations 5 --output output.csv ) + go run . --config local-kubernetes.yaml --iterations 1 --output output.csv ) } generate_chart() { - go run ./hack/benchmark/time-to-k8s/chart.go --csv ./hack/benchmark/time-to-k8s/time-to-k8s/output.csv --output ./site/static/images/benchmarks/timeToK8s/"$1".png + go run ./hack/benchmark/time-to-k8s/chart.go --csv ./hack/benchmark/time-to-k8s/time-to-k8s-repo/output.csv --output ./site/static/images/benchmarks/timeToK8s/"$1".png } create_page() { printf -- "---\ntitle: \"%s Benchmark\"\nlinkTitle: \"%s Benchmark\"\nweight: 1\n---\n\n![time-to-k8s](/images/benchmarks/timeToK8s/%s.png)\n" "$1" "$1" "$1" > ./site/content/en/docs/benchmarks/timeToK8s/"$1".md } -commit_chart() { +commit_changes() { git add ./site/static/images/benchmarks/timeToK8s/"$1".png ./site/content/en/docs/benchmarks/timeToK8s/"$1".md - git commit -m 'update time-to-k8s chart' + git commit -m "add time-to-k8s benchmark for $1" +} + +create_pr() { + git remote add minikube-bot https://minikube-bot:"$2"@github.com/minikube-bot/minikube.git + git push -u minikube-bot addTimeToK8s"$1" + gh pr create --repo kubernetes/minikube --base master --title "Add time-to-k8s benchmark for $1" --body "Updating time-to-k8s benchmark as part of the release process" } install_kind install_k3d install_minikube -VERSION=$(minikube version --short) +install_gh "$1" +config_git +VERSION=$(minikube version --short) +create_branch "$VERSION" run_benchmark generate_chart "$VERSION" create_page "$VERSION" -commit_chart "$VERSION" +commit_changes "$VERSION" +create_pr "$VERSION" "$1" From 262e6c2072a4b2e3b2c158c03210029729a87969 Mon Sep 17 00:00:00 2001 From: Steven Powell Date: Tue, 15 Jun 2021 10:23:03 -0700 Subject: [PATCH 56/66] uncomment run on release --- .github/workflows/time-to-k8s.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/time-to-k8s.yml b/.github/workflows/time-to-k8s.yml index d3f82b23d4..991f8b73a0 100644 --- a/.github/workflows/time-to-k8s.yml +++ b/.github/workflows/time-to-k8s.yml @@ -1,9 +1,7 @@ name: "time-to-k8s benchmark" on: - pull_request: - types: [opened] - # release: - # types: [released] + release: + types: [released] jobs: benchmark: runs-on: ubuntu-18.04 From 4480bda5a040fb33b0e2c1d0c20d7f40052df8c0 Mon Sep 17 00:00:00 2001 From: Steven Powell Date: Tue, 15 Jun 2021 10:27:02 -0700 Subject: [PATCH 57/66] fixed yaml formatting --- .github/workflows/time-to-k8s.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/time-to-k8s.yml b/.github/workflows/time-to-k8s.yml index 991f8b73a0..0b4103ac71 100644 --- a/.github/workflows/time-to-k8s.yml +++ b/.github/workflows/time-to-k8s.yml @@ -1,7 +1,7 @@ name: "time-to-k8s benchmark" on: - release: - types: [released] + release: + types: [released] jobs: benchmark: runs-on: ubuntu-18.04 From 25c5bec652876db534cd186a5c4c7b7f70f8b50c Mon Sep 17 00:00:00 2001 From: Steven Powell Date: Tue, 15 Jun 2021 10:30:08 -0700 Subject: [PATCH 58/66] change back iterations to 5 --- hack/benchmark/time-to-k8s/time-to-k8s.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/benchmark/time-to-k8s/time-to-k8s.sh b/hack/benchmark/time-to-k8s/time-to-k8s.sh index f0ea5aaf85..a16beea807 100755 --- a/hack/benchmark/time-to-k8s/time-to-k8s.sh +++ 
b/hack/benchmark/time-to-k8s/time-to-k8s.sh @@ -51,7 +51,7 @@ run_benchmark() { pwd ( cd ./hack/benchmark/time-to-k8s/time-to-k8s-repo/ && git submodule update --init && - go run . --config local-kubernetes.yaml --iterations 1 --output output.csv ) + go run . --config local-kubernetes.yaml --iterations 5 --output output.csv ) } generate_chart() { From 4e9cab72d13a3d2d3ca432bb2a78f1fc57eb1c65 Mon Sep 17 00:00:00 2001 From: Steven Powell Date: Tue, 15 Jun 2021 10:36:49 -0700 Subject: [PATCH 59/66] run `make generate-docs` --- translations/de.json | 2 +- translations/es.json | 2 +- translations/fr.json | 2 +- translations/ja.json | 2 +- translations/ko.json | 4 ++-- translations/pl.json | 4 ++-- translations/strings.txt | 2 +- translations/zh-CN.json | 4 ++-- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/translations/de.json b/translations/de.json index bbd00044da..be50ba730e 100644 --- a/translations/de.json +++ b/translations/de.json @@ -625,7 +625,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "Der Name des virtuellen Hyperv-Switch. Standardmäßig zuerst gefunden. (nur Hyperv-Treiber)", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "Die von der minikube-VM verwendete Kubernetes-Version (Beispiel: v1.2.3)", diff --git a/translations/es.json b/translations/es.json index 67f349fb27..2595b1fe5a 100644 --- a/translations/es.json +++ b/translations/es.json @@ -630,7 +630,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "El nombre del conmutador virtual de hyperv. El valor predeterminado será el primer nombre que se encuentre (solo con el controlador de hyperv).", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "La versión de Kubernetes que utilizará la VM de minikube (p. ej.: versión 1.2.3)", diff --git a/translations/fr.json b/translations/fr.json index 7948802753..096e9d24c9 100644 --- a/translations/fr.json +++ b/translations/fr.json @@ -628,7 +628,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "Nom du commutateur virtuel hyperv. La valeur par défaut affiche le premier commutateur trouvé (pilote hyperv uniquement).", "The hypervisor does not appear to be configured properly. 
Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "Version de Kubernetes qu'utilisera la VM minikube (exemple : v1.2.3).", diff --git a/translations/ja.json b/translations/ja.json index 359f941d7b..d86712a73c 100644 --- a/translations/ja.json +++ b/translations/ja.json @@ -624,7 +624,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "hyperv 仮想スイッチ名。最初に見つかったものにデフォルト設定されます(hyperv ドライバのみ)", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "minikube VM で使用される Kubernetes バージョン(例: v1.2.3)", diff --git a/translations/ko.json b/translations/ko.json index 22a0818c5e..6d9761ed21 100644 --- a/translations/ko.json +++ b/translations/ko.json @@ -638,7 +638,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The machine-driver specified is failing to start. Try running 'docker-machine-driver-\u003ctype\u003e version'": "", @@ -961,4 +961,4 @@ "{{.profile}} profile is not valid: {{.err}}": "{{.profile}} 프로파일이 올바르지 않습니다: {{.err}}", "{{.type}} is not yet a supported filesystem. We will try anyways!": "", "{{.url}} is not accessible: {{.error}}": "{{.url}} 이 접근 불가능합니다: {{.error}}" -} +} \ No newline at end of file diff --git a/translations/pl.json b/translations/pl.json index 8991b99ee0..1d0462ae21 100644 --- a/translations/pl.json +++ b/translations/pl.json @@ -642,7 +642,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "", "The hypervisor does not appear to be configured properly. 
Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "Wersja kubernetesa, która zostanie użyta przez wirtualną maszynę minikube (np. v1.2.3)", @@ -961,4 +961,4 @@ "{{.profile}} profile is not valid: {{.err}}": "{{.profile}} profil nie jest poprawny: {{.err}}", "{{.type}} is not yet a supported filesystem. We will try anyways!": "{{.type}} nie jest wspierany przez system plików. I tak spróbujemy!", "{{.url}} is not accessible: {{.error}}": "{{.url}} nie jest osiągalny: {{.error}}" -} +} \ No newline at end of file diff --git a/translations/strings.txt b/translations/strings.txt index b3a8957bf6..4757ac5dce 100644 --- a/translations/strings.txt +++ b/translations/strings.txt @@ -585,7 +585,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The machine-driver specified is failing to start. Try running 'docker-machine-driver-\u003ctype\u003e version'": "", diff --git a/translations/zh-CN.json b/translations/zh-CN.json index 62beb10000..2fcb2b1d02 100644 --- a/translations/zh-CN.json +++ b/translations/zh-CN.json @@ -731,7 +731,7 @@ "The heapster addon is depreciated. please try to disable metrics-server instead": "", "The hyperv virtual switch name. Defaults to first found. (hyperv driver only)": "hyperv 虚拟交换机名称。默认为找到的第一个 hyperv 虚拟交换机。(仅限 hyperv 驱动程序)", "The hypervisor does not appear to be configured properly. Run 'minikube start --alsologtostderr -v=1' and inspect the error code": "管理程序似乎配置的不正确。执行 'minikube start --alsologtostderr -v=1' 并且检查错误代码", - "The image you are trying to add {{.imageName}} doesn't exist!": "", + "The image '{{.imageName}}' was not found; unable to add it to cache.": "", "The initial time interval for each check that wait performs in seconds": "", "The kubeadm binary within the Docker container is not executable": "", "The kubernetes version that the minikube VM will use (ex: v1.2.3)": "minikube 虚拟机将使用的 kubernetes 版本(例如 v1.2.3)", @@ -1071,4 +1071,4 @@ "{{.profile}} profile is not valid: {{.err}}": "", "{{.type}} is not yet a supported filesystem. 
We will try anyways!": "", "{{.url}} is not accessible: {{.error}}": "" -} +} \ No newline at end of file From 7537c9da2b0ebec5bea78a33e621276af12ddc91 Mon Sep 17 00:00:00 2001 From: Sharif Elgamal Date: Tue, 15 Jun 2021 14:22:04 -0700 Subject: [PATCH 60/66] add local-kicbase make target --- Makefile | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index ce1c5dd247..445f7a56e9 100644 --- a/Makefile +++ b/Makefile @@ -40,7 +40,7 @@ KVM_GO_VERSION ?= $(GO_VERSION:.0=) INSTALL_SIZE ?= $(shell du out/minikube-windows-amd64.exe | cut -f1) BUILDROOT_BRANCH ?= 2020.02.12 -REGISTRY?=gcr.io/k8s-minikube +REGISTRY ?=gcr.io/k8s-minikube # Get git commit id COMMIT_NO := $(shell git rev-parse HEAD 2> /dev/null || true) @@ -705,6 +705,21 @@ KICBASE_IMAGE_GCR ?= $(REGISTRY)/kicbase:$(KIC_VERSION) KICBASE_IMAGE_HUB ?= kicbase/stable:$(KIC_VERSION) KICBASE_IMAGE_REGISTRIES ?= $(KICBASE_IMAGE_GCR) $(KICBASE_IMAGE_HUB) +.PHONY: local-kicbase +local-kicbase: deploy/kicbase/auto-pause ## Builds the kicbase image and tags it local/kicbase:latest and local/kicbase:$(KIC_VERSION)-$(COMMIT_SHORT) + docker build -f ./deploy/kicbase/Dockerfile -t local/kicbase:$(KIC_VERSION) --build-arg COMMIT_SHA=${VERSION}-$(COMMIT) --cache-from $(KICBASE_IMAGE_GCR) ./deploy/kicbase + docker tag local/kicbase:$(KIC_VERSION) local/kicbase:latest + docker tag local/kicbase:$(KIC_VERSION) local/kicbase:$(KIC_VERSION)-$(COMMIT_SHORT) + +SED = sed -i +ifeq ($(GOOS),darwin) + SED = sed -i '' +endif + +.PHONY: local-kicbase-debug +local-kicbase-debug: local-kicbase ## Builds a local kicbase image and switches source code to point to it + $(SED) 's|Version = .*|Version = \"$(KIC_VERSION)-$(COMMIT_SHORT)\"|;s|baseImageSHA = .*|baseImageSHA = \"\"|;s|gcrRepo = .*|gcrRepo = \"local/kicbase\"|;s|dockerhubRepo = .*|dockerhubRepo = \"local/kicbase\"|' pkg/drivers/kic/types.go + .PHONY: push-kic-base-image push-kic-base-image: deploy/kicbase/auto-pause docker-multi-arch-builder ## Push multi-arch local/kicbase:latest to all remote registries ifdef AUTOPUSH From 0f3255eab63381bffee3f10a7ab2efe17e254b9a Mon Sep 17 00:00:00 2001 From: Sharif Elgamal Date: Tue, 15 Jun 2021 15:16:04 -0700 Subject: [PATCH 61/66] space --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 445f7a56e9..db9a23088e 100644 --- a/Makefile +++ b/Makefile @@ -40,7 +40,7 @@ KVM_GO_VERSION ?= $(GO_VERSION:.0=) INSTALL_SIZE ?= $(shell du out/minikube-windows-amd64.exe | cut -f1) BUILDROOT_BRANCH ?= 2020.02.12 -REGISTRY ?=gcr.io/k8s-minikube +REGISTRY ?= gcr.io/k8s-minikube # Get git commit id COMMIT_NO := $(shell git rev-parse HEAD 2> /dev/null || true) From c037e5f62fb4e8c40bed7c44465779954d3d1b14 Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 16 Jun 2021 15:29:50 -0700 Subject: [PATCH 62/66] Stop using sudo for check_install_gh. 
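
Run the gh installer directly rather than through sudo. This also drops
the trailing `|| true`, so under the script's `set -eu -o pipefail` a
failed install now aborts report_flakes.sh instead of continuing to a
`gh` invocation that would fail anyway.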
--- hack/jenkins/test-flake-chart/report_flakes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index a04c0d359c..16fc7a8800 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -82,6 +82,6 @@ if [[ "$FAILED_RATES_LINES" -gt 30 ]]; then fi # install gh if not present -sudo $DIR/../installers/check_install_gh.sh || true +$DIR/../installers/check_install_gh.sh gh issue comment "https://github.com/kubernetes/minikube/pull/$PR_NUMBER" --body "$(cat $TMP_COMMENT)" From 9f601ea39324ccde1d8f4e82b0b8fa9e661f7aed Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Wed, 16 Jun 2021 18:03:52 -0700 Subject: [PATCH 63/66] Fix commenting to a PR instead of an issue. --- hack/jenkins/test-flake-chart/report_flakes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/jenkins/test-flake-chart/report_flakes.sh b/hack/jenkins/test-flake-chart/report_flakes.sh index 16fc7a8800..0ffff11a79 100755 --- a/hack/jenkins/test-flake-chart/report_flakes.sh +++ b/hack/jenkins/test-flake-chart/report_flakes.sh @@ -84,4 +84,4 @@ fi # install gh if not present $DIR/../installers/check_install_gh.sh -gh issue comment "https://github.com/kubernetes/minikube/pull/$PR_NUMBER" --body "$(cat $TMP_COMMENT)" +gh pr comment "https://github.com/kubernetes/minikube/pull/$PR_NUMBER" --body "$(cat $TMP_COMMENT)" From f465a9684432d23c201e2c9947c4a34e5c1b65be Mon Sep 17 00:00:00 2001 From: Medya Ghazizadeh Date: Thu, 17 Jun 2021 12:05:36 -0400 Subject: [PATCH 64/66] site: how to run minikube on remote network --- site/content/en/docs/faq/_index.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/site/content/en/docs/faq/_index.md b/site/content/en/docs/faq/_index.md index 9b5f9a6d95..3ce0a9a88c 100644 --- a/site/content/en/docs/faq/_index.md +++ b/site/content/en/docs/faq/_index.md @@ -92,3 +92,15 @@ Yes! If you prefer not having emoji in your minikube output 😔 , just set the MINIKUBE_IN_STYLE=0 minikube start ``` + +## How to access minikube cluster from on a remote network ? + +minikube is primary goal is to quickly sets up a local Kubernetes clusters, and we strongly discourge from using minikube in production or to listen on remote traffic. therefore by design minikube networking only listens on local network. + +however it possible to configure minikube to listen on a remote network. Please be aware this opens your network open to outside world and it is not recommended, and if you are not fully sure of the security implications, please avoid using this option. + +for docker/podman drivers you could use `--listen-address` +``` +minikube start --listen-address=0.0.0.0 +``` + From 300230bd5c3f56012a00e5a665638cd0ab32aaed Mon Sep 17 00:00:00 2001 From: Andriy Dzikh Date: Thu, 17 Jun 2021 09:41:40 -0700 Subject: [PATCH 65/66] Update flake chart colors. 
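
Set explicit series colors instead of relying on the default palette:
per the series order, the flake rate line renders red (#dc3912) and the
duration line blue (#3366cc).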
---
 hack/jenkins/test-flake-chart/flake_chart.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/hack/jenkins/test-flake-chart/flake_chart.js b/hack/jenkins/test-flake-chart/flake_chart.js
index e18d5389a5..736fc7cd7a 100644
--- a/hack/jenkins/test-flake-chart/flake_chart.js
+++ b/hack/jenkins/test-flake-chart/flake_chart.js
@@ -184,6 +184,7 @@ async function init() {
       0: { title: "Flake rate", minValue: 0, maxValue: 100 },
       1: { title: "Duration (seconds)" },
     },
+    colors: ['#dc3912', '#3366cc'],
     tooltip: { trigger: "selection", isHtml: true }
   };
   const chart = new google.visualization.LineChart(document.getElementById('chart_div'));

From ddea20b2608c0f105c93f61c48ee85a34928ef77 Mon Sep 17 00:00:00 2001
From: Medya Ghazizadeh
Date: Thu, 17 Jun 2021 13:05:08 -0400
Subject: [PATCH 66/66] Update _index.md

---
 site/content/en/docs/faq/_index.md | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/site/content/en/docs/faq/_index.md b/site/content/en/docs/faq/_index.md
index 3ce0a9a88c..ccd09b6c57 100644
--- a/site/content/en/docs/faq/_index.md
+++ b/site/content/en/docs/faq/_index.md
@@ -93,13 +93,14 @@ MINIKUBE_IN_STYLE=0 minikube start
 ```

-## How to access minikube cluster from on a remote network ?
+## How can I access a minikube cluster from a remote network?

-minikube is primary goal is to quickly sets up a local Kubernetes clusters, and we strongly discourge from using minikube in production or to listen on remote traffic. therefore by design minikube networking only listens on local network.
+minikube's primary goal is to quickly set up local Kubernetes clusters, and therefore we strongly discourage using minikube in production or for listening to remote traffic. By design, minikube is meant to only listen on the local network.

-however it possible to configure minikube to listen on a remote network. Please be aware this opens your network open to outside world and it is not recommended, and if you are not fully sure of the security implications, please avoid using this option.
+However, it is possible to configure minikube to listen on a remote network. This will open your network to the outside world and is not recommended. If you are not fully aware of the security implications, please avoid using this.
+
+For the docker and podman drivers, use the `--listen-address` flag:

-for docker/podman drivers you could use `--listen-address`
 ```
 minikube start --listen-address=0.0.0.0
 ```