diff --git a/Gopkg.lock b/Gopkg.lock index a274fc0e7..a94772490 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -330,14 +330,14 @@ [[projects]] branch = "master" - digest = "1:da39b58557275d30a9340c2e1e13e16691461f9859d3230f59cceed411c04b49" + digest = "1:1ceac9ada19d8cfe0e400e32283ae97679f86fcdf48736de2cb081e1201dfeb4" name = "github.com/knative/test-infra" packages = [ "scripts", "tools/dep-collector", ] pruneopts = "UT" - revision = "89e4aae358be056ee70b595c20106a4a5c70fdc1" + revision = "9045f3a0de5b2b7b99a3aea4ee67c82a01119692" [[projects]] digest = "1:56dbf15e091bf7926cb33a57cb6bdfc658fc6d3498d2f76f10a97ce7856f1fde" diff --git a/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-and-episodic-pr-contributors.json b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-and-episodic-pr-contributors.json new file mode 100644 index 000000000..f17537f33 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-and-episodic-pr-contributors.json @@ -0,0 +1,422 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "psql", + "enable": true, + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "name": "Releases", + "query": "SELECT title, description from annotations WHERE $timeFilter order by time asc", + "rawQuery": "select extract(epoch from time) AS time, title as text, description as tags from sannotations where $__timeFilter(time)", + "showIn": 0, + "tagsColumn": "title,description", + "textColumn": "", + "titleColumn": "[[full_name]] release", + "type": "alert" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "iteration": 1543674726561, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + 
"datasource": "psql", + "decimals": 0, + "description": "Displays the number of new/episodic issues and the number of new/episodic issues authors.\nThe episodic author is defined as someone who hasn't created issue in the last 3 months and no more than 12 issues overall.", + "fill": 1, + "gridPos": { + "h": 22, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "legend": { + "alignAsTable": false, + "avg": true, + "current": true, + "hideEmpty": false, + "hideZero": false, + "max": true, + "min": true, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 1, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "New issue creators", + "yaxis": 2 + }, + { + "alias": "Episodic issue creators", + "yaxis": 2 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "dsType": "influxdb", + "format": "time_series", + "groupBy": [], + "hide": false, + "measurement": "reviewers_d", + "orderByTime": "ASC", + "policy": "autogen", + "query": "SELECT \"value\" FROM \"new_issues_[[repogroup]]_issues_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select\n time,\n value as \"Number of PRs from new contributors\"\nfrom\n snew_contributors\nwhere\n $__timeFilter(time)\n and period = '[[period]]'\n and series = 'new_contrib[[repogroup]]prs'\norder by\n time", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + } + ] + ], + "tags": [] + }, + { + "alias": "", + "dsType": "influxdb", + "format": "time_series", + "groupBy": [], + "hide": false, + "measurement": "reviewers_d", + "orderByTime": "ASC", + "policy": "autogen", + "query": "SELECT \"value\" FROM \"new_issues_[[repogroup]]_contributors_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select\n time,\n value as \"New 
contributors\"\nfrom\n snew_contributors\nwhere\n $__timeFilter(time)\n and period = '[[period]]'\n and series = 'new_contrib[[repogroup]]contrib'\norder by\n time", + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + } + ] + ], + "tags": [] + }, + { + "alias": "", + "dsType": "influxdb", + "format": "time_series", + "groupBy": [], + "hide": false, + "measurement": "reviewers_d", + "orderByTime": "ASC", + "policy": "autogen", + "query": "SELECT \"value\" FROM \"episodic_issues_[[repogroup]]_issues_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select\n time,\n value as \"Number of PRs from episodic contributors\"\nfrom\n sepisodic_contributors\nwhere\n $__timeFilter(time)\n and period = '[[period]]'\n and series = 'epis_contrib[[repogroup]]prs'\norder by\n time", + "refId": "C", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + } + ] + ], + "tags": [] + }, + { + "alias": "", + "dsType": "influxdb", + "format": "time_series", + "groupBy": [], + "hide": false, + "measurement": "reviewers_d", + "orderByTime": "ASC", + "policy": "autogen", + "query": "SELECT \"value\" FROM \"episodic_issues_[[repogroup]]_contributors_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select\n time,\n value as \"Episodic contributors\"\nfrom\n sepisodic_contributors\nwhere\n $__timeFilter(time)\n and period = '[[period]]'\n and series = 'epis_contrib[[repogroup]]contrib'\norder by\n time", + "refId": "D", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + } + ] + ], + "tags": [] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "New/episodic issues ([[repogroup_name]], [[period]])", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "transparent": true, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", 
+ "name": null, + "show": true, + "values": [ + "total" + ] + }, + "yaxes": [ + { + "format": "none", + "label": "Issues", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "none", + "label": "Issue creators", + "logBase": 1, + "max": null, + "min": "0", + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "[[docs]]", + "gridPos": { + "h": 11, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 11, + "links": [], + "mode": "html", + "title": "Dashboard documentation", + "type": "text" + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "dashboard", + "knative", + "issues" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "full_name", + "options": [], + "query": "select value_s from gha_vars where name = 'full_name'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "tags": [], + "text": "28 Days MA", + "value": "d28" + }, + "hide": 0, + "includeAll": false, + "label": "Period", + "multi": false, + "name": "period", + "options": [ + { + "selected": true, + "text": "28 Days MA", + "value": "d28" + }, + { + "selected": false, + "text": "Week", + "value": "w" + }, + { + "selected": false, + "text": "Month", + "value": "m" + }, + { + "selected": false, + "text": "Quarter", + "value": "q" + }, + { + "selected": false, + "text": "Year", + "value": "y" + } + ], + "query": "d,w,m,q,y", + "skipUrlSync": false, + "type": "custom" + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Repository group", + "multi": false, + "name": "repogroup_name", + "options": [], + "query": "select all_repo_group_name from tall_repo_groups 
order by 1", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "repogroup", + "options": [], + "query": "select all_repo_group_value from tall_repo_groups where all_repo_group_name = '[[repogroup_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "docs", + "options": [], + "query": "select value_s from gha_vars where name = 'new_and_episodic_prs_docs_html'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6M", + "to": "now-1M" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New and episodic PR contributors", + "uid": "14", + "version": 2 +} diff --git a/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-contributors-table.json b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-contributors-table.json new file mode 100644 index 000000000..e670622f5 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/new-contributors-table.json @@ -0,0 +1,310 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": 
"rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "iteration": 1541500383578, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "psql", + "description": "", + "fontSize": "90%", + "gridPos": { + "h": 25, + "w": 24, + "x": 0, + "y": 0 + }, + "hideTimeOverride": false, + "id": 1, + "links": [], + "pageSize": 30, + "scroll": true, + "showHeader": true, + "sort": { + "col": 1, + "desc": false + }, + "styles": [ + { + "alias": "Contributor", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "pattern": "str", + "preserveFormat": false, + "thresholds": [], + "type": "string", + "unit": "short" + }, + { + "alias": "First contribution", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "MM/DD/YY h:mm:ss a", + "decimals": 0, + "pattern": "dt", + "thresholds": [], + "type": "date", + "unit": "none" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "", + "thresholds": [], + "type": "number", + "unit": "short" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 0, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "", + "dsType": "influxdb", + "format": "table", + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "orderByTime": "ASC", + "policy": "default", + "query": "", + "rawQuery": true, + 
"rawSql": "select str, dt from \"snew_contributors_data\" where $__timeFilter(dt) and series = 'ncd[[repogroup]]' and period = 'h'", + "refId": "A", + "resultFormat": "table", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "timeFrom": null, + "timeShift": null, + "title": "[[full_name]] New contributors table (Repository group [[repogroup_name]])", + "transform": "table", + "transparent": false, + "type": "table" + }, + { + "content": "[[docs]]", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 25 + }, + "id": 11, + "links": [], + "mode": "html", + "title": "Dashboard documentation", + "type": "text" + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "dashboard", + "knative", + "table" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "full_name", + "options": [], + "query": "select value_s from gha_vars where name = 'full_name'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "All", + "value": "All" + }, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Repository group", + "multi": false, + "name": "repogroup_name", + "options": [], + "query": "select all_repo_group_name from tall_repo_groups order by 1", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "all", + "value": "all" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": "", + "multi": false, + "name": "repogroup", + "options": [], + "query": "select 
all_repo_group_value from tall_repo_groups where all_repo_group_name = '[[repogroup_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "isNone": true, + "text": "None", + "value": "" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "docs", + "options": [], + "query": "select value_s from gha_vars where name = 'new_contributors_docs_html'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-1M", + "to": "now" + }, + "timepicker": { + "hidden": false, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New contributors table", + "uid": "52", + "version": 4 +} diff --git a/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-companies-histogram.json b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-companies-histogram.json new file mode 100644 index 000000000..c4ac67120 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-companies-histogram.json @@ -0,0 +1,348 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "iteration": 1529598275115, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "psql", + "description": "Shows PRs authors 
companies", + "fontSize": "90%", + "gridPos": { + "h": 22, + "w": 24, + "x": 0, + "y": 0 + }, + "hideTimeOverride": true, + "id": 1, + "links": [], + "pageSize": 1000, + "scroll": true, + "showHeader": true, + "sort": { + "col": 1, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": null, + "pattern": "Time", + "type": "hidden" + }, + { + "alias": "Company", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "pattern": "name", + "preserveFormat": false, + "thresholds": [], + "type": "string", + "unit": "short" + }, + { + "alias": "Opened PRs", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 0, + "pattern": "value", + "thresholds": [], + "type": "number", + "unit": "none" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "", + "dsType": "influxdb", + "format": "table", + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT \"name\", \"value\" FROM \"hist_pr_companies_[[repogroup]]_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select name, value from \"shpr_comps\" where series = 'hpr_comps[[repogroup]]' and period = '[[period]]'", + "refId": "A", + "resultFormat": "table", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "title": "[[full_name]] PRs authors 
companies (Repository group: [[repogroup_name]], Range: [[period_name]])", + "transform": "table", + "transparent": false, + "type": "table" + }, + { + "content": "[[docs]]", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 11, + "links": [], + "mode": "html", + "title": "Dashboard documentation", + "type": "text" + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "companies", + "dashboard", + "knative", + "table" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "OCI", + "value": "OCI" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "full_name", + "options": [], + "query": "select value_s from gha_vars where name = 'full_name'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "tags": [], + "text": "Last decade", + "value": "Last decade" + }, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Range", + "multi": false, + "name": "period_name", + "options": [], + "query": "select quick_ranges_name from tquick_ranges order by time", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "y10", + "value": "y10" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "period", + "options": [], + "query": "select quick_ranges_suffix from tquick_ranges where quick_ranges_name = '[[period_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "All", + "value": "All" + 
}, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Repository group", + "multi": false, + "name": "repogroup_name", + "options": [], + "query": "select all_repo_group_name from tall_repo_groups order by 1", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "all", + "value": "all" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": "", + "multi": false, + "name": "repogroup", + "options": [], + "query": "select all_repo_group_value from tall_repo_groups where all_repo_group_name = '[[repogroup_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "docs", + "options": [], + "query": "select value_s from gha_vars where name = 'pr_companies_docs_html'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-5y", + "to": "now" + }, + "timepicker": { + "hidden": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "PRs authors companies histogram", + "uid": "22", + "version": 3 +} diff --git a/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-histogram.json b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-histogram.json new file mode 100644 index 000000000..ce9900f65 --- /dev/null +++ 
b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors-histogram.json @@ -0,0 +1,347 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "iteration": 1529598388542, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "psql", + "description": "Shows PRs authors", + "fontSize": "90%", + "gridPos": { + "h": 22, + "w": 24, + "x": 0, + "y": 0 + }, + "hideTimeOverride": true, + "id": 1, + "links": [], + "pageSize": 1000, + "scroll": true, + "showHeader": true, + "sort": { + "col": 1, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": null, + "pattern": "Time", + "type": "hidden" + }, + { + "alias": "Developer", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "pattern": "name", + "preserveFormat": false, + "thresholds": [], + "type": "string", + "unit": "short" + }, + { + "alias": "Opened PRs", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 0, + "pattern": "value", + "thresholds": [], + "type": "number", + "unit": "none" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "", + "dsType": "influxdb", + "format": "table", + "groupBy": [ + { + "params": [ + "$__interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": 
"fill" + } + ], + "orderByTime": "ASC", + "policy": "default", + "query": "SELECT \"name\", \"value\" FROM \"hist_pr_authors_[[repogroup]]_[[period]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select name, value from \"shpr_auth\" where series = 'hpr_auth[[repogroup]]' and period = '[[period]]'", + "refId": "A", + "resultFormat": "table", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ] + ], + "tags": [] + } + ], + "title": "[[full_name]] PRs authors (Repository group: [[repogroup_name]], Range: [[period_name]])", + "transform": "table", + "transparent": false, + "type": "table" + }, + { + "content": "[[docs]]", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 11, + "links": [], + "mode": "html", + "title": "Dashboard documentation", + "type": "text" + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "dashboard", + "knative", + "table" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "OCI", + "value": "OCI" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "full_name", + "options": [], + "query": "select value_s from gha_vars where name = 'full_name'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "tags": [], + "text": "Last decade", + "value": "Last decade" + }, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Range", + "multi": false, + "name": "period_name", + "options": [], + "query": "select quick_ranges_name from tquick_ranges order by time", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + 
"text": "y10", + "value": "y10" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "period", + "options": [], + "query": "select quick_ranges_suffix from tquick_ranges where quick_ranges_name = '[[period_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "All", + "value": "All" + }, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Repository group", + "multi": false, + "name": "repogroup_name", + "options": [], + "query": "select all_repo_group_name from tall_repo_groups order by 1", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "all", + "value": "all" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": "", + "multi": false, + "name": "repogroup", + "options": [], + "query": "select all_repo_group_value from tall_repo_groups where all_repo_group_name = '[[repogroup_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "docs", + "options": [], + "query": "select value_s from gha_vars where name = 'pr_authors_docs_html'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-5y", + "to": "now" + }, + "timepicker": { + "hidden": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + 
"30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "PRs authors histogram", + "uid": "23", + "version": 4 +} diff --git a/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors.json b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors.json new file mode 100644 index 000000000..a5a2fbb93 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/grafana/dashboards/knative/prs-authors.json @@ -0,0 +1,345 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "psql", + "enable": true, + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "name": "Releases", + "query": "SELECT title, description from annotations WHERE $timeFilter order by time asc", + "rawQuery": "select extract(epoch from time) AS time, title as text, description as tags from sannotations where $__timeFilter(time)", + "showIn": 0, + "tagsColumn": "title,description", + "textColumn": "", + "titleColumn": "[[full_name]] release", + "type": "alert" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "iteration": 1543674847952, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": "psql", + "decimals": 0, + "description": "Number of unique PR authors in [[repogroup_name]] repository group.", + "fill": 1, + "gridPos": { + "h": 22, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "hideEmpty": false, + "hideZero": false, + "max": true, + "min": true, + "rightSide": false, + "show": true, + "total": true, + "values": true + }, + "lines": false, + "linewidth": 
1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "dsType": "influxdb", + "format": "time_series", + "groupBy": [], + "hide": false, + "measurement": "reviewers_d", + "orderByTime": "ASC", + "policy": "autogen", + "query": "SELECT \"value\" FROM \"autogen\".\"prs_authors_[[repogroup]]_[[period]][[aggregate]]\" WHERE $timeFilter", + "rawQuery": true, + "rawSql": "select\n time,\n value as \"Unique PR authors\"\nfrom\n spr_auth\nwhere\n $__timeFilter(time)\n and series = 'pr_auth[[repogroup]]'\n and period = '[[period]]'\norder by\n time", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + } + ] + ], + "tags": [] + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Number of unique PRs authors in [[repogroup_name]] repository group ([[period]])", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [ + "total" + ] + }, + "yaxes": [ + { + "format": "short", + "label": "Unique PRs authors", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": "", + "logBase": 1, + "max": null, + "min": "0", + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "[[docs]]", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 11, + "links": [], + "mode": "html", + "title": "Dashboard documentation", + "type": "text" + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "dashboard", + "knative" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "tags": [], + "text": 
"7 Days MA", + "value": "d7" + }, + "hide": 0, + "includeAll": false, + "label": "Period", + "multi": false, + "name": "period", + "options": [ + { + "selected": false, + "text": "Day", + "value": "d" + }, + { + "selected": true, + "text": "7 Days MA", + "value": "d7" + }, + { + "selected": false, + "text": "Week", + "value": "w" + }, + { + "selected": false, + "text": "Month", + "value": "m" + }, + { + "selected": false, + "text": "Quarter", + "value": "q" + }, + { + "selected": false, + "text": "Year", + "value": "y" + } + ], + "query": "d,d7,w,m,q,y", + "skipUrlSync": false, + "type": "custom" + }, + { + "allValue": null, + "current": { + "text": "All", + "value": "All" + }, + "datasource": "psql", + "hide": 0, + "includeAll": false, + "label": "Repository group", + "multi": false, + "name": "repogroup_name", + "options": [], + "query": "select all_repo_group_name from tall_repo_groups order by 1", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "all", + "value": "all" + }, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": "", + "multi": false, + "name": "repogroup", + "options": [], + "query": "select all_repo_group_value from tall_repo_groups where all_repo_group_name = '[[repogroup_name]]'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "full_name", + "options": [], + "query": "select value_s from gha_vars where name = 'full_name'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + 
"allValue": null, + "current": {}, + "datasource": "psql", + "hide": 2, + "includeAll": false, + "label": null, + "multi": false, + "name": "docs", + "options": [], + "query": "select value_s from gha_vars where name = 'prs_authors_chart_docs_html'", + "refresh": 1, + "regex": "", + "skipUrlSync": true, + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-6M", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "PRs authors", + "uid": "21", + "version": 2 +} diff --git a/vendor/github.com/knative/test-infra/devstats/metrics/shared/episodic_contributors.sql b/vendor/github.com/knative/test-infra/devstats/metrics/shared/episodic_contributors.sql new file mode 100644 index 000000000..4365eda18 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/metrics/shared/episodic_contributors.sql @@ -0,0 +1,64 @@ +with prev as ( + select distinct user_id + from + gha_pull_requests + where + created_at >= date '{{from}}' - '3 months'::interval + and created_at < '{{from}}' +), prev_cnt as ( + select user_id, count(distinct id) as cnt + from + gha_pull_requests + where + created_at < '{{from}}' + group by + user_id +) +select + 'epis_contrib;All;contrib,prs' as name, + round(count(distinct pr.user_id) / {{n}}, 2) as contributors, + round(count(distinct pr.id) / {{n}}, 2) as prs +from + gha_pull_requests pr +left join + prev_cnt pc +on + pc.user_id = pr.user_id +where + pr.created_at >= '{{from}}' + and pr.created_at < '{{to}}' + and pr.user_id not in (select user_id from prev) + and (pc.user_id is null or pc.cnt <= 12) +union select sub.name, + round(count(distinct sub.user_id) / {{n}}, 2) as contributors, + round(count(distinct sub.id) / {{n}}, 2) 
as prs +from ( + select 'epis_contrib;' || coalesce(ecf.repo_group, r.repo_group) || ';contrib,prs' as name, + pr.user_id, + pr.id + from + gha_repos r, + gha_pull_requests pr + left join + gha_events_commits_files ecf + on + ecf.event_id = pr.event_id + left join + prev_cnt pc + on + pc.user_id = pr.user_id + where + pr.dup_repo_id = r.id + and pr.created_at >= '{{from}}' + and pr.created_at < '{{to}}' + and pr.user_id not in (select user_id from prev) + and (pc.user_id is null or pc.cnt <= 12) + ) sub +where + sub.name is not null +group by + sub.name +order by + prs desc, + name asc +; diff --git a/vendor/github.com/knative/test-infra/devstats/metrics/shared/hist_pr_authors.sql b/vendor/github.com/knative/test-infra/devstats/metrics/shared/hist_pr_authors.sql new file mode 100644 index 000000000..9d50bbbdc --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/metrics/shared/hist_pr_authors.sql @@ -0,0 +1,44 @@ +select + sub.repo_group, + sub.actor, + count(distinct sub.id) as prs +from ( + select 'hpr_auth,' || coalesce(ecf.repo_group, r.repo_group) as repo_group, + pr.dup_actor_login as actor, + pr.id + from + gha_repos r, + gha_pull_requests pr + left join + gha_events_commits_files ecf + on + ecf.event_id = pr.event_id + where + {{period:pr.created_at}} + and pr.dup_repo_id = r.id + and (lower(pr.dup_actor_login) {{exclude_bots}}) + ) sub +where + sub.repo_group is not null +group by + sub.repo_group, + sub.actor +having + count(distinct sub.id) >= 1 +union select 'hpr_auth,All' as repo_group, + dup_actor_login as actor, + count(distinct id) as prs +from + gha_pull_requests +where + {{period:created_at}} + and (lower(dup_actor_login) {{exclude_bots}}) +group by + dup_actor_login +having + count(distinct id) >= 1 +order by + prs desc, + repo_group asc, + actor asc +; diff --git a/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors.sql b/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors.sql new 
file mode 100644 index 000000000..2a594b6e3 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors.sql @@ -0,0 +1,45 @@ +with prev as ( + select distinct user_id + from + gha_pull_requests + where + created_at < '{{from}}' +) +select + 'new_contrib;All;contrib,prs' as name, + round(count(distinct user_id) / {{n}}, 2) as contributors, + round(count(distinct id) / {{n}}, 2) as prs +from + gha_pull_requests +where + created_at >= '{{from}}' + and created_at < '{{to}}' + and user_id not in (select user_id from prev) +union select sub.name, + round(count(distinct sub.user_id) / {{n}}, 2) as contributors, + round(count(distinct sub.id) / {{n}}, 2) as prs +from ( + select 'new_contrib;' || coalesce(ecf.repo_group, r.repo_group) || ';contrib,prs' as name, + pr.user_id, + pr.id + from + gha_repos r, + gha_pull_requests pr + left join + gha_events_commits_files ecf + on + ecf.event_id = pr.event_id + where + pr.dup_repo_id = r.id + and pr.created_at >= '{{from}}' + and pr.created_at < '{{to}}' + and pr.user_id not in (select user_id from prev) + ) sub +where + sub.name is not null +group by + sub.name +order by + prs desc, + name asc +; diff --git a/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors_data.sql b/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors_data.sql new file mode 100644 index 000000000..f7bd7b5a4 --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/metrics/shared/new_contributors_data.sql @@ -0,0 +1,64 @@ +with prev as ( + select distinct user_id + from + gha_pull_requests + where + merged = true + and merged_at < '{{from}}' + and (lower(dup_actor_login) {{exclude_bots}}) +), contributors as ( + select distinct pr.user_id, + first_value(pr.merged_at) over prs_by_merged_at as merged_at, + first_value(pr.dup_repo_id) over prs_by_merged_at as repo_id, + first_value(pr.dup_repo_name) over prs_by_merged_at as repo_name, + first_value(pr.event_id) over 
prs_by_merged_at as event_id + from + gha_pull_requests pr + where + pr.merged = true + and pr.merged_at >= '{{from}}' + and pr.merged_at < '{{to}}' + and pr.user_id not in (select user_id from prev) + and (lower(pr.dup_actor_login) {{exclude_bots}}) + window + prs_by_merged_at as ( + partition by pr.user_id + order by + pr.merged_at asc, + pr.event_id asc + range between unbounded preceding + and current row + ) +) +select + 'ncd,All' as metric, + c.merged_at, + 0.0 as value, + case a.name is null when true then a.login else case a.name when '' then a.login else a.name || ' (' || a.login || ')' end end as contributor +from + contributors c, + gha_actors a +where + c.user_id = a.id +union select 'ncd,' || coalesce(ecf.repo_group, r.repo_group) as metric, + c.merged_at, + 0.0 as value, + case a.name is null when true then a.login else case a.name when '' then a.login else a.name || ' (' || a.login || ')' end end as contributor +from + gha_actors a, + gha_repos r, + contributors c +left join + gha_events_commits_files ecf +on + ecf.event_id = c.event_id +where + c.user_id = a.id + and c.repo_id = r.id + and c.repo_name = r.name + and r.repo_group is not null +order by + metric asc, + merged_at asc, + contributor asc +; diff --git a/vendor/github.com/knative/test-infra/devstats/metrics/shared/prs_authors.sql b/vendor/github.com/knative/test-infra/devstats/metrics/shared/prs_authors.sql new file mode 100644 index 000000000..b0e3a737b --- /dev/null +++ b/vendor/github.com/knative/test-infra/devstats/metrics/shared/prs_authors.sql @@ -0,0 +1,35 @@ +select + 'pr_auth,All' as repo_group, + round(count(distinct dup_actor_login) / {{n}}, 2) as authors +from + gha_pull_requests +where + created_at >= '{{from}}' + and created_at < '{{to}}' + and (lower(dup_actor_login) {{exclude_bots}}) +union select sub.repo_group, + round(count(distinct sub.actor) / {{n}}, 2) as authors +from ( + select 'pr_auth,' || coalesce(ecf.repo_group, r.repo_group) as repo_group, + pr.dup_actor_login 
as actor + from + gha_repos r, + gha_pull_requests pr + left join + gha_events_commits_files ecf + on + ecf.event_id = pr.event_id + where + pr.dup_repo_id = r.id + and pr.created_at >= '{{from}}' + and pr.created_at < '{{to}}' + and (lower(pr.dup_actor_login) {{exclude_bots}}) + ) sub +where + sub.repo_group is not null +group by + sub.repo_group +order by + authors desc, + repo_group asc +; diff --git a/vendor/github.com/knative/test-infra/scripts/README.md b/vendor/github.com/knative/test-infra/scripts/README.md index dcf7dc977..cb0aa4e6b 100644 --- a/vendor/github.com/knative/test-infra/scripts/README.md +++ b/vendor/github.com/knative/test-infra/scripts/README.md @@ -11,10 +11,30 @@ This is a helper script to run the presubmit tests. To use it: 1. [optional] Define the function `build_tests()`. If you don't define this function, the default action for running the build tests is to: - - lint and link check markdown files + + - check markdown files - run `go build` on the entire repo - run `/hack/verify-codegen.sh` (if it exists) - - check licenses in `/cmd` (if it exists) + - check licenses in all go packages + + The markdown link checker tool doesn't check `localhost` links by default. + Its configuration file, `markdown-link-check-config.json`, lives in the + `test-infra/scripts` directory. To override it, create a file with the same + name, containing the custom config in the `/test` directory. + + The markdown lint tool ignores long lines by default. Its configuration file, + `markdown-lint-config.rc`, lives in the `test-infra/scripts` directory. To + override it, create a file with the same name, containing the custom config + in the `/test` directory. + +1. [optional] Customize the default build test runner, if you're using it. Set + the following environment variables if the default values don't fit your needs: + + - `DISABLE_MD_LINTING`: Disable linting markdown files, defaults to 0 (false). 
+ - `DISABLE_MD_LINK_CHECK`: Disable checking links in markdown files, defaults + to 0 (false). + - `PRESUBMIT_TEST_FAIL_FAST`: Fail the presubmit test immediately if a test fails, + defaults to 0 (false). 1. [optional] Define the functions `pre_build_tests()` and/or `post_build_tests()`. These functions will be called before or after the @@ -84,14 +104,48 @@ main $@ This is a helper script for Knative E2E test scripts. To use it: +1. [optional] Customize the test cluster. Set the following environment variables + if the default values don't fit your needs: + + - `E2E_CLUSTER_REGION`: Cluster region, defaults to `us-central1`. + - `E2E_CLUSTER_BACKUP_REGIONS`: Space-separated list of regions to retry test + cluster creation in case of stockout. Defaults to `us-west1 us-east1`. + - `E2E_CLUSTER_ZONE`: Cluster zone (e.g., `a`), defaults to none (i.e. use a regional + cluster). + - `E2E_CLUSTER_BACKUP_ZONES`: Space-separated list of zones to retry test cluster + creation in case of stockout. If defined, `E2E_CLUSTER_BACKUP_REGIONS` will be + ignored thus it defaults to none. + - `E2E_CLUSTER_MACHINE`: Cluster node machine type, defaults to `n1-standard-4}`. + - `E2E_MIN_CLUSTER_NODES`: Minimum number of nodes in the cluster when autoscaling, + defaults to 1. + - `E2E_MAX_CLUSTER_NODES`: Maximum number of nodes in the cluster when autoscaling, + defaults to 3. + 1. Source the script. -1. [optional] Write the `teardown()` function, which will tear down your test +1. [optional] Write the `knative_setup()` function, which will set up your + system under test (e.g., Knative Serving). This function won't be called if you + use the `--skip-knative-setup` flag. + +1. [optional] Write the `knative_teardown()` function, which will tear down your + system under test (e.g., Knative Serving). This function won't be called if you + use the `--skip-knative-setup` flag. + +1. [optional] Write the `test_setup()` function, which will set up the test resources. +1. 
[optional] Write the `test_teardown()` function, which will tear down the test + resources. + +1. [optional] Write the `cluster_setup()` function, which will set up any resources + before the test cluster is created. + +1. [optional] Write the `cluster_teardown()` function, which will tear down any + resources after the test cluster is destroyed. + 1. [optional] Write the `dump_extra_cluster_state()` function. It will be called when a test fails, and can dump extra information about the current state - of the cluster (tipically using `kubectl`). + of the cluster (typically using `kubectl`). 1. [optional] Write the `parse_flags()` function. It will be called whenever an unrecognized flag is passed to the script, allowing you to define your own flags. @@ -103,14 +157,12 @@ This is a helper script for Knative E2E test scripts. To use it: 1. Write logic for the end-to-end tests. Run all go tests using `go_test_e2e()` (or `report_go_test()` if you need a more fine-grained control) and call - `fail_test()` or `success()` if any of them failed. The environment variables - `DOCKER_REPO_OVERRIDE`, `K8S_CLUSTER_OVERRIDE` and `K8S_USER_OVERRIDE` will be - set according to the test cluster. You can also use the following boolean (0 is - false, 1 is true) environment variables for the logic: + `fail_test()` or `success()` if any of them failed. The environment variable + `KO_DOCKER_REPO` and `E2E_PROJECT_ID` will be set according to the test cluster. + You can also use the following boolean (0 is false, 1 is true) environment + variables for the logic: - `EMIT_METRICS`: true if `--emit-metrics` was passed. - - `USING_EXISTING_CLUSTER`: true if the test cluster is an already existing one, - and not a temporary cluster created by `kubetest`. All environment variables above are marked read-only. @@ -119,24 +171,35 @@ This is a helper script for Knative E2E test scripts. To use it: 1. 
Calling your script without arguments will create a new cluster in the GCP project `$PROJECT_ID` and run the tests against it. -1. Calling your script with `--run-tests` and the variables `K8S_CLUSTER_OVERRIDE`, - `K8S_USER_OVERRIDE` and `DOCKER_REPO_OVERRIDE` set will immediately start the - tests against the cluster. +1. Calling your script with `--run-tests` and the variable `KO_DOCKER_REPO` set + will immediately start the tests against the cluster currently configured for + `kubectl`. + +1. By default Istio is installed on the cluster via Addon, use `--skip-istio-addon` if + you choose not to have it preinstalled. 1. You can force running the tests against a specific GKE cluster version by using - the `--cluster-version` flag and passing a X.Y.Z version as the flag value. + the `--cluster-version` flag and passing a full version as the flag value. ### Sample end-to-end test script This script will test that the latest Knative Serving nightly release works. It defines a special flag (`--no-knative-wait`) that causes the script not to -wait for Knative Serving to be up before running the tests. +wait for Knative Serving to be up before running the tests. It also requires that +the test cluster is created in a specific region, `us-west2`. ```bash + +# This test requires a cluster in LA +E2E_CLUSTER_REGION=us-west2 + source vendor/github.com/knative/test-infra/scripts/e2e-tests.sh -function teardown() { - echo "TODO: tear down test resources" +function knative_setup() { + start_latest_knative_serving + if (( WAIT_FOR_KNATIVE )); then + wait_until_pods_running knative-serving || fail_test "Knative Serving not up" + fi } function parse_flags() { @@ -151,12 +214,6 @@ WAIT_FOR_KNATIVE=1 initialize $@ -start_latest_knative_serving - -if (( WAIT_FOR_KNATIVE )); then - wait_until_pods_running knative-serving || fail_test "Knative Serving is not up" -fi - # TODO: use go_test_e2e to run the tests. 
kubectl get pods || fail_test @@ -169,16 +226,14 @@ This is a helper script for Knative release scripts. To use it: 1. Source the script. -1. Call the `initialize()` function passing `$@` (without quotes). - -1. Call the `run_validation_tests()` function passing the script or executable that - runs the release validation tests. It will call the script to run the tests unless - `--skip_tests` was passed. +1. [optional] By default, the release script will run `./test/presubmit-tests.sh` + as the release validation tests. If you need to run something else, set the + environment variable `VALIDATION_TESTS` to the executable to run. -1. Write logic for the release process. Call `publish_yaml()` to publish the manifest(s), - `tag_releases_in_yaml()` to tag the generated images, `branch_release()` to branch - named releases. Use the following boolean (0 is false, 1 is true) and string environment - variables for the logic: +1. Write logic for building the release in a function named `build_release()`. + Set the environment variable `ARTIFACTS_TO_PUBLISH` to the list of files created, + space separated. Use the following boolean (0 is false, 1 is true) and string + environment variables for the logic: - `RELEASE_VERSION`: contains the release version if `--version` was passed. This also overrides the value of the `TAG` variable as `v`. @@ -189,39 +244,40 @@ This is a helper script for Knative release scripts. To use it: - `RELEASE_GCS_BUCKET`: contains the GCS bucket name to store the manifests if `--release-gcs` was passed, otherwise the default value `knative-nightly/` will be used. It is empty if `--publish` was not passed. + - `BUILD_COMMIT_HASH`: the commit short hash for the current repo. If the current + git tree is dirty, it will have `-dirty` appended to it. + - `BUILD_YYYYMMDD`: current UTC date in `YYYYMMDD` format. + - `BUILD_TIMESTAMP`: human-readable UTC timestamp in `YYYY-MM-DD HH:MM:SS` format. 
+ - `BUILD_TAG`: a tag in the form `v$BUILD_YYYYMMDD-$BUILD_COMMIT_HASH`. - `KO_DOCKER_REPO`: contains the GCR to store the images if `--release-gcr` was passed, otherwise the default value `gcr.io/knative-nightly` will be used. It is set to `ko.local` if `--publish` was not passed. - - `SKIP_TESTS`: true if `--skip-tests` was passed. This is handled automatically - by the `run_validation_tests()` function. + - `SKIP_TESTS`: true if `--skip-tests` was passed. This is handled automatically. - `TAG_RELEASE`: true if `--tag-release` was passed. In this case, the environment - variable `TAG` will contain the release tag in the form `vYYYYMMDD-`. + variable `TAG` will contain the release tag in the form `v$BUILD_TAG`. - `PUBLISH_RELEASE`: true if `--publish` was passed. In this case, the environment - variable `KO_FLAGS` will be updated with the `-L` option. - - `BRANCH_RELEASE`: true if both `--version` and `--publish-release` were passed. + variable `KO_FLAGS` will be updated with the `-L` option and `TAG` will contain + the release tag in the form `v$RELEASE_VERSION`. + - `PUBLISH_TO_GITHUB`: true if `--version`, `--branch` and `--publish-release` + were passed. All boolean environment variables default to false for safety. All environment variables above, except `KO_FLAGS`, are marked read-only once - `initialize()` is called. + `main()` is called (see below). + +1. Call the `main()` function passing `$@` (without quotes). 
### Sample release script ```bash source vendor/github.com/knative/test-infra/scripts/release.sh -initialize $@ - -run_validation_tests ./test/presubmit-tests.sh - -# config/ contains the manifests -ko resolve ${KO_FLAGS} -f config/ > release.yaml - -tag_images_in_yaml release.yaml - -if (( PUBLISH_RELEASE )); then - publish_yaml release.yaml -fi +function build_release() { + # config/ contains the manifests + ko resolve ${KO_FLAGS} -f config/ > release.yaml + ARTIFACTS_TO_PUBLISH="release.yaml" +} -branch_release "Knative Foo" release.yaml +main $@ ``` diff --git a/vendor/github.com/knative/test-infra/scripts/e2e-tests.sh b/vendor/github.com/knative/test-infra/scripts/e2e-tests.sh index 49562ad81..efb6a00aa 100755 --- a/vendor/github.com/knative/test-infra/scripts/e2e-tests.sh +++ b/vendor/github.com/knative/test-infra/scripts/e2e-tests.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2018 The Knative Authors +# Copyright 2019 The Knative Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,29 +36,42 @@ function build_resource_name() { } # Test cluster parameters -readonly E2E_BASE_NAME="k${REPO_NAME}" -readonly E2E_CLUSTER_NAME=$(build_resource_name e2e-cls) -readonly E2E_NETWORK_NAME=$(build_resource_name e2e-net) -readonly E2E_CLUSTER_REGION=us-central1 -readonly E2E_CLUSTER_MACHINE=n1-standard-4 -readonly TEST_RESULT_FILE=/tmp/${E2E_BASE_NAME}-e2e-result + +# Configurable parameters +# export E2E_CLUSTER_REGION and E2E_CLUSTER_ZONE as they're used in the cluster setup subprocess +export E2E_CLUSTER_REGION=${E2E_CLUSTER_REGION:-us-central1} +# By default we use regional clusters. 
+export E2E_CLUSTER_ZONE=${E2E_CLUSTER_ZONE:-} + +# Default backup regions in case of stockouts; by default we don't fall back to a different zone in the same region +readonly E2E_CLUSTER_BACKUP_REGIONS=${E2E_CLUSTER_BACKUP_REGIONS:-us-west1 us-east1} +readonly E2E_CLUSTER_BACKUP_ZONES=${E2E_CLUSTER_BACKUP_ZONES:-} + +readonly E2E_CLUSTER_MACHINE=${E2E_CLUSTER_MACHINE:-n1-standard-4} +readonly E2E_GKE_ENVIRONMENT=${E2E_GKE_ENVIRONMENT:-prod} +readonly E2E_GKE_COMMAND_GROUP=${E2E_GKE_COMMAND_GROUP:-beta} + # Each knative repository may have a different cluster size requirement here, # so we allow calling code to set these parameters. If they are not set we # use some sane defaults. readonly E2E_MIN_CLUSTER_NODES=${E2E_MIN_CLUSTER_NODES:-1} readonly E2E_MAX_CLUSTER_NODES=${E2E_MAX_CLUSTER_NODES:-3} +readonly E2E_BASE_NAME="k${REPO_NAME}" +readonly E2E_CLUSTER_NAME=$(build_resource_name e2e-cls) +readonly E2E_NETWORK_NAME=$(build_resource_name e2e-net) +readonly TEST_RESULT_FILE=/tmp/${E2E_BASE_NAME}-e2e-result + # Flag whether test is using a boskos GCP project IS_BOSKOS=0 # Tear down the test resources. function teardown_test_resources() { + # On boskos, save time and don't teardown as the cluster will be destroyed anyway. + (( IS_BOSKOS )) && return header "Tearing down test environment" - # Free resources in GCP project. - if (( ! USING_EXISTING_CLUSTER )) && function_exists teardown; then - teardown - fi - + function_exists test_teardown && test_teardown + (( ! SKIP_KNATIVE_SETUP )) && function_exists knative_teardown && knative_teardown # Delete the kubernetes source downloaded by kubetest rm -fr kubernetes kubernetes.tar.gz } @@ -73,50 +86,6 @@ function go_test_e2e() { report_go_test -v -count=1 ${go_options} $@ ${test_options} } -# Download the k8s binaries required by kubetest. -# Parameters: $1 - GCP project that will host the test cluster. 
-function download_k8s() { - local version=${E2E_CLUSTER_VERSION} - # Fetch valid versions - local versions="$(gcloud container get-server-config \ - --project=$1 \ - --format='value(validMasterVersions)' \ - --region=${E2E_CLUSTER_REGION})" - local gke_versions=(`echo -n ${versions//;/ /}`) - echo "Valid GKE versions are [${versions//;/, }]" - if [[ "${version}" == "latest" ]]; then - # Get first (latest) version, excluding the "-gke.#" suffix - version="${gke_versions[0]%-*}" - echo "Using latest version, ${version}" - elif [[ "${version}" == "default" ]]; then - echo "ERROR: `default` GKE version is not supported yet" - return 1 - else - echo "Using command-line supplied version ${version}" - fi - # Download k8s to staging dir - version=v${version} - local staging_dir=${GOPATH}/src/k8s.io/kubernetes/_output/gcs-stage - rm -fr ${staging_dir} - staging_dir=${staging_dir}/${version} - mkdir -p ${staging_dir} - pushd ${staging_dir} - export KUBERNETES_PROVIDER=gke - export KUBERNETES_RELEASE=${version} - curl -fsSL https://get.k8s.io | bash - local result=$? - if [[ ${result} -eq 0 ]]; then - mv kubernetes/server/kubernetes-server-*.tar.gz . - mv kubernetes/client/kubernetes-client-*.tar.gz . - rm -fr kubernetes - # Create an empty kubernetes test tarball; we don't use it but kubetest will fetch it - # As of August 21 2018 this means avoiding a useless 1.2GB download - tar -czf kubernetes-test.tar.gz -T /dev/null - fi - popd - return ${result} -} - # Dump info about the test cluster. If dump_extra_cluster_info() is defined, calls it too. # This is intended to be called when a test fails to provide debugging information. function dump_cluster_state() { @@ -137,26 +106,89 @@ function dump_cluster_state() { echo "***************************************" } +# On a Prow job, save some metadata about the test for Testgrid. +function save_metadata() { + (( ! 
IS_PROW )) && return + local geo_key="Region" + local geo_value="${E2E_CLUSTER_REGION}" + if [[ -n "${E2E_CLUSTER_ZONE}" ]]; then + geo_key="Zone" + geo_value="${E2E_CLUSTER_REGION}-${E2E_CLUSTER_ZONE}" + fi + local cluster_version="$(gcloud container clusters list --project=${E2E_PROJECT_ID} --format='value(currentMasterVersion)')" + cat << EOF > ${ARTIFACTS}/metadata.json +{ + "E2E:${geo_key}": "${geo_value}", + "E2E:Machine": "${E2E_CLUSTER_MACHINE}", + "E2E:Version": "${cluster_version}", + "E2E:MinNodes": "${E2E_MIN_CLUSTER_NODES}", + "E2E:MaxNodes": "${E2E_MAX_CLUSTER_NODES}" +} +EOF +} + +# Set E2E_CLUSTER_VERSION to a specific GKE version. +# Parameters: $1 - target GKE version (X.Y, X.Y.Z, X.Y.Z-gke.W, default or gke-latest). +# $2 - region[-zone] where the clusteer will be created. +function resolve_k8s_version() { + local target_version="$1" + if [[ "${target_version}" == "default" ]]; then + local version="$(gcloud container get-server-config \ + --format='value(defaultClusterVersion)' \ + --zone=$2)" + [[ -z "${version}" ]] && return 1 + E2E_CLUSTER_VERSION="${version}" + echo "Using default version, ${E2E_CLUSTER_VERSION}" + return 0 + fi + # Fetch valid versions + local versions="$(gcloud container get-server-config \ + --format='value(validMasterVersions)' \ + --zone=$2)" + [[ -z "${versions}" ]] && return 1 + local gke_versions=($(echo -n "${versions//;/ /}")) + echo "Available GKE versions in $2 are [${versions//;/, }]" + if [[ "${target_version}" == "gke-latest" ]]; then + # Get first (latest) version, excluding the "-gke.#" suffix + E2E_CLUSTER_VERSION="${gke_versions[0]}" + echo "Using latest version, ${E2E_CLUSTER_VERSION}" + else + local latest="$(echo "${gke_versions[@]}" | tr ' ' '\n' | grep -E ^${target_version} | cut -f1 -d- | sort | tail -1)" + if [[ -z "${latest}" ]]; then + echo "ERROR: version ${target_version} is not available" + return 1 + fi + E2E_CLUSTER_VERSION="${latest}" + echo "Using ${E2E_CLUSTER_VERSION} for supplied version 
${target_version}" + fi + return 0 +} + # Create a test cluster with kubetest and call the current script again. function create_test_cluster() { # Fail fast during setup. set -o errexit set -o pipefail - header "Creating test cluster" + if function_exists cluster_setup; then + cluster_setup || fail_test "cluster setup failed" + fi echo "Cluster will have a minimum of ${E2E_MIN_CLUSTER_NODES} and a maximum of ${E2E_MAX_CLUSTER_NODES} nodes." # Smallest cluster required to run the end-to-end-tests local CLUSTER_CREATION_ARGS=( - --gke-create-args="--enable-autoscaling --min-nodes=${E2E_MIN_CLUSTER_NODES} --max-nodes=${E2E_MAX_CLUSTER_NODES} --scopes=cloud-platform --enable-basic-auth --no-issue-client-certificate" + --gke-create-command="container clusters create --quiet --enable-autoscaling --min-nodes=${E2E_MIN_CLUSTER_NODES} --max-nodes=${E2E_MAX_CLUSTER_NODES} --scopes=cloud-platform --enable-basic-auth --no-issue-client-certificate ${GKE_ADDONS} ${EXTRA_CLUSTER_CREATION_FLAGS[@]}" --gke-shape={\"default\":{\"Nodes\":${E2E_MIN_CLUSTER_NODES}\,\"MachineType\":\"${E2E_CLUSTER_MACHINE}\"}} --provider=gke --deployment=gke --cluster="${E2E_CLUSTER_NAME}" - --gcp-region="${E2E_CLUSTER_REGION}" --gcp-network="${E2E_NETWORK_NAME}" - --gke-environment=prod + --gcp-node-image="${SERVING_GKE_IMAGE}" + --gke-environment="${E2E_GKE_ENVIRONMENT}" + --gke-command-group="${E2E_GKE_COMMAND_GROUP}" + --test=false + --up ) if (( ! IS_BOSKOS )); then CLUSTER_CREATION_ARGS+=(--gcp-project=${GCP_PROJECT}) @@ -166,109 +198,165 @@ function create_test_cluster() { mkdir -p $HOME/.ssh touch $HOME/.ssh/google_compute_engine.pub touch $HOME/.ssh/google_compute_engine - # Clear user and cluster variables, so they'll be set to the test cluster. - # DOCKER_REPO_OVERRIDE is not touched because when running locally it must - # be a writeable docker repo. - export K8S_USER_OVERRIDE= - export K8S_CLUSTER_OVERRIDE= # Assume test failed (see details in set_test_return_code()). 
set_test_return_code 1 - local test_cmd_args="--run-tests" - (( EMIT_METRICS )) && test_cmd_args+=" --emit-metrics" - [[ -n "${GCP_PROJECT}" ]] && test_cmd_args+=" --gcp-project ${GCP_PROJECT}" - # Get the current GCP project for downloading kubernetes local gcloud_project="${GCP_PROJECT}" [[ -z "${gcloud_project}" ]] && gcloud_project="$(gcloud config get-value project)" echo "gcloud project is ${gcloud_project}" + echo "gcloud user is $(gcloud config get-value core/account)" (( IS_BOSKOS )) && echo "Using boskos for the test cluster" [[ -n "${GCP_PROJECT}" ]] && echo "GCP project for test cluster is ${GCP_PROJECT}" echo "Test script is ${E2E_SCRIPT}" - download_k8s ${gcloud_project} || return 1 - # Don't fail test for kubetest, as it might incorrectly report test failure - # if teardown fails (for details, see success() below) - set +o errexit - run_go_tool k8s.io/test-infra/kubetest \ - kubetest "${CLUSTER_CREATION_ARGS[@]}" \ - --up \ - --down \ - --extract local \ - --gcp-node-image "${SERVING_GKE_IMAGE}" \ - --test-cmd "${E2E_SCRIPT}" \ - --test-cmd-args "${test_cmd_args}" + # Set arguments for this script again + local test_cmd_args="--run-tests" + (( EMIT_METRICS )) && test_cmd_args+=" --emit-metrics" + (( SKIP_KNATIVE_SETUP )) && test_cmd_args+=" --skip-knative-setup" + [[ -n "${GCP_PROJECT}" ]] && test_cmd_args+=" --gcp-project ${GCP_PROJECT}" + [[ -n "${E2E_SCRIPT_CUSTOM_FLAGS[@]}" ]] && test_cmd_args+=" ${E2E_SCRIPT_CUSTOM_FLAGS[@]}" + local extra_flags=() + # If using boskos, save time and let it tear down the cluster + (( ! IS_BOSKOS )) && extra_flags+=(--down) + + # Set a minimal kubernetes environment that satisfies kubetest + # TODO(adrcunha): Remove once https://github.com/kubernetes/test-infra/issues/13029 is fixed. 
+ local kubedir="$(mktemp -d --tmpdir kubernetes.XXXXXXXXXX)" + local test_wrapper="${kubedir}/e2e-test.sh" + mkdir ${kubedir}/cluster + ln -s "$(which kubectl)" ${kubedir}/cluster/kubectl.sh + echo "#!/bin/bash" > ${test_wrapper} + echo "cd $(pwd) && set -x" >> ${test_wrapper} + echo "${E2E_SCRIPT} ${test_cmd_args}" >> ${test_wrapper} + chmod +x ${test_wrapper} + cd ${kubedir} + + # Create cluster and run the tests + create_test_cluster_with_retries "${CLUSTER_CREATION_ARGS[@]}" \ + --test-cmd "${test_wrapper}" \ + ${extra_flags[@]} \ + ${EXTRA_KUBETEST_FLAGS[@]} echo "Test subprocess exited with code $?" # Ignore any errors below, this is a best-effort cleanup and shouldn't affect the test result. set +o errexit - # Ensure we're using the GCP project used by kubetest - gcloud_project="$(gcloud config get-value project)" - # Delete target pools and health checks that might have leaked. - # See https://github.com/knative/serving/issues/959 for details. - # TODO(adrcunha): Remove once the leak issue is resolved. 
- local http_health_checks="$(gcloud compute target-pools list \ - --project=${gcloud_project} --format='value(healthChecks)' --filter="instances~-${E2E_CLUSTER_NAME}-" | \ - grep httpHealthChecks | tr '\n' ' ')" - local target_pools="$(gcloud compute target-pools list \ - --project=${gcloud_project} --format='value(name)' --filter="instances~-${E2E_CLUSTER_NAME}-" | \ - tr '\n' ' ')" - if [[ -n "${target_pools}" ]]; then - echo "Found leaked target pools, deleting" - gcloud compute forwarding-rules delete -q --project=${gcloud_project} --region=${E2E_CLUSTER_REGION} ${target_pools} - gcloud compute target-pools delete -q --project=${gcloud_project} --region=${E2E_CLUSTER_REGION} ${target_pools} - fi - if [[ -n "${http_health_checks}" ]]; then - echo "Found leaked health checks, deleting" - gcloud compute http-health-checks delete -q --project=${gcloud_project} ${http_health_checks} - fi - local result="$(cat ${TEST_RESULT_FILE})" - echo "Test result code is $result" + function_exists cluster_teardown && cluster_teardown + local result=$(get_test_return_code) + echo "Artifacts were written to ${ARTIFACTS}" + echo "Test result code is ${result}" exit ${result} } +# Retry backup regions/zones if cluster creations failed due to stockout. +# Parameters: $1..$n - any kubetest flags other than geo flag. 
+function create_test_cluster_with_retries() { + local cluster_creation_log=/tmp/${E2E_BASE_NAME}-cluster_creation-log + # zone_not_provided is a placeholder for e2e_cluster_zone to make for loop below work + local zone_not_provided="zone_not_provided" + + local e2e_cluster_regions=(${E2E_CLUSTER_REGION}) + local e2e_cluster_zones=(${E2E_CLUSTER_ZONE}) + + if [[ -n "${E2E_CLUSTER_BACKUP_ZONES}" ]]; then + e2e_cluster_zones+=(${E2E_CLUSTER_BACKUP_ZONES}) + elif [[ -n "${E2E_CLUSTER_BACKUP_REGIONS}" ]]; then + e2e_cluster_regions+=(${E2E_CLUSTER_BACKUP_REGIONS}) + e2e_cluster_zones=(${zone_not_provided}) + else + echo "No backup region/zone set, cluster creation will fail in case of stockout" + fi + + local e2e_cluster_target_version="${E2E_CLUSTER_VERSION}" + for e2e_cluster_region in "${e2e_cluster_regions[@]}"; do + for e2e_cluster_zone in "${e2e_cluster_zones[@]}"; do + E2E_CLUSTER_REGION=${e2e_cluster_region} + E2E_CLUSTER_ZONE=${e2e_cluster_zone} + [[ "${E2E_CLUSTER_ZONE}" == "${zone_not_provided}" ]] && E2E_CLUSTER_ZONE="" + local cluster_creation_zone="${E2E_CLUSTER_REGION}" + [[ -n "${E2E_CLUSTER_ZONE}" ]] && cluster_creation_zone="${E2E_CLUSTER_REGION}-${E2E_CLUSTER_ZONE}" + resolve_k8s_version ${e2e_cluster_target_version} ${cluster_creation_zone} || return 1 + + header "Creating test cluster ${E2E_CLUSTER_VERSION} in ${cluster_creation_zone}" + # Don't fail test for kubetest, as it might incorrectly report test failure + # if teardown fails (for details, see success() below) + set +o errexit + export CLUSTER_API_VERSION=${E2E_CLUSTER_VERSION} + run_go_tool k8s.io/test-infra/kubetest \ + kubetest "$@" --gcp-region=${cluster_creation_zone} 2>&1 | tee ${cluster_creation_log} + + # Exit if test succeeded + [[ "$(get_test_return_code)" == "0" ]] && return 0 + # Retry if cluster creation failed because of: + # - stockout (https://github.com/knative/test-infra/issues/592) + # - latest GKE not available in this region/zone yet 
(https://github.com/knative/test-infra/issues/694) + [[ -z "$(grep -Fo 'does not have enough resources available to fulfill' ${cluster_creation_log})" \ + && -z "$(grep -Fo 'ResponseError: code=400, message=No valid versions with the prefix' ${cluster_creation_log})" \ + && -z "$(grep -Po 'ResponseError: code=400, message=Master version "[0-9a-z\-\.]+" is unsupported' ${cluster_creation_log})" ]] \ + && return 1 + done + done + echo "No more region/zones to try, quitting" + return 1 +} + # Setup the test cluster for running the tests. function setup_test_cluster() { # Fail fast during setup. set -o errexit set -o pipefail - # Set the required variables if necessary. - if [[ -z ${K8S_USER_OVERRIDE} ]]; then - export K8S_USER_OVERRIDE=$(gcloud config get-value core/account) - fi + header "Setting up test cluster" - if [[ -z ${K8S_CLUSTER_OVERRIDE} ]]; then - USING_EXISTING_CLUSTER=0 - export K8S_CLUSTER_OVERRIDE=$(kubectl config current-context) - acquire_cluster_admin_role ${K8S_USER_OVERRIDE} ${E2E_CLUSTER_NAME} ${E2E_CLUSTER_REGION} - # Make sure we're in the default namespace. Currently kubetest switches to - # test-pods namespace when creating the cluster. - kubectl config set-context $K8S_CLUSTER_OVERRIDE --namespace=default - fi - readonly USING_EXISTING_CLUSTER + # Set the actual project the test cluster resides in + # It will be a project assigned by Boskos if test is running on Prow, + # otherwise will be ${GCP_PROJECT} set up by user. 
+ readonly export E2E_PROJECT_ID="$(gcloud config get-value project)" - if [[ -z ${DOCKER_REPO_OVERRIDE} ]]; then - export DOCKER_REPO_OVERRIDE=gcr.io/$(gcloud config get-value project)/${E2E_BASE_NAME}-e2e-img + # Save some metadata about cluster creation for using in prow and testgrid + save_metadata + + local k8s_user=$(gcloud config get-value core/account) + local k8s_cluster=$(kubectl config current-context) + + is_protected_cluster ${k8s_cluster} && \ + abort "kubeconfig context set to ${k8s_cluster}, which is forbidden" + + # If cluster admin role isn't set, this is a brand new cluster + # Setup the admin role and also KO_DOCKER_REPO + if [[ -z "$(kubectl get clusterrolebinding cluster-admin-binding 2> /dev/null)" ]]; then + acquire_cluster_admin_role ${k8s_user} ${E2E_CLUSTER_NAME} ${E2E_CLUSTER_REGION} ${E2E_CLUSTER_ZONE} + kubectl config set-context ${k8s_cluster} --namespace=default + export KO_DOCKER_REPO=gcr.io/${E2E_PROJECT_ID}/${E2E_BASE_NAME}-e2e-img fi - echo "- Cluster is ${K8S_CLUSTER_OVERRIDE}" - echo "- User is ${K8S_USER_OVERRIDE}" - echo "- Docker is ${DOCKER_REPO_OVERRIDE}" + # Safety checks + is_protected_gcr ${KO_DOCKER_REPO} && \ + abort "\$KO_DOCKER_REPO set to ${KO_DOCKER_REPO}, which is forbidden" + + echo "- Project is ${E2E_PROJECT_ID}" + echo "- Cluster is ${k8s_cluster}" + echo "- User is ${k8s_user}" + echo "- Docker is ${KO_DOCKER_REPO}" - export KO_DOCKER_REPO="${DOCKER_REPO_OVERRIDE}" export KO_DATA_PATH="${REPO_ROOT_DIR}/.git" trap teardown_test_resources EXIT - if (( USING_EXISTING_CLUSTER )) && function_exists teardown; then - echo "Deleting any previous SUT instance" - teardown - fi - - readonly K8S_CLUSTER_OVERRIDE - readonly K8S_USER_OVERRIDE - readonly DOCKER_REPO_OVERRIDE - # Handle failures ourselves, so we can dump useful info. set +o errexit set +o pipefail + + if (( ! SKIP_KNATIVE_SETUP )) && function_exists knative_setup; then + # Wait for Istio installation to complete, if necessary, before calling knative_setup. 
+ (( ! SKIP_ISTIO_ADDON )) && (wait_until_batch_job_complete istio-system || return 1) + knative_setup || fail_test "Knative setup failed" + fi + if function_exists test_setup; then + test_setup || fail_test "test setup failed" + fi +} + +# Gets the exit code of the test script. +# For more details, see set_test_return_code(). +function get_test_return_code() { + echo $(cat ${TEST_RESULT_FILE}) } # Set the return code that the test script will return. @@ -282,6 +370,7 @@ function set_test_return_code() { echo -n "$1"> ${TEST_RESULT_FILE} } +# Signal (as return code and in the logs) that all E2E tests passed. function success() { set_test_return_code 0 echo "**************************************" @@ -301,19 +390,19 @@ function fail_test() { RUN_TESTS=0 EMIT_METRICS=0 -USING_EXISTING_CLUSTER=1 +SKIP_KNATIVE_SETUP=0 +SKIP_ISTIO_ADDON=0 GCP_PROJECT="" E2E_SCRIPT="" E2E_CLUSTER_VERSION="" +GKE_ADDONS="" +EXTRA_CLUSTER_CREATION_FLAGS=() +EXTRA_KUBETEST_FLAGS=() +E2E_SCRIPT_CUSTOM_FLAGS=() # Parse flags and initialize the test cluster. function initialize() { - # Normalize calling script path; we can't use readlink because it's not available everywhere - E2E_SCRIPT=$0 - [[ ${E2E_SCRIPT} =~ ^[\./].* ]] || E2E_SCRIPT="./$0" - E2E_SCRIPT="$(cd ${E2E_SCRIPT%/*} && echo $PWD/${E2E_SCRIPT##*/})" - readonly E2E_SCRIPT - + E2E_SCRIPT="$(get_canonical_path $0)" E2E_CLUSTER_VERSION="${SERVING_GKE_VERSION}" cd ${REPO_ROOT_DIR} @@ -325,29 +414,30 @@ function initialize() { local skip=$? if [[ ${skip} -ne 0 ]]; then # Skip parsed flag (and possibly argument) and continue - shift ${skip} + # Also save it so it's passed through to the test script + for ((i=1;i<=skip;i++)); do + E2E_SCRIPT_CUSTOM_FLAGS+=("$1") + shift + done continue fi fi # Try parsing flag as a standard one. 
- case $parameter in + case ${parameter} in --run-tests) RUN_TESTS=1 ;; --emit-metrics) EMIT_METRICS=1 ;; - --gcp-project) - shift - [[ $# -ge 1 ]] || abort "missing project name after --gcp-project" - GCP_PROJECT=$1 - ;; - --cluster-version) - shift - [[ $# -ge 1 ]] || abort "missing version after --cluster-version" - [[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] || abort "kubernetes version must be 'X.Y.Z'" - E2E_CLUSTER_VERSION=$1 - ;; + --skip-knative-setup) SKIP_KNATIVE_SETUP=1 ;; + --skip-istio-addon) SKIP_ISTIO_ADDON=1 ;; *) - echo "usage: $0 [--run-tests][--emit-metrics][--cluster-version X.Y.Z][--gcp-project name]" - abort "unknown option ${parameter}" - ;; + [[ $# -ge 2 ]] || abort "missing parameter after $1" + shift + case ${parameter} in + --gcp-project) GCP_PROJECT=$1 ;; + --cluster-version) E2E_CLUSTER_VERSION=$1 ;; + --cluster-creation-flag) EXTRA_CLUSTER_CREATION_FLAGS+=($1) ;; + --kubetest-flag) EXTRA_KUBETEST_FLAGS+=($1) ;; + *) abort "unknown option ${parameter}" ;; + esac esac shift done @@ -357,21 +447,22 @@ function initialize() { echo "\$PROJECT_ID is set to '${PROJECT_ID}', using it to run the tests" GCP_PROJECT="${PROJECT_ID}" fi - if (( ! IS_PROW )) && [[ -z "${GCP_PROJECT}" ]]; then + if (( ! IS_PROW )) && (( ! RUN_TESTS )) && [[ -z "${GCP_PROJECT}" ]]; then abort "set \$PROJECT_ID or use --gcp-project to select the GCP project where the tests are run" fi (( IS_PROW )) && [[ -z "${GCP_PROJECT}" ]] && IS_BOSKOS=1 - # Safety checks - is_protected_gcr ${DOCKER_REPO_OVERRIDE} && \ - abort "\$DOCKER_REPO_OVERRIDE set to ${DOCKER_REPO_OVERRIDE}, which is forbidden" + (( SKIP_ISTIO_ADDON )) || GKE_ADDONS="--addons=Istio" readonly RUN_TESTS readonly EMIT_METRICS - readonly E2E_CLUSTER_VERSION readonly GCP_PROJECT readonly IS_BOSKOS + readonly EXTRA_CLUSTER_CREATION_FLAGS + readonly EXTRA_KUBETEST_FLAGS + readonly SKIP_KNATIVE_SETUP + readonly GKE_ADDONS if (( ! 
RUN_TESTS )); then create_test_cluster diff --git a/vendor/github.com/knative/test-infra/scripts/library.sh b/vendor/github.com/knative/test-infra/scripts/library.sh index 9b7a78f0c..5b2be7246 100755 --- a/vendor/github.com/knative/test-infra/scripts/library.sh +++ b/vendor/github.com/knative/test-infra/scripts/library.sh @@ -18,18 +18,13 @@ # to be used in test scripts and the like. It doesn't do anything when # called from command line. +# GCP project where all tests related resources live +readonly KNATIVE_TESTS_PROJECT=knative-tests + # Default GKE version to be used with Knative Serving -readonly SERVING_GKE_VERSION=latest +readonly SERVING_GKE_VERSION=gke-latest readonly SERVING_GKE_IMAGE=cos -# Public latest stable nightly images and yaml files. -readonly KNATIVE_BASE_YAML_SOURCE=https://storage.googleapis.com/knative-nightly/@/latest -readonly KNATIVE_ISTIO_CRD_YAML=${KNATIVE_BASE_YAML_SOURCE/@/serving}/istio-crds.yaml -readonly KNATIVE_ISTIO_YAML=${KNATIVE_BASE_YAML_SOURCE/@/serving}/istio.yaml -readonly KNATIVE_SERVING_RELEASE=${KNATIVE_BASE_YAML_SOURCE/@/serving}/serving.yaml -readonly KNATIVE_BUILD_RELEASE=${KNATIVE_BASE_YAML_SOURCE/@/build}/release.yaml -readonly KNATIVE_EVENTING_RELEASE=${KNATIVE_BASE_YAML_SOURCE/@/eventing}/release.yaml - # Conveniently set GOPATH if unset if [[ -z "${GOPATH:-}" ]]; then export GOPATH="$(go env GOPATH)" @@ -44,6 +39,25 @@ readonly IS_PROW readonly REPO_ROOT_DIR="$(git rev-parse --show-toplevel)" readonly REPO_NAME="$(basename ${REPO_ROOT_DIR})" +# Useful flags about the current OS +IS_LINUX=0 +IS_OSX=0 +IS_WINDOWS=0 +case "${OSTYPE}" in + darwin*) IS_OSX=1 ;; + linux*) IS_LINUX=1 ;; + msys*) IS_WINDOWS=1 ;; + *) echo "** Internal error in library.sh, unknown OS '${OSTYPE}'" ; exit 1 ;; +esac +readonly IS_LINUX +readonly IS_OSX +readonly IS_WINDOWS + +# Set ARTIFACTS to an empty temp dir if unset +if [[ -z "${ARTIFACTS:-}" ]]; then + export ARTIFACTS="$(mktemp -d)" +fi + # On a Prow job, redirect stderr to stdout so 
it's synchronously added to log (( IS_PROW )) && exec 2>&1 @@ -59,7 +73,7 @@ function abort() { # $2 - banner message. function make_banner() { local msg="$1$1$1$1 $2 $1$1$1$1" - local border="${msg//[-0-9A-Za-z _.,\/()]/$1}" + local border="${msg//[-0-9A-Za-z _.,\/()\']/$1}" echo -e "${border}\n${msg}\n${border}" } @@ -128,7 +142,7 @@ function wait_until_pods_running() { [[ ${status[0]} -lt 1 ]] && all_ready=0 && break [[ ${status[1]} -lt 1 ]] && all_ready=0 && break [[ ${status[0]} -ne ${status[1]} ]] && all_ready=0 && break - done <<< $(echo "${pods}" | grep -v Completed) + done <<< "$(echo "${pods}" | grep -v Completed)" if (( all_ready )); then echo -e "\nAll pods are up:\n${pods}" return 0 @@ -141,21 +155,46 @@ function wait_until_pods_running() { return 1 } -# Waits until the given service has an external IP address. +# Waits until all batch jobs complete in the given namespace. +# Parameters: $1 - namespace. +function wait_until_batch_job_complete() { + echo -n "Waiting until all batch jobs in namespace $1 run to completion." + for i in {1..150}; do # timeout after 5 minutes + local jobs=$(kubectl get jobs -n $1 --no-headers \ + -ocustom-columns='n:{.metadata.name},c:{.spec.completions},s:{.status.succeeded}') + # All jobs must be complete + local not_complete=$(echo "${jobs}" | awk '{if ($2!=$3) print $0}' | wc -l) + if [[ ${not_complete} -eq 0 ]]; then + echo -e "\nAll jobs are complete:\n${jobs}" + return 0 + fi + echo -n "." + sleep 2 + done + echo -e "\n\nERROR: timeout waiting for jobs to complete\n${jobs}" + return 1 +} + +# Waits until the given service has an external address (IP/hostname). # Parameters: $1 - namespace. # $2 - service name. 
function wait_until_service_has_external_ip() { - echo -n "Waiting until service $2 in namespace $1 has an external IP" + echo -n "Waiting until service $2 in namespace $1 has an external address (IP/hostname)" for i in {1..150}; do # timeout after 15 minutes local ip=$(kubectl get svc -n $1 $2 -o jsonpath="{.status.loadBalancer.ingress[0].ip}") if [[ -n "${ip}" ]]; then echo -e "\nService $2.$1 has IP $ip" return 0 fi + local hostname=$(kubectl get svc -n $1 $2 -o jsonpath="{.status.loadBalancer.ingress[0].hostname}") + if [[ -n "${hostname}" ]]; then + echo -e "\nService $2.$1 has hostname $hostname" + return 0 + fi echo -n "." sleep 6 done - echo -e "\n\nERROR: timeout waiting for service $svc.$ns to have an external IP" + echo -e "\n\nERROR: timeout waiting for service $2.$1 to have an external address" kubectl get pods -n $1 return 1 } @@ -168,7 +207,7 @@ function wait_until_routable() { for i in {1..150}; do # timeout after 5 minutes local val=$(curl -H "Host: $2" "http://$1" 2>/dev/null) if [[ -n "$val" ]]; then - echo "\nEndpoint is now routable" + echo -e "\nEndpoint is now routable" return 0 fi echo -n "." @@ -195,15 +234,42 @@ function get_app_pods() { kubectl get pods ${namespace} --selector=app=$1 --output=jsonpath="{.items[*].metadata.name}" } +# Capitalize the first letter of each word. +# Parameters: $1..$n - words to capitalize. +function capitalize() { + local capitalized=() + for word in $@; do + local initial="$(echo ${word:0:1}| tr 'a-z' 'A-Z')" + capitalized+=("${initial}${word:1}") + done + echo "${capitalized[@]}" +} + +# Dumps pod logs for the given app. +# Parameters: $1 - app name. +# $2 - namespace. +function dump_app_logs() { + echo ">>> ${REPO_NAME_FORMATTED} $1 logs:" + for pod in $(get_app_pods "$1" "$2") + do + echo ">>> Pod: $pod" + kubectl -n "$2" logs "$pod" -c "$1" + done +} + # Sets the given user as cluster admin. 
# Parameters: $1 - user # $2 - cluster name # $3 - cluster region +# $4 - cluster zone, optional function acquire_cluster_admin_role() { + echo "Acquiring cluster-admin role for user '$1'" + local geoflag="--region=$3" + [[ -n $4 ]] && geoflag="--zone=$3-$4" # Get the password of the admin and use it, as the service account (or the user) # might not have the necessary permission. local password=$(gcloud --format="value(masterAuth.password)" \ - container clusters describe $2 --region=$3) + container clusters describe $2 ${geoflag}) if [[ -n "${password}" ]]; then # Cluster created with basic authentication kubectl config set-credentials cluster-admin \ @@ -213,9 +279,9 @@ function acquire_cluster_admin_role() { local key=$(mktemp) echo "Certificate in ${cert}, key in ${key}" gcloud --format="value(masterAuth.clientCertificate)" \ - container clusters describe $2 --region=$3 | base64 -d > ${cert} + container clusters describe $2 ${geoflag} | base64 -d > ${cert} gcloud --format="value(masterAuth.clientKey)" \ - container clusters describe $2 --region=$3 | base64 -d > ${key} + container clusters describe $2 ${geoflag} | base64 -d > ${key} kubectl config set-credentials cluster-admin \ --client-certificate=${cert} --client-key=${key} fi @@ -226,7 +292,7 @@ function acquire_cluster_admin_role() { --user=$1 # Reset back to the default account gcloud container clusters get-credentials \ - $2 --region=$3 --project $(gcloud config get-value project) + $2 ${geoflag} --project $(gcloud config get-value project) } # Runs a go test and generate a junit summary. @@ -237,10 +303,6 @@ function report_go_test() { local args=" $@ " local go_test="go test -race -v ${args/ -v / }" # Just run regular go tests if not on Prow. - if (( ! 
IS_PROW )); then - ${go_test} - return - fi echo "Running tests with '${go_test}'" local report=$(mktemp) ${go_test} | tee ${report} @@ -255,37 +317,37 @@ function report_go_test() { | sed -e "s#\"github.com/knative/${REPO_NAME}/#\"#g" \ > ${xml} echo "XML report written to ${xml}" + if (( ! IS_PROW )); then + # Keep the suffix, so files are related. + local logfile=${xml/junit_/go_test_} + logfile=${logfile/.xml/.log} + cp ${report} ${logfile} + echo "Test log written to ${logfile}" + fi return ${failed} } -# Install the latest stable Knative/serving in the current cluster. -function start_latest_knative_serving() { +# Install Knative Serving in the current cluster. +# Parameters: $1 - Knative Serving manifest. +function start_knative_serving() { header "Starting Knative Serving" - subheader "Installing Istio" - echo "Installing Istio CRD from ${KNATIVE_ISTIO_CRD_YAML}" - kubectl apply -f ${KNATIVE_ISTIO_CRD_YAML} || return 1 - echo "Installing Istio from ${KNATIVE_ISTIO_YAML}" - kubectl apply -f ${KNATIVE_ISTIO_YAML} || return 1 - wait_until_pods_running istio-system || return 1 - kubectl label namespace default istio-injection=enabled || return 1 - subheader "Installing Knative Build" - kubectl apply -f ${KNATIVE_BUILD_RELEASE} || return 1 subheader "Installing Knative Serving" - echo "Installing Serving from ${KNATIVE_SERVING_RELEASE}" - kubectl apply -f ${KNATIVE_SERVING_RELEASE} || return 1 + echo "Installing Serving CRDs from $1" + kubectl apply --selector knative.dev/crd-install=true -f "$1" + echo "Installing the rest of serving components from $1" + kubectl apply -f "$1" wait_until_pods_running knative-serving || return 1 - wait_until_pods_running knative-build || return 1 } -# Install the latest stable Knative/build in the current cluster. 
-function start_latest_knative_build() { - header "Starting Knative Build" - subheader "Installing Istio" - kubectl apply -f ${KNATIVE_ISTIO_YAML} || return 1 - wait_until_pods_running istio-system || return 1 - subheader "Installing Knative Build" - kubectl apply -f ${KNATIVE_BUILD_RELEASE} || return 1 - wait_until_pods_running knative-build || return 1 +# Install the stable release Knative/serving in the current cluster. +# Parameters: $1 - Knative Serving version number, e.g. 0.6.0. +function start_release_knative_serving() { + start_knative_serving "https://storage.googleapis.com/knative-releases/serving/previous/v$1/serving.yaml" +} + +# Install the latest stable Knative Serving in the current cluster. +function start_latest_knative_serving() { + start_knative_serving "${KNATIVE_SERVING_RELEASE}" } # Run a go tool, installing it first if necessary. @@ -345,27 +407,114 @@ function run_lint_tool() { # Check links in the given markdown files. # Parameters: $1...$n - files to inspect function check_links_in_markdown() { - # https://github.com/tcort/markdown-link-check - run_lint_tool markdown-link-check "checking links in markdown files" -q $@ + # https://github.com/raviqqe/liche + local config="${REPO_ROOT_DIR}/test/markdown-link-check-config.rc" + [[ ! -e ${config} ]] && config="${_TEST_INFRA_SCRIPTS_DIR}/markdown-link-check-config.rc" + local options="$(grep '^-' ${config} | tr \"\n\" ' ')" + run_lint_tool liche "checking links in markdown files" "-d ${REPO_ROOT_DIR} ${options}" $@ } # Check format of the given markdown files. # Parameters: $1..$n - files to inspect function lint_markdown() { # https://github.com/markdownlint/markdownlint - run_lint_tool mdl "linting markdown files" "-r ~MD013" $@ + local config="${REPO_ROOT_DIR}/test/markdown-lint-config.rc" + [[ ! 
-e ${config} ]] && config="${_TEST_INFRA_SCRIPTS_DIR}/markdown-lint-config.rc" + run_lint_tool mdl "linting markdown files" "-c ${config}" $@ } -# Return 0 if the given parameter is an integer, otherwise 1 -# Parameters: $1 - an integer +# Return whether the given parameter is an integer. +# Parameters: $1 - integer to check function is_int() { [[ -n $1 && $1 =~ ^[0-9]+$ ]] } -# Return 0 if the given parameter is the knative release/nightly gcr, 1 -# otherwise -# Parameters: $1 - gcr name, e.g. gcr.io/knative-nightly +# Return whether the given parameter is the knative release/nightly GCR. +# Parameters: $1 - full GCR name, e.g. gcr.io/knative-foo-bar function is_protected_gcr() { - [[ -n $1 && "$1" =~ "^gcr.io/knative-(releases|nightly)/?$" ]] + [[ -n $1 && $1 =~ ^gcr.io/knative-(releases|nightly)/?$ ]] +} + +# Return whether the given parameter is any cluster under ${KNATIVE_TESTS_PROJECT}. +# Parameters: $1 - Kubernetes cluster context (output of kubectl config current-context) +function is_protected_cluster() { + # Example: gke_knative-tests_us-central1-f_prow + [[ -n $1 && $1 =~ ^gke_${KNATIVE_TESTS_PROJECT}_us\-[a-zA-Z0-9]+\-[a-z]+_[a-z0-9\-]+$ ]] +} + +# Return whether the given parameter is ${KNATIVE_TESTS_PROJECT}. +# Parameters: $1 - project name +function is_protected_project() { + [[ -n $1 && "$1" == "${KNATIVE_TESTS_PROJECT}" ]] +} + +# Remove symlinks in a path that are broken or lead outside the repo. +# Parameters: $1 - path name, e.g. vendor +function remove_broken_symlinks() { + for link in $(find $1 -type l); do + # Remove broken symlinks + if [[ ! 
-e ${link} ]]; then + unlink ${link} + continue + fi + # Get canonical path to target, remove if outside the repo + local target="$(ls -l ${link})" + target="${target##* -> }" + [[ ${target} == /* ]] || target="./${target}" + target="$(cd `dirname ${link}` && cd ${target%/*} && echo $PWD/${target##*/})" + if [[ ${target} != *github.com/knative/* ]]; then + unlink ${link} + continue + fi + done +} + +# Returns the canonical path of a filesystem object. +# Parameters: $1 - path to return in canonical form +# $2 - base dir for relative links; optional, defaults to current +function get_canonical_path() { + # We don't use readlink because it's not available on every platform. + local path=$1 + local pwd=${2:-.} + [[ ${path} == /* ]] || path="${pwd}/${path}" + echo "$(cd ${path%/*} && echo $PWD/${path##*/})" } +# Returns the URL to the latest manifest for the given Knative project. +# Parameters: $1 - repository name of the given project +# $2 - name of the yaml file, without extension +function get_latest_knative_yaml_source() { + local branch_name="" + local repo_name="$1" + local yaml_name="$2" + # Get the branch name from Prow's env var, see https://github.com/kubernetes/test-infra/blob/master/prow/jobs.md. + # Otherwise, try getting the current branch from git. + (( IS_PROW )) && branch_name="${PULL_BASE_REF:-}" + [[ -z "${branch_name}" ]] && branch_name="$(git rev-parse --abbrev-ref HEAD)" + # If it's a release branch, the yaml source URL should point to a specific version. + if [[ ${branch_name} =~ ^release-[0-9\.]+$ ]]; then + # Get the latest tag name for the current branch, which is likely formatted as v0.5.0 + local tag_name="$(git describe --tags --abbrev=0)" + # The given repo might not have this tag, so we need to find its latest release manifest with the same major&minor version. + local major_minor="$(echo ${tag_name} | cut -d. 
-f1-2)" + local yaml_source_path="$(gsutil ls gs://knative-releases/${repo_name}/previous/${major_minor}.*/${yaml_name}.yaml \ + | sort \ + | tail -n 1 \ + | cut -b6-)" + echo "https://storage.googleapis.com/${yaml_source_path}" + # If it's not a release branch, the yaml source URL should be nightly build. + else + echo "https://storage.googleapis.com/knative-nightly/${repo_name}/latest/${yaml_name}.yaml" + fi +} + +# Initializations that depend on previous functions. +# These MUST come last. + +readonly _TEST_INFRA_SCRIPTS_DIR="$(dirname $(get_canonical_path ${BASH_SOURCE[0]}))" +readonly REPO_NAME_FORMATTED="Knative $(capitalize ${REPO_NAME//-/})" + +# Public latest nightly or release yaml files. +readonly KNATIVE_SERVING_RELEASE="$(get_latest_knative_yaml_source "serving" "serving")" +readonly KNATIVE_BUILD_RELEASE="$(get_latest_knative_yaml_source "build" "build")" +readonly KNATIVE_EVENTING_RELEASE="$(get_latest_knative_yaml_source "eventing" "release")" diff --git a/vendor/github.com/knative/test-infra/scripts/markdown-link-check-config.rc b/vendor/github.com/knative/test-infra/scripts/markdown-link-check-config.rc new file mode 100644 index 000000000..9d802a0d4 --- /dev/null +++ b/vendor/github.com/knative/test-infra/scripts/markdown-link-check-config.rc @@ -0,0 +1,5 @@ +# For help, see +# https://github.com/raviqqe/liche/blob/master/README.md + +# Don't check localhost links +-x "^https?://localhost($|[:/].*)" diff --git a/vendor/github.com/knative/test-infra/scripts/markdown-lint-config.rc b/vendor/github.com/knative/test-infra/scripts/markdown-lint-config.rc new file mode 100644 index 000000000..461f891a2 --- /dev/null +++ b/vendor/github.com/knative/test-infra/scripts/markdown-lint-config.rc @@ -0,0 +1,5 @@ +# For help, see +# https://github.com/markdownlint/markdownlint/blob/master/docs/configuration.md + +# Ignore long lines +rules "~MD013" diff --git a/vendor/github.com/knative/test-infra/scripts/presubmit-tests.sh 
b/vendor/github.com/knative/test-infra/scripts/presubmit-tests.sh index 4ff947558..8b077f6dd 100755 --- a/vendor/github.com/knative/test-infra/scripts/presubmit-tests.sh +++ b/vendor/github.com/knative/test-infra/scripts/presubmit-tests.sh @@ -19,6 +19,11 @@ source $(dirname ${BASH_SOURCE})/library.sh +# Custom configuration of presubmit tests +readonly DISABLE_MD_LINTING=${DISABLE_MD_LINTING:-0} +readonly DISABLE_MD_LINK_CHECK=${DISABLE_MD_LINK_CHECK:-0} +readonly PRESUBMIT_TEST_FAIL_FAST=${PRESUBMIT_TEST_FAIL_FAST:-0} + # Extensions or file patterns that don't require presubmit tests. readonly NO_PRESUBMIT_FILES=(\.png \.gitignore \.gitattributes ^OWNERS ^OWNERS_ALIASES ^AUTHORS) @@ -38,7 +43,7 @@ IS_DOCUMENTATION_PR=0 # Returns true if PR only contains the given file regexes. # Parameters: $1 - file regexes, space separated. function pr_only_contains() { - [[ -z "$(echo "${CHANGED_FILES}" | grep -v \(${1// /\\|}\)$))" ]] + [[ -z "$(echo "${CHANGED_FILES}" | grep -v "\(${1// /\\|}\)$")" ]] } # List changed files in the current PR. @@ -59,7 +64,10 @@ function initialize_environment() { echo -e "Changed files in commit ${PULL_PULL_SHA}:\n${CHANGED_FILES}" local no_presubmit_files="${NO_PRESUBMIT_FILES[*]}" pr_only_contains "${no_presubmit_files}" && IS_PRESUBMIT_EXEMPT_PR=1 - pr_only_contains "\.md ${no_presubmit_files}" && IS_DOCUMENTATION_PR=1 + # A documentation PR must contain markdown files + if pr_only_contains "\.md ${no_presubmit_files}"; then + [[ -n "$(echo "${CHANGED_FILES}" | grep '\.md')" ]] && IS_DOCUMENTATION_PR=1 + fi else header "NO CHANGED FILES REPORTED, ASSUMING IT'S AN ERROR AND RUNNING TESTS ANYWAY" fi @@ -96,30 +104,48 @@ function run_build_tests() { fi fi # Don't run post-build tests if pre/build tests failed - if function_exists post_build_tests; then + if (( ! 
failed )) && function_exists post_build_tests; then post_build_tests || failed=1 fi results_banner "Build" ${failed} return ${failed} } -# Default build test runner that: -# * lint and link check markdown files -# * `go build` on the entire repo -# * run `/hack/verify-codegen.sh` (if it exists) -# * check licenses in `/cmd` (if it exists) -function default_build_test_runner() { +# Perform markdown build tests if necessary, unless disabled. +function markdown_build_tests() { + (( DISABLE_MD_LINTING && DISABLE_MD_LINK_CHECK )) && return 0 + # Get changed markdown files (ignore /vendor and deleted files) + local mdfiles="" + for file in $(echo "${CHANGED_FILES}" | grep \.md$ | grep -v ^vendor/); do + [[ -f "${file}" ]] && mdfiles="${mdfiles} ${file}" + done + [[ -z "${mdfiles}" ]] && return 0 local failed=0 - # Ignore markdown files in /vendor - local mdfiles="$(echo "${CHANGED_FILES}" | grep \.md$ | grep -v ^vendor/)" - if [[ -n "${mdfiles}" ]]; then + if (( ! DISABLE_MD_LINTING )); then subheader "Linting the markdown files" lint_markdown ${mdfiles} || failed=1 + fi + if (( ! DISABLE_MD_LINK_CHECK )); then subheader "Checking links in the markdown files" check_links_in_markdown ${mdfiles} || failed=1 fi + return ${failed} +} + +# Default build test runner that: +# * check markdown files +# * `go build` on the entire repo +# * run `/hack/verify-codegen.sh` (if it exists) +# * check licenses in all go packages +function default_build_test_runner() { + local failed=0 + # Perform markdown build checks first + markdown_build_tests || failed=1 # For documentation PRs, just check the md files (( IS_DOCUMENTATION_PR )) && return ${failed} + # Skip build test if there is no go code + local go_pkg_dirs="$(go list ./...)" + [[ -z "${go_pkg_dirs}" ]] && return ${failed} # Ensure all the code builds subheader "Checking that go code builds" go build -v ./... 
|| failed=1 @@ -134,10 +160,8 @@ function default_build_test_runner() { ./hack/verify-codegen.sh || failed=1 fi # Check that we don't have any forbidden licenses in our images. - if [[ -d ./cmd ]]; then - subheader "Checking for forbidden licenses" - check_licenses ./cmd/* || failed=1 - fi + subheader "Checking for forbidden licenses" + check_licenses ${go_pkg_dirs} || failed=1 return ${failed} } @@ -145,6 +169,10 @@ function default_build_test_runner() { # unit test runner. function run_unit_tests() { (( ! RUN_UNIT_TESTS )) && return 0 + if (( IS_DOCUMENTATION_PR )); then + header "Documentation only PR, skipping unit tests" + return 0 + fi header "Running unit tests" local failed=0 # Run pre-unit tests, if any @@ -160,7 +188,7 @@ function run_unit_tests() { fi fi # Don't run post-unit tests if pre/unit tests failed - if function_exists post_unit_tests; then + if (( ! failed )) && function_exists post_unit_tests; then post_unit_tests || failed=1 fi results_banner "Unit" ${failed} @@ -177,7 +205,10 @@ function default_unit_test_runner() { function run_integration_tests() { # Don't run integration tests if not requested OR on documentation PRs (( ! 
RUN_INTEGRATION_TESTS )) && return 0 - (( IS_DOCUMENTATION_PR )) && return 0 + if (( IS_DOCUMENTATION_PR )); then + header "Documentation only PR, skipping integration tests" + return 0 + fi header "Running integration tests" local failed=0 # Run pre-integration tests, if any @@ -236,7 +267,7 @@ function main() { echo ">> gcloud SDK version" gcloud version echo ">> kubectl version" - kubectl version + kubectl version --client echo ">> go version" go version echo ">> git version" @@ -291,12 +322,23 @@ function main() { if (( RUN_BUILD_TESTS || RUN_UNIT_TESTS || RUN_INTEGRATION_TESTS )); then abort "--run-test must be used alone" fi + # If this is a presubmit run, but a documentation-only PR, don't run the test + if (( IS_PRESUBMIT && IS_DOCUMENTATION_PR )); then + header "Documentation only PR, skipping running custom test" + exit 0 + fi ${TEST_TO_RUN} || failed=1 fi run_build_tests || failed=1 - run_unit_tests || failed=1 - run_integration_tests || failed=1 + # If PRESUBMIT_TEST_FAIL_FAST is set to true, don't run unit tests if build tests failed + if (( ! PRESUBMIT_TEST_FAIL_FAST )) || (( ! failed )); then + run_unit_tests || failed=1 + fi + # If PRESUBMIT_TEST_FAIL_FAST is set to true, don't run integration tests if build/unit tests failed + if (( ! PRESUBMIT_TEST_FAIL_FAST )) || (( ! failed )); then + run_integration_tests || failed=1 + fi exit ${failed} } diff --git a/vendor/github.com/knative/test-infra/scripts/release.sh b/vendor/github.com/knative/test-infra/scripts/release.sh index f71fb85cc..e3f92b47a 100755 --- a/vendor/github.com/knative/test-infra/scripts/release.sh +++ b/vendor/github.com/knative/test-infra/scripts/release.sh @@ -22,57 +22,81 @@ source $(dirname ${BASH_SOURCE})/library.sh # GitHub upstream. readonly KNATIVE_UPSTREAM="https://github.com/knative/${REPO_NAME}" +# GCRs for Knative releases. 
+readonly NIGHTLY_GCR="gcr.io/knative-nightly/github.com/knative/${REPO_NAME}" +readonly RELEASE_GCR="gcr.io/knative-releases/github.com/knative/${REPO_NAME}" + +# Georeplicate images to {us,eu,asia}.gcr.io +readonly GEO_REPLICATION=(us eu asia) + # Simple banner for logging purposes. # Parameters: $1 - message to display. function banner() { make_banner "@" "$1" } -# Tag images in the yaml file if $TAG is not empty. +# Tag images in the yaml files if $TAG is not empty. # $KO_DOCKER_REPO is the registry containing the images to tag with $TAG. -# Parameters: $1 - yaml file to parse for images. -function tag_images_in_yaml() { +# Parameters: $1..$n - files to parse for images (non .yaml files are ignored). +function tag_images_in_yamls() { [[ -z ${TAG} ]] && return 0 local SRC_DIR="${GOPATH}/src/" local DOCKER_BASE="${KO_DOCKER_REPO}/${REPO_ROOT_DIR/$SRC_DIR}" - echo "Tagging images under '${DOCKER_BASE}' with ${TAG}" - for image in $(grep -o "${DOCKER_BASE}/[a-z\./-]\+@sha256:[0-9a-f]\+" $1); do - gcloud -q container images add-tag ${image} ${image%%@*}:${TAG} - - # Georeplicate to {us,eu,asia}.gcr.io - gcloud -q container images add-tag ${image} us.${image%%@*}:${TAG} - gcloud -q container images add-tag ${image} eu.${image%%@*}:${TAG} - gcloud -q container images add-tag ${image} asia.${image%%@*}:${TAG} + local GEO_REGIONS="${GEO_REPLICATION[@]} " + echo "Tagging any images under '${DOCKER_BASE}' with ${TAG}" + for file in $@; do + [[ "${file##*.}" != "yaml" ]] && continue + echo "Inspecting ${file}" + for image in $(grep -o "${DOCKER_BASE}/[a-z\./-]\+@sha256:[0-9a-f]\+" ${file}); do + for region in "" ${GEO_REGIONS// /. }; do + gcloud -q container images add-tag ${image} ${region}${image%%@*}:${TAG} + done + done done } -# Copy the given yaml file to the $RELEASE_GCS_BUCKET bucket's "latest" directory. -# If $TAG is not empty, also copy it to $RELEASE_GCS_BUCKET bucket's "previous" directory. -# Parameters: $1 - yaml file to copy. 
-function publish_yaml() { +# Copy the given files to the $RELEASE_GCS_BUCKET bucket's "latest" directory. +# If $TAG is not empty, also copy them to $RELEASE_GCS_BUCKET bucket's "previous" directory. +# Parameters: $1..$n - files to copy. +function publish_to_gcs() { function verbose_gsutil_cp { - local DEST="gs://${RELEASE_GCS_BUCKET}/$2/" - echo "Publishing $1 to ${DEST}" - gsutil cp $1 ${DEST} + local DEST="gs://${RELEASE_GCS_BUCKET}/$1/" + shift + echo "Publishing [$@] to ${DEST}" + gsutil -m cp $@ ${DEST} } - verbose_gsutil_cp $1 latest - if [[ -n ${TAG} ]]; then - verbose_gsutil_cp $1 previous/${TAG} + # Before publishing the files, cleanup the `latest` dir if it exists. + local latest_dir="gs://${RELEASE_GCS_BUCKET}/latest" + if [[ -n "$(gsutil ls ${latest_dir} 2> /dev/null)" ]]; then + echo "Cleaning up '${latest_dir}' first" + gsutil -m rm ${latest_dir}/** fi + verbose_gsutil_cp latest $@ + [[ -n ${TAG} ]] && verbose_gsutil_cp previous/${TAG} $@ } # These are global environment variables. SKIP_TESTS=0 +PRESUBMIT_TEST_FAIL_FAST=1 TAG_RELEASE=0 PUBLISH_RELEASE=0 -BRANCH_RELEASE=0 +PUBLISH_TO_GITHUB=0 TAG="" +BUILD_COMMIT_HASH="" +BUILD_YYYYMMDD="" +BUILD_TIMESTAMP="" +BUILD_TAG="" RELEASE_VERSION="" RELEASE_NOTES="" RELEASE_BRANCH="" -RELEASE_GCS_BUCKET="" -KO_FLAGS="" -export KO_DOCKER_REPO="" +RELEASE_GCS_BUCKET="knative-nightly/${REPO_NAME}" +KO_FLAGS="-P" +VALIDATION_TESTS="./test/presubmit-tests.sh" +YAMLS_TO_PUBLISH="" +ARTIFACTS_TO_PUBLISH="" +FROM_NIGHTLY_RELEASE="" +FROM_NIGHTLY_RELEASE_GCS="" +export KO_DOCKER_REPO="gcr.io/knative-nightly" export GITHUB_TOKEN="" # Convenience function to run the hub tool. @@ -81,6 +105,14 @@ function hub_tool() { run_go_tool github.com/github/hub hub $@ } +# Shortcut to "git push" that handles authentication. +# Parameters: $1..$n - arguments to "git push ". 
+function git_push() { + local repo_url="${KNATIVE_UPSTREAM}" + [[ -n "${GITHUB_TOKEN}}" ]] && repo_url="${repo_url/:\/\//:\/\/${GITHUB_TOKEN}@}" + git push ${repo_url} $@ +} + # Return the master version of a release. # For example, "v0.2.1" returns "0.2" # Parameters: $1 - release version label. @@ -98,6 +130,13 @@ function release_build_number() { echo "${tokens[2]}" } +# Return the short commit SHA from a release tag. +# For example, "v20010101-deadbeef" returns "deadbeef". +function hash_from_tag() { + local tokens=(${1//-/ }) + echo "${tokens[1]}" +} + # Setup the repository upstream, if not set. function setup_upstream() { # hub and checkout need the upstream URL to be set @@ -116,6 +155,44 @@ function setup_branch() { git fetch ${KNATIVE_UPSTREAM} ${RELEASE_BRANCH}:upstream/${RELEASE_BRANCH} } +# Setup version, branch and release notes for a auto release. +function prepare_auto_release() { + echo "Auto release requested" + TAG_RELEASE=1 + PUBLISH_RELEASE=1 + + git fetch --all || abort "error fetching branches/tags from remote" + local tags="$(git tag | cut -d 'v' -f2 | cut -d '.' 
-f1-2 | sort | uniq)" + local branches="$( { (git branch -r | grep upstream/release-) ; (git branch | grep release-); } | cut -d '-' -f2 | sort | uniq)" + + echo "Versions released (from tags): [" ${tags} "]" + echo "Versions released (from branches): [" ${branches} "]" + + local release_number="" + for i in ${branches}; do + release_number="${i}" + for j in ${tags}; do + if [[ "${i}" == "${j}" ]]; then + release_number="" + fi + done + done + + if [[ -z "${release_number}" ]]; then + echo "*** No new release will be generated, as no new branches exist" + exit 0 + fi + + RELEASE_VERSION="${release_number}.0" + RELEASE_BRANCH="release-${release_number}" + echo "Will create release ${RELEASE_VERSION} from branch ${RELEASE_BRANCH}" + # If --release-notes not used, add a placeholder + if [[ -z "${RELEASE_NOTES}" ]]; then + RELEASE_NOTES="$(mktemp)" + echo "[add release notes here]" > ${RELEASE_NOTES} + fi +} + # Setup version, branch and release notes for a "dot" release. function prepare_dot_release() { echo "Dot release requested" @@ -164,19 +241,92 @@ function prepare_dot_release() { fi } +# Setup source nightly image for a release. +function prepare_from_nightly_release() { + echo "Release from nightly requested" + SKIP_TESTS=1 + if [[ "${FROM_NIGHTLY_RELEASE}" == "latest" ]]; then + echo "Finding the latest nightly release" + find_latest_nightly "${NIGHTLY_GCR}" || abort "cannot find the latest nightly release" + echo "Latest nightly is ${FROM_NIGHTLY_RELEASE}" + fi + readonly FROM_NIGHTLY_RELEASE_GCS="gs://knative-nightly/${REPO_NAME}/previous/${FROM_NIGHTLY_RELEASE}" + gsutil ls -d "${FROM_NIGHTLY_RELEASE_GCS}" > /dev/null \ + || abort "nightly release ${FROM_NIGHTLY_RELEASE} doesn't exist" +} + +# Build a release from an existing nightly one. 
+function build_from_nightly_release() { + banner "Building the release" + echo "Fetching manifests from nightly" + local yamls_dir="$(mktemp -d)" + gsutil -m cp -r "${FROM_NIGHTLY_RELEASE_GCS}/*" "${yamls_dir}" || abort "error fetching manifests" + # Update references to release GCR + for yaml in ${yamls_dir}/*.yaml; do + sed -i -e "s#${NIGHTLY_GCR}#${RELEASE_GCR}#" "${yaml}" + done + ARTIFACTS_TO_PUBLISH="$(find ${yamls_dir} -name '*.yaml' -printf '%p ')" + echo "Copying nightly images" + copy_nightly_images_to_release_gcr "${NIGHTLY_GCR}" "${FROM_NIGHTLY_RELEASE}" + # Create a release branch from the nightly release tag. + local commit="$(hash_from_tag ${FROM_NIGHTLY_RELEASE})" + echo "Creating release branch ${RELEASE_BRANCH} at commit ${commit}" + git checkout -b ${RELEASE_BRANCH} ${commit} || abort "cannot create branch" + git_push upstream ${RELEASE_BRANCH} || abort "cannot push branch" +} + +# Build a release from source. +function build_from_source() { + run_validation_tests ${VALIDATION_TESTS} + banner "Building the release" + build_release + # Do not use `||` above or any error will be swallowed. + if [[ $? -ne 0 ]]; then + abort "error building the release" + fi +} + +# Copy tagged images from the nightly GCR to the release GCR, tagging them 'latest'. +# This is a recursive function, first call must pass $NIGHTLY_GCR as first parameter. +# Parameters: $1 - GCR to recurse into. +# $2 - tag to be used to select images to copy. 
+function copy_nightly_images_to_release_gcr() { + for entry in $(gcloud --format="value(name)" container images list --repository="$1"); do + copy_nightly_images_to_release_gcr "${entry}" "$2" + # Copy each image with the given nightly tag + for x in $(gcloud --format="value(tags)" container images list-tags "${entry}" --filter="tags=$2" --limit=1); do + local path="${entry/${NIGHTLY_GCR}}" # Image "path" (remove GCR part) + local dst="${RELEASE_GCR}${path}:latest" + gcloud container images add-tag "${entry}:$2" "${dst}" || abort "error copying image" + done + done +} + +# Recurse into GCR and find the nightly tag of the first `latest` image found. +# Parameters: $1 - GCR to recurse into. +function find_latest_nightly() { + for entry in $(gcloud --format="value(name)" container images list --repository="$1"); do + find_latest_nightly "${entry}" && return 0 + for tag in $(gcloud --format="value(tags)" container images list-tags "${entry}" \ + --filter="tags=latest" --limit=1); do + local tags=( ${tag//,/ } ) + # Skip if more than one nightly tag, as we don't know what's the latest. + if [[ ${#tags[@]} -eq 2 ]]; then + local nightly_tag="${tags[@]/latest}" # Remove 'latest' tag + FROM_NIGHTLY_RELEASE="${nightly_tag// /}" # Remove spaces + return 0 + fi + done + done + return 1 +} + # Parses flags and sets environment variables accordingly. 
function parse_flags() { - TAG="" - RELEASE_VERSION="" - RELEASE_NOTES="" - RELEASE_BRANCH="" - KO_FLAGS="-P" - KO_DOCKER_REPO="gcr.io/knative-nightly" - RELEASE_GCS_BUCKET="knative-nightly/${REPO_NAME}" - GITHUB_TOKEN="" local has_gcr_flag=0 local has_gcs_flag=0 local is_dot_release=0 + local is_auto_release=0 cd ${REPO_ROOT_DIR} while [[ $# -ne 0 ]]; do @@ -188,13 +338,16 @@ function parse_flags() { --publish) PUBLISH_RELEASE=1 ;; --nopublish) PUBLISH_RELEASE=0 ;; --dot-release) is_dot_release=1 ;; + --auto-release) is_auto_release=1 ;; + --from-latest-nightly) FROM_NIGHTLY_RELEASE=latest ;; *) [[ $# -ge 2 ]] || abort "missing parameter after $1" shift case ${parameter} in --github-token) [[ ! -f "$1" ]] && abort "file $1 doesn't exist" - GITHUB_TOKEN="$(cat $1)" + # Remove any trailing newline/space from token + GITHUB_TOKEN="$(echo -n $(cat $1))" [[ -n "${GITHUB_TOKEN}" ]] || abort "file $1 is empty" ;; --release-gcr) @@ -217,12 +370,38 @@ function parse_flags() { [[ ! -f "$1" ]] && abort "file $1 doesn't exist" RELEASE_NOTES=$1 ;; + --from-nightly) + [[ $1 =~ ^v[0-9]+-[0-9a-f]+$ ]] || abort "nightly tag must be 'vYYYYMMDD-commithash'" + FROM_NIGHTLY_RELEASE=$1 + ;; *) abort "unknown option ${parameter}" ;; esac esac shift done + # Do auto release unless release is forced + if (( is_auto_release )); then + (( is_dot_release )) && abort "cannot have both --dot-release and --auto-release set simultaneously" + [[ -n "${RELEASE_VERSION}" ]] && abort "cannot have both --version and --auto-release set simultaneously" + [[ -n "${RELEASE_BRANCH}" ]] && abort "cannot have both --branch and --auto-release set simultaneously" + [[ -n "${FROM_NIGHTLY_RELEASE}" ]] && abort "cannot have --auto-release with a nightly source" + setup_upstream + prepare_auto_release + fi + + # Setup source nightly image + if [[ -n "${FROM_NIGHTLY_RELEASE}" ]]; then + (( is_dot_release )) && abort "dot releases are built from source" + [[ -z "${RELEASE_VERSION}" ]] && abort "release version must 
be specified with --version" + # TODO(adrcunha): "dot" releases from release branches require releasing nightlies + # for such branches, which we don't do yet. + [[ "${RELEASE_VERSION}" =~ ^[0-9]+\.[0-9]+\.0$ ]] || abort "version format must be 'X.Y.0'" + RELEASE_BRANCH="release-$(master_version ${RELEASE_VERSION})" + prepare_from_nightly_release + setup_upstream + fi + # Setup dot releases if (( is_dot_release )); then setup_upstream @@ -238,30 +417,33 @@ function parse_flags() { RELEASE_GCS_BUCKET="" fi - if (( TAG_RELEASE )); then - # Get the commit, excluding any tags but keeping the "dirty" flag - local commit="$(git describe --always --dirty --match '^$')" - [[ -n "${commit}" ]] || abort "Error getting the current commit" - # Like kubernetes, image tag is vYYYYMMDD-commit - TAG="v$(date +%Y%m%d)-${commit}" - fi - - if [[ -n "${RELEASE_VERSION}" ]]; then - TAG="v${RELEASE_VERSION}" - fi + # Get the commit, excluding any tags but keeping the "dirty" flag + BUILD_COMMIT_HASH="$(git describe --always --dirty --match '^$')" + [[ -n "${BUILD_COMMIT_HASH}" ]] || abort "error getting the current commit" + BUILD_YYYYMMDD="$(date -u +%Y%m%d)" + BUILD_TIMESTAMP="$(date -u '+%Y-%m-%d %H:%M:%S')" + BUILD_TAG="v${BUILD_YYYYMMDD}-${BUILD_COMMIT_HASH}" - [[ -n "${RELEASE_VERSION}" ]] && (( PUBLISH_RELEASE )) && BRANCH_RELEASE=1 + (( TAG_RELEASE )) && TAG="${BUILD_TAG}" + [[ -n "${RELEASE_VERSION}" ]] && TAG="v${RELEASE_VERSION}" + [[ -n "${RELEASE_VERSION}" && -n "${RELEASE_BRANCH}" ]] && (( PUBLISH_RELEASE )) && PUBLISH_TO_GITHUB=1 + readonly BUILD_COMMIT_HASH + readonly BUILD_YYYYMMDD + readonly BUILD_TIMESTAMP + readonly BUILD_TAG readonly SKIP_TESTS readonly TAG_RELEASE readonly PUBLISH_RELEASE - readonly BRANCH_RELEASE + readonly PUBLISH_TO_GITHUB readonly TAG readonly RELEASE_VERSION readonly RELEASE_NOTES readonly RELEASE_BRANCH readonly RELEASE_GCS_BUCKET readonly KO_DOCKER_REPO + readonly VALIDATION_TESTS + readonly FROM_NIGHTLY_RELEASE } # Run tests (unless 
--skip-tests was passed). Conveniently displays a banner indicating so. @@ -277,15 +459,30 @@ function run_validation_tests() { fi } -# Initialize everything (flags, workspace, etc) for a release. -function initialize() { +# Publishes the generated artifacts to GCS, GitHub, etc. +# Parameters: $1..$n - files to add to the release. +function publish_artifacts() { + (( ! PUBLISH_RELEASE )) && return + tag_images_in_yamls ${ARTIFACTS_TO_PUBLISH} + publish_to_gcs ${ARTIFACTS_TO_PUBLISH} + publish_to_github ${ARTIFACTS_TO_PUBLISH} + banner "New release published successfully" +} + +# Entry point for a release script. +function main() { + function_exists build_release || abort "function 'build_release()' not defined" + [[ -x ${VALIDATION_TESTS} ]] || abort "test script '${VALIDATION_TESTS}' doesn't exist" parse_flags $@ # Log what will be done and where. banner "Release configuration" + echo "- gcloud user: $(gcloud config get-value core/account)" + echo "- Go path: ${GOPATH}" + echo "- Repository root: ${REPO_ROOT_DIR}" echo "- Destination GCR: ${KO_DOCKER_REPO}" (( SKIP_TESTS )) && echo "- Tests will NOT be run" || echo "- Tests will be run" if (( TAG_RELEASE )); then - echo "- Artifacts will tagged '${TAG}'" + echo "- Artifacts will be tagged '${TAG}'" else echo "- Artifacts WILL NOT be tagged" fi @@ -294,46 +491,68 @@ function initialize() { else echo "- Release will not be published" fi - if (( BRANCH_RELEASE )); then - echo "- Release WILL BE branched from '${RELEASE_BRANCH}'" + if (( PUBLISH_TO_GITHUB )); then + echo "- Release WILL BE published to GitHub" + fi + if [[ -n "${FROM_NIGHTLY_RELEASE}" ]]; then + echo "- Release will be A COPY OF '${FROM_NIGHTLY_RELEASE}' nightly" + else + echo "- Release will be BUILT FROM SOURCE" + [[ -n "${RELEASE_BRANCH}" ]] && echo "- Release will be built from branch '${RELEASE_BRANCH}'" fi [[ -n "${RELEASE_NOTES}" ]] && echo "- Release notes are generated from '${RELEASE_NOTES}'" # Checkout specific branch, if necessary - if (( 
BRANCH_RELEASE )); then + if [[ -n "${RELEASE_BRANCH}" && -z "${FROM_NIGHTLY_RELEASE}" ]]; then setup_upstream setup_branch git checkout upstream/${RELEASE_BRANCH} || abort "cannot checkout branch ${RELEASE_BRANCH}" fi + + if [[ -n "${FROM_NIGHTLY_RELEASE}" ]]; then + build_from_nightly_release + else + set -e -o pipefail + build_from_source + set +e +o pipefail + fi + # TODO(adrcunha): Remove once all repos use ARTIFACTS_TO_PUBLISH. + [[ -z "${ARTIFACTS_TO_PUBLISH}" ]] && ARTIFACTS_TO_PUBLISH="${YAMLS_TO_PUBLISH}" + [[ -z "${ARTIFACTS_TO_PUBLISH}" ]] && abort "no artifacts were generated" + # Ensure no empty file will be published. + for artifact in ${ARTIFACTS_TO_PUBLISH}; do + [[ -s ${artifact} ]] || abort "Artifact ${artifact} is empty" + done + echo "New release built successfully" + publish_artifacts } -# Create a new release on GitHub, also git tagging it (unless this is not a versioned release). -# Parameters: $1 - Module name (e.g., "Knative Serving"). -# $2 - YAML files to add to the release, space separated. -function branch_release() { - (( BRANCH_RELEASE )) || return 0 - local title="$1 release ${TAG}" +# Publishes a new release on GitHub, also git tagging it (unless this is not a versioned release). +# Parameters: $1..$n - files to add to the release. 
+function publish_to_github() { + (( PUBLISH_TO_GITHUB )) || return 0 + local title="${REPO_NAME_FORMATTED} release ${TAG}" local attachments=() local description="$(mktemp)" local attachments_dir="$(mktemp -d)" - # Copy each YAML to a separate dir - for yaml in $2; do - cp ${yaml} ${attachments_dir}/ - attachments+=("--attach=${yaml}#$(basename ${yaml})") + local commitish="" + # Copy files to a separate dir + for artifact in $@; do + cp ${artifact} ${attachments_dir}/ + attachments+=("--attach=${artifact}#$(basename ${artifact})") done echo -e "${title}\n" > ${description} if [[ -n "${RELEASE_NOTES}" ]]; then cat ${RELEASE_NOTES} >> ${description} fi git tag -a ${TAG} -m "${title}" - local repo_url="${KNATIVE_UPSTREAM}" - [[ -n "${GITHUB_TOKEN}}" ]] && repo_url="${repo_url/:\/\//:\/\/${GITHUB_TOKEN}@}" - hub_tool push ${repo_url} tag ${TAG} + git_push tag ${TAG} + [[ -n "${RELEASE_BRANCH}" ]] && commitish="--commitish=${RELEASE_BRANCH}" hub_tool release create \ --prerelease \ ${attachments[@]} \ --file=${description} \ - --commitish=${RELEASE_BRANCH} \ + ${commitish} \ ${TAG} }