Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Multiple CI Enhancements #7428

Merged
merged 13 commits into from
Jan 13, 2023
31 changes: 19 additions & 12 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ jobs:
boards-aarch: ${{ steps.set-matrix.outputs.boards-aarch }}
steps:
- name: Dump GitHub context
run: echo "$GITHUB_CONTEXT"
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- uses: actions/checkout@v3
with:
submodules: false
Expand Down Expand Up @@ -135,21 +135,27 @@ jobs:
GITHUB_TOKEN: ${{ github.token }}
EXCLUDE_COMMIT: ${{ github.event.after }}
run: python3 -u ci_changes_per_commit.py
- name: Set head sha
if: github.event_name == 'pull_request'
run: echo "HEAD_SHA=$(git show -s --format=%s $GITHUB_SHA | grep -o -P "(?<=Merge ).*(?= into)")" >> $GITHUB_ENV
- name: Set base sha
if: github.event_name == 'pull_request'
run: |
git fetch --no-tags --no-recurse-submodules --depth=$((DEPTH + 1)) origin $HEAD_SHA
echo "BASE_SHA=$(git rev-list $HEAD_SHA --skip=$DEPTH --max-count=1)" >> $GITHUB_ENV
env:
DEPTH: ${{ steps.get-last-commit-with-checks.outputs.commit_depth || github.event.pull_request.commits }}
- name: Get changes
id: get-changes
if: github.event_name == 'pull_request'
uses: tj-actions/changed-files@v34
with:
json: true
sha: ${{ steps.get-last-commit-with-checks.outputs.commit && github.event.after }}
base_sha: ${{ steps.get-last-commit-with-checks.outputs.commit }}
run: echo $(git diff $BASE_SHA...$HEAD_SHA --name-only) | echo "changed_files=[\"$(sed "s/ /\", \"/g")\"]" >> $GITHUB_OUTPUT
- name: Set matrix
id: set-matrix
working-directory: tools
env:
CHANGED_FILES: ${{ steps.get-changes.outputs.all_changed_and_modified_files }}
LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.checkruns }}
run: python3 -u ci_set_matrix.py
env:
CHANGED_FILES: ${{ steps.get-changes.outputs.changed_files }}
LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.check_runs }}


mpy-cross-mac:
Expand Down Expand Up @@ -412,14 +418,15 @@ jobs:
path: ${{ github.workspace }}/.idf_tools
key: ${{ runner.os }}-idf-tools-${{ hashFiles('.git/modules/ports/espressif/esp-idf/HEAD') }}-${{ steps.py3.outputs.python-path }}-20220404
- name: Clone IDF submodules
run: |
(cd $IDF_PATH && git submodule update --init)
run: git submodule update --init $IDF_PATH
env:
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
- name: Install IDF tools
run: |
echo "Installing ESP-IDF tools"
$IDF_PATH/tools/idf_tools.py --non-interactive install required
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
echo "Installing Python environment and packages"
$IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
rm -rf $IDF_TOOLS_PATH/dist
env:
Expand All @@ -437,7 +444,6 @@ jobs:
run: |
source $IDF_PATH/export.sh
gcc --version
xtensa-esp32s2-elf-gcc --version
python3 --version
ninja --version
cmake --version
Expand Down Expand Up @@ -471,6 +477,7 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))


build-aarch:
runs-on: ubuntu-20.04
needs: test
Expand Down
107 changes: 58 additions & 49 deletions tools/ci_changes_per_commit.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
}
nodes {
commit {
checkSuites(first: 3) {
checkSuites(first: 100) {
nodes {
conclusion
workflowRun {
Expand All @@ -39,7 +39,7 @@
}
"""

QUERY_CHECKRUNS = """
QUERY_CHECK_RUNS = """
query ($checkSuiteID: ID!,
$afterFailedRun: String, $afterIncompleteRun: String,
$includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
Expand Down Expand Up @@ -92,7 +92,7 @@
}


query_variables_checkruns = {
query_variables_check_runs = {
"checkSuiteID": "",
"afterFailedRun": None,
"afterIncompleteRun": None,
Expand All @@ -111,13 +111,11 @@ def __init__(self, query, variables={}, headers={}):
self.headers = headers

def paginate(self, page_info, name):
has_page = (
page_info["hasNextPage"] if name.startswith("after") else page_info["hasPreviousPage"]
)
has_page = page_info["hasNextPage" if name.startswith("after") else "hasPreviousPage"]
if has_page:
self.variables[name] = (
page_info["endCursor"] if name.startswith("after") else page_info["startCursor"]
)
self.variables[name] = page_info[
"endCursor" if name.startswith("after") else "startCursor"
]
return has_page

def fetch(self):
Expand All @@ -141,28 +139,31 @@ def set_output(name, value):
print(f"Would set GitHub actions output {name} to '{value}'")


def get_commit_and_checksuite(query_commits):
commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]

if commits["totalCount"] > 0:
for commit in reversed(commits["nodes"]):
commit = commit["commit"]
commit_sha = commit["oid"]
if commit_sha == os.environ["EXCLUDE_COMMIT"]:
continue
checksuites = commit["checkSuites"]
if checksuites["totalCount"] > 0:
for checksuite in checksuites["nodes"]:
if checksuite["workflowRun"]["workflow"]["name"] == "Build CI":
return [
commit_sha,
checksuite["id"] if checksuite["conclusion"] != "SUCCESS" else None,
]
else:
if query_commits.paginate(commits["pageInfo"], "beforeCommit"):
return get_commit_and_checksuite(query_commits)

return [None, None]
def get_commit_depth_and_check_suite(query_commits):
    """Walk the PR's commits newest-to-oldest until one has a "Build CI" check suite.

    Returns a two-element list:
      [{"sha": <commit sha>, "depth": <commits walked>}, <check suite id or None>]
    where the suite id is None when that suite already concluded SUCCESS, or
    [None, None] when no commit with a "Build CI" suite exists in the PR.
    """
    commit_depth = 0
    while True:
        commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
        if commits["totalCount"] > 0:
            nodes = commits["nodes"]
            # GraphQL returns oldest-first within the page; walk newest-first.
            nodes.reverse()
            # Skip the commit named by $EXCLUDE_COMMIT (the workflow sets this
            # to github.event.after, i.e. the commit that triggered this run).
            if nodes[0]["commit"]["oid"] == os.environ["EXCLUDE_COMMIT"]:
                nodes.pop(0)
            for commit in nodes:
                commit_depth += 1
                commit = commit["commit"]
                commit_sha = commit["oid"]
                check_suites = commit["checkSuites"]
                if check_suites["totalCount"] > 0:
                    for check_suite in check_suites["nodes"]:
                        if check_suite["workflowRun"]["workflow"]["name"] == "Build CI":
                            return [
                                {"sha": commit_sha, "depth": commit_depth},
                                check_suite["id"]
                                if check_suite["conclusion"] != "SUCCESS"
                                else None,
                            ]
        # Nothing matched on this page: fetch the previous page, or give up
        # when pagination is exhausted.
        if not query_commits.paginate(commits["pageInfo"], "beforeCommit"):
            return [None, None]


def append_runs_to_list(runs, bad_runs_by_matrix):
Expand All @@ -180,53 +181,61 @@ def append_runs_to_list(runs, bad_runs_by_matrix):
bad_runs_by_matrix[matrix].append(res_board.group()[1:-1])


def get_bad_check_runs(query_check_runs):
    """Collect failed and incomplete check runs for a check suite.

    Pages through both run types and appends each run to a dict keyed by
    build matrix (via append_runs_to_list). After the first fetch, only the
    run types that still have more pages are re-requested, by flipping the
    corresponding include*Run query variable.

    Returns the dict of bad runs grouped by matrix name.
    """
    more_pages = True
    bad_runs_by_matrix = {}
    run_types = ["failed", "incomplete"]

    while more_pages:
        check_runs = query_check_runs.fetch()["data"]["node"]
        more_pages = False

        for run_type in run_types:
            run_type_camel = run_type.capitalize() + "Run"
            run_type = run_type + "Runs"

            append_runs_to_list(check_runs[run_type], bad_runs_by_matrix)

            # Only keep fetching when this run type reports another page.
            if query_check_runs.paginate(
                check_runs[run_type]["pageInfo"], "after" + run_type_camel
            ):
                query_check_runs.variables["include" + run_type_camel] = True
                more_pages = True

    return bad_runs_by_matrix


def set_commit(commit):
    """Expose the chosen commit as the commit_sha/commit_depth step outputs."""
    for field in ("sha", "depth"):
        set_output(f"commit_{field}", commit[field])


def main():
    """Locate the newest usable PR commit and publish CI step outputs.

    Finds the most recent commit with a "Build CI" check suite. Always
    publishes commit_sha/commit_depth when a commit is found; additionally
    publishes check_runs (JSON) when that suite has failed or incomplete
    runs. Exits early when there is nothing actionable.
    """
    query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
    query_commits.variables["owner"], query_commits.variables["name"] = os.environ["REPO"].split(
        "/"
    )

    commit, check_suite = get_commit_depth_and_check_suite(query_commits)

    if not check_suite:
        # No unsuccessful suite: still publish the commit if one was found.
        if commit:
            set_commit(commit)
        else:
            print("Abort: No check suite found")
        quit()

    query_check_runs = Query(QUERY_CHECK_RUNS, query_variables_check_runs, headers)
    query_check_runs.variables["checkSuiteID"] = check_suite

    check_runs = get_bad_check_runs(query_check_runs)

    if not check_runs:
        # Suite was not SUCCESS yet no failed/incomplete runs were returned.
        print("Abort: No check runs found")
        quit()

    set_commit(commit)
    set_output("check_runs", json.dumps(check_runs))


if __name__ == "__main__":
Expand Down
2 changes: 2 additions & 0 deletions tools/ci_fetch_deps.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,8 @@ def run(title, command, check=True):
"Fetch back to the start of 2021 to get commit history",
f'git fetch --recurse-submodules=no --shallow-since="2021-07-01" origin {ref}',
)
# See https://stackoverflow.com/questions/63878612/git-fatal-error-in-object-unshallow-sha-1#comment118418373_63879454
run('Fix for bug "fatal: error in object: unshallow"', "git repack -d")
run("Init submodules", "git submodule init")
run("Submodule status", "git submodule status")

Expand Down
46 changes: 29 additions & 17 deletions tools/ci_set_matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import sys
import json
import pathlib
import subprocess
from concurrent.futures import ThreadPoolExecutor

tools_dir = pathlib.Path(__file__).resolve().parent
Expand Down Expand Up @@ -82,15 +83,15 @@
last_failed_jobs = json.loads(j)


def set_output(name: str, value):
    """Record a GitHub Actions step output.

    When running in CI ($GITHUB_OUTPUT is set), appends "name=value" to the
    output file; otherwise prints what would have been set, for local runs.
    """
    if "GITHUB_OUTPUT" in os.environ:
        with open(os.environ["GITHUB_OUTPUT"], "at") as f:
            print(f"{name}={value}", file=f)
    else:
        print(f"Would set GitHub actions output {name} to '{value}'")


def set_boards_to_build(build_all):
def set_boards_to_build(build_all: bool):
# Get boards in json format
boards_info_json = build_board_info.get_board_mapping()
all_board_ids = set()
Expand Down Expand Up @@ -228,23 +229,34 @@ def get_settings(board):
set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))


def set_docs_to_build(build_doc: bool):
    """Decide whether the documentation build job must run and set its output.

    Docs are built when the caller already requested it, when the previous
    run's "build-doc" job failed, or when a changed file matches the docs
    pattern. For changed .c files, the git diff between $BASE_SHA and
    $HEAD_SHA is additionally checked for touched "//|" lines (the embedded
    doc-comment marker), so pure code changes don't trigger a docs build.
    """
    if not build_doc:
        if "build-doc" in last_failed_jobs:
            build_doc = True
        else:
            doc_pattern = re.compile(
                r"^(?:.github/workflows/|docs|extmod/ulab|(?:(?:ports/\w+/bindings|shared-bindings)\S+\.c|conf\.py|tools/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
            )
            # Prefix paths with the workspace dir (plus "/") when set, so the
            # git diff path matches regardless of the current directory.
            github_workspace = os.environ.get("GITHUB_WORKSPACE") or ""
            github_workspace = github_workspace and github_workspace + "/"
            for p in changed_files:
                if doc_pattern.search(p) and (
                    (
                        # Non-empty stdout means at least one added/removed
                        # "//|" line; NOTE(review): "\/" is a deprecated
                        # escape in a Python string (grep sees "/" either way).
                        subprocess.run(
                            f"git diff -U0 $BASE_SHA...$HEAD_SHA {github_workspace + p} | grep -o -m 1 '^[+-]\/\/|'",
                            capture_output=True,
                            shell=True,
                        ).stdout
                    )
                    if p.endswith(".c")
                    else True
                ):
                    build_doc = True
                    break

    # Set the step outputs
    print("Building docs:", build_doc)
    set_output("build-doc", build_doc)


def check_changed_files():
Expand Down