Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,70 +39,97 @@

# ================================
#
# This script is used by CI to merge several report JSON files generated by unittests or the retagger.
# The merged retagger output is then consumed via:
#   python3 runner.py merge-tags-from-reports reports-merged.json
#
# ================================

import os
import sys
import json
import glob
import argparse
from dataclasses import dataclass, asdict


@dataclass
class Test:
    # One test result as found in a report JSON file.
    name: str
    status: str
    # NOTE(review): may be None when a report omits "duration" (read via dict.get).
    duration: str


def read_report(path: str, status_filter: list[str]) -> list[Test]:
    """Read a single report JSON file and return its tests.

    Args:
        path: Path to a JSON file containing a list of test-result objects
            with at least "name" and "status" keys.
        status_filter: Lowercase substrings to match against each test's
            lowercased status. An empty list keeps every test.

    Returns:
        The matching tests. Unreadable or malformed files yield an empty
        list and a message on stderr so one broken report does not abort
        the whole merge (deliberate best-effort behavior).
    """
    tests = []

    try:
        with open(path) as f:
            report_tests = json.load(f)

        for test in report_tests:
            status = test['status'].lower()
            # Substring match: e.g. filter "fail" matches status "FAILED".
            if not status_filter or any(s in status for s in status_filter):
                tests.append(Test(
                    name=test['name'],
                    status=test['status'],
                    duration=test.get('duration'),
                ))
    except Exception as e:
        # Broad on purpose: covers OSError, JSONDecodeError, and missing keys.
        # (The previous tuple `(json.JSONDecodeError, Exception)` was redundant,
        # since JSONDecodeError is already an Exception subclass.)
        print(f"Error reading {path}: {e}", file=sys.stderr)

    return tests


def merge_reports(source_dir: str, pattern: str, status_filter: list[str]) -> dict[str, Test]:
    """Collect and deduplicate tests from every report file matching pattern.

    Args:
        source_dir: Directory to search in.
        pattern: Glob pattern (supports "**" via recursive globbing); only
            files ending in ".json" are considered.
        status_filter: Passed through to read_report().

    Returns:
        Mapping of test name -> Test; the first occurrence of a name wins.
    """
    merged_tests: dict[str, Test] = {}

    path = os.path.join(source_dir, pattern)
    files = [file for file in glob.glob(path, recursive=True) if file.endswith(".json")]

    if not files:
        print(f"No files found matching pattern: {path}")
        return merged_tests

    print(f"Merging {len(files)} reports")

    for file in files:
        for test in read_report(file, status_filter):
            # First occurrence wins; duplicates from later reports are ignored.
            merged_tests.setdefault(test.name, test)

    return merged_tests


def export_reports(merged_tests: dict[str, Test], outfile: str, status_filter: list[str]):
    """Write the merged tests to outfile as a pretty-printed JSON list.

    status_filter is accepted for interface symmetry with merge_reports()
    but is not used here: filtering already happened in read_report().
    """
    output = [asdict(test) for test in merged_tests.values()]

    with open(outfile, "w") as f:
        json.dump(output, f, indent=2)

    print(f"Merged {len(merged_tests)} tests to {outfile}")


def main(outfile: str, source_dir: str, pattern: str, status_filter: str):
    """Entry point: parse the comma-separated status filter, merge, export."""
    # "" -> no filtering; "a, b" -> ["a", "b"] (whitespace-tolerant).
    status_list = [s.strip() for s in status_filter.split(',')] if status_filter else []

    merged_tests = merge_reports(source_dir, pattern, status_list)
    export_reports(merged_tests, outfile, status_list)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Merge test report JSON files")
    parser.add_argument("--outfile", help="Output file name (optional)", default="reports-merged.json")
    parser.add_argument("--dir", help="Reports files directory (optional)", default=".")
    parser.add_argument("--pattern", default="*", help="Pattern matching for input files (optional)")
    parser.add_argument("--status-filter", default="", help="Comma-separated list of status to filter (optional)")

    args = parser.parse_args()
    main(
        outfile=args.outfile,
        source_dir=args.dir,
        pattern=args.pattern,
        status_filter=args.status_filter
    )
13 changes: 13 additions & 0 deletions .github/workflows/ci-matrix-gen.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ on:
required: false
type: number
default: 7
export_test_reports:
required: false
type: boolean
default: false

jobs:
generate-tier1:
Expand Down Expand Up @@ -274,6 +278,15 @@ jobs:
${{ matrix.logs }}
retention-days: ${{ inputs.logs_retention_days || 15 }}
if-no-files-found: ignore

- name: Upload test reports
if: ${{ inputs.export_test_reports && (success() || failure()) }}
uses: actions/upload-artifact@v5
continue-on-error: true
with:
name: ${{ format('{0}_test_reports', matrix.name) }}
path: /tmp/test-report-*.json
retention-days: 1

tier2:
if: ${{ success() || inputs.jobs_to_run }}
Expand Down
105 changes: 91 additions & 14 deletions .github/workflows/ci-post-merge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,27 +7,66 @@ jobs:
check-ci-and-notify:
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@v8
- name: Wait for CI
id: wait-for-ci
uses: actions/github-script@v8
with:
script: |
const GRAALVMBOT_LOGIN = "graalvmbot";
const pr = context.payload.pull_request;
if (!pr || !pr.number || pr.state !== "closed") return;
if (!pr || !pr.number || pr.state !== "closed") {
console.log("Not a closed pull request event.");
return;
}

const author = pr.user;
const sender = context.payload.sender;
const assignees = pr.assignees || [];
if (!author || author.login !== GRAALVMBOT_LOGIN) return;
if (assignees.length !== 1) return;
if (!sender || sender.login !== GRAALVMBOT_LOGIN) {
console.log(`PR closed by ${sender ? sender.login : "unknown"}, not ${GRAALVMBOT_LOGIN}. Skipping CI check.`);
return;
}
if (assignees.length !== 1) {
console.log(`Expected exactly 1 assignee, found ${assignees.length}. Skipping CI check.`);
return;
}

const sha = pr.head.sha;

const runsResp = await github.rest.actions.listWorkflowRunsForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
head_sha: sha,
event: "pull_request",
per_page: 10
});
// Wait for CI workflow to complete
const maxWaitMs = 4 * 60 * 60 * 1000;
const intervalMs = 15 * 60 * 1000;
const startMs = Date.now();
let runsResp = null;
while (Date.now() - startMs < maxWaitMs) {
console.log(`Waiting for workflow with SHA ${sha} to complete...`);
try {
runsResp = await github.rest.actions.listWorkflowRunsForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
head_sha: sha,
});
} catch (err) {
console.log(`warning: failed to fetch workflow runs: ${err}`);
await new Promise(r => setTimeout(r, intervalMs));
continue;
}

const hasCompleted = runsResp.data.workflow_runs.some(run => run.head_sha === sha && run.status === "completed");
if (hasCompleted) break;

const hasInProgress = runsResp.data.workflow_runs.some(run => run.head_sha === sha && run.status !== "completed");
if (!hasInProgress && runsResp.data.workflow_runs.length === 0) {
await new Promise(r => setTimeout(r, intervalMs));
continue;
}

await new Promise(r => setTimeout(r, intervalMs));
}

if (!runsResp) {
console.log("No workflow runs found for this SHA.");
return;
}

const failedRun = runsResp.data.workflow_runs.find(run =>
run.head_sha === sha &&
Expand All @@ -40,10 +79,48 @@ jobs:
return;
}

core.setOutput('assignee', assignees[0] ? assignees[0].login : '');
core.setOutput('failed_run_url', failedRun.html_url);
core.setOutput('failed_run_id', failedRun.id);
console.log(`Found failed CI workflow: ${failedRun.html_url}`);
- name: Download merged test report
if: ${{ steps.wait-for-ci.outputs.failed_run_url != '' }}
uses: actions/download-artifact@v5
with:
name: merged_test_reports
path: report
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ steps.wait-for-ci.outputs.failed_run_id }}
continue-on-error: true
- name: Post failure comment
if: ${{ steps.wait-for-ci.outputs.failed_run_url != '' }}
uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs');
const assignee = '${{ steps.wait-for-ci.outputs.assignee }}';
const runUrl = '${{ steps.wait-for-ci.outputs.failed_run_url }}';

let body = `@${assignee} - CI workflow failed: [View workflow](${runUrl})`;
try {
const reportPath = 'report/merged_test_reports.json';
if (fs.existsSync(reportPath)) {
const data = JSON.parse(fs.readFileSync(reportPath, 'utf8'));
const failed = data.map(t => t.name);
if (failed.length) {
const list = failed.map(n => `- ${n}`).join('\n');
body = `@${assignee} - CI workflow failed: [View workflow](${runUrl})\nFailed tests:\n\n${list}`;
}
}
} catch (e) {
console.log(`Error parsing test report: ${e}`);
}

const pr = context.payload.pull_request;
await github.rest.issues.createComment({
issue_number: pr.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `@${assignees[0].login} - One or more CI jobs failed - [View details](${failedRun.html_url})`
body,
});
console.log("CI failed, assignee notified.")
2 changes: 1 addition & 1 deletion .github/workflows/ci-unittest-retagger.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ jobs:
declare -a os_list=("linux-x86_64" "linux-aarch64" "win32-AMD64")
for os in "${os_list[@]}"; do
echo "Merging tags for $os"
python3 .github/scripts/merge_retagger_results.py --dir ../retagger-reports --outfile "../retagger-reports/reports-merged-$os.json" --pattern "*$os*" || true
python3 .github/scripts/merge_reports.py --dir ../retagger-reports --outfile "../retagger-reports/reports-merged-$os.json" --pattern "*$os*" || true
python3 graalpython/com.oracle.graal.python.test/src/runner.py merge-tags-from-report "../retagger-reports/reports-merged-$os.json" --platform "$os-github" || true
git add -A
git commit -m "Apply retags for $os" || true
Expand Down
30 changes: 29 additions & 1 deletion .github/workflows/ci-unittests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,32 @@ jobs:
needs: build-standalone-artifacts
uses: ./.github/workflows/ci-matrix-gen.yml
with:
jobs_to_run: ^(?!python-svm-build|style).*-gate.*$
jobs_to_run: ^(?!python-svm-build|style).*-gate.*$
export_test_reports: true

collect-reports:
if: always()
needs: run-tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Download test reports
uses: actions/download-artifact@v5
with:
pattern: '*_test_reports'
merge-multiple: true
path: test-reports
- name: Merge test reports
run: |
python3 .github/scripts/merge_reports.py \
--outfile merged_test_reports.json \
--dir test-reports \
--pattern "**/*.json" \
--status-filter "failed"
- name: Upload merged test report
if: always()
uses: actions/upload-artifact@v5
with:
name: merged_test_reports
path: merged_test_reports.json
if-no-files-found: ignore
Loading