Update PR

* Convert to a python harness
This commit is contained in:
abraunegg 2026-03-07 11:54:51 +11:00
commit 8fa8283b6e
10 changed files with 440 additions and 102 deletions

View file

@ -23,7 +23,7 @@ jobs:
run: |
dnf -y update
dnf -y group install development-tools
dnf -y install ldc libcurl-devel sqlite-devel dbus-devel jq
dnf -y install python3 ldc libcurl-devel sqlite-devel dbus-devel jq
- name: Build + local install prefix
run: |
@ -57,7 +57,7 @@ jobs:
E2E_TARGET: personal
RUN_ID: ${{ github.run_id }}
run: |
bash ci/e2e/run.sh
python3 ci/e2e/run.py
- name: Upload E2E artefacts
if: always()
@ -75,7 +75,6 @@ jobs:
steps:
- uses: actions/checkout@v4
# Download the artifact produced by the e2e_personal job
- name: Download artefact
uses: actions/download-artifact@v4
with:
@ -115,6 +114,7 @@ jobs:
echo "md<<EOF" >> "$GITHUB_OUTPUT"
echo -e "$md" >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
- name: Find PR associated with this commit
id: pr
uses: actions/github-script@v7
@ -164,4 +164,4 @@ jobs:
await github.rest.issues.createComment({
owner, repo, issue_number, body
});
}
}

View file

@ -0,0 +1,3 @@
"""
E2E framework package for GitHub Actions based validation.
"""

23
ci/e2e/framework/base.py Normal file
View file

@ -0,0 +1,23 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from framework.context import E2EContext
from framework.result import TestResult
class E2ETestCase(ABC):
    """
    Abstract contract that every E2E test case implements.

    Subclasses populate the identifying class attributes and implement
    run(); the harness in ci/e2e/run.py drives them in suite order.
    """

    # Unique identifier used in logs, artefact paths and results.json.
    case_id: str = ""
    # Human-readable test case name.
    name: str = ""
    # Short summary of what the case validates.
    description: str = ""

    @abstractmethod
    def run(self, context: E2EContext) -> TestResult:
        """Execute this test case against *context* and return a TestResult."""
        raise NotImplementedError

View file

@ -0,0 +1,66 @@
from __future__ import annotations
import os
from dataclasses import dataclass
from pathlib import Path
from framework.utils import ensure_directory, timestamp_now, write_text_file_append
@dataclass
class E2EContext:
    """
    Runtime settings bundle shared by every test case in a run.

    Built from environment variables via from_environment(); also owns
    the master run log that log() appends to.
    """

    onedrive_bin: str   # path to the onedrive client binary under test
    e2e_target: str     # which account type the suite targets (e.g. "personal")
    run_id: str         # GitHub Actions run identifier
    repo_root: Path     # checkout root (current working directory)
    out_dir: Path       # ci/e2e/out — results.json and run.log live here
    logs_dir: Path      # per-case log output
    state_dir: Path     # per-case state/metadata output
    work_root: Path     # scratch directory under RUNNER_TEMP

    @classmethod
    def from_environment(cls) -> "E2EContext":
        """Construct a context from ONEDRIVE_BIN / E2E_TARGET / RUN_ID.

        Raises:
            RuntimeError: when any required variable is unset or blank.
        """
        required: dict[str, str] = {}
        # Validate in a fixed order so the first missing variable is reported.
        for var in ("ONEDRIVE_BIN", "E2E_TARGET", "RUN_ID"):
            value = os.environ.get(var, "").strip()
            if not value:
                raise RuntimeError(f"Environment variable {var} must be set")
            required[var] = value

        root = Path.cwd()
        output_dir = root / "ci" / "e2e" / "out"
        # RUNNER_TEMP is provided by GitHub Actions; fall back to /tmp locally.
        temp_base = os.environ.get("RUNNER_TEMP", "/tmp").strip()
        return cls(
            onedrive_bin=required["ONEDRIVE_BIN"],
            e2e_target=required["E2E_TARGET"],
            run_id=required["RUN_ID"],
            repo_root=root,
            out_dir=output_dir,
            logs_dir=output_dir / "logs",
            state_dir=output_dir / "state",
            work_root=Path(temp_base) / f"onedrive-e2e-{required['E2E_TARGET']}",
        )

    @property
    def master_log_file(self) -> Path:
        """Single append-only log covering the whole run."""
        return self.out_dir / "run.log"

    def log(self, message: str) -> None:
        """Echo *message* to stdout and append it to the master run log."""
        ensure_directory(self.out_dir)
        entry = f"[{timestamp_now()}] {message}\n"
        print(entry, end="")
        write_text_file_append(self.master_log_file, entry)

View file

@ -0,0 +1,50 @@
from __future__ import annotations
from dataclasses import dataclass, field
@dataclass
class TestResult:
    """
    Outcome record produced by a single E2E test case.

    Instances are serialised into results.json by the harness; use the
    pass_result/fail_result factories rather than the raw constructor.
    """

    case_id: str
    name: str
    status: str                                     # "pass" or "fail"
    reason: str = ""                                # failure explanation; empty on pass
    artifacts: list[str] = field(default_factory=list)  # paths to captured files
    details: dict = field(default_factory=dict)         # free-form extra data

    @staticmethod
    def pass_result(
        case_id: str,
        name: str,
        artifacts: list[str] | None = None,
        details: dict | None = None,
    ) -> "TestResult":
        """Build a passing result; omitted collections default to empty."""
        return TestResult(
            case_id=case_id,
            name=name,
            status="pass",
            artifacts=artifacts if artifacts else [],
            details=details if details else {},
        )

    @staticmethod
    def fail_result(
        case_id: str,
        name: str,
        reason: str,
        artifacts: list[str] | None = None,
        details: dict | None = None,
    ) -> "TestResult":
        """Build a failing result carrying a human-readable *reason*."""
        return TestResult(
            case_id=case_id,
            name=name,
            status="fail",
            reason=reason,
            artifacts=artifacts if artifacts else [],
            details=details if details else {},
        )

78
ci/e2e/framework/utils.py Normal file
View file

@ -0,0 +1,78 @@
from __future__ import annotations
import os
import shutil
import subprocess
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
@dataclass
class CommandResult:
    """Captured outcome of one subprocess invocation."""

    command: list[str]   # argv that was executed
    returncode: int      # process exit status
    stdout: str          # decoded standard output
    stderr: str          # decoded standard error

    @property
    def ok(self) -> bool:
        """True when the process exited with status 0."""
        return not self.returncode
def timestamp_now() -> str:
    """Current UTC time rendered as 'YYYY-MM-DD HH:MM:SS UTC'."""
    moment = datetime.now(timezone.utc)
    return moment.strftime("%Y-%m-%d %H:%M:%S UTC")
def ensure_directory(path: Path) -> None:
    """Create *path* and any missing parents; a no-op when it already exists."""
    path.mkdir(parents=True, exist_ok=True)
def reset_directory(path: Path) -> None:
    """Recreate *path* as an empty directory, discarding any prior contents."""
    if path.exists():
        # Remove the whole tree so stale files cannot leak between runs.
        shutil.rmtree(path)
    path.mkdir(parents=True, exist_ok=True)
def write_text_file(path: Path, content: str) -> None:
    """Write *content* to *path* as UTF-8, creating parent directories."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
def write_text_file_append(path: Path, content: str) -> None:
    """Append *content* to *path* as UTF-8, creating parent directories."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("a", encoding="utf-8") as handle:
        handle.write(content)
def run_command(
    command: list[str],
    cwd: Path | None = None,
    env: dict[str, str] | None = None,
) -> CommandResult:
    """Run *command* and capture its text output; never raises on non-zero exit.

    The child inherits the current process environment, overlaid with *env*
    when given. Output is decoded as UTF-8 with undecodable bytes replaced.
    """
    child_env = dict(os.environ)
    child_env.update(env or {})
    # capture_output=True is equivalent to stdout=PIPE, stderr=PIPE.
    proc = subprocess.run(
        command,
        cwd=None if cwd is None else str(cwd),
        env=child_env,
        capture_output=True,
        text=True,
        encoding="utf-8",
        errors="replace",
        check=False,
    )
    return CommandResult(
        command=command,
        returncode=proc.returncode,
        stdout=proc.stdout,
        stderr=proc.stderr,
    )
def command_to_string(command: list[str]) -> str:
    """Render *command* as a copy-pasteable shell string.

    Uses shlex quoting so arguments containing spaces or shell
    metacharacters remain unambiguous in logs and metadata files;
    plain arguments are rendered unchanged.
    """
    import shlex  # local import: only needed for display formatting

    return shlex.join(command)

124
ci/e2e/run.py Normal file
View file

@ -0,0 +1,124 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import sys
import traceback
from pathlib import Path
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import ensure_directory, write_text_file
from testcases.tc0001_basic_resync import TestCase0001BasicResync
def build_test_suite() -> list:
    """
    Assemble the ordered E2E test suite.

    Append new test case instances here; execution follows list order.
    """
    suite: list = [
        TestCase0001BasicResync(),
    ]
    return suite
def result_to_actions_case(result: TestResult) -> dict:
    """
    Serialise a TestResult into the dict shape consumed by the GitHub
    Actions summary/reporting logic.

    The id/name/status keys are always present; reason, artifacts and
    details are included only when non-empty.
    """
    case: dict = {
        "id": result.case_id,
        "name": result.name,
        "status": result.status,
    }
    optional_fields = (
        ("reason", result.reason),
        ("artifacts", result.artifacts),
        ("details", result.details),
    )
    for key, value in optional_fields:
        if value:
            case[key] = value
    return case
def main() -> int:
    """
    Entry point for the E2E harness.

    Runs every test case in suite order, converts each outcome (including
    unhandled exceptions) into the Actions reporting shape, writes
    results.json, and returns 0 when everything passed, 1 otherwise.
    """
    context = E2EContext.from_environment()
    # Make sure every output location exists before any case runs.
    for directory in (
        context.out_dir,
        context.logs_dir,
        context.state_dir,
        context.work_root,
    ):
        ensure_directory(directory)

    context.log(
        f"Initialising E2E framework for target='{context.e2e_target}', "
        f"run_id='{context.run_id}'"
    )

    case_reports: list = []
    any_failed = False
    for testcase in build_test_suite():
        context.log(f"Starting test case {testcase.case_id}: {testcase.name}")
        try:
            result = testcase.run(context)
            if result.case_id != testcase.case_id:
                # A mismatched id would corrupt downstream reporting.
                raise RuntimeError(
                    f"Test case returned mismatched case_id: "
                    f"expected '{testcase.case_id}', got '{result.case_id}'"
                )
            case_reports.append(result_to_actions_case(result))
            if result.status == "pass":
                context.log(f"Test case {testcase.case_id} PASSED")
            else:
                any_failed = True
                context.log(
                    f"Test case {testcase.case_id} FAILED: {result.reason or 'no reason provided'}"
                )
        except Exception as exc:
            # Never let one broken case abort the whole suite; record it as
            # a failure with the traceback preserved as an artefact.
            any_failed = True
            tb = traceback.format_exc()
            context.log(f"Unhandled exception in test case {testcase.case_id}: {exc}")
            context.log(tb)
            error_log = context.logs_dir / f"{testcase.case_id}_exception.log"
            write_text_file(error_log, tb)
            case_reports.append(
                result_to_actions_case(
                    TestResult(
                        case_id=testcase.case_id,
                        name=testcase.name,
                        status="fail",
                        reason=f"Unhandled exception: {exc}",
                        artifacts=[str(error_log)],
                        details={"exception_type": type(exc).__name__},
                    )
                )
            )

    payload = {
        "target": context.e2e_target,
        "run_id": context.run_id,
        "cases": case_reports,
    }
    results_file = context.out_dir / "results.json"
    write_text_file(results_file, json.dumps(payload, indent=2, sort_keys=False))
    context.log(f"Wrote results to {results_file}")
    return 1 if any_failed else 0
if __name__ == "__main__":
    # Propagate the suite outcome as the process exit code.
    raise SystemExit(main())

View file

@ -1,98 +0,0 @@
#!/usr/bin/env bash
# Legacy shell E2E harness (replaced by ci/e2e/run.py in this commit).
# Runs one smoke-test sync of the onedrive client and emits results.json
# in the same shape the GitHub Actions reporting step consumes.
set -euo pipefail
# Required environment variables:
# ONEDRIVE_BIN
# E2E_TARGET
# RUN_ID
#
# Optional (provided by GitHub Actions):
# RUNNER_TEMP
OUT_DIR="ci/e2e/out"
SYNC_ROOT="${RUNNER_TEMP:-/tmp}/sync-${E2E_TARGET}"
mkdir -p "$OUT_DIR"
mkdir -p "$SYNC_ROOT"
RESULTS_FILE="${OUT_DIR}/results.json"
LOG_FILE="${OUT_DIR}/sync.log"
# We'll collect cases as JSON objects in a bash array, then assemble results.json.
declare -a CASES=()
pass_count=0
fail_count=0
# Helper: add a PASS case
# jq -cn builds one compact JSON object per case from the passed --arg values.
add_pass() {
local id="$1"
local name="$2"
CASES+=("$(jq -cn --arg id "$id" --arg name "$name" \
'{id:$id,name:$name,status:"pass"}')")
pass_count=$((pass_count + 1))
}
# Helper: add a FAIL case (with reason)
add_fail() {
local id="$1"
local name="$2"
local reason="$3"
CASES+=("$(jq -cn --arg id "$id" --arg name "$name" --arg reason "$reason" \
'{id:$id,name:$name,status:"fail",reason:$reason}')")
fail_count=$((fail_count + 1))
}
echo "E2E target: ${E2E_TARGET}"
echo "Sync root: ${SYNC_ROOT}"
###############################################
# Test Case 0001: basic resync
###############################################
TC_ID="0001"
TC_NAME="basic-resync (sync + verbose + resync + resync-auth)"
echo "Running test case ${TC_ID}: ${TC_NAME}"
echo "Running: onedrive --sync --verbose --resync --resync-auth"
# Stream output to console AND log file (Option A) while preserving exit code.
# set +e so a non-zero client exit is captured as a FAIL case rather than
# aborting the script (set -e is active); PIPESTATUS[0] gives the client's
# exit code rather than tee's.
set +e
"$ONEDRIVE_BIN" \
--sync \
--verbose \
--resync \
--resync-auth \
--syncdir "$SYNC_ROOT" \
2>&1 | tee "$LOG_FILE"
rc=${PIPESTATUS[0]}
set -e
if [ "$rc" -eq 0 ]; then
add_pass "$TC_ID" "$TC_NAME"
else
add_fail "$TC_ID" "$TC_NAME" "onedrive exited with code ${rc}"
fi
###############################################
# Write results.json
###############################################
# Build JSON array from CASES[]
# jq -cs slurps the newline-separated case objects into one JSON array.
cases_json="$(printf '%s\n' "${CASES[@]}" | jq -cs '.')"
# --argjson keeps run_id numeric in the output (GitHub run IDs are numbers).
jq -n \
--arg target "$E2E_TARGET" \
--argjson run_id "$RUN_ID" \
--argjson cases "$cases_json" \
'{target:$target, run_id:$run_id, cases:$cases}' \
> "$RESULTS_FILE"
echo "Results written to ${RESULTS_FILE}"
echo "Passed: ${pass_count}"
echo "Failed: ${fail_count}"
# Fail the job if any cases failed.
if [ "$fail_count" -ne 0 ]; then
exit 1
fi

View file

@ -0,0 +1,3 @@
"""
E2E test case package.
"""

View file

@ -0,0 +1,89 @@
from __future__ import annotations
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0001BasicResync(E2ETestCase):
    """
    Test Case 0001: basic resync

    Purpose:
    - validate that the E2E framework can invoke the client
    - validate that the configured environment is sufficient to run a basic sync
    - provide a simple baseline smoke test before more advanced E2E scenarios
    """

    case_id = "0001"
    name = "basic resync"
    description = "Run a basic --sync --resync --resync-auth operation and capture the outcome"

    def run(self, context: E2EContext) -> TestResult:
        """Execute the client once and report pass/fail from its exit code.

        Captures stdout, stderr and a metadata summary as artefacts under
        the per-case log/state directories.
        """
        # Per-case directories, wiped on every run for reproducibility.
        case_work_dir = context.work_root / f"tc{self.case_id}"
        case_log_dir = context.logs_dir / f"tc{self.case_id}"
        state_dir = context.state_dir / f"tc{self.case_id}"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)

        stdout_file = case_log_dir / "stdout.log"
        stderr_file = case_log_dir / "stderr.log"
        metadata_file = state_dir / "metadata.txt"

        # Fix: pass --syncdir so the client syncs into the isolated per-case
        # directory. The shell harness this replaced passed
        # `--syncdir "$SYNC_ROOT"`; without it, case_work_dir was reset but
        # never used and the client would sync its default/configured
        # location instead.
        command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--syncdir",
            str(case_work_dir),
        ]
        context.log(
            f"Executing Test Case {self.case_id}: {command_to_string(command)}"
        )
        result = run_command(command, cwd=context.repo_root)

        # Preserve the full client output for artefact upload.
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)

        metadata_lines = [
            f"case_id={self.case_id}",
            f"name={self.name}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
        ]
        write_text_file(metadata_file, "\n".join(metadata_lines) + "\n")

        artifacts = [
            str(stdout_file),
            str(stderr_file),
            str(metadata_file),
        ]
        details = {
            "command": command,
            "returncode": result.returncode,
        }

        # The exit code is the sole pass/fail criterion for this smoke test.
        if result.returncode != 0:
            reason = f"onedrive exited with non-zero status {result.returncode}"
            return TestResult.fail_result(
                case_id=self.case_id,
                name=self.name,
                reason=reason,
                artifacts=artifacts,
                details=details,
            )
        return TestResult.pass_result(
            case_id=self.case_id,
            name=self.name,
            artifacts=artifacts,
            details=details,
        )