Add further test cases

* Add Test Cases 0003 to 0016
This commit is contained in:
abraunegg 2026-03-13 16:25:25 +11:00
commit c30cd8145f
16 changed files with 895 additions and 0 deletions

View file

@@ -11,6 +11,20 @@ from framework.result import TestResult
from framework.utils import ensure_directory, write_text_file
from testcases.tc0001_basic_resync import TestCase0001BasicResync
from testcases.tc0002_sync_list_validation import TestCase0002SyncListValidation
from testcases.tc0003_dry_run_validation import TestCase0003DryRunValidation
from testcases.tc0004_single_directory_sync import TestCase0004SingleDirectorySync
from testcases.tc0005_force_sync_override import TestCase0005ForceSyncOverride
from testcases.tc0006_download_only import TestCase0006DownloadOnly
from testcases.tc0007_download_only_cleanup_local_files import TestCase0007DownloadOnlyCleanupLocalFiles
from testcases.tc0008_upload_only import TestCase0008UploadOnly
from testcases.tc0009_upload_only_no_remote_delete import TestCase0009UploadOnlyNoRemoteDelete
from testcases.tc0010_upload_only_remove_source_files import TestCase0010UploadOnlyRemoveSourceFiles
from testcases.tc0011_skip_file_validation import TestCase0011SkipFileValidation
from testcases.tc0012_skip_dir_validation import TestCase0012SkipDirValidation
from testcases.tc0013_skip_dotfiles_validation import TestCase0013SkipDotfilesValidation
from testcases.tc0014_skip_size_validation import TestCase0014SkipSizeValidation
from testcases.tc0015_skip_symlinks_validation import TestCase0015SkipSymlinksValidation
from testcases.tc0016_check_nosync_validation import TestCase0016CheckNosyncValidation
def build_test_suite() -> list:
    """Return the ordered list of Wave 1 test case instances (TC0001-TC0016).

    The ordering is significant: cases run in the sequence listed here.
    """
    case_classes = [
        TestCase0001BasicResync,
        TestCase0002SyncListValidation,
        TestCase0003DryRunValidation,
        TestCase0004SingleDirectorySync,
        TestCase0005ForceSyncOverride,
        TestCase0006DownloadOnly,
        TestCase0007DownloadOnlyCleanupLocalFiles,
        TestCase0008UploadOnly,
        TestCase0009UploadOnlyNoRemoteDelete,
        TestCase0010UploadOnlyRemoveSourceFiles,
        TestCase0011SkipFileValidation,
        TestCase0012SkipDirValidation,
        TestCase0013SkipDotfilesValidation,
        TestCase0014SkipSizeValidation,
        TestCase0015SkipSymlinksValidation,
        TestCase0016CheckNosyncValidation,
    ]
    return [case_class() for case_class in case_classes]

View file

@@ -0,0 +1,73 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0003DryRunValidation(Wave1TestCaseBase):
    """Validate that ``--dry-run`` performs no local or remote changes.

    Phases:
      1. Seed remote state from a scratch sync root.
      2. Run ``--dry-run`` from a second sync root that contains a
         local-only file and a locally-modified file; snapshot the local
         filesystem before and after to prove nothing changed.
      3. Re-download the remote scope and confirm the remote baseline is
         intact and no local-only content was uploaded.
    """

    case_id = "0003"
    name = "dry-run validation"
    description = "Validate that --dry-run performs no local or remote changes"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Phase 1: seed the remote side with a known baseline.
        seed_root = case_work_dir / "seed-syncroot"
        seed_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(seed_root / root_name / "Remote" / "online.txt", "online baseline\n")
        self._create_text_file(seed_root / root_name / "Remote" / "keep.txt", "keep baseline\n")
        self._create_binary_file(seed_root / root_name / "Data" / "payload.bin", 64 * 1024)
        seed_config_dir = self._new_config_dir(context, case_work_dir, "seed")
        config_path, sync_list_path = self._write_config(seed_config_dir, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        seed_result = self._run_onedrive(context, sync_root=seed_root, config_dir=seed_config_dir)
        artifacts.extend(self._write_command_artifacts(result=seed_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="seed"))
        artifacts.extend(self._write_manifests(seed_root, case_state_dir, "seed_local"))
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts, {"phase": "seed"})
        # Phase 2: dry-run from a fresh root containing content that must
        # NOT be uploaded (LocalOnly) or pushed (a locally-edited keep.txt).
        dry_root = case_work_dir / "dryrun-syncroot"
        dry_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(dry_root / root_name / "LocalOnly" / "draft.txt", "local only\n")
        self._create_text_file(dry_root / root_name / "Remote" / "keep.txt", "locally modified but should not upload\n")
        # Snapshot before the run; compared against the post-run snapshot
        # to prove --dry-run left the local filesystem untouched.
        pre_snapshot = self._snapshot_files(dry_root)
        artifacts.append(self._write_json_artifact(case_state_dir / "pre_snapshot.json", pre_snapshot))
        dry_config_dir = self._new_config_dir(context, case_work_dir, "dryrun")
        config_path, sync_list_path = self._write_config(dry_config_dir, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        dry_result = self._run_onedrive(context, sync_root=dry_root, config_dir=dry_config_dir, extra_args=["--dry-run"])
        artifacts.extend(self._write_command_artifacts(result=dry_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="dry_run"))
        post_snapshot = self._snapshot_files(dry_root)
        artifacts.append(self._write_json_artifact(case_state_dir / "post_snapshot.json", post_snapshot))
        if dry_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Dry-run exited with status {dry_result.returncode}", artifacts, {"phase": "dry-run"})
        if pre_snapshot != post_snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Local filesystem changed during --dry-run", artifacts, {"phase": "dry-run"})
        # Phase 3: download the remote scope into a verification root and
        # check the remote baseline survived the dry-run unchanged.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "verify_remote"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification download failed with status {verify_result.returncode}", artifacts)
        downloaded = set(self._snapshot_files(verify_root).keys())
        # Every seeded path must still exist remotely after the dry-run.
        expected_present = {
            f"{root_name}/Remote",
            f"{root_name}/Remote/online.txt",
            f"{root_name}/Remote/keep.txt",
            f"{root_name}/Data",
            f"{root_name}/Data/payload.bin",
        }
        unexpected_absent = sorted(expected_present - downloaded)
        if unexpected_absent:
            return TestResult.fail_result(self.case_id, self.name, "Remote baseline changed after --dry-run", artifacts, {"missing": unexpected_absent})
        if f"{root_name}/LocalOnly/draft.txt" in downloaded:
            return TestResult.fail_result(self.case_id, self.name, "Local-only file was uploaded during --dry-run", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,52 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0004SingleDirectorySync(Wave1TestCaseBase):
    """Validate that ``--single-directory`` synchronises only the nominated subtree.

    Seeds scoped (``Scoped``) and unscoped (``Unscoped``) local content, runs a
    sync restricted to ``Scoped``, then downloads the remote scope and checks
    that only the scoped subtree was uploaded.
    """

    case_id = "0004"
    name = "single-directory synchronisation"
    description = "Validate that only the nominated subtree is synchronised"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Local content: Scoped (incl. a nested directory) must upload,
        # Unscoped must stay local.
        sync_root = case_work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "Scoped" / "include.txt", "scoped file\n")
        self._create_text_file(sync_root / root_name / "Scoped" / "Nested" / "deep.txt", "nested scoped\n")
        self._create_text_file(sync_root / root_name / "Unscoped" / "exclude.txt", "should stay local only\n")
        config_dir = self._new_config_dir(context, case_work_dir, "main")
        config_path, sync_list_path = self._write_config(config_dir, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=config_dir, extra_args=["--single-directory", f"{root_name}/Scoped"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="single_directory"))
        artifacts.extend(self._write_manifests(sync_root, case_state_dir, "local_after"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--single-directory sync failed with status {result.returncode}", artifacts)
        # Verification: download the remote scope and inspect what exists there.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "remote_manifest"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        snapshot = self._snapshot_files(verify_root)
        # All scoped paths (directories and files) must be present remotely.
        required = {
            f"{root_name}/Scoped",
            f"{root_name}/Scoped/include.txt",
            f"{root_name}/Scoped/Nested",
            f"{root_name}/Scoped/Nested/deep.txt",
        }
        missing = sorted(required - set(snapshot.keys()))
        if missing:
            return TestResult.fail_result(self.case_id, self.name, "Scoped content was not uploaded as expected", artifacts, {"missing": missing})
        if f"{root_name}/Unscoped/exclude.txt" in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Unscoped content was unexpectedly synchronised", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,52 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0005ForceSyncOverride(Wave1TestCaseBase):
    """Validate that ``--force-sync`` overrides ``skip_dir`` with ``--single-directory``.

    Phases:
      1. Seed a remote ``Blocked`` directory.
      2. Download it with ``skip_dir = "Blocked"`` and no ``--force-sync``:
         the blocked content must NOT appear locally.
      3. Repeat with ``--force-sync``: the blocked content MUST appear.
    """

    case_id = "0005"
    name = "force-sync override"
    description = "Validate that --force-sync overrides skip_dir when using --single-directory"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Phase 1: seed the remote Blocked directory.
        seed_root = case_work_dir / "seed-syncroot"
        seed_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(seed_root / root_name / "Blocked" / "blocked.txt", "blocked remote file\n")
        seed_conf = self._new_config_dir(context, case_work_dir, "seed")
        config_path, sync_list_path = self._write_config(seed_conf, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        seed_result = self._run_onedrive(context, sync_root=seed_root, config_dir=seed_conf)
        artifacts.extend(self._write_command_artifacts(result=seed_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="seed"))
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts)
        # Phase 2: without --force-sync, skip_dir must win and nothing downloads.
        no_force_root = case_work_dir / "no-force-syncroot"
        no_force_root.mkdir(parents=True, exist_ok=True)
        no_force_conf = self._new_config_dir(context, case_work_dir, "no-force")
        config_path, sync_list_path = self._write_config(no_force_conf, extra_lines=['skip_dir = "Blocked"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        no_force_result = self._run_onedrive(context, sync_root=no_force_root, config_dir=no_force_conf, extra_args=["--download-only", "--single-directory", f"{root_name}/Blocked"])
        artifacts.extend(self._write_command_artifacts(result=no_force_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="no_force"))
        if no_force_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Blocked single-directory sync without --force-sync failed with status {no_force_result.returncode}", artifacts)
        if (no_force_root / root_name / "Blocked" / "blocked.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Blocked content was downloaded without --force-sync", artifacts)
        # Phase 3: with --force-sync, the skip_dir rule must be overridden.
        force_root = case_work_dir / "force-syncroot"
        force_root.mkdir(parents=True, exist_ok=True)
        force_conf = self._new_config_dir(context, case_work_dir, "force")
        config_path, sync_list_path = self._write_config(force_conf, extra_lines=['skip_dir = "Blocked"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        force_result = self._run_onedrive(context, sync_root=force_root, config_dir=force_conf, extra_args=["--download-only", "--single-directory", f"{root_name}/Blocked", "--force-sync"])
        artifacts.extend(self._write_command_artifacts(result=force_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="force"))
        artifacts.extend(self._write_manifests(force_root, case_state_dir, "force_manifest"))
        if force_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Blocked single-directory sync with --force-sync failed with status {force_result.returncode}", artifacts)
        if not (force_root / root_name / "Blocked" / "blocked.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Blocked content was not downloaded with --force-sync", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,51 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0006DownloadOnly(Wave1TestCaseBase):
    """Validate ``--download-only``: remote content comes down, local-only stays put.

    Seeds a remote file, then runs ``--download-only`` from a root that also
    holds a local-only file; finally re-downloads the remote scope to prove
    the local-only file was never uploaded.
    """

    case_id = "0006"
    name = "download-only behaviour"
    description = "Validate that remote content downloads locally and local-only content is not uploaded"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Phase 1: seed the remote side with a file to be downloaded.
        seed_root = case_work_dir / "seed-syncroot"
        seed_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(seed_root / root_name / "Remote" / "download_me.txt", "remote file\n")
        seed_conf = self._new_config_dir(context, case_work_dir, "seed")
        config_path, sync_list_path = self._write_config(seed_conf, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        seed_result = self._run_onedrive(context, sync_root=seed_root, config_dir=seed_conf)
        artifacts.extend(self._write_command_artifacts(result=seed_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="seed"))
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts)
        # Phase 2: --download-only from a root that contains a local-only file.
        sync_root = case_work_dir / "download-syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "LocalOnly" / "stay_local.txt", "must not upload\n")
        conf_dir = self._new_config_dir(context, case_work_dir, "download")
        config_path, sync_list_path = self._write_config(conf_dir, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir, extra_args=["--download-only"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="download_only"))
        artifacts.extend(self._write_manifests(sync_root, case_state_dir, "local_after"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--download-only failed with status {result.returncode}", artifacts)
        if not (sync_root / root_name / "Remote" / "download_me.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Remote file was not downloaded locally", artifacts)
        # --download-only must not delete local-only content by default.
        if not (sync_root / root_name / "LocalOnly" / "stay_local.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Local-only file should remain present locally", artifacts)
        # Phase 3: confirm nothing was uploaded to the remote side.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "verify_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        verify_snapshot = self._snapshot_files(verify_root)
        if f"{root_name}/LocalOnly/stay_local.txt" in verify_snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Local-only file was uploaded during --download-only", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,43 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0007DownloadOnlyCleanupLocalFiles(Wave1TestCaseBase):
    """Validate ``cleanup_local_files`` during ``--download-only``.

    Seeds a remote ``Keep`` file, then runs ``--download-only`` with
    ``cleanup_local_files`` enabled from a root holding both a placeholder
    for the kept file and an ``Obsolete`` file that has no remote
    counterpart; the obsolete file must be removed, the kept one retained.
    """

    case_id = "0007"
    name = "download-only cleanup-local-files"
    description = "Validate that stale local files are removed when cleanup_local_files is enabled"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Phase 1: seed the remote Keep/keep.txt baseline.
        seed_root = case_work_dir / "seed-syncroot"
        seed_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(seed_root / root_name / "Keep" / "keep.txt", "keep\n")
        seed_conf = self._new_config_dir(context, case_work_dir, "seed")
        config_path, sync_list_path = self._write_config(seed_conf, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        seed_result = self._run_onedrive(context, sync_root=seed_root, config_dir=seed_conf)
        artifacts.extend(self._write_command_artifacts(result=seed_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="seed"))
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts)
        # Phase 2: local root with a placeholder for the kept file plus a
        # stale file that does not exist remotely.
        sync_root = case_work_dir / "cleanup-syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "Keep" / "keep.txt", "local keep placeholder\n")
        self._create_text_file(sync_root / root_name / "Obsolete" / "old.txt", "obsolete\n")
        conf_dir = self._new_config_dir(context, case_work_dir, "cleanup")
        config_path, sync_list_path = self._write_config(conf_dir, extra_lines=['cleanup_local_files = "true"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir, extra_args=["--download-only"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="cleanup_download_only"))
        artifacts.extend(self._write_manifests(sync_root, case_state_dir, "local_after"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Cleanup validation failed with status {result.returncode}", artifacts)
        # The remotely-backed file stays; the local-only stale file is purged.
        if not (sync_root / root_name / "Keep" / "keep.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Expected retained file is missing after cleanup", artifacts)
        if (sync_root / root_name / "Obsolete" / "old.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Stale local file still exists after cleanup_local_files processing", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,38 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0008UploadOnly(Wave1TestCaseBase):
    """Validate that local content is uploaded when using ``--upload-only``.

    Creates a text file and a ~70 KiB binary blob locally, uploads with
    ``--upload-only``, then downloads the remote scope to confirm both
    files arrived.
    """

    case_id = "0008"
    name = "upload-only behaviour"
    description = "Validate that local content is uploaded when using --upload-only"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Local content to upload: one text file, one binary payload.
        sync_root = case_work_dir / "upload-syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "Upload" / "file.txt", "upload me\n")
        self._create_binary_file(sync_root / root_name / "Upload" / "blob.bin", 70 * 1024)
        conf_dir = self._new_config_dir(context, case_work_dir, "upload")
        config_path, sync_list_path = self._write_config(conf_dir, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir, extra_args=["--upload-only"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="upload_only"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only failed with status {result.returncode}", artifacts)
        # Verification: pull the remote scope and check both files exist there.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "verify_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "remote_manifest"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        verify_snapshot = self._snapshot_files(verify_root)
        expected = {f"{root_name}/Upload/file.txt", f"{root_name}/Upload/blob.bin"}
        missing = sorted(expected - set(verify_snapshot.keys()))
        if missing:
            return TestResult.fail_result(self.case_id, self.name, "Uploaded files were not present remotely", artifacts, {"missing": missing})
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,48 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0009UploadOnlyNoRemoteDelete(Wave1TestCaseBase):
    """Validate ``no_remote_delete`` with ``--upload-only``.

    Seeds a remote file, then uploads from a root that does NOT contain it
    (but holds a new local file) with ``no_remote_delete`` enabled; both the
    pre-existing remote file and the newly-uploaded file must exist remotely
    afterwards.
    """

    case_id = "0009"
    name = "upload-only no-remote-delete"
    description = "Validate that remote data is retained when local content is absent and no_remote_delete is enabled"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Phase 1: seed a remote file that the upload root will NOT contain.
        seed_root = case_work_dir / "seed-syncroot"
        seed_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(seed_root / root_name / "RemoteKeep" / "preserve.txt", "preserve remotely\n")
        seed_conf = self._new_config_dir(context, case_work_dir, "seed")
        config_path, sync_list_path = self._write_config(seed_conf, sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        seed_result = self._run_onedrive(context, sync_root=seed_root, config_dir=seed_conf)
        artifacts.extend(self._write_command_artifacts(result=seed_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="seed"))
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts)
        # Phase 2: upload-only from a root missing the seeded file; with
        # no_remote_delete enabled the remote copy must survive.
        sync_root = case_work_dir / "upload-syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "LocalUpload" / "new.txt", "new upload\n")
        conf_dir = self._new_config_dir(context, case_work_dir, "upload")
        config_path, sync_list_path = self._write_config(conf_dir, extra_lines=['no_remote_delete = "true"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir, extra_args=["--upload-only"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="upload_only_no_remote_delete"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only --no-remote-delete failed with status {result.returncode}", artifacts)
        # Phase 3: both the preserved remote file and the new upload must exist.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "verify_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "remote_manifest"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        verify_snapshot = self._snapshot_files(verify_root)
        expected = {f"{root_name}/RemoteKeep/preserve.txt", f"{root_name}/LocalUpload/new.txt"}
        missing = sorted(expected - set(verify_snapshot.keys()))
        if missing:
            return TestResult.fail_result(self.case_id, self.name, "Remote content was deleted or not uploaded as expected", artifacts, {"missing": missing})
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,37 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0010UploadOnlyRemoveSourceFiles(Wave1TestCaseBase):
    """Validate ``remove_source_files`` with ``--upload-only``.

    Uploads a single local file with ``remove_source_files`` enabled, then
    checks the local copy is gone and the remote copy exists.
    """

    case_id = "0010"
    name = "upload-only remove-source-files"
    description = "Validate that local files are removed after successful upload when remove_source_files is enabled"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Local source file that should be deleted after a successful upload.
        sync_root = case_work_dir / "upload-syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        source_file = sync_root / root_name / "Source" / "upload_and_remove.txt"
        self._create_text_file(source_file, "remove after upload\n")
        conf_dir = self._new_config_dir(context, case_work_dir, "upload")
        config_path, sync_list_path = self._write_config(conf_dir, extra_lines=['remove_source_files = "true"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir, extra_args=["--upload-only"])
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="upload_only_remove_source"))
        artifacts.extend(self._write_manifests(sync_root, case_state_dir, "local_after"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only with remove_source_files failed with status {result.returncode}", artifacts)
        # The local source must have been removed after the upload succeeded.
        if source_file.exists():
            return TestResult.fail_result(self.case_id, self.name, "Source file still exists locally after upload", artifacts)
        # Verification: the file must now exist remotely.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "verify_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        if not (verify_root / root_name / "Source" / "upload_and_remove.txt").exists():
            return TestResult.fail_result(self.case_id, self.name, "Uploaded file was not present remotely after local removal", artifacts)
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,43 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0011SkipFileValidation(Wave1TestCaseBase):
    """Validate that ``skip_file`` patterns exclude matching files from sync.

    Uploads a mix of files with ``skip_file = "*.tmp|*.swp"`` configured,
    then downloads the remote scope to check that only the non-matching
    files were uploaded.
    """

    case_id = "0011"
    name = "skip_file validation"
    description = "Validate that skip_file patterns exclude matching files from synchronisation"

    def run(self, context):
        """Execute the scenario and return a :class:`TestResult`."""
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        # Mix of files: .txt/.md should upload, .tmp/.swp should be skipped.
        sync_root = case_work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(sync_root / root_name / "keep.txt", "keep me\n")
        self._create_text_file(sync_root / root_name / "ignore.tmp", "temp\n")
        self._create_text_file(sync_root / root_name / "editor.swp", "swap\n")
        self._create_text_file(sync_root / root_name / "Nested" / "keep.md", "nested keep\n")
        conf_dir = self._new_config_dir(context, case_work_dir, "main")
        config_path, sync_list_path = self._write_config(conf_dir, extra_lines=['skip_file = "*.tmp|*.swp"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        result = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir)
        artifacts.extend(self._write_command_artifacts(result=result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="skip_file"))
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_file validation failed with status {result.returncode}", artifacts)
        # Verification: download the remote scope and inspect what landed there.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "verify_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="verify_remote"))
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "remote_manifest"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts)
        snapshot = self._snapshot_files(verify_root)
        expected = {f"{root_name}/keep.txt", f"{root_name}/Nested/keep.md"}
        missing = sorted(expected - set(snapshot.keys()))
        if missing:
            return TestResult.fail_result(self.case_id, self.name, "Expected non-skipped files are missing remotely", artifacts, {"missing": missing})
        # Any skipped-pattern file found remotely is a failure.
        present = sorted(path for path in [f"{root_name}/ignore.tmp", f"{root_name}/editor.swp"] if path in snapshot)
        if present:
            return TestResult.fail_result(self.case_id, self.name, "skip_file patterns did not exclude all matching files", artifacts, {"present": present})
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name})

View file

@ -0,0 +1,70 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0012SkipDirValidation(Wave1TestCaseBase):
    """End-to-end validation of the skip_dir configuration option.

    Runs two independent sync scenarios: a "loose" scenario
    (skip_dir_strict_match = false) where the test expects any directory
    named "build" to be excluded regardless of depth, and a "strict"
    scenario (skip_dir_strict_match = true) where only the exact configured
    path is expected to be excluded while other "build" directories sync.
    """

    case_id = "0012"
    name = "skip_dir validation"
    description = "Validate loose and strict skip_dir matching behaviour"

    def run(self, context):
        """Execute both skip_dir scenarios and return a TestResult.

        Hard failures (non-zero client exit codes) return immediately;
        content-verification findings are collected in ``failures`` so both
        scenarios contribute to the final verdict.
        """
        case_work_dir, case_log_dir, case_state_dir = self._initialise_case_dirs(context)
        root_name = self._root_name(context)
        artifacts = []
        failures = []
        # --- Loose scenario: skip_dir = "build" with strict matching disabled.
        loose_root = case_work_dir / "loose-syncroot"
        loose_root.mkdir(parents=True, exist_ok=True)
        # Two "build" directories at different depths (both expected to be
        # skipped) plus one non-build file that must still be uploaded.
        self._create_text_file(loose_root / root_name / "project" / "build" / "out.bin", "skip me\n")
        self._create_text_file(loose_root / root_name / "build" / "root.bin", "skip me too\n")
        self._create_text_file(loose_root / root_name / "project" / "src" / "app.txt", "keep me\n")
        loose_conf = self._new_config_dir(context, case_work_dir, "loose")
        config_path, sync_list_path = self._write_config(loose_conf, extra_lines=['skip_dir = "build"', 'skip_dir_strict_match = "false"'], sync_list_entries=[f"/{root_name}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        loose_result = self._run_onedrive(context, sync_root=loose_root, config_dir=loose_conf)
        artifacts.extend(self._write_command_artifacts(result=loose_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="loose_match"))
        if loose_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Loose skip_dir scenario failed with status {loose_result.returncode}", artifacts)
        # Pull the remote state back down into a separate tree for inspection.
        verify_root, verify_result, verify_artifacts = self._download_remote_scope(context, case_work_dir, root_name, "loose_remote")
        artifacts.extend(verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="loose_verify"))
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Loose skip_dir verification failed with status {verify_result.returncode}", artifacts)
        loose_snapshot = self._snapshot_files(verify_root)
        if f"{root_name}/project/src/app.txt" not in loose_snapshot:
            failures.append("Loose matching did not retain non-build content")
        for forbidden in [f"{root_name}/project/build/out.bin", f"{root_name}/build/root.bin"]:
            if forbidden in loose_snapshot:
                failures.append(f"Loose matching did not exclude {forbidden}")
        # --- Strict scenario: a separate remote scope so the two runs cannot
        # interfere; only the exact path "<scope>/project/build" is targeted.
        strict_scope = f"{root_name}_STRICT"
        strict_root = case_work_dir / "strict-syncroot"
        strict_root.mkdir(parents=True, exist_ok=True)
        self._create_text_file(strict_root / strict_scope / "project" / "build" / "skip.bin", "skip strict\n")
        self._create_text_file(strict_root / strict_scope / "other" / "build" / "keep.bin", "keep strict\n")
        self._create_text_file(strict_root / strict_scope / "other" / "src" / "keep.txt", "keep strict txt\n")
        strict_conf = self._new_config_dir(context, case_work_dir, "strict")
        config_path, sync_list_path = self._write_config(strict_conf, extra_lines=[f'skip_dir = "{strict_scope}/project/build"', 'skip_dir_strict_match = "true"'], sync_list_entries=[f"/{strict_scope}"])
        artifacts.extend([str(config_path), str(sync_list_path)])
        strict_result = self._run_onedrive(context, sync_root=strict_root, config_dir=strict_conf)
        artifacts.extend(self._write_command_artifacts(result=strict_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="strict_match"))
        if strict_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Strict skip_dir scenario failed with status {strict_result.returncode}", artifacts)
        strict_verify_root, strict_verify_result, strict_verify_artifacts = self._download_remote_scope(context, case_work_dir, strict_scope, "strict_remote")
        artifacts.extend(strict_verify_artifacts)
        artifacts.extend(self._write_command_artifacts(result=strict_verify_result, log_dir=case_log_dir, state_dir=case_state_dir, phase_name="strict_verify"))
        if strict_verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Strict skip_dir verification failed with status {strict_verify_result.returncode}", artifacts)
        strict_snapshot = self._snapshot_files(strict_verify_root)
        if f"{strict_scope}/project/build/skip.bin" in strict_snapshot:
            failures.append("Strict matching did not exclude the targeted full path")
        for required in [f"{strict_scope}/other/build/keep.bin", f"{strict_scope}/other/src/keep.txt"]:
            if required not in strict_snapshot:
                failures.append(f"Strict matching excluded unexpected content: {required}")
        # Manifests of both downloaded trees are always archived for debugging.
        artifacts.extend(self._write_manifests(verify_root, case_state_dir, "loose_manifest"))
        artifacts.extend(self._write_manifests(strict_verify_root, case_state_dir, "strict_manifest"))
        if failures:
            return TestResult.fail_result(self.case_id, self.name, "; ".join(failures), artifacts, {"failure_count": len(failures)})
        return TestResult.pass_result(self.case_id, self.name, artifacts, {"root_name": root_name, "strict_scope": strict_scope})

View file

@ -0,0 +1,41 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0013SkipDotfilesValidation(Wave1TestCaseBase):
    """Check that skip_dotfiles keeps dotfiles and dot-directories out of the sync."""

    case_id = "0013"
    name = "skip_dotfiles validation"
    description = "Validate that dotfiles and dot-directories are excluded when skip_dotfiles is enabled"

    def run(self, context):
        work_dir, log_dir, state_dir = self._initialise_case_dirs(context)
        scope = self._root_name(context)
        collected = []

        # Local tree: two dot-entries that must be skipped, two visible
        # entries that must reach the remote side.
        sync_root = work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        for rel_path, body in (
            (".hidden.txt", "hidden\n"),
            (".dotdir/inside.txt", "inside dotdir\n"),
            ("visible.txt", "visible\n"),
            ("normal/keep.md", "normal keep\n"),
        ):
            self._create_text_file(sync_root / scope / rel_path, body)

        conf_dir = self._new_config_dir(context, work_dir, "main")
        config_path, sync_list_path = self._write_config(
            conf_dir,
            extra_lines=['skip_dotfiles = "true"'],
            sync_list_entries=[f"/{scope}"],
        )
        collected.extend([str(config_path), str(sync_list_path)])

        sync = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir)
        collected.extend(self._write_command_artifacts(result=sync, log_dir=log_dir, state_dir=state_dir, phase_name="skip_dotfiles"))
        if sync.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_dotfiles validation failed with status {sync.returncode}", collected)

        # Download the remote scope into a fresh tree and inspect it.
        verify_root, verify, verify_artifacts = self._download_remote_scope(context, work_dir, scope, "verify_remote")
        collected.extend(verify_artifacts)
        collected.extend(self._write_command_artifacts(result=verify, log_dir=log_dir, state_dir=state_dir, phase_name="verify_remote"))
        if verify.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify.returncode}", collected)

        snapshot = self._snapshot_files(verify_root)
        for required in (f"{scope}/visible.txt", f"{scope}/normal/keep.md"):
            if required not in snapshot:
                return TestResult.fail_result(self.case_id, self.name, f"Expected visible content missing remotely: {required}", collected)
        for forbidden in (f"{scope}/.hidden.txt", f"{scope}/.dotdir/inside.txt"):
            if forbidden in snapshot:
                return TestResult.fail_result(self.case_id, self.name, f"Dotfile content was unexpectedly synchronised: {forbidden}", collected)
        return TestResult.pass_result(self.case_id, self.name, collected, {"root_name": scope})

View file

@ -0,0 +1,37 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0014SkipSizeValidation(Wave1TestCaseBase):
    """Check that files above the configured skip_size threshold are not synchronised."""

    case_id = "0014"
    name = "skip_size validation"
    description = "Validate that files above the configured size threshold are excluded from synchronisation"

    def run(self, context):
        work_dir, log_dir, state_dir = self._initialise_case_dirs(context)
        scope = self._root_name(context)
        collected = []

        # One file well below and one well above the configured threshold.
        sync_root = work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        self._create_binary_file(sync_root / scope / "small.bin", 128 * 1024)
        self._create_binary_file(sync_root / scope / "large.bin", 2 * 1024 * 1024)

        conf_dir = self._new_config_dir(context, work_dir, "main")
        config_path, sync_list_path = self._write_config(
            conf_dir,
            extra_lines=['skip_size = "1"'],
            sync_list_entries=[f"/{scope}"],
        )
        collected.extend([str(config_path), str(sync_list_path)])

        sync = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir)
        collected.extend(self._write_command_artifacts(result=sync, log_dir=log_dir, state_dir=state_dir, phase_name="skip_size"))
        if sync.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_size validation failed with status {sync.returncode}", collected)

        # Download the remote scope into a fresh tree and inspect it.
        verify_root, verify, verify_artifacts = self._download_remote_scope(context, work_dir, scope, "verify_remote")
        collected.extend(verify_artifacts)
        collected.extend(self._write_command_artifacts(result=verify, log_dir=log_dir, state_dir=state_dir, phase_name="verify_remote"))
        if verify.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify.returncode}", collected)

        snapshot = self._snapshot_files(verify_root)
        if f"{scope}/small.bin" not in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Small file is missing remotely", collected)
        if f"{scope}/large.bin" in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Large file exceeded skip_size threshold but was synchronised", collected)
        return TestResult.pass_result(self.case_id, self.name, collected, {"root_name": scope})

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0015SkipSymlinksValidation(Wave1TestCaseBase):
    """Check that symbolic links are not synchronised when skip_symlinks is enabled."""

    case_id = "0015"
    name = "skip_symlinks validation"
    description = "Validate that symbolic links are excluded when skip_symlinks is enabled"

    def run(self, context):
        work_dir, log_dir, state_dir = self._initialise_case_dirs(context)
        scope = self._root_name(context)
        collected = []

        # A real file plus a relative symlink pointing at it.
        sync_root = work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        real_file = sync_root / scope / "real.txt"
        self._create_text_file(real_file, "real content\n")
        link_file = sync_root / scope / "real-link.txt"
        link_file.parent.mkdir(parents=True, exist_ok=True)
        os.symlink("real.txt", link_file)

        conf_dir = self._new_config_dir(context, work_dir, "main")
        config_path, sync_list_path = self._write_config(
            conf_dir,
            extra_lines=['skip_symlinks = "true"'],
            sync_list_entries=[f"/{scope}"],
        )
        collected.extend([str(config_path), str(sync_list_path)])
        # Record the local tree (including the symlink) before synchronising.
        collected.append(self._write_json_artifact(state_dir / "local_snapshot_pre.json", self._snapshot_files(sync_root)))

        sync = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir)
        collected.extend(self._write_command_artifacts(result=sync, log_dir=log_dir, state_dir=state_dir, phase_name="skip_symlinks"))
        if sync.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_symlinks validation failed with status {sync.returncode}", collected)

        # Download the remote scope into a fresh tree and inspect it.
        verify_root, verify, verify_artifacts = self._download_remote_scope(context, work_dir, scope, "verify_remote")
        collected.extend(verify_artifacts)
        collected.extend(self._write_command_artifacts(result=verify, log_dir=log_dir, state_dir=state_dir, phase_name="verify_remote"))
        if verify.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify.returncode}", collected)

        snapshot = self._snapshot_files(verify_root)
        if f"{scope}/real.txt" not in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Real file is missing remotely", collected)
        if f"{scope}/real-link.txt" in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Symbolic link was unexpectedly synchronised", collected)
        return TestResult.pass_result(self.case_id, self.name, collected, {"root_name": scope})

View file

@ -0,0 +1,39 @@
from __future__ import annotations
from framework.result import TestResult
from testcases.wave1_common import Wave1TestCaseBase
class TestCase0016CheckNosyncValidation(Wave1TestCaseBase):
    """Check that directories carrying a .nosync marker are excluded when check_nosync is enabled."""

    case_id = "0016"
    name = "check_nosync validation"
    description = "Validate that local directories containing .nosync are excluded when check_nosync is enabled"

    def run(self, context):
        work_dir, log_dir, state_dir = self._initialise_case_dirs(context)
        scope = self._root_name(context)
        collected = []

        # "Blocked" carries a .nosync marker; "Allowed" does not.
        sync_root = work_dir / "syncroot"
        sync_root.mkdir(parents=True, exist_ok=True)
        for rel_path, body in (
            ("Blocked/.nosync", "marker\n"),
            ("Blocked/blocked.txt", "blocked\n"),
            ("Allowed/allowed.txt", "allowed\n"),
        ):
            self._create_text_file(sync_root / scope / rel_path, body)

        conf_dir = self._new_config_dir(context, work_dir, "main")
        config_path, sync_list_path = self._write_config(
            conf_dir,
            extra_lines=['check_nosync = "true"'],
            sync_list_entries=[f"/{scope}"],
        )
        collected.extend([str(config_path), str(sync_list_path)])

        sync = self._run_onedrive(context, sync_root=sync_root, config_dir=conf_dir)
        collected.extend(self._write_command_artifacts(result=sync, log_dir=log_dir, state_dir=state_dir, phase_name="check_nosync"))
        if sync.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"check_nosync validation failed with status {sync.returncode}", collected)

        # Download the remote scope into a fresh tree and inspect it.
        verify_root, verify, verify_artifacts = self._download_remote_scope(context, work_dir, scope, "verify_remote")
        collected.extend(verify_artifacts)
        collected.extend(self._write_command_artifacts(result=verify, log_dir=log_dir, state_dir=state_dir, phase_name="verify_remote"))
        if verify.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify.returncode}", collected)

        snapshot = self._snapshot_files(verify_root)
        if f"{scope}/Allowed/allowed.txt" not in snapshot:
            return TestResult.fail_result(self.case_id, self.name, "Allowed content is missing remotely", collected)
        for forbidden in (f"{scope}/Blocked/blocked.txt", f"{scope}/Blocked/.nosync"):
            if forbidden in snapshot:
                return TestResult.fail_result(self.case_id, self.name, f".nosync-protected content was unexpectedly synchronised: {forbidden}", collected)
        return TestResult.pass_result(self.case_id, self.name, collected, {"root_name": scope})

View file

@ -0,0 +1,200 @@
from __future__ import annotations
import hashlib
import json
import os
import re
from pathlib import Path
from typing import Iterable
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.utils import (
command_to_string,
reset_directory,
run_command,
write_text_file,
)
# Filenames created inside each generated onedrive configuration directory.
CONFIG_FILE_NAME = "config"
SYNC_LIST_FILE_NAME = "sync_list"
class Wave1TestCaseBase(E2ETestCase):
"""
Shared helper base for Wave 1 E2E test cases.
"""
def _safe_run_id(self, context: E2EContext) -> str:
value = re.sub(r"[^A-Za-z0-9]+", "_", context.run_id).strip("_").lower()
return value or "run"
def _root_name(self, context: E2EContext) -> str:
return f"ZZ_E2E_TC{self.case_id}_{self._safe_run_id(context)}"
def _initialise_case_dirs(self, context: E2EContext) -> tuple[Path, Path, Path]:
case_work_dir = context.work_root / f"tc{self.case_id}"
case_log_dir = context.logs_dir / f"tc{self.case_id}"
case_state_dir = context.state_dir / f"tc{self.case_id}"
reset_directory(case_work_dir)
reset_directory(case_log_dir)
reset_directory(case_state_dir)
return case_work_dir, case_log_dir, case_state_dir
def _new_config_dir(self, context: E2EContext, case_work_dir: Path, name: str) -> Path:
config_dir = case_work_dir / f"conf-{name}"
reset_directory(config_dir)
context.bootstrap_config_dir(config_dir)
return config_dir
def _write_config(
self,
config_dir: Path,
*,
extra_lines: Iterable[str] | None = None,
sync_list_entries: Iterable[str] | None = None,
) -> tuple[Path, Path | None]:
config_path = config_dir / CONFIG_FILE_NAME
sync_list_path: Path | None = None
lines = [
f"# tc{self.case_id} generated config",
'bypass_data_preservation = "true"',
'monitor_interval = "5"',
]
if extra_lines:
lines.extend(list(extra_lines))
write_text_file(config_path, "\n".join(lines) + "\n")
if sync_list_entries is not None:
sync_list_path = config_dir / SYNC_LIST_FILE_NAME
write_text_file(sync_list_path, "\n".join(sync_list_entries) + "\n")
return config_path, sync_list_path
def _run_onedrive(
self,
context: E2EContext,
*,
sync_root: Path,
config_dir: Path,
extra_args: list[str] | None = None,
use_resync: bool = True,
use_resync_auth: bool = True,
):
command = [context.onedrive_bin, "--sync", "--verbose"]
if use_resync:
command.append("--resync")
if use_resync_auth:
command.append("--resync-auth")
command.extend(["--syncdir", str(sync_root), "--confdir", str(config_dir)])
if extra_args:
command.extend(extra_args)
context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
return run_command(command, cwd=context.repo_root)
def _write_command_artifacts(
self,
*,
result,
log_dir: Path,
state_dir: Path,
phase_name: str,
extra_metadata: dict[str, str | int | bool] | None = None,
) -> list[str]:
stdout_file = log_dir / f"{phase_name}_stdout.log"
stderr_file = log_dir / f"{phase_name}_stderr.log"
metadata_file = state_dir / f"{phase_name}_metadata.txt"
write_text_file(stdout_file, result.stdout)
write_text_file(stderr_file, result.stderr)
metadata = {
"phase": phase_name,
"command": command_to_string(result.command),
"returncode": result.returncode,
}
if extra_metadata:
metadata.update(extra_metadata)
lines = [f"{key}={value}" for key, value in metadata.items()]
write_text_file(metadata_file, "\n".join(lines) + "\n")
return [str(stdout_file), str(stderr_file), str(metadata_file)]
def _write_manifests(self, root: Path, state_dir: Path, prefix: str) -> list[str]:
manifest_file = state_dir / f"{prefix}_manifest.txt"
write_manifest(manifest_file, build_manifest(root))
return [str(manifest_file)]
def _write_json_artifact(self, path: Path, payload: object) -> str:
write_text_file(path, json.dumps(payload, indent=2, sort_keys=True) + "\n")
return str(path)
def _create_text_file(self, path: Path, content: str) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(content, encoding="utf-8")
def _create_binary_file(self, path: Path, size_bytes: int) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
chunk = os.urandom(min(size_bytes, 1024 * 1024))
with path.open("wb") as fp:
remaining = size_bytes
while remaining > 0:
to_write = chunk[: min(len(chunk), remaining)]
fp.write(to_write)
remaining -= len(to_write)
def _snapshot_files(self, root: Path) -> dict[str, str]:
result: dict[str, str] = {}
if not root.exists():
return result
for path in sorted(root.rglob("*")):
rel = path.relative_to(root).as_posix()
if path.is_symlink():
result[rel] = f"symlink->{os.readlink(path)}"
continue
if path.is_dir():
result[rel] = "dir"
continue
hasher = hashlib.sha256()
with path.open("rb") as fp:
while True:
chunk = fp.read(8192)
if not chunk:
break
hasher.update(chunk)
result[rel] = hasher.hexdigest()
return result
def _download_remote_scope(
self,
context: E2EContext,
case_work_dir: Path,
scope_root: str,
name: str,
*,
extra_config_lines: Iterable[str] | None = None,
extra_args: list[str] | None = None,
) -> tuple[Path, object, list[str]]:
verify_root = case_work_dir / f"verify-{name}"
reset_directory(verify_root)
config_dir = self._new_config_dir(context, case_work_dir, f"verify-{name}")
config_path, sync_list_path = self._write_config(
config_dir,
extra_lines=extra_config_lines,
sync_list_entries=[f"/{scope_root}"],
)
result = self._run_onedrive(
context,
sync_root=verify_root,
config_dir=config_dir,
extra_args=["--download-only"] + (extra_args or []),
)
artifacts = [str(config_path)]
if sync_list_path:
artifacts.append(str(sync_list_path))
return verify_root, result, artifacts