This commit is contained in:
abraunegg 2026-03-14 06:31:40 +00:00 committed by GitHub
commit dd5b7b63a2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
37 changed files with 4519 additions and 1 deletions

View file

@ -444,6 +444,7 @@ rsnapshot
rsv
rtud
rul
ruleset
runstatedir
runsvdir
Ruppe

156
.github/workflows/e2e-personal.yaml vendored Normal file
View file

@ -0,0 +1,156 @@
---
# E2E workflow (personal account): build the client, run the Python E2E
# harness against a personal OneDrive account, then post a results summary
# as a comment on the pull request associated with the pushed commit.
name: E2E Personal Account Testing

on:
  push:
    branches-ignore:
      - master
      - main

permissions:
  contents: read
  pull-requests: write
  issues: write

jobs:
  e2e_personal:
    runs-on: ubuntu-latest
    container: fedora:latest
    # Environment gates access to the REFRESH_TOKEN_PERSONAL secret.
    environment: onedrive-e2e
    steps:
      - uses: actions/checkout@v4

      - name: Install Dependencies
        run: |
          dnf -y update
          dnf -y group install development-tools
          dnf -y install python3 ldc libcurl-devel sqlite-devel dbus-devel jq

      - name: Build + local install prefix
        run: |
          ./configure --prefix="$PWD/.ci/prefix"
          make -j"$(nproc)"
          make install
          "$PWD/.ci/prefix/bin/onedrive" --version

      - name: Prepare isolated HOME
        # Use a throwaway HOME so the client's config/cache never leak into
        # (or pick up state from) the runner's real home directory.
        run: |
          set -euo pipefail
          export HOME="$RUNNER_TEMP/home-personal"
          echo "HOME=$HOME" >> "$GITHUB_ENV"
          echo "XDG_CONFIG_HOME=$HOME/.config" >> "$GITHUB_ENV"
          echo "XDG_CACHE_HOME=$HOME/.cache" >> "$GITHUB_ENV"
          mkdir -p "$HOME"

      - name: Inject refresh token into onedrive config
        env:
          REFRESH_TOKEN_PERSONAL: ${{ secrets.REFRESH_TOKEN_PERSONAL }}
        run: |
          set -euo pipefail
          mkdir -p "$XDG_CONFIG_HOME/onedrive"
          # umask 077 + chmod 600: token file must never be group/world readable.
          umask 077
          printf "%s" "$REFRESH_TOKEN_PERSONAL" > "$XDG_CONFIG_HOME/onedrive/refresh_token"
          chmod 600 "$XDG_CONFIG_HOME/onedrive/refresh_token"

      - name: Run E2E harness
        env:
          ONEDRIVE_BIN: ${{ github.workspace }}/.ci/prefix/bin/onedrive
          E2E_TARGET: personal
          RUN_ID: ${{ github.run_id }}
        run: |
          python3 ci/e2e/run.py

      - name: Upload E2E artefacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: e2e-personal
          path: ci/e2e/out/**

  pr_comment:
    name: Post PR summary comment
    needs: [e2e_personal]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - uses: actions/checkout@v4

      - name: Download artefact
        uses: actions/download-artifact@v4
        # Do not hard-fail the summary job when the artifact was never
        # uploaded (e.g. the e2e job was cancelled before the upload step);
        # the summary step below already handles a missing results.json.
        continue-on-error: true
        with:
          name: e2e-personal
          path: artifacts/e2e-personal

      - name: Build markdown summary
        id: summary
        run: |
          set -euo pipefail
          f="$(find artifacts/e2e-personal -name results.json -type f | head -n 1 || true)"
          if [ -z "$f" ] || [ ! -f "$f" ]; then
            echo "md=⚠️ E2E ran but results.json was not found." >> "$GITHUB_OUTPUT"
            exit 0
          fi
          target=$(jq -r '.target // "personal"' "$f")
          total=$(jq -r '.cases | length' "$f")
          passed=$(jq -r '[.cases[] | select(.status=="pass")] | length' "$f")
          failed=$(jq -r '[.cases[] | select(.status=="fail")] | length' "$f")
          failures=$(jq -r '.cases[]
            | select(.status=="fail")
            | "- Test Case \(.id // "????"): \(.name) — \(.reason // "no reason provided")"' "$f" || true)
          md="## ${target^} Account Testing\n"
          md+="**${total}** Test Cases Run \n"
          md+="**${passed}** Test Cases Passed \n"
          md+="**${failed}** Test Cases Failed \n\n"
          if [ "$failed" -gt 0 ] && [ -n "$failures" ]; then
            md+="### ${target^} Account Test Failures\n"
            md+="$failures\n"
          fi
          # Multiline output: use a delimiter unlikely to appear in test names.
          echo "md<<MD_SUMMARY_EOF" >> "$GITHUB_OUTPUT"
          echo -e "$md" >> "$GITHUB_OUTPUT"
          echo "MD_SUMMARY_EOF" >> "$GITHUB_OUTPUT"

      - name: Find PR associated with this commit
        id: pr
        uses: actions/github-script@v7
        with:
          script: |
            const { owner, repo } = context.repo;
            const sha = context.sha;
            const prs = await github.rest.repos.listPullRequestsAssociatedWithCommit({
              owner, repo, commit_sha: sha
            });
            if (!prs.data.length) {
              core.setOutput("found", "false");
              return;
            }
            core.setOutput("found", "true");
            core.setOutput("number", String(prs.data[0].number));

      - name: Post PR comment
        if: steps.pr.outputs.found == 'true'
        uses: actions/github-script@v7
        env:
          COMMENT_MD: ${{ steps.summary.outputs.md }}
          # Pass step outputs via env instead of interpolating "${{ ... }}"
          # directly into the script body (script-injection hardening).
          PR_NUMBER: ${{ steps.pr.outputs.number }}
        with:
          script: |
            const { owner, repo } = context.repo;
            const issue_number = Number(process.env.PR_NUMBER);
            const md = process.env.COMMENT_MD || "⚠️ No summary text produced.";
            await github.rest.issues.createComment({
              owner,
              repo,
              issue_number,
              body: md
            });

View file

@ -58,7 +58,7 @@ endif
system_unit_files = contrib/systemd/onedrive@.service
user_unit_files = contrib/systemd/onedrive.service
# DOCFILES: documentation installed with the package. Keep a single
# assignment — a duplicate definition silently shadows the first one in Make.
DOCFILES = readme.md config LICENSE changelog.md docs/advanced-usage.md docs/application-config-options.md docs/application-security.md docs/business-shared-items.md docs/client-architecture.md docs/contributing.md docs/docker.md docs/install.md docs/national-cloud-deployments.md docs/podman.md docs/privacy-policy.md docs/sharepoint-libraries.md docs/terms-of-service.md docs/ubuntu-package-install.md docs/usage.md docs/known-issues.md docs/webhooks.md docs/end_to_end_testing.md
ifneq ("$(wildcard /etc/redhat-release)","")
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux|CentOS|AlmaLinux)" | wc -l)

View file

@ -0,0 +1,3 @@
"""
E2E framework package for GitHub Actions based validation.
"""

23
ci/e2e/framework/base.py Normal file
View file

@ -0,0 +1,23 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from framework.context import E2EContext
from framework.result import TestResult
class E2ETestCase(ABC):
    """
    Abstract contract that every E2E test case implements.

    Subclasses set the identifying class attributes and provide a
    ``run`` implementation that performs the scenario end to end.
    """

    # Identifying metadata, overridden by each concrete test case.
    case_id: str = ""
    name: str = ""
    description: str = ""

    @abstractmethod
    def run(self, context: E2EContext) -> TestResult:
        """
        Run the scenario against the given runtime context and report
        the outcome as a structured TestResult.
        """
        raise NotImplementedError

103
ci/e2e/framework/context.py Normal file
View file

@ -0,0 +1,103 @@
from __future__ import annotations
import os
import shutil
from dataclasses import dataclass
from pathlib import Path
from framework.utils import ensure_directory, timestamp_now, write_text_file_append
@dataclass
class E2EContext:
    """
    Runtime context for the E2E framework.

    Built once per harness invocation (see ``from_environment``) and passed
    to every test case. Holds the binary under test, target/run identifiers,
    and the directory layout used for outputs, logs, state and scratch work.
    """
    # Absolute path to the onedrive binary under test (from ONEDRIVE_BIN).
    onedrive_bin: str
    # Account flavour being exercised, e.g. "personal" (from E2E_TARGET).
    e2e_target: str
    # CI run identifier, used to namespace remote fixtures (from RUN_ID).
    run_id: str
    # Repository checkout root (the process working directory at start-up).
    repo_root: Path
    # ci/e2e/out — everything uploaded as a build artefact lives under here.
    out_dir: Path
    # out_dir/logs — per-test-case stdout/stderr captures.
    logs_dir: Path
    # out_dir/state — per-test-case manifests and metadata.
    state_dir: Path
    # Scratch area under RUNNER_TEMP for sync roots and config dirs.
    work_root: Path

    @classmethod
    def from_environment(cls) -> "E2EContext":
        """
        Construct a context from the process environment.

        Requires ONEDRIVE_BIN, E2E_TARGET and RUN_ID to be set and non-blank;
        raises RuntimeError otherwise. RUNNER_TEMP defaults to /tmp.
        """
        onedrive_bin = os.environ.get("ONEDRIVE_BIN", "").strip()
        e2e_target = os.environ.get("E2E_TARGET", "").strip()
        run_id = os.environ.get("RUN_ID", "").strip()
        # Fail fast with a precise message for each missing variable.
        if not onedrive_bin:
            raise RuntimeError("Environment variable ONEDRIVE_BIN must be set")
        if not e2e_target:
            raise RuntimeError("Environment variable E2E_TARGET must be set")
        if not run_id:
            raise RuntimeError("Environment variable RUN_ID must be set")
        # The harness is expected to be launched from the repository root.
        repo_root = Path.cwd()
        out_dir = repo_root / "ci" / "e2e" / "out"
        logs_dir = out_dir / "logs"
        state_dir = out_dir / "state"
        runner_temp = os.environ.get("RUNNER_TEMP", "/tmp").strip()
        # One scratch root per target so different targets cannot collide.
        work_root = Path(runner_temp) / f"onedrive-e2e-{e2e_target}"
        return cls(
            onedrive_bin=onedrive_bin,
            e2e_target=e2e_target,
            run_id=run_id,
            repo_root=repo_root,
            out_dir=out_dir,
            logs_dir=logs_dir,
            state_dir=state_dir,
            work_root=work_root,
        )

    @property
    def master_log_file(self) -> Path:
        # Single append-only log for the whole harness run (see log()).
        return self.out_dir / "run.log"

    @property
    def default_onedrive_config_dir(self) -> Path:
        """
        Resolve the client's default config dir, honouring XDG_CONFIG_HOME
        and falling back to ~/.config/onedrive. Raises RuntimeError when
        neither XDG_CONFIG_HOME nor HOME is set.
        """
        xdg_config_home = os.environ.get("XDG_CONFIG_HOME", "").strip()
        if xdg_config_home:
            return Path(xdg_config_home) / "onedrive"
        home = os.environ.get("HOME", "").strip()
        if not home:
            raise RuntimeError("Neither XDG_CONFIG_HOME nor HOME is set")
        return Path(home) / ".config" / "onedrive"

    @property
    def default_refresh_token_path(self) -> Path:
        # Token injected by the workflow before the harness starts.
        return self.default_onedrive_config_dir / "refresh_token"

    def ensure_refresh_token_available(self) -> None:
        """Raise RuntimeError unless the injected refresh_token file exists."""
        if not self.default_refresh_token_path.is_file():
            raise RuntimeError(
                f"Required refresh_token file not found at: {self.default_refresh_token_path}"
            )

    def bootstrap_config_dir(self, config_dir: Path) -> Path:
        """
        Copy the existing refresh_token into a per-test/per-scenario config dir.
        """
        self.ensure_refresh_token_available()
        ensure_directory(config_dir)
        source = self.default_refresh_token_path
        destination = config_dir / "refresh_token"
        shutil.copy2(source, destination)
        # Keep the copied credential private to the runner user.
        os.chmod(destination, 0o600)
        return destination

    def log(self, message: str) -> None:
        """Timestamp a message, echo it, and append it to the master log."""
        ensure_directory(self.out_dir)
        line = f"[{timestamp_now()}] {message}\n"
        print(line, end="")
        write_text_file_append(self.master_log_file, line)

View file

@ -0,0 +1,26 @@
from __future__ import annotations
from pathlib import Path
def build_manifest(root: Path) -> list[str]:
    """
    Build a deterministic manifest of all files and directories beneath root.

    Returns paths relative to root in POSIX form, sorted; an empty list when
    root does not exist.
    """
    if not root.exists():
        return []
    # sorted() over the recursive glob makes the listing order deterministic.
    return [item.relative_to(root).as_posix() for item in sorted(root.rglob("*"))]
def write_manifest(path: Path, entries: list[str]) -> None:
    """
    Write one manifest entry per line to path (UTF-8), creating parent
    directories as needed. A non-empty manifest ends with a newline; an
    empty manifest produces an empty file.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    if entries:
        payload = "\n".join(entries) + "\n"
    else:
        payload = ""
    path.write_text(payload, encoding="utf-8")

View file

@ -0,0 +1,50 @@
from __future__ import annotations
from dataclasses import dataclass, field
@dataclass
class TestResult:
    """
    Outcome record produced by a single E2E test case.

    ``status`` is "pass" or "fail"; ``reason`` is only meaningful for
    failures. ``artifacts`` lists file paths captured for upload and
    ``details`` carries free-form structured diagnostics.
    """
    case_id: str
    name: str
    status: str
    reason: str = ""
    artifacts: list[str] = field(default_factory=list)
    details: dict = field(default_factory=dict)

    @staticmethod
    def pass_result(
        case_id: str,
        name: str,
        artifacts: list[str] | None = None,
        details: dict | None = None,
    ) -> "TestResult":
        """Convenience constructor for a passing result."""
        return TestResult(
            case_id=case_id,
            name=name,
            status="pass",
            reason="",
            artifacts=artifacts or [],
            details=details or {},
        )

    @staticmethod
    def fail_result(
        case_id: str,
        name: str,
        reason: str,
        artifacts: list[str] | None = None,
        details: dict | None = None,
    ) -> "TestResult":
        """Convenience constructor for a failing result with a reason."""
        return TestResult(
            case_id=case_id,
            name=name,
            status="fail",
            reason=reason,
            artifacts=artifacts or [],
            details=details or {},
        )

80
ci/e2e/framework/utils.py Normal file
View file

@ -0,0 +1,80 @@
from __future__ import annotations
import os
import shutil
import subprocess
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
@dataclass
class CommandResult:
    """Captured outcome of one subprocess invocation."""
    # The argv that was executed, exactly as passed to run_command().
    command: list[str]
    returncode: int
    stdout: str
    stderr: str

    @property
    def ok(self) -> bool:
        """True when the process exited with status zero."""
        return self.returncode == 0
def timestamp_now() -> str:
    """Current UTC wall-clock time, formatted for log lines."""
    return format(datetime.now(timezone.utc), "%Y-%m-%d %H:%M:%S UTC")
def ensure_directory(path: Path) -> None:
    """Create path (including parents) if missing; no-op when it exists."""
    path.mkdir(parents=True, exist_ok=True)
def reset_directory(path: Path) -> None:
    """Recreate path as an empty directory, removing any prior contents."""
    if path.exists():
        # Wipe everything below path so stale state cannot leak between runs.
        shutil.rmtree(path)
    path.mkdir(parents=True, exist_ok=True)
def write_text_file(path: Path, content: str) -> None:
    """Write content to path as UTF-8, creating parent directories first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content, encoding="utf-8")
def write_text_file_append(path: Path, content: str) -> None:
    """Append content to path as UTF-8, creating parent directories first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("a", encoding="utf-8") as handle:
        handle.write(content)
def run_command(
    command: list[str],
    cwd: Path | None = None,
    env: dict[str, str] | None = None,
    input_text: str | None = None,
) -> CommandResult:
    """
    Run a subprocess, capturing stdout/stderr, without raising on failure.

    The child environment is the current process environment overlaid with
    any entries from ``env``. Output is decoded as UTF-8 with undecodable
    bytes replaced. The caller inspects the returned CommandResult.
    """
    child_env = dict(os.environ)
    if env:
        child_env.update(env)
    proc = subprocess.run(
        command,
        cwd=str(cwd) if cwd else None,
        env=child_env,
        input=input_text,
        capture_output=True,
        text=True,
        encoding="utf-8",
        errors="replace",
        check=False,  # non-zero exit is reported, never raised
    )
    return CommandResult(
        command=command,
        returncode=proc.returncode,
        stdout=proc.stdout,
        stderr=proc.stderr,
    )
def command_to_string(command: list[str]) -> str:
    """
    Render an argv list as a single shell-safe string for logging.

    Uses shlex.join so arguments containing whitespace or shell
    metacharacters stay unambiguous and copy-pastable; a plain
    " ".join would silently merge such arguments in the logs.
    Output is unchanged for simple argument lists.
    """
    import shlex  # local import keeps this logging helper self-contained
    return shlex.join(command)

170
ci/e2e/run.py Normal file
View file

@ -0,0 +1,170 @@
#!/usr/bin/env python3
from __future__ import annotations
import json
import sys
import traceback
from pathlib import Path
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import ensure_directory, write_text_file
from testcases.tc0001_basic_resync import TestCase0001BasicResync
from testcases.tc0002_sync_list_validation import TestCase0002SyncListValidation
from testcases.tc0003_dry_run_validation import TestCase0003DryRunValidation
from testcases.tc0004_single_directory_sync import TestCase0004SingleDirectorySync
from testcases.tc0005_force_sync_override import TestCase0005ForceSyncOverride
from testcases.tc0006_download_only import TestCase0006DownloadOnly
from testcases.tc0007_download_only_cleanup_local_files import TestCase0007DownloadOnlyCleanupLocalFiles
from testcases.tc0008_upload_only import TestCase0008UploadOnly
from testcases.tc0009_upload_only_no_remote_delete import TestCase0009UploadOnlyNoRemoteDelete
from testcases.tc0010_upload_only_remove_source_files import TestCase0010UploadOnlyRemoveSourceFiles
from testcases.tc0011_skip_file_validation import TestCase0011SkipFileValidation
from testcases.tc0012_skip_dir_validation import TestCase0012SkipDirValidation
from testcases.tc0013_skip_dotfiles_validation import TestCase0013SkipDotfilesValidation
from testcases.tc0014_skip_size_validation import TestCase0014SkipSizeValidation
from testcases.tc0015_skip_symlinks_validation import TestCase0015SkipSymlinksValidation
from testcases.tc0016_check_nosync_validation import TestCase0016CheckNosyncValidation
from testcases.tc0017_check_nomount_validation import TestCase0017CheckNomountValidation
from testcases.tc0018_recycle_bin_validation import TestCase0018RecycleBinValidation
from testcases.tc0019_logging_and_running_config import TestCase0019LoggingAndRunningConfig
from testcases.tc0020_monitor_mode_validation import TestCase0020MonitorModeValidation
from testcases.tc0021_resumable_transfers_validation import TestCase0021ResumableTransfersValidation
from testcases.tc0022_local_first_validation import TestCase0022LocalFirstValidation
from testcases.tc0023_bypass_data_preservation_validation import TestCase0023BypassDataPreservationValidation
from testcases.tc0024_big_delete_safeguard_validation import TestCase0024BigDeleteSafeguardValidation
def build_test_suite() -> list:
    """
    Return the ordered list of E2E test cases to execute.
    Add future test cases here in the required execution order.
    """
    # Order is significant: TC0001 acts as a baseline smoke test, and the
    # runner executes the suite strictly in this list order.
    return [
        TestCase0001BasicResync(),
        TestCase0002SyncListValidation(),
        TestCase0003DryRunValidation(),
        TestCase0004SingleDirectorySync(),
        TestCase0005ForceSyncOverride(),
        TestCase0006DownloadOnly(),
        TestCase0007DownloadOnlyCleanupLocalFiles(),
        TestCase0008UploadOnly(),
        TestCase0009UploadOnlyNoRemoteDelete(),
        TestCase0010UploadOnlyRemoveSourceFiles(),
        TestCase0011SkipFileValidation(),
        TestCase0012SkipDirValidation(),
        TestCase0013SkipDotfilesValidation(),
        TestCase0014SkipSizeValidation(),
        TestCase0015SkipSymlinksValidation(),
        TestCase0016CheckNosyncValidation(),
        TestCase0017CheckNomountValidation(),
        TestCase0018RecycleBinValidation(),
        TestCase0019LoggingAndRunningConfig(),
        TestCase0020MonitorModeValidation(),
        TestCase0021ResumableTransfersValidation(),
        TestCase0022LocalFirstValidation(),
        TestCase0023BypassDataPreservationValidation(),
        TestCase0024BigDeleteSafeguardValidation(),
    ]
def result_to_actions_case(result: TestResult) -> dict:
    """
    Convert the internal TestResult into the JSON structure expected by the
    GitHub Actions workflow summary/reporting logic.

    "id", "name" and "status" are always present; "reason", "artifacts" and
    "details" are emitted only when non-empty.
    """
    case = {
        "id": result.case_id,
        "name": result.name,
        "status": result.status,
    }
    # Optional fields, appended in a fixed order so the JSON stays stable.
    for key, value in (
        ("reason", result.reason),
        ("artifacts", result.artifacts),
        ("details", result.details),
    ):
        if value:
            case[key] = value
    return case
def main() -> int:
    """
    Run the full E2E suite and write results.json for the CI workflow.

    Returns 1 if any case failed (or raised), else 0. A raised exception in
    one case is converted to a failing result so the rest of the suite and
    the results file are still produced.
    """
    context = E2EContext.from_environment()
    # Create the artefact directory layout up front.
    ensure_directory(context.out_dir)
    ensure_directory(context.logs_dir)
    ensure_directory(context.state_dir)
    ensure_directory(context.work_root)
    context.log(
        f"Initialising E2E framework for target='{context.e2e_target}', "
        f"run_id='{context.run_id}'"
    )
    cases = []
    failed = False
    for testcase in build_test_suite():
        context.log(f"Starting test case {testcase.case_id}: {testcase.name}")
        try:
            result = testcase.run(context)
            # Guard against copy/paste errors in test case implementations.
            if result.case_id != testcase.case_id:
                raise RuntimeError(
                    f"Test case returned mismatched case_id: "
                    f"expected '{testcase.case_id}', got '{result.case_id}'"
                )
            cases.append(result_to_actions_case(result))
            if result.status != "pass":
                failed = True
                context.log(
                    f"Test case {testcase.case_id} FAILED: {result.reason or 'no reason provided'}"
                )
            else:
                context.log(f"Test case {testcase.case_id} PASSED")
        except Exception as exc:
            # An unhandled exception fails the case but not the whole run:
            # capture the traceback as an artefact and continue the suite.
            failed = True
            tb = traceback.format_exc()
            context.log(f"Unhandled exception in test case {testcase.case_id}: {exc}")
            context.log(tb)
            error_log = context.logs_dir / f"{testcase.case_id}_exception.log"
            write_text_file(error_log, tb)
            failure_result = TestResult(
                case_id=testcase.case_id,
                name=testcase.name,
                status="fail",
                reason=f"Unhandled exception: {exc}",
                artifacts=[str(error_log)],
                details={
                    "exception_type": type(exc).__name__,
                },
            )
            cases.append(result_to_actions_case(failure_result))
    # results.json is consumed by the workflow's "Build markdown summary" step.
    results = {
        "target": context.e2e_target,
        "run_id": context.run_id,
        "cases": cases,
    }
    results_file = context.out_dir / "results.json"
    results_json = json.dumps(results, indent=2, sort_keys=False)
    write_text_file(results_file, results_json)
    context.log(f"Wrote results to {results_file}")
    return 1 if failed else 0


if __name__ == "__main__":
    sys.exit(main())

View file

@ -0,0 +1,3 @@
"""
E2E test case package.
"""

View file

@ -0,0 +1,92 @@
from __future__ import annotations
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0001BasicResync(E2ETestCase):
    """
    Test Case 0001: basic resync
    Purpose:
    - validate that the E2E framework can invoke the client
    - validate that the configured environment is sufficient to run a basic sync
    - provide a simple baseline smoke test before more advanced E2E scenarios
    """
    case_id = "0001"
    name = "basic resync"
    description = "Run a basic --sync --resync --resync-auth operation and capture the outcome"

    def run(self, context: E2EContext) -> TestResult:
        # Fresh per-case work/log/state directories so reruns start clean.
        case_work_dir = context.work_root / f"tc{self.case_id}"
        case_log_dir = context.logs_dir / f"tc{self.case_id}"
        state_dir = context.state_dir / f"tc{self.case_id}"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        # Fail early (with a clear message) if the token was not injected.
        context.ensure_refresh_token_available()
        stdout_file = case_log_dir / "stdout.log"
        stderr_file = case_log_dir / "stderr.log"
        metadata_file = state_dir / "metadata.txt"
        # --resync-auth pre-approves the --resync confirmation prompt.
        command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--resync",
            "--resync-auth",
        ]
        context.log(
            f"Executing Test Case {self.case_id}: {command_to_string(command)}"
        )
        result = run_command(command, cwd=context.repo_root)
        # Persist captured output as artefacts regardless of pass/fail.
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        metadata_lines = [
            f"case_id={self.case_id}",
            f"name={self.name}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
        ]
        write_text_file(metadata_file, "\n".join(metadata_lines) + "\n")
        artifacts = [
            str(stdout_file),
            str(stderr_file),
            str(metadata_file),
        ]
        details = {
            "command": command,
            "returncode": result.returncode,
        }
        # Pass criterion for this smoke test: the client exited cleanly.
        if result.returncode != 0:
            reason = f"onedrive exited with non-zero status {result.returncode}"
            return TestResult.fail_result(
                case_id=self.case_id,
                name=self.name,
                reason=reason,
                artifacts=artifacts,
                details=details,
            )
        return TestResult.pass_result(
            case_id=self.case_id,
            name=self.name,
            artifacts=artifacts,
            details=details,
        )

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,141 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0003DryRunValidation(E2ETestCase):
    """
    Test Case 0003: --dry-run must change nothing.

    Runs a sync with --dry-run, then checks (a) the local tree is
    byte-layout identical (manifest comparison) and (b) a fresh
    download-only pass shows none of this case's fixture reached the
    remote account.
    """
    case_id = "0003"
    name = "dry-run validation"
    description = "Validate that --dry-run performs no changes locally or remotely"

    def _root_name(self, context: E2EContext) -> str:
        # Unique remote root per run/process so concurrent runs cannot collide.
        return f"ZZ_E2E_TC0003_{context.run_id}_{os.getpid()}"

    def _write_config(self, config_path: Path) -> None:
        # Minimal client config for this scenario.
        write_text_file(config_path, "# tc0003 config\nbypass_data_preservation = \"true\"\n")

    def _bootstrap_confdir(self, context: E2EContext, confdir: Path) -> Path:
        # Copy the injected refresh token plus the scenario config into confdir.
        copied_refresh_token = context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        return copied_refresh_token

    def _create_local_fixture(self, sync_root: Path, root_name: str) -> None:
        # Local files that --dry-run must NOT upload.
        reset_directory(sync_root)
        write_text_file(sync_root / root_name / "Upload" / "file1.txt", "tc0003 file1\n")
        write_text_file(sync_root / root_name / "Upload" / "file2.bin", "tc0003 file2\n")
        write_text_file(sync_root / root_name / "Notes" / "draft.md", "# tc0003\n")

    def run(self, context: E2EContext) -> TestResult:
        case_work_dir = context.work_root / "tc0003"
        case_log_dir = context.logs_dir / "tc0003"
        state_dir = context.state_dir / "tc0003"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        # Two independent sync roots/config dirs: one for the dry-run itself,
        # one for the later download-only verification pass.
        sync_root = case_work_dir / "syncroot"
        seed_confdir = case_work_dir / "conf-seed"
        verify_root = case_work_dir / "verifyroot"
        verify_confdir = case_work_dir / "conf-verify"
        root_name = self._root_name(context)
        self._create_local_fixture(sync_root, root_name)
        copied_refresh_token = self._bootstrap_confdir(context, seed_confdir)
        self._bootstrap_confdir(context, verify_confdir)
        # Snapshot the local tree before the dry-run for later comparison.
        before_manifest = build_manifest(sync_root)
        before_manifest_file = state_dir / "before_manifest.txt"
        after_manifest_file = state_dir / "after_manifest.txt"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        stdout_file = case_log_dir / "seed_stdout.log"
        stderr_file = case_log_dir / "seed_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        write_manifest(before_manifest_file, before_manifest)
        command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--dry-run",
            "--resync",
            "--resync-auth",
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(seed_confdir),
        ]
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Local check: the tree must be unchanged after the dry-run.
        after_manifest = build_manifest(sync_root)
        write_manifest(after_manifest_file, after_manifest)
        # Remote check: download everything into a clean root and make sure
        # none of this case's fixture content appears there.
        verify_command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(verify_confdir),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        metadata_lines = [
            f"case_id={self.case_id}",
            f"name={self.name}",
            f"root_name={root_name}",
            f"copied_refresh_token={copied_refresh_token}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
            f"verify_command={command_to_string(verify_command)}",
            f"verify_returncode={verify_result.returncode}",
        ]
        write_text_file(metadata_file, "\n".join(metadata_lines) + "\n")
        artifacts = [
            str(stdout_file),
            str(stderr_file),
            str(verify_stdout),
            str(verify_stderr),
            str(before_manifest_file),
            str(after_manifest_file),
            str(remote_manifest_file),
            str(metadata_file),
        ]
        details = {
            "command": command,
            "returncode": result.returncode,
            "verify_command": verify_command,
            "verify_returncode": verify_result.returncode,
            "root_name": root_name,
        }
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if before_manifest != after_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Local filesystem changed during --dry-run", artifacts, details)
        # Match the root itself or anything beneath it in the remote listing.
        if any(entry == root_name or entry.startswith(root_name + "/") for entry in remote_manifest):
            return TestResult.fail_result(self.case_id, self.name, f"Dry-run unexpectedly synchronised remote content: {root_name}", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,129 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0004SingleDirectorySync(E2ETestCase):
    """
    Test Case 0004: --single-directory scope.

    Creates a target and a sibling "other" directory locally, syncs with
    --single-directory on the target, then verifies via a download-only
    pass that the target reached the remote account and the sibling did not.
    """
    case_id = "0004"
    name = "single-directory synchronisation"
    description = "Validate that only the nominated subtree is synchronised"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0004 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        case_work_dir = context.work_root / "tc0004"
        case_log_dir = context.logs_dir / "tc0004"
        state_dir = context.state_dir / "tc0004"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_confdir = case_work_dir / "conf-verify"
        # Unique names per run/process; "other" must stay remote-absent.
        target_dir = f"ZZ_E2E_TC0004_TARGET_{context.run_id}_{os.getpid()}"
        other_dir = f"ZZ_E2E_TC0004_OTHER_{context.run_id}_{os.getpid()}"
        write_text_file(sync_root / target_dir / "keep.txt", "target\n")
        write_text_file(sync_root / target_dir / "nested" / "inside.md", "nested\n")
        write_text_file(sync_root / other_dir / "skip.txt", "other\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        context.bootstrap_config_dir(verify_confdir)
        self._write_config(verify_confdir / "config")
        stdout_file = case_log_dir / "single_directory_stdout.log"
        stderr_file = case_log_dir / "single_directory_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        local_manifest_file = state_dir / "local_after_manifest.txt"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "single_directory_metadata.txt"
        command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--single-directory",
            target_dir,
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(confdir),
        ]
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        write_manifest(local_manifest_file, build_manifest(sync_root))
        # Independent download-only pass to observe the remote state.
        verify_command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(verify_confdir),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        metadata = [
            f"case_id={self.case_id}",
            f"target_dir={target_dir}",
            f"other_dir={other_dir}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
            f"verify_command={command_to_string(verify_command)}",
            f"verify_returncode={verify_result.returncode}",
        ]
        write_text_file(metadata_file, "\n".join(metadata) + "\n")
        artifacts = [
            str(stdout_file),
            str(stderr_file),
            str(verify_stdout),
            str(verify_stderr),
            str(local_manifest_file),
            str(remote_manifest_file),
            str(metadata_file),
        ]
        details = {
            "command": command,
            "returncode": result.returncode,
            "verify_returncode": verify_result.returncode,
            "target_dir": target_dir,
            "other_dir": other_dir,
        }
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--single-directory sync failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The target subtree must be present remotely; the sibling must not.
        if not any(e == target_dir or e.startswith(target_dir + "/") for e in remote_manifest):
            return TestResult.fail_result(self.case_id, self.name, f"Target directory was not synchronised: {target_dir}", artifacts, details)
        if any(e == other_dir or e.startswith(other_dir + "/") for e in remote_manifest):
            return TestResult.fail_result(self.case_id, self.name, f"Non-target directory was unexpectedly synchronised: {other_dir}", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,103 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0005ForceSyncOverride(E2ETestCase):
    """
    Test Case 0005: --force-sync overrides skip_dir.

    Configures skip_dir to block a directory, then syncs that directory via
    --single-directory + --force-sync (answering the confirmation prompt
    with "Y"), and verifies via a download-only pass that the blocked file
    did reach the remote account.
    """
    case_id = "0005"
    name = "force-sync override"
    description = "Validate that --force-sync overrides skip_dir for blocked single-directory sync"

    def _write_config(self, config_path: Path, blocked_dir: str) -> None:
        # The seed config deliberately blocks blocked_dir via skip_dir.
        write_text_file(config_path, f"# tc0005 config\nbypass_data_preservation = \"true\"\nskip_dir = \"{blocked_dir}\"\n")

    def run(self, context: E2EContext) -> TestResult:
        case_work_dir = context.work_root / "tc0005"
        case_log_dir = context.logs_dir / "tc0005"
        state_dir = context.state_dir / "tc0005"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-seed"
        verify_root = case_work_dir / "verifyroot"
        verify_confdir = case_work_dir / "conf-verify"
        blocked_dir = f"ZZ_E2E_TC0005_BLOCKED_{context.run_id}_{os.getpid()}"
        write_text_file(sync_root / blocked_dir / "allowed_via_force.txt", "force\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", blocked_dir)
        context.bootstrap_config_dir(verify_confdir)
        # The verify config has no skip_dir so it can see everything remote.
        write_text_file(verify_confdir / "config", "# tc0005 verify\nbypass_data_preservation = \"true\"\n")
        stdout_file = case_log_dir / "seed_stdout.log"
        stderr_file = case_log_dir / "seed_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "seed_metadata.txt"
        command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--single-directory",
            blocked_dir,
            "--force-sync",
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(confdir),
        ]
        # input_text answers the --force-sync interactive confirmation prompt.
        result = run_command(command, cwd=context.repo_root, input_text="Y\n")
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        verify_command = [
            context.onedrive_bin,
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(verify_confdir),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        write_text_file(metadata_file, "\n".join([
            f"blocked_dir={blocked_dir}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
            f"verify_returncode={verify_result.returncode}",
        ]) + "\n")
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"command": command, "returncode": result.returncode, "verify_returncode": verify_result.returncode, "blocked_dir": blocked_dir}
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Blocked single-directory sync with --force-sync failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The overridden (previously blocked) file must now exist remotely.
        if f"{blocked_dir}/allowed_via_force.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, f"--force-sync did not synchronise blocked path: {blocked_dir}/allowed_via_force.txt", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0006DownloadOnly(E2ETestCase):
    """Seed remote content from one client, then confirm that a second,
    independent client running --download-only retrieves all of it."""

    case_id = "0006"
    name = "download-only behaviour"
    description = "Validate that download-only populates local content from remote data"

    def _write_config(self, config_path: Path) -> None:
        # Both clients bypass data preservation so repeated runs stay clean.
        write_text_file(config_path, "# tc0006 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        # Fresh per-case scratch, log and state directories.
        work_dir = context.work_root / "tc0006"
        log_dir = context.logs_dir / "tc0006"
        state_dir = context.state_dir / "tc0006"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        # Separate sync roots and config dirs for the seeding and downloading clients.
        seed_root = work_dir / "seedroot"
        seed_conf = work_dir / "conf-seed"
        download_root = work_dir / "downloadroot"
        download_conf = work_dir / "conf-download"
        root_name = f"ZZ_E2E_TC0006_{context.run_id}_{os.getpid()}"

        write_text_file(seed_root / root_name / "remote.txt", "remote\n")
        write_text_file(seed_root / root_name / "subdir" / "nested.txt", "nested\n")
        context.bootstrap_config_dir(seed_conf)
        self._write_config(seed_conf / "config")
        context.bootstrap_config_dir(download_conf)
        self._write_config(download_conf / "config")

        seed_stdout = log_dir / "seed_stdout.log"
        seed_stderr = log_dir / "seed_stderr.log"
        dl_stdout = log_dir / "download_stdout.log"
        dl_stderr = log_dir / "download_stderr.log"
        local_manifest_file = state_dir / "download_manifest.txt"
        metadata_file = state_dir / "seed_metadata.txt"

        # Step 1: push the seeded tree to the remote account.
        seed_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--syncdir", str(seed_root), "--confdir", str(seed_conf),
        ]
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)

        # Step 2: pull everything down into an empty root via --download-only.
        download_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--resync", "--resync-auth",
            "--syncdir", str(download_root), "--confdir", str(download_conf),
        ]
        download_result = run_command(download_command, cwd=context.repo_root)
        write_text_file(dl_stdout, download_result.stdout)
        write_text_file(dl_stderr, download_result.stderr)
        local_manifest = build_manifest(download_root)
        write_manifest(local_manifest_file, local_manifest)

        write_text_file(metadata_file, "\n".join([
            f"root_name={root_name}",
            f"seed_command={command_to_string(seed_command)}",
            f"seed_returncode={seed_result.returncode}",
            f"download_command={command_to_string(download_command)}",
            f"download_returncode={download_result.returncode}",
        ]) + "\n")

        artifacts = [str(seed_stdout), str(seed_stderr), str(dl_stdout), str(dl_stderr), str(local_manifest_file), str(metadata_file)]
        details = {"seed_returncode": seed_result.returncode, "download_returncode": download_result.returncode, "root_name": root_name}

        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts, details)
        if download_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--download-only failed with status {download_result.returncode}", artifacts, details)

        # Every seeded entry (dirs and files) must appear in the downloaded manifest.
        wanted = [root_name, f"{root_name}/remote.txt", f"{root_name}/subdir", f"{root_name}/subdir/nested.txt"]
        missing = [entry for entry in wanted if entry not in local_manifest]
        if missing:
            return TestResult.fail_result(self.case_id, self.name, "Downloaded manifest missing expected content: " + ", ".join(missing), artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0007DownloadOnlyCleanupLocalFiles(E2ETestCase):
    """Seed remote content, plant a stale local-only file, then confirm a
    --download-only run with --cleanup-local-files deletes the stale file
    while keeping the remote-backed one."""

    case_id = "0007"
    name = "download-only cleanup-local-files"
    description = "Validate that cleanup_local_files removes stale local content in download-only mode"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0007 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        work_dir = context.work_root / "tc0007"
        log_dir = context.logs_dir / "tc0007"
        state_dir = context.state_dir / "tc0007"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        sync_root = work_dir / "syncroot"
        seed_conf = work_dir / "conf-seed"
        cleanup_conf = work_dir / "conf-cleanup"
        root_name = f"ZZ_E2E_TC0007_{context.run_id}_{os.getpid()}"

        write_text_file(sync_root / root_name / "keep.txt", "keep\n")
        context.bootstrap_config_dir(seed_conf)
        self._write_config(seed_conf / "config")
        context.bootstrap_config_dir(cleanup_conf)
        self._write_config(cleanup_conf / "config")

        seed_stdout = log_dir / "seed_stdout.log"
        seed_stderr = log_dir / "seed_stderr.log"
        cleanup_stdout = log_dir / "cleanup_stdout.log"
        cleanup_stderr = log_dir / "cleanup_stderr.log"
        post_manifest_file = state_dir / "post_cleanup_manifest.txt"
        metadata_file = state_dir / "seed_metadata.txt"

        # Step 1: push keep.txt so it becomes remote-backed.
        seed_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(seed_conf),
        ]
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)

        # Step 2: create a local file that has no remote counterpart.
        stale = sync_root / root_name / "stale-local.txt"
        write_text_file(stale, "stale\n")

        # Step 3: download-only with cleanup should remove the stale file.
        cleanup_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--cleanup-local-files", "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(cleanup_conf),
        ]
        cleanup_result = run_command(cleanup_command, cwd=context.repo_root)
        write_text_file(cleanup_stdout, cleanup_result.stdout)
        write_text_file(cleanup_stderr, cleanup_result.stderr)
        post_manifest = build_manifest(sync_root)
        write_manifest(post_manifest_file, post_manifest)

        write_text_file(metadata_file, "\n".join([
            f"root_name={root_name}",
            f"seed_returncode={seed_result.returncode}",
            f"cleanup_returncode={cleanup_result.returncode}",
        ]) + "\n")

        artifacts = [str(seed_stdout), str(seed_stderr), str(cleanup_stdout), str(cleanup_stderr), str(post_manifest_file), str(metadata_file)]
        details = {"seed_returncode": seed_result.returncode, "cleanup_returncode": cleanup_result.returncode, "root_name": root_name}

        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts, details)
        if cleanup_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"cleanup_local_files processing failed with status {cleanup_result.returncode}", artifacts, details)
        if stale.exists() or f"{root_name}/stale-local.txt" in post_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Stale local file still exists after cleanup_local_files processing", artifacts, details)
        if f"{root_name}/keep.txt" not in post_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Expected remote-backed file missing after cleanup_local_files processing", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,41 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0008UploadOnly(E2ETestCase):
    """Push local content with --upload-only, then confirm via an
    independent --download-only client that it reached the remote."""

    case_id = "0008"
    name = "upload-only behaviour"
    description = "Validate that upload-only pushes local content remotely"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0008 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        work_dir = context.work_root / "tc0008"
        log_dir = context.logs_dir / "tc0008"
        state_dir = context.state_dir / "tc0008"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        upload_root = work_dir / "uploadroot"
        upload_conf = work_dir / "conf-upload"
        verify_root = work_dir / "verifyroot"
        verify_conf = work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0008_{context.run_id}_{os.getpid()}"

        write_text_file(upload_root / root_name / "upload.txt", "upload only\n")
        context.bootstrap_config_dir(upload_conf)
        self._write_config(upload_conf / "config")
        context.bootstrap_config_dir(verify_conf)
        self._write_config(verify_conf / "config")

        stdout_file = log_dir / "upload_only_stdout.log"
        stderr_file = log_dir / "upload_only_stderr.log"
        verify_stdout = log_dir / "verify_stdout.log"
        verify_stderr = log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "upload_metadata.txt"

        # Upload pass.
        command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--upload-only", "--resync", "--resync-auth",
            "--syncdir", str(upload_root), "--confdir", str(upload_conf),
        ]
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)

        # Verification pass into a separate, empty sync root.
        verify_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--resync", "--resync-auth",
            "--syncdir", str(verify_root), "--confdir", str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)

        write_text_file(metadata_file, "\n".join([
            f"root_name={root_name}",
            f"returncode={result.returncode}",
            f"verify_returncode={verify_result.returncode}",
        ]) + "\n")

        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}

        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if f"{root_name}/upload.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, f"Upload-only did not synchronise expected remote file: {root_name}/upload.txt", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,47 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0009UploadOnlyNoRemoteDelete(E2ETestCase):
    """Seed a remote file, delete its local copy, then confirm an
    --upload-only run with --no-remote-delete leaves the remote copy intact."""

    case_id = "0009"
    name = "upload-only no-remote-delete"
    description = "Validate that no_remote_delete preserves remote content in upload-only mode"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0009 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        work_dir = context.work_root / "tc0009"
        log_dir = context.logs_dir / "tc0009"
        state_dir = context.state_dir / "tc0009"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        sync_root = work_dir / "syncroot"
        seed_conf = work_dir / "conf-seed"
        upload_conf = work_dir / "conf-upload"
        verify_root = work_dir / "verifyroot"
        verify_conf = work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0009_{context.run_id}_{os.getpid()}"

        keep_file = sync_root / root_name / "keep.txt"
        write_text_file(keep_file, "keep remote\n")
        for conf_dir in (seed_conf, upload_conf, verify_conf):
            context.bootstrap_config_dir(conf_dir)
            self._write_config(conf_dir / "config")

        seed_stdout = log_dir / "seed_stdout.log"
        seed_stderr = log_dir / "seed_stderr.log"
        upload_stdout = log_dir / "upload_only_stdout.log"
        upload_stderr = log_dir / "upload_only_stderr.log"
        verify_stdout = log_dir / "verify_stdout.log"
        verify_stderr = log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "seed_metadata.txt"

        # Step 1: seed keep.txt remotely.
        seed_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(seed_conf),
        ]
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)

        # Step 2: delete the local copy, creating a pending remote deletion.
        if keep_file.exists():
            keep_file.unlink()

        # Step 3: upload-only with --no-remote-delete must not propagate it.
        upload_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--upload-only", "--no-remote-delete", "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(upload_conf),
        ]
        upload_result = run_command(upload_command, cwd=context.repo_root)
        write_text_file(upload_stdout, upload_result.stdout)
        write_text_file(upload_stderr, upload_result.stderr)

        # Step 4: confirm the remote still holds keep.txt.
        verify_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--resync", "--resync-auth",
            "--syncdir", str(verify_root), "--confdir", str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)

        write_text_file(metadata_file, "\n".join([
            f"root_name={root_name}",
            f"seed_returncode={seed_result.returncode}",
            f"upload_returncode={upload_result.returncode}",
            f"verify_returncode={verify_result.returncode}",
        ]) + "\n")

        artifacts = [str(seed_stdout), str(seed_stderr), str(upload_stdout), str(upload_stderr), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"seed_returncode": seed_result.returncode, "upload_returncode": upload_result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}

        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts, details)
        if upload_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only --no-remote-delete failed with status {upload_result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if f"{root_name}/keep.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, f"Remote file was unexpectedly deleted despite --no-remote-delete: {root_name}/keep.txt", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,42 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0010UploadOnlyRemoveSourceFiles(E2ETestCase):
    """Upload a file with --upload-only --remove-source-files, then confirm
    the local copy was removed and the remote copy exists."""

    case_id = "0010"
    name = "upload-only remove-source-files"
    description = "Validate that remove_source_files removes local files after upload-only succeeds"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0010 config\nbypass_data_preservation = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        work_dir = context.work_root / "tc0010"
        log_dir = context.logs_dir / "tc0010"
        state_dir = context.state_dir / "tc0010"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        sync_root = work_dir / "syncroot"
        upload_conf = work_dir / "conf-upload"
        verify_root = work_dir / "verifyroot"
        verify_conf = work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0010_{context.run_id}_{os.getpid()}"

        source_file = sync_root / root_name / "source.txt"
        write_text_file(source_file, "remove after upload\n")
        context.bootstrap_config_dir(upload_conf)
        self._write_config(upload_conf / "config")
        context.bootstrap_config_dir(verify_conf)
        self._write_config(verify_conf / "config")

        stdout_file = log_dir / "upload_only_remove_source_stdout.log"
        stderr_file = log_dir / "upload_only_remove_source_stderr.log"
        verify_stdout = log_dir / "verify_stdout.log"
        verify_stderr = log_dir / "verify_stderr.log"
        post_manifest_file = state_dir / "post_upload_manifest.txt"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "upload_metadata.txt"

        # Upload pass; the post-run manifest captures what remains locally.
        command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--upload-only", "--remove-source-files", "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(upload_conf),
        ]
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        post_manifest = build_manifest(sync_root)
        write_manifest(post_manifest_file, post_manifest)

        # Verify the remote copy from a clean root.
        verify_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--resync", "--resync-auth",
            "--syncdir", str(verify_root), "--confdir", str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)

        write_text_file(metadata_file, "\n".join([
            f"root_name={root_name}",
            f"returncode={result.returncode}",
            f"verify_returncode={verify_result.returncode}",
        ]) + "\n")

        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(post_manifest_file), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}

        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"--upload-only with remove_source_files failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if source_file.exists() or f"{root_name}/source.txt" in post_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Local source file still exists after remove_source_files processing", artifacts, details)
        if f"{root_name}/source.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, f"Remote file missing after upload-only remove_source_files: {root_name}/source.txt", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0011SkipFileValidation(E2ETestCase):
    """Sync a tree containing *.tmp and *.swp files under a skip_file config
    and confirm only the non-matching file reaches the remote."""

    case_id = "0011"
    name = "skip_file validation"
    description = "Validate that skip_file patterns prevent matching files from synchronising"

    def _write_config(self, config_path: Path) -> None:
        write_text_file(config_path, "# tc0011 config\nbypass_data_preservation = \"true\"\nskip_file = \"*.tmp|*.swp\"\n")

    def run(self, context: E2EContext) -> TestResult:
        work_dir = context.work_root / "tc0011"
        log_dir = context.logs_dir / "tc0011"
        state_dir = context.state_dir / "tc0011"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        sync_root = work_dir / "syncroot"
        confdir = work_dir / "conf-main"
        verify_root = work_dir / "verifyroot"
        verify_conf = work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0011_{context.run_id}_{os.getpid()}"

        write_text_file(sync_root / root_name / "keep.txt", "keep\n")
        write_text_file(sync_root / root_name / "skip.tmp", "skip\n")
        write_text_file(sync_root / root_name / "editor.swp", "swap\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        # The verify client's config carries no skip_file patterns.
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# tc0011 verify\nbypass_data_preservation = \"true\"\n")

        stdout_file = log_dir / "skip_file_stdout.log"
        stderr_file = log_dir / "skip_file_stderr.log"
        verify_stdout = log_dir / "verify_stdout.log"
        verify_stderr = log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"

        command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(confdir),
        ]
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)

        verify_command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--download-only", "--resync", "--resync-auth",
            "--syncdir", str(verify_root), "--confdir", str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)

        write_text_file(metadata_file, f"root_name={root_name}\nreturncode={result.returncode}\nverify_returncode={verify_result.returncode}\n")

        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}

        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_file validation failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if f"{root_name}/keep.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, f"Expected non-skipped file missing remotely: {root_name}/keep.txt", artifacts, details)
        for unwanted in [f"{root_name}/skip.tmp", f"{root_name}/editor.swp"]:
            if unwanted in remote_manifest:
                return TestResult.fail_result(self.case_id, self.name, f"skip_file pattern failed, file was synchronised: {unwanted}", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,75 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0012SkipDirValidation(E2ETestCase):
    """Exercise skip_dir in its loose (name-anywhere) form and with
    skip_dir_strict_match, recording per-scenario failures."""

    case_id = "0012"
    name = "skip_dir validation"
    description = "Validate skip_dir loose matching and skip_dir_strict_match behaviour"

    def _write_config(self, config_path: Path, skip_dir_value: str, strict: bool) -> None:
        # Build the per-scenario config; strict mode adds skip_dir_strict_match.
        lines = ["# tc0012 config", "bypass_data_preservation = \"true\"", f"skip_dir = \"{skip_dir_value}\""]
        if strict:
            lines.append("skip_dir_strict_match = \"true\"")
        write_text_file(config_path, "\n".join(lines) + "\n")

    def _run_loose(self, context: E2EContext, case_log_dir: Path, all_artifacts: list[str], failures: list[str]) -> None:
        # Loose matching: skip_dir = "Cache" — both the top-level Cache and
        # the nested App/Cache are expected to be excluded.
        scenario_root = context.work_root / "tc0012" / "loose_match"
        scenario_state = context.state_dir / "tc0012" / "loose_match"
        reset_directory(scenario_root)
        reset_directory(scenario_state)

        sync_root = scenario_root / "syncroot"
        confdir = scenario_root / "conf-loose"
        verify_root = scenario_root / "verifyroot"
        verify_conf = scenario_root / "conf-verify-loose"
        root = f"ZZ_E2E_TC0012_LOOSE_{context.run_id}_{os.getpid()}"

        write_text_file(sync_root / root / "Cache" / "top.txt", "skip top\n")
        write_text_file(sync_root / root / "App" / "Cache" / "nested.txt", "skip nested\n")
        write_text_file(sync_root / root / "Keep" / "ok.txt", "ok\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", "Cache", False)
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")

        stdout_file = case_log_dir / "loose_match_stdout.log"
        stderr_file = case_log_dir / "loose_match_stderr.log"
        verify_stdout = case_log_dir / "loose_match_verify_stdout.log"
        verify_stderr = case_log_dir / "loose_match_verify_stderr.log"
        manifest_file = scenario_state / "remote_verify_manifest.txt"

        result = run_command(
            [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)],
            cwd=context.repo_root,
        )
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        verify_result = run_command(
            [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)],
            cwd=context.repo_root,
        )
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        manifest = build_manifest(verify_root)
        write_manifest(manifest_file, manifest)
        all_artifacts.extend([str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(manifest_file)])

        if result.returncode != 0:
            failures.append(f"Loose skip_dir scenario failed with status {result.returncode}")
            return
        if verify_result.returncode != 0:
            failures.append(f"Loose skip_dir verification failed with status {verify_result.returncode}")
            return
        if f"{root}/Keep/ok.txt" not in manifest:
            failures.append("Loose skip_dir scenario did not synchronise expected non-skipped content")
        for unwanted in [f"{root}/Cache/top.txt", f"{root}/App/Cache/nested.txt"]:
            if unwanted in manifest:
                failures.append(f"Loose skip_dir scenario unexpectedly synchronised skipped directory content: {unwanted}")

    def _run_strict(self, context: E2EContext, case_log_dir: Path, all_artifacts: list[str], failures: list[str]) -> None:
        # Strict matching against the full path {root}/App/Cache — only the
        # nested directory is excluded; the top-level Cache must still sync.
        scenario_root = context.work_root / "tc0012" / "strict_match"
        scenario_state = context.state_dir / "tc0012" / "strict_match"
        reset_directory(scenario_root)
        reset_directory(scenario_state)

        sync_root = scenario_root / "syncroot"
        confdir = scenario_root / "conf-strict"
        verify_root = scenario_root / "verifyroot"
        verify_conf = scenario_root / "conf-verify-strict"
        root = f"ZZ_E2E_TC0012_STRICT_{context.run_id}_{os.getpid()}"

        write_text_file(sync_root / root / "Cache" / "top.txt", "top should remain\n")
        write_text_file(sync_root / root / "App" / "Cache" / "nested.txt", "nested should skip\n")
        write_text_file(sync_root / root / "Keep" / "ok.txt", "ok\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", f"{root}/App/Cache", True)
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")

        stdout_file = case_log_dir / "strict_match_stdout.log"
        stderr_file = case_log_dir / "strict_match_stderr.log"
        verify_stdout = case_log_dir / "strict_match_verify_stdout.log"
        verify_stderr = case_log_dir / "strict_match_verify_stderr.log"
        manifest_file = scenario_state / "remote_verify_manifest.txt"

        result = run_command(
            [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)],
            cwd=context.repo_root,
        )
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        verify_result = run_command(
            [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)],
            cwd=context.repo_root,
        )
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        manifest = build_manifest(verify_root)
        write_manifest(manifest_file, manifest)
        all_artifacts.extend([str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(manifest_file)])

        if result.returncode != 0:
            failures.append(f"Strict skip_dir scenario failed with status {result.returncode}")
            return
        if verify_result.returncode != 0:
            failures.append(f"Strict skip_dir verification failed with status {verify_result.returncode}")
            return
        if f"{root}/Keep/ok.txt" not in manifest:
            failures.append("Strict skip_dir scenario did not synchronise expected non-skipped content")
        if f"{root}/Cache/top.txt" not in manifest:
            failures.append("Strict skip_dir scenario incorrectly skipped top-level Cache directory")
        if f"{root}/App/Cache/nested.txt" in manifest:
            failures.append("Strict skip_dir scenario unexpectedly synchronised strict-matched directory content")

    def run(self, context: E2EContext) -> TestResult:
        # Both scenarios share one log directory; each resets its own work/state.
        case_log_dir = context.logs_dir / "tc0012"
        reset_directory(case_log_dir)
        context.ensure_refresh_token_available()

        all_artifacts: list[str] = []
        failures: list[str] = []
        self._run_loose(context, case_log_dir, all_artifacts, failures)
        self._run_strict(context, case_log_dir, all_artifacts, failures)

        details = {"failures": failures}
        if failures:
            return TestResult.fail_result(self.case_id, self.name, "; ".join(failures), all_artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, all_artifacts, details)

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0013SkipDotfilesValidation(E2ETestCase):
    """End-to-end check that ``skip_dotfiles`` excludes dotfiles and dot-directories."""

    case_id = "0013"
    name = "skip_dotfiles validation"
    description = "Validate that skip_dotfiles prevents dotfiles and dot-directories from synchronising"

    def _write_config(self, config_path: Path) -> None:
        # Main client config: skip_dotfiles enabled so hidden entries must not upload.
        write_text_file(config_path, "# tc0013 config\nbypass_data_preservation = \"true\"\nskip_dotfiles = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        """Upload a tree containing dotfiles, then verify via an independent
        download-only client that only the visible file reached the remote.

        Returns a TestResult carrying pass/fail status and log artefacts.
        """
        # Per-case scratch, log and state directories, reset for a clean run.
        case_work_dir = context.work_root / "tc0013"
        case_log_dir = context.logs_dir / "tc0013"
        state_dir = context.state_dir / "tc0013"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        # Unique remote directory name so concurrent runs cannot collide.
        root_name = f"ZZ_E2E_TC0013_{context.run_id}_{os.getpid()}"
        # Local content: one visible file plus a dotfile and a dot-directory.
        write_text_file(sync_root / root_name / "visible.txt", "visible\n")
        write_text_file(sync_root / root_name / ".hidden.txt", "hidden\n")
        write_text_file(sync_root / root_name / ".dotdir" / "inside.txt", "inside\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")
        stdout_file = case_log_dir / "skip_dotfiles_stdout.log"
        stderr_file = case_log_dir / "skip_dotfiles_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)]
        # Log the exact command for traceability, consistent with the other test cases.
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Independent download-only client confirms what actually exists remotely.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)]
        context.log(f"Executing Test Case {self.case_id} verify: {command_to_string(verify_command)}")
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        write_text_file(metadata_file, f"root_name={root_name}\nreturncode={result.returncode}\nverify_returncode={verify_result.returncode}\n")
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_dotfiles validation failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The visible file must synchronise; none of the dot entries may appear remotely.
        if f"{root_name}/visible.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Visible file missing after skip_dotfiles processing", artifacts, details)
        for unwanted in [f"{root_name}/.hidden.txt", f"{root_name}/.dotdir", f"{root_name}/.dotdir/inside.txt"]:
            if unwanted in remote_manifest:
                return TestResult.fail_result(self.case_id, self.name, f"Dotfile content was unexpectedly synchronised: {unwanted}", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,48 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0014SkipSizeValidation(E2ETestCase):
    """End-to-end check that ``skip_size`` excludes files above the configured size."""

    case_id = "0014"
    name = "skip_size validation"
    description = "Validate that skip_size prevents oversized files from synchronising"

    def _write_config(self, config_path: Path, app_log_dir: Path) -> None:
        # Single write of the complete config (the original wrote the base config,
        # then read it back to append log_dir — consolidated here).
        # skip_size = "1" means files above 1 MB must be skipped.
        write_text_file(
            config_path,
            "# tc0014 config\nbypass_data_preservation = \"true\"\nenable_logging = \"true\"\nskip_size = \"1\"\n"
            + f'log_dir = "{app_log_dir}"\n',
        )

    def run(self, context: E2EContext) -> TestResult:
        """Upload a small and a 2 MiB file with skip_size=1, then verify remotely
        that only the small file synchronised.

        Returns a TestResult carrying pass/fail status and log artefacts.
        """
        case_work_dir = context.work_root / "tc0014"
        case_log_dir = context.logs_dir / "tc0014"
        state_dir = context.state_dir / "tc0014"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0014_{context.run_id}_{os.getpid()}"
        app_log_dir = case_log_dir / "app-logs"
        # A small file well under the threshold, and a 2 MiB file well over it.
        write_text_file(sync_root / root_name / "small.bin", "a" * 16384)
        big_path = sync_root / root_name / "large.bin"
        big_path.parent.mkdir(parents=True, exist_ok=True)
        big_path.write_bytes(b"B" * (2 * 1024 * 1024))
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", app_log_dir)
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")
        stdout_file = case_log_dir / "skip_size_stdout.log"
        stderr_file = case_log_dir / "skip_size_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        config_copy = state_dir / "config_used.txt"
        verify_config_copy = state_dir / "verify_config_used.txt"
        command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)]
        # Log the exact command for traceability, consistent with the other test cases.
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Independent download-only client confirms what actually exists remotely.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)]
        context.log(f"Executing Test Case {self.case_id} verify: {command_to_string(verify_command)}")
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        # Preserve the exact configs used, to aid debugging of any failure.
        write_text_file(config_copy, (confdir / "config").read_text(encoding="utf-8"))
        write_text_file(verify_config_copy, (verify_conf / "config").read_text(encoding="utf-8"))
        # Stat once and reuse; the original re-called big_path.stat() per field.
        large_size = big_path.stat().st_size
        write_text_file(metadata_file, f"root_name={root_name}\nlarge_size={large_size}\nlarge_size_mb_decimal={large_size / 1000 / 1000:.3f}\nlarge_size_mib_binary={large_size / 1024 / 1024:.3f}\nreturncode={result.returncode}\nverify_returncode={verify_result.returncode}\n")
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file), str(config_copy), str(verify_config_copy)]
        if app_log_dir.exists():
            artifacts.append(str(app_log_dir))
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name, "large_size": large_size, "large_size_mb_decimal": round(large_size / 1000 / 1000, 3), "large_size_mib_binary": round(large_size / 1024 / 1024, 3), "skip_size": 1}
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_size validation failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The small file must synchronise; the oversized file must be skipped.
        if f"{root_name}/small.bin" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Small file missing after skip_size processing", artifacts, details)
        if f"{root_name}/large.bin" in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Large file exceeded configured skip_size threshold but was synchronised; review display-running-config output and debug logs", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,42 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0015SkipSymlinksValidation(E2ETestCase):
    """End-to-end check that ``skip_symlinks`` excludes symbolic links from sync."""

    case_id = "0015"
    name = "skip_symlinks validation"
    description = "Validate that skip_symlinks prevents symbolic links from synchronising"

    def _write_config(self, config_path: Path) -> None:
        # Main client config: skip_symlinks enabled so links must not upload.
        write_text_file(config_path, "# tc0015 config\nbypass_data_preservation = \"true\"\nskip_symlinks = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        """Upload a regular file plus a symlink to it, then verify remotely that
        only the regular file synchronised.

        Returns a TestResult carrying pass/fail status and log artefacts.
        """
        case_work_dir = context.work_root / "tc0015"
        case_log_dir = context.logs_dir / "tc0015"
        state_dir = context.state_dir / "tc0015"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0015_{context.run_id}_{os.getpid()}"
        # A real file and a relative symlink pointing at it within the same directory.
        target = sync_root / root_name / "real.txt"
        write_text_file(target, "real\n")
        link = sync_root / root_name / "linked.txt"
        link.parent.mkdir(parents=True, exist_ok=True)
        link.symlink_to(target.name)
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")
        stdout_file = case_log_dir / "skip_symlinks_stdout.log"
        stderr_file = case_log_dir / "skip_symlinks_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)]
        # Log the exact command for traceability, consistent with the other test cases.
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Independent download-only client confirms what actually exists remotely.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)]
        context.log(f"Executing Test Case {self.case_id} verify: {command_to_string(verify_command)}")
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        write_text_file(metadata_file, f"root_name={root_name}\nreturncode={result.returncode}\nverify_returncode={verify_result.returncode}\n")
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"skip_symlinks validation failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The regular file must synchronise; the symlink must be skipped.
        if f"{root_name}/real.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Regular file missing after skip_symlinks processing", artifacts, details)
        if f"{root_name}/linked.txt" in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Symbolic link was unexpectedly synchronised", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,43 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0016CheckNosyncValidation(E2ETestCase):
    """End-to-end check that ``check_nosync`` excludes directories containing .nosync."""

    case_id = "0016"
    name = "check_nosync validation"
    description = "Validate that check_nosync prevents directories containing .nosync from synchronising"

    def _write_config(self, config_path: Path) -> None:
        # Main client config: check_nosync enabled so marked directories are excluded.
        write_text_file(config_path, "# tc0016 config\nbypass_data_preservation = \"true\"\ncheck_nosync = \"true\"\n")

    def run(self, context: E2EContext) -> TestResult:
        """Upload a tree with one allowed directory and one containing a .nosync
        marker, then verify remotely that the marked directory did not synchronise.

        Returns a TestResult carrying pass/fail status and log artefacts.
        """
        case_work_dir = context.work_root / "tc0016"
        case_log_dir = context.logs_dir / "tc0016"
        state_dir = context.state_dir / "tc0016"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        root_name = f"ZZ_E2E_TC0016_{context.run_id}_{os.getpid()}"
        # "Allowed" has normal content; "Blocked" carries an empty .nosync marker.
        write_text_file(sync_root / root_name / "Allowed" / "ok.txt", "ok\n")
        write_text_file(sync_root / root_name / "Blocked" / ".nosync", "")
        write_text_file(sync_root / root_name / "Blocked" / "blocked.txt", "blocked\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# verify\nbypass_data_preservation = \"true\"\n")
        stdout_file = case_log_dir / "check_nosync_stdout.log"
        stderr_file = case_log_dir / "check_nosync_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--resync", "--resync-auth", "--syncdir", str(sync_root), "--confdir", str(confdir)]
        # Log the exact command for traceability, consistent with the other test cases.
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Independent download-only client confirms what actually exists remotely.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--syncdir", str(verify_root), "--confdir", str(verify_conf)]
        context.log(f"Executing Test Case {self.case_id} verify: {command_to_string(verify_command)}")
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        write_text_file(metadata_file, f"root_name={root_name}\nreturncode={result.returncode}\nverify_returncode={verify_result.returncode}\n")
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"returncode": result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"check_nosync validation failed with status {result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # The allowed content must synchronise; nothing under Blocked may appear remotely.
        if f"{root_name}/Allowed/ok.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Allowed content missing after check_nosync processing", artifacts, details)
        for unwanted in [f"{root_name}/Blocked", f"{root_name}/Blocked/.nosync", f"{root_name}/Blocked/blocked.txt"]:
            if unwanted in remote_manifest:
                return TestResult.fail_result(self.case_id, self.name, f".nosync directory content was unexpectedly synchronised: {unwanted}", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,105 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0017CheckNomountValidation(E2ETestCase):
    """Exercise check_nomount: a .nosync marker at the sync_dir root must abort the sync."""

    case_id = "0017"
    name = "check_nomount validation"
    description = "Validate that check_nomount aborts synchronisation when .nosync exists in the sync_dir mount point"

    def _write_config(self, config_path: Path) -> None:
        # check_nomount enabled; bypass_data_preservation keeps the run non-interactive.
        config_body = (
            "# tc0017 config\n"
            'bypass_data_preservation = "true"\n'
            'check_nomount = "true"\n'
        )
        write_text_file(config_path, config_body)

    def run(self, context: E2EContext) -> TestResult:
        """Place a .nosync marker in the sync root, attempt a sync, and require
        that the client aborts with the expected safeguard message."""
        work_dir = context.work_root / "tc0017"
        log_dir = context.logs_dir / "tc0017"
        state_dir = context.state_dir / "tc0017"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()
        sync_root = work_dir / "syncroot"
        confdir = work_dir / "conf-main"
        root_name = f"ZZ_E2E_TC0017_{context.run_id}_{os.getpid()}"
        # The marker check_nomount must detect, plus content that must never upload.
        write_text_file(sync_root / ".nosync", "")
        write_text_file(sync_root / root_name / "should_not_upload.txt", "blocked by check_nomount\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config")
        stdout_file = log_dir / "check_nomount_stdout.log"
        stderr_file = log_dir / "check_nomount_stderr.log"
        metadata_file = state_dir / "metadata.txt"
        command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--syncdir", str(sync_root), "--confdir", str(confdir),
        ]
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        metadata_lines = [
            f"case_id={self.case_id}",
            f"root_name={root_name}",
            f"command={command_to_string(command)}",
            f"returncode={result.returncode}",
        ]
        write_text_file(metadata_file, "\n".join(metadata_lines) + "\n")
        artifacts = [str(stdout_file), str(stderr_file), str(metadata_file)]
        details = {
            "command": command,
            "returncode": result.returncode,
            "root_name": root_name,
        }
        combined_output = (result.stdout + "\n" + result.stderr).lower()
        # A zero exit status means the safeguard never triggered.
        if result.returncode == 0:
            return TestResult.fail_result(
                self.case_id,
                self.name,
                "check_nomount did not abort synchronisation when .nosync existed in the sync_dir mount point",
                artifacts,
                details,
            )
        # Either safeguard message is accepted as evidence of the abort.
        safeguard_seen = (
            ".nosync file found" in combined_output
            or "aborting synchronization process to safeguard data" in combined_output
        )
        if not safeguard_seen:
            return TestResult.fail_result(
                self.case_id,
                self.name,
                "check_nomount did not emit the expected .nosync safeguard message",
                artifacts,
                details,
            )
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,214 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0018RecycleBinValidation(E2ETestCase):
    # Four-phase orchestration: seed remote content, delete a directory online,
    # run a cleanup sync with use_recycle_bin enabled, then verify via a fresh
    # client that surviving content remains while the deleted content landed in
    # the configured recycle bin with FreeDesktop-style .trashinfo metadata.
    case_id = "0018"
    name = "recycle bin validation"
    description = "Validate that online deletions are moved into a FreeDesktop-compliant recycle bin when enabled"

    def _write_seed_config(self, config_path: Path) -> None:
        # Minimal config shared by the seed upload and the final verification client.
        write_text_file(config_path, "# tc0018 seed config\n" 'bypass_data_preservation = "true"\n')

    def _write_cleanup_config(self, config_path: Path, recycle_bin_path: Path) -> None:
        # Cleanup config: download-only with cleanup_local_files so local copies of
        # online-deleted items are removed; use_recycle_bin routes those removals
        # into the directory given by recycle_bin_path instead of deleting them.
        write_text_file(
            config_path,
            "# tc0018 cleanup config\n"
            'bypass_data_preservation = "true"\n'
            'cleanup_local_files = "true"\n'
            'download_only = "true"\n'
            'use_recycle_bin = "true"\n'
            f'recycle_bin_path = "{recycle_bin_path}"\n',
        )

    def run(self, context: E2EContext) -> TestResult:
        """Seed remote content, remove one directory online, run a recycle-bin
        cleanup sync, then verify local, recycle-bin and remote state.

        Returns a TestResult carrying pass/fail status and all captured artefacts.
        """
        # Per-case scratch, log and state directories, reset for a clean run.
        case_work_dir = context.work_root / "tc0018"
        case_log_dir = context.logs_dir / "tc0018"
        state_dir = context.state_dir / "tc0018"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        # Separate config dirs per phase so each client run is isolated.
        sync_root = case_work_dir / "syncroot"
        conf_seed = case_work_dir / "conf-seed"
        conf_cleanup = case_work_dir / "conf-cleanup"
        verify_root = case_work_dir / "verifyroot"
        conf_verify = case_work_dir / "conf-verify"
        recycle_bin_root = case_work_dir / "RecycleBin"
        # Unique remote directory name so concurrent runs cannot collide.
        root_name = f"ZZ_E2E_TC0018_{context.run_id}_{os.getpid()}"
        # "Keep" must survive the whole test; "OldData" is deleted online later.
        write_text_file(sync_root / root_name / "Keep" / "keep.txt", "keep\n")
        write_text_file(sync_root / root_name / "OldData" / "old.txt", "old\n")
        context.bootstrap_config_dir(conf_seed)
        self._write_seed_config(conf_seed / "config")
        context.bootstrap_config_dir(conf_cleanup)
        self._write_cleanup_config(conf_cleanup / "config", recycle_bin_root)
        context.bootstrap_config_dir(conf_verify)
        self._write_seed_config(conf_verify / "config")
        # Per-phase stdout/stderr capture files and state manifests.
        seed_stdout = case_log_dir / "seed_stdout.log"
        seed_stderr = case_log_dir / "seed_stderr.log"
        remove_stdout = case_log_dir / "remove_stdout.log"
        remove_stderr = case_log_dir / "remove_stderr.log"
        cleanup_stdout = case_log_dir / "cleanup_stdout.log"
        cleanup_stderr = case_log_dir / "cleanup_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        recycle_manifest_file = state_dir / "recycle_manifest.txt"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        local_manifest_file = state_dir / "local_manifest_after_cleanup.txt"
        metadata_file = state_dir / "metadata.txt"
        # Phase 1: upload-only seed of the test tree to the remote.
        seed_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--upload-only",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(conf_seed),
        ]
        context.log(f"Executing Test Case {self.case_id} seed: {command_to_string(seed_command)}")
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)
        # Phase 2: delete OldData directly online via --remove-directory.
        remove_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--verbose",
            "--remove-directory",
            f"{root_name}/OldData",
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(conf_seed),
        ]
        remove_result = run_command(remove_command, cwd=context.repo_root)
        write_text_file(remove_stdout, remove_result.stdout)
        write_text_file(remove_stderr, remove_result.stderr)
        # Phase 3: download-only cleanup sync using the recycle-bin config, so
        # the local copy of the online-deleted directory is moved to the bin.
        cleanup_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--verbose",
            "--download-only",
            "--cleanup-local-files",
            "--single-directory",
            root_name,
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(conf_cleanup),
        ]
        cleanup_result = run_command(cleanup_command, cwd=context.repo_root)
        write_text_file(cleanup_stdout, cleanup_result.stdout)
        write_text_file(cleanup_stderr, cleanup_result.stderr)
        # Phase 4: fresh download-only client into a separate root to observe
        # the authoritative remote state.
        verify_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(conf_verify),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        # Snapshot recycle bin, remote (via verify_root) and local trees.
        recycle_manifest = build_manifest(recycle_bin_root)
        remote_manifest = build_manifest(verify_root)
        local_manifest = build_manifest(sync_root)
        write_manifest(recycle_manifest_file, recycle_manifest)
        write_manifest(remote_manifest_file, remote_manifest)
        write_manifest(local_manifest_file, local_manifest)
        write_text_file(
            metadata_file,
            "\n".join(
                [
                    f"case_id={self.case_id}",
                    f"root_name={root_name}",
                    f"seed_returncode={seed_result.returncode}",
                    f"remove_returncode={remove_result.returncode}",
                    f"cleanup_returncode={cleanup_result.returncode}",
                    f"verify_returncode={verify_result.returncode}",
                ]
            )
            + "\n",
        )
        artifacts = [
            str(seed_stdout),
            str(seed_stderr),
            str(remove_stdout),
            str(remove_stderr),
            str(cleanup_stdout),
            str(cleanup_stderr),
            str(verify_stdout),
            str(verify_stderr),
            str(recycle_manifest_file),
            str(remote_manifest_file),
            str(local_manifest_file),
            str(metadata_file),
        ]
        details = {
            "seed_returncode": seed_result.returncode,
            "remove_returncode": remove_result.returncode,
            "cleanup_returncode": cleanup_result.returncode,
            "verify_returncode": verify_result.returncode,
            "root_name": root_name,
        }
        # Each phase must have exited cleanly before state checks are meaningful.
        if seed_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote seed failed with status {seed_result.returncode}", artifacts, details)
        if remove_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Online directory removal failed with status {remove_result.returncode}", artifacts, details)
        if cleanup_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Recycle bin cleanup sync failed with status {cleanup_result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        # Local: OldData must be gone, Keep must still exist.
        if (sync_root / root_name / "OldData").exists():
            return TestResult.fail_result(self.case_id, self.name, "OldData still exists locally after online deletion cleanup", artifacts, details)
        if not (sync_root / root_name / "Keep" / "keep.txt").is_file():
            return TestResult.fail_result(self.case_id, self.name, "Keep file is missing locally after recycle bin processing", artifacts, details)
        # Recycle bin: the deleted file plus a FreeDesktop .trashinfo entry.
        # NOTE(review): matching is by filename suffix only — assumes build_manifest
        # paths end with the file name; confirm against the manifest format.
        recycle_has_file = any(path.endswith("old.txt") for path in recycle_manifest)
        recycle_has_info = any(path.endswith(".trashinfo") for path in recycle_manifest)
        if not recycle_has_file:
            return TestResult.fail_result(self.case_id, self.name, "Deleted content was not moved into the configured recycle bin", artifacts, details)
        if not recycle_has_info:
            return TestResult.fail_result(self.case_id, self.name, "Recycle bin metadata .trashinfo file was not created", artifacts, details)
        # Remote: Keep must exist; nothing named OldData (or under it) may remain.
        if f"{root_name}/Keep/keep.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Keep file is missing online after recycle bin processing", artifacts, details)
        if any(entry == f"{root_name}/OldData" or entry.startswith(f"{root_name}/OldData/") for entry in remote_manifest):
            return TestResult.fail_result(self.case_id, self.name, "OldData still exists online after explicit online removal", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,98 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0019LoggingAndRunningConfig(E2ETestCase):
    """Check that enable_logging writes into a custom log_dir and that
    --display-running-config surfaces the active configuration."""

    case_id = "0019"
    name = "logging and running config validation"
    description = "Validate custom log_dir output and display-running-config visibility"

    def _write_config(self, config_path: Path, app_log_dir: Path) -> None:
        # enable_logging plus an explicit log_dir pointing at the per-case log area.
        config_text = (
            "# tc0019 config\n"
            'bypass_data_preservation = "true"\n'
            'enable_logging = "true"\n'
            f'log_dir = "{app_log_dir}"\n'
        )
        write_text_file(config_path, config_text)

    def run(self, context: E2EContext) -> TestResult:
        """Run one sync with logging enabled, then confirm log files were produced
        and the running-config output exposed the configuration."""
        work_dir = context.work_root / "tc0019"
        log_dir = context.logs_dir / "tc0019"
        state_dir = context.state_dir / "tc0019"
        for directory in (work_dir, log_dir, state_dir):
            reset_directory(directory)
        context.ensure_refresh_token_available()
        sync_root = work_dir / "syncroot"
        confdir = work_dir / "conf-main"
        root_name = f"ZZ_E2E_TC0019_{context.run_id}_{os.getpid()}"
        app_log_dir = log_dir / "app-logs"
        write_text_file(sync_root / root_name / "logging.txt", "log me\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", app_log_dir)
        stdout_file = log_dir / "logging_stdout.log"
        stderr_file = log_dir / "logging_stderr.log"
        metadata_file = state_dir / "metadata.txt"
        command = [
            context.onedrive_bin, "--display-running-config", "--sync", "--verbose",
            "--resync", "--resync-auth",
            "--single-directory", root_name,
            "--syncdir", str(sync_root), "--confdir", str(confdir),
        ]
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        result = run_command(command, cwd=context.repo_root)
        write_text_file(stdout_file, result.stdout)
        write_text_file(stderr_file, result.stderr)
        # Collect every file the application wrote under its configured log_dir.
        if app_log_dir.exists():
            log_entries = sorted(str(p.relative_to(app_log_dir)) for p in app_log_dir.rglob("*") if p.is_file())
        else:
            log_entries = []
        metadata_lines = [
            f"case_id={self.case_id}",
            f"root_name={root_name}",
            f"returncode={result.returncode}",
        ]
        metadata_lines.extend(f"log_file={entry}" for entry in log_entries)
        write_text_file(metadata_file, "\n".join(metadata_lines) + "\n")
        artifacts = [str(stdout_file), str(stderr_file), str(metadata_file)]
        if app_log_dir.exists():
            artifacts.append(str(app_log_dir))
        details = {
            "returncode": result.returncode,
            "root_name": root_name,
            "log_file_count": len(log_entries),
        }
        if result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Logging validation failed with status {result.returncode}", artifacts, details)
        if not log_entries:
            return TestResult.fail_result(self.case_id, self.name, "No application log files were created in the configured log_dir", artifacts, details)
        # Either marker in stdout is accepted as evidence the config was displayed.
        stdout_lower = result.stdout.lower()
        config_visible = "display_running_config" in stdout_lower or "log_dir" in stdout_lower
        if not config_visible:
            return TestResult.fail_result(self.case_id, self.name, "display-running-config output did not expose the active runtime configuration", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,144 @@
from __future__ import annotations
import os
import signal
import subprocess
import time
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0020MonitorModeValidation(E2ETestCase):
    """E2E test 0020: verify that --monitor uploads a file created while the
    monitor process is running, without a manual follow-up --sync.

    Flow:
      1. Start the client in monitor mode against a freshly seeded sync root.
      2. While it is running, drop a new file into the watched tree.
      3. Stop the monitor with SIGINT.
      4. Download the remote tree into a separate verification root and
         assert the new file made it online.
    """

    # Stable identifiers surfaced in TestResult records and log output.
    case_id = "0020"
    name = "monitor mode validation"
    description = "Validate that monitor mode uploads local changes without manually re-running --sync"

    def _write_config(self, config_path: Path, app_log_dir: Path) -> None:
        """Write the monitor-process config.

        A short monitor_interval (5s) with fullscan frequency 1 keeps the
        test fast; bypass_data_preservation avoids safeBackup noise;
        enable_logging + log_dir let the harness collect application logs.
        """
        write_text_file(
            config_path,
            "# tc0020 config\n"
            'bypass_data_preservation = "true"\n'
            'enable_logging = "true"\n'
            f'log_dir = "{app_log_dir}"\n'
            'monitor_interval = "5"\n'
            'monitor_fullscan_frequency = "1"\n',
        )

    def run(self, context: E2EContext) -> TestResult:
        """Execute the monitor-mode scenario and return its TestResult."""
        # Per-case scratch/log/state directories, recreated so reruns are
        # not contaminated by previous state.
        case_work_dir = context.work_root / "tc0020"
        case_log_dir = context.logs_dir / "tc0020"
        state_dir = context.state_dir / "tc0020"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        # Unique remote directory name per run/process so concurrent CI runs
        # cannot collide on the same OneDrive path.
        root_name = f"ZZ_E2E_TC0020_{context.run_id}_{os.getpid()}"
        app_log_dir = case_log_dir / "app-logs"
        # Seed one file so the monitor's initial sync has content to process.
        write_text_file(sync_root / root_name / "baseline.txt", "baseline\n")
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", app_log_dir)
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# tc0020 verify\n" 'bypass_data_preservation = "true"\n')
        stdout_file = case_log_dir / "monitor_stdout.log"
        stderr_file = case_log_dir / "monitor_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        command = [
            context.onedrive_bin,
            "--display-running-config",
            "--monitor",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(confdir),
        ]
        context.log(f"Executing Test Case {self.case_id}: {command_to_string(command)}")
        with stdout_file.open("w", encoding="utf-8") as stdout_fp, stderr_file.open("w", encoding="utf-8") as stderr_fp:
            process = subprocess.Popen(
                command,
                cwd=str(context.repo_root),
                stdout=stdout_fp,
                stderr=stderr_fp,
                text=True,
            )
            # Give the monitor time to start up and complete its initial
            # sync before introducing the change under test.
            time.sleep(8)
            write_text_file(sync_root / root_name / "monitor-added.txt", "added while monitor mode was running\n")
            # Wait past at least one monitor_interval (5s per config above)
            # so the new file can be detected and uploaded, then stop.
            time.sleep(12)
            process.send_signal(signal.SIGINT)
            try:
                process.wait(timeout=30)
            except subprocess.TimeoutExpired:
                # Shutdown hung or SIGINT was ignored; force-kill instead.
                process.kill()
                process.wait(timeout=30)
        # Independent download-only pass into a clean root to observe the
        # remote state produced by the monitor session.
        verify_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        # Persist key facts for post-mortem debugging of CI runs.
        write_text_file(
            metadata_file,
            "\n".join(
                [
                    f"case_id={self.case_id}",
                    f"root_name={root_name}",
                    f"monitor_returncode={process.returncode}",
                    f"verify_returncode={verify_result.returncode}",
                ]
            ) + "\n",
        )
        artifacts = [str(stdout_file), str(stderr_file), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        if app_log_dir.exists():
            artifacts.append(str(app_log_dir))
        details = {
            "monitor_returncode": process.returncode,
            "verify_returncode": verify_result.returncode,
            "root_name": root_name,
        }
        # NOTE: the monitor's own return code is recorded but deliberately
        # not asserted on here — it was terminated via SIGINT.
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if f"{root_name}/monitor-added.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Monitor mode did not upload the file created while the process was running", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,156 @@
from __future__ import annotations
import os
import signal
import subprocess
import time
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0021ResumableTransfersValidation(E2ETestCase):
    """E2E test 0021: interrupted session-upload recovery.

    Phase 1 starts uploading a 5 MiB file with force_session_upload and a
    262144 B/s rate limit, then interrupts the client with SIGINT after 5
    seconds (at that rate the transfer needs ~20s, so the interrupt lands
    mid-upload). Phase 2 re-runs the identical command and must complete
    the upload. A final download-only pass into a clean verification root
    confirms the file exists online.
    """

    # Stable identifiers surfaced in TestResult records and log output.
    case_id = "0021"
    name = "resumable transfers validation"
    description = "Validate interrupted upload recovery for a resumable session upload"

    def _write_config(self, config_path: Path, app_log_dir: Path) -> None:
        """Write the upload config: session uploads forced and throttled so
        phase 1 cannot finish before it is interrupted."""
        write_text_file(
            config_path,
            "# tc0021 config\n"
            'bypass_data_preservation = "true"\n'
            'enable_logging = "true"\n'
            f'log_dir = "{app_log_dir}"\n'
            'force_session_upload = "true"\n'
            'rate_limit = "262144"\n',
        )

    def run(self, context: E2EContext) -> TestResult:
        """Execute both upload phases plus verification; return TestResult."""
        # Per-case scratch/log/state directories, recreated so reruns are
        # not contaminated by previous state.
        case_work_dir = context.work_root / "tc0021"
        case_log_dir = context.logs_dir / "tc0021"
        state_dir = context.state_dir / "tc0021"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        sync_root = case_work_dir / "syncroot"
        confdir = case_work_dir / "conf-main"
        verify_root = case_work_dir / "verifyroot"
        verify_conf = case_work_dir / "conf-verify"
        # Unique remote directory name per run/process so concurrent CI runs
        # cannot collide on the same OneDrive path.
        root_name = f"ZZ_E2E_TC0021_{context.run_id}_{os.getpid()}"
        app_log_dir = case_log_dir / "app-logs"
        large_file = sync_root / root_name / "session-large.bin"
        large_file.parent.mkdir(parents=True, exist_ok=True)
        # 5 MiB payload: at the 262144 B/s rate limit this takes ~20s,
        # guaranteeing the 5s interrupt below lands mid-transfer.
        large_file.write_bytes(b"R" * (5 * 1024 * 1024))
        context.bootstrap_config_dir(confdir)
        self._write_config(confdir / "config", app_log_dir)
        context.bootstrap_config_dir(verify_conf)
        write_text_file(verify_conf / "config", "# tc0021 verify\n" 'bypass_data_preservation = "true"\n')
        phase1_stdout = case_log_dir / "phase1_stdout.log"
        phase1_stderr = case_log_dir / "phase1_stderr.log"
        phase2_stdout = case_log_dir / "phase2_stdout.log"
        phase2_stderr = case_log_dir / "phase2_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        # The exact same command is used for phase 1 (interrupted) and
        # phase 2 (recovery) so only the interruption differs.
        command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--upload-only",
            "--verbose",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(sync_root),
            "--confdir",
            str(confdir),
        ]
        context.log(f"Executing Test Case {self.case_id} phase 1: {command_to_string(command)}")
        with phase1_stdout.open("w", encoding="utf-8") as stdout_fp, phase1_stderr.open("w", encoding="utf-8") as stderr_fp:
            process = subprocess.Popen(
                command,
                cwd=str(context.repo_root),
                stdout=stdout_fp,
                stderr=stderr_fp,
                text=True,
            )
            # Let the session upload start, then interrupt it mid-transfer.
            time.sleep(5)
            process.send_signal(signal.SIGINT)
            try:
                process.wait(timeout=30)
            except subprocess.TimeoutExpired:
                # Shutdown hung or SIGINT was ignored; force-kill instead.
                process.kill()
                process.wait(timeout=30)
        context.log(f"Executing Test Case {self.case_id} phase 2: {command_to_string(command)}")
        # Recovery run: must pick up (or redo) the interrupted upload.
        phase2_result = run_command(command, cwd=context.repo_root)
        write_text_file(phase2_stdout, phase2_result.stdout)
        write_text_file(phase2_stderr, phase2_result.stderr)
        # Independent download-only pass into a clean root to observe the
        # resulting remote state.
        verify_command = [
            context.onedrive_bin,
            "--display-running-config",
            "--sync",
            "--verbose",
            "--download-only",
            "--resync",
            "--resync-auth",
            "--single-directory",
            root_name,
            "--syncdir",
            str(verify_root),
            "--confdir",
            str(verify_conf),
        ]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        # Persist key facts for post-mortem debugging of CI runs.
        write_text_file(
            metadata_file,
            "\n".join(
                [
                    f"case_id={self.case_id}",
                    f"root_name={root_name}",
                    f"phase1_returncode={process.returncode}",
                    f"phase2_returncode={phase2_result.returncode}",
                    f"verify_returncode={verify_result.returncode}",
                    f"large_size={large_file.stat().st_size}",
                ]
            ) + "\n",
        )
        artifacts = [str(phase1_stdout), str(phase1_stderr), str(phase2_stdout), str(phase2_stderr), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        if app_log_dir.exists():
            artifacts.append(str(app_log_dir))
        details = {
            "phase1_returncode": process.returncode,
            "phase2_returncode": phase2_result.returncode,
            "verify_returncode": verify_result.returncode,
            "root_name": root_name,
            "large_size": large_file.stat().st_size,
        }
        # Phase 1's return code is recorded but not asserted on — it was
        # interrupted by design.
        if phase2_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Resumable upload recovery phase failed with status {phase2_result.returncode}", artifacts, details)
        if verify_result.returncode != 0:
            return TestResult.fail_result(self.case_id, self.name, f"Remote verification failed with status {verify_result.returncode}", artifacts, details)
        if f"{root_name}/session-large.bin" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Interrupted resumable upload did not complete successfully on the subsequent run", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,115 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import command_to_string, reset_directory, run_command, write_text_file
class TestCase0022LocalFirstValidation(E2ETestCase):
    """E2E test 0022: local_first conflict semantics.

    Seeds a file online, downloads it into a local_first-enabled root,
    edits it locally, pushes a competing remote edit from a third root,
    then runs a bidirectional sync from the local_first root. Both the
    local copy and a fresh remote download must end up containing the
    local edit, proving local content was treated as the source of truth.
    """

    # Stable identifiers surfaced in TestResult records and log output.
    case_id = "0022"
    name = "local_first validation"
    description = "Validate that local_first treats local content as the source of truth during a conflict"

    def _write_default_config(self, config_path: Path) -> None:
        # Baseline config used by the seed / remote-update / verify roots.
        write_text_file(config_path, "# tc0022 config\n" 'bypass_data_preservation = "true"\n')

    def _write_local_first_config(self, config_path: Path) -> None:
        # Config for the root under test: local_first enabled.
        write_text_file(config_path, "# tc0022 local first config\n" 'bypass_data_preservation = "true"\n' 'local_first = "true"\n')

    def run(self, context: E2EContext) -> TestResult:
        """Run the five ordered sync phases and return the TestResult."""
        # Per-case scratch/log/state directories, recreated so reruns are
        # not contaminated by previous state.
        case_work_dir = context.work_root / "tc0022"
        case_log_dir = context.logs_dir / "tc0022"
        state_dir = context.state_dir / "tc0022"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        # Four independent sync roots, each with its own config dir:
        # seed (initial upload), local (local_first, under test),
        # remote-update (competing edit), verify (final download).
        seed_root = case_work_dir / "seedroot"
        local_root = case_work_dir / "localroot"
        remote_update_root = case_work_dir / "remoteupdateroot"
        verify_root = case_work_dir / "verifyroot"
        conf_seed = case_work_dir / "conf-seed"
        conf_local = case_work_dir / "conf-local"
        conf_remote = case_work_dir / "conf-remote"
        conf_verify = case_work_dir / "conf-verify"
        # Unique per run/process so concurrent CI runs cannot collide.
        root_name = f"ZZ_E2E_TC0022_{context.run_id}_{os.getpid()}"
        relative_file = f"{root_name}/conflict.txt"
        write_text_file(seed_root / relative_file, "base\n")
        write_text_file(remote_update_root / relative_file, "remote wins unless local_first applies\n")
        context.bootstrap_config_dir(conf_seed)
        self._write_default_config(conf_seed / "config")
        context.bootstrap_config_dir(conf_local)
        self._write_local_first_config(conf_local / "config")
        context.bootstrap_config_dir(conf_remote)
        self._write_default_config(conf_remote / "config")
        context.bootstrap_config_dir(conf_verify)
        self._write_default_config(conf_verify / "config")
        seed_stdout = case_log_dir / "seed_stdout.log"
        seed_stderr = case_log_dir / "seed_stderr.log"
        download_stdout = case_log_dir / "download_stdout.log"
        download_stderr = case_log_dir / "download_stderr.log"
        remote_stdout = case_log_dir / "remote_update_stdout.log"
        remote_stderr = case_log_dir / "remote_update_stderr.log"
        final_stdout = case_log_dir / "final_sync_stdout.log"
        final_stderr = case_log_dir / "final_sync_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        # Phase 1: seed the base file online.
        seed_command = [context.onedrive_bin, "--display-running-config", "--sync", "--upload-only", "--verbose", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(seed_root), "--confdir", str(conf_seed)]
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)
        # Phase 2: download into the local_first root.
        download_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(local_root), "--confdir", str(conf_local)]
        download_result = run_command(download_command, cwd=context.repo_root)
        write_text_file(download_stdout, download_result.stdout)
        write_text_file(download_stderr, download_result.stderr)
        # Create the local side of the conflict before the remote edit lands.
        write_text_file(local_root / relative_file, "local wins because local_first is enabled\n")
        # Phase 3: push a competing remote edit from a separate root.
        remote_command = [context.onedrive_bin, "--display-running-config", "--sync", "--upload-only", "--verbose", "--single-directory", root_name, "--syncdir", str(remote_update_root), "--confdir", str(conf_remote)]
        remote_result = run_command(remote_command, cwd=context.repo_root)
        write_text_file(remote_stdout, remote_result.stdout)
        write_text_file(remote_stderr, remote_result.stderr)
        # Phase 4: bidirectional sync from the local_first root — the step
        # whose conflict resolution is under test.
        final_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--single-directory", root_name, "--syncdir", str(local_root), "--confdir", str(conf_local)]
        final_result = run_command(final_command, cwd=context.repo_root)
        write_text_file(final_stdout, final_result.stdout)
        write_text_file(final_stderr, final_result.stderr)
        # Phase 5: fresh download into a clean root to observe remote state.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(verify_root), "--confdir", str(conf_verify)]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        # Empty string stands in for a missing file in both reads below.
        local_content = (local_root / relative_file).read_text(encoding="utf-8") if (local_root / relative_file).is_file() else ""
        remote_content = (verify_root / relative_file).read_text(encoding="utf-8") if (verify_root / relative_file).is_file() else ""
        # Persist key facts for post-mortem debugging of CI runs.
        write_text_file(metadata_file, f"case_id={self.case_id}\nroot_name={root_name}\nseed_returncode={seed_result.returncode}\ndownload_returncode={download_result.returncode}\nremote_returncode={remote_result.returncode}\nfinal_returncode={final_result.returncode}\nverify_returncode={verify_result.returncode}\nlocal_content={local_content!r}\nremote_content={remote_content!r}\n")
        artifacts = [str(seed_stdout), str(seed_stderr), str(download_stdout), str(download_stderr), str(remote_stdout), str(remote_stderr), str(final_stdout), str(final_stderr), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"seed_returncode": seed_result.returncode, "download_returncode": download_result.returncode, "remote_returncode": remote_result.returncode, "final_returncode": final_result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}
        # Any failed phase invalidates the scenario before content checks.
        for label, rc in [("seed", seed_result.returncode), ("download", download_result.returncode), ("remote update", remote_result.returncode), ("final sync", final_result.returncode), ("verify", verify_result.returncode)]:
            if rc != 0:
                return TestResult.fail_result(self.case_id, self.name, f"{label} phase failed with status {rc}", artifacts, details)
        # With local_first, both sides must converge to the local edit.
        expected = "local wins because local_first is enabled\n"
        if local_content != expected:
            return TestResult.fail_result(self.case_id, self.name, "Local content was not retained after conflict resolution with local_first enabled", artifacts, details)
        if remote_content != expected:
            return TestResult.fail_result(self.case_id, self.name, "Remote content did not converge to the local source-of-truth content when local_first was enabled", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,101 @@
from __future__ import annotations
import os
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.result import TestResult
from framework.utils import reset_directory, run_command, write_text_file
class TestCase0023BypassDataPreservationValidation(E2ETestCase):
    """E2E test 0023: confirm that enabling bypass_data_preservation makes
    the client overwrite locally modified conflict data with the remote
    copy, instead of renaming the local copy to a safeBackup file.
    """

    # Stable identifiers surfaced in TestResult records and log output.
    case_id = "0023"
    name = "bypass_data_preservation validation"
    description = "Validate that bypass_data_preservation overwrites local conflict data instead of creating safeBackup files"

    def _write_default_config(self, config_path: Path) -> None:
        """Config with data preservation left on (the safeBackup behaviour)."""
        write_text_file(config_path, "# tc0023 config\n" 'bypass_data_preservation = "false"\n')

    def _write_bypass_config(self, config_path: Path) -> None:
        """Config with data preservation bypassed (remote copy wins)."""
        write_text_file(config_path, "# tc0023 bypass config\n" 'bypass_data_preservation = "true"\n')

    def run(self, context: E2EContext) -> TestResult:
        """Drive the conflict scenario and return the verdict."""
        work_dir = context.work_root / "tc0023"
        logs_dir = context.logs_dir / "tc0023"
        case_state = context.state_dir / "tc0023"
        # Start from empty directories so reruns cannot leak state.
        for directory in (work_dir, logs_dir, case_state):
            reset_directory(directory)
        context.ensure_refresh_token_available()

        seed_root = work_dir / "seedroot"
        local_root = work_dir / "localroot"
        remote_update_root = work_dir / "remoteupdateroot"
        conf_seed = work_dir / "conf-seed"
        conf_local = work_dir / "conf-local"
        conf_remote = work_dir / "conf-remote"
        # Unique per run/process so concurrent CI runs cannot collide.
        root_name = f"ZZ_E2E_TC0023_{context.run_id}_{os.getpid()}"
        relative_file = f"{root_name}/conflict.txt"

        # Seed tree carries the original file; the remote-update tree holds
        # the newer content that must win once bypass is enabled locally.
        write_text_file(seed_root / relative_file, "base\n")
        write_text_file(remote_update_root / relative_file, "remote authoritative content\n")

        # Only the local (under-test) root runs with the bypass config.
        for conf_dir, write_config in (
            (conf_seed, self._write_default_config),
            (conf_local, self._write_bypass_config),
            (conf_remote, self._write_default_config),
        ):
            context.bootstrap_config_dir(conf_dir)
            write_config(conf_dir / "config")

        metadata_file = case_state / "metadata.txt"
        captured_logs = []

        def client_command(extra_flags, sync_dir, conf_dir):
            # Shared skeleton for every sync invocation in this scenario.
            return [
                context.onedrive_bin,
                "--display-running-config",
                "--sync",
                *extra_flags,
                "--single-directory",
                root_name,
                "--syncdir",
                str(sync_dir),
                "--confdir",
                str(conf_dir),
            ]

        def execute(phase, cmd):
            # Run one phase and persist its stdout/stderr as artefacts.
            result = run_command(cmd, cwd=context.repo_root)
            out_path = logs_dir / f"{phase}_stdout.log"
            err_path = logs_dir / f"{phase}_stderr.log"
            write_text_file(out_path, result.stdout)
            write_text_file(err_path, result.stderr)
            captured_logs.extend([str(out_path), str(err_path)])
            return result

        seed_result = execute("seed", client_command(["--upload-only", "--verbose", "--resync", "--resync-auth"], seed_root, conf_seed))
        download_result = execute("download", client_command(["--verbose", "--download-only", "--resync", "--resync-auth"], local_root, conf_local))
        # Introduce the local side of the conflict before the remote update.
        write_text_file(local_root / relative_file, "local conflicting content\n")
        remote_result = execute("remote_update", client_command(["--upload-only", "--verbose"], remote_update_root, conf_remote))
        final_result = execute("final_sync", client_command(["--verbose"], local_root, conf_local))

        local_file = local_root / relative_file
        local_content = local_file.read_text(encoding="utf-8") if local_file.is_file() else ""
        # Any safeBackup sibling means preservation was NOT bypassed.
        safe_backup_files = [p.name for p in local_file.parent.glob("*safeBackup*")]
        write_text_file(metadata_file, f"case_id={self.case_id}\nroot_name={root_name}\nseed_returncode={seed_result.returncode}\ndownload_returncode={download_result.returncode}\nremote_returncode={remote_result.returncode}\nfinal_returncode={final_result.returncode}\nlocal_content={local_content!r}\nsafe_backup_files={safe_backup_files!r}\n")
        artifacts = captured_logs + [str(metadata_file)]
        details = {
            "seed_returncode": seed_result.returncode,
            "download_returncode": download_result.returncode,
            "remote_returncode": remote_result.returncode,
            "final_returncode": final_result.returncode,
            "root_name": root_name,
            "safe_backup_count": len(safe_backup_files),
        }
        phase_outcomes = (
            ("seed", seed_result.returncode),
            ("download", download_result.returncode),
            ("remote update", remote_result.returncode),
            ("final sync", final_result.returncode),
        )
        for label, rc in phase_outcomes:
            if rc != 0:
                return TestResult.fail_result(self.case_id, self.name, f"{label} phase failed with status {rc}", artifacts, details)
        expected = "remote authoritative content\n"
        if local_content != expected:
            return TestResult.fail_result(self.case_id, self.name, "Local conflict content was not overwritten by the remote version when bypass_data_preservation was enabled", artifacts, details)
        if safe_backup_files:
            return TestResult.fail_result(self.case_id, self.name, "safeBackup files were created despite bypass_data_preservation being enabled", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,121 @@
from __future__ import annotations
import os
import shutil
from pathlib import Path
from framework.base import E2ETestCase
from framework.context import E2EContext
from framework.manifest import build_manifest, write_manifest
from framework.result import TestResult
from framework.utils import reset_directory, run_command, write_text_file
class TestCase0024BigDeleteSafeguardValidation(E2ETestCase):
    """E2E test 0024: validate the classify_as_big_delete safeguard.

    Flow:
      1. Seed a remote tree with five files under BigDelete/ plus one file
         under Keep/, with classify_as_big_delete = 3 in every config.
      2. Download that tree into a second local root, then delete
         BigDelete/ locally (5 files > threshold of 3).
      3. Run a sync and expect the safeguard to block the delete.
      4. Re-run with --force and expect the delete to be applied online.
      5. Verify remote state: BigDelete/ gone, Keep/keep.txt still present.

    Fix over the previous revision: the early-failure path (BigDelete not
    downloaded) listed metadata.txt as an artefact before the file was
    written; partial metadata is now persisted before that return so every
    listed artefact actually exists.
    """

    # Stable identifiers surfaced in TestResult records and log output.
    case_id = "0024"
    name = "big delete safeguard validation"
    description = "Validate classify_as_big_delete protection and forced acknowledgement via --force"

    def _write_config(self, config_path: Path) -> None:
        """Write the shared config; threshold 3 guarantees the 5-file
        delete trips the safeguard."""
        write_text_file(config_path, "# tc0024 config\n" 'bypass_data_preservation = "true"\n' 'classify_as_big_delete = "3"\n')

    def run(self, context: E2EContext) -> TestResult:
        """Drive the safeguard scenario and return its TestResult."""
        # Per-case scratch/log/state directories, recreated so reruns are
        # not contaminated by previous state.
        case_work_dir = context.work_root / "tc0024"
        case_log_dir = context.logs_dir / "tc0024"
        state_dir = context.state_dir / "tc0024"
        reset_directory(case_work_dir)
        reset_directory(case_log_dir)
        reset_directory(state_dir)
        context.ensure_refresh_token_available()
        seed_root = case_work_dir / "seedroot"
        local_root = case_work_dir / "localroot"
        verify_root = case_work_dir / "verifyroot"
        conf_seed = case_work_dir / "conf-seed"
        conf_local = case_work_dir / "conf-local"
        conf_verify = case_work_dir / "conf-verify"
        # Unique per run/process so concurrent CI runs cannot collide.
        root_name = f"ZZ_E2E_TC0024_{context.run_id}_{os.getpid()}"
        # Five files — above the classify_as_big_delete threshold of 3.
        for idx in range(1, 6):
            write_text_file(seed_root / root_name / "BigDelete" / f"file{idx}.txt", f"file {idx}\n")
        write_text_file(seed_root / root_name / "Keep" / "keep.txt", "keep\n")
        context.bootstrap_config_dir(conf_seed)
        self._write_config(conf_seed / "config")
        context.bootstrap_config_dir(conf_local)
        self._write_config(conf_local / "config")
        context.bootstrap_config_dir(conf_verify)
        self._write_config(conf_verify / "config")
        seed_stdout = case_log_dir / "seed_stdout.log"
        seed_stderr = case_log_dir / "seed_stderr.log"
        download_stdout = case_log_dir / "download_stdout.log"
        download_stderr = case_log_dir / "download_stderr.log"
        blocked_stdout = case_log_dir / "blocked_stdout.log"
        blocked_stderr = case_log_dir / "blocked_stderr.log"
        forced_stdout = case_log_dir / "forced_stdout.log"
        forced_stderr = case_log_dir / "forced_stderr.log"
        verify_stdout = case_log_dir / "verify_stdout.log"
        verify_stderr = case_log_dir / "verify_stderr.log"
        remote_manifest_file = state_dir / "remote_verify_manifest.txt"
        metadata_file = state_dir / "metadata.txt"
        # Phase 1: seed the tree online.
        seed_command = [context.onedrive_bin, "--display-running-config", "--sync", "--upload-only", "--verbose", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(seed_root), "--confdir", str(conf_seed)]
        seed_result = run_command(seed_command, cwd=context.repo_root)
        write_text_file(seed_stdout, seed_result.stdout)
        write_text_file(seed_stderr, seed_result.stderr)
        # Phase 2: download the tree into the root used for the delete.
        download_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(local_root), "--confdir", str(conf_local)]
        download_result = run_command(download_command, cwd=context.repo_root)
        write_text_file(download_stdout, download_result.stdout)
        write_text_file(download_stderr, download_result.stderr)
        target_delete_path = local_root / root_name / "BigDelete"
        if not target_delete_path.exists():
            # Persist partial metadata first so every artefact path handed
            # to the failure record exists on disk (previously metadata.txt
            # was listed here but never written).
            write_text_file(metadata_file, f"case_id={self.case_id}\nroot_name={root_name}\nseed_returncode={seed_result.returncode}\ndownload_returncode={download_result.returncode}\n")
            return TestResult.fail_result(
                self.case_id,
                self.name,
                "Expected BigDelete path was not downloaded before delete phase",
                [str(seed_stdout), str(seed_stderr), str(download_stdout), str(download_stderr), str(metadata_file)],
                {"seed_returncode": seed_result.returncode, "download_returncode": download_result.returncode, "root_name": root_name},
            )
        # Phase 3: the big delete itself, then a sync that should be blocked.
        shutil.rmtree(target_delete_path)
        blocked_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--single-directory", root_name, "--syncdir", str(local_root), "--confdir", str(conf_local)]
        blocked_result = run_command(blocked_command, cwd=context.repo_root)
        write_text_file(blocked_stdout, blocked_result.stdout)
        write_text_file(blocked_stderr, blocked_result.stderr)
        # Phase 4: acknowledge the delete with --force.
        forced_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--force", "--single-directory", root_name, "--syncdir", str(local_root), "--confdir", str(conf_local)]
        forced_result = run_command(forced_command, cwd=context.repo_root)
        write_text_file(forced_stdout, forced_result.stdout)
        write_text_file(forced_stderr, forced_result.stderr)
        # Phase 5: fresh download into a clean root to observe remote state.
        verify_command = [context.onedrive_bin, "--display-running-config", "--sync", "--verbose", "--download-only", "--resync", "--resync-auth", "--single-directory", root_name, "--syncdir", str(verify_root), "--confdir", str(conf_verify)]
        verify_result = run_command(verify_command, cwd=context.repo_root)
        write_text_file(verify_stdout, verify_result.stdout)
        write_text_file(verify_stderr, verify_result.stderr)
        remote_manifest = build_manifest(verify_root)
        write_manifest(remote_manifest_file, remote_manifest)
        # Combined, lower-cased output of the blocked run for message checks.
        blocked_output = (blocked_result.stdout + "\n" + blocked_result.stderr).lower()
        write_text_file(metadata_file, f"case_id={self.case_id}\nroot_name={root_name}\nseed_returncode={seed_result.returncode}\ndownload_returncode={download_result.returncode}\nblocked_returncode={blocked_result.returncode}\nforced_returncode={forced_result.returncode}\nverify_returncode={verify_result.returncode}\n")
        artifacts = [str(seed_stdout), str(seed_stderr), str(download_stdout), str(download_stderr), str(blocked_stdout), str(blocked_stderr), str(forced_stdout), str(forced_stderr), str(verify_stdout), str(verify_stderr), str(remote_manifest_file), str(metadata_file)]
        details = {"seed_returncode": seed_result.returncode, "download_returncode": download_result.returncode, "blocked_returncode": blocked_result.returncode, "forced_returncode": forced_result.returncode, "verify_returncode": verify_result.returncode, "root_name": root_name}
        # The blocked run is expected to fail; all other phases must pass.
        for label, rc in [("seed", seed_result.returncode), ("download", download_result.returncode), ("forced sync", forced_result.returncode), ("verify", verify_result.returncode)]:
            if rc != 0:
                return TestResult.fail_result(self.case_id, self.name, f"{label} phase failed with status {rc}", artifacts, details)
        # A clean exit without any big-delete message means the safeguard
        # never triggered at all.
        if blocked_result.returncode == 0 and "big delete" not in blocked_output:
            return TestResult.fail_result(self.case_id, self.name, "Big delete safeguard did not trigger before forced acknowledgement", artifacts, details)
        if "big delete" not in blocked_output and "--force" not in blocked_output:
            return TestResult.fail_result(self.case_id, self.name, "Blocked sync did not emit a big delete safeguard warning", artifacts, details)
        if any(entry == f"{root_name}/BigDelete" or entry.startswith(f"{root_name}/BigDelete/") for entry in remote_manifest):
            return TestResult.fail_result(self.case_id, self.name, "BigDelete content still exists online after acknowledged forced delete", artifacts, details)
        if f"{root_name}/Keep/keep.txt" not in remote_manifest:
            return TestResult.fail_result(self.case_id, self.name, "Keep content disappeared during big delete safeguard processing", artifacts, details)
        return TestResult.pass_result(self.case_id, self.name, artifacts, details)

View file

@ -0,0 +1,8 @@
# End to End Testing of OneDrive Client for Linux
[![End to End Testing](https://github.com/abraunegg/onedrive/actions/workflows/e2e-personal.yaml/badge.svg)](https://github.com/abraunegg/onedrive/blob/master/docs/end_to_end_testing.md)
| Test Case | Description | Details |
|:---|:---|:---|
| 0001 | Basic Resync | - validate that the E2E framework can invoke the client<br> - validate that the configured environment is sufficient to run a basic sync<br> - provide a simple baseline smoke test before more advanced E2E scenarios |
| 0002 | 'sync_list' Validation | This validates sync_list as a policy-conformance test.<br><br>The test is considered successful when all observed sync operations involving the fixture tree match the active sync_list rules.<br><br>This test covers exclusions, inclusions, wildcard and globbing for paths and files. Specific 'sync_list' test coverage is as follows:<br>- Scenario SL-0001: root directory include with trailing slash<br>- Scenario SL-0002: root include without trailing slash<br>- Scenario SL-0003: non-root include by name<br>- Scenario SL-0004: include tree with nested exclusion<br>- Scenario SL-0005: included tree with hidden directory excluded<br>- Scenario SL-0006: file-specific include inside named directory<br>- Scenario SL-0007: rooted include of Programming tree<br>- Scenario SL-0008: exclude Android recursive build output and include Programming<br>- Scenario SL-0009: exclude Android recursive .cxx content and include Programming<br>- Scenario SL-0010: exclude Web recursive build output and include Programming<br>- Scenario SL-0011: exclude .gradle anywhere and include Programming<br>- Scenario SL-0012: exclude build/kotlin anywhere and include Programming<br>- Scenario SL-0013: exclude .venv and venv anywhere and include Programming<br>- Scenario SL-0014: exclude common cache and vendor directories and include Programming<br>- Scenario SL-0015: complex style Programming ruleset<br>- Scenario SL-0016: massive mixed rule set across Programming Documents and Work<br>- Scenario SL-0017: stress test kitchen sink rule set with broad include and targeted file include<br>- Scenario SL-0018: exact trailing slash configuration with cleanup validation<br>- Scenario SL-0019: no trailing slash workaround with cleanup validation<br>- Scenario SL-0020: focused trailing slash Projects regression for sibling path survival<br>- Scenario SL-0021: focused no trailing slash Projects regression for sibling path survival<br>- Scenario SL-0022: exact root-file include<br>- Scenario SL-0023: sync_root_files = true with rooted 'Projects' include<br>- Scenario SL-0024: cleanup regression with 'sync_root_files = true'<br>- Scenario SL-0025: prefix-collision safety for 'Projects/Code'<br>- Scenario SL-0026: mixed rooted subtree include plus exact root-file include<br> |

View file

@ -1,7 +1,9 @@
# OneDrive Client for Linux
[![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
[![Release Date](https://img.shields.io/github/release-date/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
[![Test Build](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml)
[![End to End Testing](https://github.com/abraunegg/onedrive/actions/workflows/e2e-personal.yaml/badge.svg)](https://github.com/abraunegg/onedrive/blob/master/docs/end_to_end_testing.md)
[![Build Docker Images](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml)
[![Docker Pulls](https://img.shields.io/docker/pulls/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive)