mirror of
https://github.com/abraunegg/onedrive
synced 2024-05-27 02:52:18 +02:00
Compare commits
202 commits
Author | SHA1 | Date | |
---|---|---|---|
1a88d33be3 | |||
6282ec9e3b | |||
4a60654e3f | |||
a74ff589f8 | |||
651aa16844 | |||
85f99cb899 | |||
d712dd6093 | |||
43b0bed4cb | |||
fc5cbaf2e9 | |||
50d80d333e | |||
99271a45de | |||
fb0a5f0796 | |||
94ccb7a6d0 | |||
b2bf6aa475 | |||
268dcf9120 | |||
f5edb8b394 | |||
4b32dbf12d | |||
628a85dc93 | |||
3340fcca6a | |||
25cc361277 | |||
32a702eaaf | |||
30b108027d | |||
dfd1d1aa4a | |||
06420c9a0a | |||
c9fe8ad051 | |||
8045002c87 | |||
b0eb9ffcdf | |||
aa294b32f8 | |||
669a21747b | |||
3b8d7b3b1a | |||
64a3bfb033 | |||
16b751971f | |||
d960febb18 | |||
77684452aa | |||
4e5a32c210 | |||
56149c285c | |||
ae33616d0b | |||
3ef4a27fb7 | |||
b6d477b04e | |||
5945edc060 | |||
3aa191e3e4 | |||
6324f915fc | |||
8c6bdb984d | |||
d48bb34036 | |||
ce9b781c34 | |||
00ce7eed0e | |||
0838651327 | |||
cfb4933778 | |||
6607ba537b | |||
52b1276282 | |||
5b14157b09 | |||
00ba377c30 | |||
537b4338a6 | |||
eaeebbde8c | |||
c92d8470f4 | |||
b3829c1ef3 | |||
3acb6c13d2 | |||
c10870abd4 | |||
b9a239ace1 | |||
d043d5584f | |||
54e1ab0c16 | |||
ad20628dbe | |||
15233e485f | |||
7d5a5d33fd | |||
6bf0ea5deb | |||
3bca35d345 | |||
678add91f8 | |||
4db2ec02fc | |||
c22010095f | |||
44937abeb8 | |||
b2cea4b1dd | |||
72898345e7 | |||
817656ba90 | |||
1d8e0204bf | |||
19727d9c57 | |||
0b89a1fea0 | |||
87003c5923 | |||
f853b129cd | |||
62859609b7 | |||
5a7da61cbb | |||
12d54db1e4 | |||
c35ff20f5b | |||
95982c374e | |||
0ec1c95e4a | |||
941e1e215d | |||
361c3cf0a1 | |||
812f1a8d31 | |||
39a0048445 | |||
4a6c78a7a6 | |||
c73c0aa608 | |||
591b13f892 | |||
e3ce0ef6a8 | |||
8baec3c3fd | |||
ef8e5bed59 | |||
1eda30d29d | |||
7daf8797f8 | |||
4f15c35256 | |||
da967b06bc | |||
eaf097f7b0 | |||
fd0a028276 | |||
e08c89ad0b | |||
7fe242dfc0 | |||
1fdee4caaf | |||
f2b5a5543e | |||
1f4e0e143c | |||
d1642dab30 | |||
514df625b9 | |||
993770f49a | |||
a35c7c72f5 | |||
e2ed2d2b58 | |||
7a5cf8dd46 | |||
d035ee13cd | |||
a348750ec6 | |||
371b87f62a | |||
19308541eb | |||
738be2d150 | |||
5288f94ac4 | |||
3b7a06cdcd | |||
33423ee441 | |||
abc8b58ee7 | |||
d169dfc642 | |||
b16fe173e0 | |||
de701629a8 | |||
eba676c600 | |||
9b8a25f34c | |||
d4595beaba | |||
7b4f1a4d09 | |||
fd3a849d45 | |||
76752d0352 | |||
64b706f7c1 | |||
f561bd79a3 | |||
d57a695632 | |||
cc3b83afcb | |||
5ee30ff623 | |||
935ab764a4 | |||
b6c28fd2b3 | |||
c06e8ccfb6 | |||
aaf8505205 | |||
b2991fded9 | |||
496ba5fc3e | |||
11361c841f | |||
031c82922d | |||
a17a667e44 | |||
e84b16d7fc | |||
c82e90a140 | |||
90f9b31a8b | |||
87de4cf79c | |||
e422adb477 | |||
3591eededc | |||
0ab2955bd7 | |||
0e0fdacf7c | |||
ca984eba70 | |||
042949f1c1 | |||
2bab99b62c | |||
af86b55b35 | |||
dc8327e4b8 | |||
1935def140 | |||
c7eabab27b | |||
83a79077c6 | |||
39fba9394f | |||
bc5ad62fc0 | |||
2cdd7f4c63 | |||
7ebe0d7949 | |||
bc2728322a | |||
352a02052a | |||
4e1deec933 | |||
5958c54938 | |||
6c2e881314 | |||
cdea464952 | |||
a9aafabb79 | |||
c49446712b | |||
e008468686 | |||
0ebe66412a | |||
b7c7caa0b5 | |||
22fda0a286 | |||
8711074b57 | |||
2995dc66ae | |||
62701e5ef3 | |||
0fffb8fbc0 | |||
767352686f | |||
b6fee40939 | |||
ff8d137057 | |||
ddb9f1eae7 | |||
40743b5a86 | |||
de2bbb5564 | |||
9650ac2418 | |||
69d4a64456 | |||
ee2da78446 | |||
b38fd5c200 | |||
ee2c14c6cc | |||
b7aede6d30 | |||
0c8a24205d | |||
7139578af1 | |||
04c65f9b48 | |||
e08792da42 | |||
d4b733ae89 | |||
8a4931ccda | |||
680090a680 | |||
5506efb313 | |||
10448efd9a | |||
a134fa4a73 | |||
a83af761eb |
2
.github/ISSUE_TEMPLATE/config.yml
vendored
2
.github/ISSUE_TEMPLATE/config.yml
vendored
|
@ -2,4 +2,4 @@ blank_issues_enabled: false
|
|||
contact_links:
|
||||
- name: "Have a question?"
|
||||
url: https://github.com/abraunegg/onedrive/discussions
|
||||
about: "Please do not raise a GitHub issue for asking questions. Post your question in discussions. Thanks in advance for helping us keep the issue tracker clean!"
|
||||
about: "Please do not raise a GitHub issue for asking questions - please post your question under GitHub Discussions. When opening a new discussion, please include all relevant details such as including your application version and how you installed the client. Thanks in advance for helping us keep the issue tracker clean!"
|
||||
|
|
96
.github/workflows/docker.yaml
vendored
Normal file
96
.github/workflows/docker.yaml
vendored
Normal file
|
@ -0,0 +1,96 @@
|
|||
name: Build Docker Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
tags: [ 'v*' ]
|
||||
pull_request:
|
||||
# Comment these out to force a test build on a PR
|
||||
branches:
|
||||
- master
|
||||
types: [closed]
|
||||
|
||||
env:
|
||||
DOCKER_HUB_SLUG: driveone/onedrive
|
||||
|
||||
jobs:
|
||||
build:
|
||||
# Comment this out to force a test build on a PR
|
||||
if: (!(github.event.action == 'closed' && github.event.pull_request.merged != true))
|
||||
|
||||
# Build runs on
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
flavor: [ fedora, debian, alpine ]
|
||||
include:
|
||||
- flavor: fedora
|
||||
dockerfile: ./contrib/docker/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- flavor: debian
|
||||
dockerfile: ./contrib/docker/Dockerfile-debian
|
||||
platforms: linux/386,linux/amd64,linux/arm64,linux/arm/v7
|
||||
- flavor: alpine
|
||||
dockerfile: ./contrib/docker/Dockerfile-alpine
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: marcelcoding/ghaction-docker-meta@v2
|
||||
with:
|
||||
tag-edge: true
|
||||
images: |
|
||||
${{ env.DOCKER_HUB_SLUG }}
|
||||
tag-semver: |
|
||||
{{version}}
|
||||
{{major}}.{{minor}}
|
||||
flavor: ${{ matrix.flavor }}
|
||||
main-flavor: ${{ matrix.flavor == 'debian' }}
|
||||
|
||||
- uses: docker/setup-qemu-action@v2
|
||||
with:
|
||||
image: tonistiigi/binfmt:latest
|
||||
platforms: all
|
||||
if: matrix.platforms != 'linux/amd64'
|
||||
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ matrix.flavor }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-${{ matrix.flavor }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
if: github.event_name != 'pull_request'
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Build and Push to Docker
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta.outputs.labels }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-new
|
||||
|
||||
- name: Move cache
|
||||
run: |
|
||||
rm -rf /tmp/.buildx-cache
|
||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
5
.github/workflows/lock.yml
vendored
5
.github/workflows/lock.yml
vendored
|
@ -8,9 +8,10 @@ jobs:
|
|||
lock:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@v2.0.3
|
||||
- name: Lock Threads
|
||||
uses: dessant/lock-threads@v2.0.3
|
||||
with:
|
||||
github-token: ${{ github.token }}
|
||||
github-token: ${{ secrets.LOCK_THREADS }}
|
||||
issue-lock-inactive-days: '7'
|
||||
issue-exclude-created-before: ''
|
||||
issue-exclude-labels: ''
|
||||
|
|
43
.github/workflows/testbuild.yaml
vendored
Normal file
43
.github/workflows/testbuild.yaml
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
name: Test Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master" ]
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
#runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update Image
|
||||
run: |
|
||||
sudo apt-get clean
|
||||
sudo apt-get update -y
|
||||
|
||||
- name: Install build-essential
|
||||
run: sudo apt install -y build-essential
|
||||
|
||||
- name: Install build-dependencies
|
||||
run: sudo apt install -y libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
|
||||
- name: Configure
|
||||
run: ./configure
|
||||
|
||||
- name: Compile
|
||||
run: make clean; make;
|
||||
|
||||
- name: Install
|
||||
run: sudo make install
|
||||
|
||||
- name: Run
|
||||
run: onedrive --version
|
192
.travis-ci.sh
192
.travis-ci.sh
|
@ -1,192 +0,0 @@
|
|||
#!/bin/bash
|
||||
# Based on a test script from avsm/ocaml repo https://github.com/avsm/ocaml
|
||||
# Adapted from https://www.tomaz.me/2013/12/02/running-travis-ci-tests-on-arm.html
|
||||
# Adapted from https://github.com/PJK/libcbor/blob/master/.travis-qemu.sh
|
||||
# Adapted from https://gist.github.com/oznu/b5efd7784e5a820ec3746820f2183dc0
|
||||
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-armhf-bootstrap.html
|
||||
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-stretch-arm64.html
|
||||
|
||||
set -e
|
||||
|
||||
# CHROOT Directory
|
||||
CHROOT_DIR=/tmp/chroot
|
||||
|
||||
# Debian package dependencies for the host to run ARM under QEMU
|
||||
DEBIAN_MIRROR="http://httpredir.debian.org/debian"
|
||||
HOST_DEPENDENCIES=(qemu-user-static binfmt-support debootstrap sbuild wget)
|
||||
|
||||
# Debian package dependencies for the chrooted environment
|
||||
GUEST_DEPENDENCIES=(build-essential libcurl4-openssl-dev libsqlite3-dev libgnutls-openssl27 git pkg-config libxml2)
|
||||
|
||||
# LDC Version
|
||||
# Different versions due to https://github.com/ldc-developers/ldc/issues/3027
|
||||
# LDC v1.16.0 re-introduces ARMHF and ARM64 version - https://github.com/ldc-developers/ldc/releases/tag/v1.16.0
|
||||
LDC_VERSION_ARMHF=1.16.0
|
||||
LDC_VERSION_ARM64=1.16.0
|
||||
|
||||
function setup_arm32_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 32Bit Variables
|
||||
VERSION=jessie
|
||||
CHROOT_ARCH=armhf
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y "${HOST_DEPENDENCIES[@]}"
|
||||
# Download LDC compiler
|
||||
wget "https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_ARMHF}/ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz"
|
||||
tar -xf "ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz"
|
||||
mv "ldc2-${LDC_VERSION_ARMHF}-linux-armhf" "dlang-${ARCH}"
|
||||
rm -rf "ldc2-${LDC_VERSION_ARMHF}-linux-armhf.tar.xz"
|
||||
# Create chrooted environment
|
||||
sudo mkdir "${CHROOT_DIR}"
|
||||
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch="${CHROOT_ARCH}" "${VERSION}" "${CHROOT_DIR}" "${DEBIAN_MIRROR}"
|
||||
sudo cp /usr/bin/qemu-arm-static "${CHROOT_DIR}"/usr/bin/
|
||||
sudo chroot "${CHROOT_DIR}" /debootstrap/debootstrap --second-stage
|
||||
sudo sbuild-createchroot --arch=${CHROOT_ARCH} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function setup_arm64_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 64Bit Variables
|
||||
VERSION64=stretch
|
||||
CHROOT_ARCH64=arm64
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y "${HOST_DEPENDENCIES[@]}"
|
||||
# Download LDC compiler
|
||||
wget "https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_ARM64}/ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz"
|
||||
tar -xf "ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz"
|
||||
mv "ldc2-${LDC_VERSION_ARM64}-linux-aarch64" "dlang-${ARCH}"
|
||||
rm -rf "ldc2-${LDC_VERSION_ARM64}-linux-aarch64.tar.xz"
|
||||
|
||||
# ARM64 qemu-debootstrap needs to be 1.0.78, Trusty is 1.0.59
|
||||
#sudo echo "deb http://archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" >> /etc/apt/sources.list
|
||||
echo "deb http://archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list > /dev/null
|
||||
sudo apt-get update
|
||||
sudo apt-get install -t xenial debootstrap
|
||||
|
||||
# Create chrooted environment
|
||||
sudo mkdir "${CHROOT_DIR}"
|
||||
sudo qemu-debootstrap --arch=${CHROOT_ARCH64} ${VERSION64} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function setup_x32_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 32Bit Variables
|
||||
VERSION=jessie
|
||||
CHROOT_ARCH32=i386
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y "${HOST_DEPENDENCIES[@]}"
|
||||
# Download DMD compiler
|
||||
DMDVER=2.083.1
|
||||
wget "http://downloads.dlang.org/releases/2.x/${DMDVER}/dmd.${DMDVER}.linux.tar.xz"
|
||||
tar -xf "dmd.${DMDVER}.linux.tar.xz"
|
||||
mv dmd2 "dlang-${ARCH}"
|
||||
rm -rf "dmd.${DMDVER}.linux.tar.xz"
|
||||
# Create chrooted environment
|
||||
sudo mkdir "${CHROOT_DIR}"
|
||||
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH32} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
sudo cp /usr/bin/qemu-i386-static "${CHROOT_DIR}/usr/bin/"
|
||||
sudo cp /usr/bin/qemu-x86_64-static "${CHROOT_DIR}/usr/bin/"
|
||||
sudo chroot "${CHROOT_DIR}" /debootstrap/debootstrap --second-stage
|
||||
sudo sbuild-createchroot --arch=${CHROOT_ARCH32} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function configure_chroot {
|
||||
# Create file with environment variables which will be used inside chrooted environment
|
||||
echo "export ARCH=${ARCH}" > envvars.sh
|
||||
echo "export TRAVIS_BUILD_DIR=${TRAVIS_BUILD_DIR}" >> envvars.sh
|
||||
chmod a+x envvars.sh
|
||||
|
||||
# Install dependencies inside chroot
|
||||
sudo chroot "${CHROOT_DIR}" apt-get update
|
||||
sudo chroot "${CHROOT_DIR}" apt-get --allow-unauthenticated install -qq -y "${GUEST_DEPENDENCIES[@]}"
|
||||
|
||||
# Create build dir and copy travis build files to our chroot environment
|
||||
sudo mkdir -p "${CHROOT_DIR}"/"${TRAVIS_BUILD_DIR}"
|
||||
sudo rsync -a "${TRAVIS_BUILD_DIR}"/ "${CHROOT_DIR}"/"${TRAVIS_BUILD_DIR}"/
|
||||
|
||||
# Indicate chroot environment has been set up
|
||||
sudo touch "${CHROOT_DIR}"/.chroot_is_done
|
||||
|
||||
# Call ourselves again which will cause tests to run
|
||||
sudo chroot "${CHROOT_DIR}" bash -c "cd ${TRAVIS_BUILD_DIR} && chmod a+x ./.travis-ci.sh"
|
||||
sudo chroot "${CHROOT_DIR}" bash -c "cd ${TRAVIS_BUILD_DIR} && ./.travis-ci.sh"
|
||||
}
|
||||
|
||||
function build_onedrive {
|
||||
# Depending on architecture, build onedrive using applicable tool
|
||||
echo "$(uname -a)"
|
||||
HOMEDIR=$(pwd)
|
||||
if [ "${ARCH}" = "x64" ]; then
|
||||
# Build on x86_64 as normal
|
||||
./configure
|
||||
make clean; make;
|
||||
else
|
||||
if [ "${ARCH}" = "x32" ]; then
|
||||
# 32Bit DMD Build
|
||||
./configure DC="${HOMEDIR}"/dlang-"${ARCH}"/linux/bin32/dmd
|
||||
make clean;
|
||||
make
|
||||
else
|
||||
# LDC Build - ARM32, ARM64
|
||||
./configure DC="${HOMEDIR}"/dlang-"${ARCH}"/bin/ldmd2
|
||||
make clean;
|
||||
make
|
||||
fi
|
||||
fi
|
||||
# Functional testing of built application
|
||||
test_onedrive
|
||||
}
|
||||
|
||||
function test_onedrive {
|
||||
# Testing onedrive client - does the built application execute?
|
||||
./onedrive --version
|
||||
|
||||
# Functional testing on x64 only
|
||||
if [ "${ARCH}" = "x64" ]; then
|
||||
chmod a+x ./tests/makefiles.sh
|
||||
cd ./tests/
|
||||
./makefiles.sh
|
||||
cd ..
|
||||
mkdir -p ~/.config/onedrive/
|
||||
echo "$ODP" > ~/.config/onedrive/refresh_token
|
||||
./onedrive --synchronize --verbose --syncdir '~/OneDriveALT'
|
||||
# OneDrive Cleanup
|
||||
rm -rf ~/OneDriveALT/*
|
||||
./onedrive --synchronize --verbose --syncdir '~/OneDriveALT'
|
||||
fi
|
||||
}
|
||||
|
||||
if [ "${ARCH}" = "arm32" ] || [ "${ARCH}" = "arm64" ] || [ "${ARCH}" = "x32" ]; then
|
||||
if [ -e "/.chroot_is_done" ]; then
|
||||
# We are inside ARM chroot
|
||||
echo "Running inside chrooted QEMU ${ARCH} environment"
|
||||
. ./envvars.sh
|
||||
export PATH="$PATH:/usr/sbin:/sbin:/bin"
|
||||
build_onedrive
|
||||
else
|
||||
# Need to set up chrooted environment first
|
||||
echo "Setting up chrooted ${ARCH} build environment"
|
||||
if [ "${ARCH}" = "x32" ]; then
|
||||
# 32Bit i386 Environment
|
||||
setup_x32_chroot
|
||||
else
|
||||
if [ "${ARCH}" = "arm32" ]; then
|
||||
# 32Bit ARM Environment
|
||||
setup_arm32_chroot
|
||||
else
|
||||
# 64Bit ARM Environment
|
||||
setup_arm64_chroot
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
# Proceed as normal
|
||||
echo "Running an x86_64 Build"
|
||||
build_onedrive
|
||||
fi
|
17
.travis.yml
17
.travis.yml
|
@ -1,17 +0,0 @@
|
|||
# sudo access is required
|
||||
sudo: required
|
||||
# Compilation language
|
||||
language: d
|
||||
# Use latest DMD
|
||||
d:
|
||||
- dmd
|
||||
|
||||
# What build architectures will we build on
|
||||
env:
|
||||
- ARCH=x64
|
||||
- ARCH=x32
|
||||
- ARCH=arm32
|
||||
- ARCH=arm64
|
||||
|
||||
script:
|
||||
- "bash -ex .travis-ci.sh"
|
162
CHANGELOG.md
162
CHANGELOG.md
|
@ -2,6 +2,168 @@
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## 2.4.25 - 2023-06-21
|
||||
### Fixed
|
||||
* Fixed that the application was reporting as v2.2.24 when in fact it was v2.4.24 (release tagging issue)
|
||||
* Fixed that the running version obsolete flag (due to above issue) was causing a false flag as being obsolete
|
||||
* Fixed that zero-byte files do not have a hash as reported by the OneDrive API thus should not generate an error message
|
||||
|
||||
### Updated
|
||||
* Update to Debian Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Update to Alpine Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Update to Fedora Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.24 - 2023-06-20
|
||||
### Fixed
|
||||
* Fix for extra encoded quotation marks surrounding Docker environment variables
|
||||
* Fix webhook subscription creation for SharePoint Libraries
|
||||
* Fix that a HTTP 504 - Gateway Timeout causes local files to be deleted when using --download-only & --cleanup-local-files mode
|
||||
* Fix that folders are renamed despite using --dry-run
|
||||
* Fix deprecation warnings with dmd 2.103.0
|
||||
* Fix error that the application is unable to perform a database vacuum: out of memory when exiting
|
||||
|
||||
### Removed
|
||||
* Remove sha1 from being used by the client as this is being depreciated by Microsoft in July 2023
|
||||
* Complete the removal of crc32 elements
|
||||
|
||||
### Added
|
||||
* Added ONEDRIVE_SINGLE_DIRECTORY configuration capability to Docker
|
||||
* Added --get-file-link shell completion
|
||||
* Added configuration to allow HTTP session timeout(s) tuning via config (taken from v2.5.x)
|
||||
|
||||
### Updated
|
||||
* Update to Debian Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Update to Alpine Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Update to Fedora Docker file to resolve Docker image Operating System reported vulnerabilities
|
||||
* Updated cgi.d to commit 680003a - last upstream change before requiring `core.d` dependency requirement
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.23 - 2023-01-06
|
||||
### Fixed
|
||||
* Fixed RHEL7, RHEL8 and RHEL9 Makefile and SPEC file compatibility
|
||||
|
||||
### Removed
|
||||
* Disable systemd 'PrivateUsers' due to issues with systemd running processes when option is enabled, causes local file deletes on RHEL based systems
|
||||
|
||||
### Updated
|
||||
* Update --get-O365-drive-id error handling to display a more a more appropriate error message if the API cannot be found
|
||||
* Update the GitHub version check to utilise the date a release was done, to allow 1 month grace period before generating obsolete version message
|
||||
* Update Alpine Dockerfile to use Alpine 3.17 and Golang 1.19
|
||||
* Update handling of --source-directory and --destination-directory if one is empty or missing and if used with --synchronize or --monitor
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.22 - 2022-12-06
|
||||
### Fixed
|
||||
* Fix application crash when local file is changed to a symbolic link with non-existent target
|
||||
* Fix build error with dmd-2.101.0
|
||||
* Fix build error with LDC 1.28.1 on Alpine
|
||||
* Fix issue of silent exit when unable to delete local files when using --cleanup-local-files
|
||||
* Fix application crash due to access permissions on configured path for sync_dir
|
||||
* Fix potential application crash when exiting due to failure state and unable to cleanly shutdown the database
|
||||
* Fix creation of parent empty directories when parent is excluded by sync_list
|
||||
|
||||
### Added
|
||||
* Added performance output details for key functions
|
||||
|
||||
### Changed
|
||||
* Switch Docker 'latest' to point at Debian builds rather than Fedora due to ongoing Fedora build failures
|
||||
* Align application logging events to actual application defaults for --monitor operations
|
||||
* Performance Improvement: Avoid duplicate costly path calculations and DB operations if not required
|
||||
* Disable non-working remaining sandboxing options within systemd service files
|
||||
* Performance Improvement: Only check 'sync_list' if this has been enabled and configured
|
||||
* Display 'Sync with OneDrive is complete' when using --synchronize
|
||||
* Change the order of processing between Microsoft OneDrive restrictions and limitations check and skip_file|skip_dir check
|
||||
|
||||
### Removed
|
||||
* Remove building Fedora ARMv7 builds due to ongoing build failures
|
||||
|
||||
### Updated
|
||||
* Update config change detection handling
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.21 - 2022-09-27
|
||||
### Fixed
|
||||
* Fix that the download progress bar doesn't always reach 100% when rate_limit is set
|
||||
* Fix --resync handling of database file removal
|
||||
* Fix Makefile to be consistent with permissions that are being used
|
||||
* Fix that logging output for skipped uploaded files is missing
|
||||
* Fix to allow non-sync tasks while sync is running
|
||||
* Fix where --resync is enforced for non-sync operations
|
||||
* Fix to resolve segfault when running 'onedrive --display-sync-status' when run as 2nd process
|
||||
* Fix DMD 2.100.2 depreciation warning
|
||||
|
||||
### Added
|
||||
* Add GitHub Action Test Build Workflow (replacing Travis CI)
|
||||
* Add option --display-running-config to display the running configuration as used at application startup
|
||||
* Add 'config' option to request readonly access in oauth authorization step
|
||||
* Add option --cleanup-local-files to cleanup local files regardless of sync state when using --download-only
|
||||
* Add option --with-editing-perms to create a read-write shareable link when used with --create-share-link <file>
|
||||
|
||||
### Changed
|
||||
* Change the exit code of the application to 126 when a --resync is required
|
||||
|
||||
### Updated
|
||||
* Updated --get-O365-drive-id implementation for data access
|
||||
* Update what application options require an argument
|
||||
* Update application logging output for error messages to remove certain \n prefix when logging to a file
|
||||
* Update onedrive.spec.in to fix error building RPM
|
||||
* Update GUI notification handling for specific skipped scenarios
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.20 - 2022-07-20
|
||||
### Fixed
|
||||
* Fix 'foreign key constraint failed' when using OneDrive Business Shared Folders due to change to using /delta query
|
||||
* Fix various little spelling fixes (check with lintian during Debian packaging)
|
||||
* Fix handling of a custom configuration directory when using --confdir
|
||||
* Fix to ensure that any active http instance is shutdown before any application exit
|
||||
* Fix to enforce that --confdir must be a directory
|
||||
|
||||
### Added
|
||||
* Added 'force_http_11' configuration option to allow forcing HTTP/1.1 operations
|
||||
|
||||
### Changed
|
||||
* Increased thread sleep for better process I/O wait handling
|
||||
* Removed 'force_http_2' configuration option
|
||||
|
||||
### Updated
|
||||
* Update OneDrive API response handling for National Cloud Deployments
|
||||
* Updated to switch to using curl defaults for HTTP/2 operations
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.19 - 2022-06-15
|
||||
### Fixed
|
||||
* Update Business Shared Folders to use a /delta query
|
||||
* Update when DB is updated by OneDrive API data and update when file hash is required to be generated
|
||||
|
||||
### Added
|
||||
* Added ONEDRIVE_UPLOADONLY flag for Docker
|
||||
|
||||
### Updated
|
||||
* Updated GitHub workflows
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.18 - 2022-06-02
|
||||
### Fixed
|
||||
* Fixed various database related access issues steming from running multiple instances of the application at the same time using the same configuration data
|
||||
* Fixed --display-config being impacted by --resync flag
|
||||
* Fixed installation permissions for onedrive man-pages file
|
||||
* Fixed that in some situations that users try --upload-only and --download-only together which is not possible
|
||||
* Fixed application crash if unable to read required hash files
|
||||
|
||||
### Added
|
||||
* Added Feature Request to add an override for skip_dir|skip_file through flag to force sync
|
||||
* Added a check to validate local filesystem available space before attempting file download
|
||||
* Added GitHub Actions to build Docker containers and push to DockerHub
|
||||
|
||||
### Updated
|
||||
* Updated all Docker build files to current distributions, using updated distribution LDC version
|
||||
* Updated logging output to logfiles when an actual sync process is occuring
|
||||
* Updated output of --display-config to be more relevant
|
||||
* Updated manpage to align with application configuration
|
||||
* Updated documentation and Docker files based on minimum compiler versions to dmd-2.088.0 and ldc-1.18.0
|
||||
* Updated documentation (various)
|
||||
|
||||
## 2.4.17 - 2022-04-30
|
||||
### Fixed
|
||||
* Fix docker build, by add missing git package for Fedora builds
|
||||
|
|
16
Makefile.in
16
Makefile.in
|
@ -55,10 +55,10 @@ endif
|
|||
system_unit_files = contrib/systemd/onedrive@.service
|
||||
user_unit_files = contrib/systemd/onedrive.service
|
||||
|
||||
DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md
|
||||
DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md docs/application-security.md
|
||||
|
||||
ifneq ("$(wildcard /etc/redhat-release)","")
|
||||
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux Server|CentOS)" | wc -l)
|
||||
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux|CentOS)" | wc -l)
|
||||
RHEL_VERSION = $(shell rpm --eval "%{rhel}")
|
||||
else
|
||||
RHEL = 0
|
||||
|
@ -107,10 +107,10 @@ onedrive: $(SOURCES)
|
|||
|
||||
install: all
|
||||
$(INSTALL) -D onedrive $(DESTDIR)$(bindir)/onedrive
|
||||
$(INSTALL) -D onedrive.1 $(DESTDIR)$(mandir)/man1/onedrive.1
|
||||
$(INSTALL) -D -m 644 contrib/logrotate/onedrive.logrotate $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive
|
||||
$(INSTALL) -D -m 0644 onedrive.1 $(DESTDIR)$(mandir)/man1/onedrive.1
|
||||
$(INSTALL) -D -m 0644 contrib/logrotate/onedrive.logrotate $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive
|
||||
mkdir -p $(DESTDIR)$(docdir)
|
||||
$(INSTALL) -D -m 644 $(DOCFILES) $(DESTDIR)$(docdir)
|
||||
$(INSTALL) -D -m 0644 $(DOCFILES) $(DESTDIR)$(docdir)
|
||||
ifeq ($(HAVE_SYSTEMD),yes)
|
||||
$(INSTALL) -d -m 0755 $(DESTDIR)$(systemduserunitdir) $(DESTDIR)$(systemdsystemunitdir)
|
||||
ifeq ($(RHEL),1)
|
||||
|
@ -127,9 +127,9 @@ ifeq ($(RHEL_VERSION),6)
|
|||
endif
|
||||
endif
|
||||
ifeq ($(COMPLETIONS),yes)
|
||||
$(INSTALL) -D -m 644 contrib/completions/complete.zsh $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive
|
||||
$(INSTALL) -D -m 644 contrib/completions/complete.bash $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive
|
||||
$(INSTALL) -D -m 644 contrib/completions/complete.fish $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.zsh $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.bash $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.fish $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
endif
|
||||
|
||||
|
||||
|
|
27
README.md
27
README.md
|
@ -1,8 +1,8 @@
|
|||
# OneDrive Client for Linux
|
||||
[![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
[![Release Date](https://img.shields.io/github/release-date/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
[![Travis CI](https://img.shields.io/travis/com/abraunegg/onedrive)](https://travis-ci.com/abraunegg/onedrive/builds)
|
||||
[![Docker Build](https://img.shields.io/docker/cloud/automated/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive)
|
||||
[![Test Build](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml)
|
||||
[![Build Docker Images](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml)
|
||||
[![Docker Pulls](https://img.shields.io/docker/pulls/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive)
|
||||
|
||||
A free Microsoft OneDrive Client which supports OneDrive Personal, OneDrive for Business, OneDrive for Office365 and SharePoint.
|
||||
|
@ -36,17 +36,28 @@ This client is a 'fork' of the [skilion](https://github.com/skilion/onedrive) cl
|
|||
* Colorful log output terminal modification: [OneDrive Client for Linux Colorful log Output](https://github.com/zzzdeb/dotfiles/blob/master/scripts/tools/onedrive_log)
|
||||
* System Tray Icon: [OneDrive Client for Linux System Tray Icon](https://github.com/DanielBorgesOliveira/onedrive_tray)
|
||||
|
||||
## Supported Application Version
|
||||
Only the current application release version or greater is supported.
|
||||
|
||||
The current application release version is: [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
Check the version of the application you are using `onedrive --version` and ensure that you are running either the current release or compile the application yourself from master to get the latest version.
|
||||
|
||||
If you are not using the above application version or greater, you must upgrade your application to obtain support.
|
||||
|
||||
## Have a Question
|
||||
If you have a question or need something clarified, please raise a new disscussion post [here](https://github.com/abraunegg/onedrive/discussions)
|
||||
|
||||
Be sure to review the Frequently Asked Questions as well before raising a new discussion post.
|
||||
|
||||
## Frequently Asked Questions
|
||||
Refer to [Frequently Asked Questions](https://github.com/abraunegg/onedrive/wiki/Frequently-Asked-Questions)
|
||||
|
||||
## Have a question
|
||||
If you have a question or need something clarified, please raise a new disscussion post [here](https://github.com/abraunegg/onedrive/discussions)
|
||||
|
||||
## Reporting an Issue or Bug
|
||||
If you encounter any bugs you can report them here on Github. Before filing an issue be sure to:
|
||||
If you encounter any bugs you can report them here on GitHub. Before filing an issue be sure to:
|
||||
|
||||
1. Check the version of the application you are using `onedrive --version` and ensure that you are running either the latest [release](https://github.com/abraunegg/onedrive/releases) or built from master.
|
||||
2. Fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md)
|
||||
1. Check the version of the application you are using `onedrive --version` and ensure that you are running a supported application version. If you are not using a supported application version, you must first upgrade your application to a supported version and then re-test for your issue.
|
||||
2. If you are using a supported applcation version, fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md)
|
||||
3. Generate a debug log for support using the following [process](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support)
|
||||
* If you are in *any* way concerned regarding the sensitivity of the data contained with in the verbose debug log file, create a new OneDrive account, configure the client to use that, use *dummy* data to simulate your environment and then replicate your original issue
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
|
|
15
config
15
config
|
@ -18,15 +18,16 @@
|
|||
# disable_notifications = "false"
|
||||
# disable_upload_validation = "false"
|
||||
# enable_logging = "false"
|
||||
# force_http_2 = "false"
|
||||
# force_http_11 = "false"
|
||||
# local_first = "false"
|
||||
# no_remote_delete = "false"
|
||||
# skip_symlinks = "false"
|
||||
# debug_https = "false"
|
||||
# skip_dotfiles = "false"
|
||||
# skip_size = "1000"
|
||||
# dry_run = "false"
|
||||
# min_notify_changes = "5"
|
||||
# monitor_log_frequency = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
# monitor_fullscan_frequency = "12"
|
||||
# sync_root_files = "false"
|
||||
# classify_as_big_delete = "1000"
|
||||
|
@ -43,10 +44,18 @@
|
|||
# sync_dir_permissions = "700"
|
||||
# sync_file_permissions = "600"
|
||||
# rate_limit = "131072"
|
||||
# operation_timeout = "3600"
|
||||
# webhook_enabled = "false"
|
||||
# webhook_public_url = ""
|
||||
# webhook_listening_host = ""
|
||||
# webhook_listening_port = "8888"
|
||||
# webhook_expiration_interval = "86400"
|
||||
# webhook_renewal_interval = "43200"
|
||||
# space_reservation = "50"
|
||||
# display_running_config = "false"
|
||||
# read_only_auth_scope = "false"
|
||||
# cleanup_local_files = "false"
|
||||
# operation_timeout = "3600"
|
||||
# dns_timeout = "60"
|
||||
# connect_timeout = "10"
|
||||
# data_timeout = "600"
|
||||
# ip_protocol_version = "0"
|
||||
|
|
24
configure
vendored
24
configure
vendored
|
@ -1,6 +1,6 @@
|
|||
#! /bin/sh
|
||||
# Guess values for system-dependent variables and create Makefiles.
|
||||
# Generated by GNU Autoconf 2.69 for onedrive v2.4.17.
|
||||
# Generated by GNU Autoconf 2.69 for onedrive v2.4.25.
|
||||
#
|
||||
# Report bugs to <https://github.com/abraunegg/onedrive>.
|
||||
#
|
||||
|
@ -579,8 +579,8 @@ MAKEFLAGS=
|
|||
# Identity of this package.
|
||||
PACKAGE_NAME='onedrive'
|
||||
PACKAGE_TARNAME='onedrive'
|
||||
PACKAGE_VERSION='v2.4.17'
|
||||
PACKAGE_STRING='onedrive v2.4.17'
|
||||
PACKAGE_VERSION='v2.4.25'
|
||||
PACKAGE_STRING='onedrive v2.4.25'
|
||||
PACKAGE_BUGREPORT='https://github.com/abraunegg/onedrive'
|
||||
PACKAGE_URL=''
|
||||
|
||||
|
@ -1219,7 +1219,7 @@ if test "$ac_init_help" = "long"; then
|
|||
# Omit some internal or obsolete options to make the list less imposing.
|
||||
# This message is too long to be a string in the A/UX 3.1 sh.
|
||||
cat <<_ACEOF
|
||||
\`configure' configures onedrive v2.4.17 to adapt to many kinds of systems.
|
||||
\`configure' configures onedrive v2.4.25 to adapt to many kinds of systems.
|
||||
|
||||
Usage: $0 [OPTION]... [VAR=VALUE]...
|
||||
|
||||
|
@ -1280,7 +1280,7 @@ fi
|
|||
|
||||
if test -n "$ac_init_help"; then
|
||||
case $ac_init_help in
|
||||
short | recursive ) echo "Configuration of onedrive v2.4.17:";;
|
||||
short | recursive ) echo "Configuration of onedrive v2.4.25:";;
|
||||
esac
|
||||
cat <<\_ACEOF
|
||||
|
||||
|
@ -1393,7 +1393,7 @@ fi
|
|||
test -n "$ac_init_help" && exit $ac_status
|
||||
if $ac_init_version; then
|
||||
cat <<\_ACEOF
|
||||
onedrive configure v2.4.17
|
||||
onedrive configure v2.4.25
|
||||
generated by GNU Autoconf 2.69
|
||||
|
||||
Copyright (C) 2012 Free Software Foundation, Inc.
|
||||
|
@ -1410,7 +1410,7 @@ cat >config.log <<_ACEOF
|
|||
This file contains any messages produced by compilers while
|
||||
running configure, to aid debugging if configure makes a mistake.
|
||||
|
||||
It was created by onedrive $as_me v2.4.17, which was
|
||||
It was created by onedrive $as_me v2.4.25, which was
|
||||
generated by GNU Autoconf 2.69. Invocation command line was
|
||||
|
||||
$ $0 $@
|
||||
|
@ -2133,7 +2133,7 @@ case $(basename $DC) in
|
|||
# remove everthing after ):
|
||||
VERSION=${VERSION%%):*}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=1.17.0
|
||||
MINVERSION=1.18.0
|
||||
;;
|
||||
dmd)
|
||||
# DMD64 D Compiler v2.085.1\n...
|
||||
|
@ -2141,7 +2141,7 @@ case $(basename $DC) in
|
|||
VERSION=${VERSION#*Compiler v}
|
||||
VERSION=${VERSION%% *}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=2.087.0
|
||||
MINVERSION=2.088.0
|
||||
;;
|
||||
esac
|
||||
|
||||
|
@ -2162,7 +2162,7 @@ fi
|
|||
|
||||
|
||||
|
||||
PACKAGE_DATE="April 2022"
|
||||
PACKAGE_DATE="June 2023"
|
||||
|
||||
|
||||
|
||||
|
@ -3159,7 +3159,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
|
|||
# report actual input values of CONFIG_FILES etc. instead of their
|
||||
# values after options handling.
|
||||
ac_log="
|
||||
This file was extended by onedrive $as_me v2.4.17, which was
|
||||
This file was extended by onedrive $as_me v2.4.25, which was
|
||||
generated by GNU Autoconf 2.69. Invocation command line was
|
||||
|
||||
CONFIG_FILES = $CONFIG_FILES
|
||||
|
@ -3212,7 +3212,7 @@ _ACEOF
|
|||
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
|
||||
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
|
||||
ac_cs_version="\\
|
||||
onedrive config.status v2.4.17
|
||||
onedrive config.status v2.4.25
|
||||
configured by $0, generated by GNU Autoconf 2.69,
|
||||
with options \\"\$ac_cs_config\\"
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ dnl - commit the changed files (configure.ac, configure)
|
|||
dnl - tag the release
|
||||
|
||||
AC_PREREQ([2.69])
|
||||
AC_INIT([onedrive],[v2.4.17], [https://github.com/abraunegg/onedrive], [onedrive])
|
||||
AC_INIT([onedrive],[v2.4.25], [https://github.com/abraunegg/onedrive], [onedrive])
|
||||
AC_CONFIG_SRCDIR([src/main.d])
|
||||
|
||||
|
||||
|
@ -104,7 +104,7 @@ case $(basename $DC) in
|
|||
# remove everthing after ):
|
||||
VERSION=${VERSION%%):*}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=1.17.0
|
||||
MINVERSION=1.18.0
|
||||
;;
|
||||
dmd)
|
||||
# DMD64 D Compiler v2.085.1\n...
|
||||
|
@ -112,7 +112,7 @@ case $(basename $DC) in
|
|||
VERSION=${VERSION#*Compiler v}
|
||||
VERSION=${VERSION%% *}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=2.087.0
|
||||
MINVERSION=2.088.0
|
||||
;;
|
||||
esac
|
||||
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# BASH completion code for OneDrive Linux Client
|
||||
# (c) 2019 Norbert Preining
|
||||
# License: GPLv3+ (as with the rest of the OneDrive Linux client project)
|
||||
|
@ -12,7 +10,7 @@ _onedrive()
|
|||
cur=${COMP_WORDS[COMP_CWORD]}
|
||||
prev=${COMP_WORDS[COMP_CWORD-1]}
|
||||
|
||||
options='--check-for-nomount --check-for-nosync --debug-https --disable-notifications --display-config --display-sync-status --download-only --disable-upload-validation --dry-run --enable-logging --force-http-1.1 --force-http-2 --local-first --logout -m --monitor --no-remote-delete --print-token --reauth --resync --skip-dot-files --skip-symlinks --synchronize --upload-only -v --verbose --version -h --help'
|
||||
options='--check-for-nomount --check-for-nosync --debug-https --disable-notifications --display-config --display-sync-status --download-only --disable-upload-validation --dry-run --enable-logging --force-http-1.1 --force-http-2 --get-file-link --local-first --logout -m --monitor --no-remote-delete --print-token --reauth --resync --skip-dot-files --skip-symlinks --synchronize --upload-only -v --verbose --version -h --help'
|
||||
argopts='--create-directory --get-O365-drive-id --operation-timeout --remove-directory --single-directory --source-directory'
|
||||
|
||||
# Loop on the arguments to manage conflicting options
|
||||
|
@ -21,12 +19,21 @@ _onedrive()
|
|||
[[ ${COMP_WORDS[i]} == '--synchronize' ]] && options=${options/--monitor}
|
||||
[[ ${COMP_WORDS[i]} == '--monitor' ]] && options=${options/--synchronize}
|
||||
done
|
||||
|
||||
|
||||
case "$prev" in
|
||||
--confdir|--syncdir)
|
||||
_filedir
|
||||
return 0
|
||||
;;
|
||||
|
||||
--get-file-link)
|
||||
if command -v sed &> /dev/null; then
|
||||
pushd "$(onedrive --display-config | sed -n "/sync_dir/s/.*= //p")" &> /dev/null
|
||||
_filedir
|
||||
popd &> /dev/null
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--create-directory|--get-O365-drive-id|--operation-timeout|--remove-directory|--single-directory|--source-directory)
|
||||
return 0
|
||||
;;
|
||||
|
@ -35,7 +42,7 @@ _onedrive()
|
|||
return 0
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
# notreached
|
||||
return 0
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ complete -c onedrive -l dry-run -d 'Perform a trial sync with no changes made.'
|
|||
complete -c onedrive -l enable-logging -d 'Enable client activity to a separate log file.'
|
||||
complete -c onedrive -l force-http-1.1 -d 'Force the use of HTTP 1.1 for all operations.'
|
||||
complete -c onedrive -l force-http-2 -d 'Force the use of HTTP 2 for all operations.'
|
||||
complete -c onedrive -l get-file-link -d 'Display the file link of a synced file.'
|
||||
complete -c onedrive -l get-O365-drive-id -d 'Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library.'
|
||||
complete -c onedrive -s h -l help -d 'Print help information.'
|
||||
complete -c onedrive -l local-first -d 'Synchronize from the local directory source first, before downloading changes from OneDrive.'
|
||||
|
|
|
@ -21,6 +21,7 @@ all_opts=(
|
|||
'--enable-logging[Enable client activity to a separate log file]'
|
||||
'--force-http-1.1[Force the use of HTTP 1.1 for all operations]'
|
||||
'--force-http-2[Force the use of HTTP 2 for all operations]'
|
||||
'--get-file-link[Display the file link of a synced file.]:file name:'
|
||||
'--get-O365-drive-id[Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library]:'
|
||||
'--local-first[Synchronize from the local directory source first, before downloading changes from OneDrive.]'
|
||||
'--logout[Logout the current user]'
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG FEDORA_VERSION=35
|
||||
ARG FEDORA_VERSION=38
|
||||
ARG DEBIAN_VERSION=bullseye
|
||||
ARG GO_VERSION=1.17
|
||||
ARG GOSU_VERSION=1.14
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
|
||||
FROM golang:${GO_VERSION}-${DEBIAN_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
|
@ -25,6 +25,9 @@ RUN ./configure \
|
|||
|
||||
FROM fedora:${FEDORA_VERSION}
|
||||
|
||||
RUN dnf clean all \
|
||||
&& dnf -y update
|
||||
|
||||
RUN dnf install -y libcurl sqlite ldc-libs \
|
||||
&& dnf clean all \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG ALPINE_VERSION=3.15
|
||||
ARG GO_VERSION=1.17
|
||||
ARG GOSU_VERSION=1.14
|
||||
ARG ALPINE_VERSION=3.18
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
|
||||
FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
|
@ -23,6 +23,9 @@ RUN autoreconf -fiv \
|
|||
|
||||
FROM alpine:${ALPINE_VERSION}
|
||||
|
||||
RUN apk add --upgrade apk-tools \
|
||||
&& apk upgrade --available
|
||||
|
||||
RUN apk add --update --no-cache bash libcurl libgcc shadow sqlite-libs ldc-runtime \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
||||
|
|
|
@ -1,46 +1,28 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG DEBIAN_VERSION=bullseye
|
||||
ARG LDC_VERSION_MAIN=1.28.1
|
||||
ARG DEBIAN_VERSION=stable
|
||||
|
||||
FROM debian:${DEBIAN_VERSION} AS builder-onedrive
|
||||
ARG LDC_VERSION_MAIN
|
||||
|
||||
RUN apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends build-essential curl ca-certificates libcurl4-openssl-dev libsqlite3-dev libxml2-dev pkg-config \
|
||||
RUN apt-get clean \
|
||||
&& apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends build-essential curl ca-certificates libcurl4-openssl-dev libsqlite3-dev libxml2-dev pkg-config git ldc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN ARCH="$(dpkg --print-architecture)" \
|
||||
&& case "${ARCH}" in \
|
||||
aarch64|arm64) \
|
||||
BINARY_URL="https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_MAIN}/ldc2-${LDC_VERSION_MAIN}-linux-aarch64.tar.xz"; \
|
||||
;; \
|
||||
amd64|x86-64) \
|
||||
BINARY_URL="https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_MAIN}/ldc2-${LDC_VERSION_MAIN}-linux-x86_64.tar.xz"; \
|
||||
;; \
|
||||
*) \
|
||||
echo "Unsupported arch: ${ARCH}"; \
|
||||
exit 1; \
|
||||
;; \
|
||||
esac \
|
||||
&& echo ${BINARY_URL} \
|
||||
&& curl -k -LfsSo /tmp/ldc.tar.xz ${BINARY_URL} \
|
||||
&& mkdir -p /opt/ldc \
|
||||
&& tar -xvf /tmp/ldc.tar.xz -C /opt/ldc --strip-components=1 \
|
||||
&& rm -rf /tmp/ldc.tar.xz
|
||||
|
||||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN ./configure DC=/opt/ldc/bin/ldmd2 \
|
||||
RUN ./configure DC=/usr/bin/ldmd2 \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
FROM debian:${DEBIAN_VERSION}-slim
|
||||
|
||||
RUN apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends gosu libcurl4 libsqlite3-0 ca-certificates \
|
||||
RUN apt-get clean \
|
||||
&& apt-get update \
|
||||
&& apt-get upgrade -y \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends gosu libcurl4 libsqlite3-0 ca-certificates libphobos2-ldc-shared100 \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
# Fix bug with ssl on armhf: https://serverfault.com/a/1045189
|
||||
&& /usr/bin/c_rehash \
|
||||
|
|
|
@ -1,51 +0,0 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG DEBIAN_VERSION=buster
|
||||
ARG LDC_VERSION_MAIN=1.17.0
|
||||
|
||||
FROM debian:${DEBIAN_VERSION} AS builder-onedrive
|
||||
ARG LDC_VERSION_MAIN
|
||||
|
||||
RUN apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends build-essential curl ca-certificates libcurl4-openssl-dev libsqlite3-dev libxml2-dev pkg-config \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN ARCH="$(dpkg --print-architecture)" \
|
||||
&& case "${ARCH}" in \
|
||||
armhf|arm) \
|
||||
BINARY_URL="https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION_MAIN}/ldc2-${LDC_VERSION_MAIN}-linux-aarch64.tar.xz"; \
|
||||
;; \
|
||||
*) \
|
||||
echo "Unsupported arch: ${ARCH}"; \
|
||||
exit 1; \
|
||||
;; \
|
||||
esac \
|
||||
&& echo ${BINARY_URL} \
|
||||
&& curl -k -LfsSo /tmp/ldc.tar.xz ${BINARY_URL} \
|
||||
&& mkdir -p /opt/ldc \
|
||||
&& tar -xvf /tmp/ldc.tar.xz -C /opt/ldc --strip-components=1 \
|
||||
&& rm -rf /tmp/ldc.tar.xz
|
||||
|
||||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN ./configure DC=/opt/ldc/bin/ldmd2 \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
FROM debian:${DEBIAN_VERSION}-slim
|
||||
|
||||
RUN apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends gosu libcurl4 libsqlite3-0 ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
# Fix bug with ssl on armhf: https://serverfault.com/a/1045189
|
||||
&& /usr/bin/c_rehash \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
||||
COPY --from=builder-onedrive /usr/local/bin/onedrive /usr/local/bin/
|
||||
|
||||
COPY contrib/docker/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
|
@ -62,6 +62,20 @@ if [ "${ONEDRIVE_DOWNLOADONLY:=0}" == "1" ]; then
|
|||
ARGS=(--download-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in upload-only mode based on environment variable
|
||||
if [ "${ONEDRIVE_UPLOADONLY:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in upload-only mode"
|
||||
echo "# Adding --upload-only"
|
||||
ARGS=(--upload-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in no-remote-delete mode based on environment variable
|
||||
if [ "${ONEDRIVE_NOREMOTEDELETE:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in no-remote-delete mode"
|
||||
echo "# Adding --no-remote-delete"
|
||||
ARGS=(--no-remote-delete ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to logout based on environment variable
|
||||
if [ "${ONEDRIVE_LOGOUT:=0}" == "1" ]; then
|
||||
echo "# We are logging out"
|
||||
|
@ -83,13 +97,27 @@ if [ -n "${ONEDRIVE_AUTHFILES:=""}" ]; then
|
|||
ARGS=(--auth-files ${ONEDRIVE_AUTHFILES} ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to utilize provided auth reponse based on environment variable
|
||||
# Tell client to utilize provided auth response based on environment variable
|
||||
if [ -n "${ONEDRIVE_AUTHRESPONSE:=""}" ]; then
|
||||
echo "# We are providing the auth response directly to perform authentication"
|
||||
echo "# Adding --auth-response ARG"
|
||||
ARGS=(--auth-response \"${ONEDRIVE_AUTHRESPONSE}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to print the running configuration at application startup
|
||||
if [ "${ONEDRIVE_DISPLAY_CONFIG:=0}" == "1" ]; then
|
||||
echo "# We are printing the application running configuration at application startup"
|
||||
echo "# Adding --display-running-config"
|
||||
ARGS=(--display-running-config ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to use sync single dir option
|
||||
if [ -n "${ONEDRIVE_SINGLE_DIRECTORY:=""}" ]; then
|
||||
echo "# We are synchronizing in single-directory mode"
|
||||
echo "# Adding --single-directory ARG"
|
||||
ARGS=(--single-directory \"${ONEDRIVE_SINGLE_DIRECTORY}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
if [ ${#} -gt 0 ]; then
|
||||
ARGS=("${@}")
|
||||
fi
|
||||
|
|
|
@ -5,8 +5,14 @@
|
|||
%global with_systemd 0
|
||||
%endif
|
||||
|
||||
%if 0%{?rhel} >= 7
|
||||
%global rhel_unitdir 1
|
||||
%else
|
||||
%global rhel_unitdir 0
|
||||
%endif
|
||||
|
||||
Name: onedrive
|
||||
Version: 2.4.17
|
||||
Version: 2.4.25
|
||||
Release: 1%{?dist}
|
||||
Summary: Microsoft OneDrive Client
|
||||
Group: System Environment/Network
|
||||
|
@ -15,7 +21,7 @@ URL: https://github.com/abraunegg/onedrive
|
|||
Source0: v%{version}.tar.gz
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
|
||||
BuildRequires: dmd >= 2.087.0
|
||||
BuildRequires: dmd >= 2.088.0
|
||||
BuildRequires: sqlite-devel >= 3.7.15
|
||||
BuildRequires: libcurl-devel
|
||||
Requires: sqlite >= 3.7.15
|
||||
|
@ -59,11 +65,16 @@ make
|
|||
%{_docdir}/%{name}
|
||||
%{_bindir}/%{name}
|
||||
%if 0%{?with_systemd}
|
||||
%if 0%{?rhel_unitdir}
|
||||
%{_unitdir}/%{name}.service
|
||||
%{_unitdir}/%{name}@.service
|
||||
%else
|
||||
%{_userunitdir}/%{name}.service
|
||||
%{_unitdir}/%{name}@.service
|
||||
%endif
|
||||
%else
|
||||
%{_bindir}/onedrive_service.sh
|
||||
/etc/init.d/onedrive
|
||||
%endif
|
||||
|
||||
%changelog
|
||||
%changelog
|
||||
|
|
|
@ -5,9 +5,11 @@ After=network-online.target
|
|||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
# Commented out hardenings are disabled because they don't work out of the box.
|
||||
# Commented out hardenings are disabled because they may not work out of the box on your distribution
|
||||
# If you know what you are doing please try to enable them.
|
||||
|
||||
ProtectSystem=full
|
||||
#PrivateUsers=true
|
||||
#PrivateDevices=true
|
||||
ProtectHostname=true
|
||||
#ProtectClock=true
|
||||
|
@ -22,4 +24,4 @@ RestartSec=3
|
|||
RestartPreventExitStatus=3
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
WantedBy=default.target
|
|
@ -1,4 +1,8 @@
|
|||
# How to configure OneDrive Business Shared Folder Sync
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
Syncing OneDrive Business Shared Folders requires additional configuration for your 'onedrive' client:
|
||||
1. List available shared folders to determine which folder you wish to sync & to validate that you have access to that folder
|
||||
2. Create a new file called 'business_shared_folders' in your config directory which contains a list of the shared folders you wish to sync
|
||||
|
|
294
docs/Docker.md
294
docs/Docker.md
|
@ -1,86 +1,159 @@
|
|||
# Run the OneDrive Client for Linux under Docker
|
||||
This client can be run as a Docker container, with 3 available options for you to choose from:
|
||||
1. Container based on Fedora 35 - Docker Tag: latest
|
||||
2. Container based on Debian 11 - Docker Tag: debian
|
||||
3. Container based on Alpine Linux - Docker Tag: alpine
|
||||
This client can be run as a Docker container, with 3 available container base options for you to choose from:
|
||||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Docker container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Docker container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Docker container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Docker container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Red Hat Enterprise Linux 8.x
|
||||
* Ubuntu Server 20.04
|
||||
* Fedora 38
|
||||
|
||||
The instructions below will utilise the 'latest' tag, however this can be substituted for 'debian' or 'alpine' if desired.
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
## Basic Setup
|
||||
### 0. Install docker using your distribution platform's instructions
|
||||
1. Ensure that SELinux has been disabled on your system. A reboot may be required to ensure that this is correctly disabled.
|
||||
2. Install Docker as per requried for your platform
|
||||
3. Obtain your normal, non-root user UID and GID by using the `id` command
|
||||
4. As your normal, non-root user, ensure that you can run `docker run hello-world` *without* using `sudo`
|
||||
The 'edge' Docker Container will align closer to all documentation and features, where as 'latest' is the release version from a static point in time. The 'latest' tag however may contain bugs and/or issues that will have been fixed, and those fixes are contained in 'edge'.
|
||||
|
||||
Once the above 4 steps are complete and you can successfully run `docker run hello-world` without sudo, only then proceed to 'Pulling and Running the Docker Image'
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
## Pulling and Running the Docker Image
|
||||
### 1. Pull the image
|
||||
```bash
|
||||
docker pull driveone/onedrive:latest
|
||||
**Note:** The below instructions for docker has been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
## High Level Configuration Steps
|
||||
1. Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
2. Configure 'docker' to allow non-privileged users to run Docker commands
|
||||
3. Disable 'SELinux' as per your distribution platform's instructions
|
||||
4. Test 'docker' by running a test container without using `sudo`
|
||||
5. Prepare the required docker volumes to store the configuration and data
|
||||
6. Run the 'onedrive' container and perform authorisation
|
||||
7. Running the 'onedrive' container under 'docker'
|
||||
|
||||
## Configuration Steps
|
||||
|
||||
### 1. Install 'docker' on your platform
|
||||
Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
|
||||
### 2. Configure 'docker' to allow non-privileged users to run Docker commands
|
||||
Read https://docs.docker.com/engine/install/linux-postinstall/ to configure the 'docker' user group with your user account to allow your non 'root' user to run 'docker' commands.
|
||||
|
||||
### 3. Disable SELinux on your platform
|
||||
In order to run the Docker container, SELinux must be disabled. Without doing this, when the application is authenticated in the steps below, the following error will be presented:
|
||||
```text
|
||||
ERROR: The local file system returned an error with the following message:
|
||||
Error Message: /onedrive/conf/refresh_token: Permission denied
|
||||
|
||||
The database cannot be opened. Please check the permissions of ~/.config/onedrive/items.sqlite3
|
||||
```
|
||||
The only known work-around for the above problem at present is to disable SELinux. Please refer to your distribution platform's instructions on how to perform this step.
|
||||
|
||||
* Fedora: https://docs.fedoraproject.org/en-US/quick-docs/selinux-changing-states-and-modes/#_disabling_selinux
|
||||
* Red Hat Enterprise Linux: https://access.redhat.com/solutions/3176
|
||||
|
||||
Post disabling SELinux and reboot your system, confirm that `getenforce` returns `Disabled`:
|
||||
```text
|
||||
$ getenforce
|
||||
Disabled
|
||||
```
|
||||
|
||||
**NOTE:** SELinux context needs to be configured or disabled for Docker to be able to write to OneDrive host directory.
|
||||
If you are still experiencing permission issues despite disabling SELinux, please read https://www.redhat.com/sysadmin/container-permission-denied-errors
|
||||
|
||||
### 2. Prepare config volume
|
||||
The Docker container requries 2 Docker volumes:
|
||||
### 4. Test 'docker' on your platform
|
||||
Ensure that 'docker' is running as a system service, and is enabled to be activated on system reboot:
|
||||
```bash
|
||||
sudo systemctl enable --now docker
|
||||
```
|
||||
|
||||
Test that 'docker' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora-38-docker-host ~]$ docker run hello-world
|
||||
Unable to find image 'hello-world:latest' locally
|
||||
latest: Pulling from library/hello-world
|
||||
719385e32844: Pull complete
|
||||
Digest: sha256:88ec0acaa3ec199d3b7eaf73588f4518c25f9d34f58ce9a0df68429c5af48e8d
|
||||
Status: Downloaded newer image for hello-world:latest
|
||||
|
||||
Hello from Docker!
|
||||
This message shows that your installation appears to be working correctly.
|
||||
|
||||
To generate this message, Docker took the following steps:
|
||||
1. The Docker client contacted the Docker daemon.
|
||||
2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
|
||||
(amd64)
|
||||
3. The Docker daemon created a new container from that image which runs the
|
||||
executable that produces the output you are currently reading.
|
||||
4. The Docker daemon streamed that output to the Docker client, which sent it
|
||||
to your terminal.
|
||||
|
||||
To try something more ambitious, you can run an Ubuntu container with:
|
||||
$ docker run -it ubuntu bash
|
||||
|
||||
Share images, automate workflows, and more with a free Docker ID:
|
||||
https://hub.docker.com/
|
||||
|
||||
For more examples and ideas, visit:
|
||||
https://docs.docker.com/get-started/
|
||||
|
||||
[alex@fedora-38-docker-host ~]$
|
||||
```
|
||||
|
||||
### 5. Configure the required docker volumes
|
||||
The 'onedrive' Docker container requires 2 docker volumes to operate:
|
||||
* Config Volume
|
||||
* Data Volume
|
||||
|
||||
Create the config volume with the following command:
|
||||
The first volume is the configuration volume that stores all the applicable application configuration + current runtime state. In a non-containerised environment, this normally resides in `~/.config/onedrive` - in a containerised environment this is stored in the volume tagged as `/onedrive/conf`
|
||||
|
||||
The second volume is the data volume, where all your data from Microsoft OneDrive is stored locally. This volume is mapped to an actual directory point on your local filesystem and this is stored in the volume tagged as `/onedrive/data`
|
||||
|
||||
#### 5.1 Prepare the 'config' volume
|
||||
Create the 'config' volume with the following command:
|
||||
```bash
|
||||
docker volume create onedrive_conf
|
||||
```
|
||||
|
||||
This will create a docker volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file and other things later.
|
||||
This will create a docker volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file in this location at a later point in time if required.
|
||||
|
||||
The second docker volume is for your data folder and is created in the next step. This volume needs to be a path to a directory on your local filesystem, and this is where your data will be stored from OneDrive. Keep in mind that:
|
||||
#### 5.2 Prepare the 'data' volume
|
||||
Create the 'data' volume with the following command:
|
||||
```bash
|
||||
docker volume create onedrive_data
|
||||
```
|
||||
|
||||
This will create a docker volume labeled `onedrive_data` and will map to a path on your local filesystem. This is where your data from Microsoft OneDrive will be stored. Keep in mind that:
|
||||
|
||||
* The owner of this specified folder must not be root
|
||||
* The owner of this specified folder must have permissions for its parent directory
|
||||
* Docker will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
||||
**NOTE:** Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owed by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Docker container will fail to start with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
|
||||
### 3. First run
|
||||
The 'onedrive' client within the Docker container needs to be authorized with your Microsoft account. This is achieved by initially running docker in interactive mode.
|
||||
### 6. First run of Docker container under docker and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running docker in interactive mode.
|
||||
|
||||
Run the docker image with the commands below and make sure to change `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `"/home/abraunegg/OneDrive"`).
|
||||
Run the docker image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the docker container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the docker volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid, you must insert a non-root UID and GID (e.g.` export ONEDRIVE_UID=1000` and export `ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
docker run -it --name onedrive -v onedrive_conf:/onedrive/conf \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data" \
|
||||
-e "ONEDRIVE_UID=${ONEDRIVE_UID}" \
|
||||
-e "ONEDRIVE_GID=${ONEDRIVE_GID}" \
|
||||
driveone/onedrive:latest
|
||||
```
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the Docker container, otherwise, Docker will create the target folder, and the folder will be given 'root' permissions, which then causes the Docker container to fail upon startup with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
**NOTE:** It is also highly advisable for you to replace `${ONEDRIVE_UID}` and `${ONEDRIVE_GID}` with your actual UID and GID as specified by your `id` command output to avoid any potential user or group conflicts.
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
docker run -it --name onedrive -v onedrive_conf:/onedrive/conf \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data" \
|
||||
-e "ONEDRIVE_UID=${ONEDRIVE_UID}" \
|
||||
-e "ONEDRIVE_GID=${ONEDRIVE_GID}" \
|
||||
driveone/onedrive:latest
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
When the Docker container successfully starts:
|
||||
|
@ -93,48 +166,47 @@ Once the 'onedrive' application is authorised, the client will automatically sta
|
|||
|
||||
If the client is working as expected, you can detach from the container with Ctrl+p, Ctrl+q.
|
||||
|
||||
### 4. Docker Container Status, stop, and restart
|
||||
Check if the monitor service is running
|
||||
### 7. Running the 'onedrive' container under 'docker'
|
||||
|
||||
#### 7.1 Check if the monitor service is running
|
||||
```bash
|
||||
docker ps -f name=onedrive
|
||||
```
|
||||
|
||||
Show monitor run logs
|
||||
|
||||
#### 7.2 Show 'onedrive' runtime logs
|
||||
```bash
|
||||
docker logs onedrive
|
||||
```
|
||||
|
||||
Stop running monitor
|
||||
|
||||
#### 7.3 Stop running 'onedrive' container
|
||||
```bash
|
||||
docker stop onedrive
|
||||
```
|
||||
|
||||
Resume monitor
|
||||
|
||||
#### 7.4 Start 'onedrive' container
|
||||
```bash
|
||||
docker start onedrive
|
||||
```
|
||||
|
||||
Remove onedrive Docker container
|
||||
|
||||
#### 7.5 Remove 'onedrive' container
|
||||
```bash
|
||||
docker rm -f onedrive
|
||||
```
|
||||
## Advanced Setup
|
||||
|
||||
### 5. Docker-compose
|
||||
Also supports docker-compose schemas > 3.
|
||||
In the following example it is assumed you have a `ONEDRIVE_DATA_DIR` environment variable and a `onedrive_conf` volume.
|
||||
However, you can also use bind mounts for the configuration folder, e.g. `export ONEDRIVE_CONF="${HOME}/OneDriveConfig"`.
|
||||
## Advanced Usage
|
||||
|
||||
### How to use Docker-compose
|
||||
You can utilise `docker-compose` if it is available on your platform and you are able to use docker compose schemas > 3.
|
||||
|
||||
In the following example it is assumed you have a `ONEDRIVE_DATA_DIR` environment variable and have already created the `onedrive_conf` volume.
|
||||
|
||||
You can also use docker bind mounts for the configuration folder, e.g. `export ONEDRIVE_CONF="${HOME}/OneDriveConfig"`.
|
||||
|
||||
```
|
||||
version: "3"
|
||||
services:
|
||||
onedrive:
|
||||
image: driveone/onedrive:latest
|
||||
image: driveone/onedrive:edge
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
- ONEDRIVE_UID=${PUID}
|
||||
|
@ -146,7 +218,7 @@ services:
|
|||
|
||||
Note that you still have to perform step 3: First Run.
|
||||
|
||||
### 6. Edit the config
|
||||
### Editing the running configuration and using a 'config' file
|
||||
The 'onedrive' client should run in default configuration, however you can change this default configuration by placing a custom config file in the `onedrive_conf` docker volume. First download the default config from [here](https://raw.githubusercontent.com/abraunegg/onedrive/master/config)
|
||||
Then put it into your onedrive_conf volume path, which can be found with:
|
||||
|
||||
|
@ -158,17 +230,17 @@ Or you can map your own config folder to the config volume. Make sure to copy al
|
|||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
|
||||
### 7. Sync multiple accounts
|
||||
There are many ways to do this, the easiest is probably to
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this; the easiest is probably to do the following:
|
||||
1. Create a second docker config volume (replace `Work` with your desired name): `docker volume create onedrive_conf_Work`
|
||||
2. And start a second docker monitor container (again replace `Work` with your desired name):
|
||||
```
|
||||
export ONEDRIVE_DATA_DIR_WORK="/home/abraunegg/OneDriveWork"
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR_WORK}
|
||||
docker run -it --restart unless-stopped --name onedrive_Work -v onedrive_conf_Work:/onedrive/conf -v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data" driveone/onedrive:latest
|
||||
docker run -it --restart unless-stopped --name onedrive_Work -v onedrive_conf_Work:/onedrive/conf -v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Run or update with one script
|
||||
### Run or update the Docker container with one script
|
||||
If you are experienced with docker and onedrive, you can use the following script:
|
||||
|
||||
```bash
|
||||
|
@ -178,16 +250,15 @@ ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
|||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
|
||||
firstRun='-d'
|
||||
docker pull driveone/onedrive:latest
|
||||
docker pull driveone/onedrive:edge
|
||||
docker inspect onedrive_conf > /dev/null 2>&1 || { docker volume create onedrive_conf; firstRun='-it'; }
|
||||
docker inspect onedrive > /dev/null 2>&1 && docker rm -f onedrive
|
||||
docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
|
||||
## Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
| ---------------- | --------------------------------------------------- |:-------------:|
|
||||
## Supported Docker Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
| ---------------- | --------------------------------------------------- |:--------------------------------------------------------------------------------------------------------------------------------:|
|
||||
| <B>ONEDRIVE_UID</B> | UserID (UID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_GID</B> | GroupID (GID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_VERBOSE</B> | Controls "--verbose" switch on onedrive sync. Default is 0 | 1 |
|
||||
|
@ -195,41 +266,56 @@ docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/
|
|||
| <B>ONEDRIVE_DEBUG_HTTPS</B> | Controls "--debug-https" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RESYNC</B> | Controls "--resync" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DOWNLOADONLY</B> | Controls "--download-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_UPLOADONLY</B> | Controls "--upload-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
|
||||
### Usage Examples
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker container run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Debug Output:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker container run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync and --verbose:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
**Perform a --logout and re-authenticate:**
|
||||
```bash
|
||||
docker container run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:latest
|
||||
docker container run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Build instructions
|
||||
## Building a custom Docker image
|
||||
|
||||
### Build Environment Requirements
|
||||
* Build environment must have at least 1GB of memory & 2GB swap space
|
||||
|
||||
There are 2 ways to validate this requirement:
|
||||
* Modify the file `/etc/dphys-swapfile` and edit the `CONF_SWAPSIZE`, for example: `CONF_SWAPSIZE=2048`. A reboot is required to make this change effective.
|
||||
* Dynamically allocate a swapfile for building:
|
||||
You can validate your build environment memory status with the following command:
|
||||
```text
|
||||
cat /proc/meminfo | grep -E 'MemFree|Swap'
|
||||
```
|
||||
This should result in the following similar output:
|
||||
```text
|
||||
MemFree: 3704644 kB
|
||||
SwapCached: 0 kB
|
||||
SwapTotal: 8117244 kB
|
||||
SwapFree: 8117244 kB
|
||||
```
|
||||
|
||||
If you do not have enough swap space, you can use the following script to dynamically allocate a swapfile for building the Docker container:
|
||||
|
||||
```bash
|
||||
cd /var
|
||||
sudo fallocate -l 1.5G swapfile
|
||||
|
@ -244,32 +330,36 @@ swapon -s
|
|||
free -h
|
||||
```
|
||||
|
||||
### Building a custom Docker image
|
||||
If you are running a Raspberry Pi, you will need to edit your system configuration to increase your swapfile:
|
||||
|
||||
* Modify the file `/etc/dphys-swapfile` and edit the `CONF_SWAPSIZE`, for example: `CONF_SWAPSIZE=2048`.
|
||||
|
||||
A reboot of your Raspberry Pi is required to make this change effective.
|
||||
|
||||
### Building and running a custom Docker image
|
||||
You can also build your own image instead of pulling the one from [hub.docker.com](https://hub.docker.com/r/driveone/onedrive):
|
||||
```bash
|
||||
git clone https://github.com/abraunegg/onedrive
|
||||
cd onedrive
|
||||
docker build . -t local-onedrive -f contrib/docker/Dockerfile
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive:latest
|
||||
```
|
||||
|
||||
There are alternate, smaller images available by building
|
||||
Dockerfile-debian or Dockerfile-alpine. These [multi-stage builder
|
||||
pattern](https://docs.docker.com/develop/develop-images/multistage-build/)
|
||||
Dockerfiles require Docker version at least 17.05.
|
||||
There are alternate, smaller images available by using `Dockerfile-debian` or `Dockerfile-alpine`. These [multi-stage builder pattern](https://docs.docker.com/develop/develop-images/multistage-build/) Dockerfiles require Docker version at least 17.05.
|
||||
|
||||
#### How to build and run a custom Docker image based on Debian
|
||||
### How to build and run a custom Docker image based on Debian
|
||||
``` bash
|
||||
docker build . -t local-ondrive-debian -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-ondrive-debian:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image based on Alpine Linux
|
||||
### How to build and run a custom Docker image based on Alpine Linux
|
||||
``` bash
|
||||
docker build . -t local-ondrive-alpine -f contrib/docker/Dockerfile-alpine
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-ondrive-alpine:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image for ARMHF (Raspberry Pi)
|
||||
### How to build and run a custom Docker image for ARMHF (Raspberry Pi)
|
||||
Compatible with:
|
||||
* Raspberry Pi
|
||||
* Raspberry Pi 2
|
||||
|
@ -277,12 +367,30 @@ Compatible with:
|
|||
* Raspberry Pi 3
|
||||
* Raspberry Pi 4
|
||||
``` bash
|
||||
docker build . -t local-onedrive-rpi -f contrib/docker/Dockerfile-rpi
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-rpi:latest
|
||||
docker build . -t local-onedrive-armhf -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-armhf:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image for AARCH64 Platforms
|
||||
### How to build and run a custom Docker image for AARCH64 Platforms
|
||||
``` bash
|
||||
docker build . -t local-onedrive-aarch64 -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-aarch64:latest
|
||||
```
|
||||
### How to support double-byte languages
|
||||
In some geographic regions, you may need to change and/or update the locale specification of the Docker container to better support the local language used for your local filesystem. To do this, follow the example below:
|
||||
```
|
||||
FROM driveone/onedrive
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y locales
|
||||
|
||||
RUN echo "ja_JP.UTF-8 UTF-8" > /etc/locale.gen && \
|
||||
locale-gen ja_JP.UTF-8 && \
|
||||
dpkg-reconfigure locales && \
|
||||
/usr/sbin/update-locale LANG=ja_JP.UTF-8
|
||||
|
||||
ENV LC_ALL ja_JP.UTF-8
|
||||
```
|
||||
The above example changes the Docker container to support Japanese. To support your local language, change `ja_JP.UTF-8` to the required entry.
|
184
docs/INSTALL.md
184
docs/INSTALL.md
|
@ -6,26 +6,28 @@ This project has been packaged for the following Linux distributions as per belo
|
|||
Only the current release version or greater is supported. Earlier versions are not supported and should not be installed or used.
|
||||
|
||||
#### Important Note:
|
||||
Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If any package version indicator below is ![#f03c15](https://via.placeholder.com/15/f03c15/000000?text=+) for your distribution, it is recommended that you build from source. Do not install the software from the available distribution package. If a package is out of date, please contact the package maintainer for resolution.
|
||||
Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If any package version indicator below is 'red' for your distribution, it is recommended that you build from source. Do not install the software from the available distribution package. If a package is out of date, please contact the package maintainer for resolution.
|
||||
|
||||
| Distribution | Package Name & Package Link | PKG_Version | i686 | x86_64 | ARMHF | AARCH64 | Extra Details |
|
||||
|---------------------------------|------------------------------------------------------------------------------|:---------------:|:----:|:------:|:-----:|:-------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Alpine Linux | [onedrive](https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge) |<a href="https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge"><img src="https://repology.org/badge/version-for-repo/alpine_edge/onedrive.svg?header=" alt="Alpine Linux Edge package" width="46" height="20"></a>|❌|✔|❌|✔ | |
|
||||
| Arch Linux<br><br>Manjaro Linux | [onedrive-abraunegg](https://aur.archlinux.org/packages/onedrive-abraunegg/) |<a href="https://aur.archlinux.org/packages/onedrive-abraunegg"><img src="https://repology.org/badge/version-for-repo/aur/onedrive-abraunegg.svg?header=" alt="AUR package" width="46" height="20"></a>|✔|✔|✔|✔ | Install via: `pamac build onedrive-abraunegg` from the Arch Linux User Repository (AUR)<br><br>**Note:** If asked regarding a provider for 'd-runtime' and 'd-compiler', select 'liblphobos' and 'ldc'<br><br>**Note:** System must have at least 1GB of memory & 1GB swap space
|
||||
| Debian | [onedrive](https://packages.debian.org/search?keywords=onedrive) |<a href="https://packages.debian.org/search?keywords=onedrive"><img src="https://repology.org/badge/version-for-repo/debian_11/onedrive.svg?header=" alt="Debian package" width="46" height="20"></a>|✔|✔|✔|✔| It is recommended that for Debian that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian 11 | [onedrive](https://packages.debian.org/bullseye/source/onedrive) |<a href="https://packages.debian.org/bullseye/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_11/onedrive.svg?header=" alt="Debian 11 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 11 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian 12 | [onedrive](https://packages.debian.org/bookworm/source/onedrive) |<a href="https://packages.debian.org/bookworm/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_12/onedrive.svg?header=" alt="Debian 12 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 12 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Fedora | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/fedora_rawhide/onedrive.svg?header=" alt="Fedora Rawhide package" width="46" height="20"></a>|✔|✔|✔|✔| |
|
||||
| Gentoo | [onedrive](https://gpo.zugaina.org/net-misc/onedrive) | No API Available |✔|✔|❌|❌| |
|
||||
| Homebrew | [onedrive](https://formulae.brew.sh/formula/onedrive) | <a href="https://formulae.brew.sh/formula/onedrive"><img src="https://repology.org/badge/version-for-repo/homebrew/onedrive.svg?header=" alt="Homebrew package" width="46" height="20"></a> |❌|✔|❌|❌| |
|
||||
| Linux Mint 20.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Linux Mint 21.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| NixOS | [onedrive](https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive)|<a href="https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive"><img src="https://repology.org/badge/version-for-repo/nix_unstable/onedrive.svg?header=" alt="nixpkgs unstable package" width="46" height="20"></a>|❌|✔|❌|❌| Use package `onedrive` either by adding it to `configuration.nix` or by using the command `nix-env -iA <channel name>.onedrive`. This does not install a service. To install a service, use unstable channel (will stabilize in 20.09) and add `services.onedrive.enable=true` in `configuration.nix`. You can also add a custom package using the `services.onedrive.package` option (recommended since package lags upstream). Enabling the service installs a default package too (based on the channel). You can also add multiple onedrive accounts trivially, see [documentation](https://github.com/NixOS/nixpkgs/pull/77734#issuecomment-575874225). |
|
||||
| OpenSuSE | [onedrive](https://software.opensuse.org/package/onedrive) |<a href="https://software.opensuse.org/package/onedrive"><img src="https://repology.org/badge/version-for-repo/opensuse_tumbleweed/onedrive.svg?header=" alt="openSUSE Tumbleweed package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| OpenSuSE | [onedrive](https://software.opensuse.org/package/onedrive) |<a href="https://software.opensuse.org/package/onedrive"><img src="https://repology.org/badge/version-for-repo/opensuse_network_tumbleweed/onedrive.svg?header=" alt="openSUSE Tumbleweed package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| OpenSuSE Build Service | [onedrive](https://build.opensuse.org/package/show/home:npreining:debian-ubuntu-onedrive/onedrive) | No API Available |✔|✔|✔|✔| Package Build Service for Debian and Ubuntu |
|
||||
| Raspbian | [onedrive](https://archive.raspbian.org/raspbian/pool/main/o/onedrive/) |<a href="https://archive.raspbian.org/raspbian/pool/main/o/onedrive/"><img src="https://repology.org/badge/version-for-repo/raspbian_stable/onedrive.svg?header=" alt="Raspbian Stable package" width="46" height="20"></a> |❌|❌|✔|❌| **Note:** You must compile from source for Raspbian |
|
||||
| Raspbian | [onedrive](https://archive.raspbian.org/raspbian/pool/main/o/onedrive/) |<a href="https://archive.raspbian.org/raspbian/pool/main/o/onedrive/"><img src="https://repology.org/badge/version-for-repo/raspbian_stable/onedrive.svg?header=" alt="Raspbian Stable package" width="46" height="20"></a> |❌|❌|✔|✔| **Note:** Do not install from Raspbian Package Repositories<br><br>It is recommended that for Raspbian that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Slackware | [onedrive](https://slackbuilds.org/result/?search=onedrive&sv=) |<a href="https://slackbuilds.org/result/?search=onedrive&sv="><img src="https://repology.org/badge/version-for-repo/slackbuilds/onedrive.svg?header=" alt="SlackBuilds package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Solus | [onedrive](https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R) |<a href="https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R"><img src="https://repology.org/badge/version-for-repo/solus/onedrive.svg?header=" alt="Solus package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Ubuntu 18.04 | [onedrive](https://packages.ubuntu.com/bionic/onedrive) |<a href="https://packages.ubuntu.com/bionic/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_18_04/onedrive.svg?header=" alt="Ubuntu 18.04 package" width="88" height="20"></a> |✔|✔|✔|❌| **Note:** Do not install from Ubuntu Universe<br><br>You must compile from source for this version of Ubuntu |
|
||||
| Ubuntu 20.04 | [onedrive](https://packages.ubuntu.com/focal/onedrive) |<a href="https://packages.ubuntu.com/focal/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>Install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 21.04 | [onedrive](https://packages.ubuntu.com/hirsute/onedrive) |<a href="https://packages.ubuntu.com/hirsute/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_21_04/onedrive.svg?header=" alt="Ubuntu 21.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>Install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 21.10 | [onedrive](https://packages.ubuntu.com/impish/onedrive) |<a href="https://packages.ubuntu.com/impish/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_21_10/onedrive.svg?header=" alt="Ubuntu 21.10 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>Install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 22.04 | [onedrive](https://packages.ubuntu.com/jammy/onedrive) |<a href="https://packages.ubuntu.com/jammy/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>Install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 20.04 | [onedrive](https://packages.ubuntu.com/focal/onedrive) |<a href="https://packages.ubuntu.com/focal/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 22.04 | [onedrive](https://packages.ubuntu.com/jammy/onedrive) |<a href="https://packages.ubuntu.com/jammy/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 23.04 | [onedrive](https://packages.ubuntu.com/lunar/onedrive) |<a href="https://packages.ubuntu.com/lunar/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_23_04/onedrive.svg?header=" alt="Ubuntu 23.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Void Linux | [onedrive](https://voidlinux.org/packages/?arch=x86_64&q=onedrive) |<a href="https://voidlinux.org/packages/?arch=x86_64&q=onedrive"><img src="https://repology.org/badge/version-for-repo/void_x86_64/onedrive.svg?header=" alt="Void Linux x86_64 package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
|
||||
#### Important information for all Ubuntu and Ubuntu based distribution users:
|
||||
|
@ -47,7 +49,7 @@ If you wish to change this situation so that you can just use the Universe packa
|
|||
* [SQLite 3](https://www.sqlite.org/) >= 3.7.15
|
||||
* [Digital Mars D Compiler (DMD)](http://dlang.org/download.html) or [LDC – the LLVM-based D Compiler](https://github.com/ldc-developers/ldc)
|
||||
|
||||
**Note:** DMD version >= 2.087.0 or LDC version >= 1.17.0 is required to compile this application
|
||||
**Note:** DMD version >= 2.088.0 or LDC version >= 1.18.0 is required to compile this application
|
||||
|
||||
### Example for installing DMD Compiler
|
||||
```text
|
||||
|
@ -61,61 +63,15 @@ curl -fsS https://dlang.org/install.sh | bash -s ldc
|
|||
|
||||
## Distribution Package Dependencies
|
||||
### Dependencies: Ubuntu 16.x
|
||||
Ubuntu Linux 16.04 LTS reached the end of its five-year LTS window on April 30th 2021 and is no longer supported.
|
||||
Ubuntu Linux 16.x LTS reached the end of its five-year LTS window on April 30th 2021 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 18.x / Lubuntu 18.x / Debian 9 - i386 / i686
|
||||
These dependencies are also applicable for all Ubuntu based distributions such as:
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
### Dependencies: Ubuntu 18.x / Lubuntu 18.x
|
||||
Ubuntu Linux 18.x LTS reached the end of its five-year LTS window on May 31st 2023 and is no longer supported.
|
||||
|
||||
**Important:** The DMD compiler cannot be used in its default configuration on Ubuntu 18.x / Lubuntu 18.x / Debian 9 i386 / i686 architectures due to an issue in the Ubuntu / Debian linking process. See [https://issues.dlang.org/show_bug.cgi?id=19116](https://issues.dlang.org/show_bug.cgi?id=19116) for further details.
|
||||
### Dependencies: Debian 9
|
||||
Debian 9 reached the end of its five-year support window on June 30th 2022 and is no longer supported.
|
||||
|
||||
**Note:** Ubuntu 18.x validated with the DMD compiler on the following Ubuntu i386 / i686 platform:
|
||||
```text
|
||||
DISTRIB_ID=Ubuntu
|
||||
DISTRIB_RELEASE=18.04
|
||||
DISTRIB_CODENAME=bionic
|
||||
DISTRIB_DESCRIPTION="Ubuntu 18.04.3 LTS"
|
||||
```
|
||||
**Note:** Lubuntu 18.x validated with the DMD compiler on the following Lubuntu i386 / i686 platform:
|
||||
```text
|
||||
DISTRIB_ID=Ubuntu
|
||||
DISTRIB_RELEASE=18.10
|
||||
DISTRIB_CODENAME=cosmic
|
||||
DISTRIB_DESCRIPTION="Ubuntu 18.10"
|
||||
```
|
||||
**Note:** Debian 9 validated with the DMD compiler on the following Debian i386 / i686 platform:
|
||||
```text
|
||||
cat /etc/debian_version
|
||||
9.11
|
||||
```
|
||||
|
||||
First install development dependencies as per below:
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
sudo apt install pkg-config
|
||||
sudo apt install git
|
||||
sudo apt install curl
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
Second, install the DMD compiler as per below:
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
Thirdly, reconfigure the default linker as per below:
|
||||
```text
|
||||
sudo update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.gold" 20
|
||||
sudo update-alternatives --install "/usr/bin/ld" "ld" "/usr/bin/ld.bfd" 10
|
||||
```
|
||||
|
||||
### Dependencies: Ubuntu 18.x -> Ubuntu 22.x / Debian 9 -> Debian 11 - x86_64
|
||||
### Dependencies: Ubuntu 20.x -> Ubuntu 23.x / Debian 10 -> Debian 12 - x86_64
|
||||
These dependencies are also applicable for all Ubuntu based distributions such as:
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
|
@ -123,11 +79,7 @@ These dependencies are also applicable for all Ubuntu based distributions such a
|
|||
* Peppermint OS
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
sudo apt install pkg-config
|
||||
sudo apt install git
|
||||
sudo apt install curl
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
|
@ -141,20 +93,18 @@ CentOS 6.x and RHEL 6.x reached End of Life status on November 30th 2020 and is
|
|||
### Dependencies: Fedora < Version 18 / CentOS 7.x / RHEL 7.x
|
||||
```text
|
||||
sudo yum groupinstall 'Development Tools'
|
||||
sudo yum install libcurl-devel
|
||||
sudo yum install sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
sudo yum install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd-2.099.0
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo yum install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Fedora > Version 18 / CentOS 8.x / RHEL 8.x
|
||||
### Dependencies: Fedora > Version 18 / CentOS 8.x / RHEL 8.x / RHEL 9.x
|
||||
```text
|
||||
sudo dnf groupinstall 'Development Tools'
|
||||
sudo dnf install libcurl-devel
|
||||
sudo dnf install sqlite-devel
|
||||
sudo dnf install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
|
@ -171,43 +121,19 @@ For notifications the following is also necessary:
|
|||
sudo pacman -S libnotify
|
||||
```
|
||||
|
||||
### Dependencies: Raspbian (ARMHF)
|
||||
Validated using:
|
||||
* `Linux raspberrypi 5.4.79-v7+ #1373 SMP Mon Nov 23 13:22:33 GMT 2020 armv7l GNU/Linux` (2020-12-02-raspios-buster-armhf) using Raspberry Pi 2 Model B
|
||||
* `Linux raspberrypi 5.4.83-v8+ #1379 SMP PREEMPT Mon Dec 14 13:15:14 GMT 2020 aarch64` (2021-01-11-raspios-buster-armhf) using Raspberry Pi 3 Model B+
|
||||
### Dependencies: Raspbian (ARMHF) and Ubuntu 22.x / Debian 11 / Debian 12 / Raspbian (ARM64)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
These instructions were validated using:
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-armhf-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-arm64-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux ubuntu 5.15.0-1005-raspi #5-Ubuntu SMP PREEMPT Mon Apr 4 12:21:48 UTC 2022 aarch64 aarch64 aarch64 GNU/Linux` (ubuntu-22.04-preinstalled-server-arm64+raspi) using Raspberry Pi 3B (revision 1.2)
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
sudo apt install pkg-config
|
||||
sudo apt install git
|
||||
sudo apt install curl
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.17.0/ldc2-1.17.0-linux-armhf.tar.xz
|
||||
tar -xvf ldc2-1.17.0-linux-armhf.tar.xz
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: Ubuntu 20.x / Debian 10 (ARM64)
|
||||
Validated using:
|
||||
* `Ubuntu 20.04.2 LTS (GNU/Linux 5.4.0-1028-raspi aarch64)` (ubuntu-20.04.2-preinstalled-server-arm64+raspi) using Raspberry Pi 3 Model B+
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
sudo apt install pkg-config
|
||||
sudo apt install git
|
||||
sudo apt install curl
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.25.1/ldc2-1.25.1-linux-aarch64.tar.xz
|
||||
tar -xvf ldc2-1.25.1-linux-aarch64.tar.xz
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
|
@ -260,7 +186,7 @@ sudo zypper install libnotify-devel
|
|||
|
||||
## Compilation & Installation
|
||||
### High Level Steps
|
||||
1. Install the platform dependancies for your Linux OS
|
||||
1. Install the platform dependencies for your Linux OS
|
||||
2. Activate your DMD or LDC compiler
|
||||
3. Clone the GitHub repository, run configure and make, then install
|
||||
4. Deactivate your DMD or LDC compiler
|
||||
|
@ -268,13 +194,13 @@ sudo zypper install libnotify-devel
|
|||
### Building using DMD Reference Compiler
|
||||
Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below:
|
||||
```text
|
||||
Run `source ~/dlang/dmd-2.087.0/activate` in your shell to use dmd-2.087.0.
|
||||
Run `source ~/dlang/dmd-2.088.0/activate` in your shell to use dmd-2.088.0.
|
||||
This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1.
|
||||
Run `deactivate` later on to restore your environment.
|
||||
```
|
||||
Without performing this step, the compilation process will fail.
|
||||
|
||||
**Note:** Depending on your DMD version, substitute `2.087.0` above with your DMD version that is installed.
|
||||
**Note:** Depending on your DMD version, substitute `2.088.0` above with your DMD version that is installed.
|
||||
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
|
@ -304,27 +230,36 @@ as far as possible automatically, but can be overridden by passing
|
|||
`--with-fish-completion-dir=<DIR>` to `configure`.
|
||||
|
||||
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC))
|
||||
#### ARMHF Architecture (Raspbian etc)
|
||||
#### ARMHF Architecture (Raspbian) and ARM64 Architecture (Ubuntu 22.x / Debian 11 / Raspbian)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure DC=~/ldc2-1.17.0-linux-armhf/bin/ldmd2
|
||||
./configure DC=/usr/bin/ldmd2
|
||||
make clean; make
|
||||
sudo make install
|
||||
```
|
||||
|
||||
#### ARM64 Architecture
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure DC=~/ldc2-1.25.1-linux-aarch64/bin/ldmd2
|
||||
make clean; make
|
||||
sudo make install
|
||||
```
|
||||
## Upgrading the client
|
||||
If you have installed the client from a distribution package, the client will be updated when the distribution package is updated by the package maintainer and will be updated to the new application version when you perform your package update.
|
||||
|
||||
If you have built the client from source, to upgrade your client, it is recommended that you first uninstall your existing 'onedrive' binary (see below), then re-install the client by re-cloning, re-compiling and re-installing the client again to install the new version.
|
||||
|
||||
**Note:** Following the uninstall process will remove all client components including *all* systemd files, including any custom files created for specific access such as SharePoint Libraries.
|
||||
|
||||
You can optionally choose to not perform this uninstallation step, and simply re-install the client by re-cloning, re-compiling and re-installing the client again - however the risk here is that you end up with two onedrive client binaries on your system, and depending on your system search path preferences, this will determine which binary is used.
|
||||
|
||||
**Important:** Before performing any upgrade, it is highly recommended for you to stop any running systemd service if applicable to ensure that these services are restarted using the updated client version.
|
||||
|
||||
Post re-install, to confirm that you have the new version of the client installed, use `onedrive --version` to determine the client version that is now installed.
|
||||
|
||||
## Uninstalling the client
|
||||
### Uninstalling the client if installed from distribution package
|
||||
Follow your distribution documentation to uninstall the package that you installed
|
||||
|
||||
### Uninstalling the client if installed and built from source
|
||||
From within your GitHub repository clone, perform the following to remove the 'onedrive' binary:
|
||||
```text
|
||||
sudo make uninstall
|
||||
|
@ -334,18 +269,9 @@ If you are not upgrading your client, to remove your application state and confi
|
|||
```
|
||||
rm -rf ~/.config/onedrive
|
||||
```
|
||||
**Note:** If you are using the `--confdir option`, substitute `~/.config/onedrive` above for that directory.
|
||||
**Note:** If you are using the `--confdir` option, substitute `~/.config/onedrive` for the correct directory storing your client configuration.
|
||||
|
||||
If you want to just delete the application key, but keep the items database:
|
||||
```text
|
||||
rm -f ~/.config/onedrive/refresh_token
|
||||
```
|
||||
|
||||
## Upgrading the client
|
||||
If you have installed the client from a distribution package, the client will be updated when the distribution package is updated by the package maintainer and will be updated to the new application version when you perform your package update.
|
||||
|
||||
If you have built the client from source, to upgrade your client, you must first uninstall your existing 'onedrive' binary (see above), then re-install the client by re-cloning, re-compiling and re-installing the client again to install the new version.
|
||||
|
||||
To confirm you have the new version installed, use `onedrive --version` to determine the version that is now installed.
|
||||
|
||||
|
||||
|
|
262
docs/Podman.md
262
docs/Podman.md
|
@ -1,31 +1,104 @@
|
|||
# Run the OneDrive Client for Linux under Podman
|
||||
This client can be run as a Podman container, with 3 available options for you to choose from:
|
||||
1. Container based on Fedora 35 - Docker Tag: latest
|
||||
2. Container based on Debian 11 - Docker Tag: debian
|
||||
3. Container based on Alpine Linux - Docker Tag: alpine
|
||||
This client can be run as a Podman container, with 3 available container base options for you to choose from:
|
||||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Podman container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Podman container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Podman container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Podman container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Fedora 35
|
||||
* Fedora 38
|
||||
|
||||
The instructions below will utilise the 'latest' tag, however this can be substituted for 'stretch' or 'alpine' if desired. The below instructions for podman have only been tested as the root user while running the containers themselves as non-root users.
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
The 'edge' Docker Container will align closer to all documentation and features, where as 'latest' is the release version from a static point in time. The 'latest' tag however may contain bugs and/or issues that will have been fixed, and those fixes are contained in 'edge'.
|
||||
|
||||
## Basic Setup
|
||||
### 0. Install podman using your distribution platform's instructions if not already installed
|
||||
1. Ensure that SELinux has been disabled on your system. A reboot may be required to ensure that this is correctly disabled.
|
||||
2. Install Podman as required for your platform
|
||||
3. Obtain your normal, non-root user UID and GID by using the `id` command or select another non-root id to run the container as
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
**NOTE:** SELinux context needs to be configured or disabled for Podman to be able to write to OneDrive host directory.
|
||||
**Note:** The below instructions for podman have been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
### 1.1 Prepare data volume
|
||||
The container requires 2 Podman volumes:
|
||||
## High Level Configuration Steps
|
||||
1. Install 'podman' as per your distribution platform's instructions if not already installed.
|
||||
2. Disable 'SELinux' as per your distribution platform's instructions
|
||||
3. Test 'podman' by running a test container
|
||||
4. Prepare the required podman volumes to store the configuration and data
|
||||
5. Run the 'onedrive' container and perform authorisation
|
||||
6. Running the 'onedrive' container under 'podman'
|
||||
|
||||
## Configuration Steps
|
||||
|
||||
### 1. Install 'podman' on your platform
|
||||
Install 'podman' as per your distribution platform's instructions if not already installed.
|
||||
|
||||
### 2. Disable SELinux on your platform
|
||||
In order to run the Docker container under 'podman', SELinux must be disabled. Without doing this, when the application is authenticated in the steps below, the following error will be presented:
|
||||
```text
|
||||
ERROR: The local file system returned an error with the following message:
|
||||
Error Message: /onedrive/conf/refresh_token: Permission denied
|
||||
|
||||
The database cannot be opened. Please check the permissions of ~/.config/onedrive/items.sqlite3
|
||||
```
|
||||
The only known work-around for the above problem at present is to disable SELinux. Please refer to your distribution platform's instructions on how to perform this step.
|
||||
|
||||
* Fedora: https://docs.fedoraproject.org/en-US/quick-docs/selinux-changing-states-and-modes/#_disabling_selinux
|
||||
* Red Hat Enterprise Linux: https://access.redhat.com/solutions/3176
|
||||
|
||||
After disabling SELinux and rebooting your system, confirm that `getenforce` returns `Disabled`:
|
||||
```text
|
||||
$ getenforce
|
||||
Disabled
|
||||
```
|
||||
|
||||
If you are still experiencing permission issues despite disabling SELinux, please read https://www.redhat.com/sysadmin/container-permission-denied-errors
|
||||
|
||||
### 3. Test 'podman' on your platform
|
||||
Test that 'podman' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora38-podman ~]$ podman pull fedora
|
||||
Resolved "fedora" as an alias (/etc/containers/registries.conf.d/000-shortnames.conf)
|
||||
Trying to pull registry.fedoraproject.org/fedora:latest...
|
||||
Getting image source signatures
|
||||
Copying blob b30887322388 done |
|
||||
Copying config a1cd3cbf8a done |
|
||||
Writing manifest to image destination
|
||||
a1cd3cbf8adaa422629f2fcdc629fd9297138910a467b11c66e5ddb2c2753dff
|
||||
[alex@fedora38-podman ~]$ podman run fedora /bin/echo "Welcome to the Podman World"
|
||||
Welcome to the Podman World
|
||||
[alex@fedora38-podman ~]$
|
||||
```
|
||||
|
||||
### 4. Configure the required podman volumes
|
||||
The 'onedrive' Docker container requires 2 podman volumes to operate:
|
||||
* Config Volume
|
||||
* Data Volume
|
||||
|
||||
The first volume is for your data folder and is created in the next step. This volume needs to be a path to a directory on your local filesystem, and this is where your data will be stored from OneDrive. Keep in mind that:
|
||||
The first volume is the configuration volume that stores all the applicable application configuration + current runtime state. In a non-containerised environment, this normally resides in `~/.config/onedrive` - in a containerised environment this is stored in the volume tagged as `/onedrive/conf`
|
||||
|
||||
The second volume is the data volume, where all your data from Microsoft OneDrive is stored locally. This volume is mapped to an actual directory point on your local filesystem and this is stored in the volume tagged as `/onedrive/data`
|
||||
|
||||
#### 4.1 Prepare the 'config' volume
|
||||
Create the 'config' volume with the following command:
|
||||
```bash
|
||||
podman volume create onedrive_conf
|
||||
```
|
||||
|
||||
This will create a podman volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file in this location at a later point in time if required.
|
||||
|
||||
#### 4.2 Prepare the 'data' volume
|
||||
Create the 'data' volume with the following command:
|
||||
```bash
|
||||
podman volume create onedrive_data
|
||||
```
|
||||
|
||||
This will create a podman volume labeled `onedrive_data` and will map to a path on your local filesystem. This is where your data from Microsoft OneDrive will be stored. Keep in mind that:
|
||||
|
||||
* The owner of this specified folder must not be root
|
||||
* Podman will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
@ -35,45 +108,54 @@ The first volume is for your data folder and is created in the next step. This v
|
|||
ROOT level privileges prohibited!
|
||||
```
|
||||
|
||||
### 1.2 Prepare config volume
|
||||
Although not required, you can prepare the config volume before starting the container. Otherwise it will be created automatically during initial startup of the container.
|
||||
### 5. First run of Docker container under podman and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running podman in interactive mode.
|
||||
|
||||
Create the config volume with the following command:
|
||||
```bash
|
||||
podman volume create onedrive_conf
|
||||
```
|
||||
Run the podman image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
This will create a podman volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file and other things later.
|
||||
|
||||
### 2. First run
|
||||
The 'onedrive' client within the container needs to be authorized with your Microsoft account. This is achieved by initially running podman in interactive mode.
|
||||
|
||||
Run the podman image with the commands below and make sure to change `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `"/home/abraunegg/OneDrive"`).
|
||||
|
||||
It is a requirement that the container be run using a non-root uid and gid; you must insert a non-root UID and GID (e.g. `export ONEDRIVE_UID=1000` and `export ONEDRIVE_GID=1000`).
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the podman container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the podman volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid; you must insert a non-root UID and GID (e.g. `export ONEDRIVE_UID=1000` and `export ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=1000
|
||||
export ONEDRIVE_GID=1000
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "onedrive-test-data:/onedrive/data:U,Z" \
|
||||
driveone/onedrive:latest
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the podman container
|
||||
|
||||
**If you plan to use podmans built in auto-updating of container images described in step 5, you must pass an additional argument to set a label during the first run.**
|
||||
**Important:** In some scenarios, 'podman' sets the configuration and data directories to a different UID & GID as specified. To resolve this situation, you must run 'podman' with the `--userns=keep-id` flag to ensure 'podman' uses the UID and GID as specified. The updated script example when using `--userns=keep-id` is below:
|
||||
|
||||
The run command would instead look as follows:
|
||||
```
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "onedrive-test-data:/onedrive/data:U,Z" \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
|
||||
**Important:** If you plan to use the 'podman' built-in auto-updating of container images described in 'Systemd Service & Auto Updating' below, you must pass an additional argument to set a label during the first run. The updated script example to support auto-updating of container images is below:
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
-e PODMAN=1 \
|
||||
--label "io.containers.autoupdate=image"
|
||||
driveone/onedrive:latest
|
||||
--label "io.containers.autoupdate=image" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
When the Podman container successfully starts:
|
||||
|
@ -86,41 +168,39 @@ Once the 'onedrive' application is authorised, the client will automatically sta
|
|||
|
||||
If the client is working as expected, you can detach from the container with Ctrl+p, Ctrl+q.
|
||||
|
||||
### 4. Podman Container Status, stop, and restart
|
||||
Check if the monitor service is running
|
||||
### 6. Running the 'onedrive' container under 'podman'
|
||||
|
||||
#### 6.1 Check if the monitor service is running
|
||||
```bash
|
||||
podman ps -f name=onedrive
|
||||
```
|
||||
|
||||
Show monitor run logs
|
||||
|
||||
#### 6.2 Show 'onedrive' runtime logs
|
||||
```bash
|
||||
podman logs onedrive
|
||||
```
|
||||
|
||||
Stop running monitor
|
||||
|
||||
#### 6.3 Stop running 'onedrive' container
|
||||
```bash
|
||||
podman stop onedrive
|
||||
```
|
||||
|
||||
Resume monitor
|
||||
|
||||
#### 6.4 Start 'onedrive' container
|
||||
```bash
|
||||
podman start onedrive
|
||||
```
|
||||
|
||||
Remove onedrive container
|
||||
|
||||
#### 6.5 Remove 'onedrive' container
|
||||
```bash
|
||||
podman rm -f onedrive
|
||||
```
|
||||
## Advanced Setup
|
||||
|
||||
### 5. Systemd Service & Auto Updating
|
||||
|
||||
Podman supports running containers as a systemd service and also auto updating of the container images. Using the existing running container you can generate a systemd unit file to be installed by the **root** user. To have your container image auto-update with podman, it must first be created with the label `"io.containers.autoupdate=image"` mentioned in step 2.
|
||||
## Advanced Usage
|
||||
|
||||
### Systemd Service & Auto Updating
|
||||
|
||||
Podman supports running containers as a systemd service and also auto updating of the container images. Using the existing running container you can generate a systemd unit file to be installed by the **root** user. To have your container image auto-update with podman, it must first be created with the label `"io.containers.autoupdate=image"` mentioned in step 5 above.
|
||||
|
||||
```
|
||||
cd /tmp
|
||||
|
@ -166,7 +246,7 @@ systemctl start podman-auto-update.timer
|
|||
systemctl list-timers --all
|
||||
```
|
||||
|
||||
### 6. Edit the config
|
||||
### Editing the running configuration and using a 'config' file
|
||||
The 'onedrive' client should run in default configuration, however you can change this default configuration by placing a custom config file in the `onedrive_conf` podman volume. First download the default config from [here](https://raw.githubusercontent.com/abraunegg/onedrive/master/config)
|
||||
Then put it into your onedrive_conf volume path, which can be found with:
|
||||
|
||||
|
@ -177,22 +257,27 @@ Or you can map your own config folder to the config volume. Make sure to copy al
|
|||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
|
||||
### 7. Sync multiple accounts
|
||||
There are many ways to do this, the easiest is probably to
|
||||
1. Create a second podman config volume (replace `Work` with your desired name): `podman volume create onedrive_conf_Work`
|
||||
2. And start a second podman monitor container (again replace `Work` with your desired name):
|
||||
```
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this, the easiest is probably to do the following:
|
||||
1. Create a second podman config volume (replace `work` with your desired name): `podman volume create onedrive_conf_work`
|
||||
2. And start a second podman monitor container (again replace `work` with your desired name):
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR_WORK="/home/abraunegg/OneDriveWork"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR_WORK}
|
||||
podman run -it --restart unless-stopped --name onedrive_work \
|
||||
-v onedrive_conf_Work:/onedrive/conf \
|
||||
-v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data" \
|
||||
--user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
driveone/onedrive:latest
|
||||
podman run -it --name onedrive_work --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf_work:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data:U,Z" \
|
||||
-e PODMAN=1 \
|
||||
--label "io.containers.autoupdate=image" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
## Supported Podman Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
| ---------------- | --------------------------------------------------- |:-------------:|
|
||||
| <B>ONEDRIVE_UID</B> | UserID (UID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_GID</B> | GroupID (GID) to run as | 1000 |
|
||||
|
@ -201,35 +286,38 @@ podman run -it --restart unless-stopped --name onedrive_work \
|
|||
| <B>ONEDRIVE_DEBUG_HTTPS</B> | Controls "--debug-https" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RESYNC</B> | Controls "--resync" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DOWNLOADONLY</B> | Controls "--download-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_UPLOADONLY</B> | Controls "--upload-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
|
||||
### Usage Examples
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:latest
|
||||
podman run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Debug Output:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:latest
|
||||
podman run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:latest
|
||||
podman run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync and --verbose:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:latest
|
||||
podman run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --logout and re-authenticate:**
|
||||
```bash
|
||||
podman run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:latest
|
||||
podman run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Build instructions
|
||||
|
||||
## Building a custom Podman image
|
||||
You can also build your own image instead of pulling the one from [hub.docker.com](https://hub.docker.com/r/driveone/onedrive):
|
||||
```bash
|
||||
git clone https://github.com/abraunegg/onedrive
|
||||
|
@ -238,22 +326,22 @@ podman build . -t local-onedrive -f contrib/docker/Dockerfile
|
|||
```
|
||||
|
||||
There are alternate, smaller images available by building
|
||||
Dockerfile-stretch or Dockerfile-alpine. These [multi-stage builder
|
||||
pattern](https://docs.docker.com/develop/develop-images/multistage-build/)
|
||||
Dockerfile-debian or Dockerfile-alpine. These [multi-stage builder pattern](https://docs.docker.com/develop/develop-images/multistage-build/)
|
||||
Dockerfiles require Docker version at least 17.05.
|
||||
|
||||
#### How to build and run a custom Docker image based on Debian Stretch
|
||||
### How to build and run a custom Podman image based on Debian
|
||||
``` bash
|
||||
podman build . -t local-onedrive-stretch -f contrib/docker/Dockerfile-stretch
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" local-onedrive-stretch:latest
|
||||
podman build . -t local-onedrive-debian -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-debian:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image based on Alpine Linux
|
||||
### How to build and run a custom Podman image based on Alpine Linux
|
||||
``` bash
|
||||
podman build . -t local-onedrive-alpine -f contrib/docker/Dockerfile-alpine
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" local-onedrive-alpine:latest
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-alpine:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image for ARMHF (Raspberry Pi)
|
||||
### How to build and run a custom Podman image for ARMHF (Raspberry Pi)
|
||||
Compatible with:
|
||||
* Raspberry Pi
|
||||
* Raspberry Pi 2
|
||||
|
@ -261,12 +349,12 @@ Compatible with:
|
|||
* Raspberry Pi 3
|
||||
* Raspberry Pi 4
|
||||
``` bash
|
||||
podman build . -t local-onedrive-rpi -f contrib/docker/Dockerfile-rpi
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" local-onedrive-rpi:latest
|
||||
podman build . -t local-onedrive-armhf -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-armhf:latest
|
||||
```
|
||||
|
||||
#### How to build and run a custom Docker image for AARCH64 Platforms
|
||||
### How to build and run a custom Podman image for AARCH64 Platforms
|
||||
``` bash
|
||||
podman build . -t local-onedrive-aarch64 -f contrib/docker/Dockerfile-aarch64
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" local-onedrive-aarch64:latest
|
||||
podman build . -t local-onedrive-aarch64 -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-aarch64:latest
|
||||
```
|
||||
|
|
|
@ -1,4 +1,23 @@
|
|||
# How to configure OneDrive SharePoint Shared Library sync
|
||||
**WARNING:** Several users have reported files being overwritten causing data loss as a result of using this client with SharePoint Libraries when running as a systemd service.
|
||||
|
||||
When this has been investigated, the following has been noted as potential root causes:
|
||||
* File indexing application such as Baloo File Indexer or Tracker3 constantly indexing your OneDrive data
|
||||
* The use of WPS Office and how it 'saves' files by deleting the existing item and replaces it with the saved data
|
||||
|
||||
Additionally there could be a yet unknown bug with the client, however all debugging and data provided previously shows that an 'external' process to the 'onedrive' application modifies the files triggering the undesirable upload to occur.
|
||||
|
||||
**Possible Preventative Actions:**
|
||||
* Disable all File Indexing for your SharePoint Library data. It is out of scope to detail on how you should do this.
|
||||
* Disable using a systemd service for syncing your SharePoint Library data.
|
||||
* Do not use WPS Office to edit your documents. Use OpenOffice or LibreOffice as these do not exhibit the same 'delete to save' action that WPS Office has.
|
||||
|
||||
Additionally, please use caution when using this client with SharePoint.
|
||||
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
Syncing a OneDrive SharePoint library requires additional configuration for your 'onedrive' client:
|
||||
1. Login to OneDrive and under 'Shared Libraries' obtain the shared library name
|
||||
2. Query that shared library name using the client to obtain the required configuration details
|
||||
|
@ -7,12 +26,14 @@ Syncing a OneDrive SharePoint library requires additional configuration for your
|
|||
5. Test the configuration using '--dry-run'
|
||||
6. Sync the SharePoint Library as required
|
||||
|
||||
## Listing available OneDrive SharePoint Libraries
|
||||
1. Login to the OneDrive web interface and determine which shared library you wish to configure the client for:
|
||||
**Note:** The `--get-O365-drive-id` process below requires a fully configured 'onedrive' configuration so that the applicable Drive ID for the given Office 365 SharePoint Shared Library can be determined. It is highly recommended that you do not use the application 'default' configuration directory for any SharePoint Site, and configure separate items for each site you wish to use.
|
||||
|
||||
## 1. Listing available OneDrive SharePoint Libraries
|
||||
Login to the OneDrive web interface and determine which shared library you wish to configure the client for:
|
||||
![shared_libraries](./images/SharedLibraries.jpg)
|
||||
|
||||
## Query that shared library name using the client to obtain the required configuration details
|
||||
2. Run the following command using the 'onedrive' client
|
||||
## 2. Query OneDrive API to obtain required configuration details
|
||||
Run the following command using the 'onedrive' client to query the OneDrive API to obtain the required 'drive_id' of the SharePoint Library that you wish to sync:
|
||||
```text
|
||||
onedrive --get-O365-drive-id '<your site name to search>'
|
||||
```
|
||||
|
@ -46,33 +67,162 @@ The following SharePoint site names were returned:
|
|||
```
|
||||
This list of site names can be used as a basis to search for the correct site for which you are searching
|
||||
|
||||
## 3. Create a new configuration directory and sync location for this SharePoint Library
|
||||
Create a new configuration directory for this SharePoint Library in the following manner:
|
||||
```text
|
||||
mkdir ~/.config/SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
## Configure the client's config file with the required 'drive_id' & 'sync_dir' options
|
||||
3. Create a new local folder to store the SharePoint Library data in
|
||||
Create a new local folder to store the SharePoint Library data in:
|
||||
```text
|
||||
mkdir ~/SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
**Note:** Do not use spaces in the directory name, use '_' as a replacement
|
||||
|
||||
Update your 'onedrive' configuration file (`~/.config/onedrive/config`) with the following:
|
||||
## 4. Configure SharePoint Library config file with the required 'drive_id' & 'sync_dir' options
|
||||
Download a copy of the default configuration file by downloading this file from GitHub and saving this file in the directory created above:
|
||||
```text
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/config -O ~/.config/SharePoint_My_Library_Name/config
|
||||
```
|
||||
|
||||
Update your 'onedrive' configuration file (`~/.config/SharePoint_My_Library_Name/config`) with the local folder where you will store your data:
|
||||
```text
|
||||
sync_dir = "~/SharePoint_My_Library_Name"
|
||||
```
|
||||
|
||||
4. Once you have obtained the 'drive_id' above, add to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
Update your 'onedrive' configuration file (`~/.config/SharePoint_My_Library_Name/config`) with the 'drive_id' value obtained in the steps above:
|
||||
```text
|
||||
drive_id = "insert the drive_id value from above here"
|
||||
```
|
||||
The OneDrive client will now be configured to sync this SharePoint shared library to your local system.
|
||||
The OneDrive client will now be configured to sync this SharePoint shared library to your local system and the location you have configured.
|
||||
|
||||
**Note:** After changing `drive_id`, you must perform a full re-synchronization by adding `--resync` to your existing command line.
|
||||
|
||||
## Test the configuration using '--dry-run'
|
||||
5. Test your new configuration using the `--dry-run` option to validate the new configuration
|
||||
## 5. Validate and Test the configuration
|
||||
Validate your new configuration using the `--display-config` option to validate you have configured the application correctly:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --display-config
|
||||
```
|
||||
|
||||
## Sync the SharePoint Library as required
|
||||
6. Sync the SharePoint Library to your system with either `--synchronize` or `--monitor` operations
|
||||
Test your new configuration using the `--dry-run` option to validate the application configuration:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbose --dry-run
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 6. Sync the SharePoint Library as required
|
||||
Sync the SharePoint Library to your system with either `--synchronize` or `--monitor` operations:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbose
|
||||
```
|
||||
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --monitor --verbose
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 7. Enable custom systemd service for SharePoint Library
|
||||
Systemd can be used to automatically run this configuration in the background; however, a unique systemd service will need to be set up for this SharePoint Library instance
|
||||
|
||||
In order to automatically start syncing each SharePoint Library, you will need to create a service file for each SharePoint Library. From the applicable 'systemd folder' where the applicable systemd service file exists:
|
||||
* RHEL / CentOS: `/usr/lib/systemd/system`
|
||||
* Others: `/usr/lib/systemd/user` and `/lib/systemd/system`
|
||||
|
||||
### Step1: Create a new systemd service file
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive.service /usr/lib/systemd/system/onedrive-SharePoint_My_Library_Name.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive@.service /usr/lib/systemd/system/onedrive-SharePoint_My_Library_Name@.service
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/user/onedrive.service /usr/lib/systemd/user/onedrive-SharePoint_My_Library_Name.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /lib/systemd/system/onedrive@.service /lib/systemd/system/onedrive-SharePoint_My_Library_Name@.service
|
||||
```
|
||||
|
||||
### Step 2: Edit new systemd service file
|
||||
Edit the new systemd file, updating the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/full/path/to/config/dir"
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/SharePoint_My_Library_Name"
|
||||
```
|
||||
|
||||
**Note:** When running the client manually, `--confdir="~/.config/..."` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl enable onedrive-SharePoint_My_Library_Name
|
||||
systemctl start onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user enable onedrive-SharePoint_My_Library_Name
|
||||
systemctl --user start onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
or
|
||||
```text
|
||||
systemctl --user enable onedrive-SharePoint_My_Library_Name@myusername.service
|
||||
systemctl --user start onedrive-SharePoint_My_Library_Name@myusername.service
|
||||
```
|
||||
|
||||
### Step 4: Viewing systemd status and logs for the custom service
|
||||
#### Viewing systemd service status - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl status onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Viewing systemd service status - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user status onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
journalctl --unit=onedrive-SharePoint_My_Library_Name -f
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
journalctl --user --unit=onedrive-SharePoint_My_Library_Name -f
|
||||
```
|
||||
|
||||
### Step 5: (Optional) Run custom systemd service at boot without user login
|
||||
In some cases it may be desirable for the systemd service to start without having to login as your 'user'
|
||||
|
||||
All the systemd steps above that utilise the `--user` option, will run the systemd service as your particular user. As such, the systemd service will not start unless you actually login to your system.
|
||||
|
||||
To avoid this issue, you need to reconfigure your 'user' account so that the systemd services you have created will startup without you having to login to your system:
|
||||
```text
|
||||
loginctl enable-linger <your_user_name>
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
alex@ubuntu-headless:~$ loginctl enable-linger alex
|
||||
```
|
||||
|
||||
## 8. Configuration for a SharePoint Library is complete
|
||||
The 'onedrive' client configuration for this particular SharePoint Library is now complete.
|
||||
|
||||
# How to configure multiple OneDrive SharePoint Shared Library sync
|
||||
Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
|
||||
Create a new configuration as per the process above. Repeat these steps for each SharePoint Library that you wish to use.
|
||||
|
|
602
docs/USAGE.md
602
docs/USAGE.md
|
@ -1,4 +1,7 @@
|
|||
# Configuration and Usage of the OneDrive Free Client
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Table of Contents
|
||||
- [Using the client](#using-the-client)
|
||||
* [Upgrading from 'skilion' client](#upgrading-from-skilion-client)
|
||||
|
@ -8,9 +11,12 @@
|
|||
* [Show your configuration](#show-your-configuration)
|
||||
* [Testing your configuration](#testing-your-configuration)
|
||||
* [Performing a sync](#performing-a-sync)
|
||||
* [Performing a selective directory sync](#performing-a-selective-directory-sync)
|
||||
* [Performing a single directory sync](#performing-a-single-directory-sync)
|
||||
* [Performing a 'one-way' download sync](#performing-a-one-way-download-sync)
|
||||
* [Performing a 'one-way' upload sync](#performing-a-one-way-upload-sync)
|
||||
* [Performing a selective sync via 'sync_list' file](#performing-a-selective-sync-via-sync_list-file)
|
||||
* [Performing a --resync](#performing-a---resync)
|
||||
* [Performing a --force-sync without a --resync or changing your configuration](#performing-a---force-sync-without-a---resync-or-changing-your-configuration)
|
||||
* [Increasing logging level](#increasing-logging-level)
|
||||
* [Client Activity Log](#client-activity-log)
|
||||
* [Notifications](#notifications)
|
||||
|
@ -24,19 +30,30 @@
|
|||
+ [skip_file](#skip_file)
|
||||
+ [skip_dotfiles](#skip_dotfiles)
|
||||
+ [monitor_interval](#monitor_interval)
|
||||
+ [monitor_fullscan_frequency](#monitor_fullscan_frequency)
|
||||
+ [monitor_fullscan_frequency](#monitor_fullscan_frequency)
|
||||
+ [monitor_log_frequency](#monitor_log_frequency)
|
||||
+ [min_notify_changes](#min_notify_changes)
|
||||
+ [operation_timeout](#operation_timeout)
|
||||
* [Performing a --resync](#performing-a---resync)
|
||||
* [Handling Symbolic Links](#handling-symbolic-links)
|
||||
* [Selective sync via 'sync_list' file](#selective-sync-via-sync_list-file)
|
||||
+ [ip_protocol_version](#ip_protocol_version)
|
||||
+ [classify_as_big_delete](#classify_as_big_delete)
|
||||
* [Configuring the client for 'single tenant application' use](#configuring-the-client-for-single-tenant-application-use)
|
||||
* [Configuring the client to use older 'skilion' application identifier](#configuring-the-client-to-use-older-skilion-application-identifier)
|
||||
- [Frequently Asked Configuration Questions](#frequently-asked-configuration-questions)
|
||||
* [How to sync only specific or single directory?](#how-to-sync-only-specific-or-single-directory)
|
||||
* [How to 'skip' directories from syncing?](#how-to-skip-directories-from-syncing)
|
||||
* [How to 'rate limit' the application to control bandwidth consumed for upload & download operations](#how-to-rate-limit-the-application-to-control-bandwidth-consumed-for-upload--download-operations)
|
||||
* [Shared folders (OneDrive Personal)](#shared-folders-onedrive-personal)
|
||||
* [Shared folders (OneDrive Business or Office 365)](#shared-folders-onedrive-business-or-office-365)
|
||||
* [SharePoint / Office 365 Shared Libraries](#sharepoint--office-365-shared-libraries)
|
||||
* [How to 'skip' files from syncing?](#how-to-skip-files-from-syncing)
|
||||
* [How to 'skip' dot files and folders from syncing?](#how-to-skip-dot-files-and-folders-from-syncing)
|
||||
* [How to 'skip' files larger than a certain size from syncing?](#how-to-skip-files-larger-than-a-certain-size-from-syncing)
|
||||
* [How to 'rate limit' the application to control bandwidth consumed for upload & download operations?](#how-to-rate-limit-the-application-to-control-bandwidth-consumed-for-upload--download-operations)
|
||||
* [How to prevent your local disk from filling up?](#how-to-prevent-your-local-disk-from-filling-up)
|
||||
* [How are symbolic links handled by the client?](#how-are-symbolic-links-handled-by-the-client)
|
||||
* [How to sync shared folders (OneDrive Personal)?](#how-to-sync-shared-folders-onedrive-personal)
|
||||
* [How to sync shared folders (OneDrive Business or Office 365)?](#how-to-sync-shared-folders-onedrive-business-or-office-365)
|
||||
* [How to sync SharePoint / Office 365 Shared Libraries?](#how-to-sync-sharepoint--office-365-shared-libraries)
|
||||
* [How to run a user systemd service at boot without user login?](#how-to-run-a-user-systemd-service-at-boot-without-user-login)
|
||||
* [How to create a shareable link?](#how-to-create-a-shareable-link)
|
||||
* [How to sync both Personal and Business accounts at the same time?](#how-to-sync-both-personal-and-business-accounts-at-the-same-time)
|
||||
* [How to sync multiple SharePoint Libraries at the same time?](#how-to-sync-multiple-sharepoint-libraries-at-the-same-time)
|
||||
- [Running 'onedrive' in 'monitor' mode](#running-onedrive-in-monitor-mode)
|
||||
* [Use webhook to subscribe to remote updates in 'monitor' mode](#use-webhook-to-subscribe-to-remote-updates-in-monitor-mode)
|
||||
* [More webhook configuration options](#more-webhook-configuration-options)
|
||||
|
@ -55,9 +72,7 @@
|
|||
* [Setup selinux for a sync folder outside of the home folder](#setup-selinux-for-a-sync-folder-outside-of-the-home-folder)
|
||||
- [All available commands](#all-available-commands)
|
||||
|
||||
|
||||
## Using the client
|
||||
|
||||
### Upgrading from 'skilion' client
|
||||
The 'skilion' version contains a significant number of defects in how the local sync state is managed. When upgrading from the 'skilion' version to this version, it is advisable to stop any service / onedrive process from running and then remove any `items.sqlite3` file from your configuration directory (`~/.config/onedrive/`) as this will force the creation of a new local cache file.
|
||||
|
||||
|
@ -83,7 +98,13 @@ The files and directories in the synchronization directory must follow the [Wind
|
|||
The application will attempt to handle instances where you have two files with the same names but with different capitalization. Where there is a namespace clash, the file name which clashes will not be synced. This is expected behavior and won't be fixed.
|
||||
|
||||
### curl compatibility
|
||||
If your system utilises curl >= 7.62.0 curl defaults to prefer HTTP/2 over HTTP/1.1 by default. If you wish to use HTTP/2 for some operations you will need to use the `--force-http-2` config option to enable otherwise all operations will use HTTP/1.1.
|
||||
If your system utilises curl < 7.47.0, curl defaults to HTTP/1.1 for HTTPS operations. The client will use HTTP/1.1.
|
||||
|
||||
If your system utilises curl >= 7.47.0 and < 7.62.0, curl will prefer HTTP/2 for HTTPS but will stick to HTTP/1.1 by default. The client will use HTTP/1.1 for HTTPS operations.
|
||||
|
||||
If your system utilises curl >= 7.62.0, curl defaults to prefer HTTP/2 over HTTP/1.1 by default. The client will utilise HTTP/2 for most HTTPS operations and HTTP/1.1 for others. This difference is governed by the OneDrive platform and not this client.
|
||||
|
||||
If you wish to explicitly use HTTP/1.1 you will need to use the `--force-http-11` flag or set the config option `force_http_11 = "true"` to force the application to use HTTP/1.1 otherwise all client operations will use whatever is the curl default for your distribution.
|
||||
|
||||
### Authorize the application with your OneDrive Account
|
||||
After installing the application you must authorize the application with your OneDrive Account. This is done by running the application without any additional command switches.
|
||||
|
@ -123,21 +144,17 @@ onedrive --display-config
|
|||
```
|
||||
This will display all the pertinent runtime interpretation of the options and configuration you are using. Example output is as follows:
|
||||
```text
|
||||
onedrive version = vX.Y.Z-A-bcdefghi
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync configured = false
|
||||
Configuration file successfully loaded
|
||||
onedrive version = vX.Y.Z-A-bcdefghi
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = true
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'enable_logging' = false
|
||||
...
|
||||
Selective sync 'sync_list' configured = false
|
||||
Config option 'sync_business_shared_folders' = false
|
||||
Business Shared Folders configured = false
|
||||
Config option 'webhook_enabled' = false
|
||||
```
|
||||
|
||||
### Testing your configuration
|
||||
|
@ -185,7 +202,7 @@ If you prefer to use your local files as stored in `~/OneDrive` as the 'source o
|
|||
onedrive --synchronize --local-first
|
||||
```
|
||||
|
||||
### Performing a selective directory sync
|
||||
### Performing a single directory sync
|
||||
In some cases it may be desirable to sync a single directory under ~/OneDrive without having to change your client configuration. To do this use the following command:
|
||||
```text
|
||||
onedrive --synchronize --single-directory '<dir_name>'
|
||||
|
@ -204,10 +221,136 @@ In some cases it may be desirable to 'upload only' to OneDrive. To do this use t
|
|||
```text
|
||||
onedrive --synchronize --upload-only
|
||||
```
|
||||
**Note:** If a file or folder is present on OneDrive, that does not exist locally, it will be removed. If the data on OneDrive should be kept, the following should be used:
|
||||
**Note:** If a file or folder is present on OneDrive, that was previously synced and now does not exist locally, that item will be removed from OneDrive. If the data on OneDrive should be kept, the following should be used:
|
||||
```text
|
||||
onedrive --synchronize --upload-only --no-remote-delete
|
||||
```
|
||||
**Note:** The operation of 'upload only' does not request data from OneDrive about what 'other' data exists online. The client only knows about the data that 'this' client uploaded, thus any files or folders created or uploaded outside of this client will remain untouched online.
|
||||
|
||||
### Performing a selective sync via 'sync_list' file
|
||||
Selective sync allows you to sync only specific files and directories.
|
||||
To enable selective sync create a file named `sync_list` in your application configuration directory (default is `~/.config/onedrive`).
|
||||
|
||||
Important points to understand before using 'sync_list'.
|
||||
* 'sync_list' excludes _everything_ by default on onedrive.
|
||||
* 'sync_list' follows an _"exclude overrides include"_ rule, and requires **explicit inclusion**.
|
||||
* Order exclusions before inclusions, so that anything _specifically included_ is included.
|
||||
* How and where you place your `/` matters for excludes and includes in sub directories.
|
||||
|
||||
Each line of the file represents a relative path from your `sync_dir`. All files and directories not matching any line of the file will be skipped during all operations.
|
||||
|
||||
Additionally, the use of `/` is critically important to determine how a rule is interpreted. It is very similar to `**` wildcards, for those that are familiar with globbing patterns.
|
||||
Here is an example of `sync_list`:
|
||||
```text
|
||||
# sync_list supports comments
|
||||
#
|
||||
# The ordering of entries is highly recommended - exclusions before inclusions
|
||||
#
|
||||
# Exclude temp folder(s) or file(s) under Documents folder(s), anywhere in Onedrive
|
||||
!Documents/temp*
|
||||
#
|
||||
# Exclude secret data folder in root directory only
|
||||
!/Secret_data/*
|
||||
#
|
||||
# Include everything else in root directory
|
||||
/*
|
||||
#
|
||||
# Include my Backup folder(s) or file(s) anywhere on Onedrive
|
||||
Backup
|
||||
#
|
||||
# Include my Backup folder in root
|
||||
/Backup/
|
||||
#
|
||||
# Include Documents folder(s) anywhere in Onedrive
|
||||
Documents/
|
||||
#
|
||||
# Include all PDF files in Documents folder(s), anywhere in Onedrive
|
||||
Documents/*.pdf
|
||||
#
|
||||
# Include this single document in Documents folder(s), anywhere in Onedrive
|
||||
Documents/latest_report.docx
|
||||
#
|
||||
# Include all Work/Project directories or files, inside 'Work' folder(s), anywhere in Onedrive
|
||||
Work/Project*
|
||||
#
|
||||
# Include all "notes.txt" files, anywhere in Onedrive
|
||||
notes.txt
|
||||
#
|
||||
# Include /Blender in the ~Onedrive root but not if elsewhere in Onedrive
|
||||
/Blender
|
||||
#
|
||||
# Include these directories(or files) in 'Pictures' folder(s), that have a space in their name
|
||||
Pictures/Camera Roll
|
||||
Pictures/Saved Pictures
|
||||
#
|
||||
# Include these names if they match any file or folder
|
||||
Cinema Soc
|
||||
Codes
|
||||
Textbooks
|
||||
Year 2
|
||||
```
|
||||
The following are supported for pattern matching and exclusion rules:
|
||||
* Use the `*` to wildcard select any characters to match for the item to be included
|
||||
* Use either `!` or `-` characters at the start of the line to exclude an otherwise included item
|
||||
|
||||
|
||||
**Note:** When enabling the use of 'sync_list' utilise the `--display-config` option to validate that your configuration will be used by the application, and test your configuration by adding `--dry-run` to ensure the client will operate as per your requirement.
|
||||
|
||||
**Note:** After changing the sync_list, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync`
|
||||
|
||||
**Note:** In some circumstances, it may be required to sync all the individual files within the 'sync_dir', but due to frequent name change / addition / deletion of these files, it is not desirable to constantly change the 'sync_list' file to include / exclude these files and force a resync. To assist with this, enable the following in your configuration file:
|
||||
```text
|
||||
sync_root_files = "true"
|
||||
```
|
||||
This will tell the application to sync any file that it finds in your 'sync_dir' root by default.
|
||||
|
||||
### Performing a --resync
|
||||
If you modify any of the following configuration items, you will be required to perform a `--resync` to ensure your client is syncing your data with the updated configuration:
|
||||
* sync_dir
|
||||
* skip_dir
|
||||
* skip_file
|
||||
* drive_id
|
||||
* Modifying sync_list
|
||||
* Modifying business_shared_folders
|
||||
|
||||
Additionally, you may choose to perform a `--resync` if you feel that this action needs to be taken to ensure your data is in sync. If you are using this switch simply because you don't know the sync status, you can query the actual sync status using `--display-sync-status`.
|
||||
|
||||
When using `--resync`, the following warning and advice will be presented:
|
||||
```text
|
||||
The use of --resync will remove your local 'onedrive' client state, thus no record will exist regarding your current 'sync status'
|
||||
This has the potential to overwrite local versions of files with potentially older versions downloaded from OneDrive which can lead to data loss
|
||||
If in-doubt, backup your local data first before proceeding with --resync
|
||||
|
||||
Are you sure you wish to proceed with --resync? [Y/N]
|
||||
```
|
||||
|
||||
To proceed with using `--resync`, you must type 'y' or 'Y' to allow the application to continue.
|
||||
|
||||
**Note:** It is highly recommended to only use `--resync` if the application advises you to use it. Do not just blindly set the application to start with `--resync` as the default option.
|
||||
|
||||
**Note:** In some automated environments (and it is 100% assumed you *know* what you are doing because of automation), in order to avoid this 'proceed with acknowledgement' requirement, add `--resync-auth` to automatically acknowledge the prompt.
|
||||
|
||||
### Performing a --force-sync without a --resync or changing your configuration
|
||||
In some cases and situations, you may have configured the application to skip certain files and folders using 'skip_file' and 'skip_dir' configuration. You then may have a requirement to actually sync one of these items, but do not wish to modify your configuration, nor perform an entire `--resync` twice.
|
||||
|
||||
The `--force-sync` option allows you to sync a specific directory, ignoring your 'skip_file' and 'skip_dir' configuration and negating the requirement to perform a `--resync`
|
||||
|
||||
In order to use this option, you must run the application manually in the following manner:
|
||||
```text
|
||||
onedrive --synchronize --single-directory '<directory_to_sync>' --force-sync <add any other options needed or required>
|
||||
```
|
||||
|
||||
When using `--force-sync`, the following warning and advice will be presented:
|
||||
```text
|
||||
WARNING: Overriding application configuration to use application defaults for skip_dir and skip_file due to --synchronize --single-directory --force-sync being used
|
||||
|
||||
The use of --force-sync will reconfigure the application to use defaults. This may have untold and unknown future impacts.
|
||||
By proceeding in using this option you accept any impacts including any data loss that may occur as a result of using --force-sync.
|
||||
|
||||
Are you sure you wish to proceed with --force-sync [Y/N]
|
||||
```
|
||||
|
||||
To proceed with using `--force-sync`, you must type 'y' or 'Y' to allow the application to continue.
|
||||
|
||||
### Increasing logging level
|
||||
When running a sync it may be desirable to see additional information as to the progress and operation of the client. To do this, use the following command:
|
||||
|
@ -228,7 +371,7 @@ The requested client activity log will instead be located in the users home dire
|
|||
On many systems this can be achieved by
|
||||
```text
|
||||
sudo mkdir /var/log/onedrive
|
||||
sudo chown root.users /var/log/onedrive
|
||||
sudo chown root:users /var/log/onedrive
|
||||
sudo chmod 0775 /var/log/onedrive
|
||||
```
|
||||
|
||||
|
@ -313,7 +456,7 @@ Configuration is determined by three layers: the default values, values set in t
|
|||
|
||||
Most command line options have a respective configuration file setting.
|
||||
|
||||
If you want to change the defaults, you can copy and edit the included config file into your configuration directory. Valid directories for the config file are:
|
||||
If you want to change the defaults, you can copy and edit the included config file into your configuration directory. Valid default directories for the config file are:
|
||||
* `~/.config/onedrive`
|
||||
* `/etc/onedrive`
|
||||
|
||||
|
@ -327,6 +470,8 @@ This file does not get created by default, and should only be created if you wan
|
|||
|
||||
See the [config](https://raw.githubusercontent.com/abraunegg/onedrive/master/config) file for the full list of options, and [All available commands](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#all-available-commands) for all possible keys and their default values.
|
||||
|
||||
**Note:** The location of the application configuration information can also be specified by using the `--confdir` configuration option which can be passed in at client run-time.
|
||||
|
||||
### The default configuration file is listed below:
|
||||
```text
|
||||
# Configuration for OneDrive Linux Client
|
||||
|
@ -349,15 +494,16 @@ See the [config](https://raw.githubusercontent.com/abraunegg/onedrive/master/con
|
|||
# disable_notifications = "false"
|
||||
# disable_upload_validation = "false"
|
||||
# enable_logging = "false"
|
||||
# force_http_2 = "false"
|
||||
# force_http_11 = "false"
|
||||
# local_first = "false"
|
||||
# no_remote_delete = "false"
|
||||
# skip_symlinks = "false"
|
||||
# debug_https = "false"
|
||||
# skip_dotfiles = "false"
|
||||
# skip_size = "1000"
|
||||
# dry_run = "false"
|
||||
# min_notify_changes = "5"
|
||||
# monitor_log_frequency = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
# monitor_fullscan_frequency = "12"
|
||||
# sync_root_files = "false"
|
||||
# classify_as_big_delete = "1000"
|
||||
|
@ -374,13 +520,21 @@ See the [config](https://raw.githubusercontent.com/abraunegg/onedrive/master/con
|
|||
# sync_dir_permissions = "700"
|
||||
# sync_file_permissions = "600"
|
||||
# rate_limit = "131072"
|
||||
# operation_timeout = "3600"
|
||||
# webhook_enabled = "false"
|
||||
# webhook_public_url = ""
|
||||
# webhook_listening_host = ""
|
||||
# webhook_listening_port = "8888"
|
||||
# webhook_expiration_interval = "86400"
|
||||
# webhook_renewal_interval = "43200"
|
||||
# space_reservation = "50"
|
||||
# display_running_config = "false"
|
||||
# read_only_auth_scope = "false"
|
||||
# cleanup_local_files = "false"
|
||||
# operation_timeout = "3600"
|
||||
# dns_timeout = "60"
|
||||
# connect_timeout = "10"
|
||||
# data_timeout = "600"
|
||||
# ip_protocol_version = "0"
|
||||
```
|
||||
|
||||
### 'config' file configuration examples:
|
||||
|
@ -414,7 +568,7 @@ The following are directory and file default permissions for any new directory o
|
|||
* Directories: 700 - This provides the following permissions: `drwx------`
|
||||
* Files: 600 - This provides the following permissions: `-rw-------`
|
||||
|
||||
To change the default permissions, update the following 2 configuration options with the required permissions. Utilise [Unix Permissions Calculator](http://permissions-calculator.org/) to assist in determining the required permissions.
|
||||
To change the default permissions, update the following 2 configuration options with the required permissions. Utilise the [Unix Permissions Calculator](https://chmod-calculator.com/) to assist in determining the required permissions.
|
||||
|
||||
```text
|
||||
# When changing a config option below, remove the '#' from the start of the line
|
||||
|
@ -468,7 +622,7 @@ Patterns are case insensitive. `*` and `?` [wildcards characters](https://techne
|
|||
|
||||
Files can be skipped in the following fashion:
|
||||
* Specify a wildcard, eg: '*.txt' (skip all txt files)
|
||||
* Explicitly specify the filename and its full path relative to your sync_dir, eg: 'path/to/file/filename.ext'
|
||||
* Explicitly specify the filename and its full path relative to your sync_dir, eg: '/path/to/file/filename.ext'
|
||||
* Explicitly specify the filename only and skip every instance of this filename, eg: 'filename.ext'
|
||||
|
||||
By default, the following files will be skipped:
|
||||
|
@ -484,7 +638,7 @@ Example:
|
|||
# For explanations of all config options below see docs/USAGE.md or the man page.
|
||||
#
|
||||
# sync_dir = "~/OneDrive"
|
||||
skip_file = "~*|Documents/OneNote*|Documents/config.xlaunch|myfile.ext"
|
||||
skip_file = "~*|/Documents/OneNote*|/Documents/config.xlaunch|myfile.ext|/Documents/keepass.kdbx"
|
||||
# monitor_interval = "300"
|
||||
# skip_dir = ""
|
||||
# log_dir = "/var/log/onedrive/"
|
||||
|
@ -495,10 +649,11 @@ skip_file = "~*|Documents/OneNote*|Documents/config.xlaunch|myfile.ext"
|
|||
skip_file = "~*|.~*|*.tmp|*.swp"
|
||||
skip_file = "*.blah"
|
||||
skip_file = "never_sync.file"
|
||||
skip_file = "/Documents/keepass.kdbx"
|
||||
```
|
||||
This will be interpreted the same as:
|
||||
```text
|
||||
skip_file = "~*|.~*|*.tmp|*.swp|*.blah|never_sync.file"
|
||||
skip_file = "~*|.~*|*.tmp|*.swp|*.blah|never_sync.file|/Documents/keepass.kdbx"
|
||||
```
|
||||
|
||||
**Note:** after changing `skip_file`, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync`
|
||||
|
@ -526,8 +681,9 @@ Example:
|
|||
# dry_run = "false"
|
||||
monitor_interval = "600"
|
||||
# min_notify_changes = "5"
|
||||
# monitor_log_frequency = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
```
|
||||
**Note:** It is strongly advised you do not use a value of less than 300 seconds for 'monitor_interval'. Using a value less than 300 means your application will be constantly needlessly checking OneDrive online for changes. Future versions of the application may enforce the checking of this minimum value.
|
||||
|
||||
#### monitor_fullscan_frequency
|
||||
This configuration option controls the number of 'monitor_interval' iterations between when a full scan of your data is performed to ensure data integrity and consistency.
|
||||
|
@ -539,13 +695,37 @@ Setting this value to 24 means that the full scan of OneDrive and checking the i
|
|||
Example:
|
||||
```text
|
||||
# min_notify_changes = "5"
|
||||
# monitor_log_frequency = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
monitor_fullscan_frequency = "24"
|
||||
# sync_root_files = "false"
|
||||
# classify_as_big_delete = "1000"
|
||||
```
|
||||
|
||||
**Note:** When running in --monitor mode, at application start-up, a full scan will be performed to ensure data integrity. This option has zero effect when running the application in --synchronize mode and a full scan will always be performed.
|
||||
**Note:** When running in --monitor mode, at application start-up, a full scan will be performed to ensure data integrity. This option has zero effect when running the application in `--synchronize` mode and a full scan will always be performed.
|
||||
|
||||
#### monitor_log_frequency
|
||||
This configuration option controls the output of when logging is performed to detail that a sync is occurring with OneDrive when using `--monitor` mode. The frequency of syncing with OneDrive is controlled via 'monitor_interval'.
|
||||
|
||||
By default without configuration, 'monitor_log_frequency' is set to 6.
|
||||
|
||||
By default, at application start-up when using `--monitor` mode, the following will be logged to indicate that the application has correctly started and performed all the initial processing steps:
|
||||
```
|
||||
Configuring Global Azure AD Endpoints
|
||||
Initializing the Synchronization Engine ...
|
||||
Initializing monitor ...
|
||||
OneDrive monitor interval (seconds): 300
|
||||
Starting a sync with OneDrive
|
||||
Syncing changes from OneDrive ...
|
||||
Performing a database consistency and integrity check on locally stored data ...
|
||||
Sync with OneDrive is complete
|
||||
```
|
||||
Then, based on 'monitor_log_frequency', the following will be logged when the value is reached:
|
||||
```
|
||||
Starting a sync with OneDrive
|
||||
Syncing changes from OneDrive ...
|
||||
Sync with OneDrive is complete
|
||||
```
|
||||
**Note:** The additional log output `Performing a database consistency and integrity check on locally stored data ...` will only be displayed when this activity is occurring which is triggered by 'monitor_fullscan_frequency'.
|
||||
|
||||
#### min_notify_changes
|
||||
This option defines the minimum number of pending incoming changes necessary to trigger a desktop notification. This allows controlling the frequency of notifications.
|
||||
|
@ -555,7 +735,7 @@ Example:
|
|||
# dry_run = "false"
|
||||
# monitor_interval = "300"
|
||||
min_notify_changes = "50"
|
||||
# monitor_log_frequency = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
# monitor_fullscan_frequency = "12"
|
||||
```
|
||||
|
||||
|
@ -569,114 +749,44 @@ Example:
|
|||
operation_timeout = "3600"
|
||||
```
|
||||
|
||||
### Performing a --resync
|
||||
If you modify any of the following configuration items, you will be required to perform a `--resync` to ensure your client is syncing your data with the updated configuration:
|
||||
* sync_dir
|
||||
* skip_dir
|
||||
* skip_file
|
||||
* drive_id
|
||||
* Modifying sync_list
|
||||
* Modifying business_shared_folders
|
||||
#### ip_protocol_version
|
||||
By default, the application will use IPv4 and IPv6 to resolve and communicate with Microsoft OneDrive. In some Linux distributions (most notably Ubuntu and those distributions based on Ubuntu) this will cause problems due to how DNS resolution is being performed.
|
||||
|
||||
Additionally, you may choose to perform a `--resync` if you feel that this action needs to be taken to ensure your data is in sync. If you are using this switch simply because you don't know the sync status, you can query the actual sync status using `--display-sync-status`.
|
||||
|
||||
When using `--resync`, the following warning and advice will be presented:
|
||||
To configure the application to use a specific IP version, configure the following in your config file:
|
||||
```text
|
||||
The use of --resync will remove your local 'onedrive' client state, thus no record will exist regarding your current 'sync status'
|
||||
This has the potential to overwrite local versions of files with potentially older versions downloaded from OneDrive which can lead to data loss
|
||||
If in-doubt, backup your local data first before proceeding with --resync
|
||||
# operation_timeout = "3600"
|
||||
# dns_timeout = "60"
|
||||
# connect_timeout = "10"
|
||||
# data_timeout = "600"
|
||||
ip_protocol_version = "1"
|
||||
|
||||
Are you sure you wish to proceed with --resync? [Y/N]
|
||||
```
|
||||
**Note:**
|
||||
* A value of 0 will mean the client will use IPv4 and IPv6. This is the default.
|
||||
* A value of 1 will mean the client will use IPv4 only.
|
||||
* A value of 2 will mean the client will use IPv6 only.
|
||||
|
||||
#### classify_as_big_delete
|
||||
This configuration option will help prevent the online deletion of files and folders online, when the directory that has been deleted contains more items than the specified value.
|
||||
|
||||
By default, this value is 1000 which will count files and folders as children of the directory that has been deleted.
|
||||
|
||||
To change this value, configure the following in your config file:
|
||||
```text
|
||||
# monitor_fullscan_frequency = "12"
|
||||
# sync_root_files = "false"
|
||||
classify_as_big_delete = "3000"
|
||||
# user_agent = ""
|
||||
# remove_source_files = "false"
|
||||
```
|
||||
|
||||
To proceed with using `--resync`, you must type 'y' or 'Y' to allow the application to continue.
|
||||
**Note:**
|
||||
* This option only looks at Directories. It has zero effect on deleting files located in your 'sync_dir' root
|
||||
* This option (in v2.4.x and below) only gets activated when using `--monitor`. In `--synchronize` mode it is ignored as it is assumed you performed that desired operation before you started your next manual sync with OneDrive.
|
||||
* Be sensible with setting this value - do not use a low value such as '1' as this will prevent you from syncing your data each and every time you delete a single file.
|
||||
|
||||
**Note:** It is highly recommended to only use `--resync` if the application advises you to use it. Do not just blindly set the application to start with `--resync` as the default option.
|
||||
|
||||
**Note:** In some automated environments (and it is 100% assumed you *know* what you are doing because of automation), in order to avoid this 'proceed with acknowledgement' requirement, add `--resync-auth` to automatically acknowledge the prompt.
|
||||
|
||||
### Handling Symbolic Links
|
||||
Microsoft OneDrive has zero concept or understanding of symbolic links, and attempting to upload a symbolic link to Microsoft OneDrive generates a platform API error. All data (files and folders) that are uploaded to OneDrive must be whole files or actual directories.
|
||||
|
||||
As such, there are only two methods to support symbolic links with this client:
|
||||
1. Follow the Linux symbolic link and upload what ever the link is pointing at to OneDrive. This is the default behaviour.
|
||||
2. Skip symbolic links by configuring the application to do so. In skipping, no data, no link, no reference is uploaded to OneDrive.
|
||||
|
||||
To skip symbolic links, edit your configuration as per below:
|
||||
|
||||
```text
|
||||
# local_first = "false"
|
||||
# no_remote_delete = "false"
|
||||
skip_symlinks = "true"
|
||||
# debug_https = "false"
|
||||
# skip_dotfiles = "false"
|
||||
```
|
||||
Setting this to `"true"` will configure the client to skip all symbolic links while syncing.
|
||||
|
||||
The default setting is `"false"` which will sync the whole folder structure referenced by the symbolic link, duplicating the contents on OneDrive in the place where the symbolic link is.
|
||||
|
||||
### Selective sync via 'sync_list' file
|
||||
Selective sync allows you to sync only specific files and directories.
|
||||
To enable selective sync create a file named `sync_list` in `~/.config/onedrive`.
|
||||
Each line of the file represents a relative path from your `sync_dir`. All files and directories not matching any line of the file will be skipped during all operations.
|
||||
Here is an example of `sync_list`:
|
||||
```text
|
||||
# sync_list supports comments
|
||||
#
|
||||
# The ordering of entries is highly recommended - exclusions before inclusions
|
||||
#
|
||||
# Exclude temp folders under Documents
|
||||
!Documents/temp*
|
||||
# Exclude my secret data
|
||||
!/Secret_data/*
|
||||
#
|
||||
# Include my Backup folder
|
||||
Backup
|
||||
# Include Documents folder
|
||||
Documents/
|
||||
# Include all PDF documents
|
||||
Documents/*.pdf
|
||||
# Include this single document
|
||||
Documents/latest_report.docx
|
||||
# Include all Work/Project directories
|
||||
Work/Project*
|
||||
notes.txt
|
||||
# Include /Blender in the ~OneDrive root but not if elsewhere
|
||||
/Blender
|
||||
# Include these names if they match any file or folder
|
||||
Cinema Soc
|
||||
Codes
|
||||
Textbooks
|
||||
Year 2
|
||||
```
|
||||
The following are supported for pattern matching and exclusion rules:
|
||||
* Use the `*` to wildcard select any characters to match for the item to be included
|
||||
* Use either `!` or `-` characters at the start of the line to exclude an otherwise included item
|
||||
|
||||
To simplify 'exclusions' and 'inclusions', the following is also possible:
|
||||
```text
|
||||
# sync_list supports comments
|
||||
#
|
||||
# The ordering of entries is highly recommended - exclusions before inclusions
|
||||
#
|
||||
# Exclude temp folders under Documents
|
||||
!Documents/temp*
|
||||
# Exclude my secret data
|
||||
!/Secret_data/*
|
||||
#
|
||||
# Include everything else
|
||||
/*
|
||||
```
|
||||
|
||||
**Note:** After changing the sync_list, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync`
|
||||
|
||||
**Note:** In some circumstances, it may be required to sync all the individual files within the 'sync_dir', but due to frequent name change / addition / deletion of these files, it is not desirable to constantly change the 'sync_list' file to include / exclude these files and force a resync. To assist with this, enable the following in your configuration file:
|
||||
```text
|
||||
sync_root_files = "true"
|
||||
```
|
||||
This will tell the application to sync any file that it finds in your 'sync_dir' root by default.
|
||||
|
||||
### Configuring the client for 'single tenant application' use
|
||||
#### Configuring the client for 'single tenant application' use
|
||||
In some instances when using OneDrive Business Accounts, depending on the Azure organisational configuration, it will be necessary to configure the client as a 'single tenant application'.
|
||||
To configure this, after creating the application on your Azure tenant, update the 'config' file with the tenant name (not the GUID) and the newly created Application ID, then this will be used for the authentication process.
|
||||
```text
|
||||
|
@ -689,7 +799,7 @@ azure_tenant_id = "your.azure.tenant.name"
|
|||
# sync_business_shared_folders = "false"
|
||||
```
|
||||
|
||||
### Configuring the client to use older 'skilion' application identifier
|
||||
#### Configuring the client to use older 'skilion' application identifier
|
||||
In some instances it may be desirable to utilise the older 'skilion' application identifier to avoid authorising a new application ID within Microsoft Azure environments.
|
||||
To configure this, update the 'config' file with the old Application ID, then this will be used for the authentication process.
|
||||
```text
|
||||
|
@ -702,11 +812,17 @@ application_id = "22c49a0d-d21c-4792-aed1-8f163c982546"
|
|||
|
||||
**Note:** After changing the 'application_id' you will need to restart any 'onedrive' process you have running, and potentially issue a `--reauth` to re-authenticate the client with this updated application ID.
|
||||
|
||||
## Frequently Asked Configuration Questions
|
||||
|
||||
### How to sync only specific or single directory?
|
||||
There are two methods to achieve this:
|
||||
* Utilise '--single-directory' option to only sync this specific path
|
||||
* Utilise 'sync_list' to configure what files and directories to sync, and what should be excluded
|
||||
|
||||
### How to 'skip' directories from syncing?
|
||||
There are several mechanisms available to 'skip' a directory from the sync process:
|
||||
* Utilise 'skip_dir'
|
||||
* Utilise 'sync_list'
|
||||
* Utilise 'skip_dir' to configure what directories to skip. Refer to above for configuration advice.
|
||||
* Utilise 'sync_list' to configure what files and directories to sync, and what should be excluded
|
||||
|
||||
One further method is to add a '.nosync' empty file to any folder. When this file is present, adding `--check-for-nosync` to your command line will now make the sync process skip any folder where the '.nosync' file is present.
|
||||
|
||||
|
@ -720,8 +836,35 @@ check_nosync = "true"
|
|||
# download_only = "false"
|
||||
# disable_notifications = "false"
|
||||
```
|
||||
**Default:** False
|
||||
|
||||
### How to 'rate limit' the application to control bandwidth consumed for upload & download operations
|
||||
### How to 'skip' files from syncing?
|
||||
There are two methods to achieve this:
|
||||
* Utilise 'skip_file' to configure what files to skip. Refer to above for configuration advice.
|
||||
* Utilise 'sync_list' to configure what files and directories to sync, and what should be excluded
|
||||
|
||||
### How to 'skip' dot files and folders from syncing?
|
||||
There are three methods to achieve this:
|
||||
* Utilise 'skip_file' or 'skip_dir' to configure what files or folders to skip. Refer to above for configuration advice.
|
||||
* Utilise 'sync_list' to configure what files and directories to sync, and what should be excluded
|
||||
* Utilise 'skip_dotfiles' to skip any dot file (for example: `.Trash-1000` or `.xdg-volume-info`) from syncing to OneDrive.
|
||||
|
||||
Example:
|
||||
```text
|
||||
# skip_symlinks = "false"
|
||||
# debug_https = "false"
|
||||
skip_dotfiles = "true"
|
||||
# skip_size = "1000"
|
||||
# dry_run = "false"
|
||||
```
|
||||
**Default:** False
|
||||
|
||||
### How to 'skip' files larger than a certain size from syncing?
|
||||
There are two methods to achieve this:
|
||||
* Use `--skip-size ARG` as part of a CLI command to skip new files larger than this size (in MB)
|
||||
* Use `skip_size = "value"` as part of your 'config' file where files larger than this size (in MB) will be skipped
|
||||
|
||||
### How to 'rate limit' the application to control bandwidth consumed for upload & download operations?
|
||||
To minimise the Internet bandwidth for upload and download operations, you can configure the 'rate_limit' option within the config file.
|
||||
|
||||
Example valid values for this are as follows:
|
||||
|
@ -742,15 +885,93 @@ rate_limit = "131072"
|
|||
|
||||
**Note:** A number greater than '131072' is a valid value, with '104857600' being tested as an upper limit.
|
||||
|
||||
### Shared folders (OneDrive Personal)
|
||||
### How to prevent your local disk from filling up?
|
||||
By default, the application will reserve 50MB of disk space to prevent your filesystem from running out of disk space. This value can be modified by adding the following to your config file:
|
||||
|
||||
Example:
|
||||
```text
|
||||
...
|
||||
# webhook_expiration_interval = "86400"
|
||||
# webhook_renewal_interval = "43200"
|
||||
space_reservation = "10"
|
||||
```
|
||||
|
||||
The value entered is in MB (Mega Bytes). In this example, a value of 10MB is being used, and will be converted to bytes by the application. The value being used can be reviewed when using `--display-config`:
|
||||
```
|
||||
Config option 'sync_dir_permissions' = 700
|
||||
Config option 'sync_file_permissions' = 600
|
||||
Config option 'space_reservation' = 10485760
|
||||
Config option 'application_id' =
|
||||
Config option 'azure_ad_endpoint' =
|
||||
Config option 'azure_tenant_id' = common
|
||||
```
|
||||
|
||||
Any value is valid here, however, if you use a value of '0' a value of '1' will actually be used, so that you actually do not run out of disk space.
|
||||
|
||||
### How are symbolic links handled by the client?
|
||||
Microsoft OneDrive has zero concept or understanding of symbolic links, and attempting to upload a symbolic link to Microsoft OneDrive generates a platform API error. All data (files and folders) that are uploaded to OneDrive must be whole files or actual directories.
|
||||
|
||||
As such, there are only two methods to support symbolic links with this client:
|
||||
1. Follow the Linux symbolic link and upload whatever the link is pointing at to OneDrive. This is the default behaviour.
|
||||
2. Skip symbolic links by configuring the application to do so. In skipping, no data, no link, no reference is uploaded to OneDrive.
|
||||
|
||||
To skip symbolic links, edit your configuration as per below:
|
||||
|
||||
```text
|
||||
# local_first = "false"
|
||||
# no_remote_delete = "false"
|
||||
skip_symlinks = "true"
|
||||
# debug_https = "false"
|
||||
# skip_dotfiles = "false"
|
||||
```
|
||||
Setting this to `"true"` will configure the client to skip all symbolic links while syncing.
|
||||
|
||||
The default setting is `"false"` which will sync the whole folder structure referenced by the symbolic link, duplicating the contents on OneDrive in the place where the symbolic link is.
|
||||
|
||||
### How to sync shared folders (OneDrive Personal)?
|
||||
Folders shared with you can be synced by adding them to your OneDrive. To do that open your OneDrive, go to the Shared files list, right click on the folder you want to sync and then click on "Add to my OneDrive".
|
||||
|
||||
### Shared folders (OneDrive Business or Office 365)
|
||||
### How to sync shared folders (OneDrive Business or Office 365)?
|
||||
Refer to [./BusinessSharedFolders.md](BusinessSharedFolders.md) for configuration assistance.
|
||||
|
||||
### SharePoint / Office 365 Shared Libraries
|
||||
Do not use the 'Add shortcut to My files' from the OneDrive web based interface to add a 'shortcut' to your shared folder. This shortcut is not supported by the OneDrive API, thus it cannot be used.
|
||||
|
||||
### How to sync SharePoint / Office 365 Shared Libraries?
|
||||
Refer to [./SharePoint-Shared-Libraries.md](SharePoint-Shared-Libraries.md) for configuration assistance.
|
||||
|
||||
### How to run a user systemd service at boot without user login?
|
||||
In some cases it may be desirable for the systemd service to start without having to login as your 'user'
|
||||
|
||||
To avoid this issue, you need to reconfigure your 'user' account so that the systemd services you have created will startup without you having to login to your system:
|
||||
```text
|
||||
loginctl enable-linger <your_user_name>
|
||||
```
|
||||
|
||||
### How to create a shareable link?
|
||||
In some cases it may be desirable to create a shareable file link and give this link to other users to access a specific file.
|
||||
|
||||
To do this, use the following command:
|
||||
```text
|
||||
onedrive --create-share-link <path/to/file>
|
||||
```
|
||||
**Note:** By default this will be a read-only link.
|
||||
|
||||
To make this a read-write link, use the following command:
|
||||
```text
|
||||
onedrive --create-share-link <path/to/file> --with-editing-perms
|
||||
```
|
||||
**Note:** The ordering of the option file path and option flag is important.
|
||||
|
||||
### How to sync both Personal and Business accounts at the same time?
|
||||
You must configure separate instances of the application configuration for each account.
|
||||
|
||||
Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
|
||||
|
||||
### How to sync multiple SharePoint Libraries at the same time?
|
||||
You must configure separate instances of the application configuration for each SharePoint Library.
|
||||
|
||||
Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
|
||||
|
||||
## Running 'onedrive' in 'monitor' mode
|
||||
Monitor mode (`--monitor`) allows the onedrive process to continually monitor your local file system for changes to files.
|
||||
|
||||
|
@ -762,13 +983,19 @@ Both of these errors are local environment issues, where the following system va
|
|||
* `fs.file-max`
|
||||
* `fs.inotify.max_user_watches`
|
||||
|
||||
To determine what these values are on your system use the following commands:
|
||||
```
|
||||
To determine what the existing values are on your system use the following commands:
|
||||
```text
|
||||
sysctl fs.file-max
|
||||
sysctl fs.inotify.max_user_watches
|
||||
```
|
||||
|
||||
To make a change to these variables:
|
||||
To determine what value to change to, you need to count all the files and folders in your configured 'sync_dir':
|
||||
```text
|
||||
cd /path/to/your/sync/dir
|
||||
ls -laR | wc -l
|
||||
```
|
||||
|
||||
To make a change to these variables using your file and folder count:
|
||||
```
|
||||
sudo sysctl fs.file-max=<new_value>
|
||||
sudo sysctl fs.inotify.max_user_watches=<new_value>
|
||||
|
@ -782,7 +1009,7 @@ A webhook can be optionally enabled in the monitor mode to allow the onedrive pr
|
|||
|
||||
To enable this feature, you need to configure the following options in the config file:
|
||||
|
||||
```
|
||||
```text
|
||||
webhook_enabled = "true"
|
||||
webhook_public_url = "<public-facing url to reach your webhook>"
|
||||
```
|
||||
|
@ -793,10 +1020,11 @@ Setting `webhook_enabled` to `true` enables the webhook in 'monitor' mode. The o
|
|||
|
||||
For example, below is a nginx config snippet to proxy traffic into the webhook:
|
||||
|
||||
```
|
||||
```text
|
||||
server {
|
||||
listen 80;
|
||||
location /webhooks/onedrive {
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
}
|
||||
|
@ -804,6 +1032,29 @@ server {
|
|||
|
||||
With nginx running, you can configure `webhook_public_url` to `https://<your_host>/webhooks/onedrive`.
|
||||
|
||||
If you receive this application error:
|
||||
```text
|
||||
Subscription validation request failed. Response must exactly match validationToken query parameter.
|
||||
```
|
||||
The most likely cause for this error will be your nginx configuration. To resolve, potentially investigate the following configuration for nginx:
|
||||
|
||||
```text
|
||||
server {
|
||||
listen 80;
|
||||
location /webhooks/onedrive {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Original-Request-URI $request_uri;
|
||||
proxy_read_timeout 300s;
|
||||
proxy_connect_timeout 75s;
|
||||
proxy_buffering off;
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For any further nginx configuration assistance, please refer to: https://docs.nginx.com/
|
||||
|
||||
### More webhook configuration options
|
||||
|
||||
Below options can be optionally configured. The default is usually good enough.
|
||||
|
@ -849,6 +1100,7 @@ tail -f /var/log/onedrive/<username>.onedrive.log
|
|||
To change what 'user' the client runs under (by default root), manually edit the init.d service file and modify `daemon --user root onedrive_service.sh` for the correct user.
|
||||
|
||||
### OneDrive service running as root user via systemd (Arch, Ubuntu, Debian, OpenSuSE, Fedora)
|
||||
First, su to root using `su - root`, then enable the systemd service:
|
||||
```text
|
||||
systemctl --user enable onedrive
|
||||
systemctl --user start onedrive
|
||||
|
@ -857,6 +1109,11 @@ systemctl --user start onedrive
|
|||
|
||||
**Note:** This will run the 'onedrive' process with a UID/GID of '0', thus, any files or folders that are created will be owned by 'root'
|
||||
|
||||
To view the status of the service running, use the following:
|
||||
```text
|
||||
systemctl --user status onedrive.service
|
||||
```
|
||||
|
||||
To see the systemd application logs run:
|
||||
```text
|
||||
journalctl --user-unit=onedrive -f
|
||||
|
@ -935,6 +1192,11 @@ systemctl --user enable onedrive
|
|||
systemctl --user start onedrive
|
||||
```
|
||||
|
||||
To view the status of the service running for the user, use the following:
|
||||
```text
|
||||
systemctl --user status onedrive.service
|
||||
```
|
||||
|
||||
To see the systemd application logs run:
|
||||
```text
|
||||
journalctl --user-unit=onedrive -f
|
||||
|
@ -1012,11 +1274,13 @@ for extra details.
|
|||
|
||||
## Additional Configuration
|
||||
### Advanced Configuration of the OneDrive Free Client
|
||||
* Configuring the client to use multiple OneDrive accounts / configurations
|
||||
* Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
* Configuring the client for use when 'sync_dir' is a mounted directory
|
||||
* Upload data from the local ~/OneDrive folder to a specific location on OneDrive
|
||||
|
||||
* Configuring the client to use multiple OneDrive accounts / configurations, for example:
|
||||
* Setup to use onedrive with both Personal and Business accounts
|
||||
* Setup to use onedrive with multiple SharePoint Libraries
|
||||
* Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
* Configuring the client for use when 'sync_dir' is a mounted directory
|
||||
* Upload data from the local ~/OneDrive folder to a specific location on OneDrive
|
||||
|
||||
Refer to [./advanced-usage.md](advanced-usage.md) for configuration assistance.
|
||||
|
||||
### Access OneDrive service through a proxy
|
||||
|
@ -1081,13 +1345,15 @@ Options:
|
|||
The authorization URL is written to the `authUrl`, then onedrive waits for the file `responseUrl`
|
||||
to be present, and reads the response from that file.
|
||||
--auth-response ARG
|
||||
Perform authentication not via interactive dialog but via providing the reponse url directly.
|
||||
Perform authentication not via interactive dialog but via providing the response url directly.
|
||||
--check-for-nomount
|
||||
Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
--check-for-nosync
|
||||
Check for the presence of .nosync in each directory. If found, skip directory from sync.
|
||||
--classify-as-big-delete
|
||||
Number of children in a path that is locally removed which will be classified as a 'big data delete'
|
||||
--cleanup-local-files
|
||||
Cleanup additional local files when using --download-only. This will remove local data.
|
||||
--confdir ARG
|
||||
Set the directory used to store the configuration files
|
||||
--create-directory ARG
|
||||
|
@ -1106,6 +1372,8 @@ Options:
|
|||
Disable upload validation when uploading to OneDrive
|
||||
--display-config
|
||||
Display what options the client will use as currently configured - no sync will be performed.
|
||||
--display-running-config
|
||||
Display what options the client has been configured to use on application startup.
|
||||
--display-sync-status
|
||||
Display the sync status of the client - no sync will be performed.
|
||||
--download-only
|
||||
|
@ -1116,8 +1384,10 @@ Options:
|
|||
Enable client activity to a separate log file
|
||||
--force
|
||||
Force the deletion of data when a 'big delete' is detected
|
||||
--force-http-2
|
||||
Force the use of HTTP/2 for all operations where applicable
|
||||
--force-http-11
|
||||
Force the use of HTTP 1.1 for all operations
|
||||
--force-sync
|
||||
Force a synchronization of a specific folder, only when using --single-directory and ignoring all non-default skip_dir and skip_file rules
|
||||
--get-O365-drive-id ARG
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library
|
||||
--get-file-link ARG
|
||||
|
@ -1146,7 +1416,7 @@ Options:
|
|||
Frequency of logging in monitor mode
|
||||
--no-remote-delete
|
||||
Do not delete local file 'deletes' from OneDrive when using --upload-only
|
||||
--operation-timeout
|
||||
--operation-timeout ARG
|
||||
Maximum amount of time (in seconds) an operation is allowed to take
|
||||
--print-token
|
||||
Print the access token, useful for debugging
|
||||
|
@ -1176,6 +1446,8 @@ Options:
|
|||
Skip syncing of symlinks
|
||||
--source-directory ARG
|
||||
Source directory to rename or move on OneDrive - no sync will be performed.
|
||||
--space-reservation ARG
|
||||
The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation
|
||||
--sync-root-files
|
||||
Sync all files in sync_dir root when using sync_list.
|
||||
--sync-shared-folders
|
||||
|
@ -1192,4 +1464,6 @@ Options:
|
|||
Print more details, useful for debugging (repeat for extra debugging)
|
||||
--version
|
||||
Print the version and exit
|
||||
--with-editing-perms
|
||||
Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>
|
||||
```
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
# Advanced Configuration of the OneDrive Free Client
|
||||
This document covers the following scenarios:
|
||||
* Configuring the client to use multiple OneDrive accounts / configurations
|
||||
* Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
* Configuring the client for use when 'sync_dir' is a mounted directory
|
||||
* Upload data from the local ~/OneDrive folder to a specific location on OneDrive
|
||||
* [Configuring the client to use multiple OneDrive accounts / configurations](#configuring-the-client-to-use-multiple-onedrive-accounts--configurations)
|
||||
* [Configuring the client to use multiple OneDrive accounts / configurations using Docker](#configuring-the-client-to-use-multiple-onedrive-accounts--configurations-using-docker)
|
||||
* [Configuring the client for use in dual-boot (Windows / Linux) situations](#configuring-the-client-for-use-in-dual-boot-windows--linux-situations)
|
||||
* [Configuring the client for use when 'sync_dir' is a mounted directory](#configuring-the-client-for-use-when-sync_dir-is-a-mounted-directory)
|
||||
* [Upload data from the local ~/OneDrive folder to a specific location on OneDrive](#upload-data-from-the-local-onedrive-folder-to-a-specific-location-on-onedrive)
|
||||
|
||||
## Configuring the client to use multiple OneDrive accounts / configurations
|
||||
Essentially, each OneDrive account or SharePoint Shared Library which you require to be synced needs to have its own and unique configuration, local sync directory and service files. To do this, the following steps are needed:
|
||||
|
@ -91,18 +92,29 @@ In order to automatically start syncing your OneDrive accounts, you will need to
|
|||
* RHEL / CentOS: `/usr/lib/systemd/system`
|
||||
* Others: `/usr/lib/systemd/user` and `/lib/systemd/system`
|
||||
|
||||
**Note:** The `onedrive.service` runs the service as the 'root' user, whereas the `onedrive@.service` runs the service as your user account.
|
||||
|
||||
### Step1: Create a new systemd service file
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
cp onedrive.service onedrive-my-new-config.service
|
||||
sudo cp /usr/lib/systemd/system/onedrive.service /usr/lib/systemd/system/onedrive-my-new-config.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
cp onedrive@.service onedrive-my-new-config@.service
|
||||
sudo cp /usr/lib/systemd/system/onedrive@.service /usr/lib/systemd/system/onedrive-my-new-config@.service
|
||||
```
|
||||
|
||||
Edit the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/user/onedrive.service /usr/lib/systemd/user/onedrive-my-new-config.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /lib/systemd/system/onedrive@.service /lib/systemd/system/onedrive-my-new-config@.service
|
||||
```
|
||||
|
||||
### Step 2: Edit new systemd service file
|
||||
Edit the new systemd file, updating the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/full/path/to/config/dir"
|
||||
```
|
||||
|
@ -112,7 +124,18 @@ Example:
|
|||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/my-new-config"
|
||||
```
|
||||
|
||||
Then you can safely run these commands:
|
||||
**Note:** When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl enable onedrive-my-new-config
|
||||
systemctl start onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user enable onedrive-my-new-config
|
||||
systemctl --user start onedrive-my-new-config
|
||||
|
@ -123,8 +146,92 @@ systemctl --user enable onedrive-my-new-config@myusername.service
|
|||
systemctl --user start onedrive-my-new-config@myusername.service
|
||||
```
|
||||
|
||||
### Step 4: Viewing systemd status and logs for the custom service
|
||||
#### Viewing systemd service status - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl status onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Viewing systemd service status - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user status onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
journalctl --unit=onedrive-my-new-config -f
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
journalctl --user --unit=onedrive-my-new-config -f
|
||||
```
|
||||
|
||||
### Step 5: (Optional) Run custom systemd service at boot without user login
|
||||
In some cases it may be desirable for the systemd service to start without having to login as your 'user'
|
||||
|
||||
All the systemd steps above that utilise the `--user` option, will run the systemd service as your particular user. As such, the systemd service will not start unless you actually login to your system.
|
||||
|
||||
To avoid this issue, you need to reconfigure your 'user' account so that the systemd services you have created will startup without you having to login to your system:
|
||||
```text
|
||||
loginctl enable-linger <your_user_name>
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
alex@ubuntu-headless:~$ loginctl enable-linger alex
|
||||
```
|
||||
|
||||
Repeat these steps for each new OneDrive account that you wish to use.
|
||||
|
||||
## Configuring the client to use multiple OneDrive accounts / configurations using Docker
|
||||
In some situations it may be desirable to run multiple Docker containers at the same time, each with their own configuration.
|
||||
|
||||
To run the Docker container successfully, it needs two unique Docker volumes to operate:
|
||||
* Your configuration Docker volumes
|
||||
* Your data Docker volume
|
||||
|
||||
When running multiple Docker containers, this is no different - each Docker container must have its own configuration and data volume.
|
||||
|
||||
### High level steps:
|
||||
1. Create the required unique Docker volumes for the configuration volume
|
||||
2. Create the required unique local path used for the Docker data volume
|
||||
3. Start the multiple Docker containers with the required configuration for each container
|
||||
|
||||
#### Create the required unique Docker volumes for the configuration volume
|
||||
Create the required unique Docker volumes for the configuration volume(s):
|
||||
```text
|
||||
docker volume create onedrive_conf_sharepoint_site1
|
||||
docker volume create onedrive_conf_sharepoint_site2
|
||||
docker volume create onedrive_conf_sharepoint_site3
|
||||
...
|
||||
docker volume create onedrive_conf_sharepoint_site50
|
||||
```
|
||||
|
||||
#### Create the required unique local path used for the Docker data volume
|
||||
Create the required unique local path used for the Docker data volume
|
||||
```text
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite1
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite2
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite3
|
||||
...
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite50
|
||||
```
|
||||
|
||||
#### Start the Docker container with the required configuration (example)
|
||||
```text
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site1:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite1:/onedrive/data" driveone/onedrive:latest
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site2:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite2:/onedrive/data" driveone/onedrive:latest
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site3:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite3:/onedrive/data" driveone/onedrive:latest
|
||||
...
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site50:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite50:/onedrive/data" driveone/onedrive:latest
|
||||
```
|
||||
|
||||
#### TIP
|
||||
To avoid 're-authenticating' and 'authorising' each individual Docker container, if all the Docker containers are using the 'same' OneDrive credentials, you can re-use the 'refresh_token' from one Docker container to another by copying this file to the configuration Docker volume of each Docker container.
|
||||
|
||||
If the account credentials are different .. you will need to re-authenticate each Docker container individually.
|
||||
|
||||
## Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
When dual booting Windows and Linux, depending on the Windows OneDrive account configuration, the 'Files On-Demand' option may be enabled when running OneDrive within your Windows environment.
|
||||
|
||||
|
|
97
docs/application-security.md
Normal file
97
docs/application-security.md
Normal file
|
@ -0,0 +1,97 @@
|
|||
# OneDrive Client for Linux Application Security
|
||||
This document details the following information:
|
||||
|
||||
* Why is this application an 'unverified publisher'?
|
||||
* Application Security and Permission Scopes
|
||||
* How to change Permission Scopes
|
||||
* How to review your existing application access consent
|
||||
|
||||
## Why is this application an 'unverified publisher'?
|
||||
Publisher Verification, as per the Microsoft [process](https://learn.microsoft.com/en-us/azure/active-directory/develop/publisher-verification-overview) has actually been configured, and, actually has been verified!
|
||||
|
||||
### Verified Publisher Configuration Evidence
|
||||
As per the image below, the Azure portal shows that the 'Publisher Domain' has actually been verified:
|
||||
![confirmed_verified_publisher](./images/confirmed_verified_publisher.jpg)
|
||||
|
||||
* The 'Publisher Domain' is: https://abraunegg.github.io/
|
||||
* The required 'Microsoft Identity Association' is: https://abraunegg.github.io/.well-known/microsoft-identity-association.json
|
||||
|
||||
## Application Security and Permission Scopes
|
||||
There are 2 main components regarding security for this application:
|
||||
* Azure Application Permissions
|
||||
* User Authentication Permissions
|
||||
|
||||
Keeping this in mind, security options should follow the security principal of 'least privilege':
|
||||
> The principle that a security architecture should be designed so that each entity
|
||||
> is granted the minimum system resources and authorizations that the entity needs
|
||||
> to perform its function.
|
||||
|
||||
Reference: [https://csrc.nist.gov/glossary/term/least_privilege](https://csrc.nist.gov/glossary/term/least_privilege)
|
||||
|
||||
As such, the following API permissions are used by default:
|
||||
|
||||
### Default Azure Application Permissions
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.Read | Delegated | Have read-only access to user files | No |
|
||||
| Files.Read.All | Delegated | Have read-only access to all files user can access | No |
|
||||
| Sites.Read.All | Delegated | Have read-only access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
![default_authentication_scopes](./images/default_authentication_scopes.jpg)
|
||||
|
||||
### Default User Authentication Permissions
|
||||
|
||||
When a user authenticates with Microsoft OneDrive, additional account permissions are provided by service to give the user specific access to their data. These are delegated permissions provided by the platform:
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
When these delegated API permissions are combined, these provide the effective authentication scope for the OneDrive Client for Linux to access your data. The resulting effective 'default' permissions will be:
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
These 'default' permissions will allow the OneDrive Client for Linux to read, write and delete data associated with your OneDrive Account.
|
||||
|
||||
## Configuring read-only access to your OneDrive data
|
||||
In some situations, it may be desirable to configure the OneDrive Client for Linux totally in read-only operation.
|
||||
|
||||
To change the application to 'read-only' access, add the following to your configuration file:
|
||||
```text
|
||||
read_only_auth_scope = "true"
|
||||
```
|
||||
This will change the user authentication scope request to use read-only access.
|
||||
|
||||
**Note:** When changing this value, you *must* re-authenticate the client using the `--reauth` option to utilise the change in authentication scopes.
|
||||
|
||||
When using read-only authentication scopes, the uploading of any data or local change to OneDrive will fail with the following error:
|
||||
```
|
||||
2022-Aug-06 13:16:45.3349625 ERROR: Microsoft OneDrive API returned an error with the following message:
|
||||
2022-Aug-06 13:16:45.3351661 Error Message: HTTP request returned status code 403 (Forbidden)
|
||||
2022-Aug-06 13:16:45.3352467 Error Reason: Access denied
|
||||
2022-Aug-06 13:16:45.3352838 Error Timestamp: 2022-06-12T13:16:45
|
||||
2022-Aug-06 13:16:45.3353171 API Request ID: <redacted>
|
||||
```
|
||||
|
||||
As such, it is also advisable for you to add the following to your configuration file so that 'uploads' are prevented:
|
||||
```text
|
||||
download_only = "true"
|
||||
```
|
||||
|
||||
**Important:** Additionally when using 'read_only_auth_scope' you also will need to remove your existing application access consent otherwise old authentication consent will be valid and will be used. This will mean the application will technically have the consent to upload data. See below on how to remove your prior application consent.
|
||||
|
||||
## Reviewing your existing application access consent
|
||||
|
||||
To review your existing application access consent, you need to access the following URL: https://account.live.com/consent/Manage
|
||||
|
||||
From here, you are able to review what applications have been given what access to your data, and remove application access as required.
|
|
@ -13,20 +13,20 @@ sudo yum install -y libcurl-devel
|
|||
sudo yum install -y sqlite-devel
|
||||
sudo yum install -y libnotify-devel
|
||||
sudo yum install -y wget
|
||||
sudo yum install -y http://downloads.dlang.org/releases/2.x/2.087.0/dmd-2.087.0-0.fedora.x86_64.rpm
|
||||
sudo yum install -y http://downloads.dlang.org/releases/2.x/2.088.0/dmd-2.088.0-0.fedora.x86_64.rpm
|
||||
mkdir -p ~/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
```
|
||||
|
||||
## Build RPM from spec file
|
||||
Build the RPM from the provided spec file:
|
||||
```text
|
||||
wget https://github.com/abraunegg/onedrive/archive/refs/tags/v2.4.15.tar.gz -O ~/rpmbuild/SOURCES/v2.4.15.tar.gz
|
||||
wget https://github.com/abraunegg/onedrive/archive/refs/tags/v2.4.22.tar.gz -O ~/rpmbuild/SOURCES/v2.4.22.tar.gz
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/contrib/spec/onedrive.spec.in -O ~/rpmbuild/SPECS/onedrive.spec
|
||||
rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
```
|
||||
|
||||
## RPM Build Results
|
||||
Below are output results of building, installing and running the RPM package on the respective platforms:
|
||||
## RPM Build Example Results
|
||||
Below are example output results of building, installing and running the RPM package on the respective platforms:
|
||||
|
||||
### CentOS 7
|
||||
```text
|
||||
|
@ -376,4 +376,4 @@ Config option 'sync_root_files' = false
|
|||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = false
|
||||
[alex@localhost ~]$
|
||||
```
|
||||
```
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 82 KiB After Width: | Height: | Size: 86 KiB |
BIN
docs/images/confirmed_verified_publisher.jpg
Normal file
BIN
docs/images/confirmed_verified_publisher.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 45 KiB |
BIN
docs/images/default_authentication_scopes.jpg
Normal file
BIN
docs/images/default_authentication_scopes.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 86 KiB |
|
@ -6,7 +6,7 @@ The below are known issues with this client:
|
|||
|
||||
**Description:**
|
||||
|
||||
When running the client in standalone mode (`--synchronize`) moving folders that are sucessfully synced around between subseqant standalone syncs causes a deletion & re-upload of data to occur.
|
||||
When running the client in standalone mode (`--synchronize`) moving folders that are successfully synced around between subsequent standalone syncs causes a deletion & re-upload of data to occur.
|
||||
|
||||
**Explanation:**
|
||||
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
# How to configure access to specific Microsoft Azure deployments
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
In some cases it is a requirement to utilise specific Microsoft Azure cloud deployments to conform with data and security requirements that require data to reside within the geographic borders of that country.
|
||||
Current national clouds that are supported are:
|
||||
* Microsoft Cloud for US Government
|
||||
* Microsoft Cloud Germany
|
||||
* Azure and Office 365 operated by 21Vianet in China
|
||||
* Azure and Office365 operated by 21Vianet in China
|
||||
|
||||
In order to sucessfully use these specific Microsoft Azure deployments, the following steps are required:
|
||||
In order to successfully use these specific Microsoft Azure deployments, the following steps are required:
|
||||
1. Register an application with the Microsoft identity platform using the Azure portal
|
||||
2. Configure the new application with the appropriate authentication scopes
|
||||
3. Validate that the authentication / redirect URI is correct for your application registration
|
||||
|
@ -14,7 +18,14 @@ In order to sucessfully use these specific Microsoft Azure deployments, the foll
|
|||
6. Authenticate the client
|
||||
|
||||
## Step 1: Register a new application with Microsoft Azure
|
||||
1. Log into [Microsoft Azure](https://portal.azure.com/) with your applicable identity
|
||||
1. Log into your applicable Microsoft Azure Portal with your applicable Office365 identity:
|
||||
|
||||
| National Cloud Environment | Microsoft Azure Portal |
|
||||
|---|---|
|
||||
| Microsoft Cloud for US Government | https://portal.azure.com/ |
|
||||
| Microsoft Cloud Germany | https://portal.azure.com/ |
|
||||
| Azure and Office365 operated by 21Vianet | https://portal.azure.cn/ |
|
||||
|
||||
2. Select 'Azure Active Directory' as the service you wish to configure
|
||||
3. Under 'Manage', select 'App registrations' to register a new application
|
||||
4. Click 'New registration'
|
||||
|
@ -35,9 +46,8 @@ Configure the API permissions as per the following:
|
|||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
| Sites.Read.All | Delegated | Read items in all site collections | No |
|
||||
| Sites.ReadWrite.All | Delegated | Edit or delete items in all site collections | No |
|
||||
|
||||
![authentication_scopes](./images/authentication_scopes.jpg)
|
||||
|
||||
|
@ -49,12 +59,12 @@ Add the appropriate redirect URI for your Azure deployment:
|
|||
A valid entry for the response URI should be one of:
|
||||
* https://login.microsoftonline.us/common/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/common/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/common/oauth2/nativeclient (Azure and Office 365 operated by 21Vianet in China)
|
||||
* https://login.chinacloudapi.cn/common/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
For a single-tenant application, it may be necessary to use your specific tenant id instead of "common":
|
||||
* https://login.microsoftonline.us/example.onmicrosoft.us/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/example.onmicrosoft.de/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/example.onmicrosoft.cn/oauth2/nativeclient (Azure and Office 365 operated by 21Vianet in China)
|
||||
* https://login.chinacloudapi.cn/example.onmicrosoft.cn/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
## Step 4: Configure the onedrive client to use new application registration
|
||||
Update to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
|
@ -79,7 +89,7 @@ Valid entries are:
|
|||
* USL4 (Microsoft Cloud for US Government)
|
||||
* USL5 (Microsoft Cloud for US Government - DOD)
|
||||
* DE (Microsoft Cloud Germany)
|
||||
* CN (Azure and Office 365 operated by 21Vianet in China)
|
||||
* CN (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
This will configure your client to use the correct Azure AD and Graph endpoints as per [https://docs.microsoft.com/en-us/graph/deployments](https://docs.microsoft.com/en-us/graph/deployments)
|
||||
|
||||
|
|
65
docs/privacy-policy.md
Normal file
65
docs/privacy-policy.md
Normal file
|
@ -0,0 +1,65 @@
|
|||
# Privacy Policy
|
||||
Effective Date: May 16 2018
|
||||
|
||||
## Introduction
|
||||
|
||||
This Privacy Policy outlines how OneDrive Client for Linux ("we," "our," or "us") collects, uses, and protects information when you use our software ("OneDrive Client for Linux"). We respect your privacy and are committed to ensuring the confidentiality and security of any information you provide while using the Software.
|
||||
|
||||
## Information We Do Not Collect
|
||||
|
||||
We want to be transparent about the fact that we do not collect any personal data, usage data, or tracking data through the Software. This means:
|
||||
|
||||
1. **No Personal Data**: We do not collect any information that can be used to personally identify you, such as your name, email address, phone number, or physical address.
|
||||
|
||||
2. **No Usage Data**: We do not collect data about how you use the Software, such as the features you use, the duration of your sessions, or any interactions within the Software.
|
||||
|
||||
3. **No Tracking Data**: We do not use cookies or similar tracking technologies to monitor your online behavior or track your activities across websites or apps.
|
||||
|
||||
## How We Use Your Information
|
||||
|
||||
Since we do not collect any personal, usage, or tracking data, there is no information for us to use for any purpose.
|
||||
|
||||
## Third-Party Services
|
||||
|
||||
The Software may include links to third-party websites or services, but we do not have control over the privacy practices or content of these third-party services. We encourage you to review the privacy policies of any third-party services you access through the Software.
|
||||
|
||||
## Children's Privacy
|
||||
|
||||
Since we do not collect any personal, usage, or tracking data, there is no restriction on the use of this application by anyone under the age of 18.
|
||||
|
||||
## Information You Choose to Share
|
||||
|
||||
While we do not collect personal data, usage data, or tracking data through the Software, there may be instances where you voluntarily choose to share information with us, particularly when submitting bug reports. These bug reports may contain sensitive information such as account details, file names, and directory names. It's important to note that these details are included in the logs and debug logs solely for the purpose of diagnosing and resolving technical issues with the Software.
|
||||
|
||||
We want to emphasize that, even in these cases, we do not have access to your actual data. The logs and debug logs provided in bug reports are used exclusively for technical troubleshooting and debugging purposes. We take measures to treat this information with the utmost care, and it is only accessible to our technical support and development teams. We do not use this information for any other purpose, and we have strict security measures in place to protect it.
|
||||
|
||||
## Protecting Your Sensitive Data
|
||||
|
||||
We are committed to safeguarding your sensitive data and maintaining its confidentiality. To ensure its protection:
|
||||
|
||||
1. **Limited Access**: Only authorized personnel within our technical support and development teams have access to the logs and debug logs containing sensitive data, and they are trained in handling this information securely.
|
||||
|
||||
2. **Data Encryption**: We use industry-standard encryption protocols to protect the transmission and storage of sensitive data.
|
||||
|
||||
3. **Data Retention**: We retain bug report data for a limited time necessary for resolving the reported issue. Once the issue is resolved, we promptly delete or anonymize the data.
|
||||
|
||||
4. **Security Measures**: We employ robust security measures to prevent unauthorized access, disclosure, or alteration of sensitive data.
|
||||
|
||||
By submitting a bug report, you acknowledge and consent to the inclusion of sensitive information in logs and debug logs for the sole purpose of addressing technical issues with the Software.
|
||||
|
||||
## Your Responsibilities
|
||||
|
||||
While we take measures to protect your sensitive data, it is essential for you to exercise caution when submitting bug reports. Please refrain from including any sensitive or personally identifiable information that is not directly related to the technical issue you are reporting. You have the option to redact or obfuscate sensitive details in bug reports to further protect your data.
|
||||
|
||||
## Changes to this Privacy Policy
|
||||
|
||||
We may update this Privacy Policy from time to time to reflect changes in our practices or for other operational, legal, or regulatory reasons. We will notify you of any material changes by posting the updated Privacy Policy on our website or through the Software. We encourage you to review this Privacy Policy periodically.
|
||||
|
||||
## Contact Us
|
||||
|
||||
If you have any questions or concerns about this Privacy Policy or our privacy practices, please contact us at support@mynas.com.au or via GitHub (https://github.com/abraunegg/onedrive)
|
||||
|
||||
## Conclusion
|
||||
|
||||
By using the Software, you agree to the terms outlined in this Privacy Policy. If you do not agree with any part of this policy, please discontinue the use of the Software.
|
||||
|
54
docs/terms-of-service.md
Normal file
54
docs/terms-of-service.md
Normal file
|
@ -0,0 +1,54 @@
|
|||
# OneDrive Client for Linux - Software Service Terms of Service
|
||||
|
||||
## 1. Introduction
|
||||
|
||||
These Terms of Service ("Terms") govern your use of the OneDrive Client for Linux ("Application") software and related Microsoft OneDrive services ("Service") provided by Microsoft. By accessing or using the Service, you agree to comply with and be bound by these Terms. If you do not agree to these Terms, please do not use the Service.
|
||||
|
||||
## 2. License Compliance
|
||||
|
||||
The OneDrive Client for Linux software is licensed under the GNU General Public License, version 3.0 (the "GPLv3"). Your use of the software must comply with the terms and conditions of the GPLv3. A copy of the GPLv3 can be found here: https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
|
||||
## 3. Use of the Service
|
||||
|
||||
### 3.1. Access and Accounts
|
||||
|
||||
You may need to create an account or provide personal information to access certain features of the Service. You are responsible for maintaining the confidentiality of your account information and are solely responsible for all activities that occur under your account.
|
||||
|
||||
### 3.2. Prohibited Activities
|
||||
|
||||
You agree not to:
|
||||
|
||||
- Use the Service in any way that violates applicable laws or regulations.
|
||||
- Use the Service to engage in any unlawful, harmful, or fraudulent activity.
|
||||
- Use the Service in any manner that disrupts, damages, or impairs the Service.
|
||||
|
||||
## 4. Intellectual Property
|
||||
|
||||
The OneDrive Client for Linux software is subject to the GPLv3, and you must respect all copyrights, trademarks, and other intellectual property rights associated with the software. Any contributions you make to the software must also comply with the GPLv3.
|
||||
|
||||
## 5. Disclaimer of Warranties
|
||||
|
||||
The OneDrive Client for Linux software is provided "as is" without any warranties, either expressed or implied. We do not guarantee that the use of the Application will be error-free or uninterrupted.
|
||||
|
||||
Microsoft is not responsible for OneDrive Client for Linux. Any issues or problems with OneDrive Client for Linux should be raised on GitHub at https://github.com/abraunegg/onedrive or email support@mynas.com.au
|
||||
|
||||
OneDrive Client for Linux is not responsible for the Microsoft OneDrive Service or the Microsoft Graph API Service that this Application utilizes. Any issue with either Microsoft OneDrive or Microsoft Graph API should be raised with Microsoft via their support channel in your country.
|
||||
|
||||
## 6. Limitation of Liability
|
||||
|
||||
To the fullest extent permitted by law, we shall not be liable for any direct, indirect, incidental, special, consequential, or punitive damages, or any loss of profits or revenues, whether incurred directly or indirectly, or any loss of data, use, goodwill, or other intangible losses, resulting from (a) your use or inability to use the Service, or (b) any other matter relating to the Service.
|
||||
|
||||
This limitation of liability explicitly relates to the use of the OneDrive Client for Linux software and does not affect your rights under the GPLv3.
|
||||
|
||||
## 7. Changes to Terms
|
||||
|
||||
We reserve the right to update or modify these Terms at any time without prior notice. Any changes will be effective immediately upon posting on GitHub. Your continued use of the Service after the posting of changes constitutes your acceptance of such changes. Changes can be reviewed on GitHub.
|
||||
|
||||
## 8. Governing Law
|
||||
|
||||
These Terms shall be governed by and construed in accordance with the laws of Australia, without regard to its conflict of law principles.
|
||||
|
||||
## 9. Contact Us
|
||||
|
||||
If you have any questions or concerns about these Terms, please contact us at https://github.com/abraunegg/onedrive or email support@mynas.com.au
|
||||
|
|
@ -4,18 +4,38 @@ This document covers the appropriate steps to install the 'onedrive' client usin
|
|||
|
||||
#### Important information for all Ubuntu and Ubuntu based distribution users:
|
||||
This information is specifically for the following platforms and distributions:
|
||||
* Ubuntu
|
||||
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
* Raspbian
|
||||
* Ubuntu
|
||||
|
||||
Whilst there are [onedrive](https://packages.ubuntu.com/search?keywords=onedrive&searchon=names&suite=all§ion=all) Universe packages available for Ubuntu, do not install 'onedrive' from these Universe packages. The default Ubuntu Universe packages are out-of-date and are not supported and should not be used.
|
||||
|
||||
## Determine which instructions to use
|
||||
Ubuntu and its clones are based on various different releases, thus, you must use the correct instructions below, otherwise you may run into package dependency issues and will be unable to install the client.
|
||||
|
||||
### Step 1: Ensure your system is up-to-date
|
||||
### Step 1: Remove any configured PPA and associated 'onedrive' package and systemd service files
|
||||
Many Internet 'help' pages provide inconsistent details on how to install the OneDrive Client for Linux. A number of these websites continue to point users to install the client via the yann1ck PPA repository however this PPA no longer exists and should not be used.
|
||||
|
||||
To remove the PPA repository and the older client, perform the following actions:
|
||||
```text
|
||||
sudo apt remove onedrive
|
||||
sudo add-apt-repository --remove ppa:yann1ck/onedrive
|
||||
```
|
||||
|
||||
Additionally, Ubuntu and its clones have a bad habit of creating a 'default' systemd service file when installing the 'onedrive' package, so that the client will automatically run after being authenticated. This systemd entry is erroneous and needs to be removed.
|
||||
```
|
||||
Created symlink /etc/systemd/user/default.target.wants/onedrive.service → /usr/lib/systemd/user/onedrive.service.
|
||||
```
|
||||
To remove this symbolic link, run the following command:
|
||||
```
|
||||
sudo rm /etc/systemd/user/default.target.wants/onedrive.service
|
||||
```
|
||||
|
||||
### Step 2: Ensure your system is up-to-date
|
||||
Use a script, similar to the following to ensure your system is updated correctly:
|
||||
```text
|
||||
#!/bin/bash
|
||||
|
@ -87,61 +107,59 @@ Reading state information... Done
|
|||
root@ubuntu-20-LTS:~#
|
||||
```
|
||||
|
||||
Reboot your system after running this process before continuing with Step 2.
|
||||
Reboot your system after running this process before continuing with Step 3.
|
||||
```text
|
||||
reboot
|
||||
```
|
||||
|
||||
|
||||
### Step 2: Determine what your OS is based on
|
||||
### Step 3: Determine what your OS is based on
|
||||
Determine what your OS is based on. To do this, run the following command:
|
||||
```text
|
||||
lsb_release -a
|
||||
```
|
||||
**Example:**
|
||||
```text
|
||||
alex@ubuntu-system:~$ lsb_release -a
|
||||
No LSB modules are available.
|
||||
Distributor ID: Ubuntu
|
||||
Description: Ubuntu 22.04 LTS
|
||||
Release: 22.04
|
||||
Codename: jammy
|
||||
```
|
||||
|
||||
### Step 3: Pick the correct instructions to use
|
||||
### Step 4: Pick the correct instructions to use
|
||||
If required, review the table below based on your 'lsb_release' information to pick the appropriate instructions to use:
|
||||
|
||||
| Release & Codename | Instructions to use |
|
||||
|--------------------|---------------------|
|
||||
| 18.x / bionic | You must build from source or upgrade your Operating System Ubuntu 20.x |
|
||||
| Linux Mint 19.x / tina | You must build from source or upgrade your Operating System Linux Mint 20.x |
|
||||
| Linux Mint 20.x / ulyana | Use Ubuntu 20.04 instructions below |
|
||||
| Linux Mint 19.x | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Linux Mint 21.x |
|
||||
| Linux Mint 20.x | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Linux Mint 21.x | Use [Ubuntu 22.04](#distribution-ubuntu-2204) instructions below |
|
||||
| Linux Mint Debian Edition (LMDE) 5 / Elsie | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Linux Mint Debian Edition (LMDE) 6 / Faye | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Debian 9 | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Debian 12 |
|
||||
| Debian 10 | You must build from source or upgrade your Operating System to Debian 12 |
|
||||
| Debian 11 | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Debian 12 | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Raspbian GNU/Linux 10 | You must build from source or upgrade your Operating System to Raspbian GNU/Linux 12 |
|
||||
| Raspbian GNU/Linux 11 | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Raspbian GNU/Linux 12 | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Ubuntu 18.04 / Bionic | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Ubuntu 22.04 |
|
||||
| Ubuntu 20.04 / Focal | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Ubuntu 21.04 / Hirsute | Use [Ubuntu 21.04](#distribution-ubuntu-2104) instructions below |
|
||||
| Ubuntu 21.10 / Impish | Use [Ubuntu 21.10](#distribution-ubuntu-2110) instructions below |
|
||||
| Ubuntu 22.04 / Jammy | Use [Ubuntu 22.04](#distribution-ubuntu-2204) instructions below |
|
||||
| Ubuntu 22.10 / Kinetic | Use [Ubuntu 22.10](#distribution-ubuntu-2210) instructions below |
|
||||
| Ubuntu 23.04 / Lunar | Use [Ubuntu 23.04](#distribution-ubuntu-2304) instructions below |
|
||||
| Ubuntu 23.10 / Mantic | Use [Ubuntu 23.10](#distribution-ubuntu-2310) instructions below |
|
||||
|
||||
## Distribution Package Install Instructions
|
||||
|
||||
### Distribution: Debian 10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|✔|✔|✔|✔| |
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_10/Release.key | sudo apt-key add -
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo 'deb https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_10/ ./' | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Debian 11
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|✔|✔|✔|✔| |
|
||||
|✔|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
|
@ -156,19 +174,46 @@ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-o
|
|||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Debian 12
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|✔|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_12/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_12/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 20.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
❌|✔|✔|✔| |
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
|
@ -183,19 +228,19 @@ echo 'deb https://download.opensuse.org/repositories/home:/npreining:/debian-ubu
|
|||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 21.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
❌|✔|✔|✔| |
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
|
@ -210,19 +255,19 @@ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-o
|
|||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 21.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
❌|✔|✔|✔| |
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
|
@ -237,19 +282,19 @@ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-o
|
|||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 22.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
❌|✔|✔|✔| |
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
|
@ -264,39 +309,102 @@ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-o
|
|||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
1. Run: `sudo apt-get update`
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
1. Run: `sudo apt install onedrive`
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
1. Read and understand the known issues with these packages below, taking any action that is needed.
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 22.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.10/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.10/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 23.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.04/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.04/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 23.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|❌|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.10/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.10/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
|
||||
## Known Issues with Installing from the above packages
|
||||
|
||||
### 1. The 'onedrive' client will automatically startup post 'authentication' without any further actions.
|
||||
The 'onedrive' client will automatically startup post 'authentication' without any further actions. In some circumstances this may be highly undesirable and can also lead to data loss.
|
||||
### 1. The client may segfault | core-dump when exiting
|
||||
When the client is run in `--monitor` mode manually, or when using the systemd service, the client may segfault on exit.
|
||||
|
||||
This is because, when the package is installed, the following symbolic link is created:
|
||||
```text
|
||||
Created symlink /etc/systemd/user/default.target.wants/onedrive.service → /usr/lib/systemd/user/onedrive.service.
|
||||
```
|
||||
|
||||
To resolve this, so that the client is *not* automatically started, without 'enabling' the client yourself, you need to remove this symbolic link:
|
||||
```
|
||||
sudo rm /etc/systemd/user/default.target.wants/onedrive.service
|
||||
```
|
||||
|
||||
This issue is being tracked by: [#1274](https://github.com/abraunegg/onedrive/issues/1274)
|
||||
|
||||
**Important:** It is highly advisable that you remove this symbolic link before you configure or authenticate your client. If you do not remove this symbolic link before you configure or authenticate your client this could lead to multiple copies of the client running, leading to sync conflics and operational issues which may include data loss (data deleted locally & on OneDrive).
|
||||
|
||||
Do not rely on this symbolic link for your systemd configuration to automatically start your onedrive client - refer to [Running 'onedrive' as a system service](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#running-onedrive-as-a-system-service) on how to configure this correctly.
|
||||
|
||||
### 2. The client will segfault | core-dump when exiting
|
||||
When the client is being run in `--monitor` mode manually, or when using the systemd service, the client will segfault on exit.
|
||||
|
||||
This issue is caused by the way the Ubuntu 'onedrive' packages are built using the Ubuntu LDC package & compiler options which is the root cause for this issue. Refer to: https://bugs.launchpad.net/ubuntu/+source/ldc/+bug/1895969
|
||||
This issue is caused by the way the 'onedrive' packages are built using the distribution LDC package & the default distribution compiler options which is the root cause for this issue. Refer to: https://bugs.launchpad.net/ubuntu/+source/ldc/+bug/1895969
|
||||
|
||||
**Additional references:**
|
||||
* https://github.com/abraunegg/onedrive/issues/1053
|
||||
|
@ -304,4 +412,3 @@ This issue is caused by the way the Ubuntu 'onedrive' packages are built using t
|
|||
|
||||
**Resolution Options:**
|
||||
* Uninstall the package and build client from source
|
||||
|
||||
|
|
|
@ -23,6 +23,9 @@ Perform authorization via two files passed in as \fBARG\fP in the format \fBauth
|
|||
The authorization URL is written to the \fBauthUrl\fP, then \fBonedrive\fP waits for
|
||||
the file \fBresponseUrl\fP to be present, and reads the response from that file.
|
||||
.TP
|
||||
\fB\-\-auth\-response\fP ARG
|
||||
Perform authentication not via interactive dialog but via providing the response url directly.
|
||||
.TP
|
||||
\fB\-\-check\-for\-nomount\fP
|
||||
Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
.br
|
||||
|
@ -38,6 +41,11 @@ Number of children in a path that is locally removed which will be classified as
|
|||
.br
|
||||
Configuration file key: \fBclassify_as_big_delete\fP (default: \fB1000\fP)
|
||||
.TP
|
||||
\fB\-\-cleanup\-local\-files\fP
|
||||
Cleanup additional local files when using \-\-download-only. This will remove local data.
|
||||
.br
|
||||
Configuration file key: \fBcleanup_local_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-confdir\fP ARG
|
||||
Set the directory used to store the configuration files
|
||||
.TP
|
||||
|
@ -55,6 +63,11 @@ Configuration file key: \fBdebug_https\fP (default: \fBfalse\fP)
|
|||
\fB\-\-destination\-directory\fP ARG
|
||||
Destination directory for renamed or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-disable\-download\-validation\fP
|
||||
Disable download validation when downloading from OneDrive
|
||||
.br
|
||||
Configuration file key: \fBdisable_download_validation\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-disable\-notifications\fP
|
||||
Do not use desktop notifications in monitor mode
|
||||
.br
|
||||
|
@ -68,6 +81,9 @@ Configuration file key: \fBdisable_upload_validation\fP (default: \fBfalse\fP)
|
|||
\fB\-\-display\-config\fP
|
||||
Display what options the client will use as currently configured \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-display\-running\-config\fP
|
||||
Display what options the client has been configured to use on application startup.
|
||||
.TP
|
||||
\fB\-\-display\-sync\-status\fP
|
||||
Display the sync status of the client \- no sync will be performed.
|
||||
.TP
|
||||
|
@ -89,10 +105,15 @@ Configuration file key: \fBenable_logging\fP (default: \fBfalse\fP)
|
|||
\fB\-\-force\fP
|
||||
Force the deletion of data when a 'big delete' is detected
|
||||
.TP
|
||||
\fB\-\-force\-http\-2\fP
|
||||
Force the use of HTTP/2 for all operations where applicable
|
||||
\fB\-\-force\-http\-11\fP
|
||||
Force the use of HTTP 1.1 for all operations
|
||||
.br
|
||||
Configuration file key: \fBforce_http_2\fP (default: \fBfalse\fP)
|
||||
Configuration file key: \fBforce_http_11\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-force\-sync\fP
|
||||
Force a synchronization of a specific folder, only when using --synchronize --single-directory and ignore
|
||||
.br
|
||||
all non-default skip_dir and skip_file rules
|
||||
.TP
|
||||
\fB\-\-get\-O365\-drive\-id\fP ARG
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library
|
||||
|
@ -116,12 +137,15 @@ defines the directory where logging output is saved to, needs to end with a slas
|
|||
.br
|
||||
Configuration file key: \fBlog_dir\fP (default: \fB/var/log/onedrive/\fP)
|
||||
.TP
|
||||
\fB\-\-min-notify-changes\fP
|
||||
\fB\-\-min\-notify\-changes\fP
|
||||
the minimum number of pending incoming changes necessary to trigger
|
||||
a desktop notification
|
||||
.br
|
||||
Configuration file key: \fBmin_notify_changes\fP (default: \fB5\fP)
|
||||
.TP
|
||||
\fB\-m \-\-modified\-by\fP ARG
|
||||
Display the last modified by details of a given path
|
||||
.TP
|
||||
\fB\-m \-\-monitor\fP
|
||||
Keep monitoring for local and remote changes
|
||||
.TP
|
||||
|
@ -154,8 +178,8 @@ Configuration file key: \fBoperation_timeout\fP (default: \fB3600\fP)
|
|||
\fB\-\-print\-token\fP
|
||||
Print the access token, useful for debugging
|
||||
.TP
|
||||
\fB\-\-resync\fP
|
||||
Forget the last saved state, perform a full sync
|
||||
\fB\-\-reauth\fP
|
||||
Reauthenticate the client with OneDrive
|
||||
.TP
|
||||
\fB\-\-remove\-directory\fP ARG
|
||||
Remove a directory on OneDrive \- no sync will be performed.
|
||||
|
@ -165,6 +189,12 @@ Remove source file after successful transfer to OneDrive when using \-\-upload-o
|
|||
.br
|
||||
Configuration file key: \fBremove_source_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-resync\fP
|
||||
Forget the last saved state, perform a full sync
|
||||
.TP
|
||||
\fB\-\-resync\-auth\fP
|
||||
Approve the use of performing a --resync action without needing CLI authorization
|
||||
.TP
|
||||
\fB\-\-single\-directory\fP ARG
|
||||
Specify a single local directory within the OneDrive root to sync.
|
||||
.TP
|
||||
|
@ -197,9 +227,11 @@ Configuration file key: \fBskip_symlinks\fP (default: \fBfalse\fP)
|
|||
\fB\-\-source\-directory\fP ARG
|
||||
Source directory to rename or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-space\-reservation\fP ARG
|
||||
The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation
|
||||
.TP
|
||||
\fB\-\-sync\-root\-files\fP
|
||||
Sync all files in sync_dir root when using sync_list.
|
||||
|
||||
.TP
|
||||
\fB\-\-sync\-shared\-folders\fP
|
||||
Sync OneDrive Business Shared Folders
|
||||
|
@ -231,6 +263,9 @@ enables even more verbose debug statements.
|
|||
\fB\-\-version\fP
|
||||
Print the version and exit
|
||||
.TP
|
||||
\fB\-\-with\-editing\-perms\fP
|
||||
Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>
|
||||
.TP
|
||||
\fB\-h \-\-help\fP
|
||||
This help information.
|
||||
.PP
|
||||
|
|
2212
src/arsd/cgi.d
2212
src/arsd/cgi.d
File diff suppressed because it is too large
Load diff
166
src/config.d
166
src/config.d
|
@ -43,6 +43,35 @@ final class Config
|
|||
// Default file permission mode
|
||||
public long defaultFilePermissionMode = 600;
|
||||
public int configuredFilePermissionMode;
|
||||
|
||||
// Bring in v2.5.0 config items
|
||||
|
||||
// HTTP Struct items, used for configuring HTTP()
|
||||
// Curl Timeout Handling
|
||||
// libcurl dns_cache_timeout timeout
|
||||
immutable int defaultDnsTimeout = 60;
|
||||
// Connect timeout for HTTP|HTTPS connections
|
||||
immutable int defaultConnectTimeout = 10;
|
||||
// With the following settings we force
|
||||
// - if there is no data flow for 10min, abort
|
||||
// - if the download time for one item exceeds 1h, abort
|
||||
//
|
||||
// Timeout for activity on connection
|
||||
// this translates into Curl's CURLOPT_LOW_SPEED_TIME
|
||||
// which says:
|
||||
// It contains the time in number seconds that the
|
||||
// transfer speed should be below the CURLOPT_LOW_SPEED_LIMIT
|
||||
// for the library to consider it too slow and abort.
|
||||
immutable int defaultDataTimeout = 600;
|
||||
// Maximum time any operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
immutable int defaultOperationTimeout = 3600;
|
||||
// Specify how many redirects should be allowed
|
||||
immutable int defaultMaxRedirects = 5;
|
||||
// Specify what IP protocol version should be used when communicating with OneDrive
|
||||
immutable int defaultIpProtocol = 0; // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
|
||||
|
||||
this(string confdirOption)
|
||||
{
|
||||
|
@ -62,7 +91,7 @@ final class Config
|
|||
boolValues["disable_download_validation"] = false;
|
||||
boolValues["disable_upload_validation"] = false;
|
||||
boolValues["enable_logging"] = false;
|
||||
boolValues["force_http_2"] = false;
|
||||
boolValues["force_http_11"] = false;
|
||||
boolValues["local_first"] = false;
|
||||
boolValues["no_remote_delete"] = false;
|
||||
boolValues["skip_symlinks"] = false;
|
||||
|
@ -75,7 +104,7 @@ final class Config
|
|||
longValues["monitor_interval"] = 300;
|
||||
longValues["skip_size"] = 0;
|
||||
longValues["min_notify_changes"] = 5;
|
||||
longValues["monitor_log_frequency"] = 5;
|
||||
longValues["monitor_log_frequency"] = 6;
|
||||
// Number of N sync runs before performing a full local scan of sync_dir
|
||||
// By default 12 which means every ~60 minutes a full disk scan of sync_dir will occur
|
||||
// 'monitor_interval' * 'monitor_fullscan_frequency' = 3600 = 1 hour
|
||||
|
@ -122,10 +151,8 @@ final class Config
|
|||
longValues["sync_file_permissions"] = defaultFilePermissionMode;
|
||||
// Configure download / upload rate limits
|
||||
longValues["rate_limit"] = 0;
|
||||
// maximum time an operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
longValues["operation_timeout"] = 3600;
|
||||
|
||||
// To ensure we do not fill up the load disk, how much disk space should be reserved by default
|
||||
longValues["space_reservation"] = 50 * 2^^20; // 50 MB as Bytes
|
||||
// Webhook options
|
||||
boolValues["webhook_enabled"] = false;
|
||||
stringValues["webhook_public_url"] = "";
|
||||
|
@ -133,6 +160,12 @@ final class Config
|
|||
longValues["webhook_listening_port"] = 8888;
|
||||
longValues["webhook_expiration_interval"] = 3600 * 24;
|
||||
longValues["webhook_renewal_interval"] = 3600 * 12;
|
||||
// Log to application output running configuration values
|
||||
boolValues["display_running_config"] = false;
|
||||
// Configure read-only authentication scope
|
||||
boolValues["read_only_auth_scope"] = false;
|
||||
// Flag to cleanup local files when using --download-only
|
||||
boolValues["cleanup_local_files"] = false;
|
||||
|
||||
// DEVELOPER OPTIONS
|
||||
// display_memory = true | false
|
||||
|
@ -146,7 +179,29 @@ final class Config
|
|||
// display_sync_options = true | false
|
||||
// - It may be desirable to see what options are being passed in to performSync() without enabling the full verbose debug logging
|
||||
boolValues["display_sync_options"] = false;
|
||||
|
||||
// force_children_scan = true | false
|
||||
// - Force client to use /children rather than /delta to query changes on OneDrive
|
||||
// - This option flags nationalCloudDeployment as true, forcing the client to act like it is using a National Cloud Deployment
|
||||
boolValues["force_children_scan"] = false;
|
||||
// display_processing_time = true | false
|
||||
// - Enabling this option will add function processing times to the console output
|
||||
// - This then enables tracking of where the application is spending most amount of time when processing data when users have questions re performance
|
||||
boolValues["display_processing_time"] = false;
|
||||
|
||||
// HTTPS & CURL Operation Settings
|
||||
// - Maximum time an operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
longValues["operation_timeout"] = defaultOperationTimeout;
|
||||
// libcurl dns_cache_timeout timeout
|
||||
longValues["dns_timeout"] = defaultDnsTimeout;
|
||||
// Timeout for HTTPS connections
|
||||
longValues["connect_timeout"] = defaultConnectTimeout;
|
||||
// Timeout for activity on a HTTPS connection
|
||||
longValues["data_timeout"] = defaultDataTimeout;
|
||||
// What IP protocol version should be used when communicating with OneDrive
|
||||
longValues["ip_protocol_version"] = defaultIpProtocol; // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
// EXPAND USERS HOME DIRECTORY
|
||||
// Determine the users home directory.
|
||||
// Need to avoid using ~ here as expandTilde() below does not interpret correctly when running under init.d or systemd scripts
|
||||
// Check for HOME environment variable
|
||||
|
@ -176,6 +231,8 @@ final class Config
|
|||
string systemConfigDirBase;
|
||||
if (confdirOption != "") {
|
||||
// A CLI 'confdir' was passed in
|
||||
// Clean up any stray " .. these should not be there ...
|
||||
confdirOption = strip(confdirOption,"\"");
|
||||
log.vdebug("configDirName: CLI override to set configDirName to: ", confdirOption);
|
||||
if (canFind(confdirOption,"~")) {
|
||||
// A ~ was found
|
||||
|
@ -213,6 +270,20 @@ final class Config
|
|||
mkdirRecurse(configDirName);
|
||||
// Configure the applicable permissions for the folder
|
||||
configDirName.setAttributes(returnRequiredDirectoryPermisions());
|
||||
} else {
|
||||
// The config path exists
|
||||
// The path that exists must be a directory, not a file
|
||||
if (!isDir(configDirName)) {
|
||||
if (!confdirOption.empty) {
|
||||
// the configuration path was passed in by the user .. user error
|
||||
writeln("ERROR: --confdir entered value is an existing file instead of an existing directory");
|
||||
} else {
|
||||
// other error
|
||||
writeln("ERROR: ~/.config/onedrive is a file rather than a directory");
|
||||
}
|
||||
// Must exit
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
// configDirName has a trailing /
|
||||
|
@ -303,6 +374,8 @@ final class Config
|
|||
boolValues["synchronize"] = false;
|
||||
boolValues["force"] = false;
|
||||
boolValues["list_business_shared_folders"] = false;
|
||||
boolValues["force_sync"] = false;
|
||||
boolValues["with_editing_perms"] = false;
|
||||
|
||||
// Application Startup option validation
|
||||
try {
|
||||
|
@ -319,7 +392,7 @@ final class Config
|
|||
"Perform authentication not via interactive dialog but via files read/writes to these files.",
|
||||
&stringValues["auth_files"],
|
||||
"auth-response",
|
||||
"Perform authentication not via interactive dialog but via providing the reponse url directly.",
|
||||
"Perform authentication not via interactive dialog but via providing the response url directly.",
|
||||
&stringValues["auth_response"],
|
||||
"check-for-nomount",
|
||||
"Check for the presence of .nosync in the syncdir root. If found, do not perform sync.",
|
||||
|
@ -330,6 +403,9 @@ final class Config
|
|||
"classify-as-big-delete",
|
||||
"Number of children in a path that is locally removed which will be classified as a 'big data delete'",
|
||||
&longValues["classify_as_big_delete"],
|
||||
"cleanup-local-files",
|
||||
"Cleanup additional local files when using --download-only. This will remove local data.",
|
||||
&boolValues["cleanup_local_files"],
|
||||
"create-directory",
|
||||
"Create a directory on OneDrive - no sync will be performed.",
|
||||
&stringValues["create_directory"],
|
||||
|
@ -354,6 +430,9 @@ final class Config
|
|||
"display-config",
|
||||
"Display what options the client will use as currently configured - no sync will be performed.",
|
||||
&boolValues["display_config"],
|
||||
"display-running-config",
|
||||
"Display what options the client has been configured to use on application startup.",
|
||||
&boolValues["display_running_config"],
|
||||
"display-sync-status",
|
||||
"Display the sync status of the client - no sync will be performed.",
|
||||
&boolValues["display_sync_status"],
|
||||
|
@ -366,12 +445,15 @@ final class Config
|
|||
"enable-logging",
|
||||
"Enable client activity to a separate log file",
|
||||
&boolValues["enable_logging"],
|
||||
"force-http-2",
|
||||
"Force the use of HTTP/2 for all operations where applicable",
|
||||
&boolValues["force_http_2"],
|
||||
"force-http-11",
|
||||
"Force the use of HTTP 1.1 for all operations",
|
||||
&boolValues["force_http_11"],
|
||||
"force",
|
||||
"Force the deletion of data when a 'big delete' is detected",
|
||||
&boolValues["force"],
|
||||
"force-sync",
|
||||
"Force a synchronization of a specific folder, only when using --synchronize --single-directory and ignore all non-default skip_dir and skip_file rules",
|
||||
&boolValues["force_sync"],
|
||||
"get-file-link",
|
||||
"Display the file link of a synced file",
|
||||
&stringValues["get_file_link"],
|
||||
|
@ -408,9 +490,6 @@ final class Config
|
|||
"no-remote-delete",
|
||||
"Do not delete local file 'deletes' from OneDrive when using --upload-only",
|
||||
&boolValues["no_remote_delete"],
|
||||
"operation-timeout",
|
||||
"Maximum amount of time (in seconds) an operation is allowed to take",
|
||||
&longValues["operation_timeout"],
|
||||
"print-token",
|
||||
"Print the access token, useful for debugging",
|
||||
&boolValues["print_token"],
|
||||
|
@ -453,6 +532,9 @@ final class Config
|
|||
"source-directory",
|
||||
"Source directory to rename or move on OneDrive - no sync will be performed.",
|
||||
&stringValues["source_directory"],
|
||||
"space-reservation",
|
||||
"The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation",
|
||||
&longValues["space_reservation"],
|
||||
"syncdir",
|
||||
"Specify the local directory used for synchronization to OneDrive",
|
||||
&stringValues["sync_dir"],
|
||||
|
@ -482,7 +564,10 @@ final class Config
|
|||
&boolValues["list_business_shared_folders"],
|
||||
"sync-shared-folders",
|
||||
"Sync OneDrive Business Shared Folders",
|
||||
&boolValues["sync_business_shared_folders"]
|
||||
&boolValues["sync_business_shared_folders"],
|
||||
"with-editing-perms",
|
||||
"Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>",
|
||||
&boolValues["with_editing_perms"]
|
||||
);
|
||||
if (opt.helpWanted) {
|
||||
outputLongHelp(opt.options);
|
||||
|
@ -549,9 +634,19 @@ final class Config
|
|||
private bool load(string filename)
|
||||
{
|
||||
// configure function variables
|
||||
try {
|
||||
readText(filename);
|
||||
} catch (std.file.FileException e) {
|
||||
// Unable to access required file
|
||||
log.error("ERROR: Unable to access ", e.msg);
|
||||
// Use exit scopes to shutdown API
|
||||
return false;
|
||||
}
|
||||
|
||||
// We were able to readText the config file - so, we should be able to open and read it
|
||||
auto file = File(filename, "r");
|
||||
string lineBuffer;
|
||||
|
||||
|
||||
// configure scopes
|
||||
// - failure
|
||||
scope(failure) {
|
||||
|
@ -560,7 +655,6 @@ final class Config
|
|||
// close open file
|
||||
file.close();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
// - exit
|
||||
scope(exit) {
|
||||
|
@ -617,7 +711,13 @@ final class Config
|
|||
setValueString("skip_dir", configFileSkipDir);
|
||||
}
|
||||
}
|
||||
|
||||
// --single-directory Strip quotation marks from path
|
||||
// This is an issue when using ONEDRIVE_SINGLE_DIRECTORY with Docker
|
||||
if (key == "single_directory") {
|
||||
// Strip quotation marks from provided path
|
||||
string configSingleDirectory = strip(to!string(c.front.dup), "\"");
|
||||
setValueString("single_directory", configSingleDirectory);
|
||||
}
|
||||
// Azure AD Configuration
|
||||
if (key == "azure_ad_endpoint") {
|
||||
string azureConfigValue = c.front.dup;
|
||||
|
@ -647,6 +747,16 @@ final class Config
|
|||
if (ppp) {
|
||||
c.popFront();
|
||||
setValueLong(key, to!long(c.front.dup));
|
||||
// if key is space_reservation we have to calculate MB -> bytes
|
||||
if (key == "space_reservation") {
|
||||
// temp value
|
||||
ulong tempValue = to!long(c.front.dup);
|
||||
// a value of 0 needs to be made at least 1MB ..
|
||||
if (tempValue == 0) {
|
||||
tempValue = 1;
|
||||
}
|
||||
setValueLong("space_reservation", to!long(tempValue * 2^^20));
|
||||
}
|
||||
} else {
|
||||
log.log("Unknown key in config file: ", key);
|
||||
return false;
|
||||
|
@ -713,11 +823,27 @@ final class Config
|
|||
}
|
||||
return configuredFilePermissionMode;
|
||||
}
|
||||
|
||||
void resetSkipToDefaults() {
|
||||
// reset skip_file and skip_dir to application defaults
|
||||
// skip_file
|
||||
log.vdebug("original skip_file: ", getValueString("skip_file"));
|
||||
log.vdebug("resetting skip_file");
|
||||
setValueString("skip_file", defaultSkipFile);
|
||||
log.vdebug("reset skip_file: ", getValueString("skip_file"));
|
||||
// skip_dir
|
||||
log.vdebug("original skip_dir: ", getValueString("skip_dir"));
|
||||
log.vdebug("resetting skip_dir");
|
||||
setValueString("skip_dir", defaultSkipDir);
|
||||
log.vdebug("reset skip_dir: ", getValueString("skip_dir"));
|
||||
}
|
||||
}
|
||||
|
||||
void outputLongHelp(Option[] opt)
|
||||
{
|
||||
auto argsNeedingOptions = [
|
||||
"--auth-files",
|
||||
"--auth-response",
|
||||
"--confdir",
|
||||
"--create-directory",
|
||||
"--create-share-link",
|
||||
|
@ -730,10 +856,14 @@ void outputLongHelp(Option[] opt)
|
|||
"--monitor-interval",
|
||||
"--monitor-log-frequency",
|
||||
"--monitor-fullscan-frequency",
|
||||
"--operation-timeout",
|
||||
"--remove-directory",
|
||||
"--single-directory",
|
||||
"--skip-dir",
|
||||
"--skip-file",
|
||||
"--skip-size",
|
||||
"--source-directory",
|
||||
"--space-reservation",
|
||||
"--syncdir",
|
||||
"--user-agent" ];
|
||||
writeln(`OneDrive - a client for OneDrive Cloud Services
|
||||
|
|
74
src/itemdb.d
74
src/itemdb.d
|
@ -23,9 +23,8 @@ struct Item {
|
|||
string cTag;
|
||||
SysTime mtime;
|
||||
string parentId;
|
||||
string crc32Hash;
|
||||
string sha1Hash;
|
||||
string quickXorHash;
|
||||
string sha256Hash;
|
||||
string remoteDriveId;
|
||||
string remoteId;
|
||||
string syncStatus;
|
||||
|
@ -34,7 +33,7 @@ struct Item {
|
|||
final class ItemDatabase
|
||||
{
|
||||
// increment this for every change in the db schema
|
||||
immutable int itemDatabaseVersion = 10;
|
||||
immutable int itemDatabaseVersion = 11;
|
||||
|
||||
Database db;
|
||||
string insertItemStmt;
|
||||
|
@ -42,6 +41,7 @@ final class ItemDatabase
|
|||
string selectItemByIdStmt;
|
||||
string selectItemByParentIdStmt;
|
||||
string deleteItemByIdStmt;
|
||||
bool databaseInitialised = false;
|
||||
|
||||
this(const(char)[] filename)
|
||||
{
|
||||
|
@ -51,8 +51,17 @@ final class ItemDatabase
|
|||
dbVersion = db.getVersion();
|
||||
} catch (SqliteException e) {
|
||||
// An error was generated - what was the error?
|
||||
log.error("\nAn internal database error occurred: " ~ e.msg ~ "\n");
|
||||
exit(-1);
|
||||
if (e.msg == "database is locked") {
|
||||
writeln();
|
||||
log.error("ERROR: onedrive application is already running - check system process list for active application instances");
|
||||
log.vlog(" - Use 'sudo ps aufxw | grep onedrive' to potentially determine acive running process");
|
||||
writeln();
|
||||
} else {
|
||||
writeln();
|
||||
log.error("ERROR: An internal database error occurred: " ~ e.msg);
|
||||
writeln();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (dbVersion == 0) {
|
||||
|
@ -84,14 +93,18 @@ final class ItemDatabase
|
|||
// https://www.sqlite.org/pragma.html#pragma_auto_vacuum
|
||||
// PRAGMA schema.auto_vacuum = 0 | NONE | 1 | FULL | 2 | INCREMENTAL;
|
||||
db.exec("PRAGMA auto_vacuum = FULL");
|
||||
// This pragma sets or queries the database connection locking-mode. The locking-mode is either NORMAL or EXCLUSIVE.
|
||||
// https://www.sqlite.org/pragma.html#pragma_locking_mode
|
||||
// PRAGMA schema.locking_mode = NORMAL | EXCLUSIVE
|
||||
db.exec("PRAGMA locking_mode = EXCLUSIVE");
|
||||
|
||||
insertItemStmt = "
|
||||
INSERT OR REPLACE INTO item (driveId, id, name, type, eTag, cTag, mtime, parentId, crc32Hash, sha1Hash, quickXorHash, remoteDriveId, remoteId, syncStatus)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14)
|
||||
INSERT OR REPLACE INTO item (driveId, id, name, type, eTag, cTag, mtime, parentId, quickXorHash, sha256Hash, remoteDriveId, remoteId, syncStatus)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13)
|
||||
";
|
||||
updateItemStmt = "
|
||||
UPDATE item
|
||||
SET name = ?3, type = ?4, eTag = ?5, cTag = ?6, mtime = ?7, parentId = ?8, crc32Hash = ?9, sha1Hash = ?10, quickXorHash = ?11, remoteDriveId = ?12, remoteId = ?13, syncStatus = ?14
|
||||
SET name = ?3, type = ?4, eTag = ?5, cTag = ?6, mtime = ?7, parentId = ?8, quickXorHash = ?9, sha256Hash = ?10, remoteDriveId = ?11, remoteId = ?12, syncStatus = ?13
|
||||
WHERE driveId = ?1 AND id = ?2
|
||||
";
|
||||
selectItemByIdStmt = "
|
||||
|
@ -101,6 +114,14 @@ final class ItemDatabase
|
|||
";
|
||||
selectItemByParentIdStmt = "SELECT * FROM item WHERE driveId = ? AND parentId = ?";
|
||||
deleteItemByIdStmt = "DELETE FROM item WHERE driveId = ? AND id = ?";
|
||||
|
||||
// flag that the database is accessible and we have control
|
||||
databaseInitialised = true;
|
||||
}
|
||||
|
||||
bool isDatabaseInitialised()
|
||||
{
|
||||
return databaseInitialised;
|
||||
}
|
||||
|
||||
void createTable()
|
||||
|
@ -114,9 +135,8 @@ final class ItemDatabase
|
|||
cTag TEXT,
|
||||
mtime TEXT NOT NULL,
|
||||
parentId TEXT,
|
||||
crc32Hash TEXT,
|
||||
sha1Hash TEXT,
|
||||
quickXorHash TEXT,
|
||||
sha256Hash TEXT,
|
||||
remoteDriveId TEXT,
|
||||
remoteId TEXT,
|
||||
deltaLink TEXT,
|
||||
|
@ -299,19 +319,18 @@ final class ItemDatabase
|
|||
bind(6, cTag);
|
||||
bind(7, mtime.toISOExtString());
|
||||
bind(8, parentId);
|
||||
bind(9, crc32Hash);
|
||||
bind(10, sha1Hash);
|
||||
bind(11, quickXorHash);
|
||||
bind(12, remoteDriveId);
|
||||
bind(13, remoteId);
|
||||
bind(14, syncStatus);
|
||||
bind(9, quickXorHash);
|
||||
bind(10, sha256Hash);
|
||||
bind(11, remoteDriveId);
|
||||
bind(12, remoteId);
|
||||
bind(13, syncStatus);
|
||||
}
|
||||
}
|
||||
|
||||
private Item buildItem(Statement.Result result)
|
||||
{
|
||||
assert(!result.empty, "The result must not be empty");
|
||||
assert(result.front.length == 15, "The result must have 15 columns");
|
||||
assert(result.front.length == 14, "The result must have 14 columns");
|
||||
Item item = {
|
||||
driveId: result.front[0].dup,
|
||||
id: result.front[1].dup,
|
||||
|
@ -320,12 +339,11 @@ final class ItemDatabase
|
|||
cTag: result.front[5].dup,
|
||||
mtime: SysTime.fromISOExtString(result.front[6]),
|
||||
parentId: result.front[7].dup,
|
||||
crc32Hash: result.front[8].dup,
|
||||
sha1Hash: result.front[9].dup,
|
||||
quickXorHash: result.front[10].dup,
|
||||
remoteDriveId: result.front[11].dup,
|
||||
remoteId: result.front[12].dup,
|
||||
syncStatus: result.front[14].dup
|
||||
quickXorHash: result.front[8].dup,
|
||||
sha256Hash: result.front[9].dup,
|
||||
remoteDriveId: result.front[10].dup,
|
||||
remoteId: result.front[11].dup,
|
||||
syncStatus: result.front[12].dup
|
||||
};
|
||||
switch (result.front[3]) {
|
||||
case "file": item.type = ItemType.file; break;
|
||||
|
@ -481,8 +499,14 @@ final class ItemDatabase
|
|||
// Perform a vacuum on the database, commit WAL / SHM to file
|
||||
void performVacuum()
|
||||
{
|
||||
auto stmt = db.prepare("VACUUM;");
|
||||
stmt.exec();
|
||||
try {
|
||||
auto stmt = db.prepare("VACUUM;");
|
||||
stmt.exec();
|
||||
} catch (SqliteException e) {
|
||||
writeln();
|
||||
log.error("ERROR: Unable to perform a database vacuum: " ~ e.msg);
|
||||
writeln();
|
||||
}
|
||||
}
|
||||
|
||||
// Select distinct driveId items from database
|
||||
|
|
1040
src/main.d
1040
src/main.d
File diff suppressed because it is too large
Load diff
|
@ -7,4 +7,4 @@ dnotify.d
|
|||
|
||||
notify.d
|
||||
https://github.com/D-Programming-Deimos/libnotify/blob/master/deimos/notify/notify.d
|
||||
License: GPL 2.1 or upwards, see file
|
||||
License: GNU Lesser General Public License (LGPL) 2.1 or upwards, see file
|
||||
|
|
188
src/onedrive.d
188
src/onedrive.d
|
@ -214,9 +214,9 @@ final class OneDriveApi
|
|||
http = HTTP();
|
||||
// Curl Timeout Handling
|
||||
// libcurl dns_cache_timeout timeout
|
||||
http.dnsTimeout = (dur!"seconds"(60));
|
||||
http.dnsTimeout = (dur!"seconds"(cfg.getValueLong("dns_timeout")));
|
||||
// Timeout for HTTPS connections
|
||||
http.connectTimeout = (dur!"seconds"(10));
|
||||
http.connectTimeout = (dur!"seconds"(cfg.getValueLong("connect_timeout")));
|
||||
// with the following settings we force
|
||||
// - if there is no data flow for 10min, abort
|
||||
// - if the download time for one item exceeds 1h, abort
|
||||
|
@ -227,17 +227,27 @@ final class OneDriveApi
|
|||
// It contains the time in number seconds that the
|
||||
// transfer speed should be below the CURLOPT_LOW_SPEED_LIMIT
|
||||
// for the library to consider it too slow and abort.
|
||||
http.dataTimeout = (dur!"seconds"(600));
|
||||
http.dataTimeout = (dur!"seconds"(cfg.getValueLong("data_timeout")));
|
||||
// maximum time an operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
http.operationTimeout = (dur!"seconds"(cfg.getValueLong("operation_timeout")));
|
||||
// What IP protocol version should be used when using Curl - IPv4 & IPv6, IPv4 or IPv6
|
||||
http.handle.set(CurlOption.ipresolve,cfg.getValueLong("ip_protocol_version")); // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
// Specify how many redirects should be allowed
|
||||
http.maxRedirects(5);
|
||||
http.maxRedirects(cfg.defaultMaxRedirects);
|
||||
|
||||
// Do we enable curl debugging?
|
||||
if (cfg.getValueBool("debug_https")) {
|
||||
http.verbose = true;
|
||||
.debugResponse = true;
|
||||
|
||||
// Output what options we are using so that in the debug log this can be tracked
|
||||
log.vdebug("http.dnsTimeout = ", cfg.getValueLong("dns_timeout"));
|
||||
log.vdebug("http.connectTimeout = ", cfg.getValueLong("connect_timeout"));
|
||||
log.vdebug("http.dataTimeout = ", cfg.getValueLong("data_timeout"));
|
||||
log.vdebug("http.operationTimeout = ", cfg.getValueLong("operation_timeout"));
|
||||
log.vdebug("http.CurlOption.ipresolve = ", cfg.getValueLong("ip_protocol_version"));
|
||||
log.vdebug("http.maxRedirects = ", cfg.defaultMaxRedirects);
|
||||
}
|
||||
|
||||
// Update clientId if application_id is set in config file
|
||||
|
@ -424,15 +434,14 @@ final class OneDriveApi
|
|||
|
||||
// What version of HTTP protocol do we use?
|
||||
// Curl >= 7.62.0 defaults to http2 for a significant number of operations
|
||||
if (cfg.getValueBool("force_http_2")) {
|
||||
// Use curl defaults
|
||||
log.vdebug("Upgrading all HTTP operations to HTTP/2 where applicable");
|
||||
} else {
|
||||
// Downgrade curl by default due to silent exist issues when using http/2
|
||||
// See issue #501 for details and discussion
|
||||
log.vdebug("Downgrading all HTTP operations to HTTP/1.1 by default");
|
||||
if (cfg.getValueBool("force_http_11")) {
|
||||
// Downgrade to curl to use HTTP 1.1 for all operations
|
||||
log.vlog("Downgrading all HTTP operations to HTTP/1.1 due to user configuration");
|
||||
// Downgrade to HTTP 1.1 - yes version = 2 is HTTP 1.1
|
||||
http.handle.set(CurlOption.http_version,2);
|
||||
} else {
|
||||
// Use curl defaults
|
||||
log.vlog("Using Curl defaults for all HTTP operations");
|
||||
}
|
||||
|
||||
// Configure upload / download rate limits if configured
|
||||
|
@ -558,7 +567,17 @@ final class OneDriveApi
|
|||
{
|
||||
import std.stdio, std.regex;
|
||||
char[] response;
|
||||
string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=Files.ReadWrite%20Files.ReadWrite.all%20Sites.Read.All%20Sites.ReadWrite.All%20offline_access&response_type=code&prompt=login&redirect_uri=" ~ redirectUrl;
|
||||
string authScope;
|
||||
// What authentication scope to use?
|
||||
if (cfg.getValueBool("read_only_auth_scope")) {
|
||||
// read-only authentication scopes has been requested
|
||||
authScope = "&scope=Files.Read%20Files.Read.All%20Sites.Read.All%20offline_access&response_type=code&prompt=login&redirect_uri=";
|
||||
} else {
|
||||
// read-write authentication scopes will be used (default)
|
||||
authScope = "&scope=Files.ReadWrite%20Files.ReadWrite.All%20Sites.ReadWrite.All%20offline_access&response_type=code&prompt=login&redirect_uri=";
|
||||
}
|
||||
|
||||
string url = authUrl ~ "?client_id=" ~ clientId ~ authScope ~ redirectUrl;
|
||||
string authFilesString = cfg.getValueString("auth_files");
|
||||
string authResponseString = cfg.getValueString("auth_response");
|
||||
if (authResponseString != "") {
|
||||
|
@ -567,9 +586,19 @@ final class OneDriveApi
|
|||
string[] authFiles = authFilesString.split(":");
|
||||
string authUrl = authFiles[0];
|
||||
string responseUrl = authFiles[1];
|
||||
auto authUrlFile = File(authUrl, "w");
|
||||
authUrlFile.write(url);
|
||||
authUrlFile.close();
|
||||
|
||||
try {
|
||||
// Try and write out the auth URL to the nominated file
|
||||
auto authUrlFile = File(authUrl, "w");
|
||||
authUrlFile.write(url);
|
||||
authUrlFile.close();
|
||||
} catch (std.exception.ErrnoException e) {
|
||||
// There was a file system error
|
||||
// display the error message
|
||||
displayFileSystemErrorMessage(e.msg, getFunctionName!({}));
|
||||
return false;
|
||||
}
|
||||
|
||||
while (!exists(responseUrl)) {
|
||||
Thread.sleep(dur!("msecs")(100));
|
||||
}
|
||||
|
@ -600,13 +629,19 @@ final class OneDriveApi
|
|||
// match the authorization code
|
||||
auto c = matchFirst(response, r"(?:[\?&]code=)([\w\d-.]+)");
|
||||
if (c.empty) {
|
||||
log.log("Invalid uri");
|
||||
log.log("Invalid response uri entered");
|
||||
return false;
|
||||
}
|
||||
c.popFront(); // skip the whole match
|
||||
redeemToken(c.front);
|
||||
return true;
|
||||
}
|
||||
|
||||
string getSiteSearchUrl()
|
||||
{
|
||||
// Return the actual siteSearchUrl being used and/or requested when performing 'siteQuery = onedrive.o365SiteSearch(nextLink);' call
|
||||
return .siteSearchUrl;
|
||||
}
|
||||
|
||||
ulong getRetryAfterValue()
|
||||
{
|
||||
|
@ -744,7 +779,7 @@ final class OneDriveApi
|
|||
download(url, saveToPath, fileSize);
|
||||
// Does path exist?
|
||||
if (exists(saveToPath)) {
|
||||
// File was downloaded sucessfully - configure the applicable permissions for the file
|
||||
// File was downloaded successfully - configure the applicable permissions for the file
|
||||
log.vdebug("Setting file permissions for: ", saveToPath);
|
||||
saveToPath.setAttributes(cfg.returnRequiredFilePermisions());
|
||||
}
|
||||
|
@ -997,10 +1032,19 @@ final class OneDriveApi
|
|||
auto expirationDateTime = Clock.currTime(UTC()) + subscriptionExpirationInterval;
|
||||
const(char)[] url;
|
||||
url = subscriptionUrl;
|
||||
// Create a resource item based on if we have a driveId
|
||||
string resourceItem;
|
||||
if (driveId.length) {
|
||||
resourceItem = "/drives/" ~ driveId ~ "/root";
|
||||
} else {
|
||||
resourceItem = "/me/drive/root";
|
||||
}
|
||||
|
||||
// create JSON request to create webhook subscription
|
||||
const JSONValue request = [
|
||||
"changeType": "updated",
|
||||
"notificationUrl": notificationUrl,
|
||||
"resource": "/me/drive/root",
|
||||
"resource": resourceItem,
|
||||
"expirationDateTime": expirationDateTime.toISOExtString(),
|
||||
"clientState": randomUUID().toString()
|
||||
];
|
||||
|
@ -1014,6 +1058,7 @@ final class OneDriveApi
|
|||
|
||||
// We need to exit here, user needs to fix issue
|
||||
log.error("ERROR: Unable to initialize subscriptions for updates. Please fix this issue.");
|
||||
shutdown();
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
|
@ -1081,6 +1126,29 @@ final class OneDriveApi
|
|||
}
|
||||
|
||||
if (response.type() == JSONType.object) {
|
||||
// Has the client been configured to use read_only_auth_scope
|
||||
if (cfg.getValueBool("read_only_auth_scope")) {
|
||||
// read_only_auth_scope has been configured
|
||||
if ("scope" in response){
|
||||
string effectiveScopes = response["scope"].str();
|
||||
// Display the effective authentication scopes
|
||||
writeln();
|
||||
writeln("Effective API Authentication Scopes: ", effectiveScopes);
|
||||
// if we have any write scopes, we need to tell the user to update an remove online prior authentication and exit application
|
||||
if (canFind(effectiveScopes, "Write")) {
|
||||
// effective scopes contain write scopes .. so not a read-only configuration
|
||||
writeln();
|
||||
writeln("ERROR: You have authentication scopes that allow write operations. You need to remove your existing application access consent");
|
||||
writeln();
|
||||
writeln("Please login to https://account.live.com/consent/Manage and remove your existing application access consent");
|
||||
writeln();
|
||||
// force exit
|
||||
shutdown();
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ("access_token" in response){
|
||||
accessToken = "bearer " ~ response["access_token"].str();
|
||||
refreshToken = response["refresh_token"].str();
|
||||
|
@ -1116,7 +1184,9 @@ final class OneDriveApi
|
|||
} catch (OneDriveException e) {
|
||||
if (e.httpStatusCode == 400 || e.httpStatusCode == 401) {
|
||||
// flag error and notify
|
||||
log.errorAndNotify("\nERROR: Refresh token invalid, use --reauth to authorize the client again.\n");
|
||||
writeln();
|
||||
log.errorAndNotify("ERROR: Refresh token invalid, use --reauth to authorize the client again.");
|
||||
writeln();
|
||||
// set error message
|
||||
e.msg ~= "\nRefresh token invalid, use --reauth to authorize the client again";
|
||||
}
|
||||
|
@ -1203,30 +1273,72 @@ final class OneDriveApi
|
|||
p.title = "Downloading";
|
||||
writeln();
|
||||
bool barInit = false;
|
||||
real previousDLPercent = -1.0;
|
||||
real previousProgressPercent = -1.0;
|
||||
real percentCheck = 5.0;
|
||||
long segmentCount = 1;
|
||||
// Setup progress bar to display
|
||||
http.onProgress = delegate int(size_t dltotal, size_t dlnow, size_t ultotal, size_t ulnow)
|
||||
{
|
||||
// For each onProgress, what is the % of dlnow to dltotal
|
||||
// floor - rounds down to nearest whole number
|
||||
real currentDLPercent = floor(double(dlnow)/dltotal*100);
|
||||
// Have we started downloading?
|
||||
if (currentDLPercent > 0){
|
||||
// We have started downloading
|
||||
// If matching 5% of download, increment progress bar
|
||||
if ((isIdentical(fmod(currentDLPercent, percentCheck), 0.0)) && (previousDLPercent != currentDLPercent)) {
|
||||
// What have we downloaded thus far
|
||||
log.vdebugNewLine("Data Received = ", dlnow);
|
||||
log.vdebug("Expected Total = ", dltotal);
|
||||
log.vdebug("Percent Complete = ", currentDLPercent);
|
||||
// Increment counter & show bar update
|
||||
p.next();
|
||||
previousDLPercent = currentDLPercent;
|
||||
log.vdebugNewLine("Data Received = ", dlnow);
|
||||
log.vdebug("Expected Total = ", dltotal);
|
||||
log.vdebug("Percent Complete = ", currentDLPercent);
|
||||
// Every 5% download we need to increment the download bar
|
||||
|
||||
// Has the user set a data rate limit?
|
||||
// when using rate_limit, we will get odd download rates, for example:
|
||||
// Percent Complete = 24
|
||||
// Data Received = 13080163
|
||||
// Expected Total = 52428800
|
||||
// Percent Complete = 24
|
||||
// Data Received = 13685777
|
||||
// Expected Total = 52428800
|
||||
// Percent Complete = 26 <---- jumps to 26% missing 25%, thus fmod misses incrementing progress bar
|
||||
// Data Received = 13685777
|
||||
// Expected Total = 52428800
|
||||
// Percent Complete = 26
|
||||
|
||||
if (cfg.getValueLong("rate_limit") > 0) {
|
||||
// User configured rate limit
|
||||
// How much data should be in each segment to qualify for 5%
|
||||
long dataPerSegment = to!long(floor(double(dltotal)/iteration));
|
||||
// How much data received do we need to validate against
|
||||
long thisSegmentData = dataPerSegment * segmentCount;
|
||||
long nextSegmentData = dataPerSegment * (segmentCount + 1);
|
||||
// Has the data that has been received in a 5% window that we need to increment the progress bar at
|
||||
if ((dlnow > thisSegmentData) && (dlnow < nextSegmentData) && (previousProgressPercent != currentDLPercent) || (dlnow == dltotal)) {
|
||||
// Downloaded data equals approx 5%
|
||||
log.vdebug("Incrementing Progress Bar using calculated 5% of data received");
|
||||
// Downloading 50% |oooooooooooooooooooo | ETA 00:01:40
|
||||
// increment progress bar
|
||||
p.next();
|
||||
// update values
|
||||
log.vdebug("Setting previousProgressPercent to ", currentDLPercent);
|
||||
previousProgressPercent = currentDLPercent;
|
||||
log.vdebug("Incrementing segmentCount");
|
||||
segmentCount++;
|
||||
}
|
||||
} else {
|
||||
// Is currentDLPercent divisible by 5 leaving remainder 0 and does previousProgressPercent not equal currentDLPercent
|
||||
if ((isIdentical(fmod(currentDLPercent, percentCheck), 0.0)) && (previousProgressPercent != currentDLPercent)) {
|
||||
// currentDLPercent matches a new increment
|
||||
log.vdebug("Incrementing Progress Bar using fmod match");
|
||||
// Downloading 50% |oooooooooooooooooooo | ETA 00:01:40
|
||||
// increment progress bar
|
||||
p.next();
|
||||
// update values
|
||||
previousProgressPercent = currentDLPercent;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if ((currentDLPercent == 0) && (!barInit)) {
|
||||
// Initialise the download bar at 0%
|
||||
// Downloading 0% | | ETA --:--:--:^C
|
||||
// Downloading 0% | | ETA --:--:--:
|
||||
p.next();
|
||||
barInit = true;
|
||||
}
|
||||
|
@ -1422,7 +1534,8 @@ final class OneDriveApi
|
|||
retryAttempts++;
|
||||
if (canFind(e.msg, "Couldn't connect to server on handle") || canFind(e.msg, "Couldn't resolve host name on handle") || canFind(errorMessage, "Timeout was reached on handle")) {
|
||||
// no access to Internet
|
||||
log.error("\nERROR: There was a timeout in accessing the Microsoft OneDrive service - Internet connectivity issue?");
|
||||
writeln();
|
||||
log.error("ERROR: There was a timeout in accessing the Microsoft OneDrive service - Internet connectivity issue?");
|
||||
// what is the error reason to assis the user as what to check
|
||||
if (canFind(e.msg, "Couldn't connect to server on handle")) {
|
||||
log.log(" - Check HTTPS access or Firewall Rules");
|
||||
|
@ -1472,6 +1585,17 @@ final class OneDriveApi
|
|||
// Some other error was returned
|
||||
log.error(" Error Message: ", errorMessage);
|
||||
log.error(" Calling Function: ", getFunctionName!({}));
|
||||
|
||||
// Was this a curl initialization error?
|
||||
if (canFind(errorMessage, "Failed initialization on handle")) {
|
||||
// initialization error ... prevent a run-away process if we have zero disk space
|
||||
ulong localActualFreeSpace = to!ulong(getAvailableDiskSpace("."));
|
||||
if (localActualFreeSpace == 0) {
|
||||
// force exit
|
||||
shutdown();
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
}
|
||||
// return an empty JSON for handling
|
||||
return json;
|
||||
|
@ -1691,7 +1815,7 @@ final class OneDriveApi
|
|||
case 403:
|
||||
// OneDrive responded that the user is forbidden
|
||||
log.vlog("OneDrive returned a 'HTTP 403 - Forbidden' - gracefully handling error");
|
||||
// Throw this as a specific exception so this is caught when performing sync.o365SiteSearch
|
||||
// Throw this as a specific exception so this is caught when performing 'siteQuery = onedrive.o365SiteSearch(nextLink);' call
|
||||
throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response);
|
||||
|
||||
// 412 - Precondition Failed
|
||||
|
|
|
@ -221,6 +221,7 @@ private bool isPathExcluded(string path, string[] allowedPaths)
|
|||
{
|
||||
// function variables
|
||||
bool exclude = false;
|
||||
bool exludeDirectMatch = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool excludeMatched = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool finalResult = true; // will get updated to false, if pattern match to sync_list entry
|
||||
int offset;
|
||||
|
@ -232,8 +233,9 @@ private bool isPathExcluded(string path, string[] allowedPaths)
|
|||
if (allowedPaths.empty) return false;
|
||||
path = buildNormalizedPath(path);
|
||||
log.vdebug("Evaluation against 'sync_list' for this path: ", path);
|
||||
log.vdebug("[S]exclude = ", exclude);
|
||||
log.vdebug("[S]excludeMatched = ", excludeMatched);
|
||||
log.vdebug("[S]exclude = ", exclude);
|
||||
log.vdebug("[S]exludeDirectMatch = ", exludeDirectMatch);
|
||||
log.vdebug("[S]excludeMatched = ", excludeMatched);
|
||||
|
||||
// unless path is an exact match, entire sync_list entries need to be processed to ensure
|
||||
// negative matches are also correctly detected
|
||||
|
@ -298,9 +300,12 @@ private bool isPathExcluded(string path, string[] allowedPaths)
|
|||
// direct match, break and go sync
|
||||
break;
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match but to be excluded");
|
||||
finalResult = true;
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match - path to be excluded");
|
||||
// do not set excludeMatched = true here, otherwise parental path also gets excluded
|
||||
// flag exludeDirectMatch so that a 'wildcard match' will not override this exclude
|
||||
exludeDirectMatch = true;
|
||||
// final result
|
||||
finalResult = true;
|
||||
}
|
||||
} else {
|
||||
// no exact path match, but something common does match
|
||||
|
@ -357,7 +362,7 @@ private bool isPathExcluded(string path, string[] allowedPaths)
|
|||
if (matchAll(path, allowedMask)) {
|
||||
// regex wildcard evaluation matches
|
||||
// if we have a prior pattern match for an exclude, excludeMatched = true
|
||||
if (!exclude && !excludeMatched) {
|
||||
if (!exclude && !excludeMatched && !exludeDirectMatch) {
|
||||
// nothing triggered an exclusion before evaluation against wildcard match attempt
|
||||
log.vdebug("Evaluation against 'sync_list' result: wildcard pattern match");
|
||||
finalResult = false;
|
||||
|
@ -370,11 +375,12 @@ private bool isPathExcluded(string path, string[] allowedPaths)
|
|||
}
|
||||
}
|
||||
// Interim results
|
||||
log.vdebug("[F]exclude = ", exclude);
|
||||
log.vdebug("[F]excludeMatched = ", excludeMatched);
|
||||
log.vdebug("[F]exclude = ", exclude);
|
||||
log.vdebug("[F]exludeDirectMatch = ", exludeDirectMatch);
|
||||
log.vdebug("[F]excludeMatched = ", excludeMatched);
|
||||
|
||||
// If exclude or excludeMatched is true, then finalResult has to be true
|
||||
if ((exclude) || (excludeMatched)) {
|
||||
if ((exclude) || (excludeMatched) || (exludeDirectMatch)) {
|
||||
finalResult = true;
|
||||
}
|
||||
|
||||
|
|
840
src/sync.d
840
src/sync.d
File diff suppressed because it is too large
Load diff
242
src/util.d
242
src/util.d
|
@ -14,7 +14,10 @@ import std.uri;
|
|||
import std.json;
|
||||
import std.traits;
|
||||
import qxor;
|
||||
static import log;
|
||||
import core.stdc.stdlib;
|
||||
|
||||
import log;
|
||||
import config;
|
||||
|
||||
shared string deviceName;
|
||||
|
||||
|
@ -47,28 +50,6 @@ void safeRemove(const(char)[] path)
|
|||
if (exists(path)) remove(path);
|
||||
}
|
||||
|
||||
// returns the crc32 hex string of a file
|
||||
string computeCrc32(string path)
|
||||
{
|
||||
CRC32 crc;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
crc.put(data);
|
||||
}
|
||||
return crc.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// returns the sha1 hash hex string of a file
|
||||
string computeSha1Hash(string path)
|
||||
{
|
||||
SHA1 sha;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
sha.put(data);
|
||||
}
|
||||
return sha.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// returns the quickXorHash base64 string of a file
|
||||
string computeQuickXorHash(string path)
|
||||
{
|
||||
|
@ -80,6 +61,16 @@ string computeQuickXorHash(string path)
|
|||
return Base64.encode(qxor.finish());
|
||||
}
|
||||
|
||||
// returns the SHA256 hex string of a file
|
||||
string computeSHA256Hash(string path) {
|
||||
SHA256 sha256;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
sha256.put(data);
|
||||
}
|
||||
return sha256.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// converts wildcards (*, ?) to regex
|
||||
Regex!char wild2regex(const(char)[] pattern)
|
||||
{
|
||||
|
@ -125,22 +116,30 @@ Regex!char wild2regex(const(char)[] pattern)
|
|||
}
|
||||
|
||||
// returns true if the network connection is available
|
||||
bool testNetwork()
|
||||
bool testNetwork(Config cfg)
|
||||
{
|
||||
// Use low level HTTP struct
|
||||
auto http = HTTP();
|
||||
http.url = "https://login.microsoftonline.com";
|
||||
// DNS lookup timeout
|
||||
http.dnsTimeout = (dur!"seconds"(5));
|
||||
http.dnsTimeout = (dur!"seconds"(cfg.getValueLong("dns_timeout")));
|
||||
// Timeout for connecting
|
||||
http.connectTimeout = (dur!"seconds"(5));
|
||||
http.connectTimeout = (dur!"seconds"(cfg.getValueLong("connect_timeout")));
|
||||
// Data Timeout for HTTPS connections
|
||||
http.dataTimeout = (dur!"seconds"(cfg.getValueLong("data_timeout")));
|
||||
// maximum time any operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
http.operationTimeout = (dur!"seconds"(cfg.getValueLong("operation_timeout")));
|
||||
// What IP protocol version should be used when using Curl - IPv4 & IPv6, IPv4 or IPv6
|
||||
http.handle.set(CurlOption.ipresolve,cfg.getValueLong("ip_protocol_version")); // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
// HTTP connection test method
|
||||
http.method = HTTP.Method.head;
|
||||
// Attempt to contact the Microsoft Online Service
|
||||
try {
|
||||
log.vdebug("Attempting to contact online service");
|
||||
http.perform();
|
||||
log.vdebug("Shutting down HTTP engine as sucessfully reached OneDrive Online Service");
|
||||
log.vdebug("Shutting down HTTP engine as successfully reached OneDrive Online Service");
|
||||
http.shutdown();
|
||||
return true;
|
||||
} catch (SocketException e) {
|
||||
|
@ -268,7 +267,8 @@ bool containsASCIIHTMLCodes(string path)
|
|||
// Parse and display error message received from OneDrive
|
||||
void displayOneDriveErrorMessage(string message, string callingFunction)
|
||||
{
|
||||
log.error("\nERROR: Microsoft OneDrive API returned an error with the following message:");
|
||||
writeln();
|
||||
log.error("ERROR: Microsoft OneDrive API returned an error with the following message:");
|
||||
auto errorArray = splitLines(message);
|
||||
log.error(" Error Message: ", errorArray[0]);
|
||||
// Extract 'message' as the reason
|
||||
|
@ -332,18 +332,25 @@ void displayOneDriveErrorMessage(string message, string callingFunction)
|
|||
}
|
||||
|
||||
// Where in the code was this error generated
|
||||
log.error(" Calling Function: ", callingFunction);
|
||||
log.vlog(" Calling Function: ", callingFunction);
|
||||
}
|
||||
|
||||
// Parse and display error message received from the local file system
|
||||
void displayFileSystemErrorMessage(string message, string callingFunction)
|
||||
{
|
||||
log.error("\nERROR: The local file system returned an error with the following message:");
|
||||
writeln();
|
||||
log.error("ERROR: The local file system returned an error with the following message:");
|
||||
auto errorArray = splitLines(message);
|
||||
// What was the error message
|
||||
log.error(" Error Message: ", errorArray[0]);
|
||||
// Where in the code was this error generated
|
||||
log.error(" Calling Function: ", callingFunction);
|
||||
log.vlog(" Calling Function: ", callingFunction);
|
||||
// If we are out of disk space (despite download reservations) we need to exit the application
|
||||
ulong localActualFreeSpace = to!ulong(getAvailableDiskSpace("."));
|
||||
if (localActualFreeSpace == 0) {
|
||||
// force exit
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
// Get the function name that is being called to assist with identifying where an error is being generated
|
||||
|
@ -352,12 +359,14 @@ string getFunctionName(alias func)() {
|
|||
}
|
||||
|
||||
// Get the latest release version from GitHub
|
||||
string getLatestReleaseVersion() {
|
||||
JSONValue getLatestReleaseDetails() {
|
||||
// Import curl just for this function
|
||||
import std.net.curl;
|
||||
char[] content;
|
||||
JSONValue json;
|
||||
JSONValue githubLatest;
|
||||
JSONValue versionDetails;
|
||||
string latestTag;
|
||||
string publishedDate;
|
||||
|
||||
try {
|
||||
content = get("https://api.github.com/repos/abraunegg/onedrive/releases/latest");
|
||||
|
@ -367,50 +376,181 @@ string getLatestReleaseVersion() {
|
|||
}
|
||||
|
||||
try {
|
||||
json = content.parseJSON();
|
||||
githubLatest = content.parseJSON();
|
||||
} catch (JSONException e) {
|
||||
// unable to parse the content JSON, set to blank JSON
|
||||
log.vdebug("Unable to parse GitHub JSON response");
|
||||
json = parseJSON("{}");
|
||||
githubLatest = parseJSON("{}");
|
||||
}
|
||||
|
||||
// json has to be a valid JSON object
|
||||
if (json.type() == JSONType.object){
|
||||
if ("tag_name" in json) {
|
||||
// githubLatest has to be a valid JSON object
|
||||
if (githubLatest.type() == JSONType.object){
|
||||
// use the returned tag_name
|
||||
if ("tag_name" in githubLatest) {
|
||||
// use the provided tag
|
||||
// "tag_name": "vA.B.CC" and strip 'v'
|
||||
latestTag = strip(json["tag_name"].str, "v");
|
||||
latestTag = strip(githubLatest["tag_name"].str, "v");
|
||||
} else {
|
||||
// set to latestTag zeros
|
||||
log.vdebug("'tag_name' unavailable in JSON response. Setting latest GitHub release version to 0.0.0");
|
||||
log.vdebug("'tag_name' unavailable in JSON response. Setting GitHub 'tag_name' release version to 0.0.0");
|
||||
latestTag = "0.0.0";
|
||||
}
|
||||
// use the returned published_at date
|
||||
if ("published_at" in githubLatest) {
|
||||
// use the provided value
|
||||
publishedDate = githubLatest["published_at"].str;
|
||||
} else {
|
||||
// set to v2.0.0 release date
|
||||
log.vdebug("'published_at' unavailable in JSON response. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
} else {
|
||||
// JSONValue is not an object
|
||||
log.vdebug("Invalid JSON Object. Setting latest GitHub release version to 0.0.0");
|
||||
log.vdebug("Invalid JSON Object. Setting GitHub 'tag_name' release version to 0.0.0");
|
||||
latestTag = "0.0.0";
|
||||
log.vdebug("Invalid JSON Object. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
|
||||
// return the latest github version
|
||||
return latestTag;
|
||||
// return the latest github version and published date as our own JSON
|
||||
versionDetails = [
|
||||
"latestTag": JSONValue(latestTag),
|
||||
"publishedDate": JSONValue(publishedDate)
|
||||
];
|
||||
|
||||
// return JSON
|
||||
return versionDetails;
|
||||
}
|
||||
|
||||
// Get the release details from the 'current' running version
|
||||
JSONValue getCurrentVersionDetails(string thisVersion) {
	// Look up the GitHub release entry matching the running client version
	// and return its tag and publish date as our own JSON object.
	// Import curl just for this function
	import std.net.curl;
	char[] rawResponse;
	JSONValue releaseList;
	JSONValue result;
	string searchTag = "v" ~ thisVersion;
	string publishedDate;
	
	// Query GitHub for the complete release list
	try {
		rawResponse = get("https://api.github.com/repos/abraunegg/onedrive/releases");
	} catch (CurlException e) {
		// curl generated an error - meaning we could not query GitHub
		log.vdebug("Unable to query GitHub for release details");
	}
	
	// Parse whatever came back; an empty or malformed body becomes blank JSON
	try {
		releaseList = parseJSON(rawResponse);
	} catch (JSONException e) {
		// unable to parse the content JSON, set to blank JSON
		log.vdebug("Unable to parse GitHub JSON response");
		releaseList = parseJSON("{}");
	}
	
	// The release list has to be a valid JSON array to be searchable
	if (releaseList.type() == JSONType.array){
		// Scan every release for a tag_name matching this client version
		foreach (entry; releaseList.array) {
			if (entry["tag_name"].str == searchTag) {
				log.vdebug("MATCHED version");
				log.vdebug("tag_name: ", entry["tag_name"].str);
				log.vdebug("published_at: ", entry["published_at"].str);
				publishedDate = entry["published_at"].str;
			}
		}
		
		if (publishedDate.length == 0) {
			// no release matched this version tag - fall back to the v2.0.0 release date
			log.vdebug("'published_at' unavailable in JSON response. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
			publishedDate = "2018-07-18T18:00:00Z";
		}
	} else {
		// JSONValue is not an Array - fall back to the v2.0.0 release date
		log.vdebug("Invalid JSON Array. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
		publishedDate = "2018-07-18T18:00:00Z";
	}
	
	// package the running version and its published date as our own JSON
	result = [
		"versionTag": JSONValue(thisVersion),
		"publishedDate": JSONValue(publishedDate)
	];
	
	// return JSON
	return result;
}
|
||||
|
||||
// Check the application version versus GitHub latestTag
|
||||
void checkApplicationVersion() {
	// Compare the running client version against the latest GitHub release and
	// warn (or notify) the user when the client is out of date or obsolete.
	// Get the latest release details from GitHub
	// NOTE(review): the previous text declared 'latestVersion' twice (an old
	// strip(getLatestReleaseVersion()) line plus the JSON-based line below),
	// which is an illegal redefinition in D - the stale declaration is removed.
	JSONValue latestVersionDetails = getLatestReleaseDetails();
	string latestVersion = latestVersionDetails["latestTag"].str;
	SysTime publishedDate = SysTime.fromISOExtString(latestVersionDetails["publishedDate"].str).toUTC();
	SysTime releaseGracePeriod = publishedDate;
	SysTime currentTime = Clock.currTime().toUTC();
	
	// drop fraction seconds
	publishedDate.fracSecs = Duration.zero;
	currentTime.fracSecs = Duration.zero;
	releaseGracePeriod.fracSecs = Duration.zero;
	// roll the grace period forward to allow distributions to catch up based on their release cycles
	releaseGracePeriod = releaseGracePeriod.add!"months"(1);
	
	// what is this clients version?
	// 'import("version")' is a compile-time string import of the version file
	auto currentVersionArray = strip(strip(import("version"), "v")).split("-");
	string applicationVersion = currentVersionArray[0];
	
	// debug output
	log.vdebug("applicationVersion: ", applicationVersion);
	log.vdebug("latestVersion: ", latestVersion);
	log.vdebug("publishedDate: ", publishedDate);
	log.vdebug("currentTime: ", currentTime);
	log.vdebug("releaseGracePeriod: ", releaseGracePeriod);
	
	// display details if the running version differs from what is available on GitHub
	if (applicationVersion != latestVersion) {
		// application version is different
		bool displayObsolete = false;
		
		// what warning do we present?
		if (applicationVersion < latestVersion) {
			// application version is older than the latest release
			// go get this running version details
			JSONValue thisVersionDetails = getCurrentVersionDetails(applicationVersion);
			SysTime thisVersionPublishedDate = SysTime.fromISOExtString(thisVersionDetails["publishedDate"].str).toUTC();
			thisVersionPublishedDate.fracSecs = Duration.zero;
			log.vdebug("thisVersionPublishedDate: ", thisVersionPublishedDate);
			
			// the running version grace period is its release date + 1 month
			SysTime thisVersionReleaseGracePeriod = thisVersionPublishedDate;
			thisVersionReleaseGracePeriod = thisVersionReleaseGracePeriod.add!"months"(1);
			log.vdebug("thisVersionReleaseGracePeriod: ", thisVersionReleaseGracePeriod);
			
			// is this running version obsolete ?
			if (!displayObsolete) {
				// if releaseGracePeriod > currentTime
				// display an information warning that there is a new release available
				if (releaseGracePeriod.toUnixTime() > currentTime.toUnixTime()) {
					// inside release grace period ... set flag to false
					displayObsolete = false;
				} else {
					// outside grace period
					displayObsolete = true;
				}
			}
			
			// display version response
			// NOTE(review): the stale unconditional 'obsolete and unsupported'
			// warning and duplicate vlog lines that preceded this conditional
			// (old-revision remnants) are removed - only one message is shown.
			writeln();
			if (!displayObsolete) {
				// display the new version is available message
				log.logAndNotify("INFO: A new onedrive client version is available. Please upgrade your client version when possible.");
			} else {
				// display the obsolete message
				log.logAndNotify("WARNING: Your onedrive client version is now obsolete and unsupported. Please upgrade your client version.");
			}
			log.log("Current Application Version: ", applicationVersion);
			log.log("Version Available: ", latestVersion);
			writeln();
		}
	}
|
||||
|
|
Loading…
Reference in a new issue