OneDrive Client for Linux v2.5.0 (#2805)
OneDrive Client for Linux v2.5.0 --------- Signed-off-by: Thomas Staudinger <Staudi.Kaos@gmail.com> Co-authored-by: JC-comp <147694781+JC-comp@users.noreply.github.com> Co-authored-by: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Co-authored-by: Pierrick Caillon <megamisan@users.noreply.github.com> Co-authored-by: Pierrick Caillon <pierrick.caillon@megami.fr> Co-authored-by: Thomas Staudinger <Staudi.Kaos@gmail.com> Co-authored-by: Yuan Liu <Lyncredible@users.noreply.github.com>
4
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -1,12 +1,12 @@
|
|||
name: "Bug Report"
|
||||
description: Create a Bug Report to help us fix your issue
|
||||
description: Before proceeding, please ensure your issue is a genuine software bug. This form is exclusively for reporting actual software bugs that need fixing. For other items, use GitHub Discussions instead.
|
||||
title: "Bug: "
|
||||
labels: ["Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Note:** Before submitting a bug report, please ensure you are running the latest 'onedrive' client as built from 'master' and compile by using the latest available DMD or LDC compiler. Refer to the the [INSTALL](https://github.com/abraunegg/onedrive/blob/master/docs/INSTALL.md) document on how to build the client for your system.
|
||||
**Note:** Before submitting a bug report, please ensure you are running the latest 'onedrive' client as built from 'master' and compile by using the latest available DMD or LDC compiler. Refer to the [install](https://github.com/abraunegg/onedrive/blob/master/docs/install.md) document on how to build the client for your system.
|
||||
|
||||
- type: textarea
|
||||
id: bugDescription
|
||||
|
|
|
|||
2
.github/ISSUE_TEMPLATE/config.yml
vendored
|
|
@ -2,4 +2,4 @@ blank_issues_enabled: false
|
|||
contact_links:
|
||||
- name: "Have a question?"
|
||||
url: https://github.com/abraunegg/onedrive/discussions
|
||||
about: "Please do not raise a GitHub issue for asking questions - please post your question under GitHub Discussions. When opening a new discussion, please include all relevant details such as including your application version and how you installed the client. Thanks in advance for helping us keep the issue tracker clean!"
|
||||
about: "Please refrain from using GitHub Issues for asking questions or reporting non-software bugs. Instead, post your questions, installation issues, dependency concerns, or anything else that isn't a software bug under GitHub Discussions. When starting a new GitHub Discussion, be sure to include all relevant details, such as your application version and installation method. Thank you for helping us keep the issue tracker focused on actual software bugs!"
|
||||
|
|
|
|||
8
LICENSE
|
|
@ -1,7 +1,7 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
|
|
@ -645,7 +645,7 @@ the "copyright" line and a pointer to where the full notice is found.
|
|||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
|
|
@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box".
|
|||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
|
|
|||
43
Makefile.in
|
|
@ -34,13 +34,18 @@ DEBUG = @DEBUG@
|
|||
DC = @DC@
|
||||
DC_TYPE = @DC_TYPE@
|
||||
DCFLAGS = @DCFLAGS@
|
||||
DCFLAGS += -w -g -O -J.
|
||||
DCFLAGS += -w -J.
|
||||
ifeq ($(DEBUG),yes)
|
||||
ifeq ($(DC_TYPE),dmd)
|
||||
DCFLAGS += -debug -gs
|
||||
# Add DMD Debugging Flags
|
||||
DCFLAGS += -g -debug -gs
|
||||
else
|
||||
DCFLAGS += -d-debug -gc
|
||||
# Add LDC Debugging Flags
|
||||
DCFLAGS += -g -d-debug -gc
|
||||
endif
|
||||
else
|
||||
# Only add optimisation flags if debugging is not enabled
|
||||
DCFLAGS += -O
|
||||
endif
|
||||
|
||||
ifeq ($(NOTIFICATIONS),yes)
|
||||
|
|
@ -55,7 +60,7 @@ endif
|
|||
system_unit_files = contrib/systemd/onedrive@.service
|
||||
user_unit_files = contrib/systemd/onedrive.service
|
||||
|
||||
DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md docs/application-security.md
|
||||
DOCFILES = readme.md config LICENSE changelog.md docs/advanced-usage.md docs/application-config-options.md docs/application-security.md docs/business-shared-items.md docs/client-architecture.md docs/contributing.md docs/docker.md docs/install.md docs/national-cloud-deployments.md docs/podman.md docs/privacy-policy.md docs/sharepoint-libraries.md docs/terms-of-service.md docs/ubuntu-package-install.md docs/usage.md docs/known-issues.md docs/webhooks.md
|
||||
|
||||
ifneq ("$(wildcard /etc/redhat-release)","")
|
||||
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux|CentOS)" | wc -l)
|
||||
|
|
@ -66,19 +71,19 @@ RHEL_VERSION = 0
|
|||
endif
|
||||
|
||||
SOURCES = \
|
||||
src/config.d \
|
||||
src/itemdb.d \
|
||||
src/log.d \
|
||||
src/main.d \
|
||||
src/monitor.d \
|
||||
src/onedrive.d \
|
||||
src/qxor.d \
|
||||
src/selective.d \
|
||||
src/sqlite.d \
|
||||
src/sync.d \
|
||||
src/upload.d \
|
||||
src/config.d \
|
||||
src/log.d \
|
||||
src/util.d \
|
||||
src/progress.d \
|
||||
src/qxor.d \
|
||||
src/curlEngine.d \
|
||||
src/onedrive.d \
|
||||
src/webhook.d \
|
||||
src/sync.d \
|
||||
src/itemdb.d \
|
||||
src/sqlite.d \
|
||||
src/clientSideFiltering.d \
|
||||
src/monitor.d \
|
||||
src/arsd/cgi.d
|
||||
|
||||
ifeq ($(NOTIFICATIONS),yes)
|
||||
|
|
@ -92,10 +97,9 @@ clean:
|
|||
rm -rf autom4te.cache
|
||||
rm -f config.log config.status
|
||||
|
||||
# also remove files generated via ./configure
|
||||
# Remove files generated via ./configure
|
||||
distclean: clean
|
||||
rm -f Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 \
|
||||
$(system_unit_files) $(user_unit_files)
|
||||
rm -f Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 $(system_unit_files) $(user_unit_files)
|
||||
|
||||
onedrive: $(SOURCES)
|
||||
if [ -f .git/HEAD ] ; then \
|
||||
|
|
@ -132,7 +136,6 @@ ifeq ($(COMPLETIONS),yes)
|
|||
$(INSTALL) -D -m 0644 contrib/completions/complete.fish $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
endif
|
||||
|
||||
|
||||
uninstall:
|
||||
rm -f $(DESTDIR)$(bindir)/onedrive
|
||||
rm -f $(DESTDIR)$(mandir)/man1/onedrive.1
|
||||
|
|
@ -156,5 +159,3 @@ ifeq ($(COMPLETIONS),yes)
|
|||
rm -f $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive
|
||||
rm -f $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
endif
|
||||
|
||||
|
||||
|
|
|
|||
92
README.md
|
|
@ -1,92 +0,0 @@
|
|||
# OneDrive Client for Linux
|
||||
[](https://github.com/abraunegg/onedrive/releases)
|
||||
[](https://github.com/abraunegg/onedrive/releases)
|
||||
[](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml)
|
||||
[](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml)
|
||||
[](https://hub.docker.com/r/driveone/onedrive)
|
||||
|
||||
A free Microsoft OneDrive Client which supports OneDrive Personal, OneDrive for Business, OneDrive for Office365 and SharePoint.
|
||||
|
||||
This powerful and highly configurable client can run on all major Linux distributions, FreeBSD, or as a Docker container. It supports one-way and two-way sync capabilities and securely connects to Microsoft OneDrive services.
|
||||
|
||||
This client is a 'fork' of the [skilion](https://github.com/skilion/onedrive) client, which the developer has confirmed he has no desire to maintain or support the client ([reference](https://github.com/skilion/onedrive/issues/518#issuecomment-717604726)). This fork has been in active development since mid 2018.
|
||||
|
||||
## Features
|
||||
* State caching
|
||||
* Real-Time local file monitoring with inotify
|
||||
* Real-Time syncing of remote updates via webhooks
|
||||
* File upload / download validation to ensure data integrity
|
||||
* Resumable uploads
|
||||
* Support OneDrive for Business (part of Office 365)
|
||||
* Shared Folder support for OneDrive Personal and OneDrive Business accounts
|
||||
* SharePoint / Office365 Shared Libraries
|
||||
* Desktop notifications via libnotify
|
||||
* Dry-run capability to test configuration changes
|
||||
* Prevent major OneDrive accidental data deletion after configuration change
|
||||
* Support for National cloud deployments (Microsoft Cloud for US Government, Microsoft Cloud Germany, Azure and Office 365 operated by 21Vianet in China)
|
||||
* Supports single & multi-tenanted applications
|
||||
* Supports rate limiting of traffic
|
||||
|
||||
## What's missing
|
||||
* Ability to encrypt/decrypt files on-the-fly when uploading/downloading files from OneDrive
|
||||
* Support for Windows 'On-Demand' functionality so file is only downloaded when accessed locally
|
||||
|
||||
## External Enhancements
|
||||
* A GUI for configuration management: [OneDrive Client for Linux GUI](https://github.com/bpozdena/OneDriveGUI)
|
||||
* Colorful log output terminal modification: [OneDrive Client for Linux Colorful log Output](https://github.com/zzzdeb/dotfiles/blob/master/scripts/tools/onedrive_log)
|
||||
* System Tray Icon: [OneDrive Client for Linux System Tray Icon](https://github.com/DanielBorgesOliveira/onedrive_tray)
|
||||
|
||||
## Supported Application Version
|
||||
Only the current application release version or greater is supported.
|
||||
|
||||
The current application release version is: [](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
Check the version of the application you are using `onedrive --version` and ensure that you are running either the current release or compile the application yourself from master to get the latest version.
|
||||
|
||||
If you are not using the above application version or greater, you must upgrade your application to obtain support.
|
||||
|
||||
## Have a Question
|
||||
If you have a question or need something clarified, please raise a new discussion post [here](https://github.com/abraunegg/onedrive/discussions)
|
||||
|
||||
Be sure to review the Frequently Asked Questions as well before raising a new discussion post.
|
||||
|
||||
## Frequently Asked Questions
|
||||
Refer to [Frequently Asked Questions](https://github.com/abraunegg/onedrive/wiki/Frequently-Asked-Questions)
|
||||
|
||||
## Reporting an Issue or Bug
|
||||
If you encounter any bugs you can report them here on GitHub. Before filing an issue be sure to:
|
||||
|
||||
1. Check the version of the application you are using `onedrive --version` and ensure that you are running a supported application version. If you are not using a supported application version, you must first upgrade your application to a supported version and then re-test for your issue.
|
||||
2. If you are using a supported application version, fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md)
|
||||
3. Generate a debug log for support using the following [process](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support)
|
||||
* If you are in *any* way concerned regarding the sensitivity of the data contained with in the verbose debug log file, create a new OneDrive account, configure the client to use that, use *dummy* data to simulate your environment and then replicate your original issue
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
4. Upload the debug log to [pastebin](https://pastebin.com/) or archive and email to support@mynas.com.au
|
||||
* If you are concerned regarding the sensitivity of your debug data, encrypt + password protect the archive file and provide the decryption password via an out-of-band (OOB) mechanism. Email support@mynas.com.au for an OOB method for the password to be sent.
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
|
||||
## Known issues
|
||||
Refer to [docs/known-issues.md](https://github.com/abraunegg/onedrive/blob/master/docs/known-issues.md)
|
||||
|
||||
## Documentation and Configuration Assistance
|
||||
### Installing from Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
Refer to [docs/INSTALL.md](https://github.com/abraunegg/onedrive/blob/master/docs/INSTALL.md)
|
||||
|
||||
### Configuration and Usage
|
||||
Refer to [docs/USAGE.md](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md)
|
||||
|
||||
### Configure OneDrive Business Shared Folders
|
||||
Refer to [docs/BusinessSharedFolders.md](https://github.com/abraunegg/onedrive/blob/master/docs/BusinessSharedFolders.md)
|
||||
|
||||
### Configure SharePoint / Office 365 Shared Libraries (Business or Education)
|
||||
Refer to [docs/SharePoint-Shared-Libraries.md](https://github.com/abraunegg/onedrive/blob/master/docs/SharePoint-Shared-Libraries.md)
|
||||
|
||||
### Configure National Cloud support
|
||||
Refer to [docs/national-cloud-deployments.md](https://github.com/abraunegg/onedrive/blob/master/docs/national-cloud-deployments.md)
|
||||
|
||||
### Docker support
|
||||
Refer to [docs/Docker.md](https://github.com/abraunegg/onedrive/blob/master/docs/Docker.md)
|
||||
|
||||
### Podman support
|
||||
Refer to [docs/Podman.md](https://github.com/abraunegg/onedrive/blob/master/docs/Podman.md)
|
||||
|
||||
|
|
@ -1,8 +1,84 @@
|
|||
# Changelog
|
||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## 2.5.0 - 2024-09-16
|
||||
|
||||
### Special Thankyou
|
||||
A special thankyou to all those who helped with testing and providing feedback during the development of this major release. A big thankyou to:
|
||||
* @JC-comp
|
||||
* @Lyncredible
|
||||
* @rrodrigueznt
|
||||
* @bpozdena
|
||||
* @hskrieg
|
||||
* @robertschulze
|
||||
* @aothmane-control
|
||||
* @mozram
|
||||
* @LunCh-CECNL
|
||||
* @pkolmann
|
||||
* @tdcockers
|
||||
* @undefiened
|
||||
* @cyb3rko
|
||||
|
||||
### Notable Changes
|
||||
* This version introduces significant changes regarding how the integrity and validation of your data is determined and is not backwards compatible with v2.4.x.
|
||||
* OneDrive Business Shared Folder Sync has been 100% re-written in v2.5.0. If you are using this feature, please read the new documentation carefully.
|
||||
* The application function --download-only no longer automatically deletes local files. Please read the new documentation regarding this feature.
|
||||
|
||||
### Changes
|
||||
* Renamed various documentation files to align with document content
|
||||
* Implement buffered logging so that all logging from all upload & download activities are handled correctly
|
||||
* Replace polling monitor loop with blocking wait
|
||||
* Update how the application utilises curl to fix socket reuse
|
||||
* Various performance enhancements
|
||||
* Implement refactored OneDrive API logic
|
||||
* Enforcement of operational conflicts
|
||||
* Enforcement of application configuration defaults and minimums
|
||||
* Utilise threadsafe sqlite DB access methods
|
||||
* Various bugs and other issues identified during development and testing
|
||||
* Various code cleanup and optimisations
|
||||
|
||||
### Fixed
|
||||
* Fix Bug: Upload only not working with Business shared folders
|
||||
* Fix Bug: Business shared folders with same basename get merged
|
||||
* Fix Bug: --dry-run prevents authorization
|
||||
* Fix Bug: Log timestamps lacking trailing zeros, leading to poor log file output alignment
|
||||
* Fix Bug: Subscription ID already exists when using webhooks
|
||||
* Fix Bug: Not all files being downloaded when API data includes HTML ASCII Control Sequences
|
||||
* Fix Bug: --display-sync-status does not work when OneNote sections (.one files) are in your OneDrive
|
||||
* Fix Bug: vim backups when editing files cause edited file to be deleted rather than the edited file being uploaded
|
||||
* Fix Bug: skip_dir does not always work as intended for all directory entries
|
||||
* Fix Bug: Online date being changed in download-only mode
|
||||
* Fix Bug: Resolve that download_only = "true" and cleanup_local_files = "true" also deletes files present online
|
||||
* Fix Bug: Resolve that upload session are not canceled with resync option
|
||||
* Fix Bug: Local files should be safely backed up when the item is not in sync locally to prevent data loss when they are deleted online
|
||||
* Fix Bug: Files with newer timestamp are not chosen as version to be kept
|
||||
* Fix Bug: Synced file is removed when updated on the remote while being processed by onedrive
|
||||
* Fix Bug: Cannot select/filter within Personal Shared Folders
|
||||
* Fix Bug: HTML encoding requires to add filter entries twice
|
||||
* Fix Bug: Uploading files using fragments stuck at 0%
|
||||
* Fix Bug: Implement safeguard when sync_dir is missing and is re-created data is not deleted online
|
||||
* Fix Bug: Fix that --get-sharepoint-drive-id does not handle a SharePoint site with more than 200 entries
|
||||
* Fix Bug: Fix that 'sync_list' does not include files that should be included, when specified just as *.ext_type
|
||||
* Fix Bug: Fix 'sync_list' processing so that '.folder_name' is excluded but 'folder_name' is included
|
||||
|
||||
### Added
|
||||
* Implement Feature Request: Multi-threaded uploading/downloading of files
|
||||
* Implement Feature Request: Renaming/Relocation of OneDrive Business shared folders
|
||||
* Implement Feature Request: Support the syncing of individual business shared files
|
||||
* Implement Feature Request: Implement application output to detail upload|download failures at the end of a sync process
|
||||
* Implement Feature Request: Log when manual Authorization is required when using --auth-files
|
||||
* Implement Feature Request: Add cmdline parameter to display (human readable) quota status
|
||||
* Implement Feature Request: Add capability to disable 'fullscan_frequency'
|
||||
* Implement Feature Request: Ability to set --disable-download-validation from Docker environment variable
|
||||
* Implement Feature Request: Ability to set --sync-shared-files from Docker environment variable
|
||||
* Implement Feature Request: file sync (upload/download/delete) notifications
|
||||
|
||||
### Updated
|
||||
* Overhauled all documentation
|
||||
|
||||
## 2.4.25 - 2023-06-21
|
||||
|
||||
### Fixed
|
||||
* Fixed that the application was reporting as v2.2.24 when in fact it was v2.4.24 (release tagging issue)
|
||||
* Fixed that the running version obsolete flag (due to above issue) was causing a false flag as being obsolete
|
||||
|
|
@ -24,7 +100,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
* Fix error that the application is unable to perform a database vacuum: out of memory when exiting
|
||||
|
||||
### Removed
|
||||
* Remove sha1 from being used by the client as this is being depreciated by Microsoft in July 2023
|
||||
* Remove sha1 from being used by the client as this is being deprecated by Microsoft in July 2023
|
||||
* Complete the removal of crc32 elements
|
||||
|
||||
### Added
|
||||
|
|
@ -293,7 +369,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
* Added support in --get-O365-drive-id to provide the 'drive_id' for multiple 'document libraries' within a single Shared Library Site
|
||||
|
||||
### Removed
|
||||
* Removed the depreciated config option 'force_http_11' which was flagged as depreciated by PR #549 in v2.3.6 (June 2019)
|
||||
* Removed the deprecated config option 'force_http_11' which was flagged as deprecated by PR #549 in v2.3.6 (June 2019)
|
||||
|
||||
### Updated
|
||||
* Updated error output of --get-O365-drive-id to provide more details why an error occurred if a SharePoint site lacks the details we need to perform the match
|
||||
|
|
@ -673,7 +749,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
|
|||
* Added --force-http-2 to use HTTP/2 if desired
|
||||
|
||||
### Changed
|
||||
* Depreciated --force-http-1.1 (enabled by default) due to OneDrive inconsistent behavior with HTTP/2 protocol
|
||||
* Deprecated --force-http-1.1 (enabled by default) due to OneDrive inconsistent behavior with HTTP/2 protocol
|
||||
|
||||
## 2.3.5 - 2019-06-19
|
||||
### Fixed
|
||||
15
config
|
|
@ -3,7 +3,7 @@
|
|||
# with their default values.
|
||||
# All values need to be enclosed in quotes
|
||||
# When changing a config option below, remove the '#' from the start of the line
|
||||
# For explanations of all config options below see docs/USAGE.md or the man page.
|
||||
# For explanations of all config options below see docs/usage.md or the man page.
|
||||
#
|
||||
# sync_dir = "~/OneDrive"
|
||||
# skip_file = "~*|.~*|*.tmp"
|
||||
|
|
@ -40,22 +40,19 @@
|
|||
# bypass_data_preservation = "false"
|
||||
# azure_ad_endpoint = ""
|
||||
# azure_tenant_id = "common"
|
||||
# sync_business_shared_folders = "false"
|
||||
# sync_business_shared_items = "false"
|
||||
# sync_dir_permissions = "700"
|
||||
# sync_file_permissions = "600"
|
||||
# rate_limit = "131072"
|
||||
# operation_timeout = "3600"
|
||||
# webhook_enabled = "false"
|
||||
# webhook_public_url = ""
|
||||
# webhook_listening_host = ""
|
||||
# webhook_listening_port = "8888"
|
||||
# webhook_expiration_interval = "86400"
|
||||
# webhook_renewal_interval = "43200"
|
||||
# webhook_expiration_interval = "600"
|
||||
# webhook_renewal_interval = "300"
|
||||
# webhook_retry_interval = "60"
|
||||
# space_reservation = "50"
|
||||
# display_running_config = "false"
|
||||
# read_only_auth_scope = "false"
|
||||
# cleanup_local_files = "false"
|
||||
# operation_timeout = "3600"
|
||||
# dns_timeout = "60"
|
||||
# connect_timeout = "10"
|
||||
# data_timeout = "600"
|
||||
# ip_protocol_version = "0"
|
||||
|
|
|
|||
22
configure
vendored
|
|
@ -1,6 +1,6 @@
|
|||
#! /bin/sh
|
||||
# Guess values for system-dependent variables and create Makefiles.
|
||||
# Generated by GNU Autoconf 2.69 for onedrive v2.4.25.
|
||||
# Generated by GNU Autoconf 2.69 for onedrive v2.5.0.
|
||||
#
|
||||
# Report bugs to <https://github.com/abraunegg/onedrive>.
|
||||
#
|
||||
|
|
@ -579,8 +579,8 @@ MAKEFLAGS=
|
|||
# Identity of this package.
|
||||
PACKAGE_NAME='onedrive'
|
||||
PACKAGE_TARNAME='onedrive'
|
||||
PACKAGE_VERSION='v2.4.25'
|
||||
PACKAGE_STRING='onedrive v2.4.25'
|
||||
PACKAGE_VERSION='v2.5.0'
|
||||
PACKAGE_STRING='onedrive v2.5.0'
|
||||
PACKAGE_BUGREPORT='https://github.com/abraunegg/onedrive'
|
||||
PACKAGE_URL=''
|
||||
|
||||
|
|
@ -1219,7 +1219,7 @@ if test "$ac_init_help" = "long"; then
|
|||
# Omit some internal or obsolete options to make the list less imposing.
|
||||
# This message is too long to be a string in the A/UX 3.1 sh.
|
||||
cat <<_ACEOF
|
||||
\`configure' configures onedrive v2.4.25 to adapt to many kinds of systems.
|
||||
\`configure' configures onedrive v2.5.0 to adapt to many kinds of systems.
|
||||
|
||||
Usage: $0 [OPTION]... [VAR=VALUE]...
|
||||
|
||||
|
|
@ -1280,7 +1280,7 @@ fi
|
|||
|
||||
if test -n "$ac_init_help"; then
|
||||
case $ac_init_help in
|
||||
short | recursive ) echo "Configuration of onedrive v2.4.25:";;
|
||||
short | recursive ) echo "Configuration of onedrive v2.5.0:";;
|
||||
esac
|
||||
cat <<\_ACEOF
|
||||
|
||||
|
|
@ -1393,7 +1393,7 @@ fi
|
|||
test -n "$ac_init_help" && exit $ac_status
|
||||
if $ac_init_version; then
|
||||
cat <<\_ACEOF
|
||||
onedrive configure v2.4.25
|
||||
onedrive configure v2.5.0
|
||||
generated by GNU Autoconf 2.69
|
||||
|
||||
Copyright (C) 2012 Free Software Foundation, Inc.
|
||||
|
|
@ -1410,7 +1410,7 @@ cat >config.log <<_ACEOF
|
|||
This file contains any messages produced by compilers while
|
||||
running configure, to aid debugging if configure makes a mistake.
|
||||
|
||||
It was created by onedrive $as_me v2.4.25, which was
|
||||
It was created by onedrive $as_me v2.5.0, which was
|
||||
generated by GNU Autoconf 2.69. Invocation command line was
|
||||
|
||||
$ $0 $@
|
||||
|
|
@ -2130,7 +2130,7 @@ case $(basename $DC) in
|
|||
VERSION=`$DC --version`
|
||||
# remove everything up to first (
|
||||
VERSION=${VERSION#* (}
|
||||
# remove everthing after ):
|
||||
# remove everything after ):
|
||||
VERSION=${VERSION%%):*}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=1.18.0
|
||||
|
|
@ -2162,7 +2162,7 @@ fi
|
|||
|
||||
|
||||
|
||||
PACKAGE_DATE="June 2023"
|
||||
PACKAGE_DATE="September 2024"
|
||||
|
||||
|
||||
|
||||
|
|
@ -3159,7 +3159,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
|
|||
# report actual input values of CONFIG_FILES etc. instead of their
|
||||
# values after options handling.
|
||||
ac_log="
|
||||
This file was extended by onedrive $as_me v2.4.25, which was
|
||||
This file was extended by onedrive $as_me v2.5.0, which was
|
||||
generated by GNU Autoconf 2.69. Invocation command line was
|
||||
|
||||
CONFIG_FILES = $CONFIG_FILES
|
||||
|
|
@ -3212,7 +3212,7 @@ _ACEOF
|
|||
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
|
||||
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
|
||||
ac_cs_version="\\
|
||||
onedrive config.status v2.4.25
|
||||
onedrive config.status v2.5.0
|
||||
configured by $0, generated by GNU Autoconf 2.69,
|
||||
with options \\"\$ac_cs_config\\"
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ dnl - commit the changed files (configure.ac, configure)
|
|||
dnl - tag the release
|
||||
|
||||
AC_PREREQ([2.69])
|
||||
AC_INIT([onedrive],[v2.4.25], [https://github.com/abraunegg/onedrive], [onedrive])
|
||||
AC_INIT([onedrive],[v2.5.0], [https://github.com/abraunegg/onedrive], [onedrive])
|
||||
AC_CONFIG_SRCDIR([src/main.d])
|
||||
|
||||
|
||||
|
|
@ -101,7 +101,7 @@ case $(basename $DC) in
|
|||
VERSION=`$DC --version`
|
||||
# remove everything up to first (
|
||||
VERSION=${VERSION#* (}
|
||||
# remove everthing after ):
|
||||
# remove everything after ):
|
||||
VERSION=${VERSION%%):*}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=1.18.0
|
||||
|
|
@ -162,7 +162,7 @@ dnl value via pkg-config and put it into $def_systemdsystemunitdir
|
|||
AS_IF([test "x$with_systemdsystemunitdir" = "xyes" -o "x$with_systemdsystemunitdir" = "xauto"],
|
||||
[ dnl true part, so try to determine with pkg-config
|
||||
def_systemdsystemunitdir=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
|
||||
dnl if we cannot find it via pkg-config, *and* the user explicitely passed it in with,
|
||||
dnl if we cannot find it via pkg-config, *and* the user explicitly passed it in with,
|
||||
dnl we warn, and in all cases we unset (set to no) the respective variable
|
||||
AS_IF([test "x$def_systemdsystemunitdir" = "x"],
|
||||
[ dnl we couldn't find the default value via pkg-config
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ _onedrive()
|
|||
prev=${COMP_WORDS[COMP_CWORD-1]}
|
||||
|
||||
options='--check-for-nomount --check-for-nosync --debug-https --disable-notifications --display-config --display-sync-status --download-only --disable-upload-validation --dry-run --enable-logging --force-http-1.1 --force-http-2 --get-file-link --local-first --logout -m --monitor --no-remote-delete --print-token --reauth --resync --skip-dot-files --skip-symlinks --synchronize --upload-only -v --verbose --version -h --help'
|
||||
argopts='--create-directory --get-O365-drive-id --operation-timeout --remove-directory --single-directory --source-directory'
|
||||
argopts='--create-directory --get-O365-drive-id --remove-directory --single-directory --source-directory'
|
||||
|
||||
# Loop on the arguments to manage conflicting options
|
||||
for (( i=0; i < ${#COMP_WORDS[@]}-1; i++ )); do
|
||||
|
|
@ -34,7 +34,7 @@ _onedrive()
|
|||
fi
|
||||
return 0
|
||||
;;
|
||||
--create-directory|--get-O365-drive-id|--operation-timeout|--remove-directory|--single-directory|--source-directory)
|
||||
--create-directory|--get-O365-drive-id|--remove-directory|--single-directory|--source-directory)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ complete -c onedrive -l local-first -d 'Synchronize from the local directory sou
|
|||
complete -c onedrive -l logout -d 'Logout the current user.'
|
||||
complete -c onedrive -n "not __fish_seen_subcommand_from --synchronize" -a "-m --monitor" -d 'Keep monitoring for local and remote changes.'
|
||||
complete -c onedrive -l no-remote-delete -d 'Do not delete local file deletes from OneDrive when using --upload-only.'
|
||||
complete -c onedrive -l operation-timeout -d 'Specify the maximum amount of time (in seconds) an operation is allowed to take.'
|
||||
complete -c onedrive -l print-token -d 'Print the access token, useful for debugging.'
|
||||
complete -c onedrive -l remote-directory -d 'Remove a directory on OneDrive - no sync will be performed.'
|
||||
complete -c onedrive -l reauth -d 'Reauthenticate the client with OneDrive.'
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ all_opts=(
|
|||
'--logout[Logout the current user]'
|
||||
'(-m --monitor)'{-m,--monitor}'[Keep monitoring for local and remote changes]'
|
||||
'--no-remote-delete[Do not delete local file deletes from OneDrive when using --upload-only]'
|
||||
'--operation-timeout[Specify the maximum amount of time (in seconds) an operation is allowed to take.]:seconds:'
|
||||
'--print-token[Print the access token, useful for debugging]'
|
||||
'--reauth[Reauthenticate the client with OneDrive]'
|
||||
'--resync[Forget the last saved state, perform a full sync]'
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG FEDORA_VERSION=38
|
||||
ARG FEDORA_VERSION=40
|
||||
ARG DEBIAN_VERSION=bullseye
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
ARG GO_VERSION=1.22
|
||||
ARG GOSU_VERSION=1.17
|
||||
|
||||
FROM golang:${GO_VERSION}-${DEBIAN_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG ALPINE_VERSION=3.18
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
ARG ALPINE_VERSION=3.20
|
||||
ARG GO_VERSION=1.22
|
||||
ARG GOSU_VERSION=1.17
|
||||
|
||||
FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ RUN apt-get clean \
|
|||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN ./configure DC=/usr/bin/ldmd2 \
|
||||
RUN ./configure \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
|
|
|||
|
|
@ -7,126 +7,166 @@ set +H -euo pipefail
|
|||
|
||||
# Create new group using target GID
|
||||
if ! odgroup="$(getent group "$ONEDRIVE_GID")"; then
|
||||
odgroup='onedrive'
|
||||
groupadd "${odgroup}" -g "$ONEDRIVE_GID"
|
||||
odgroup='onedrive'
|
||||
groupadd "${odgroup}" -g "$ONEDRIVE_GID"
|
||||
else
|
||||
odgroup=${odgroup%%:*}
|
||||
odgroup=${odgroup%%:*}
|
||||
fi
|
||||
|
||||
# Create new user using target UID
|
||||
if ! oduser="$(getent passwd "$ONEDRIVE_UID")"; then
|
||||
oduser='onedrive'
|
||||
useradd -m "${oduser}" -u "$ONEDRIVE_UID" -g "$ONEDRIVE_GID"
|
||||
oduser='onedrive'
|
||||
useradd -m "${oduser}" -u "$ONEDRIVE_UID" -g "$ONEDRIVE_GID"
|
||||
else
|
||||
oduser="${oduser%%:*}"
|
||||
usermod -g "${odgroup}" "${oduser}"
|
||||
grep -qv root <( groups "${oduser}" ) || { echo 'ROOT level privileges prohibited!'; exit 1; }
|
||||
oduser="${oduser%%:*}"
|
||||
usermod -g "${odgroup}" "${oduser}"
|
||||
fi
|
||||
|
||||
# Root privilege check
|
||||
# Containers should not be run as 'root', but allow via environment variable override
|
||||
if [ "${ONEDRIVE_RUNAS_ROOT:=0}" == "1" ]; then
|
||||
echo "# Running container as root due to environment variable override"
|
||||
oduser='root'
|
||||
odgroup='root'
|
||||
else
|
||||
grep -qv root <( groups "${oduser}" ) || { echo 'ROOT level privileges prohibited!'; exit 1; }
|
||||
echo "# Running container as user: ${oduser}"
|
||||
fi
|
||||
|
||||
# Default parameters
|
||||
ARGS=(--monitor --confdir /onedrive/conf --syncdir /onedrive/data)
|
||||
echo "Base Args: ${ARGS}"
|
||||
echo "# Base Args: ${ARGS}"
|
||||
|
||||
# Make Verbose output optional, based on an environment variable
|
||||
if [ "${ONEDRIVE_VERBOSE:=0}" == "1" ]; then
|
||||
echo "# We are being verbose"
|
||||
echo "# Adding --verbose"
|
||||
ARGS=(--verbose ${ARGS[@]})
|
||||
echo "# We are being verbose"
|
||||
echo "# Adding --verbose"
|
||||
ARGS=(--verbose ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform debug output, based on an environment variable
|
||||
if [ "${ONEDRIVE_DEBUG:=0}" == "1" ]; then
|
||||
echo "# We are performing debug output"
|
||||
echo "# Adding --verbose --verbose"
|
||||
ARGS=(--verbose --verbose ${ARGS[@]})
|
||||
echo "# We are performing debug output"
|
||||
echo "# Adding --verbose --verbose"
|
||||
ARGS=(--verbose --verbose ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform HTTPS debug output, based on an environment variable
|
||||
if [ "${ONEDRIVE_DEBUG_HTTPS:=0}" == "1" ]; then
|
||||
echo "# We are performing HTTPS debug output"
|
||||
echo "# Adding --debug-https"
|
||||
ARGS=(--debug-https ${ARGS[@]})
|
||||
echo "# We are performing HTTPS debug output"
|
||||
echo "# Adding --debug-https"
|
||||
ARGS=(--debug-https ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform a resync based on environment variable
|
||||
if [ "${ONEDRIVE_RESYNC:=0}" == "1" ]; then
|
||||
echo "# We are performing a --resync"
|
||||
echo "# Adding --resync --resync-auth"
|
||||
ARGS=(--resync --resync-auth ${ARGS[@]})
|
||||
echo "# We are performing a --resync"
|
||||
echo "# Adding --resync --resync-auth"
|
||||
ARGS=(--resync --resync-auth ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in download-only mode based on environment variable
|
||||
if [ "${ONEDRIVE_DOWNLOADONLY:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in download-only mode"
|
||||
echo "# Adding --download-only"
|
||||
ARGS=(--download-only ${ARGS[@]})
|
||||
echo "# We are synchronizing in download-only mode"
|
||||
echo "# Adding --download-only"
|
||||
ARGS=(--download-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in upload-only mode based on environment variable
|
||||
if [ "${ONEDRIVE_UPLOADONLY:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in upload-only mode"
|
||||
echo "# Adding --upload-only"
|
||||
ARGS=(--upload-only ${ARGS[@]})
|
||||
echo "# We are synchronizing in upload-only mode"
|
||||
echo "# Adding --upload-only"
|
||||
ARGS=(--upload-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in no-remote-delete mode based on environment variable
|
||||
if [ "${ONEDRIVE_NOREMOTEDELETE:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in no-remote-delete mode"
|
||||
echo "# Adding --no-remote-delete"
|
||||
ARGS=(--no-remote-delete ${ARGS[@]})
|
||||
echo "# We are synchronizing in no-remote-delete mode"
|
||||
echo "# Adding --no-remote-delete"
|
||||
ARGS=(--no-remote-delete ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to logout based on environment variable
|
||||
if [ "${ONEDRIVE_LOGOUT:=0}" == "1" ]; then
|
||||
echo "# We are logging out"
|
||||
echo "# Adding --logout"
|
||||
ARGS=(--logout ${ARGS[@]})
|
||||
echo "# We are logging out"
|
||||
echo "# Adding --logout"
|
||||
ARGS=(--logout ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to re-authenticate based on environment variable
|
||||
if [ "${ONEDRIVE_REAUTH:=0}" == "1" ]; then
|
||||
echo "# We are logging out to perform a reauthentication"
|
||||
echo "# Adding --reauth"
|
||||
ARGS=(--reauth ${ARGS[@]})
|
||||
echo "# We are logging out to perform a reauthentication"
|
||||
echo "# Adding --reauth"
|
||||
ARGS=(--reauth ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to utilize auth files at the provided locations based on environment variable
|
||||
if [ -n "${ONEDRIVE_AUTHFILES:=""}" ]; then
|
||||
echo "# We are using auth files to perform authentication"
|
||||
echo "# Adding --auth-files ARG"
|
||||
ARGS=(--auth-files ${ONEDRIVE_AUTHFILES} ${ARGS[@]})
|
||||
echo "# We are using auth files to perform authentication"
|
||||
echo "# Adding --auth-files ARG"
|
||||
ARGS=(--auth-files ${ONEDRIVE_AUTHFILES} ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to utilize provided auth response based on environment variable
|
||||
if [ -n "${ONEDRIVE_AUTHRESPONSE:=""}" ]; then
|
||||
echo "# We are providing the auth response directly to perform authentication"
|
||||
echo "# Adding --auth-response ARG"
|
||||
ARGS=(--auth-response \"${ONEDRIVE_AUTHRESPONSE}\" ${ARGS[@]})
|
||||
echo "# We are providing the auth response directly to perform authentication"
|
||||
echo "# Adding --auth-response ARG"
|
||||
ARGS=(--auth-response \"${ONEDRIVE_AUTHRESPONSE}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to print the running configuration at application startup
|
||||
if [ "${ONEDRIVE_DISPLAY_CONFIG:=0}" == "1" ]; then
|
||||
echo "# We are printing the application running configuration at application startup"
|
||||
echo "# Adding --display-running-config"
|
||||
ARGS=(--display-running-config ${ARGS[@]})
|
||||
echo "# We are printing the application running configuration at application startup"
|
||||
echo "# Adding --display-running-config"
|
||||
ARGS=(--display-running-config ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to use sync single dir option
|
||||
if [ -n "${ONEDRIVE_SINGLE_DIRECTORY:=""}" ]; then
|
||||
echo "# We are synchronizing in single-directory mode"
|
||||
echo "# Adding --single-directory ARG"
|
||||
ARGS=(--single-directory \"${ONEDRIVE_SINGLE_DIRECTORY}\" ${ARGS[@]})
|
||||
echo "# We are synchronizing in single-directory mode"
|
||||
echo "# Adding --single-directory ARG"
|
||||
ARGS=(--single-directory \"${ONEDRIVE_SINGLE_DIRECTORY}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client run in dry-run mode
|
||||
if [ "${ONEDRIVE_DRYRUN:=0}" == "1" ]; then
|
||||
echo "# We are running in dry-run mode"
|
||||
echo "# Adding --dry-run"
|
||||
ARGS=(--dry-run ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to disable download validation
|
||||
if [ "${ONEDRIVE_DISABLE_DOWNLOAD_VALIDATION:=0}" == "1" ]; then
|
||||
echo "# We are disabling the download integrity checks performed by this client"
|
||||
echo "# Adding --disable-download-validation"
|
||||
ARGS=(--disable-download-validation ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to disable upload validation
|
||||
if [ "${ONEDRIVE_DISABLE_UPLOAD_VALIDATION:=0}" == "1" ]; then
|
||||
echo "# We are disabling the upload integrity checks performed by this client"
|
||||
echo "# Adding --disable-upload-validation"
|
||||
ARGS=(--disable-upload-validation ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to download OneDrive Business Shared Files if 'sync_business_shared_items' option has been enabled in the configuration files
|
||||
if [ "${ONEDRIVE_SYNC_SHARED_FILES:=0}" == "1" ]; then
|
||||
echo "# We are attempting to sync OneDrive Business Shared Files if 'sync_business_shared_items' has been enabled in the config file"
|
||||
echo "# Adding --sync-shared-files"
|
||||
ARGS=(--sync-shared-files ${ARGS[@]})
|
||||
fi
|
||||
|
||||
if [ ${#} -gt 0 ]; then
|
||||
ARGS=("${@}")
|
||||
ARGS=("${@}")
|
||||
fi
|
||||
|
||||
echo "# Launching onedrive"
|
||||
# Only switch user if not running as target uid (ie. Docker)
|
||||
if [ "$ONEDRIVE_UID" = "$(id -u)" ]; then
|
||||
/usr/local/bin/onedrive "${ARGS[@]}"
|
||||
echo "# Launching 'onedrive' as ${oduser}"
|
||||
/usr/local/bin/onedrive "${ARGS[@]}"
|
||||
else
|
||||
chown "${oduser}:${odgroup}" /onedrive/data /onedrive/conf
|
||||
exec gosu "${oduser}" /usr/local/bin/onedrive "${ARGS[@]}"
|
||||
fi
|
||||
echo "# Changing ownership permissions on /onedrive/data and /onedrive/conf to ${oduser}:${odgroup}"
|
||||
chown "${oduser}:${odgroup}" /onedrive/data /onedrive/conf
|
||||
echo "# Launching 'onedrive' as ${oduser} via gosu"
|
||||
exec gosu "${oduser}" /usr/local/bin/onedrive "${ARGS[@]}"
|
||||
fi
|
||||
|
|
@ -1,30 +1,32 @@
|
|||
pkgname=onedrive
|
||||
pkgver=@PACKAGE_VERSION@
|
||||
pkgrel=1 #patch-level (Increment this when patch is applied)
|
||||
pkgdesc="A free OneDrive Client for Linux. This is a fork of the https://github.com/skilion/onedrive repository"
|
||||
license=("unknown")
|
||||
pkgrel=1 # Patch-level (increment this when a patch is applied)
|
||||
pkgdesc="OneDrive Client for Linux"
|
||||
license=("GPL3")
|
||||
url="https://github.com/abraunegg/onedrive/"
|
||||
arch=("i686" "x86_64")
|
||||
|
||||
depends=("curl" "gcc-libs" "glibc" "sqlite")
|
||||
makedepends=("dmd" "git" "tar")
|
||||
makedepends=("dmd" "git" "tar" "make")
|
||||
|
||||
source=("https://github.com/abraunegg/onedrive/archive/v$pkgver.tar.gz")
|
||||
sha256sums=('SKIP') # Use SKIP or actual checksum
|
||||
|
||||
prepare() {
|
||||
cd "$srcdir"
|
||||
wget "https://github.com/abraunegg/onedrive/archive/v$pkgver.tar.gz" -O "$pkgname-$pkgver-patch-$pkgrel.tar.gz" #Pull last commit release
|
||||
tar -xzf "$pkgname-$pkgver-patch-$pkgrel.tar.gz" --one-top-level="$pkgname-$pkgver-patch-$pkgrel" --strip-components 1
|
||||
cd "$srcdir"
|
||||
tar -xzf "$pkgname-$pkgver.tar.gz" --one-top-level="$pkgname-$pkgver" --strip-components 1
|
||||
}
|
||||
|
||||
build() {
|
||||
cd "$pkgname-$pkgver-patch-$pkgrel"
|
||||
git init #Create .git folder required from Makefile
|
||||
git add * #Create .git/index
|
||||
git commit --allow-empty-message -m "" #Create .git/refs/heads/master
|
||||
git tag v$pkgver #Add version tag
|
||||
make PREFIX=/usr onedrive
|
||||
cd "$srcdir/$pkgname-$pkgver"
|
||||
git init
|
||||
git add .
|
||||
git commit --allow-empty-message -m ""
|
||||
git tag "v$pkgver"
|
||||
make PREFIX=/usr onedrive
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "$pkgname-$pkgver-patch-$pkgrel"
|
||||
make PREFIX=/usr DESTDIR="$pkgdir" install
|
||||
cd "$srcdir/$pkgname-$pkgver"
|
||||
make PREFIX=/usr DESTDIR="$pkgdir" install
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,9 +12,9 @@
|
|||
%endif
|
||||
|
||||
Name: onedrive
|
||||
Version: 2.4.25
|
||||
Version: 2.5.0
|
||||
Release: 1%{?dist}
|
||||
Summary: Microsoft OneDrive Client
|
||||
Summary: OneDrive Client for Linux
|
||||
Group: System Environment/Network
|
||||
License: GPLv3
|
||||
URL: https://github.com/abraunegg/onedrive
|
||||
|
|
@ -41,7 +41,7 @@ Requires(postun): initscripts
|
|||
%define debug_package %{nil}
|
||||
|
||||
%description
|
||||
Microsoft OneDrive Client for Linux
|
||||
OneDrive Client for Linux
|
||||
|
||||
%prep
|
||||
|
||||
|
|
@ -50,7 +50,7 @@ Microsoft OneDrive Client for Linux
|
|||
|
||||
%build
|
||||
# cd %{_builddir}/%{name}-%{version}
|
||||
%configure
|
||||
%configure --enable-debug --enable-notifications
|
||||
make
|
||||
|
||||
%install
|
||||
|
|
@ -59,7 +59,7 @@ make
|
|||
%clean
|
||||
|
||||
%files
|
||||
%doc README.md LICENSE CHANGELOG.md
|
||||
%doc readme.md LICENSE changelog.md
|
||||
%config %{_sysconfdir}/logrotate.d/onedrive
|
||||
%{_mandir}/man1/%{name}.1.gz
|
||||
%{_docdir}/%{name}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client
|
||||
Description=OneDrive Client for Linux
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
|
@ -21,7 +21,10 @@ RestrictRealtime=true
|
|||
ExecStart=@prefix@/bin/onedrive --monitor
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
RestartPreventExitStatus=3
|
||||
# Do not restart the service if a --resync is required which is done via a 126 exit code
|
||||
RestartPreventExitStatus=126
|
||||
# Time to wait for the service to stop gracefully before forcefully terminating it
|
||||
TimeoutStopSec=90
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
|
|
@ -1,13 +1,15 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client for %i
|
||||
Description=OneDrive Client for Linux running for %i
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
# Commented out hardenings are disabled because they don't work out of the box.
|
||||
# Commented out hardenings are disabled because they may not work out of the box on your distribution
|
||||
# If you know what you are doing please try to enable them.
|
||||
|
||||
ProtectSystem=full
|
||||
|
||||
#PrivateDevices=true
|
||||
ProtectHostname=true
|
||||
#ProtectClock=true
|
||||
|
|
@ -21,7 +23,10 @@ User=%i
|
|||
Group=users
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
RestartPreventExitStatus=3
|
||||
# Do not restart the service if a --resync is required which is done via a 126 exit code
|
||||
RestartPreventExitStatus=126
|
||||
# Time to wait for the service to stop gracefully before forcefully terminating it
|
||||
TimeoutStopSec=90
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
|
|
|||
|
|
@ -1,192 +0,0 @@
|
|||
# How to configure OneDrive Business Shared Folder Sync
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
Syncing OneDrive Business Shared Folders requires additional configuration for your 'onedrive' client:
|
||||
1. List available shared folders to determine which folder you wish to sync & to validate that you have access to that folder
|
||||
2. Create a new file called 'business_shared_folders' in your config directory which contains a list of the shared folders you wish to sync
|
||||
3. Test the configuration using '--dry-run'
|
||||
4. Sync the OneDrive Business Shared folders as required
|
||||
|
||||
## Listing available OneDrive Business Shared Folders
|
||||
List the available OneDrive Business Shared folders with the following command:
|
||||
```text
|
||||
onedrive --list-shared-folders
|
||||
```
|
||||
This will return a listing of all OneDrive Business Shared folders which have been shared with you and by whom. This is important for conflict resolution:
|
||||
```text
|
||||
Initializing the Synchronization Engine ...
|
||||
|
||||
Listing available OneDrive Business Shared Folders:
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder0
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder1
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder2
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder0
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder1
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder2
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
...
|
||||
```
|
||||
|
||||
## Configuring OneDrive Business Shared Folders
|
||||
1. Create a new file called 'business_shared_folders' in your config directory
|
||||
2. On each new line, list the OneDrive Business Shared Folder you wish to sync
|
||||
```text
|
||||
[alex@centos7full onedrive]$ cat ~/.config/onedrive/business_shared_folders
|
||||
# comment
|
||||
Child Shared Folder
|
||||
# Another comment
|
||||
Top Level to Share
|
||||
[alex@centos7full onedrive]$
|
||||
```
|
||||
3. Validate your configuration with `onedrive --display-config`:
|
||||
```text
|
||||
Configuration file successfully loaded
|
||||
onedrive version = v2.4.3
|
||||
Config path = /home/alex/.config/onedrive-business/
|
||||
Config file found in config path = true
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDriveBusiness
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = true
|
||||
business_shared_folders contents:
|
||||
# comment
|
||||
Child Shared Folder
|
||||
# Another comment
|
||||
Top Level to Share
|
||||
```
|
||||
|
||||
## Performing a sync of OneDrive Business Shared Folders
|
||||
Perform a standalone sync using the following command: `onedrive --synchronize --sync-shared-folders --verbose`:
|
||||
```text
|
||||
onedrive --synchronize --sync-shared-folders --verbose
|
||||
Using 'user' Config Dir: /home/alex/.config/onedrive-business/
|
||||
Using 'system' Config Dir:
|
||||
Configuration file successfully loaded
|
||||
Initializing the OneDrive API ...
|
||||
Configuring Global Azure AD Endpoints
|
||||
Opening the item database ...
|
||||
All operations will be performed in: /home/alex/OneDriveBusiness
|
||||
Application version: v2.4.3
|
||||
Account Type: business
|
||||
Default Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Default Root ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Remaining Free Space: 1098316220277
|
||||
Fetching details for OneDrive Root
|
||||
OneDrive Root exists in the database
|
||||
Initializing the Synchronization Engine ...
|
||||
Syncing changes from OneDrive ...
|
||||
Applying changes of Path ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Number of items from OneDrive to process: 0
|
||||
Attempting to sync OneDrive Business Shared Folders
|
||||
Syncing this OneDrive Business Shared Folder: Child Shared Folder
|
||||
OneDrive Business Shared Folder - Shared By: test user
|
||||
Applying changes of Path ID: 01JRXHEZMREEB3EJVHNVHKNN454Q7DFXPR
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/SMPP_Shared
|
||||
Processing 11 OneDrive items to ensure consistent local state
|
||||
Syncing this OneDrive Business Shared Folder: Top Level to Share
|
||||
OneDrive Business Shared Folder - Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
Applying changes of Path ID: 01JRXHEZLRMXHKBYZNOBF3TQOPBXS3VZMA
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 3 OneDrive items for processing from /Top Level to Share/10-Files
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Images
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/JPG
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/PNG
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/SMPP
|
||||
Processing 31 OneDrive items to ensure consistent local state
|
||||
Uploading differences of ~/OneDriveBusiness
|
||||
Processing root
|
||||
The directory has not changed
|
||||
Processing SMPP_Local
|
||||
The directory has not changed
|
||||
Processing SMPP-IF-SPEC_v3_3-24858.pdf
|
||||
The file has not changed
|
||||
Processing SMPP_v3_4_Issue1_2-24857.pdf
|
||||
The file has not changed
|
||||
Processing new_local_file.txt
|
||||
The file has not changed
|
||||
Processing root
|
||||
The directory has not changed
|
||||
...
|
||||
The directory has not changed
|
||||
Processing week02-03-Combinational_Logic-v1.pptx
|
||||
The file has not changed
|
||||
Uploading new items of ~/OneDriveBusiness
|
||||
Applying changes of Path ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Number of items from OneDrive to process: 0
|
||||
Attempting to sync OneDrive Business Shared Folders
|
||||
Syncing this OneDrive Business Shared Folder: Child Shared Folder
|
||||
OneDrive Business Shared Folder - Shared By: test user
|
||||
Applying changes of Path ID: 01JRXHEZMREEB3EJVHNVHKNN454Q7DFXPR
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/SMPP_Shared
|
||||
Processing 11 OneDrive items to ensure consistent local state
|
||||
Syncing this OneDrive Business Shared Folder: Top Level to Share
|
||||
OneDrive Business Shared Folder - Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
Applying changes of Path ID: 01JRXHEZLRMXHKBYZNOBF3TQOPBXS3VZMA
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 3 OneDrive items for processing from /Top Level to Share/10-Files
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Images
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/JPG
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/PNG
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/SMPP
|
||||
Processing 31 OneDrive items to ensure consistent local state
|
||||
```
|
||||
|
||||
**Note:** Whenever you modify the `business_shared_folders` file you must perform a `--resync` of your database to clean up stale entries due to changes in your configuration.
|
||||
|
||||
## Enable / Disable syncing of OneDrive Business Shared Folders
|
||||
Performing a sync of the configured OneDrive Business Shared Folders can be enabled / disabled via adding the following to your configuration file.
|
||||
|
||||
### Enable syncing of OneDrive Business Shared Folders via config file
|
||||
```text
|
||||
sync_business_shared_folders = "true"
|
||||
```
|
||||
|
||||
### Disable syncing of OneDrive Business Shared Folders via config file
|
||||
```text
|
||||
sync_business_shared_folders = "false"
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
Shared folders, shared with you from people outside of your 'organisation' are unable to be synced. This is due to the Microsoft Graph API not presenting these folders.
|
||||
|
||||
Shared folders that match this scenario, when you view 'Shared' via OneDrive online, will have a 'world' symbol as per below:
|
||||
|
||||

|
||||
|
||||
This issue is being tracked by: [#966](https://github.com/abraunegg/onedrive/issues/966)
|
||||
277
docs/INSTALL.md
|
|
@ -1,277 +0,0 @@
|
|||
# Installing or Upgrading using Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
|
||||
## Installing or Upgrading using Distribution Packages
|
||||
This project has been packaged for the following Linux distributions as per below. The current client release is: [](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
Only the current release version or greater is supported. Earlier versions are not supported and should not be installed or used.
|
||||
|
||||
#### Important Note:
|
||||
Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If any package version indicator below is 'red' for your distribution, it is recommended that you build from source. Do not install the software from the available distribution package. If a package is out of date, please contact the package maintainer for resolution.
|
||||
|
||||
| Distribution | Package Name & Package Link | PKG_Version | i686 | x86_64 | ARMHF | AARCH64 | Extra Details |
|
||||
|---------------------------------|------------------------------------------------------------------------------|:---------------:|:----:|:------:|:-----:|:-------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Alpine Linux | [onedrive](https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge) |<a href="https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge"><img src="https://repology.org/badge/version-for-repo/alpine_edge/onedrive.svg?header=" alt="Alpine Linux Edge package" width="46" height="20"></a>|❌|✔|❌|✔ | |
|
||||
| Arch Linux<br><br>Manjaro Linux | [onedrive-abraunegg](https://aur.archlinux.org/packages/onedrive-abraunegg/) |<a href="https://aur.archlinux.org/packages/onedrive-abraunegg"><img src="https://repology.org/badge/version-for-repo/aur/onedrive-abraunegg.svg?header=" alt="AUR package" width="46" height="20"></a>|✔|✔|✔|✔ | Install via: `pamac build onedrive-abraunegg` from the Arch Linux User Repository (AUR)<br><br>**Note:** If asked regarding a provider for 'd-runtime' and 'd-compiler', select 'liblphobos' and 'ldc'<br><br>**Note:** System must have at least 1GB of memory & 1GB swap space
|
||||
| Debian 11 | [onedrive](https://packages.debian.org/bullseye/source/onedrive) |<a href="https://packages.debian.org/bullseye/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_11/onedrive.svg?header=" alt="Debian 11 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 11 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian 12 | [onedrive](https://packages.debian.org/bookworm/source/onedrive) |<a href="https://packages.debian.org/bookworm/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_12/onedrive.svg?header=" alt="Debian 12 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 12 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Fedora | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/fedora_rawhide/onedrive.svg?header=" alt="Fedora Rawhide package" width="46" height="20"></a>|✔|✔|✔|✔| |
|
||||
| Gentoo | [onedrive](https://gpo.zugaina.org/net-misc/onedrive) | No API Available |✔|✔|❌|❌| |
|
||||
| Homebrew | [onedrive](https://formulae.brew.sh/formula/onedrive) | <a href="https://formulae.brew.sh/formula/onedrive"><img src="https://repology.org/badge/version-for-repo/homebrew/onedrive.svg?header=" alt="Homebrew package" width="46" height="20"></a> |❌|✔|❌|❌| |
|
||||
| Linux Mint 20.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Linux Mint 21.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| NixOS | [onedrive](https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive)|<a href="https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive"><img src="https://repology.org/badge/version-for-repo/nix_unstable/onedrive.svg?header=" alt="nixpkgs unstable package" width="46" height="20"></a>|❌|✔|❌|❌| Use package `onedrive` either by adding it to `configuration.nix` or by using the command `nix-env -iA <channel name>.onedrive`. This does not install a service. To install a service, use unstable channel (will stabilize in 20.09) and add `services.onedrive.enable=true` in `configuration.nix`. You can also add a custom package using the `services.onedrive.package` option (recommended since package lags upstream). Enabling the service installs a default package too (based on the channel). You can also add multiple onedrive accounts trivially, see [documentation](https://github.com/NixOS/nixpkgs/pull/77734#issuecomment-575874225). |
|
||||
| OpenSuSE | [onedrive](https://software.opensuse.org/package/onedrive) |<a href="https://software.opensuse.org/package/onedrive"><img src="https://repology.org/badge/version-for-repo/opensuse_network_tumbleweed/onedrive.svg?header=" alt="openSUSE Tumbleweed package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| OpenSuSE Build Service | [onedrive](https://build.opensuse.org/package/show/home:npreining:debian-ubuntu-onedrive/onedrive) | No API Available |✔|✔|✔|✔| Package Build Service for Debian and Ubuntu |
|
||||
| Raspbian | [onedrive](https://archive.raspbian.org/raspbian/pool/main/o/onedrive/) |<a href="https://archive.raspbian.org/raspbian/pool/main/o/onedrive/"><img src="https://repology.org/badge/version-for-repo/raspbian_stable/onedrive.svg?header=" alt="Raspbian Stable package" width="46" height="20"></a> |❌|❌|✔|✔| **Note:** Do not install from Raspbian Package Repositories<br><br>It is recommended that for Raspbian that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Slackware | [onedrive](https://slackbuilds.org/result/?search=onedrive&sv=) |<a href="https://slackbuilds.org/result/?search=onedrive&sv="><img src="https://repology.org/badge/version-for-repo/slackbuilds/onedrive.svg?header=" alt="SlackBuilds package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Solus | [onedrive](https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R) |<a href="https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R"><img src="https://repology.org/badge/version-for-repo/solus/onedrive.svg?header=" alt="Solus package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Ubuntu 20.04 | [onedrive](https://packages.ubuntu.com/focal/onedrive) |<a href="https://packages.ubuntu.com/focal/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 22.04 | [onedrive](https://packages.ubuntu.com/jammy/onedrive) |<a href="https://packages.ubuntu.com/jammy/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 23.04 | [onedrive](https://packages.ubuntu.com/lunar/onedrive) |<a href="https://packages.ubuntu.com/lunar/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_23_04/onedrive.svg?header=" alt="Ubuntu 23.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Void Linux | [onedrive](https://voidlinux.org/packages/?arch=x86_64&q=onedrive) |<a href="https://voidlinux.org/packages/?arch=x86_64&q=onedrive"><img src="https://repology.org/badge/version-for-repo/void_x86_64/onedrive.svg?header=" alt="Void Linux x86_64 package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
|
||||
#### Important information for all Ubuntu and Ubuntu based distribution users:
|
||||
This information is specifically for the following platforms and distributions:
|
||||
* Ubuntu
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
|
||||
Whilst there are [onedrive](https://packages.ubuntu.com/search?keywords=onedrive&searchon=names&suite=all§ion=all) Universe packages available for Ubuntu, do not install 'onedrive' from these Universe packages. The default Universe packages are out-of-date and are not supported and should not be used. If you wish to use a package, it is highly recommended that you utilise the [OpenSuSE Build Service](ubuntu-package-install.md) to install packages for these platforms. If the OpenSuSE Build Service does not cater for your version, your only option is to build from source.
|
||||
|
||||
If you wish to change this situation so that you can just use the Universe packages via 'apt install onedrive', consider becoming the Ubuntu package maintainer and contribute back to your community.
|
||||
|
||||
## Building from Source - High Level Requirements
|
||||
* Build environment must have at least 1GB of memory & 1GB swap space
|
||||
* Install the required distribution package dependencies
|
||||
* [libcurl](http://curl.haxx.se/libcurl/)
|
||||
* [SQLite 3](https://www.sqlite.org/) >= 3.7.15
|
||||
* [Digital Mars D Compiler (DMD)](http://dlang.org/download.html) or [LDC – the LLVM-based D Compiler](https://github.com/ldc-developers/ldc)
|
||||
|
||||
**Note:** DMD version >= 2.088.0 or LDC version >= 1.18.0 is required to compile this application
|
||||
|
||||
### Example for installing DMD Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
|
||||
### Example for installing LDC Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s ldc
|
||||
```
|
||||
|
||||
## Distribution Package Dependencies
|
||||
### Dependencies: Ubuntu 16.x
|
||||
Ubuntu Linux 16.x LTS reached the end of its five-year LTS window on April 30th 2021 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 18.x / Lubuntu 18.x
|
||||
Ubuntu Linux 18.x LTS reached the end of its five-year LTS window on May 31st 2023 and is no longer supported.
|
||||
|
||||
### Dependencies: Debian 9
|
||||
Debian 9 reached the end of its five-year support window on June 30th 2022 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 20.x -> Ubuntu 23.x / Debian 10 -> Debian 12 - x86_64
|
||||
These dependencies are also applicable for all Ubuntu based distributions such as:
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: CentOS 6.x / RHEL 6.x
|
||||
CentOS 6.x and RHEL 6.x reached End of Life status on November 30th 2020 and is no longer supported.
|
||||
|
||||
### Dependencies: Fedora < Version 18 / CentOS 7.x / RHEL 7.x
|
||||
```text
|
||||
sudo yum groupinstall 'Development Tools'
|
||||
sudo yum install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd-2.099.0
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo yum install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Fedora > Version 18 / CentOS 8.x / RHEL 8.x / RHEL 9.x
|
||||
```text
|
||||
sudo dnf groupinstall 'Development Tools'
|
||||
sudo dnf install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo dnf install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Arch Linux & Manjaro Linux
|
||||
```text
|
||||
sudo pacman -S make pkg-config curl sqlite ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo pacman -S libnotify
|
||||
```
|
||||
|
||||
### Dependencies: Raspbian (ARMHF) and Ubuntu 22.x / Debian 11 / Debian 12 / Raspbian (ARM64)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
These instructions were validated using:
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-armhf-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-arm64-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux ubuntu 5.15.0-1005-raspi #5-Ubuntu SMP PREEMPT Mon Apr 4 12:21:48 UTC 2022 aarch64 aarch64 aarch64 GNU/Linux` (ubuntu-22.04-preinstalled-server-arm64+raspi) using Raspberry Pi 3B (revision 1.2)
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: Gentoo
|
||||
```text
|
||||
sudo emerge app-portage/layman
|
||||
sudo layman -a dlang
|
||||
```
|
||||
Add ebuild from contrib/gentoo to a local overlay to use.
|
||||
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo emerge x11-libs/libnotify
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.0
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.0/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.1
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.1/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.2
|
||||
```text
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
## Compilation & Installation
|
||||
### High Level Steps
|
||||
1. Install the platform dependencies for your Linux OS
|
||||
2. Activate your DMD or LDC compiler
|
||||
3. Clone the GitHub repository, run configure and make, then install
|
||||
4. Deactivate your DMD or LDC compiler
|
||||
|
||||
### Building using DMD Reference Compiler
|
||||
Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below:
|
||||
```text
|
||||
Run `source ~/dlang/dmd-2.088.0/activate` in your shell to use dmd-2.088.0.
|
||||
This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1.
|
||||
Run `deactivate` later on to restore your environment.
|
||||
```
|
||||
Without performing this step, the compilation process will fail.
|
||||
|
||||
**Note:** Depending on your DMD version, substitute `2.088.0` above with your DMD version that is installed.
|
||||
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure
|
||||
make clean; make;
|
||||
sudo make install
|
||||
```
|
||||
|
||||
### Build options
|
||||
Notifications can be enabled using the `configure` switch `--enable-notifications`.
|
||||
|
||||
Systemd service files are installed in the appropriate directories on the system,
|
||||
as provided by `pkg-config systemd` settings. If the need for overriding the
|
||||
deduced paths is necessary, the two options `--with-systemdsystemunitdir` (for
|
||||
the Systemd system unit location), and `--with-systemduserunitdir` (for the
|
||||
Systemd user unit location) can be specified. Passing in `no` to one of these
|
||||
options disables service file installation.
|
||||
|
||||
By passing `--enable-debug` to the `configure` call, `onedrive` gets built with additional debug
|
||||
information, useful (for example) to get `perf`-issued figures.
|
||||
|
||||
By passing `--enable-completions` to the `configure` call, shell completion functions are
|
||||
installed for `bash`, `zsh` and `fish`. The installation directories are determined
|
||||
as far as possible automatically, but can be overridden by passing
|
||||
`--with-bash-completion-dir=<DIR>`, `--with-zsh-completion-dir=<DIR>`, and
|
||||
`--with-fish-completion-dir=<DIR>` to `configure`.
|
||||
|
||||
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC))
|
||||
#### ARMHF Architecture (Raspbian) and ARM64 Architecture (Ubuntu 22.x / Debian 11 / Raspbian)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure DC=/usr/bin/ldmd2
|
||||
make clean; make
|
||||
sudo make install
|
||||
```
|
||||
|
||||
## Upgrading the client
|
||||
If you have installed the client from a distribution package, the client will be updated when the distribution package is updated by the package maintainer and will be updated to the new application version when you perform your package update.
|
||||
|
||||
If you have built the client from source, to upgrade your client, it is recommended that you first uninstall your existing 'onedrive' binary (see below), then re-install the client by re-cloning, re-compiling and re-installing the client again to install the new version.
|
||||
|
||||
**Note:** Following the uninstall process will remove all client components including *all* systemd files, including any custom files created for specific access such as SharePoint Libraries.
|
||||
|
||||
You can optionally choose to not perform this uninstallation step, and simply re-install the client by re-cloning, re-compiling and re-installing the client again - however the risk here is that you end up with two onedrive client binaries on your system, and depending on your system search path preferences, this will determine which binary is used.
|
||||
|
||||
**Important:** Before performing any upgrade, it is highly recommended for you to stop any running systemd service if applicable to ensure that these services are restarted using the updated client version.
|
||||
|
||||
Post re-install, to confirm that you have the new version of the client installed, use `onedrive --version` to determine the client version that is now installed.
|
||||
|
||||
## Uninstalling the client
|
||||
### Uninstalling the client if installed from distribution package
|
||||
Follow your distribution documentation to uninstall the package that you installed
|
||||
|
||||
### Uninstalling the client if installed and built from source
|
||||
From within your GitHub repository clone, perform the following to remove the 'onedrive' binary:
|
||||
```text
|
||||
sudo make uninstall
|
||||
```
|
||||
|
||||
If you are not upgrading your client, to remove your application state and configuration, perform the following additional step:
|
||||
```
|
||||
rm -rf ~/.config/onedrive
|
||||
```
|
||||
**Note:** If you are using the `--confdir` option, substitute `~/.config/onedrive` for the correct directory storing your client configuration.
|
||||
|
||||
If you want to just delete the application key, but keep the items database:
|
||||
```text
|
||||
rm -f ~/.config/onedrive/refresh_token
|
||||
```
|
||||
1469
docs/USAGE.md
|
|
@ -124,10 +124,12 @@ Example:
|
|||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/my-new-config"
|
||||
```
|
||||
|
||||
**Note:** When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
> [!IMPORTANT]
|
||||
> When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be manually expanded when editing your systemd file.
|
||||
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly editied, you can enable the new systemd service using the following commands.
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
|
|
@ -227,10 +229,10 @@ docker run -it --name onedrive -v onedrive_conf_sharepoint_site3:/onedrive/conf
|
|||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site50:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite50:/onedrive/data" driveone/onedrive:latest
|
||||
```
|
||||
|
||||
#### TIP
|
||||
To avoid 're-authenticating' and 'authorising' each individual Docker container, if all the Docker containers are using the 'same' OneDrive credentials, you can re-use the 'refresh_token' from one Docker container to another by copying this file to the configuration Docker volume of each Docker container.
|
||||
|
||||
If the account credentials are different .. you will need to re-authenticate each Docker container individually.
|
||||
> [!TIP]
|
||||
> To avoid 're-authenticating' and 'authorising' each individual Docker container, if all the Docker containers are using the 'same' OneDrive credentials, you can reuse the 'refresh_token' from one Docker container to another by copying this file to the configuration Docker volume of each Docker container.
|
||||
>
|
||||
> If the account credentials are different .. you will need to re-authenticate each Docker container individually.
|
||||
|
||||
## Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
When dual booting Windows and Linux, depending on the Windows OneDrive account configuration, the 'Files On-Demand' option may be enabled when running OneDrive within your Windows environment.
|
||||
|
|
@ -241,7 +243,7 @@ To fix the problem of windows turning all files (that should be kept offline) in
|
|||
|
||||
To find this setting, open the onedrive pop-up window from the taskbar, click "Help & Settings" > "Settings". This opens a new window. Go to the tab "Settings" and look for the section "Files On-Demand".
|
||||
|
||||
After unchecking the option and clicking "OK", the Windows OneDrive client should restart itself and start actually downloading your files so they will truely be available on your disk when offline. These files will then be fully accessible under Linux and the Linux OneDrive client.
|
||||
After unchecking the option and clicking "OK", the Windows OneDrive client should restart itself and start actually downloading your files so they will truly be available on your disk when offline. These files will then be fully accessible under Linux and the Linux OneDrive client.
|
||||
|
||||
| OneDrive Personal | Onedrive Business<br>SharePoint |
|
||||
|---|---|
|
||||
|
|
@ -257,12 +259,13 @@ The issue here is - how does the client react if the mount point gets removed -
|
|||
|
||||
The client has zero knowledge of any event that causes a mountpoint to become unavailable, thus, the client (if you are running as a service) will assume that you deleted the files, thus, will go ahead and delete all your files on OneDrive. This is most certainly an undesirable action.
|
||||
|
||||
There are a few options here which you can configure in your 'config' file to assist you to prevent this sort of item from occuring:
|
||||
There are a few options here which you can configure in your 'config' file to assist you to prevent this sort of item from occurring:
|
||||
1. classify_as_big_delete
|
||||
2. check_nomount
|
||||
3. check_nosync
|
||||
|
||||
**Note:** Before making any change to your configuration, stop any sync process & stop any onedrive systemd service from running.
|
||||
> [!NOTE]
|
||||
> Before making any change to your configuration, stop any sync process & stop any onedrive systemd service from running.
|
||||
|
||||
### classify_as_big_delete
|
||||
By default, this uses a value of 1000 files|folders. An undesirable unmount if you have more than 1000 files, this default level will prevent the client from executing the online delete. Modify this value up or down as desired
|
||||
|
|
@ -282,7 +285,7 @@ After making this sort of change - test with `--dry-run` so you can see the impa
|
|||
## Upload data from the local ~/OneDrive folder to a specific location on OneDrive
|
||||
In some environments, you may not want your local ~/OneDrive folder to be uploaded directly to the root of your OneDrive account online.
|
||||
|
||||
Unfortunatly, the OneDrive API lacks any facility to perform a re-direction of data during upload.
|
||||
Unfortunately, the OneDrive API lacks any facility to perform a re-direction of data during upload.
|
||||
|
||||
The workaround for this is to structure your local filesystem and reconfigure your client to achieve the desired goal.
|
||||
|
||||
|
|
|
|||
1206
docs/application-config-options.md
Normal file
|
|
@ -63,6 +63,18 @@ When these delegated API permissions are combined, these provide the effective a
|
|||
|
||||
These 'default' permissions will allow the OneDrive Client for Linux to read, write and delete data associated with your OneDrive Account.
|
||||
|
||||
## How are the Authentication Scopes used?
|
||||
|
||||
When using the OneDrive Client for Linux, the above authentication scopes will be presented to the Microsoft Authentication Service (login.microsoftonline.com), where the service will validate the request and provide an applicable token to access Microsoft OneDrive with. This can be illustrated as the following:
|
||||
|
||||

|
||||
|
||||
This is similar to the Microsoft Windows OneDrive Client:
|
||||
|
||||

|
||||
|
||||
In a business setting, IT staff who need to authorise the use of the OneDrive Client for Linux in their environment can be assured of its safety. The primary concern for IT staff should be securing the device running the OneDrive Client for Linux. Unlike in a corporate environment where Windows devices are secured through Active Directory and Group Policy Objects (GPOs) to protect corporate data on the device, it is beyond the responsibility of this client to manage security on Linux devices.
|
||||
|
||||
## Configuring read-only access to your OneDrive data
|
||||
In some situations, it may be desirable to configure the OneDrive Client for Linux totally in read-only operation.
|
||||
|
||||
|
|
@ -72,7 +84,8 @@ read_only_auth_scope = "true"
|
|||
```
|
||||
This will change the user authentication scope request to use read-only access.
|
||||
|
||||
**Note:** When changing this value, you *must* re-authenticate the client using the `--reauth` option to utilise the change in authentication scopes.
|
||||
> [!IMPORTANT]
|
||||
> When changing this value, you *must* re-authenticate the client using the `--reauth` option to utilise the change in authentication scopes.
|
||||
|
||||
When using read-only authentication scopes, the uploading of any data or local change to OneDrive will fail with the following error:
|
||||
```
|
||||
|
|
@ -88,7 +101,8 @@ As such, it is also advisable for you to add the following to your configuration
|
|||
download_only = "true"
|
||||
```
|
||||
|
||||
**Important:** Additionally when using 'read_only_auth_scope' you also will need to remove your existing application access consent otherwise old authentication consent will be valid and will be used. This will mean the application will technically have the consent to upload data. See below on how to remove your prior application consent.
|
||||
> [!IMPORTANT]
|
||||
> Additionally when using 'read_only_auth_scope' you also will need to remove your existing application access consent otherwise old authentication consent will be valid and will be used. This will mean the application will technically have the consent to upload data. See below on how to remove your prior application consent.
|
||||
|
||||
## Reviewing your existing application access consent
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
# RPM Package Build Process
|
||||
The instuctions below have been tested on the following systems:
|
||||
* CentOS 7 x86_64
|
||||
* CentOS 8 x86_64
|
||||
The instructions below have been tested on the following systems:
|
||||
* CentOS Stream release 9
|
||||
|
||||
These instructions should also be applicable for RedHat & Fedora platforms, or any other RedHat RPM based distribution.
|
||||
|
||||
## Prepare Package Development Environment (CentOS 7, 8)
|
||||
## Prepare Package Development Environment
|
||||
Install the following dependencies on your build system:
|
||||
```text
|
||||
sudo yum groupinstall -y 'Development Tools'
|
||||
|
|
@ -13,237 +12,94 @@ sudo yum install -y libcurl-devel
|
|||
sudo yum install -y sqlite-devel
|
||||
sudo yum install -y libnotify-devel
|
||||
sudo yum install -y wget
|
||||
sudo yum install -y http://downloads.dlang.org/releases/2.x/2.088.0/dmd-2.088.0-0.fedora.x86_64.rpm
|
||||
sudo yum install -y https://downloads.dlang.org/releases/2.x/2.088.0/dmd-2.088.0-0.fedora.x86_64.rpm
|
||||
mkdir -p ~/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
```
|
||||
|
||||
## Build RPM from spec file
|
||||
Build the RPM from the provided spec file:
|
||||
```text
|
||||
wget https://github.com/abraunegg/onedrive/archive/refs/tags/v2.4.22.tar.gz -O ~/rpmbuild/SOURCES/v2.4.22.tar.gz
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/contrib/spec/onedrive.spec.in -O ~/rpmbuild/SPECS/onedrive.spec
|
||||
wget https://github.com/abraunegg/onedrive/archive/refs/tags/v2.5.0.tar.gz -O ~/rpmbuild/SOURCES/v2.5.0.tar.gz
|
||||
#wget https://raw.githubusercontent.com/abraunegg/onedrive/master/contrib/spec/onedrive.spec.in -O ~/rpmbuild/SPECS/onedrive.spec
|
||||
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/onedrive-v2.5.0-release-candidate-3/contrib/spec/onedrive.spec.in -O ~/rpmbuild/SPECS/onedrive.spec
|
||||
rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
```
|
||||
|
||||
## RPM Build Example Results
|
||||
Below are example output results of building, installing and running the RPM package on the respective platforms:
|
||||
|
||||
### CentOS 7
|
||||
### CentOS Stream release 9 RPM Build Process
|
||||
```text
|
||||
[alex@localhost ~]$ rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.wi6Tdz
|
||||
[alex@centos9stream ~]$ rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.V7l9aO
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ rm -rf onedrive-2.4.15
|
||||
+ /usr/bin/tar -xf -
|
||||
+ /usr/bin/gzip -dc /home/alex/rpmbuild/SOURCES/v2.4.15.tar.gz
|
||||
+ STATUS=0
|
||||
+ '[' 0 -ne 0 ']'
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
|
||||
+ exit 0
|
||||
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.dyeEuM
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ CFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic'
|
||||
+ export CFLAGS
|
||||
+ CXXFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic'
|
||||
+ export CXXFLAGS
|
||||
+ FFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic -I/usr/lib64/gfortran/modules'
|
||||
+ export FFLAGS
|
||||
+ FCFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic -I/usr/lib64/gfortran/modules'
|
||||
+ export FCFLAGS
|
||||
+ LDFLAGS='-Wl,-z,relro '
|
||||
+ export LDFLAGS
|
||||
+ '[' 1 == 1 ']'
|
||||
+ '[' x86_64 == ppc64le ']'
|
||||
++ find . -name config.guess -o -name config.sub
|
||||
+ ./configure --build=x86_64-redhat-linux-gnu --host=x86_64-redhat-linux-gnu --program-prefix= --disable-dependency-tracking --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --sysconfdir=/etc --datadir=/usr/share --includedir=/usr/include --libdir=/usr/lib64 --libexecdir=/usr/libexec --localstatedir=/var --sharedstatedir=/var/lib --mandir=/usr/share/man --infodir=/usr/share/info
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking for x86_64-redhat-linux-gnu-pkg-config... no
|
||||
checking for pkg-config... /usr/bin/pkg-config
|
||||
checking pkg-config is at least version 0.9.0... yes
|
||||
checking for dmd... dmd
|
||||
checking version of D compiler... 2.087.0
|
||||
checking for curl... yes
|
||||
checking for sqlite... yes
|
||||
configure: creating ./config.status
|
||||
config.status: creating Makefile
|
||||
config.status: creating contrib/pacman/PKGBUILD
|
||||
config.status: creating contrib/spec/onedrive.spec
|
||||
config.status: creating onedrive.1
|
||||
config.status: creating contrib/systemd/onedrive.service
|
||||
config.status: creating contrib/systemd/onedrive@.service
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
+ make
|
||||
if [ -f .git/HEAD ] ; then \
|
||||
git describe --tags > version ; \
|
||||
else \
|
||||
echo v2.4.15 > version ; \
|
||||
fi
|
||||
dmd -w -g -O -J. -L-lcurl -L-lsqlite3 -L-ldl src/config.d src/itemdb.d src/log.d src/main.d src/monitor.d src/onedrive.d src/qxor.d src/selective.d src/sqlite.d src/sync.d src/upload.d src/util.d src/progress.d src/arsd/cgi.d -ofonedrive
|
||||
+ exit 0
|
||||
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.L3JbHy
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ '[' /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64 '!=' / ']'
|
||||
+ rm -rf /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
++ dirname /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
+ mkdir -p /home/alex/rpmbuild/BUILDROOT
|
||||
+ mkdir /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/make install DESTDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64 PREFIX=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
/usr/bin/install -c -D onedrive /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/bin/onedrive
|
||||
/usr/bin/install -c -D onedrive.1 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/man/man1/onedrive.1
|
||||
/usr/bin/install -c -D -m 644 contrib/logrotate/onedrive.logrotate /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/etc/logrotate.d/onedrive
|
||||
mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -c -D -m 644 README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -c -d -m 0755 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/user /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -c -m 0644 contrib/systemd/onedrive@.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -c -m 0644 contrib/systemd/onedrive.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
+ /usr/lib/rpm/check-buildroot
|
||||
+ /usr/lib/rpm/redhat/brp-compress
|
||||
+ /usr/lib/rpm/redhat/brp-strip /usr/bin/strip
|
||||
+ /usr/lib/rpm/redhat/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
|
||||
+ /usr/lib/rpm/redhat/brp-strip-static-archive /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-python-bytecompile /usr/bin/python 1
|
||||
+ /usr/lib/rpm/redhat/brp-python-hardlink
|
||||
+ /usr/lib/rpm/redhat/brp-java-repack-jars
|
||||
Processing files: onedrive-2.4.15-1.el7.x86_64
|
||||
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.cpSXho
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ DOCDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ export DOCDIR
|
||||
+ /usr/bin/mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr README.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr LICENSE /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr CHANGELOG.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ exit 0
|
||||
Provides: config(onedrive) = 2.4.15-1.el7 onedrive = 2.4.15-1.el7 onedrive(x86-64) = 2.4.15-1.el7
|
||||
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
Requires: ld-linux-x86-64.so.2()(64bit) ld-linux-x86-64.so.2(GLIBC_2.3)(64bit) libc.so.6()(64bit) libc.so.6(GLIBC_2.14)(64bit) libc.so.6(GLIBC_2.15)(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libc.so.6(GLIBC_2.3.2)(64bit) libc.so.6(GLIBC_2.3.4)(64bit) libc.so.6(GLIBC_2.4)(64bit) libc.so.6(GLIBC_2.6)(64bit) libc.so.6(GLIBC_2.8)(64bit) libc.so.6(GLIBC_2.9)(64bit) libcurl.so.4()(64bit) libdl.so.2()(64bit) libdl.so.2(GLIBC_2.2.5)(64bit) libgcc_s.so.1()(64bit) libgcc_s.so.1(GCC_3.0)(64bit) libgcc_s.so.1(GCC_4.2.0)(64bit) libm.so.6()(64bit) libm.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) libpthread.so.0(GLIBC_2.3.4)(64bit) librt.so.1()(64bit) librt.so.1(GLIBC_2.2.5)(64bit) libsqlite3.so.0()(64bit) rtld(GNU_HASH)
|
||||
Checking for unpackaged file(s): /usr/lib/rpm/check-files /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
Wrote: /home/alex/rpmbuild/SRPMS/onedrive-2.4.15-1.el7.src.rpm
|
||||
Wrote: /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm
|
||||
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.nWoW33
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ exit 0
|
||||
[alex@localhost ~]$ sudo yum -y install /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm
|
||||
Loaded plugins: fastestmirror
|
||||
Examining /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm: onedrive-2.4.15-1.el7.x86_64
|
||||
Marking /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm to be installed
|
||||
Resolving Dependencies
|
||||
--> Running transaction check
|
||||
---> Package onedrive.x86_64 0:2.4.15-1.el7 will be installed
|
||||
--> Finished Dependency Resolution
|
||||
|
||||
Dependencies Resolved
|
||||
|
||||
==============================================================================================================================================================================================
|
||||
Package Arch Version Repository Size
|
||||
==============================================================================================================================================================================================
|
||||
Installing:
|
||||
onedrive x86_64 2.4.15-1.el7 /onedrive-2.4.15-1.el7.x86_64 7.2 M
|
||||
|
||||
Transaction Summary
|
||||
==============================================================================================================================================================================================
|
||||
Install 1 Package
|
||||
|
||||
Total size: 7.2 M
|
||||
Installed size: 7.2 M
|
||||
Downloading packages:
|
||||
Running transaction check
|
||||
Running transaction test
|
||||
Transaction test succeeded
|
||||
Running transaction
|
||||
Installing : onedrive-2.4.15-1.el7.x86_64 1/1
|
||||
Verifying : onedrive-2.4.15-1.el7.x86_64 1/1
|
||||
|
||||
Installed:
|
||||
onedrive.x86_64 0:2.4.15-1.el7
|
||||
|
||||
Complete!
|
||||
[alex@localhost ~]$ which onedrive
|
||||
/usr/bin/onedrive
|
||||
[alex@localhost ~]$ onedrive --version
|
||||
onedrive v2.4.15
|
||||
[alex@localhost ~]$ onedrive --display-config
|
||||
onedrive version = v2.4.15
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'upload_only' = false
|
||||
Config option 'no_remote_delete' = false
|
||||
Config option 'remove_source_files' = false
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = false
|
||||
[alex@localhost ~]$
|
||||
```
|
||||
|
||||
### CentOS 8
|
||||
```text
|
||||
[alex@localhost ~]$ rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.UINFyE
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ rm -rf onedrive-2.4.15
|
||||
+ /usr/bin/gzip -dc /home/alex/rpmbuild/SOURCES/v2.4.15.tar.gz
|
||||
+ rm -rf onedrive-2.5.0
|
||||
+ /usr/bin/tar -xof -
|
||||
+ /usr/bin/gzip -dc /home/alex/rpmbuild/SOURCES/v2.5.0.tar.gz
|
||||
+ STATUS=0
|
||||
+ '[' 0 -ne 0 ']'
|
||||
+ cd onedrive-2.4.15
|
||||
+ cd onedrive-2.5.0
|
||||
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
|
||||
+ RPM_EC=0
|
||||
++ jobs -p
|
||||
+ exit 0
|
||||
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.cX1WQa
|
||||
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.x8hFro
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ CFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ cd onedrive-2.5.0
|
||||
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ export CFLAGS
|
||||
+ CXXFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ export CXXFLAGS
|
||||
+ FFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ export FFLAGS
|
||||
+ FCFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -march=x86-64-v2 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ export FCFLAGS
|
||||
+ LDFLAGS='-Wl,-z,relro -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld'
|
||||
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 '
|
||||
+ export LDFLAGS
|
||||
+ LT_SYS_LIBRARY_PATH=/usr/lib64:
|
||||
+ export LT_SYS_LIBRARY_PATH
|
||||
+ CC=gcc
|
||||
+ export CC
|
||||
+ CXX=g++
|
||||
+ export CXX
|
||||
+ '[' '-flto=auto -ffat-lto-objectsx' '!=' x ']'
|
||||
++ find . -type f -name configure -print
|
||||
+ for file in $(find . -type f -name configure -print)
|
||||
+ /usr/bin/sed -r --in-place=.backup 's/^char \(\*f\) \(\) = /__attribute__ ((used)) char (*f) () = /g' ./configure
|
||||
+ diff -u ./configure.backup ./configure
|
||||
+ mv ./configure.backup ./configure
|
||||
+ /usr/bin/sed -r --in-place=.backup 's/^char \(\*f\) \(\);/__attribute__ ((used)) char (*f) ();/g' ./configure
|
||||
+ diff -u ./configure.backup ./configure
|
||||
+ mv ./configure.backup ./configure
|
||||
+ /usr/bin/sed -r --in-place=.backup 's/^char \$2 \(\);/__attribute__ ((used)) char \$2 ();/g' ./configure
|
||||
+ diff -u ./configure.backup ./configure
|
||||
+ mv ./configure.backup ./configure
|
||||
+ /usr/bin/sed --in-place=.backup '1{$!N;$!N};$!N;s/int x = 1;\nint y = 0;\nint z;\nint nan;/volatile int x = 1; volatile int y = 0; volatile int z, nan;/;P;D' ./configure
|
||||
+ diff -u ./configure.backup ./configure
|
||||
+ mv ./configure.backup ./configure
|
||||
+ /usr/bin/sed --in-place=.backup 's#^lt_cv_sys_global_symbol_to_cdecl=.*#lt_cv_sys_global_symbol_to_cdecl="sed -n -e '\''s/^T .* \\(.*\\)$/extern int \\1();/p'\'' -e '\''s/^$symcode* .* \\(.*\\)$/extern char \\1;/p'\''"#' ./configure
|
||||
+ diff -u ./configure.backup ./configure
|
||||
+ mv ./configure.backup ./configure
|
||||
+ '[' 1 = 1 ']'
|
||||
+++ dirname ./configure
|
||||
++ find . -name config.guess -o -name config.sub
|
||||
+ '[' 1 = 1 ']'
|
||||
+ '[' x '!=' 'x-Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld' ']'
|
||||
++ find . -name ltmain.sh
|
||||
+ ./configure --build=x86_64-redhat-linux-gnu --host=x86_64-redhat-linux-gnu --program-prefix= --disable-dependency-tracking --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --sysconfdir=/etc --datadir=/usr/share --includedir=/usr/include --libdir=/usr/lib64 --libexecdir=/usr/libexec --localstatedir=/var --sharedstatedir=/var/lib --mandir=/usr/share/man --infodir=/usr/share/info
|
||||
+ ./configure --build=x86_64-redhat-linux-gnu --host=x86_64-redhat-linux-gnu --program-prefix= --disable-dependency-tracking --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --sysconfdir=/etc --datadir=/usr/share --includedir=/usr/include --libdir=/usr/lib64 --libexecdir=/usr/libexec --localstatedir=/var --sharedstatedir=/var/lib --mandir=/usr/share/man --infodir=/usr/share/info --enable-debug --enable-notifications
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking for x86_64-redhat-linux-gnu-pkg-config... /usr/bin/x86_64-redhat-linux-gnu-pkg-config
|
||||
checking pkg-config is at least version 0.9.0... yes
|
||||
checking for dmd... dmd
|
||||
checking version of D compiler... 2.087.0
|
||||
checking version of D compiler... 2.088.0
|
||||
checking for curl... yes
|
||||
checking for sqlite... yes
|
||||
checking for notify... yes
|
||||
configure: creating ./config.status
|
||||
config.status: creating Makefile
|
||||
config.status: creating contrib/pacman/PKGBUILD
|
||||
|
|
@ -256,124 +112,174 @@ configure: WARNING: unrecognized options: --disable-dependency-tracking
|
|||
if [ -f .git/HEAD ] ; then \
|
||||
git describe --tags > version ; \
|
||||
else \
|
||||
echo v2.4.15 > version ; \
|
||||
echo v2.5.0 > version ; \
|
||||
fi
|
||||
dmd -w -g -O -J. -L-lcurl -L-lsqlite3 -L-ldl src/config.d src/itemdb.d src/log.d src/main.d src/monitor.d src/onedrive.d src/qxor.d src/selective.d src/sqlite.d src/sync.d src/upload.d src/util.d src/progress.d src/arsd/cgi.d -ofonedrive
|
||||
dmd -w -J. -g -debug -gs -version=NoPragma -version=NoGdk -version=Notifications -L-lcurl -L-lsqlite3 -L-lnotify -L-lgdk_pixbuf-2.0 -L-lgio-2.0 -L-lgobject-2.0 -L-lglib-2.0 -L-ldl src/main.d src/config.d src/log.d src/util.d src/qxor.d src/curlEngine.d src/onedrive.d src/webhook.d src/sync.d src/itemdb.d src/sqlite.d src/clientSideFiltering.d src/monitor.d src/arsd/cgi.d src/notifications/notify.d src/notifications/dnotify.d -ofonedrive
|
||||
+ RPM_EC=0
|
||||
++ jobs -p
|
||||
+ exit 0
|
||||
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.dNFPdx
|
||||
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.Oj0XhN
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ '[' /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64 '!=' / ']'
|
||||
+ rm -rf /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
++ dirname /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
+ '[' /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64 '!=' / ']'
|
||||
+ rm -rf /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64
|
||||
++ dirname /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64
|
||||
+ mkdir -p /home/alex/rpmbuild/BUILDROOT
|
||||
+ mkdir /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/make install DESTDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64 'INSTALL=/usr/bin/install -p' PREFIX=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
/usr/bin/install -p -D onedrive /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/bin/onedrive
|
||||
/usr/bin/install -p -D onedrive.1 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/man/man1/onedrive.1
|
||||
/usr/bin/install -p -D -m 644 contrib/logrotate/onedrive.logrotate /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/etc/logrotate.d/onedrive
|
||||
mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -D -m 644 README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -d -m 0755 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/user /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive@.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
+ mkdir /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64
|
||||
+ cd onedrive-2.5.0
|
||||
+ /usr/bin/make install DESTDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64 'INSTALL=/usr/bin/install -p' PREFIX=/home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64
|
||||
/usr/bin/install -p -D onedrive /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/bin/onedrive
|
||||
/usr/bin/install -p -D -m 0644 onedrive.1 /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/man/man1/onedrive.1
|
||||
/usr/bin/install -p -D -m 0644 contrib/logrotate/onedrive.logrotate /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/etc/logrotate.d/onedrive
|
||||
mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -D -m 0644 readme.md config LICENSE changelog.md docs/advanced-usage.md docs/application-config-options.md docs/application-security.md docs/business-shared-items.md docs/client-architecture.md docs/contributing.md docs/docker.md docs/install.md docs/national-cloud-deployments.md docs/podman.md docs/privacy-policy.md docs/sharepoint-libraries.md docs/terms-of-service.md docs/ubuntu-package-install.md docs/usage.md docs/known-issues.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -d -m 0755 /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/lib/systemd/user /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive@.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/lib/systemd/system
|
||||
+ /usr/lib/rpm/check-buildroot
|
||||
+ /usr/lib/rpm/redhat/brp-ldconfig
|
||||
/sbin/ldconfig: Warning: ignoring configuration file that cannot be opened: /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/etc/ld.so.conf: No such file or directory
|
||||
+ /usr/lib/rpm/brp-compress
|
||||
+ /usr/lib/rpm/brp-strip /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
|
||||
+ /usr/lib/rpm/redhat/brp-strip-lto /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-python-bytecompile 1
|
||||
+ /usr/lib/rpm/redhat/brp-python-bytecompile '' 1 0
|
||||
+ /usr/lib/rpm/brp-python-hardlink
|
||||
+ PYTHON3=/usr/libexec/platform-python
|
||||
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
|
||||
Processing files: onedrive-2.4.15-1.el8.x86_64
|
||||
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.TnFKbZ
|
||||
Processing files: onedrive-2.5.0-1.el9.x86_64
|
||||
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.vy1y65
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ DOCDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cd onedrive-2.5.0
|
||||
+ DOCDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
+ export LC_ALL=C
|
||||
+ LC_ALL=C
|
||||
+ export DOCDIR
|
||||
+ /usr/bin/mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr README.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr LICENSE /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr CHANGELOG.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ /usr/bin/mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr readme.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr LICENSE /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr changelog.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64/usr/share/doc/onedrive
|
||||
+ RPM_EC=0
|
||||
++ jobs -p
|
||||
+ exit 0
|
||||
warning: File listed twice: /usr/share/doc/onedrive
|
||||
warning: File listed twice: /usr/share/doc/onedrive/CHANGELOG.md
|
||||
warning: File listed twice: /usr/share/doc/onedrive/LICENSE
|
||||
warning: File listed twice: /usr/share/doc/onedrive/README.md
|
||||
Provides: config(onedrive) = 2.4.15-1.el8 onedrive = 2.4.15-1.el8 onedrive(x86-64) = 2.4.15-1.el8
|
||||
warning: File listed twice: /usr/share/doc/onedrive/changelog.md
|
||||
warning: File listed twice: /usr/share/doc/onedrive/readme.md
|
||||
Provides: config(onedrive) = 2.5.0-1.el9 onedrive = 2.5.0-1.el9 onedrive(x86-64) = 2.5.0-1.el9
|
||||
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
Requires: ld-linux-x86-64.so.2()(64bit) ld-linux-x86-64.so.2(GLIBC_2.3)(64bit) libc.so.6()(64bit) libc.so.6(GLIBC_2.14)(64bit) libc.so.6(GLIBC_2.15)(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libc.so.6(GLIBC_2.3.2)(64bit) libc.so.6(GLIBC_2.3.4)(64bit) libc.so.6(GLIBC_2.4)(64bit) libc.so.6(GLIBC_2.6)(64bit) libc.so.6(GLIBC_2.8)(64bit) libc.so.6(GLIBC_2.9)(64bit) libcurl.so.4()(64bit) libdl.so.2()(64bit) libdl.so.2(GLIBC_2.2.5)(64bit) libgcc_s.so.1()(64bit) libgcc_s.so.1(GCC_3.0)(64bit) libgcc_s.so.1(GCC_4.2.0)(64bit) libm.so.6()(64bit) libm.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) libpthread.so.0(GLIBC_2.3.4)(64bit) librt.so.1()(64bit) librt.so.1(GLIBC_2.2.5)(64bit) libsqlite3.so.0()(64bit) rtld(GNU_HASH)
|
||||
Checking for unpackaged file(s): /usr/lib/rpm/check-files /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
Wrote: /home/alex/rpmbuild/SRPMS/onedrive-2.4.15-1.el8.src.rpm
|
||||
Wrote: /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el8.x86_64.rpm
|
||||
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.FAMTFz
|
||||
Requires: ld-linux-x86-64.so.2()(64bit) ld-linux-x86-64.so.2(GLIBC_2.3)(64bit) libc.so.6()(64bit) libc.so.6(GLIBC_2.14)(64bit) libc.so.6(GLIBC_2.15)(64bit) libc.so.6(GLIBC_2.17)(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libc.so.6(GLIBC_2.3.2)(64bit) libc.so.6(GLIBC_2.3.4)(64bit) libc.so.6(GLIBC_2.32)(64bit) libc.so.6(GLIBC_2.33)(64bit) libc.so.6(GLIBC_2.34)(64bit) libc.so.6(GLIBC_2.4)(64bit) libc.so.6(GLIBC_2.6)(64bit) libc.so.6(GLIBC_2.7)(64bit) libc.so.6(GLIBC_2.8)(64bit) libc.so.6(GLIBC_2.9)(64bit) libcurl.so.4()(64bit) libgcc_s.so.1()(64bit) libgcc_s.so.1(GCC_3.0)(64bit) libgcc_s.so.1(GCC_4.2.0)(64bit) libgdk_pixbuf-2.0.so.0()(64bit) libgio-2.0.so.0()(64bit) libglib-2.0.so.0()(64bit) libgobject-2.0.so.0()(64bit) libm.so.6()(64bit) libm.so.6(GLIBC_2.2.5)(64bit) libnotify.so.4()(64bit) libsqlite3.so.0()(64bit) rtld(GNU_HASH)
|
||||
Checking for unpackaged file(s): /usr/lib/rpm/check-files /home/alex/rpmbuild/BUILDROOT/onedrive-2.5.0-1.el9.x86_64
|
||||
Wrote: /home/alex/rpmbuild/SRPMS/onedrive-2.5.0-1.el9.src.rpm
|
||||
Wrote: /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.5.0-1.el9.x86_64.rpm
|
||||
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.pM33Kl
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ cd onedrive-2.5.0
|
||||
+ RPM_EC=0
|
||||
++ jobs -p
|
||||
+ exit 0
|
||||
[alex@localhost ~]$ sudo yum -y install /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el8.x86_64.rpm
|
||||
Last metadata expiration check: 0:04:07 ago on Fri 14 Jan 2022 14:22:13 EST.
|
||||
```
|
||||
|
||||
### CentOS Stream release 9 RPM Package Install Process
|
||||
```text
|
||||
[alex@centos9stream ~]$ sudo yum -y install /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.5.0-1.el9.x86_64.rpm
|
||||
[sudo] password for alex:
|
||||
Last metadata expiration check: 0:33:14 ago on Mon 19 Aug 2024 17:22:48.
|
||||
Dependencies resolved.
|
||||
==============================================================================================================================================================================================
|
||||
Package Architecture Version Repository Size
|
||||
==============================================================================================================================================================================================
|
||||
===============================================================================================================================================================================================
|
||||
Package Architecture Version Repository Size
|
||||
===============================================================================================================================================================================================
|
||||
Installing:
|
||||
onedrive x86_64 2.4.15-1.el8 @commandline 1.5 M
|
||||
onedrive x86_64 2.5.0-1.el9 @commandline 1.5 M
|
||||
|
||||
Transaction Summary
|
||||
==============================================================================================================================================================================================
|
||||
===============================================================================================================================================================================================
|
||||
Install 1 Package
|
||||
|
||||
Total size: 1.5 M
|
||||
Installed size: 7.1 M
|
||||
Installed size: 7.6 M
|
||||
Downloading Packages:
|
||||
Running transaction check
|
||||
Transaction check succeeded.
|
||||
Running transaction test
|
||||
Transaction test succeeded.
|
||||
Running transaction
|
||||
Preparing : 1/1
|
||||
Installing : onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
Running scriptlet: onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
Verifying : onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
Preparing : 1/1
|
||||
Installing : onedrive-2.5.0-1.el9.x86_64 1/1
|
||||
Running scriptlet: onedrive-2.5.0-1.el9.x86_64 1/1
|
||||
Verifying : onedrive-2.5.0-1.el9.x86_64 1/1
|
||||
|
||||
Installed:
|
||||
onedrive-2.4.15-1.el8.x86_64
|
||||
onedrive-2.5.0-1.el9.x86_64
|
||||
|
||||
Complete!
|
||||
[alex@localhost ~]$ which onedrive
|
||||
/usr/bin/onedrive
|
||||
[alex@localhost ~]$ onedrive --version
|
||||
onedrive v2.4.15
|
||||
[alex@localhost ~]$ onedrive --display-config
|
||||
onedrive version = v2.4.15
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'upload_only' = false
|
||||
Config option 'no_remote_delete' = false
|
||||
Config option 'remove_source_files' = false
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = false
|
||||
[alex@localhost ~]$
|
||||
```
|
||||
[alex@centos9stream ~]$ onedrive --version
|
||||
onedrive v2.5.0
|
||||
[alex@centos9stream ~]$ onedrive --display-config
|
||||
WARNING: D-Bus message bus daemon is not available; GUI notifications are disabled
|
||||
Application version = onedrive v2.5.0
|
||||
Compiled with = DMD 2088
|
||||
User Application Config path = /home/alex/.config/onedrive
|
||||
System Application Config path = /etc/onedrive
|
||||
Applicable Application 'config' location = /home/alex/.config/onedrive/config
|
||||
Configuration file found in config location = false - using application defaults
|
||||
Applicable 'sync_list' location = /home/alex/.config/onedrive/sync_list
|
||||
Applicable 'items.sqlite3' location = /home/alex/.config/onedrive/items.sqlite3
|
||||
Config option 'drive_id' =
|
||||
Config option 'sync_dir' = ~/OneDrive
|
||||
Config option 'enable_logging' = false
|
||||
Config option 'log_dir' = /var/log/onedrive
|
||||
Config option 'disable_notifications' = false
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_dir_strict_match' = false
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp|*.swp|*.partial
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'monitor_log_frequency' = 12
|
||||
Config option 'monitor_fullscan_frequency' = 12
|
||||
Config option 'read_only_auth_scope' = false
|
||||
Config option 'dry_run' = false
|
||||
Config option 'upload_only' = false
|
||||
Config option 'download_only' = false
|
||||
Config option 'local_first' = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'check_nomount' = false
|
||||
Config option 'resync' = false
|
||||
Config option 'resync_auth' = false
|
||||
Config option 'cleanup_local_files' = false
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'disable_upload_validation' = false
|
||||
Config option 'disable_download_validation' = false
|
||||
Config option 'bypass_data_preservation' = false
|
||||
Config option 'no_remote_delete' = false
|
||||
Config option 'remove_source_files' = false
|
||||
Config option 'sync_dir_permissions' = 700
|
||||
Config option 'sync_file_permissions' = 600
|
||||
Config option 'space_reservation' = 52428800
|
||||
Config option 'application_id' = d50ca740-c83f-4d1b-b616-12c519384f0c
|
||||
Config option 'azure_ad_endpoint' =
|
||||
Config option 'azure_tenant_id' =
|
||||
Config option 'user_agent' = ISV|abraunegg|OneDrive Client for Linux/v2.5.0
|
||||
Config option 'force_http_11' = false
|
||||
Config option 'debug_https' = false
|
||||
Config option 'rate_limit' = 0
|
||||
Config option 'operation_timeout' = 3600
|
||||
Config option 'dns_timeout' = 60
|
||||
Config option 'connect_timeout' = 10
|
||||
Config option 'data_timeout' = 60
|
||||
Config option 'ip_protocol_version' = 0
|
||||
Config option 'threads' = 8
|
||||
Environment var 'XDG_RUNTIME_DIR' = true
|
||||
Environment var 'DBUS_SESSION_BUS_ADDRESS' = true
|
||||
|
||||
Selective sync 'sync_list' configured = false
|
||||
|
||||
Config option 'sync_business_shared_items' = false
|
||||
|
||||
Config option 'webhook_enabled' = false
|
||||
```
|
||||
251
docs/business-shared-items.md
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
# How to sync OneDrive Business Shared Items
|
||||
|
||||
> [!CAUTION]
|
||||
> Before reading this document, please ensure you are running application version [v2.5.0](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
> [!CAUTION]
|
||||
> This feature has been 100% re-written from v2.5.0 onwards and is not backwards compatible with v2.4.x client versions. If enabling this feature, you must upgrade to v2.5.0 or above on all systems that are running this client.
|
||||
>
|
||||
> An additional pre-requisite before using this capability in v2.5.0 and above is for you to revert any v2.4.x Shared Business Folder configuration you may be currently using, including, but not limited to:
|
||||
> * Removing `sync_business_shared_folders = "true|false"` from your 'config' file
|
||||
> * Removing the 'business_shared_folders' file
|
||||
> * Removing any local data | shared folder data from your configured 'sync_dir' to ensure that there are no conflicts or issues.
|
||||
> * Removing any configuration online that might be related to using this feature prior to v2.5.0
|
||||
|
||||
## Process Overview
|
||||
Syncing OneDrive Business Shared Folders requires additional configuration for your 'onedrive' client:
|
||||
1. From the OneDrive web interface, review the 'Shared' objects that have been shared with you.
|
||||
2. Select the applicable folder, and click the 'Add shortcut to My files', which will then add this to your 'My files' folder
|
||||
3. Update your OneDrive Client for Linux 'config' file to enable the feature by adding `sync_business_shared_items = "true"`. Adding this option will trigger a `--resync` requirement.
|
||||
4. Test the configuration using '--dry-run'
|
||||
5. Remove the use of '--dry-run' and sync the OneDrive Business Shared folders as required
|
||||
|
||||
### Enable syncing of OneDrive Business Shared Items via config file
|
||||
```text
|
||||
sync_business_shared_items = "true"
|
||||
```
|
||||
|
||||
### Disable syncing of OneDrive Business Shared Items via config file
|
||||
```text
|
||||
sync_business_shared_items = "false"
|
||||
```
|
||||
|
||||
## Syncing OneDrive Business Shared Folders
|
||||
Use the following steps to add a OneDrive Business Shared Folder to your account:
|
||||
1. Login to Microsoft OneDrive online, and navigate to 'Shared' from the left hand side pane
|
||||
|
||||

|
||||
|
||||
2. Select the respective folder you wish to sync, and click the 'Add shortcut to My files' at the top of the page
|
||||
|
||||

|
||||
|
||||
3. The final result online will look like this:
|
||||
|
||||

|
||||
|
||||
When using Microsoft Windows, this shared folder will appear as the following:
|
||||
|
||||

|
||||
|
||||
4. Sync your data using `onedrive --sync --verbose`. If you have just enabled the `sync_business_shared_items = "true"` configuration option, you will be required to perform a resync. During the sync, the selected shared folder will be downloaded:
|
||||
|
||||
```
|
||||
...
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 4
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 3 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Creating local directory: ./my_shared_folder
|
||||
Quota information is restricted or not available for this drive.
|
||||
Syncing this OneDrive Business Shared Folder: my_shared_folder
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!BhWyqa7K_kqXqHtSIlsqjR5iJogxpWxDradnpVGTU2VxBOJh82Y6S4he4rdnGPBT
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 6
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 6 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Creating local directory: ./my_shared_folder/asdf
|
||||
Creating local directory: ./my_shared_folder/original_data
|
||||
Number of items to download from OneDrive: 3
|
||||
Downloading file: my_shared_folder/asdf/asdfasdfhashdkfasdf.txt ... done
|
||||
Downloading file: my_shared_folder/asdf/asdfasdf.txt ... done
|
||||
Downloading file: my_shared_folder/original_data/file1.data ... done
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
...
|
||||
```
|
||||
|
||||
When this is viewed locally, on Linux, this shared folder is seen as the following:
|
||||
|
||||

|
||||
|
||||
Any shared folder you add can utilise any 'client side filtering' rules that you have created.
|
||||
|
||||
|
||||
## Syncing OneDrive Business Shared Files
|
||||
There are two methods to support syncing OneDrive Business Shared Files with the OneDrive application:
|
||||
1. Add a 'shortcut' to your 'My Files' for the file, which creates a URL shortcut to the file which can be followed when using a Linux Window Manager (Gnome, KDE etc) and the link will open up in a browser. This is the only option supported by Microsoft Windows.
|
||||
2. Use `--sync-shared-files` option to sync all files shared with you to your local disk. If you use this method, you can utilise any 'client side filtering' rules that you have created to filter out files you do not want locally. This option will create a new folder locally, with sub-folders named after the person who shared the data with you.
|
||||
|
||||
### Syncing OneDrive Business Shared Files using Option 1
|
||||
1. As per the above method for adding folders, select the shared file, then select to 'Add shortcut' to the file
|
||||
|
||||

|
||||
|
||||
2. The final result online will look like this:
|
||||
|
||||

|
||||
|
||||
When using Microsoft Windows, this shared file will appear as the following:
|
||||
|
||||

|
||||
|
||||
3. Sync your data using `onedrive --sync --verbose`. If you have just enabled the `sync_business_shared_items = "true"` configuration option, you will be required to perform a resync.
|
||||
```
|
||||
...
|
||||
All application operations will be performed in the configured local 'sync_dir' directory: /home/alex/OneDrive
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 2
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 1 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Number of items to download from OneDrive: 1
|
||||
Downloading file: ./file to share.docx.url ... done
|
||||
Syncing this OneDrive Business Shared Folder: my_shared_folder
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!BhWyqa7K_kqXqHtSIlsqjR5iJogxpWxDradnpVGTU2VxBOJh82Y6S4he4rdnGPBT
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 0
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
No additional changes or items that can be applied were discovered while processing the data received from Microsoft OneDrive
|
||||
Quota information is restricted or not available for this drive.
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!BhWyqa7K_kqXqHtSIlsqjR5iJogxpWxDradnpVGTU2VxBOJh82Y6S4he4rdnGPBT
|
||||
Quota information is restricted or not available for this drive.
|
||||
...
|
||||
```
|
||||
|
||||
When this is viewed locally, on Linux, this shared folder is seen as the following:
|
||||
|
||||

|
||||
|
||||
Any shared file link you add can utilise any 'client side filtering' rules that you have created.
|
||||
|
||||
|
||||
### Syncing OneDrive Business Shared Files using Option 2
|
||||
|
||||
> [!IMPORTANT]
|
||||
> When using option 2, all files that have been shared with you will be downloaded by default. To reduce this, first use `--list-shared-items` to list all shared items with your account, then use 'client side filtering' rules such as 'sync_list' configuration to selectively sync all the files to your local system.
|
||||
|
||||
1. Review all items that have been shared with you by using `onedrive --list-shared-items`. This should display output similar to the following:
|
||||
```
|
||||
...
|
||||
Listing available OneDrive Business Shared Items:
|
||||
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: large_document_shared.docx
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: no_download_access.docx
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: online_access_only.txt
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: read_only.txt
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: qewrqwerwqer.txt
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: dummy_file_to_share.docx
|
||||
Shared By: testuser2 testuser2 (testuser2@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared Folder: Sub Folder 2
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared File: file to share.docx
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared Folder: Top Folder
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared Folder: my_shared_folder
|
||||
Shared By: testuser2 testuser2 (testuser2@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
Shared Folder: Jenkins
|
||||
Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
-----------------------------------------------------------------------------------
|
||||
...
|
||||
```
|
||||
|
||||
2. If applicable, add entries to a 'sync_list' file, to only sync the shared files that are of importance to you.
|
||||
|
||||
3. Run the command `onedrive --sync --verbose --sync-shared-files` to sync the shared files to your local file system. This will create a new local folder called 'Files Shared With Me', and will contain sub-directories named after the entity account that has shared the file with you. In that folder will reside the shared file:
|
||||
|
||||
```
|
||||
...
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
No additional changes or items that can be applied were discovered while processing the data received from Microsoft OneDrive
|
||||
Syncing this OneDrive Business Shared Folder: my_shared_folder
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!BhWyqa7K_kqXqHtSIlsqjR5iJogxpWxDradnpVGTU2VxBOJh82Y6S4he4rdnGPBT
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 0
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
No additional changes or items that can be applied were discovered while processing the data received from Microsoft OneDrive
|
||||
Quota information is restricted or not available for this drive.
|
||||
Creating the OneDrive Business Shared Files Local Directory: /home/alex/OneDrive/Files Shared With Me
|
||||
Checking for any applicable OneDrive Business Shared Files which need to be synced locally
|
||||
Creating the OneDrive Business Shared File Users Local Directory: /home/alex/OneDrive/Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)
|
||||
Creating the OneDrive Business Shared File Users Local Directory: /home/alex/OneDrive/Files Shared With Me/testuser2 testuser2 (testuser2@mynasau3.onmicrosoft.com)
|
||||
Number of items to download from OneDrive: 7
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/file to share.docx ... done
|
||||
OneDrive returned a 'HTTP 403 - Forbidden' - gracefully handling error
|
||||
Unable to download this file as this was shared as read-only without download permission: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/no_download_access.docx
|
||||
ERROR: File failed to download. Increase logging verbosity to determine why.
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/no_download_access.docx ... failed!
|
||||
Downloading file: Files Shared With Me/testuser2 testuser2 (testuser2@mynasau3.onmicrosoft.com)/dummy_file_to_share.docx ... done
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 0% | ETA --:--:--
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/online_access_only.txt ... done
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/read_only.txt ... done
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/qewrqwerwqer.txt ... done
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 5% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 10% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 15% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 20% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 25% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 30% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 35% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 40% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 45% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 50% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 55% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 60% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 65% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 70% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 75% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 80% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 85% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 90% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 95% | ETA 00:00:00
|
||||
Downloading: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... 100% | DONE in 00:00:00
|
||||
Quota information is restricted or not available for this drive.
|
||||
Downloading file: Files Shared With Me/test user (testuser@mynasau3.onmicrosoft.com)/large_document_shared.docx ... done
|
||||
Quota information is restricted or not available for this drive.
|
||||
Quota information is restricted or not available for this drive.
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!BhWyqa7K_kqXqHtSIlsqjR5iJogxpWxDradnpVGTU2VxBOJh82Y6S4he4rdnGPBT
|
||||
Quota information is restricted or not available for this drive.
|
||||
...
|
||||
```
|
||||
|
||||
When this is viewed locally, on Linux, this 'Files Shared With Me' and content is seen as the following:
|
||||
|
||||

|
||||
|
||||
Unfortunately there is no Microsoft Windows equivalent for this capability.
|
||||
|
||||
## Known Issues
|
||||
Shared folders, shared with you from people outside of your 'organisation' are unable to be synced. This is due to the Microsoft Graph API not presenting these folders.
|
||||
|
||||
Shared folders that match this scenario, when you view 'Shared' via OneDrive online, will have a 'world' symbol as per below:
|
||||
|
||||

|
||||
|
||||
This issue is being tracked by: [#966](https://github.com/abraunegg/onedrive/issues/966)
|
||||
333
docs/client-architecture.md
Normal file
|
|
@ -0,0 +1,333 @@
|
|||
# OneDrive Client for Linux Application Architecture
|
||||
|
||||
## How does the client work at a high level?
|
||||
The client utilises the 'libcurl' library to communicate with Microsoft OneDrive via the Microsoft Graph API. The diagram below shows this high level interaction with the Microsoft and GitHub API services online:
|
||||
|
||||

|
||||
|
||||
Depending on your operational environment, it is possible to 'tweak' the following options which will modify how libcurl operates in its interaction with Microsoft OneDrive services:
|
||||
|
||||
* Downgrade all HTTPS operations to use HTTP1.1 (Config Option: `force_http_11`)
|
||||
* Control how long a specific transfer should take before it is considered too slow and aborted (Config Option: `operation_timeout`)
|
||||
* Control libcurl handling of DNS Cache Timeout (Config Option: `dns_timeout`)
|
||||
* Control the maximum time allowed for the connection to be established (Config Option: `connect_timeout`)
|
||||
* Control the timeout for activity on an established HTTPS connection (Config Option: `data_timeout`)
|
||||
* Control what IP protocol version should be used when communicating with OneDrive (Config Option: `ip_protocol_version`)
|
||||
* Control what User Agent is presented to Microsoft services (Config Option: `user_agent`)
|
||||
|
||||
> [!IMPORTANT]
|
||||
> The default 'user_agent' value conforms to specific Microsoft requirements to identify as an ISV that complies with OneDrive traffic decoration requirements. Changing this value will potentially impact how Microsoft sees your client, thus your traffic may get throttled. For further information please read: https://learn.microsoft.com/en-us/sharepoint/dev/general-development/how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online
|
||||
|
||||
Diving a little deeper into how the client operates, the diagram below outlines at a high level the operational workflow of the OneDrive Client for Linux, demonstrating how it interacts with the OneDrive API to maintain synchronisation, manage local and cloud data integrity, and ensure that user data is accurately mirrored between the local filesystem and OneDrive cloud storage.
|
||||
|
||||

|
||||
|
||||
The application operational processes have several high level key stages:
|
||||
|
||||
1. **Access Token Validation:** Initially, the client validates its access and the existing access token, refreshing it if necessary. This step ensures that the client has the required permissions to interact with the OneDrive API.
|
||||
|
||||
2. **Query Microsoft OneDrive API:** The client queries the /delta API endpoint of Microsoft OneDrive, which returns JSON responses. The /delta endpoint is particularly used for syncing changes, helping the client to identify any updates in the OneDrive storage.
|
||||
|
||||
3. **Process JSON Responses:** The client processes each JSON response to determine if it represents a 'root' or 'deleted' item. Items not marked as 'root' or 'deleted' are temporarily stored for further processing. For 'root' or 'deleted' items, the client processes them immediately, otherwise, the client evaluates the items against client-side filtering rules to decide whether to discard them or to process and save them in the local database cache for actions like creating directories or downloading files.
|
||||
|
||||
4. **Local Cache Database Processing for Data Integrity:** The client processes its local cache database to check for data integrity and differences compared to the OneDrive storage. If differences are found, such as a file or folder change including deletions, the client uploads these changes to OneDrive. Responses from the API, including item metadata, are saved to the local cache database.
|
||||
|
||||
5. **Local Filesystem Scanning:** The client scans the local filesystem for new files or folders. Each new item is checked against client-side filtering rules. If an item passes the filtering, it is uploaded to OneDrive. Otherwise, it is discarded if it doesn't meet the filtering criteria.
|
||||
|
||||
6. **Final Data True-Up:** Lastly, the client queries the /delta link for a final true-up, processing any further online JSON changes if required. This ensures that the local and OneDrive storages are fully synchronised.
|
||||
|
||||
## What are the operational modes of the client?
|
||||
|
||||
There are 2 main operational modes that the client can utilise:
|
||||
|
||||
1. Standalone sync mode that performs a single sync action against Microsoft OneDrive. This method is used when you utilise `--sync`.
|
||||
2. Ongoing sync mode that continuously syncs your data with Microsoft OneDrive and utilises 'inotify' to watch for local system changes. This method is used when you utilise `--monitor`.
|
||||
|
||||
By default, both modes consider all data stored online within Microsoft OneDrive as the 'source-of-truth' - that is, what is online, is the correct data (file version, file content, file timestamp, folder structure and so on). This consideration also matches how the Microsoft OneDrive Client for Windows operates.
|
||||
|
||||
However, in standalone mode (`--sync`), you can *change* what reference the client will use as the 'source-of-truth' for your data by using the `--local-first` option so that the application will look at your local files *first* and consider your local files as your 'source-of-truth' to replicate that directory structure to Microsoft OneDrive.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> Please be aware that if you designate a network mount point (such as NFS, Windows Network Share, or Samba Network Share) as your `sync_dir`, this setup inherently lacks 'inotify' support. Support for 'inotify' is essential for real-time tracking of file changes, which means that the client's 'Monitor Mode' cannot immediately detect changes in files located on these network shares. Instead, synchronisation between your local filesystem and Microsoft OneDrive will occur at intervals specified by the `monitor_interval` setting. This limitation regarding 'inotify' support on network mount points like NFS or Samba is beyond the control of this client.
|
||||
|
||||
## OneDrive Client for Linux High Level Activity Flows
|
||||
|
||||
The diagrams below show the high level process flow and decision making when running the application
|
||||
|
||||
### Main functional activity flows
|
||||

|
||||
|
||||
### Processing a potentially new local item
|
||||

|
||||
|
||||
### Processing a potentially changed local item
|
||||

|
||||
|
||||
### Download a file from Microsoft OneDrive
|
||||

|
||||
|
||||
### Upload a modified file to Microsoft OneDrive
|
||||

|
||||
|
||||
### Upload a new local file to Microsoft OneDrive
|
||||

|
||||
|
||||
### Determining if an 'item' is synchronised between Microsoft OneDrive and the local file system
|
||||

|
||||
|
||||
### Determining if an 'item' is excluded due to 'Client Side Filtering' rules
|
||||
|
||||
By default, the OneDrive Client for Linux will sync all files and folders between Microsoft OneDrive and the local filesystem.
|
||||
|
||||
Client Side Filtering in the context of this client refers to user-configured rules that determine what files and directories the client should upload or download from Microsoft OneDrive. These rules are crucial for optimising synchronisation, especially when dealing with large numbers of files or specific file types. The OneDrive Client for Linux offers several configuration options to facilitate this:
|
||||
|
||||
* **skip_dir:** This option allows the user to specify directories that should not be synchronised with OneDrive. It's particularly useful for omitting large or irrelevant directories from the sync process.
|
||||
|
||||
* **skip_dotfiles:** Dotfiles, usually configuration files or scripts, can be excluded from the sync. This is useful for users who prefer to keep these files local.
|
||||
|
||||
* **skip_file:** Specific files can be excluded from synchronisation using this option. It provides flexibility in selecting which files are essential for cloud storage.
|
||||
|
||||
* **skip_symlinks:** Symlinks often point to files outside the OneDrive directory or to locations that are not relevant for cloud storage. This option prevents them from being included in the sync.
|
||||
|
||||
This exclusion process can be illustrated by the following activity diagram. A 'true' return value means that the path being evaluated needs to be excluded:
|
||||
|
||||

|
||||
|
||||
## File conflict handling - default operational modes
|
||||
|
||||
When using the default operational modes (`--sync` or `--monitor`) the client application is conforming to how the Microsoft Windows OneDrive client operates in terms of resolving conflicts for files.
|
||||
|
||||
Additionally, when using `--resync` this conflict resolution can differ slightly, as, when using `--resync` you are *deleting* the known application state, thus, the application has zero reference as to what was previously in sync with the local file system.
|
||||
|
||||
Due to this factor, when using `--resync` the online source is always going to be considered accurate and the source-of-truth, regardless of the local file state, file timestamp or file hash.
|
||||
|
||||
### Default Operational Modes - Conflict Handling
|
||||
|
||||
#### Scenario
|
||||
1. Create a local file
|
||||
2. Perform a sync with Microsoft OneDrive using `onedrive --sync`
|
||||
3. Modify file online
|
||||
4. Modify file locally with different data|contents
|
||||
5. Perform a sync with Microsoft OneDrive using `onedrive --sync`
|
||||
|
||||

|
||||
|
||||
#### Evidence of Conflict Handling
|
||||
```
|
||||
...
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 2
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 1 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Number of items to download from OneDrive: 1
|
||||
The local file to replace (./1.txt) has been modified locally since the last download. Renaming it to avoid potential local data loss.
|
||||
The local item is out-of-sync with OneDrive, renaming to preserve existing file and prevent local data loss: ./1.txt -> ./1-onedrive-client-dev.txt
|
||||
Downloading file ./1.txt ... done
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing ~/OneDrive
|
||||
The directory has not changed
|
||||
Processing α
|
||||
...
|
||||
The file has not changed
|
||||
Processing เอกสาร
|
||||
The directory has not changed
|
||||
Processing 1.txt
|
||||
The file has not changed
|
||||
Scanning the local file system '~/OneDrive' for new data to upload
|
||||
...
|
||||
New items to upload to OneDrive: 1
|
||||
Total New Data to Upload: 52 Bytes
|
||||
Uploading new file ./1-onedrive-client-dev.txt ... done.
|
||||
Performing a last examination of the most recent online data within Microsoft OneDrive to complete the reconciliation process
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 2
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 1 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
|
||||
Sync with Microsoft OneDrive is complete
|
||||
Waiting for all internal threads to complete before exiting application
|
||||
```
|
||||
|
||||
### Default Operational Modes - Conflict Handling with --resync
|
||||
|
||||
#### Scenario
|
||||
1. Create a local file
|
||||
2. Perform a sync with Microsoft OneDrive using `onedrive --sync`
|
||||
3. Modify file online
|
||||
4. Modify file locally with different data|contents
|
||||
5. Perform a sync with Microsoft OneDrive using `onedrive --sync --resync`
|
||||
|
||||

|
||||
|
||||
#### Evidence of Conflict Handling
|
||||
```
|
||||
...
|
||||
Deleting the saved application sync status ...
|
||||
Using IPv4 and IPv6 (if configured) for all network operations
|
||||
Checking Application Version ...
|
||||
...
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 14
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 13 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Local file time discrepancy detected: ./1.txt
|
||||
This local file has a different modified time 2024-Feb-19 19:32:55Z (UTC) when compared to remote modified time 2024-Feb-19 19:32:36Z (UTC)
|
||||
The local file has a different hash when compared to remote file hash
|
||||
Local item does not exist in local database - replacing with file from OneDrive - failed download?
|
||||
The local item is out-of-sync with OneDrive, renaming to preserve existing file and prevent local data loss: ./1.txt -> ./1-onedrive-client-dev.txt
|
||||
Number of items to download from OneDrive: 1
|
||||
Downloading file ./1.txt ... done
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing ~/OneDrive
|
||||
The directory has not changed
|
||||
Processing α
|
||||
...
|
||||
Processing เอกสาร
|
||||
The directory has not changed
|
||||
Processing 1.txt
|
||||
The file has not changed
|
||||
Scanning the local file system '~/OneDrive' for new data to upload
|
||||
...
|
||||
New items to upload to OneDrive: 1
|
||||
Total New Data to Upload: 52 Bytes
|
||||
Uploading new file ./1-onedrive-client-dev.txt ... done.
|
||||
Performing a last examination of the most recent online data within Microsoft OneDrive to complete the reconciliation process
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 2
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 1 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
|
||||
Sync with Microsoft OneDrive is complete
|
||||
Waiting for all internal threads to complete before exiting application
|
||||
```
|
||||
|
||||
## File conflict handling - local-first operational mode
|
||||
|
||||
When using `--local-first` as your operational parameter the client application is now using your local filesystem data as the 'source-of-truth' as to what should be stored online.
|
||||
|
||||
However - Microsoft OneDrive itself has *zero* acknowledgement of this concept, thus, conflict handling needs to be aligned to how Microsoft OneDrive on other platforms operates, that is, rename the local offending file.
|
||||
|
||||
Additionally, when using `--resync` you are *deleting* the known application state, thus, the application has zero reference as to what was previously in sync with the local file system.
|
||||
|
||||
Due to this factor, when using `--resync` the online source is always going to be considered accurate and the source-of-truth, regardless of the local file state, file timestamp or file hash or use of `--local-first`.
|
||||
|
||||
### Local First Operational Modes - Conflict Handling
|
||||
|
||||
#### Scenario
|
||||
1. Create a local file
|
||||
2. Perform a sync with Microsoft OneDrive using `onedrive --sync --local-first`
|
||||
3. Modify file locally with different data|contents
|
||||
4. Modify file online with different data|contents
|
||||
5. Perform a sync with Microsoft OneDrive using `onedrive --sync --local-first`
|
||||
|
||||

|
||||
|
||||
#### Evidence of Conflict Handling
|
||||
```
|
||||
Reading configuration file: /home/alex/.config/onedrive/config
|
||||
...
|
||||
Using IPv4 and IPv6 (if configured) for all network operations
|
||||
Checking Application Version ...
|
||||
...
|
||||
Sync Engine Initialised with new Onedrive API instance
|
||||
All application operations will be performed in the configured local 'sync_dir' directory: /home/alex/OneDrive
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing ~/OneDrive
|
||||
The directory has not changed
|
||||
Processing α
|
||||
The directory has not changed
|
||||
...
|
||||
The file has not changed
|
||||
Processing เอกสาร
|
||||
The directory has not changed
|
||||
Processing 1.txt
|
||||
Local file time discrepancy detected: 1.txt
|
||||
The file content has changed locally and has a newer timestamp, thus needs to be uploaded to OneDrive
|
||||
Changed local items to upload to OneDrive: 1
|
||||
The local item is out-of-sync with OneDrive, renaming to preserve existing file and prevent local data loss: 1.txt -> 1-onedrive-client-dev.txt
|
||||
Uploading new file 1-onedrive-client-dev.txt ... done.
|
||||
Scanning the local file system '~/OneDrive' for new data to upload
|
||||
...
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 3
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 2 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Number of items to download from OneDrive: 1
|
||||
Downloading file ./1.txt ... done
|
||||
|
||||
Sync with Microsoft OneDrive is complete
|
||||
Waiting for all internal threads to complete before exiting application
|
||||
```
|
||||
|
||||
|
||||
### Local First Operational Modes - Conflict Handling with --resync
|
||||
|
||||
#### Scenario
|
||||
1. Create a local file
|
||||
2. Perform a sync with Microsoft OneDrive using `onedrive --sync --local-first`
|
||||
3. Modify file locally with different data|contents
|
||||
4. Modify file online with different data|contents
|
||||
5. Perform a sync with Microsoft OneDrive using `onedrive --sync --local-first --resync`
|
||||
|
||||

|
||||
|
||||
#### Evidence of Conflict Handling
|
||||
```
|
||||
...
|
||||
|
||||
The usage of --resync will delete your local 'onedrive' client state, thus no record of your current 'sync status' will exist.
|
||||
This has the potential to overwrite local versions of files with perhaps older versions of documents downloaded from OneDrive, resulting in local data loss.
|
||||
If in doubt, backup your local data before using --resync
|
||||
|
||||
Are you sure you wish to proceed with --resync? [Y/N] y
|
||||
|
||||
Deleting the saved application sync status ...
|
||||
Using IPv4 and IPv6 (if configured) for all network operations
|
||||
...
|
||||
Sync Engine Initialised with new Onedrive API instance
|
||||
All application operations will be performed in the configured local 'sync_dir' directory: /home/alex/OneDrive
|
||||
Performing a database consistency and integrity check on locally stored data
|
||||
Processing DB entries for this Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing ~/OneDrive
|
||||
The directory has not changed
|
||||
Scanning the local file system '~/OneDrive' for new data to upload
|
||||
Skipping item - excluded by sync_list config: ./random_25k_files
|
||||
OneDrive Client requested to create this directory online: ./α
|
||||
The requested directory to create was found on OneDrive - skipping creating the directory: ./α
|
||||
...
|
||||
New items to upload to OneDrive: 9
|
||||
Total New Data to Upload: 49 KB
|
||||
...
|
||||
The file we are attempting to upload as a new file already exists on Microsoft OneDrive: ./1.txt
|
||||
Skipping uploading this item as a new file, will upload as a modified file (online file already exists): ./1.txt
|
||||
The local item is out-of-sync with OneDrive, renaming to preserve existing file and prevent local data loss: ./1.txt -> ./1-onedrive-client-dev.txt
|
||||
Uploading new file ./1-onedrive-client-dev.txt ... done.
|
||||
Fetching /delta response from the OneDrive API for Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Processing API Response Bundle: 1 - Quantity of 'changes|items' in this bundle to process: 15
|
||||
Finished processing /delta JSON response from the OneDrive API
|
||||
Processing 14 applicable changes and items received from Microsoft OneDrive
|
||||
Processing OneDrive JSON item batch [1/1] to ensure consistent local state
|
||||
Number of items to download from OneDrive: 1
|
||||
Downloading file ./1.txt ... done
|
||||
|
||||
Sync with Microsoft OneDrive is complete
|
||||
Waiting for all internal threads to complete before exiting application
|
||||
```
|
||||
|
||||
## Client Functional Component Architecture Relationships
|
||||
|
||||
The diagram below shows the main functional relationship of application code components, and how these relate to each relevant code module within this application:
|
||||
|
||||

|
||||
|
||||
## Database Schema
|
||||
|
||||
The diagram below shows the database schema that is used within the application
|
||||
|
||||

|
||||
175
docs/contributing.md
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
# OneDrive Client for Linux: Coding Style Guidelines
|
||||
|
||||
## Introduction
|
||||
|
||||
This document outlines the coding style guidelines for code contributions for the OneDrive Client for Linux.
|
||||
|
||||
These guidelines are intended to ensure the codebase remains clean, well-organised, and accessible to all contributors, new and experienced alike.
|
||||
|
||||
## Code Layout
|
||||
> [!NOTE]
|
||||
> When developing any code contribution, please utilise either Microsoft Visual Studio Code or Notepad++.
|
||||
|
||||
### Indentation
|
||||
Most of the codebase utilises tabs for space indentation, with 4 spaces to a tab. Please keep to this convention.
|
||||
|
||||
### Line Length
|
||||
Try and keep line lengths to a reasonable length. Do not constrain yourself to short line lengths such as 80 characters. This means when the code is being displayed in the code editor, lines are correctly displayed when using screen resolutions of 1920x1080 and above.
|
||||
|
||||
If you wish to use shorter line lengths (80 characters for example), please do not follow this sort of example:
|
||||
```code
|
||||
...
|
||||
void functionName(
|
||||
string somevar,
|
||||
bool someOtherVar,
|
||||
const(char) anotherVar=null
|
||||
){
|
||||
....
|
||||
```
|
||||
|
||||
### Coding Style | Braces
|
||||
Please use 1TBS (One True Brace Style) which is a variation of the K&R (Kernighan & Ritchie) style. This approach is intended to improve readability and maintain consistency throughout the code.
|
||||
|
||||
When using this coding style, even when the code of the `if`, `else`, `for`, or function definition contains only one statement, braces are used to enclose it.
|
||||
|
||||
```code
|
||||
// What this if statement is doing
|
||||
if (condition) {
|
||||
// The condition was true
|
||||
.....
|
||||
} else {
|
||||
// The condition was false
|
||||
.....
|
||||
}
|
||||
|
||||
// Loop 10 times to do something
|
||||
for (int i = 0; i < 10; i++) {
|
||||
// Loop body
|
||||
}
|
||||
|
||||
// This function is to do this
|
||||
void functionExample() {
|
||||
// Function body
|
||||
}
|
||||
```
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
### Variables and Functions
|
||||
Please use `camelCase` for variable and function names.
|
||||
|
||||
### Classes and Interfaces
|
||||
Please use `PascalCase` for classes, interfaces, and structs.
|
||||
|
||||
### Constants
|
||||
Use uppercase with underscores between words.
|
||||
|
||||
## Documentation
|
||||
|
||||
### Language and Spelling
|
||||
To maintain consistency across the project's documentation, comments, and code, all written text must adhere to British English spelling conventions, not American English. This requirement applies to all aspects of the codebase, including variable names, comments, and documentation.
|
||||
|
||||
For example, use "specialise" instead of "specialize", "colour" instead of "color", and "organise" instead of "organize". This standard ensures that the project maintains a cohesive and consistent linguistic style.
|
||||
|
||||
### Code Comments
|
||||
Please comment code at all levels. Use `//` for all line comments. Detail why a statement is needed, or what is expected to happen so future readers or contributors can read through the intent of the code with clarity.
|
||||
|
||||
If fixing a 'bug', please add a link to the GitHub issue being addressed as a comment, for example:
|
||||
```code
|
||||
...
|
||||
// Before discarding change - does this ID still exist on OneDrive - as in IS this
|
||||
// potentially a --single-directory sync and the user 'moved' the file out of the 'sync-dir' to another OneDrive folder
|
||||
// This is a corner edge case - https://github.com/skilion/onedrive/issues/341
|
||||
|
||||
// What is the original local path for this ID in the database? Does it match 'syncFolderChildPath'
|
||||
if (itemdb.idInLocalDatabase(driveId, item["id"].str)){
|
||||
// item is in the database
|
||||
string originalLocalPath = computeItemPath(driveId, item["id"].str);
|
||||
...
|
||||
```
|
||||
|
||||
All code should be clearly commented.
|
||||
|
||||
### Application Logging Output
|
||||
If making changes to any application logging output, please first discuss this either via direct communication or email.
|
||||
|
||||
For reference, below are the available application logging output functions and examples:
|
||||
```code
|
||||
|
||||
// most used
|
||||
addLogEntry("Basic 'info' message", ["info"]); .... or just use addLogEntry("Basic 'info' message");
|
||||
addLogEntry("Basic 'verbose' message", ["verbose"]);
|
||||
addLogEntry("Basic 'debug' message", ["debug"]);
|
||||
|
||||
// GUI notify only
|
||||
addLogEntry("Basic 'notify' ONLY message and displayed in GUI if notifications are enabled", ["notify"]);
|
||||
|
||||
// info and notify
|
||||
addLogEntry("Basic 'info and notify' message and displayed in GUI if notifications are enabled", ["info", "notify"]);
|
||||
|
||||
// log file only
|
||||
addLogEntry("Information sent to the log file only, and only if logging to a file is enabled", ["logFileOnly"]);
|
||||
|
||||
// Console only (session based upload|download)
|
||||
addLogEntry("Basic 'Console only with new line' message", ["consoleOnly"]);
|
||||
|
||||
// Console only with no new line
|
||||
addLogEntry("Basic 'Console only with no new line' message", ["consoleOnlyNoNewLine"]);
|
||||
|
||||
```
|
||||
|
||||
### Documentation Updates
|
||||
If the code changes any of the functionality that is documented, it is expected that any PR submission will also include updating the respective section of user documentation and/or man page as part of the code submission.
|
||||
|
||||
## Development Testing
|
||||
Whilst there are more modern DMD and LDC compilers available, ensuring client build compatibility with older platforms is a key requirement.
|
||||
|
||||
The issue stems from Debian and Ubuntu LTS versions - such as Ubuntu 20.04. Its [ldc package](https://packages.ubuntu.com/focal/ldc) is only v1.20.1, thus, this is the minimum version that all compilation needs to be tested against.
|
||||
|
||||
The reason LDC v1.20.1 must be used, is that this is the version that is used to compile the packages presented at [OpenSuSE Build Service ](https://build.opensuse.org/package/show/home:npreining:debian-ubuntu-onedrive/onedrive) - which is where most Debian and Ubuntu users will install the client from.
|
||||
|
||||
It is assumed here that you know how to download and install the correct LDC compiler for your platform.
|
||||
|
||||
## Submitting a PR
|
||||
When submitting a PR, please provide your testing evidence in the PR submission of what has been fixed, in the format of:
|
||||
|
||||
### Without PR
|
||||
```
|
||||
Application output that is doing whatever | or illustration of issue | illustration of bug
|
||||
```
|
||||
|
||||
### With PR
|
||||
```
|
||||
Application output that is doing whatever | or illustration of issue being fixed | illustration of bug being fixed
|
||||
```
|
||||
Please also include validation of compilation using the minimum LDC package version.
|
||||
|
||||
To assist with your testing validation against the minimum LDC compiler version, a script as per below could assist you with this validation:
|
||||
|
||||
```bash
|
||||
|
||||
#!/bin/bash
|
||||
|
||||
PR=<Your_PR_Number>
|
||||
|
||||
rm -rf ./onedrive-pr${PR}
|
||||
git clone https://github.com/abraunegg/onedrive.git onedrive-pr${PR}
|
||||
cd onedrive-pr${PR}
|
||||
git fetch origin pull/${PR}/head:pr${PR}
|
||||
git checkout pr${PR}
|
||||
|
||||
# MIN LDC Version to compile
|
||||
# MIN Version for ARM / Compiling with LDC
|
||||
source ~/dlang/ldc-1.20.1/activate
|
||||
|
||||
# Compile code with specific LDC version
|
||||
./configure --enable-debug --enable-notifications; make clean; make;
|
||||
deactivate
|
||||
./onedrive --version
|
||||
|
||||
```
|
||||
|
||||
## References
|
||||
|
||||
* D Language Official Style Guide: https://dlang.org/dstyle.html
|
||||
* British English spelling conventions: https://www.collinsdictionary.com/
|
||||
|
|
@ -3,19 +3,19 @@ This client can be run as a Docker container, with 3 available container base op
|
|||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Docker container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Docker container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Alpine Linux | edge-alpine | Docker container based on Alpine 3.20 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Docker container based on Alpine 3.20 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Docker container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Docker container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
| Fedora | edge-fedora | Docker container based on Fedora 40 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Docker container based on Fedora 40 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Fedora 38
|
||||
* Fedora 40
|
||||
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
|
|
@ -23,7 +23,8 @@ The 'edge' Docker Container will align closer to all documentation and features,
|
|||
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
**Note:** The below instructions for docker has been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
> [!NOTE]
|
||||
> The below instructions for docker have been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
## High Level Configuration Steps
|
||||
1. Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
|
|
@ -37,7 +38,11 @@ Additionally there are specific version release tags for each release. Refer to
|
|||
## Configuration Steps
|
||||
|
||||
### 1. Install 'docker' on your platform
|
||||
Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
Install Docker for your system using the official instructions found at https://docs.docker.com/engine/install/.
|
||||
|
||||
> [!CAUTION]
|
||||
> If you are using Ubuntu or any distribution based on Ubuntu, do not install Docker from your distribution's repositories, as they may contain obsolete versions. Instead, you must install Docker using the packages provided directly by Docker.
|
||||
|
||||
|
||||
### 2. Configure 'docker' to allow non-privileged users to run Docker commands
|
||||
Read https://docs.docker.com/engine/install/linux-postinstall/ to configure the 'docker' user group with your user account to allow your non 'root' user to run 'docker' commands.
|
||||
|
|
@ -71,7 +76,7 @@ sudo systemctl enable --now docker
|
|||
|
||||
Test that 'docker' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora-38-docker-host ~]$ docker run hello-world
|
||||
[alex@fedora-40-docker-host ~]$ docker run hello-world
|
||||
Unable to find image 'hello-world:latest' locally
|
||||
latest: Pulling from library/hello-world
|
||||
719385e32844: Pull complete
|
||||
|
|
@ -99,7 +104,7 @@ Share images, automate workflows, and more with a free Docker ID:
|
|||
For more examples and ideas, visit:
|
||||
https://docs.docker.com/get-started/
|
||||
|
||||
[alex@fedora-38-docker-host ~]$
|
||||
[alex@fedora-40-docker-host ~]$
|
||||
```
|
||||
|
||||
### 5. Configure the required docker volumes
|
||||
|
|
@ -131,17 +136,19 @@ This will create a docker volume labeled `onedrive_data` and will map to a path
|
|||
* The owner of this specified folder must have permissions for its parent directory
|
||||
* Docker will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
||||
**NOTE:** Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Docker container will fail to start with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Docker container will fail to start with the following error message:
|
||||
> ```bash
|
||||
> ROOT level privileges prohibited!
|
||||
> ```
|
||||
|
||||
### 6. First run of Docker container under docker and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running docker in interactive mode.
|
||||
|
||||
Run the docker image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the docker container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the docker volume mapping to occur.
|
||||
> [!IMPORTANT]
|
||||
> The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the docker container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the docker volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid, you must insert a non-root UID and GID (e.g.` export ONEDRIVE_UID=1000` and export `ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
|
|
@ -228,7 +235,7 @@ docker volume inspect onedrive_conf
|
|||
|
||||
Or you can map your own config folder to the config volume. Make sure to copy all files from the docker volume into your mapped folder first.
|
||||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/usage.md#configuration)
|
||||
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this, the easiest is probably to do the following:
|
||||
|
|
@ -246,7 +253,7 @@ If you are experienced with docker and onedrive, you can use the following scrip
|
|||
```bash
|
||||
# Update ONEDRIVE_DATA_DIR with correct OneDrive directory path
|
||||
ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
# Create directory if non-existant
|
||||
# Create directory if non-existent
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
|
||||
firstRun='-d'
|
||||
|
|
@ -270,10 +277,15 @@ docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/
|
|||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | Please read [CLI Option: --auth-files](./application-config-options.md#cli-option---auth-files) |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | Please read [CLI Option: --auth-response](./application-config-options.md#cli-option---auth-response) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
| <B>ONEDRIVE_DRYRUN</B> | Controls "--dry-run" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DISABLE_DOWNLOAD_VALIDATION</B> | Controls "--disable-download-validation" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DISABLE_UPLOAD_VALIDATION</B> | Controls "--disable-upload-validation" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SYNC_SHARED_FILES</B> | Controls "--sync-shared-files" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RUNAS_ROOT</B> | Controls if the Docker container should be run as the 'root' user instead of 'onedrive' user. Default is 0 | 1 |
|
||||
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
|
|
@ -334,7 +346,8 @@ If you are running a Raspberry Pi, you will need to edit your system configurati
|
|||
|
||||
* Modify the file `/etc/dphys-swapfile` and edit the `CONF_SWAPSIZE`, for example: `CONF_SWAPSIZE=2048`.
|
||||
|
||||
A reboot of your Raspberry Pi is required to make this change effective.
|
||||
> [!IMPORTANT]
|
||||
> A reboot of your Raspberry Pi is required to make this change effective.
|
||||
|
||||
### Building and running a custom Docker image
|
||||
You can also build your own image instead of pulling the one from [hub.docker.com](https://hub.docker.com/r/driveone/onedrive):
|
||||
BIN
docs/images/add_shared_file_shortcut.png
Normal file
|
After Width: | Height: | Size: 35 KiB |
BIN
docs/images/add_shared_folder.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/images/files_shared_with_me_folder.png
Normal file
|
After Width: | Height: | Size: 184 KiB |
BIN
docs/images/initial_firewall_config.png
Normal file
|
After Width: | Height: | Size: 45 KiB |
BIN
docs/images/linux_shared_folder_view.png
Normal file
|
After Width: | Height: | Size: 103 KiB |
BIN
docs/images/linux_view_shared_file_link.png
Normal file
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/images/nginx_verify_install.png
Normal file
|
After Width: | Height: | Size: 212 KiB |
BIN
docs/images/objects_shared_with_me.png
Normal file
|
After Width: | Height: | Size: 91 KiB |
BIN
docs/images/online_shared_file_link.png
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
docs/images/shared_folder_added.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/images/windows_view_shared_file_link.png
Normal file
|
After Width: | Height: | Size: 223 KiB |
BIN
docs/images/windows_view_shared_folders.png
Normal file
|
After Width: | Height: | Size: 226 KiB |
274
docs/install.md
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
# Installing or Upgrading using Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
|
||||
## Installing or Upgrading using Distribution Packages
|
||||
This project has been packaged for the following Linux distributions as per below. The current client release is: [](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
Only the current release version or greater is supported. Earlier versions are not supported and should not be installed or used.
|
||||
|
||||
> [!CAUTION]
|
||||
> Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If any package version indicator below is 'red' for your distribution, it is recommended that you build from source. Do not install the software from the available distribution package. If a package is out of date, please contact the package maintainer for resolution.
|
||||
|
||||
| Distribution | Package Name & Package Link | PKG_Version | i686 | x86_64 | ARMHF | AARCH64 | Extra Details |
|
||||
|---------------------------------|----------------------------------------------------------------------------------------------------------|:---------------:|:----:|:------:|:-----:|:-------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Alpine Linux | [onedrive](https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge) |<a href="https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge"><img src="https://repology.org/badge/version-for-repo/alpine_edge/onedrive.svg?header=" alt="Alpine Linux Edge package" width="46" height="20"></a>|❌|✔|❌|✔ | |
|
||||
| Arch Linux<br><br>Manjaro Linux | [onedrive-abraunegg](https://aur.archlinux.org/packages/onedrive-abraunegg/) |<a href="https://aur.archlinux.org/packages/onedrive-abraunegg"><img src="https://repology.org/badge/version-for-repo/aur/onedrive-abraunegg.svg?header=" alt="AUR package" width="46" height="20"></a>|✔|✔|✔|✔ | Install via: `pamac build onedrive-abraunegg` from the Arch Linux User Repository (AUR)<br><br>**Note:** You must first install 'base-devel' as this is a pre-requisite for using the AUR<br><br>**Note:** If asked regarding a provider for 'd-runtime' and 'd-compiler', select 'liblphobos' and 'ldc'<br><br>**Note:** System must have at least 1GB of memory & 1GB swap space
|
||||
| CentOS 8 | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/epel_8/onedrive.svg?header=" alt="CentOS 8 package" width="46" height="20"></a>|❌|✔|❌|✔| **Note:** You must install the EPEL Repository first |
|
||||
| CentOS 9 | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/epel_9/onedrive.svg?header=" alt="CentOS 9 package" width="46" height="20"></a>|❌|✔|❌|✔| **Note:** You must install the EPEL Repository first |
|
||||
| Debian 11 | [onedrive](https://packages.debian.org/bullseye/source/onedrive) |<a href="https://packages.debian.org/bullseye/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_11/onedrive.svg?header=" alt="Debian 11 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Debian 11 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian 12 | [onedrive](https://packages.debian.org/bookworm/source/onedrive) |<a href="https://packages.debian.org/bookworm/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_12/onedrive.svg?header=" alt="Debian 12 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Debian 12 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian Sid | [onedrive](https://packages.debian.org/sid/onedrive) |<a href="https://packages.debian.org/sid/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_unstable/onedrive.svg?header=" alt="Debian Sid package" width="46" height="20"></a>|✔|✔|✔|✔| |
|
||||
| Fedora | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/fedora_rawhide/onedrive.svg?header=" alt="Fedora Rawhide package" width="46" height="20"></a>|✔|✔|✔|✔| |
|
||||
| Gentoo | [onedrive](https://gpo.zugaina.org/net-misc/onedrive) | No API Available |✔|✔|❌|❌| |
|
||||
| Homebrew | [onedrive](https://formulae.brew.sh/formula/onedrive) |<a href="https://formulae.brew.sh/formula/onedrive"><img src="https://repology.org/badge/version-for-repo/homebrew/onedrive.svg?header=" alt="Homebrew package" width="46" height="20"></a> |❌|✔|❌|❌| |
|
||||
| Linux Mint 20.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Linux Mint 21.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Linux Mint 22.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_24_04/onedrive.svg?header=" alt="Ubuntu 24.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| NixOS | [onedrive](https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive) |<a href="https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive"><img src="https://repology.org/badge/version-for-repo/nix_unstable/onedrive.svg?header=" alt="nixpkgs unstable package" width="46" height="20"></a>|❌|✔|❌|❌| Use package `onedrive` either by adding it to `configuration.nix` or by using the command `nix-env -iA <channel name>.onedrive`. This does not install a service. To install a service, use unstable channel (will stabilize in 20.09) and add `services.onedrive.enable=true` in `configuration.nix`. You can also add a custom package using the `services.onedrive.package` option (recommended since package lags upstream). Enabling the service installs a default package too (based on the channel). You can also add multiple onedrive accounts trivially, see [documentation](https://github.com/NixOS/nixpkgs/pull/77734#issuecomment-575874225). |
|
||||
| OpenSuSE | [onedrive](https://software.opensuse.org/package/onedrive) |<a href="https://software.opensuse.org/package/onedrive"><img src="https://repology.org/badge/version-for-repo/opensuse_network_tumbleweed/onedrive.svg?header=" alt="openSUSE Tumbleweed package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| OpenSuSE Build Service | [onedrive](https://build.opensuse.org/package/show/home:npreining:debian-ubuntu-onedrive/onedrive) | No API Available |✔|✔|✔|✔| Package Build Service for Debian and Ubuntu |
|
||||
| Raspbian | [onedrive](https://archive.raspbian.org/raspbian/pool/main/o/onedrive/) |<a href="https://archive.raspbian.org/raspbian/pool/main/o/onedrive/"><img src="https://repology.org/badge/version-for-repo/raspbian_stable/onedrive.svg?header=" alt="Raspbian Stable package" width="46" height="20"></a> |❌|❌|✔|✔| **Note:** Do not install from Raspbian Package Repositories as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Raspbian that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Slackware | [onedrive](https://slackbuilds.org/result/?search=onedrive&sv=) |<a href="https://slackbuilds.org/result/?search=onedrive&sv="><img src="https://repology.org/badge/version-for-repo/slackbuilds/onedrive.svg?header=" alt="SlackBuilds package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Solus | [onedrive](https://packages.getsol.us/shannon/o/onedrive/?sort=time&order=desc) |<a href="https://packages.getsol.us/shannon/o/onedrive/?sort=time&order=desc"><img src="https://repology.org/badge/version-for-repo/solus/onedrive.svg?header=" alt="Solus package" width="46" height="20"></a>|❌|✔|❌|❌| |
|
||||
| Ubuntu 20.04 | [onedrive](https://packages.ubuntu.com/focal/onedrive) |<a href="https://packages.ubuntu.com/focal/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 22.04 | [onedrive](https://packages.ubuntu.com/jammy/onedrive) |<a href="https://packages.ubuntu.com/jammy/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 23.04 | [onedrive](https://packages.ubuntu.com/lunar/onedrive) |<a href="https://packages.ubuntu.com/lunar/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_23_04/onedrive.svg?header=" alt="Ubuntu 23.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 24.04 | [onedrive](https://packages.ubuntu.com/noble/onedrive) |<a href="https://packages.ubuntu.com/noble/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_24_04/onedrive.svg?header=" alt="Ubuntu 24.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe as the package is obsolete and is not supported<br><br>For a supported application version, it is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Void Linux | [onedrive](https://voidlinux.org/packages/?arch=x86_64&q=onedrive) |<a href="https://voidlinux.org/packages/?arch=x86_64&q=onedrive"><img src="https://repology.org/badge/version-for-repo/void_x86_64/onedrive.svg?header=" alt="Void Linux x86_64 package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
|
||||
## Building from Source - High Level Requirements
|
||||
* For successful compilation of this application, it's crucial that the build environment is equipped with a minimum of 1GB of memory and an additional 1GB of swap space.
|
||||
* Install the required distribution package dependencies covering the required development tools and development libraries for curl and sqlite
|
||||
* Install the [Digital Mars D Compiler (DMD)](https://dlang.org/download.html) or [LDC – the LLVM-based D Compiler](https://github.com/ldc-developers/ldc)
|
||||
|
||||
> [!IMPORTANT]
|
||||
> To compile this application successfully, it is essential to use either DMD version **2.088.0** or higher, or LDC version **1.18.0** or higher. Ensuring compatibility and optimal performance necessitates the use of these specific versions or their more recent updates.
|
||||
|
||||
### Example for installing DMD Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
|
||||
### Example for installing LDC Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s ldc
|
||||
```
|
||||
|
||||
## Distribution Package Dependencies
|
||||
### Dependencies: Ubuntu 16.x
|
||||
Ubuntu Linux 16.x LTS reached the end of its five-year LTS window on April 30th 2021 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 18.x / Lubuntu 18.x
|
||||
Ubuntu Linux 18.x LTS reached the end of its five-year LTS window on May 31st 2023 and is no longer supported.
|
||||
|
||||
### Dependencies: Debian 9
|
||||
Debian 9 reached the end of its five-year support window on June 30th 2022 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 20.x -> Ubuntu 24.x / Debian 10 -> Debian 12 - x86_64
|
||||
These dependencies are also applicable for all Ubuntu based distributions such as:
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: CentOS 6.x / RHEL 6.x
|
||||
CentOS 6.x and RHEL 6.x reached End of Life status on November 30th 2020 and is no longer supported.
|
||||
|
||||
### Dependencies: CentOS 7.x / RHEL 7.x
|
||||
CentOS 7.x and RHEL 7.x reached End of Life status on June 30th 2024 and is no longer supported.
|
||||
|
||||
### Dependencies: Fedora > Version 18 / CentOS 8.x / CentOS 9.x/ RHEL 8.x / RHEL 9.x
|
||||
```text
|
||||
sudo dnf groupinstall 'Development Tools'
|
||||
sudo dnf install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo dnf install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Arch Linux & Manjaro Linux
|
||||
```text
|
||||
sudo pacman -S make pkg-config curl sqlite ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo pacman -S libnotify
|
||||
```
|
||||
|
||||
### Dependencies: Raspbian (ARMHF) and Ubuntu 22.x / Debian 11 / Debian 12 / Raspbian (ARM64)
|
||||
> [!CAUTION]
|
||||
> The minimum LDC compiler version required to compile this application is 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
These instructions were validated using:
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-armhf-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-arm64-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux ubuntu 5.15.0-1005-raspi #5-Ubuntu SMP PREEMPT Mon Apr 4 12:21:48 UTC 2022 aarch64 aarch64 aarch64 GNU/Linux` (ubuntu-22.04-preinstalled-server-arm64+raspi) using Raspberry Pi 3B (revision 1.2)
|
||||
|
||||
> [!IMPORTANT]
|
||||
> For successful compilation of this application, it's crucial that the build environment is equipped with a minimum of 1GB of memory and an additional 1GB of swap space. To verify your system's swap space availability, you can use the `swapon` command. Ensuring these requirements are met is vital for the application's compilation process.
|
||||
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: Gentoo
|
||||
```text
|
||||
sudo emerge app-portage/layman
|
||||
sudo layman -a dlang
|
||||
```
|
||||
Add ebuild from contrib/gentoo to a local overlay to use.
|
||||
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo emerge x11-libs/libnotify
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.0
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.0/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.1
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.1/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.2
|
||||
```text
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
## Compilation & Installation
|
||||
### High Level Steps
|
||||
1. Install the platform dependencies for your Linux OS
|
||||
2. Activate your DMD or LDC compiler
|
||||
3. Clone the GitHub repository, run configure and make, then install
|
||||
4. Deactivate your DMD or LDC compiler
|
||||
|
||||
### Building using DMD Reference Compiler
|
||||
Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below:
|
||||
```text
|
||||
Run `source ~/dlang/dmd-2.088.0/activate` in your shell to use dmd-2.088.0.
|
||||
This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1.
|
||||
Run `deactivate` later on to restore your environment.
|
||||
```
|
||||
Without performing this step, the compilation process will fail.
|
||||
|
||||
> [!NOTE]
|
||||
> Depending on your DMD version, substitute `2.088.0` above with your DMD version that is installed.
|
||||
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure
|
||||
make clean; make;
|
||||
sudo make install
|
||||
```
|
||||
|
||||
### Build options
|
||||
#### GUI Notification Support
|
||||
GUI notification support can be enabled using the `configure` switch `--enable-notifications`.
|
||||
|
||||
#### systemd service directory customisation support
|
||||
Systemd service files are installed in the appropriate directories on the system,
|
||||
as provided by `pkg-config systemd` settings. If the need for overriding the
|
||||
deduced path are necessary, the two options `--with-systemdsystemunitdir` (for
|
||||
the Systemd system unit location), and `--with-systemduserunitdir` (for the
|
||||
Systemd user unit location) can be specified. Passing in `no` to one of these
|
||||
options disables service file installation.
|
||||
|
||||
#### Additional Compiler Debug
|
||||
By passing `--enable-debug` to the `configure` call, `onedrive` gets built with additional debug
|
||||
information, useful (for example) to get `perf`-issued figures.
|
||||
|
||||
#### Shell Completion Support
|
||||
By passing `--enable-completions` to the `configure` call, shell completion functions are
|
||||
installed for `bash`, `zsh` and `fish`. The installation directories are determined
|
||||
as far as possible automatically, but can be overridden by passing
|
||||
`--with-bash-completion-dir=<DIR>`, `--with-zsh-completion-dir=<DIR>`, and
|
||||
`--with-fish-completion-dir=<DIR>` to `configure`.
|
||||
|
||||
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC))
|
||||
#### ARMHF Architecture (Raspbian) and ARM64 Architecture (Ubuntu 22.x / Debian 11 / Debian 12 / Raspbian)
|
||||
> [!CAUTION]
|
||||
> The minimum LDC compiler version required to compile this application is 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> For successful compilation of this application, it's crucial that the build environment is equipped with a minimum of 1GB of memory and an additional 1GB of swap space. To verify your system's swap space availability, you can use the `swapon` command. Ensuring these requirements are met is vital for the application's compilation process.
|
||||
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure DC=/usr/bin/ldmd2
|
||||
make clean; make
|
||||
sudo make install
|
||||
```
|
||||
|
||||
## Upgrading the client
|
||||
If you have installed the client from a distribution package, the client will be updated when the distribution package is updated by the package maintainer and will be updated to the new application version when you perform your package update.
|
||||
|
||||
If you have built the client from source, to upgrade your client, it is recommended that you first uninstall your existing 'onedrive' binary (see below), then re-install the client by re-cloning, re-compiling and re-installing the client again to install the new version.
|
||||
|
||||
> [!NOTE]
|
||||
> Following the uninstall process will remove all client components including *all* systemd files, including any custom files created for specific access such as SharePoint Libraries.
|
||||
|
||||
You can optionally choose to not perform this uninstallation step, and simply re-install the client by re-cloning, re-compiling and re-installing the client again - however the risk here is that you end up with two onedrive client binaries on your system, and depending on your system search path preferences, this will determine which binary is used.
|
||||
|
||||
> [!CAUTION]
|
||||
> Before performing any upgrade, it is highly recommended for you to stop any running systemd service if applicable to ensure that these services are restarted using the updated client version.
|
||||
|
||||
Post re-install, to confirm that you have the new version of the client installed, use `onedrive --version` to determine the client version that is now installed.
|
||||
|
||||
## Uninstalling the client
|
||||
### Uninstalling the client if installed from distribution package
|
||||
Follow your distribution documentation to uninstall the package that you installed
|
||||
|
||||
### Uninstalling the client if installed and built from source
|
||||
From within your GitHub repository clone, perform the following to remove the 'onedrive' binary:
|
||||
```text
|
||||
sudo make uninstall
|
||||
```
|
||||
|
||||
If you are not upgrading your client, to remove your application state and configuration, perform the following additional step:
|
||||
```
|
||||
rm -rf ~/.config/onedrive
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> If you are using the `--confdir option`, substitute `~/.config/onedrive` for the correct directory storing your client configuration.
|
||||
|
||||
If you want to just delete the application key, but keep the items database:
|
||||
```text
|
||||
rm -f ~/.config/onedrive/refresh_token
|
||||
```
|
||||
|
|
@ -1,54 +1,63 @@
|
|||
# Known Issues
|
||||
The below are known issues with this client:
|
||||
# List of Identified Known Issues
|
||||
The following points detail known issues associated with this client:
|
||||
|
||||
## Moving files into different folders should not cause data to delete and be re-uploaded
|
||||
**Issue Tracker:** [#876](https://github.com/abraunegg/onedrive/issues/876)
|
||||
## Renaming or Moving Files in Standalone Mode causes online deletion and re-upload to occur
|
||||
**Issue Tracker:** [#876](https://github.com/abraunegg/onedrive/issues/876), [#2579](https://github.com/abraunegg/onedrive/issues/2579)
|
||||
|
||||
**Description:**
|
||||
**Summary:**
|
||||
|
||||
When running the client in standalone mode (`--synchronize`) moving folders that are successfully synced around between subsequent standalone syncs causes a deletion & re-upload of data to occur.
|
||||
Renaming or moving files and/or folders while using the standalone sync option `--sync` this results in unnecessary data deletion online and subsequent re-upload.
|
||||
|
||||
**Explanation:**
|
||||
**Detailed Description:**
|
||||
|
||||
Technically, the client is 'working' correctly, as, when moving files, you are 'deleting' them from the current location, but copying them to the 'new location'. As the client is running in standalone sync mode, there is no way to track what OS operations have been done when the client is not running - thus, this is why the 'delete and upload' is occurring.
|
||||
In standalone mode (`--sync`), the renaming or moving folders locally that have already been synchronized leads to the data being deleted online and then re-uploaded in the next synchronization process.
|
||||
|
||||
**Workaround:**
|
||||
**Technical Explanation:**
|
||||
|
||||
If the tracking of moving data to new local directories is required, it is better to run the client in service mode (`--monitor`) rather than in standalone mode, as the 'move' of files can then be handled at the point when it occurs, so that the data is moved to the new location on OneDrive without the need to be deleted and re-uploaded.
|
||||
This behavior is expected from the client under these specific conditions. Renaming or moving files is interpreted as deleting them from their original location and creating them in a new location. In standalone sync mode, the client lacks the capability to track file system changes (including renames and moves) that occur when it is not running. This limitation is the root cause of the observed 'deletion and re-upload' cycle.
|
||||
|
||||
**Recommended Workaround:**
|
||||
|
||||
For effective tracking of file and folder renames or moves to new local directories, it is recommended to run the client in service mode (`--monitor`) rather than in standalone mode. This approach allows the client to immediately process these changes, enabling the data to be updated (renamed or moved) in the new location on OneDrive without undergoing deletion and re-upload.
|
||||
|
||||
## Application 'stops' running without any visible reason
|
||||
**Issue Tracker:** [#494](https://github.com/abraunegg/onedrive/issues/494), [#753](https://github.com/abraunegg/onedrive/issues/753), [#792](https://github.com/abraunegg/onedrive/issues/792), [#884](https://github.com/abraunegg/onedrive/issues/884), [#1162](https://github.com/abraunegg/onedrive/issues/1162), [#1408](https://github.com/abraunegg/onedrive/issues/1408), [#1520](https://github.com/abraunegg/onedrive/issues/1520), [#1526](https://github.com/abraunegg/onedrive/issues/1526)
|
||||
|
||||
**Description:**
|
||||
**Summary:**
|
||||
|
||||
When running the client and performing an upload or download operation, the application just stops working without any reason or explanation. If `echo $?` is used after the application has exited without visible reason, an error level of 141 may be provided.
|
||||
Users experience sudden shutdowns in a client application during file transfers with Microsoft's Europe Data Centers, likely due to unstable internet or HTTPS inspection issues. This problem, often signaled by an error code of 141, is related to the application's reliance on Curl and OpenSSL. Resolution steps include system updates, seeking support from OS vendors, ISPs, OpenSSL/Curl teams, and providing detailed debug logs to Microsoft for analysis.
|
||||
|
||||
Additionally, this issue has mainly been seen when the client is operating against Microsoft's Europe Data Centre's.
|
||||
**Detailed Description:**
|
||||
|
||||
**Explanation:**
|
||||
The application unexpectedly stops functioning during upload or download operations when using the client. This issue occurs without any apparent reason. Running `echo $?` after the unexpected exit may return an error code of 141.
|
||||
|
||||
The client is heavily dependant on Curl and OpenSSL to perform the activities with the Microsoft OneDrive service. Generally, when this issue occurs, the following is found in the HTTPS Debug Log:
|
||||
This problem predominantly arises when the client interacts with Microsoft's Europe Data Centers.
|
||||
|
||||
**Technical Explanation:**
|
||||
|
||||
The client heavily relies on Curl and OpenSSL for operations with the Microsoft OneDrive service. A common observation during this error is an entry in the HTTPS Debug Log stating:
|
||||
```
|
||||
OpenSSL SSL_read: SSL_ERROR_SYSCALL, errno 104
|
||||
```
|
||||
The only way to determine that this is the cause of the application ceasing to work is to generate a HTTPS debug log using the following additional flags:
|
||||
To confirm this as the root cause, a detailed HTTPS debug log can be generated with these commands:
|
||||
```
|
||||
--verbose --verbose --debug-https
|
||||
```
|
||||
|
||||
This is indicative of the following:
|
||||
* Some sort of flaky Internet connection somewhere between you and the OneDrive service
|
||||
* Some sort of 'broken' HTTPS transparent inspection service inspecting your traffic somewhere between you and the OneDrive service
|
||||
This error typically suggests one of the following issues:
|
||||
* An unstable internet connection between the user and the OneDrive service.
|
||||
* An issue with HTTPS transparent inspection services that monitor the traffic en route to the OneDrive service.
|
||||
|
||||
**How to resolve:**
|
||||
**Recommended Resolution Steps:**
|
||||
|
||||
The best avenue of action here are:
|
||||
* Ensure your OS is as up-to-date as possible
|
||||
* Get support from your OS vendor
|
||||
* Speak to your ISP or Help Desk for assistance
|
||||
* Open a ticket with OpenSSL and/or Curl teams to better handle this sort of connection failure
|
||||
* Generate a HTTPS Debug Log for this application and open a new support request with Microsoft and provide the debug log file for their analysis.
|
||||
Recommended steps to address this issue include:
|
||||
* Updating your operating system to the latest version.
|
||||
* Configure the application to only use HTTP/1.1
|
||||
* Configure the application to use IPv4 only.
|
||||
* Upgrade your 'curl' application to the latest available from the curl developers.
|
||||
* Seeking assistance from your OS vendor.
|
||||
* Contacting your Internet Service Provider (ISP) or your IT Help Desk.
|
||||
* Reporting the issue to the OpenSSL and/or Curl teams for improved handling of such connection failures.
|
||||
* Creating a HTTPS Debug Log during the issue and submitting a support request to Microsoft with the log for their analysis.
|
||||
|
||||
If you wish to diagnose this issue further, refer to the following:
|
||||
|
||||
https://maulwuff.de/research/ssl-debugging.html
|
||||
For more in-depth SSL troubleshooting, please read: https://maulwuff.de/research/ssl-debugging.html
|
||||
|
|
@ -1,13 +1,13 @@
|
|||
# How to configure access to specific Microsoft Azure deployments
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
> [!CAUTION]
|
||||
> Before reading this document, please ensure you are running application version [](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
In some cases it is a requirement to utilise specific Microsoft Azure cloud deployments to conform with data and security reuqirements that requires data to reside within the geographic borders of that country.
|
||||
In some cases it is a requirement to utilise specific Microsoft Azure cloud deployments to conform with data and security requirements that requires data to reside within the geographic borders of that country.
|
||||
Current national clouds that are supported are:
|
||||
* Microsoft Cloud for US Government
|
||||
* Microsoft Cloud Germany
|
||||
* Azure and Office365 operated by 21Vianet in China
|
||||
* Azure and Office365 operated by VNET in China
|
||||
|
||||
In order to successfully use these specific Microsoft Azure deployments, the following steps are required:
|
||||
1. Register an application with the Microsoft identity platform using the Azure portal
|
||||
|
|
@ -22,9 +22,9 @@ In order to successfully use these specific Microsoft Azure deployments, the fol
|
|||
|
||||
| National Cloud Environment | Microsoft Azure Portal |
|
||||
|---|---|
|
||||
| Microsoft Cloud for US Government | https://portal.azure.com/ |
|
||||
| Microsoft Cloud Germany | https://portal.azure.com/ |
|
||||
| Azure and Office365 operated by 21Vianet | https://portal.azure.cn/ |
|
||||
| Microsoft Cloud for US Government | https://portal.azure.com/ |
|
||||
| Microsoft Cloud Germany | https://portal.azure.com/ |
|
||||
| Azure and Office365 operated by VNET | https://portal.azure.cn/ |
|
||||
|
||||
2. Select 'Azure Active Directory' as the service you wish to configure
|
||||
3. Under 'Manage', select 'App registrations' to register a new application
|
||||
|
|
@ -37,7 +37,8 @@ In order to successfully use these specific Microsoft Azure deployments, the fol
|
|||
|
||||

|
||||
|
||||
**Note:** The Application (client) ID UUID as displayed after client registration, is what is required as the 'application_id' for Step 4 below.
|
||||
> [!NOTE]
|
||||
> The Application (client) ID UUID as displayed after client registration, is what is required as the 'application_id' for Step 4 below.
|
||||
|
||||
## Step 2: Configure application authentication scopes
|
||||
Configure the API permissions as per the following:
|
||||
|
|
@ -59,12 +60,12 @@ Add the appropriate redirect URI for your Azure deployment:
|
|||
A valid entry for the response URI should be one of:
|
||||
* https://login.microsoftonline.us/common/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/common/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/common/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
* https://login.chinacloudapi.cn/common/oauth2/nativeclient (Azure and Office365 operated by VNET in China)
|
||||
|
||||
For a single-tenant application, it may be necessary to use your specific tenant id instead of "common":
|
||||
* https://login.microsoftonline.us/example.onmicrosoft.us/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/example.onmicrosoft.de/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/example.onmicrosoft.cn/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
* https://login.chinacloudapi.cn/example.onmicrosoft.cn/oauth2/nativeclient (Azure and Office365 operated by VNET in China)
|
||||
|
||||
## Step 4: Configure the onedrive client to use new application registration
|
||||
Update to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
|
|
@ -89,7 +90,7 @@ Valid entries are:
|
|||
* USL4 (Microsoft Cloud for US Government)
|
||||
* USL5 (Microsoft Cloud for US Government - DOD)
|
||||
* DE (Microsoft Cloud Germany)
|
||||
* CN (Azure and Office365 operated by 21Vianet in China)
|
||||
* CN (Azure and Office365 operated by VNET in China)
|
||||
|
||||
This will configure your client to use the correct Azure AD and Graph endpoints as per [https://docs.microsoft.com/en-us/graph/deployments](https://docs.microsoft.com/en-us/graph/deployments)
|
||||
|
||||
|
|
@ -105,7 +106,7 @@ azure_tenant_id = "insert valid entry here"
|
|||
|
||||
This will configure your client to use the specified tenant id in its Azure AD and Graph endpoint URIs, instead of "common".
|
||||
The tenant id may be the GUID Directory ID (formatted "00000000-0000-0000-0000-000000000000"), or the fully qualified tenant name (e.g. "example.onmicrosoft.us").
|
||||
The GUID Directory ID may be located in the Azure administation page as per [https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id](https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id). Note that you may need to go to your national-deployment-specific administration page, rather than following the links within that document.
|
||||
The GUID Directory ID may be located in the Azure administration page as per [https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id](https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id). Note that you may need to go to your national-deployment-specific administration page, rather than following the links within that document.
|
||||
The tenant name may be obtained by following the PowerShell instructions on [https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id](https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id); it is shown as the "TenantDomain" upon completion of the "Connect-AzureAD" command.
|
||||
|
||||
**Example:**
|
||||
|
|
|
|||
|
|
@ -3,19 +3,19 @@ This client can be run as a Podman container, with 3 available container base op
|
|||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Podman container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Podman container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Alpine Linux | edge-alpine | Podman container based on Alpine 3.20 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Podman container based on Alpine 3.20 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Podman container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Podman container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
| Fedora | edge-fedora | Podman container based on Fedora 40 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Podman container based on Fedora 40 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Fedora 38
|
||||
* Fedora 40
|
||||
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
|
|
@ -23,7 +23,8 @@ The 'edge' Docker Container will align closer to all documentation and features,
|
|||
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
**Note:** The below instructions for podman has been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
> [!NOTE]
|
||||
> The below instructions for podman have been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
## High Level Configuration Steps
|
||||
1. Install 'podman' as per your distribution platform's instructions if not already installed.
|
||||
|
|
@ -62,7 +63,7 @@ If you are still experiencing permission issues despite disabling SELinux, pleas
|
|||
### 3. Test 'podman' on your platform
|
||||
Test that 'podman' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora38-podman ~]$ podman pull fedora
|
||||
[alex@fedora40-podman ~]$ podman pull fedora
|
||||
Resolved "fedora" as an alias (/etc/containers/registries.conf.d/000-shortnames.conf)
|
||||
Trying to pull registry.fedoraproject.org/fedora:latest...
|
||||
Getting image source signatures
|
||||
|
|
@ -70,9 +71,9 @@ Copying blob b30887322388 done |
|
|||
Copying config a1cd3cbf8a done |
|
||||
Writing manifest to image destination
|
||||
a1cd3cbf8adaa422629f2fcdc629fd9297138910a467b11c66e5ddb2c2753dff
|
||||
[alex@fedora38-podman ~]$ podman run fedora /bin/echo "Welcome to the Podman World"
|
||||
[alex@fedora40-podman ~]$ podman run fedora /bin/echo "Welcome to the Podman World"
|
||||
Welcome to the Podman World
|
||||
[alex@fedora38-podman ~]$
|
||||
[alex@fedora40-podman ~]$
|
||||
```
|
||||
|
||||
### 4. Configure the required podman volumes
|
||||
|
|
@ -103,17 +104,19 @@ This will create a podman volume labeled `onedrive_data` and will map to a path
|
|||
* The owner of this specified folder must not be root
|
||||
* Podman will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
||||
**NOTE:** Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Podman container will fail to start with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Podman container will fail to start with the following error message:
|
||||
> ```bash
|
||||
> ROOT level privileges prohibited!
|
||||
> ```
|
||||
|
||||
### 5. First run of Docker container under podman and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running podman in interactive mode.
|
||||
|
||||
Run the podman image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the podman container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the podman volume mapping to occur.
|
||||
> [!IMPORTANT]
|
||||
> The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the podman container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the podman volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid; you must insert a non-root UID and GID (e.g. `export ONEDRIVE_UID=1000` and `export ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
|
|
@ -127,7 +130,8 @@ podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
|||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
**Important:** In some scenarios, 'podman' sets the configuration and data directories to a different UID & GID as specified. To resolve this situation, you must run 'podman' with the `--userns=keep-id` flag to ensure 'podman' uses the UID and GID as specified. The updated script example when using `--userns=keep-id` is below:
|
||||
> [!IMPORTANT]
|
||||
> In some scenarios, 'podman' sets the configuration and data directories to a different UID & GID as specified. To resolve this situation, you must run 'podman' with the `--userns=keep-id` flag to ensure 'podman' uses the UID and GID as specified. The updated script example when using `--userns=keep-id` is below:
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
|
|
@ -142,7 +146,8 @@ podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
|||
```
|
||||
|
||||
|
||||
**Important:** If you plan to use the 'podman' built in auto-updating of container images described in 'Systemd Service & Auto Updating' below, you must pass an additional argument to set a label during the first run. The updated script example to support auto-updating of container images is below:
|
||||
> [!IMPORTANT]
|
||||
> If you plan to use the 'podman' built in auto-updating of container images described in 'Systemd Service & Auto Updating' below, you must pass an additional argument to set a label during the first run. The updated script example to support auto-updating of container images is below:
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
|
|
@ -195,7 +200,6 @@ podman start onedrive
|
|||
podman rm -f onedrive
|
||||
```
|
||||
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Systemd Service & Auto Updating
|
||||
|
|
@ -255,7 +259,7 @@ podman volume inspect onedrive_conf
|
|||
```
|
||||
Or you can map your own config folder to the config volume. Make sure to copy all files from the volume into your mapped folder first.
|
||||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/usage.md#configuration)
|
||||
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this, the easiest is probably to do the following:
|
||||
|
|
@ -290,10 +294,15 @@ podman run -it --name onedrive_work --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
|||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | Please read [CLI Option: --auth-files](./application-config-options.md#cli-option---auth-files) |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | Please read [CLI Option: --auth-response](./application-config-options.md#cli-option---auth-response) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
| <B>ONEDRIVE_DRYRUN</B> | Controls "--dry-run" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DISABLE_DOWNLOAD_VALIDATION</B> | Controls "--disable-download-validation" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DISABLE_UPLOAD_VALIDATION</B> | Controls "--disable-upload-validation" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SYNC_SHARED_FILES</B> | Controls "--sync-shared-files" option. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RUNAS_ROOT</B> | Controls if the Docker container should be run as the 'root' user instead of 'onedrive' user. Default is 0 | 1 |
|
||||
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
BIN
docs/puml/applyPotentiallyChangedItem.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
48
docs/puml/applyPotentiallyChangedItem.puml
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
@startuml
|
||||
start
|
||||
partition "applyPotentiallyChangedItem" {
|
||||
:Check if existing item path differs from changed item path;
|
||||
if (itemWasMoved) then (yes)
|
||||
:Log moving item;
|
||||
if (destination exists) then (yes)
|
||||
if (item in database) then (yes)
|
||||
:Check if item is synced;
|
||||
if (item is synced) then (yes)
|
||||
:Log destination is in sync;
|
||||
else (no)
|
||||
:Log destination occupied with a different item;
|
||||
:Backup conflicting file;
|
||||
note right: Local data loss prevention
|
||||
endif
|
||||
else (no)
|
||||
:Log destination occupied by an un-synced file;
|
||||
:Backup conflicting file;
|
||||
note right: Local data loss prevention
|
||||
endif
|
||||
endif
|
||||
:Try to rename path;
|
||||
if (dry run) then (yes)
|
||||
:Track as faked id item;
|
||||
:Track path not renamed;
|
||||
else (no)
|
||||
:Rename item;
|
||||
:Flag item as moved;
|
||||
if (item is a file) then (yes)
|
||||
:Set local timestamp to match online;
|
||||
endif
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
:Check if eTag changed;
|
||||
if (eTag changed) then (yes)
|
||||
if (item is a file and not moved) then (yes)
|
||||
:Decide if to download based on hash;
|
||||
else (no)
|
||||
:Update database;
|
||||
endif
|
||||
else (no)
|
||||
:Update database if timestamp differs or in specific operational mode;
|
||||
endif
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/applyPotentiallyNewLocalItem.png
Normal file
|
After Width: | Height: | Size: 141 KiB |
90
docs/puml/applyPotentiallyNewLocalItem.puml
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
@startuml
|
||||
start
|
||||
partition "applyPotentiallyNewLocalItem" {
|
||||
:Check if path exists;
|
||||
|
||||
if (Path exists?) then (yes)
|
||||
:Log "Path on local disk already exists";
|
||||
|
||||
if (Is symbolic link?) then (yes)
|
||||
:Log "Path is a symbolic link";
|
||||
|
||||
if (Can read symbolic link?) then (no)
|
||||
:Log "Reading symbolic link failed";
|
||||
:Log "Skipping item - invalid symbolic link";
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
:Determine if item is in-sync;
|
||||
note right: Execute 'isItemSynced()' function
|
||||
if (Is item in-sync?) then (yes)
|
||||
:Log "Item in-sync";
|
||||
:Update/Insert item in DB;
|
||||
stop
|
||||
else (no)
|
||||
:Log "Item not in-sync";
|
||||
:Compare local & remote modification times;
|
||||
|
||||
if (Local time > Remote time?) then (yes)
|
||||
if (ID in database?) then (yes)
|
||||
:Log "Local file is newer & ID in DB";
|
||||
:Fetch latest DB record;
|
||||
if (Times equal?) then (yes)
|
||||
:Log "Times match, keeping local file";
|
||||
else (no)
|
||||
:Log "Local time newer, keeping file";
|
||||
note right: Online item has an 'older' modified timestamp wise than the local file\nIt is assumed that the local file is the file to keep
|
||||
endif
|
||||
stop
|
||||
else (no)
|
||||
:Log "Local item not in DB";
|
||||
if (Bypass data protection?) then (yes)
|
||||
:Log "WARNING: Data protection disabled";
|
||||
else (no)
|
||||
:Safe backup local file;
|
||||
note right: Local data loss prevention
|
||||
endif
|
||||
stop
|
||||
endif
|
||||
else (no)
|
||||
if (Remote time > Local time?) then (yes)
|
||||
:Log "Remote item is newer";
|
||||
if (Bypass data protection?) then (yes)
|
||||
:Log "WARNING: Data protection disabled";
|
||||
else (no)
|
||||
:Safe backup local file;
|
||||
note right: Local data loss prevention
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Times equal?) then (yes)
|
||||
note left: Specific handling if timestamp was\nadjusted by isItemSynced()
|
||||
:Log "Times equal, no action required";
|
||||
:Update/Insert item in DB;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
else (no)
|
||||
:Handle as potentially new item;
|
||||
switch (Item type)
|
||||
case (File)
|
||||
:Add to download queue;
|
||||
case (Directory)
|
||||
:Log "Creating local directory";
|
||||
if (Dry run?) then (no)
|
||||
:Create directory & set attributes;
|
||||
:Save item to DB;
|
||||
else
|
||||
:Log "Dry run, faking directory creation";
|
||||
:Save item to dry-run DB;
|
||||
endif
|
||||
case (Unknown)
|
||||
:Log "Unknown type, no action";
|
||||
endswitch
|
||||
endif
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/client_side_filtering_processing_order.png
Normal file
|
After Width: | Height: | Size: 47 KiB |
34
docs/puml/client_side_filtering_processing_order.puml
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
@startuml
|
||||
|Decision Tree|
|
||||
:Start Client Side Filtering Evaluation;
|
||||
if (check_nosync?) then (true)
|
||||
:Skip item (no sync);
|
||||
else (false)
|
||||
if (skip_dotfiles?) then (true)
|
||||
:Skip file (dotfile);
|
||||
else (false)
|
||||
if (skip_symlinks?) then (true)
|
||||
:Skip item (symlink);
|
||||
else (false)
|
||||
if (skip_dir?) then (true)
|
||||
:Skip directory;
|
||||
else (false)
|
||||
if (skip_file?) then (true)
|
||||
:Skip file;
|
||||
else (false)
|
||||
if (in sync_list?) then (false)
|
||||
:Skip item (not in sync list);
|
||||
else (true)
|
||||
if (skip_size?) then (true)
|
||||
:Skip file (size too large);
|
||||
else (false)
|
||||
:File or Directory flagged\nto be synced;
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
:End Client Side Filtering Evaluation;
|
||||
@enduml
|
||||
BIN
docs/puml/client_side_filtering_rules.png
Normal file
|
After Width: | Height: | Size: 93 KiB |
71
docs/puml/client_side_filtering_rules.puml
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
@startuml
|
||||
start
|
||||
:Start;
|
||||
partition "checkPathAgainstClientSideFiltering" {
|
||||
:Get localFilePath;
|
||||
|
||||
if (Does path exist?) then (no)
|
||||
:Return false;
|
||||
stop
|
||||
endif
|
||||
|
||||
if (Check .nosync?) then (yes)
|
||||
:Check for .nosync file;
|
||||
if (.nosync found) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Skip dotfiles?) then (yes)
|
||||
:Check if dotfile;
|
||||
if (Is dotfile) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Skip symlinks?) then (yes)
|
||||
:Check if symlink;
|
||||
if (Is symlink) then (yes)
|
||||
if (Config says skip?) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
elseif (Unexisting symlink?) then (yes)
|
||||
:Check if relative link works;
|
||||
if (Relative link ok) then (no)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Skip dir or file?) then (yes)
|
||||
:Check dir or file exclusion;
|
||||
if (Excluded by config?) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Use sync_list?) then (yes)
|
||||
:Check sync_list exclusions;
|
||||
if (Excluded by sync_list?) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
if (Check file size?) then (yes)
|
||||
:Check for file size limit;
|
||||
if (File size exceeds limit?) then (yes)
|
||||
:Log and return true;
|
||||
stop
|
||||
endif
|
||||
endif
|
||||
|
||||
:Return false;
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/client_use_of_libcurl.png
Normal file
|
After Width: | Height: | Size: 52 KiB |
41
docs/puml/client_use_of_libcurl.puml
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
@startuml
|
||||
participant "OneDrive Client\nfor Linux" as od
|
||||
participant "libcurl" as lc
|
||||
participant "Client Web Browser" as browser
|
||||
participant "Microsoft Authentication Service\n(OAuth 2.0 Endpoint)" as oauth
|
||||
participant "GitHub API" as github
|
||||
participant "Microsoft Graph API" as graph
|
||||
|
||||
activate od
|
||||
activate lc
|
||||
|
||||
od->od: Generate Authentication\nService URL
|
||||
activate browser
|
||||
od->browser: Navigate to Authentication\nService URL via Client Web Browser
|
||||
browser->oauth: Request access token
|
||||
activate oauth
|
||||
oauth-->browser: Access token
|
||||
browser-->od: Access token
|
||||
deactivate oauth
|
||||
deactivate browser
|
||||
|
||||
od->lc: Check application version\nvia api.github.com
|
||||
activate github
|
||||
lc->github: Query release status
|
||||
activate github
|
||||
github-->lc: Release information
|
||||
deactivate github
|
||||
lc-->od: Process release information
|
||||
deactivate lc
|
||||
|
||||
loop API Communication
|
||||
od->lc: Construct HTTPS request (with token)
|
||||
activate lc
|
||||
lc->graph: API Request
|
||||
activate graph
|
||||
graph-->lc: API Response
|
||||
deactivate graph
|
||||
lc-->od: Process response
|
||||
deactivate lc
|
||||
end
|
||||
@enduml
|
||||
BIN
docs/puml/code_functional_component_relationships.png
Normal file
|
After Width: | Height: | Size: 119 KiB |
78
docs/puml/code_functional_component_relationships.puml
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
@startuml
|
||||
!define DATABASE_ENTITY(x) entity x
|
||||
component main {
|
||||
}
|
||||
component config {
|
||||
}
|
||||
component log {
|
||||
}
|
||||
component curlEngine {
|
||||
}
|
||||
component util {
|
||||
}
|
||||
component onedrive {
|
||||
}
|
||||
component syncEngine {
|
||||
}
|
||||
component itemdb {
|
||||
}
|
||||
component clientSideFiltering {
|
||||
}
|
||||
component monitor {
|
||||
}
|
||||
component sqlite {
|
||||
}
|
||||
component qxor {
|
||||
}
|
||||
|
||||
DATABASE_ENTITY("Database")
|
||||
|
||||
main --> config
|
||||
main --> log
|
||||
main --> curlEngine
|
||||
main --> util
|
||||
main --> onedrive
|
||||
main --> syncEngine
|
||||
main --> itemdb
|
||||
main --> clientSideFiltering
|
||||
main --> monitor
|
||||
|
||||
config --> log
|
||||
config --> util
|
||||
|
||||
clientSideFiltering --> config
|
||||
clientSideFiltering --> util
|
||||
clientSideFiltering --> log
|
||||
|
||||
syncEngine --> config
|
||||
syncEngine --> log
|
||||
syncEngine --> util
|
||||
syncEngine --> onedrive
|
||||
syncEngine --> itemdb
|
||||
syncEngine --> clientSideFiltering
|
||||
|
||||
util --> log
|
||||
util --> config
|
||||
util --> qxor
|
||||
util --> curlEngine
|
||||
|
||||
sqlite --> log
|
||||
sqlite -> "Database" : uses
|
||||
|
||||
onedrive --> config
|
||||
onedrive --> log
|
||||
onedrive --> util
|
||||
onedrive --> curlEngine
|
||||
|
||||
monitor --> config
|
||||
monitor --> util
|
||||
monitor --> log
|
||||
monitor --> clientSideFiltering
|
||||
monitor .> syncEngine : inotify event
|
||||
|
||||
itemdb --> sqlite
|
||||
itemdb --> util
|
||||
itemdb --> log
|
||||
|
||||
curlEngine --> log
|
||||
@enduml
|
||||
BIN
docs/puml/conflict_handling_default.png
Normal file
|
After Width: | Height: | Size: 54 KiB |
31
docs/puml/conflict_handling_default.puml
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
@startuml
|
||||
start
|
||||
note left: Operational Mode 'onedrive --sync'
|
||||
:Query OneDrive /delta API for online changes;
|
||||
note left: This data is considered the 'source-of-truth'\nLocal data should be a 'replica' of this data
|
||||
:Process received JSON data;
|
||||
if (JSON item is a file) then (yes)
|
||||
if (Does the file exist locally) then (yes)
|
||||
:Compute relevant file hashes;
|
||||
:Check DB for file record;
|
||||
if (DB record found) then (yes)
|
||||
:Compare file hash with DB hash;
|
||||
if (Is the hash different) then (yes)
|
||||
:Log that the local file was modified locally since last sync;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
else (no)
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
:Download file (as per online JSON item) as required;
|
||||
else (no)
|
||||
:Other handling for directories | root objects | deleted items;
|
||||
endif
|
||||
:Performing a database consistency and\nintegrity check on locally stored data;
|
||||
:Scan file system for any new data to upload;
|
||||
note left: The file that was renamed will be uploaded here
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/conflict_handling_default_resync.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
35
docs/puml/conflict_handling_default_resync.puml
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
@startuml
|
||||
start
|
||||
note left: Operational Mode 'onedrive --sync --resync'
|
||||
:Query OneDrive /delta API for online changes;
|
||||
note left: This data is considered the 'source-of-truth'\nLocal data should be a 'replica' of this data
|
||||
:Process received JSON data;
|
||||
if (JSON item is a file) then (yes)
|
||||
if (Does the file exist locally) then (yes)
|
||||
note left: In a --resync scenario there are no DB\nrecords that can be used or referenced\nuntil the JSON item is processed and\nadded to the local database cache
|
||||
if (Can the file be read) then (yes)
|
||||
:Compute UTC timestamp data from local file and JSON data;
|
||||
if (timestamps are equal) then (yes)
|
||||
else (no)
|
||||
:Log that a local file time discrepancy was detected;
|
||||
if (Do file hashes match) then (yes)
|
||||
:Correct the offending timestamp as hashes match;
|
||||
else (no)
|
||||
:Local file is technically different;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
endif
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
:Download file (as per online JSON item) as required;
|
||||
else (no)
|
||||
:Other handling for directories | root objects | deleted items;
|
||||
endif
|
||||
:Performing a database consistency and\nintegrity check on locally stored data;
|
||||
:Scan file system for any new data to upload;
|
||||
note left: The file that was renamed will be uploaded here
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/conflict_handling_local-first_default.png
Normal file
|
After Width: | Height: | Size: 83 KiB |
62
docs/puml/conflict_handling_local-first_default.puml
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
@startuml
|
||||
start
|
||||
note left: Operational Mode 'onedrive --sync --local-first'
|
||||
:Performing a database consistency and\nintegrity check on locally stored data;
|
||||
note left: This data is considered the 'source-of-truth'\nOnline data should be a 'replica' of this data
|
||||
repeat
|
||||
:Process each DB record;
|
||||
if (Is the DB record in sync with local file) then (yes)
|
||||
|
||||
else (no)
|
||||
|
||||
:Log reason for discrepancy;
|
||||
:Flag item to be processed as a modified local file;
|
||||
|
||||
endif
|
||||
repeat while
|
||||
|
||||
:Process modified items to upload;
|
||||
|
||||
if (Does local file DB record match current latest online JSON data) then (yes)
|
||||
|
||||
else (no)
|
||||
|
||||
:Log that the local file was modified locally since last sync;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
:Upload renamed local file as new file;
|
||||
|
||||
endif
|
||||
|
||||
:Upload modified file;
|
||||
|
||||
:Scan file system for any new data to upload;
|
||||
|
||||
:Query OneDrive /delta API for online changes;
|
||||
|
||||
:Process received JSON data;
|
||||
if (JSON item is a file) then (yes)
|
||||
if (Does the file exist locally) then (yes)
|
||||
:Compute relevant file hashes;
|
||||
:Check DB for file record;
|
||||
if (DB record found) then (yes)
|
||||
:Compare file hash with DB hash;
|
||||
if (Is the hash different) then (yes)
|
||||
:Log that the local file was modified locally since last sync;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
else (no)
|
||||
endif
|
||||
else (no)
|
||||
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
|
||||
:Download file (as per online JSON item) as required;
|
||||
|
||||
else (no)
|
||||
:Other handling for directories | root objects | deleted items;
|
||||
endif
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/conflict_handling_local-first_resync.png
Normal file
|
After Width: | Height: | Size: 105 KiB |
70
docs/puml/conflict_handling_local-first_resync.puml
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
@startuml
|
||||
start
|
||||
note left: Operational Mode 'onedrive --sync --local-first --resync'
|
||||
:Query OneDrive API and create new database with default root account objects;
|
||||
:Performing a database consistency and\nintegrity check on locally stored data;
|
||||
note left: This data is considered the 'source-of-truth'\nOnline data should be a 'replica' of this data\nHowever the database has only 1 record currently
|
||||
:Scan file system for any new data to upload;
|
||||
note left: This is where in this specific mode all local\ncontent is assessed for applicability for\nupload to Microsoft OneDrive
|
||||
|
||||
repeat
|
||||
:For each new local item;
|
||||
if (Is the item a directory) then (yes)
|
||||
if (Is Directory found online) then (yes)
|
||||
:Save directory details from online in local database;
|
||||
else (no)
|
||||
:Create directory online;
|
||||
:Save details in local database;
|
||||
endif
|
||||
else (no)
|
||||
:Flag file as a potentially new item to upload;
|
||||
endif
|
||||
repeat while
|
||||
|
||||
:Process potential new items to upload;
|
||||
|
||||
repeat
|
||||
:For each potential file to upload;
|
||||
if (Is File found online) then (yes)
|
||||
if (Does the online JSON data match local file) then (yes)
|
||||
:Save details in local database;
|
||||
else (no)
|
||||
:Log that the local file was modified locally since last sync;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
:Upload renamed local file as new file;
|
||||
endif
|
||||
else (no)
|
||||
:Upload new file;
|
||||
endif
|
||||
repeat while
|
||||
|
||||
:Query OneDrive /delta API for online changes;
|
||||
:Process received JSON data;
|
||||
if (JSON item is a file) then (yes)
|
||||
if (Does the file exist locally) then (yes)
|
||||
:Compute relevant file hashes;
|
||||
:Check DB for file record;
|
||||
if (DB record found) then (yes)
|
||||
:Compare file hash with DB hash;
|
||||
if (Is the hash different) then (yes)
|
||||
:Log that the local file was modified locally since last sync;
|
||||
:Renaming local file to avoid potential local data loss;
|
||||
note left: Local data loss prevention\nRenamed file will be uploaded as new file
|
||||
else (no)
|
||||
endif
|
||||
else (no)
|
||||
|
||||
endif
|
||||
else (no)
|
||||
endif
|
||||
|
||||
:Download file (as per online JSON item) as required;
|
||||
|
||||
else (no)
|
||||
:Other handling for directories | root objects | deleted items;
|
||||
endif
|
||||
|
||||
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/database_schema.png
Normal file
|
After Width: | Height: | Size: 39 KiB |
39
docs/puml/database_schema.puml
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
@startuml
|
||||
|
||||
class item {
|
||||
driveId: TEXT
|
||||
id: TEXT
|
||||
name: TEXT
|
||||
remoteName: TEXT
|
||||
type: TEXT
|
||||
eTag: TEXT
|
||||
cTag: TEXT
|
||||
mtime: TEXT
|
||||
parentId: TEXT
|
||||
quickXorHash: TEXT
|
||||
sha256Hash: TEXT
|
||||
remoteDriveId: TEXT
|
||||
remoteParentId: TEXT
|
||||
remoteId: TEXT
|
||||
remoteType: TEXT
|
||||
deltaLink: TEXT
|
||||
syncStatus: TEXT
|
||||
size: TEXT
|
||||
}
|
||||
|
||||
note right of item::driveId
|
||||
PRIMARY KEY (driveId, id)
|
||||
FOREIGN KEY (driveId, parentId) REFERENCES item
|
||||
end note
|
||||
|
||||
item --|> item : parentId
|
||||
|
||||
note "Indexes" as N1
|
||||
note left of N1
|
||||
name_idx ON item (name)
|
||||
remote_idx ON item (remoteDriveId, remoteId)
|
||||
item_children_idx ON item (driveId, parentId)
|
||||
selectByPath_idx ON item (name, driveId, parentId)
|
||||
end note
|
||||
|
||||
@enduml
|
||||
BIN
docs/puml/downloadFile.png
Normal file
|
After Width: | Height: | Size: 83 KiB |
63
docs/puml/downloadFile.puml
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
@startuml
|
||||
start
|
||||
|
||||
partition "Download File" {
|
||||
|
||||
:Get item specifics from JSON;
|
||||
:Calculate item's path;
|
||||
|
||||
if (Is item malware?) then (yes)
|
||||
:Log malware detected;
|
||||
stop
|
||||
else (no)
|
||||
:Check for file size in JSON;
|
||||
if (File size missing) then (yes)
|
||||
:Log error;
|
||||
stop
|
||||
endif
|
||||
|
||||
:Configure hashes for comparison;
|
||||
if (Hashes missing) then (yes)
|
||||
:Log error;
|
||||
stop
|
||||
endif
|
||||
|
||||
if (Does file exist locally?) then (yes)
|
||||
:Check DB for item;
|
||||
if (DB hash match?) then (no)
|
||||
:Log modification; Perform safe backup;
|
||||
note left: Local data loss prevention
|
||||
endif
|
||||
endif
|
||||
|
||||
:Check local disk space;
|
||||
if (Insufficient space?) then (yes)
|
||||
:Log insufficient space;
|
||||
stop
|
||||
else (no)
|
||||
if (Dry run?) then (yes)
|
||||
:Fake download process;
|
||||
else (no)
|
||||
:Attempt to download file;
|
||||
if (Download exception occurs?) then (yes)
|
||||
:Handle exceptions; Retry download or log error;
|
||||
endif
|
||||
|
||||
if (File downloaded successfully?) then (yes)
|
||||
:Validate download;
|
||||
if (Validation passes?) then (yes)
|
||||
:Log success; Update DB;
|
||||
else (no)
|
||||
:Log validation failure; Remove file;
|
||||
endif
|
||||
else (no)
|
||||
:Log download failed;
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
}
|
||||
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/high_level_operational_process.png
Normal file
|
After Width: | Height: | Size: 81 KiB |
55
docs/puml/high_level_operational_process.puml
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
@startuml
|
||||
|
||||
participant "OneDrive Client\nfor Linux" as Client
|
||||
participant "Microsoft OneDrive\nAPI" as API
|
||||
|
||||
== Access Token Validation ==
|
||||
Client -> Client: Validate access and\nexisting access token\nRefresh if needed
|
||||
|
||||
== Query Microsoft OneDrive /delta API ==
|
||||
Client -> API: Query /delta API
|
||||
API -> Client: JSON responses
|
||||
|
||||
== Process JSON Responses ==
|
||||
loop for each JSON response
|
||||
Client -> Client: Determine if JSON is 'root'\nor 'deleted' item\nElse, push into temporary array for further processing
|
||||
alt if 'root' or 'deleted'
|
||||
Client -> Client: Process 'root' or 'deleted' items
|
||||
else
|
||||
Client -> Client: Evaluate against 'Client Side Filtering' rules
|
||||
alt if unwanted
|
||||
Client -> Client: Discard JSON
|
||||
else
|
||||
Client -> Client: Process JSON (create dir/download file)
|
||||
Client -> Client: Save in local database cache
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
== Local Cache Database Processing for Data Integrity ==
|
||||
Client -> Client: Process local cache database\nto check local data integrity and for differences
|
||||
alt if difference found
|
||||
Client -> API: Upload file/folder change including deletion
|
||||
API -> Client: Response with item metadata
|
||||
Client -> Client: Save response to local cache database
|
||||
end
|
||||
|
||||
== Local Filesystem Scanning ==
|
||||
Client -> Client: Scan local filesystem\nfor new files/folders
|
||||
|
||||
loop for each new item
|
||||
Client -> Client: Check item against 'Client Side Filtering' rules
|
||||
alt if item passes filtering
|
||||
Client -> API: Upload new file/folder change including deletion
|
||||
API -> Client: Response with item metadata
|
||||
Client -> Client: Save response in local\ncache database
|
||||
else
|
||||
Client -> Client: Discard item\n(Does not meet filtering criteria)
|
||||
end
|
||||
end
|
||||
|
||||
== Final Data True-Up ==
|
||||
Client -> API: Query /delta link for true-up
|
||||
API -> Client: Process further online JSON changes if required
|
||||
|
||||
@enduml
|
||||
BIN
docs/puml/is_item_in_sync.png
Normal file
|
After Width: | Height: | Size: 109 KiB |
79
docs/puml/is_item_in_sync.puml
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
@startuml
|
||||
start
|
||||
partition "Is item in sync" {
|
||||
:Check if path exists;
|
||||
if (path does not exist) then (no)
|
||||
:Return false;
|
||||
stop
|
||||
else (yes)
|
||||
endif
|
||||
|
||||
:Identify item type;
|
||||
switch (item type)
|
||||
case (file)
|
||||
|
||||
:Check if path is a file;
|
||||
if (path is not a file) then (no)
|
||||
:Log "item is a directory but should be a file";
|
||||
:Return false;
|
||||
stop
|
||||
else (yes)
|
||||
endif
|
||||
|
||||
:Attempt to read local file;
|
||||
if (file is unreadable) then (no)
|
||||
:Log "file cannot be read";
|
||||
:Return false;
|
||||
stop
|
||||
else (yes)
|
||||
endif
|
||||
|
||||
:Get local and input item modified time;
|
||||
note right: The 'input item' could be a database reference object, or the online JSON object\nas provided by the Microsoft OneDrive API
|
||||
:Reduce time resolution to seconds;
|
||||
|
||||
if (localModifiedTime == itemModifiedTime) then (yes)
|
||||
:Return true;
|
||||
stop
|
||||
else (no)
|
||||
:Log time discrepancy;
|
||||
endif
|
||||
|
||||
:Check if file hash is the same;
|
||||
if (hash is the same) then (yes)
|
||||
:Log "hash match, correcting timestamp";
|
||||
if (local time > item time) then (yes)
|
||||
if (download only mode) then (no)
|
||||
:Correct timestamp online if not dryRun;
|
||||
else (yes)
|
||||
:Correct local timestamp if not dryRun;
|
||||
endif
|
||||
else (no)
|
||||
:Correct local timestamp if not dryRun;
|
||||
endif
|
||||
:Return false;
|
||||
note right: Specifically return false here as we performed a time correction\nApplication logic will then perform additional handling based on this very specific response.
|
||||
stop
|
||||
else (no)
|
||||
:Log "different hash";
|
||||
:Return false;
|
||||
stop
|
||||
endif
|
||||
|
||||
case (dir or remote)
|
||||
:Check if path is a directory;
|
||||
if (path is a directory) then (yes)
|
||||
:Return true;
|
||||
stop
|
||||
else (no)
|
||||
:Log "item is a file but should be a directory";
|
||||
:Return false;
|
||||
stop
|
||||
endif
|
||||
|
||||
case (unknown)
|
||||
:Return true but do not sync;
|
||||
stop
|
||||
endswitch
|
||||
}
|
||||
@enduml
|
||||
BIN
docs/puml/main_activity_flows.png
Normal file
|
After Width: | Height: | Size: 136 KiB |
81
docs/puml/main_activity_flows.puml
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
@startuml
|
||||
|
||||
start
|
||||
|
||||
:Validate access and existing access token\nRefresh if needed;
|
||||
|
||||
:Query /delta API;
|
||||
note right: Query Microsoft OneDrive /delta API
|
||||
:Receive JSON responses;
|
||||
|
||||
:Process JSON Responses;
|
||||
partition "Process /delta JSON Responses" {
|
||||
while (for each JSON response) is (yes)
|
||||
:Determine if JSON is 'root'\nor 'deleted' item;
|
||||
if ('root' or 'deleted') then (yes)
|
||||
:Process 'root' or 'deleted' items;
|
||||
if ('root' object) then (yes)
|
||||
:Process 'root' JSON;
|
||||
else (no)
|
||||
if (Is 'deleted' object in sync) then (yes)
|
||||
:Process deletion of local item;
|
||||
else (no)
|
||||
:Rename local file as it is not in sync;
|
||||
note right: Deletion event conflict handling\nLocal data loss prevention
|
||||
endif
|
||||
endif
|
||||
else (no)
|
||||
:Evaluate against 'Client Side Filtering' rules;
|
||||
if (unwanted) then (yes)
|
||||
:Discard JSON;
|
||||
else (no)
|
||||
:Process JSON (create dir/download file);
|
||||
if (Is the 'JSON' item in the local cache) then (yes)
|
||||
:Process JSON as a potentially changed local item;
|
||||
note left: Run 'applyPotentiallyChangedItem' function
|
||||
else (no)
|
||||
:Process JSON as potentially new local item;
|
||||
note right: Run 'applyPotentiallyNewLocalItem' function
|
||||
endif
|
||||
:Process objects in download queue;
|
||||
:Download File;
|
||||
note left: Download file from Microsoft OneDrive (Multi Threaded Download)
|
||||
:Save in local database cache;
|
||||
endif
|
||||
endif
|
||||
endwhile
|
||||
}
|
||||
|
||||
partition "Perform data integrity check based on local cache database" {
|
||||
:Process local cache database\nto check local data integrity and for differences;
|
||||
if (difference found) then (yes)
|
||||
:Upload file/folder change including deletion;
|
||||
note right: Upload local change to Microsoft OneDrive
|
||||
:Receive response with item metadata;
|
||||
:Save response to local cache database;
|
||||
else (no)
|
||||
endif
|
||||
}
|
||||
|
||||
partition "Local Filesystem Scanning" {
|
||||
:Scan local filesystem\nfor new files/folders;
|
||||
while (for each new item) is (yes)
|
||||
:Check item against 'Client Side Filtering' rules;
|
||||
if (item passes filtering) then (yes)
|
||||
:Upload new file/folder change including deletion;
|
||||
note right: Upload to Microsoft OneDrive
|
||||
:Receive response with item metadata;
|
||||
:Save response in local\ncache database;
|
||||
else (no)
|
||||
:Discard item\n(Does not meet filtering criteria);
|
||||
endif
|
||||
endwhile
|
||||
}
|
||||
|
||||
partition "Final True-Up" {
|
||||
:Query /delta link for true-up;
|
||||
note right: Final Data True-Up
|
||||
:Process further online JSON changes if required;
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/onedrive_linux_authentication.png
Normal file
|
After Width: | Height: | Size: 92 KiB |
47
docs/puml/onedrive_linux_authentication.puml
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
@startuml
|
||||
participant "OneDrive Client for Linux"
|
||||
participant "Microsoft OneDrive\nAuthentication Service\n(login.microsoftonline.com)" as AuthServer
|
||||
participant "User's Device (for MFA)" as UserDevice
|
||||
participant "Microsoft Graph API\n(graph.microsoft.com)" as GraphAPI
|
||||
participant "Microsoft OneDrive"
|
||||
|
||||
"OneDrive Client for Linux" -> AuthServer: Request Authorization\n(Client Credentials, Scopes)
|
||||
AuthServer -> "OneDrive Client for Linux": Provide Authorization Code
|
||||
|
||||
"OneDrive Client for Linux" -> AuthServer: Request Access Token\n(Authorization Code, Client Credentials)
|
||||
|
||||
alt MFA Enabled
|
||||
AuthServer -> UserDevice: Trigger MFA Challenge
|
||||
UserDevice -> AuthServer: Provide MFA Verification
|
||||
AuthServer -> "OneDrive Client for Linux": Return Access Token\n(and Refresh Token)
|
||||
"OneDrive Client for Linux" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"OneDrive Client for Linux" -> AuthServer: Is Access Token Expired?
|
||||
alt Token Expired
|
||||
"OneDrive Client for Linux" -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> "OneDrive Client for Linux": Return New Access Token
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "OneDrive Client for Linux": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Not Required
|
||||
AuthServer -> "OneDrive Client for Linux": Return Access Token\n(and Refresh Token)
|
||||
"OneDrive Client for Linux" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"OneDrive Client for Linux" -> AuthServer: Is Access Token Expired?
|
||||
alt Token Expired
|
||||
"OneDrive Client for Linux" -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> "OneDrive Client for Linux": Return New Access Token
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "OneDrive Client for Linux": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Failed or Other Auth Error
|
||||
AuthServer -> "OneDrive Client for Linux": Error Message (e.g., Invalid Credentials, MFA Failure)
|
||||
end
|
||||
|
||||
@enduml
|
||||
59
docs/puml/onedrive_windows_ad_authentication.puml
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
@startuml
|
||||
participant "Microsoft Windows OneDrive Client"
|
||||
participant "Azure Active Directory\n(Active Directory)\n(login.microsoftonline.com)" as AzureAD
|
||||
participant "Microsoft OneDrive\nAuthentication Service\n(login.microsoftonline.com)" as AuthServer
|
||||
participant "User's Device (for MFA)" as UserDevice
|
||||
participant "Microsoft Graph API\n(graph.microsoft.com)" as GraphAPI
|
||||
participant "Microsoft OneDrive"
|
||||
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Request Authorization\n(Client Credentials, Scopes)
|
||||
AzureAD -> AuthServer: Validate Credentials\n(Forward Request)
|
||||
AuthServer -> AzureAD: Provide Authorization Code
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Provide Authorization Code (via AzureAD)
|
||||
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Request Access Token\n(Authorization Code, Client Credentials)
|
||||
AzureAD -> AuthServer: Request Access Token\n(Authorization Code, Forwarded Credentials)
|
||||
AuthServer -> AzureAD: Return Access Token\n(and Refresh Token)
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Return Access Token\n(and Refresh Token) (via AzureAD)
|
||||
|
||||
alt MFA Enabled
|
||||
AzureAD -> UserDevice: Trigger MFA Challenge
|
||||
UserDevice -> AzureAD: Provide MFA Verification
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Return Access Token\n(and Refresh Token) (Post MFA)
|
||||
"Microsoft Windows OneDrive Client" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Is Access Token Expired?
|
||||
AzureAD -> AuthServer: Validate Token Expiry
|
||||
alt Token Expired
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Request New Access Token\n(Refresh Token)
|
||||
AzureAD -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> AzureAD: Return New Access Token
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Return New Access Token (via AzureAD)
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "Microsoft Windows OneDrive Client": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Not Required
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Return Access Token\n(and Refresh Token) (Direct)
|
||||
"Microsoft Windows OneDrive Client" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Is Access Token Expired?
|
||||
AzureAD -> AuthServer: Validate Token Expiry
|
||||
alt Token Expired
|
||||
"Microsoft Windows OneDrive Client" -> AzureAD: Request New Access Token\n(Refresh Token)
|
||||
AzureAD -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> AzureAD: Return New Access Token
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Return New Access Token (via AzureAD)
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "Microsoft Windows OneDrive Client": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Failed or Other Auth Error
|
||||
AzureAD -> "Microsoft Windows OneDrive Client": Error Message (e.g., Invalid Credentials, MFA Failure)
|
||||
end
|
||||
|
||||
@enduml
|
||||
BIN
docs/puml/onedrive_windows_authentication.png
Normal file
|
After Width: | Height: | Size: 93 KiB |
47
docs/puml/onedrive_windows_authentication.puml
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
@startuml
|
||||
participant "Microsoft Windows OneDrive Client"
|
||||
participant "Microsoft OneDrive\nAuthentication Service\n(login.microsoftonline.com)" as AuthServer
|
||||
participant "User's Device (for MFA)" as UserDevice
|
||||
participant "Microsoft Graph API\n(graph.microsoft.com)" as GraphAPI
|
||||
participant "Microsoft OneDrive"
|
||||
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Request Authorization\n(Client Credentials, Scopes)
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Provide Authorization Code
|
||||
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Request Access Token\n(Authorization Code, Client Credentials)
|
||||
|
||||
alt MFA Enabled
|
||||
AuthServer -> UserDevice: Trigger MFA Challenge
|
||||
UserDevice -> AuthServer: Provide MFA Verification
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Return Access Token\n(and Refresh Token)
|
||||
"Microsoft Windows OneDrive Client" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Is Access Token Expired?
|
||||
alt Token Expired
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Return New Access Token
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "Microsoft Windows OneDrive Client": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Not Required
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Return Access Token\n(and Refresh Token)
|
||||
"Microsoft Windows OneDrive Client" -> GraphAPI: Request Microsoft OneDrive Data\n(Access Token)
|
||||
loop Token Expiry Check
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Is Access Token Expired?
|
||||
alt Token Expired
|
||||
"Microsoft Windows OneDrive Client" -> AuthServer: Request New Access Token\n(Refresh Token)
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Return New Access Token
|
||||
else Token Valid
|
||||
GraphAPI -> "Microsoft OneDrive": Retrieve Data
|
||||
"Microsoft OneDrive" -> GraphAPI: Return Data
|
||||
GraphAPI -> "Microsoft Windows OneDrive Client": Provide Data
|
||||
end
|
||||
end
|
||||
else MFA Failed or Other Auth Error
|
||||
AuthServer -> "Microsoft Windows OneDrive Client": Error Message (e.g., Invalid Credentials, MFA Failure)
|
||||
end
|
||||
|
||||
@enduml
|
||||
BIN
docs/puml/uploadFile.png
Normal file
|
After Width: | Height: | Size: 96 KiB |
62
docs/puml/uploadFile.puml
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
@startuml
|
||||
start
|
||||
partition "Upload File" {
|
||||
:Log "fileToUpload";
|
||||
:Check database for parent path;
|
||||
if (parent path found?) then (yes)
|
||||
if (drive ID not empty?) then (yes)
|
||||
:Proceed;
|
||||
else (no)
|
||||
:Use defaultDriveId;
|
||||
endif
|
||||
else (no)
|
||||
stop
|
||||
endif
|
||||
:Check if file exists locally;
|
||||
if (file exists?) then (yes)
|
||||
:Read local file;
|
||||
if (can read file?) then (yes)
|
||||
if (parent path in DB?) then (yes)
|
||||
:Get file size;
|
||||
if (file size <= max?) then (yes)
|
||||
:Check available space on OneDrive;
|
||||
if (space available?) then (yes)
|
||||
:Check if file exists on OneDrive;
|
||||
if (file exists online?) then (yes)
|
||||
:Save online metadata only;
|
||||
if (if local file newer) then (yes)
|
||||
:Local file is newer;
|
||||
:Upload file as changed local file;
|
||||
else (no)
|
||||
:Remote file is newer;
|
||||
:Perform safe backup;
|
||||
note right: Local data loss prevention
|
||||
:Upload renamed file as new file;
|
||||
endif
|
||||
else (no)
|
||||
:Attempt upload;
|
||||
endif
|
||||
else (no)
|
||||
:Log "Insufficient space";
|
||||
endif
|
||||
else (no)
|
||||
:Log "File too large";
|
||||
endif
|
||||
else (no)
|
||||
:Log "Parent path issue";
|
||||
endif
|
||||
else (no)
|
||||
:Log "Cannot read file";
|
||||
endif
|
||||
else (no)
|
||||
:Log "File disappeared locally";
|
||||
endif
|
||||
:Upload success or failure;
|
||||
if (upload failed?) then (yes)
|
||||
:Log failure;
|
||||
else (no)
|
||||
:Update cache;
|
||||
endif
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/uploadModifiedFile.png
Normal file
|
After Width: | Height: | Size: 83 KiB |
56
docs/puml/uploadModifiedFile.puml
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
@startuml
|
||||
start
|
||||
partition "Upload Modified File" {
|
||||
:Initialize API Instance;
|
||||
:Check for Dry Run;
|
||||
if (Is Dry Run?) then (yes)
|
||||
:Create Fake Response;
|
||||
else (no)
|
||||
:Get Current Online Data;
|
||||
if (Error Fetching Data) then (yes)
|
||||
:Handle Errors;
|
||||
if (Retryable Error?) then (yes)
|
||||
:Retry Fetching Data;
|
||||
detach
|
||||
else (no)
|
||||
:Log and Display Error;
|
||||
endif
|
||||
endif
|
||||
if (filesize > 0 and valid latest online data) then (yes)
|
||||
if (is online file newer) then (yes)
|
||||
:Log that online is newer;
|
||||
:Perform safe backup;
|
||||
note left: Local data loss prevention
|
||||
:Upload renamed local file as new file;
|
||||
endif
|
||||
endif
|
||||
:Determine Upload Method;
|
||||
if (Use Simple Upload?) then (yes)
|
||||
:Perform Simple Upload;
|
||||
if (Upload Error) then (yes)
|
||||
:Handle Upload Errors and Retries;
|
||||
if (Retryable Upload Error?) then (yes)
|
||||
:Retry Upload;
|
||||
detach
|
||||
else (no)
|
||||
:Log and Display Upload Error;
|
||||
endif
|
||||
endif
|
||||
else (no)
|
||||
:Create Upload Session;
|
||||
:Perform Upload via Session;
|
||||
if (Session Upload Error) then (yes)
|
||||
:Handle Session Upload Errors and Retries;
|
||||
if (Retryable Session Error?) then (yes)
|
||||
:Retry Session Upload;
|
||||
detach
|
||||
else (no)
|
||||
:Log and Display Session Error;
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
:Finalize;
|
||||
}
|
||||
stop
|
||||
@enduml
|
||||
BIN
docs/puml/webhooks.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
25
docs/puml/webhooks.puml
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
@startuml
|
||||
|
||||
skinparam SequenceBoxBackgroundColor<<Internal>> AliceBlue
|
||||
|
||||
box "Linux System"<<Internal>>
|
||||
participant ClientApp as "OneDrive Client for Linux\n(webhook listener 127.0.0.1:8888)"
|
||||
participant Nginx
|
||||
end box
|
||||
|
||||
participant Firewall as "Firewall | Router"
|
||||
participant GraphAPI as "Microsoft Graph API"
|
||||
|
||||
ClientApp -> GraphAPI: HTTPS POST /v1.0/subscriptions
|
||||
GraphAPI -> ClientApp: Subscription details response (HTTPS)
|
||||
|
||||
== Subscription Notification ==
|
||||
GraphAPI -> Firewall: HTTPS Notification (port 443)
|
||||
Firewall -> Nginx: Port forwarding to Nginx (port 443)
|
||||
|
||||
alt Request for /webhooks/onedrive
|
||||
Nginx -> ClientApp: Proxy notification to http://127.0.0.1:8888
|
||||
ClientApp -> Nginx: Response
|
||||
Nginx -> GraphAPI: Return proxied response (HTTPS)
|
||||
end
|
||||
@enduml
|
||||
|
|
@ -1,21 +1,23 @@
|
|||
# How to configure OneDrive SharePoint Shared Library sync
|
||||
**WARNING:** Several users have reported files being overwritten causing data loss as a result of using this client with SharePoint Libraries when running as a systemd service.
|
||||
|
||||
When this has been investigated, the following has been noted as potential root causes:
|
||||
* File indexing application such as Baloo File Indexer or Tracker3 constantly indexing your OneDrive data
|
||||
* The use of WPS Office and how it 'saves' files by deleting the existing item and replaces it with the saved data
|
||||
> [!CAUTION]
|
||||
> Before reading this document, please ensure you are running application version [](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
Additionally there could be a yet unknown bug with the client, however all debugging and data provided previously shows that an 'external' process to the 'onedrive' application modifies the files triggering the undesirable upload to occur.
|
||||
|
||||
**Possible Preventative Actions:**
|
||||
* Disable all File Indexing for your SharePoint Library data. It is out of scope to detail on how you should do this.
|
||||
* Disable using a systemd service for syncing your SharePoint Library data.
|
||||
* Do not use WPS Office to edit your documents. Use OpenOffice or LibreOffice as these do not exhibit the same 'delete to save' action that WPS Office has.
|
||||
|
||||
Additionally, please use caution when using this client with SharePoint.
|
||||
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
> [!CAUTION]
|
||||
> Several users have reported files being overwritten causing data loss as a result of using this client with SharePoint Libraries when running as a systemd service.
|
||||
>
|
||||
> When this has been investigated, the following has been noted as potential root causes:
|
||||
> * File indexing application such as Baloo File Indexer or Tracker3 constantly indexing your OneDrive data
|
||||
> * The use of WPS Office and how it 'saves' files by deleting the existing item and replaces it with the saved data. Do not use WPS Office.
|
||||
>
|
||||
> Additionally there could be a yet unknown bug with the client, however all debugging and data provided previously shows that an 'external' process to the 'onedrive' application modifies the files triggering the undesirable upload to occur.
|
||||
>
|
||||
> **Possible Preventative Actions:**
|
||||
> * Disable all File Indexing for your SharePoint Library data. It is out of scope to detail on how you should do this.
|
||||
> * Disable using a systemd service for syncing your SharePoint Library data.
|
||||
> * Do not use WPS Office to edit your documents. Use OpenOffice or LibreOffice as these do not exhibit the same 'delete to save' action that WPS Office has.
|
||||
>
|
||||
> Additionally, the client has been 100% re-written from v2.5.0 onwards, thus the mechanism for saving data to SharePoint has been critically overhauled to simplify actions to negate the impacts where SharePoint will *modify* your file post upload, breaking file integrity as the file you have locally, is not the file that is stored online. Please read https://github.com/OneDrive/onedrive-api-docs/issues/935 for relevant details.
|
||||
|
||||
## Process Overview
|
||||
Syncing a OneDrive SharePoint library requires additional configuration for your 'onedrive' client:
|
||||
|
|
@ -26,7 +28,8 @@ Syncing a OneDrive SharePoint library requires additional configuration for your
|
|||
5. Test the configuration using '--dry-run'
|
||||
6. Sync the SharePoint Library as required
|
||||
|
||||
**Note:** The `--get-O365-drive-id` process below requires a fully configured 'onedrive' configuration so that the applicable Drive ID for the given Office 365 SharePoint Shared Library can be determined. It is highly recommended that you do not use the application 'default' configuration directory for any SharePoint Site, and configure separate items for each site you wish to use.
|
||||
> [!IMPORTANT]
|
||||
> The `--get-sharepoint-drive-id` process below requires a fully configured 'onedrive' configuration so that the applicable Drive ID for the given SharePoint Shared Library can be determined. It is highly recommended that you do not use the application 'default' configuration directory for any SharePoint Site, and configure separate items for each site you wish to use.
|
||||
|
||||
## 1. Listing available OneDrive SharePoint Libraries
|
||||
Login to the OneDrive web interface and determine which shared library you wish to configure the client for:
|
||||
|
|
@ -35,7 +38,7 @@ Login to the OneDrive web interface and determine which shared library you wish
|
|||
## 2. Query OneDrive API to obtain required configuration details
|
||||
Run the following command using the 'onedrive' client to query the OneDrive API to obtain the required 'drive_id' of the SharePoint Library that you wish to sync:
|
||||
```text
|
||||
onedrive --get-O365-drive-id '<your site name to search>'
|
||||
onedrive --get-sharepoint-drive-id '<your site name to search>'
|
||||
```
|
||||
This will return something similar to the following:
|
||||
```text
|
||||
|
|
@ -78,7 +81,8 @@ Create a new local folder to store the SharePoint Library data in:
|
|||
mkdir ~/SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
**Note:** Do not use spaces in the directory name, use '_' as a replacement
|
||||
> [!TIP]
|
||||
> Do not use spaces in the directory name, use '_' as a replacement
|
||||
|
||||
## 4. Configure SharePoint Library config file with the required 'drive_id' & 'sync_dir' options
|
||||
Download a copy of the default configuration file by downloading this file from GitHub and saving this file in the directory created above:
|
||||
|
|
@ -97,7 +101,8 @@ drive_id = "insert the drive_id value from above here"
|
|||
```
|
||||
The OneDrive client will now be configured to sync this SharePoint shared library to your local system and the location you have configured.
|
||||
|
||||
**Note:** After changing `drive_id`, you must perform a full re-synchronization by adding `--resync` to your existing command line.
|
||||
> [!IMPORTANT]
|
||||
> After changing `drive_id`, you must perform a full re-synchronization by adding `--resync` to your existing command line.
|
||||
|
||||
## 5. Validate and Test the configuration
|
||||
Validate your new configuration using the `--display-config` option to validate you have configured the application correctly:
|
||||
|
|
@ -110,7 +115,8 @@ Test your new configuration using the `--dry-run` option to validate the applica
|
|||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbose --dry-run
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
> [!IMPORTANT]
|
||||
> As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 6. Sync the SharePoint Library as required
|
||||
Sync the SharePoint Library to your system with either `--synchronize` or `--monitor` operations:
|
||||
|
|
@ -122,7 +128,8 @@ onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbos
|
|||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --monitor --verbose
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
> [!IMPORTANT]
|
||||
> As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 7. Enable custom systemd service for SharePoint Library
|
||||
Systemd can be used to automatically run this configuration in the background, however, a unique systemd service will need to be setup for this SharePoint Library instance
|
||||
|
|
@ -163,10 +170,11 @@ Example:
|
|||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/SharePoint_My_Library_Name"
|
||||
```
|
||||
|
||||
**Note:** When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
> [!IMPORTANT]
|
||||
> When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be manually expanded when editing your systemd file.
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly editied, you can enable the new systemd service using the following commands.
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
|
|
@ -38,7 +38,7 @@ OneDrive Client for Linux is not responsible for the Microsoft OneDrive Service
|
|||
|
||||
To the fullest extent permitted by law, we shall not be liable for any direct, indirect, incidental, special, consequential, or punitive damages, or any loss of profits or revenues, whether incurred directly or indirectly, or any loss of data, use, goodwill, or other intangible losses, resulting from (a) your use or inability to use the Service, or (b) any other matter relating to the Service.
|
||||
|
||||
This limitiation of liability explicitly relates to the use of the OneDrive Client for Linux software and does not affect your rights under the GPLv3.
|
||||
This limitation of liability explicitly relates to the use of the OneDrive Client for Linux software and does not affect your rights under the GPLv3.
|
||||
|
||||
## 7. Changes to Terms
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,9 @@ This systemd entry is erroneous and needs to be removed. Without removing this e
|
|||
Opening the item database ...
|
||||
|
||||
ERROR: onedrive application is already running - check system process list for active application instances
|
||||
- Use 'sudo ps aufxw | grep onedrive' to potentially determine acive running process
|
||||
- Use 'sudo ps aufxw | grep onedrive' to potentially determine active running process
|
||||
|
||||
Waiting for all internal threads to complete before exiting application
|
||||
```
|
||||
|
||||
To remove this symbolic link, run the following command:
|
||||
|
|
@ -148,7 +150,7 @@ If required, review the table below based on your 'lsb_release' information to p
|
|||
|
||||
| Release & Codename | Instructions to use |
|
||||
|--------------------|---------------------|
|
||||
| Linux Mint 19.x | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Linux Mint 21.x |
|
||||
| Linux Mint 19.x | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to at least Linux Mint 20.x |
|
||||
| Linux Mint 20.x | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Linux Mint 21.x | Use [Ubuntu 22.04](#distribution-ubuntu-2204) instructions below |
|
||||
| Linux Mint 22.x | Use [Ubuntu 24.04](#distribution-ubuntu-2404) instructions below |
|
||||
|
|
@ -162,7 +164,7 @@ If required, review the table below based on your 'lsb_release' information to p
|
|||
| Raspbian GNU/Linux 10 | You must build from source or upgrade your Operating System to Raspbian GNU/Linux 12 |
|
||||
| Raspbian GNU/Linux 11 | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Raspbian GNU/Linux 12 | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Ubuntu 18.04 / Bionic | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Ubuntu 24.04 |
|
||||
| Ubuntu 18.04 / Bionic | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to at least Ubuntu 20.04 |
|
||||
| Ubuntu 20.04 / Focal | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Ubuntu 21.04 / Hirsute | Use [Ubuntu 21.04](#distribution-ubuntu-2104) instructions below |
|
||||
| Ubuntu 21.10 / Impish | Use [Ubuntu 21.10](#distribution-ubuntu-2110) instructions below |
|
||||
|
|
@ -452,14 +454,4 @@ Read and understand the [known issues](#known-issues-with-installing-from-the-ab
|
|||
|
||||
## Known Issues with Installing from the above packages
|
||||
|
||||
### 1. The client may segfault | core-dump when exiting
|
||||
When the client is run in `--monitor` mode manually, or when using the systemd service, the client may segfault on exit.
|
||||
|
||||
This issue is caused by the way the 'onedrive' packages are built using the distribution LDC package & the default distribution compiler options which is the root cause for this issue. Refer to: https://bugs.launchpad.net/ubuntu/+source/ldc/+bug/1895969
|
||||
|
||||
**Additional references:**
|
||||
* https://github.com/abraunegg/onedrive/issues/1053
|
||||
* https://github.com/abraunegg/onedrive/issues/1609
|
||||
|
||||
**Resolution Options:**
|
||||
* Uninstall the package and build client from source
|
||||
There are currently no known issues when installing 'onedrive' from the OpenSuSE Build Service repository.
|
||||
|
|
|
|||
1070
docs/usage.md
Normal file
341
docs/webhooks.md
Normal file
|
|
@ -0,0 +1,341 @@
|
|||
# How to configure receiving real-time changes from Microsoft OneDrive using webhooks
|
||||
|
||||
When operating in 'Monitor Mode,' receiving real-time updates to online data can significantly enhance synchronisation efficiency. This is achieved by enabling 'webhooks,' which allows the client to subscribe to remote updates and receive real-time notifications when certain events occur on Microsoft OneDrive.
|
||||
|
||||
With this setup, any remote changes are promptly synchronised to your local file system, eliminating the need to wait for the next scheduled synchronisation cycle.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> In March 2023, Microsoft updated the webhook notification capability in Microsoft Graph to only allow valid HTTPS URLs as the destination for subscription updates.
|
||||
>
|
||||
> This change was part of Microsoft's ongoing efforts to enhance security and ensure that all webhooks used with Microsoft Graph comply with modern security standards. The enforcement of this requirement prevents the registration of subscriptions with non-secure (HTTP) endpoints, thereby improving the security of data transmission.
|
||||
>
|
||||
> Therefore, as a prerequisite, you must have a valid fully qualified domain name (FQDN) for your system that is externally resolvable, or configure Dynamic DNS (DDNS) using a provider such as:
|
||||
> * No-IP
|
||||
> * DynDNS
|
||||
> * DuckDNS
|
||||
> * Afraid.org
|
||||
> * Cloudflare
|
||||
> * Google Domains
|
||||
> * Dynu
|
||||
> * ChangeIP
|
||||
>
|
||||
> This FQDN will allow you to create a valid HTTPS certificate for your system, which can be used by Microsoft Graph for webhook functionality.
|
||||
>
|
||||
> Please note that it is beyond the scope of this document to provide guidance on setting up this requirement.
|
||||
|
||||
Depending on your environment, a number of steps are required to configure this application functionality. At a very high level these configuration steps are:
|
||||
|
||||
1. Application configuration to enable 'webhooks' functionality
|
||||
2. Install and configure 'nginx' as a reverse proxy for HTTPS traffic
|
||||
3. Install and configure Let's Encrypt 'certbot' to provide a valid HTTPS certificate for your system using your FQDN
|
||||
4. Configure your Firewall or Router to forward traffic to your system
|
||||
|
||||
> [!NOTE]
|
||||
> The configuration steps below were validated on [Fedora 40 Workstation](https://fedoraproject.org/)
|
||||
>
|
||||
> The installation of required components (nginx, certbot) for your platform is beyond the scope of this document and it is assumed you know how to install these components. If you are unsure, please seek support from your Linux distribution support channels.
|
||||
|
||||
### Step 1: Application configuration
|
||||
|
||||
#### Enable the 'webhook' application feature
|
||||
* In your 'config' file, set `webhook_enabled = "true"` to activate the webhook feature.
|
||||
|
||||
#### Configure the public notification URL
|
||||
* In your 'config' file, set `webhook_public_url = "https://<your.fully.qualified.domain.name>/webhooks/onedrive"` as the public URL that will receive subscription updates from the Microsoft Graph API platform.
|
||||
|
||||
> [!NOTE]
|
||||
> This URL will utilise your FQDN and must be resolvable from the Internet. This FQDN will also be used within your 'nginx' configuration.
|
||||
|
||||
#### Testing
|
||||
At this point, if you attempt to test 'webhooks', the following error *should* be generated when they are initialised:
|
||||
```
|
||||
ERROR: Microsoft OneDrive API returned an error with the following message:
|
||||
Error Message: HTTP request returned status code 400 (Bad Request)
|
||||
Error Reason: Subscription validation request timed out.
|
||||
Error Code: ValidationError
|
||||
Error Timestamp: YYYY-MM-DDThh:mm:ss
|
||||
API Request ID: eb196382-51d7-4411-984a-45a3fda90463
|
||||
Will retry creating or renewing subscription in 1 minute
|
||||
```
|
||||
This error is 100% normal at this point.
|
||||
|
||||
### Step 2: Install and configure 'nginx'
|
||||
|
||||
> [!NOTE]
|
||||
> Nginx is a web server that can also be used as a reverse proxy, load balancer, mail proxy and HTTP cache.
|
||||
|
||||
#### Install and enable 'nginx'
|
||||
* Install 'nginx' and any other requirements to install 'nginx' on your platform. It is beyond the scope of this document to advise on how to install this. Enable and start the 'nginx' service.
|
||||
|
||||
> [!TIP]
|
||||
> You may need to enable firewall rules to allow inbound http and https connections on your system:
|
||||
> ```
|
||||
> sudo firewall-cmd --permanent --add-service=http
|
||||
> sudo firewall-cmd --permanent --add-service=https
|
||||
> sudo firewall-cmd --reload
|
||||
> ```
|
||||
|
||||
#### Verify your 'nginx' installation
|
||||
* From your local machine, attempt to access the local server now running, by using a web browser and pointing at http://127.0.0.1/
|
||||
|
||||

|
||||
|
||||
#### Configure 'nginx' to receive the subscription update
|
||||
* Create a basic 'nginx' configuration file to support proxying traffic from Nginx to the local 'onedrive' process, which will, by default, have an HTTP listener running on TCP port 8888
|
||||
```
|
||||
server {
|
||||
listen 80;
|
||||
server_name <your.fully.qualified.domain.name>;
|
||||
location /webhooks/onedrive {
|
||||
# Proxy Options
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
}
|
||||
```
|
||||
The configuration above will:
|
||||
* Create an endpoint listener at `https://<your.fully.qualified.domain.name>/webhooks/onedrive`
|
||||
* Proxy the received traffic at this listener to the local listener TCP port
|
||||
|
||||
> [!TIP]
|
||||
> Save this file in the nginx configuration directory similar to the following path: `/etc/nginx/conf.d/onedrive_webhook.conf`. This will help keep all your configurations organised.
|
||||
|
||||
* Test your 'nginx' configuration using `sudo nginx -t` to validate that there are no errors. If any are identified, please correct them.
|
||||
* Once tested, reload your 'nginx' configuration to activate the webhook reverse proxy configuration.
|
||||
|
||||
### Step 4: Initial Firewall/Router Configuration
|
||||
* Configure your firewall or router to forward all incoming HTTP and HTTPS traffic to the internal address of your system where 'nginx' is running. This is required to allow the Let's Encrypt `certbot` tool to create a valid HTTPS certificate for your system.
|
||||
|
||||

|
||||
|
||||
* A valid configuration will be similar to the above illustration.
|
||||
|
||||
### Step 5: Use Let's Encrypt 'certbot' to create a SSL Certificate and deploy to your 'nginx' webhook configuration
|
||||
* Install the Let's Encrypt 'certbot' tool along with the associated python module 'python-certbot-nginx' for your platform
|
||||
* Run the 'certbot' tool on your platform to generate a valid HTTPS certificate for your `<your.fully.qualified.domain.name>` by running `certbot --nginx`. This should *detect* your active `server_name` from your 'nginx' configuration and install the certificate in the correct manner.
|
||||
|
||||
* The resulting 'nginx' configuration will look something like this:
|
||||
```
|
||||
server {
|
||||
server_name <your.fully.qualified.domain.name>;
|
||||
location /webhooks/onedrive {
|
||||
# Proxy Options
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
|
||||
listen 443 ssl; # managed by Certbot
|
||||
ssl_certificate /etc/letsencrypt/live/<your.fully.qualified.domain.name>/fullchain.pem; # managed by Certbot
|
||||
ssl_certificate_key /etc/letsencrypt/live/<your.fully.qualified.domain.name>/privkey.pem; # managed by Certbot
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
|
||||
|
||||
}
|
||||
server {
|
||||
if ($host = <your.fully.qualified.domain.name>) {
|
||||
return 301 https://$host$request_uri;
|
||||
} # managed by Certbot
|
||||
|
||||
|
||||
listen 80;
|
||||
server_name <your.fully.qualified.domain.name>;
|
||||
return 404; # managed by Certbot
|
||||
}
|
||||
```
|
||||
|
||||
* Test your 'nginx' configuration using `sudo nginx -t` to validate that there are no errors. If any are identified, please correct them.
|
||||
* Once tested, reload your 'nginx' configuration to activate the webhook reverse proxy configuration.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> It is strongly advised that, after completing this step, you implement a method to automatically keep your SSL certificate in a healthy state, because if the SSL certificate expires, webhook functionality will stop working. It is also beyond the scope of this document on how to do this.
|
||||
|
||||
### Step 6: Update 'nginx' to only use TLS 1.2 and TLS 1.3
|
||||
To ensure that you are configuring your 'nginx' configuration to use secure communication, it is advisable for you to add the following to your `onedrive_webhook.conf` within the `server {}` configuration section:
|
||||
```
|
||||
# Ensure only TLS 1.2 and TLS 1.3 are used
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
```
|
||||
The resulting 'nginx' configuration will look something like this:
|
||||
```
|
||||
server {
|
||||
server_name <your.fully.qualified.domain.name>;
|
||||
location /webhooks/onedrive {
|
||||
# Proxy Options
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://127.0.0.1:8888;
|
||||
}
|
||||
|
||||
listen 443 ssl; # managed by Certbot
|
||||
ssl_certificate /etc/letsencrypt/live/<your.fully.qualified.domain.name>/fullchain.pem; # managed by Certbot
|
||||
ssl_certificate_key /etc/letsencrypt/live/<your.fully.qualified.domain.name>/privkey.pem; # managed by Certbot
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
|
||||
|
||||
# Ensure only TLS 1.2 and TLS 1.3 are used
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
}
|
||||
server {
|
||||
if ($host = <your.fully.qualified.domain.name>) {
|
||||
return 301 https://$host$request_uri;
|
||||
} # managed by Certbot
|
||||
|
||||
|
||||
listen 80;
|
||||
server_name <your.fully.qualified.domain.name>;
|
||||
return 404; # managed by Certbot
|
||||
}
|
||||
```
|
||||
* Test your 'nginx' configuration using `sudo nginx -t` to validate that there are no errors. If any are identified, please correct them.
|
||||
* Once tested, reload your 'nginx' configuration to activate the webhook reverse proxy configuration.
|
||||
|
||||
To validate that the TLS configuration is working, perform the following tests from a different system that is able to resolve your FQDN externally:
|
||||
```
|
||||
curl -I -v --tlsv1.2 --tls-max 1.2 https://<your.fully.qualified.domain.name>
|
||||
curl -I -v --tlsv1.3 --tls-max 1.3 https://<your.fully.qualified.domain.name>
|
||||
```
|
||||
This should return valid TLS information similar to the following:
|
||||
```
|
||||
* Rebuilt URL to: https://your.fully.qualified.domain.name/
|
||||
* Trying 123.123.123.123...
|
||||
* TCP_NODELAY set
|
||||
* Connected to your.fully.qualified.domain.name (123.123.123.123) port 443 (#0)
|
||||
* ALPN, offering h2
|
||||
* ALPN, offering http/1.1
|
||||
* successfully set certificate verify locations:
|
||||
* CAfile: /etc/pki/tls/certs/ca-bundle.crt
|
||||
CApath: none
|
||||
* TLSv1.2 (OUT), TLS handshake, Client hello (1):
|
||||
* TLSv1.2 (IN), TLS handshake, Server hello (2):
|
||||
* TLSv1.2 (IN), TLS handshake, Certificate (11):
|
||||
* TLSv1.2 (IN), TLS handshake, Server key exchange (12):
|
||||
* TLSv1.2 (IN), TLS handshake, Server finished (14):
|
||||
* TLSv1.2 (OUT), TLS handshake, Client key exchange (16):
|
||||
* TLSv1.2 (OUT), TLS change cipher, Change cipher spec (1):
|
||||
* TLSv1.2 (OUT), TLS handshake, Finished (20):
|
||||
* TLSv1.2 (IN), TLS handshake, Finished (20):
|
||||
* SSL connection using TLSv1.2 / ECDHE-ECDSA-AES256-GCM-SHA384
|
||||
* ALPN, server accepted to use http/1.1
|
||||
* Server certificate:
|
||||
* subject: CN=your.fully.qualified.domain.name
|
||||
* start date: Aug 28 07:18:04 2024 GMT
|
||||
* expire date: Nov 26 07:18:03 2024 GMT
|
||||
* subjectAltName: host "your.fully.qualified.domain.name" matched cert's "your.fully.qualified.domain.name"
|
||||
* issuer: C=US; O=Let's Encrypt; CN=E6
|
||||
* SSL certificate verify ok.
|
||||
> HEAD / HTTP/1.1
|
||||
> Host: your.fully.qualified.domain.name
|
||||
> User-Agent: curl/7.61.1
|
||||
> Accept: */*
|
||||
>
|
||||
< HTTP/1.1 200 OK
|
||||
HTTP/1.1 200 OK
|
||||
< Server: nginx/1.26.2
|
||||
Server: nginx/1.26.2
|
||||
< Date: Sat, 31 Aug 2024 22:36:01 GMT
|
||||
Date: Sat, 31 Aug 2024 22:36:01 GMT
|
||||
< Content-Type: text/html
|
||||
Content-Type: text/html
|
||||
< Content-Length: 8474
|
||||
Content-Length: 8474
|
||||
< Last-Modified: Mon, 20 Feb 2023 17:42:39 GMT
|
||||
Last-Modified: Mon, 20 Feb 2023 17:42:39 GMT
|
||||
< Connection: keep-alive
|
||||
Connection: keep-alive
|
||||
< ETag: "63f3b10f-211a"
|
||||
ETag: "63f3b10f-211a"
|
||||
< Accept-Ranges: bytes
|
||||
Accept-Ranges: bytes
|
||||
```
|
||||
|
||||
Lastly, to validate that TLS 1.1 and below is being blocked, perform the following tests from a different system that is able to resolve your FQDN externally:
|
||||
```
|
||||
curl -I -v --tlsv1.1 --tls-max 1.1 https://<your.fully.qualified.domain.name>
|
||||
```
|
||||
|
||||
The response should be similar to the following:
|
||||
```
|
||||
* Rebuilt URL to: https://your.fully.qualified.domain.name/
|
||||
* Trying 123.123.123.123...
|
||||
* TCP_NODELAY set
|
||||
* Connected to your.fully.qualified.domain.name (123.123.123.123) port 443 (#0)
|
||||
* ALPN, offering h2
|
||||
* ALPN, offering http/1.1
|
||||
* successfully set certificate verify locations:
|
||||
* CAfile: /etc/pki/tls/certs/ca-bundle.crt
|
||||
CApath: none
|
||||
* TLSv1.3 (OUT), TLS alert, internal error (592):
|
||||
* error:141E70BF:SSL routines:tls_construct_client_hello:no protocols available
|
||||
curl: (35) error:141E70BF:SSL routines:tls_construct_client_hello:no protocols available
|
||||
```
|
||||
|
||||
> [!IMPORTANT]
|
||||
> TLS 1.2 and TLS 1.3 support is provided by OpenSSL.
|
||||
>
|
||||
> To correctly support only using these TLS versions, you must be using 'nginx' version 1.15.0 or later combined with OpenSSL 1.1.1 or later.
|
||||
>
|
||||
> If your distribution does not provide these, then please raise this with your distribution or upgrade your distribution to one that does.
|
||||
|
||||
> [!NOTE]
|
||||
> If you use a version of 'nginx' that supports TLS 1.3 but are using an older version of OpenSSL (e.g., OpenSSL 1.0.x), TLS 1.3 will not be supported even if your 'nginx' configuration requests it.
|
||||
|
||||
> [!NOTE]
|
||||
> If using 'LetsEncrypt', TLS 1.2 and TLS 1.3 support will be automatically configured in the `/etc/letsencrypt/options-ssl-nginx.conf` include file when the SSL Certificate is added to your 'nginx' configuration.
|
||||
|
||||
|
||||
### Step 7: Secure your 'nginx' configuration to only allow Microsoft 365 to connect
|
||||
Enhance your 'nginx' configuration to only allow the Microsoft 365 platform which includes the Microsoft Graph API to communicate with your configured webhooks endpoint. Review https://www.microsoft.com/en-us/download/details.aspx?id=56519 to assist you. Please note, it is beyond the scope of this document to tell you how to secure your system against unauthorised access of your endpoint listener.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> The IP address ranges that are part of the Microsoft 365 Common and Office Online services, which also cover Microsoft Graph API can be sourced from the above Microsoft URL. You should regularly update your configuration as Microsoft updates these ranges frequently.
|
||||
> It is recommended to automate these updates accordingly and is also beyond the scope of this document on how to do this.
|
||||
|
||||
### Step 8: Test your 'onedrive' application using this configuration
|
||||
|
||||
* Run the 'onedrive' application using `--monitor --verbose` and the client should now create a new subscription and register itself:
|
||||
```
|
||||
.....
|
||||
Performing initial synchronisation to ensure consistent local state ...
|
||||
Started webhook server
|
||||
Initializing subscription for updates ...
|
||||
Webhook: handled validation request
|
||||
Created new subscription a09ba1cf-3420-4d78-9117-b41373de33ff with expiration: 2024-08-28T08:42:00.637Z
|
||||
Attempting to contact Microsoft OneDrive Login Service
|
||||
Successfully reached Microsoft OneDrive Login Service
|
||||
Starting a sync with Microsoft OneDrive
|
||||
.....
|
||||
```
|
||||
|
||||
* Review the 'nginx' logs to validate that applicable communication is occurring:
|
||||
```
|
||||
70.37.95.11 - - [28/Aug/2024:18:26:07 +1000] "POST /webhooks/onedrive?validationToken=Validation%3a+Testing+client+application+reachability+for+subscription+Request-Id%3a+25460109-0e8b-4521-8090-dd691b407ed8 HTTP/1.1" 200 128 "-" "-" "-"
|
||||
137.135.11.116 - - [28/Aug/2024:18:32:02 +1000] "POST /webhooks/onedrive?validationToken=Validation%3a+Testing+client+application+reachability+for+subscription+Request-Id%3a+65e43e3c-cbab-4e74-87ec-0e8fafdef6d3 HTTP/1.1" 200 128 "-" "-" "-"
|
||||
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
In some circumstances, `SELinux` can prevent 'nginx' from communicating with local system processes. When this occurs, the application will generate an error similar to the following:
|
||||
```
|
||||
ERROR: Microsoft OneDrive API returned an error with the following message:
|
||||
Error Message: HTTP request returned status code 400 (Bad Request)
|
||||
Error Reason: Subscription validation request failed. Notification endpoint must respond with 200 OK to validation request.
|
||||
Error Code: ValidationError
|
||||
Error Timestamp: 2024-08-28T08:22:34
|
||||
API Request ID: 36684746-1458-4150-aeab-9871355a106c
|
||||
Calling Function: logSubscriptionError()
|
||||
```
|
||||
|
||||
To correct this issue, use the `setsebool` tool to allow HTTPD processes (which includes 'nginx') to make network connections:
|
||||
```
|
||||
sudo setsebool -P httpd_can_network_connect 1
|
||||
```
|
||||
After setting the boolean, restart 'nginx' to apply the SELinux configuration change.
|
||||
|
||||
## Resulting configuration
|
||||
|
||||
When these steps are followed, your environment configuration will be similar to the following diagram:
|
||||
|
||||

|
||||
|
||||
## Additional Configuration Assistance
|
||||
|
||||
Refer to [application-config-options.md](application-config-options.md) for further guidance on 'webhook' configuration options.
|
||||
675
onedrive.1.in
|
|
@ -1,303 +1,64 @@
|
|||
.TH ONEDRIVE "1" "@PACKAGE_DATE@" "@PACKAGE_VERSION@" "User Commands"
|
||||
.SH NAME
|
||||
onedrive \- folder synchronization with OneDrive
|
||||
onedrive \- A client for the Microsoft OneDrive Cloud Service
|
||||
.SH SYNOPSIS
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-synchronize
|
||||
[\fI\,OPTION\/\fR] --sync
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-monitor
|
||||
[\fI\,OPTION\/\fR] --monitor
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-display-config
|
||||
[\fI\,OPTION\/\fR] --display-config
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-display-sync-status
|
||||
[\fI\,OPTION\/\fR] --display-sync-status
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] -h | --help
|
||||
.br
|
||||
.B onedrive
|
||||
--version
|
||||
.SH DESCRIPTION
|
||||
A complete tool to interact with OneDrive on Linux.
|
||||
.SH OPTIONS
|
||||
Without any option given, no sync is done and the program exits.
|
||||
.TP
|
||||
\fB\-\-auth\-files\fP ARG
|
||||
Perform authorization via two files passed in as \fBARG\fP in the format \fBauthUrl:responseUrl\fP.
|
||||
The authorization URL is written to the \fBauthUrl\fP, then \fBonedrive\fP waits for
|
||||
the file \fBresponseUrl\fP to be present, and reads the response from that file.
|
||||
.TP
|
||||
\fB\-\-auth\-response\fP ARG
|
||||
Perform authentication not via interactive dialog but via providing the response url directly.
|
||||
.TP
|
||||
\fB\-\-check\-for\-nomount\fP
|
||||
Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
.br
|
||||
Configuration file key: \fBcheck_nomount\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-check\-for\-nosync\fP
|
||||
Check for the presence of .nosync in each directory. If found, skip directory from sync.
|
||||
.br
|
||||
Configuration file key: \fBcheck_nosync\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-classify\-as\-big\-delete\fP
|
||||
Number of children in a path that is locally removed which will be classified as a 'big data delete'
|
||||
.br
|
||||
Configuration file key: \fBclassify_as_big_delete\fP (default: \fB1000\fP)
|
||||
.TP
|
||||
\fB\-\-cleanup\-local\-files\fP
|
||||
Cleanup additional local files when using \-\-download-only. This will remove local data.
|
||||
.br
|
||||
Configuration file key: \fBcleanup_local_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-confdir\fP ARG
|
||||
Set the directory used to store the configuration files
|
||||
.TP
|
||||
\fB\-\-create\-directory\fP ARG
|
||||
Create a directory on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-create\-share\-link\fP ARG
|
||||
Create a shareable link for an existing file on OneDrive
|
||||
.TP
|
||||
\fB\-\-debug\-https\fP
|
||||
Debug OneDrive HTTPS communication.
|
||||
.br
|
||||
Configuration file key: \fBdebug_https\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-destination\-directory\fP ARG
|
||||
Destination directory for renamed or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-disable\-download\-validation\fP
|
||||
Disable download validation when downloading from OneDrive
|
||||
.br
|
||||
Configuration file key: \fBdisable_download_validation\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-disable\-notifications\fP
|
||||
Do not use desktop notifications in monitor mode
|
||||
.br
|
||||
Configuration file key: \fBdisable_notifications\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-disable\-upload\-validation\fP
|
||||
Disable upload validation when uploading to OneDrive
|
||||
.br
|
||||
Configuration file key: \fBdisable_upload_validation\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-display\-config\fP
|
||||
Display what options the client will use as currently configured \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-display\-running\-config\fP
|
||||
Display what options the client has been configured to use on application startup.
|
||||
.TP
|
||||
\fB\-\-display\-sync\-status\fP
|
||||
Display the sync status of the client \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-download\-only\fP
|
||||
Replicate the OneDrive online state locally, by only downloading changes from OneDrive. Do not upload local changes to OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBdownload_only\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-dry\-run\fP
|
||||
Perform a trial sync with no changes made. Can ONLY be used with --synchronize. Will be ignored for --monitor
|
||||
.br
|
||||
Configuration file key: \fBdry_run\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-enable\-logging\fP
|
||||
Enable client activity to a separate log file
|
||||
.br
|
||||
Configuration file key: \fBenable_logging\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-force\fP
|
||||
Force the deletion of data when a 'big delete' is detected
|
||||
.TP
|
||||
\fB\-\-force\-http\-11\fP
|
||||
Force the use of HTTP 1.1 for all operations
|
||||
.br
|
||||
Configuration file key: \fBforce_http_11\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-force\-sync\fP
|
||||
Force a synchronization of a specific folder, only when using --synchronize --single-directory and ignore
|
||||
.br
|
||||
all non-default skip_dir and skip_file rules
|
||||
.TP
|
||||
\fB\-\-get\-O365\-drive\-id\fP ARG
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library
|
||||
.TP
|
||||
\fB\-\-get\-file\-link\fP ARG
|
||||
Display the file link of a synced file
|
||||
.TP
|
||||
\fB\-\-list\-shared\-folders\fP
|
||||
List OneDrive Business Shared Folders
|
||||
.TP
|
||||
\fB\-\-local\-first\fP
|
||||
Synchronize from the local directory source first, before downloading changes from OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBlocal_first\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-logout\fP
|
||||
Logout the current user
|
||||
.TP
|
||||
\fB\-\-log\-dir\fP ARG
|
||||
Defines the directory where logging output is saved to, needs to end with a slash
|
||||
.br
|
||||
Configuration file key: \fBlog_dir\fP (default: \fB/var/log/onedrive/\fP)
|
||||
.TP
|
||||
\fB\-\-min\-notify\-changes\fP
|
||||
The minimum number of pending incoming changes necessary to trigger
|
||||
a desktop notification
|
||||
.br
|
||||
Configuration file key: \fBmin_notify_changes\fP (default: \fB5\fP)
|
||||
.TP
|
||||
\fB\-\-modified\-by\fP ARG
|
||||
Display the last modified by details of a given path
|
||||
.TP
|
||||
\fB\-m \-\-monitor\fP
|
||||
Keep monitoring for local and remote changes
|
||||
.TP
|
||||
\fB\-\-monitor\-interval\fP ARG
|
||||
The number of seconds by which each sync operation is undertaken when
|
||||
idle under monitor mode
|
||||
.br
|
||||
Configuration file key: \fBmonitor_interval\fP (default: \fB300\fP)
|
||||
.TP
|
||||
\fB\-\-monitor\-fullscan-frequency\fP ARG
|
||||
Number of sync runs before performing a full local scan of the synced directory
|
||||
.br
|
||||
Configuration file key: \fBmonitor_fullscan_frequency\fP (default: \fB10\fP)
|
||||
.TP
|
||||
\fB\-\-monitor\-log\-frequency\fP ARG
|
||||
Frequency of logging in monitor mode
|
||||
.br
|
||||
Configuration file key: \fBmonitor_log_frequency\fP (default: \fB5\fP)
|
||||
.TP
|
||||
\fB\-\-no\-remote\-delete\fP
|
||||
Do not delete local file 'deletes' from OneDrive when using \fB\-\-upload\-only\fR
|
||||
.br
|
||||
Configuration file key: \fBno_remote_delete\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-operation\-timeout\fP ARG
|
||||
Set the maximum amount of time (seconds) a file operation is allowed to take. This includes DNS resolution, connecting, data transfer, etc.
|
||||
.br
|
||||
Configuration file key: \fBoperation_timeout\fP (default: \fB3600\fP)
|
||||
.TP
|
||||
\fB\-\-print\-token\fP
|
||||
Print the access token, useful for debugging
|
||||
.TP
|
||||
\fB\-\-reauth\fP
|
||||
Reauthenticate the client with OneDrive
|
||||
.TP
|
||||
\fB\-\-remove\-directory\fP ARG
|
||||
Remove a directory on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-remove\-source\-files\fP
|
||||
Remove source file after successful transfer to OneDrive when using \-\-upload-only
|
||||
.br
|
||||
Configuration file key: \fBremove_source_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-resync\fP
|
||||
Forget the last saved state, perform a full sync
|
||||
.TP
|
||||
\fB\-\-resync\-auth\fP
|
||||
Approve the use of performing a --resync action without needing CLI authorization
|
||||
.TP
|
||||
\fB\-\-single\-directory\fP ARG
|
||||
Specify a single local directory within the OneDrive root to sync.
|
||||
.TP
|
||||
\fB\-\-skip\-dir\fP ARG
|
||||
Skip any directories that match this pattern from syncing
|
||||
.TP
|
||||
\fB\-\-skip\-dir\-strict\-match\fP
|
||||
When matching skip_dir directories, only match explicit matches
|
||||
.br
|
||||
Configuration file key: \fBskip_dir_strict_match\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-dot\-files\fP
|
||||
Skip dot files and folders from syncing
|
||||
.br
|
||||
Configuration file key: \fBskip_dotfiles\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-file\fP
|
||||
Skip any files that match this pattern from syncing
|
||||
.br
|
||||
Configuration file key: \fBskip_file\fP (default: \fB~*|.~*|*.tmp\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-size\fP ARG
|
||||
Skip new files larger than this size (in MB)
|
||||
.TP
|
||||
\fB\-\-skip\-symlinks\fP
|
||||
Skip syncing of symlinks
|
||||
.br
|
||||
Configuration file key: \fBskip_symlinks\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-source\-directory\fP ARG
|
||||
Source directory to rename or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-space\-reservation\fP ARG
|
||||
The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation
|
||||
.TP
|
||||
\fB\-\-sync\-root\-files\fP
|
||||
Sync all files in sync_dir root when using sync_list.
|
||||
.TP
|
||||
\fB\-\-sync\-shared\-folders\fP
|
||||
Sync OneDrive Business Shared Folders
|
||||
.br
|
||||
Configuration file key: \fBsync_business_shared_folders\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-syncdir\fP ARG
|
||||
Set the local directory used to sync files with OneDrive
|
||||
.br
|
||||
Configuration file key: \fBsync_dir\fP (default: \fB~/OneDrive\fP)
|
||||
.TP
|
||||
\fB\-\-synchronize\fP
|
||||
Perform a synchronization
|
||||
.TP
|
||||
\fB\-\-upload\-only\fP
|
||||
Replicate the locally configured sync_dir state to OneDrive, by only uploading local changes to OneDrive. Do not download changes from OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBupload_only\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-user\-agent\fP ARG
|
||||
Set the used User Agent identifier
|
||||
.br
|
||||
Configuration file key: \fBuser_agent\fP (default: don't change)
|
||||
.TP
|
||||
\fB\-v \-\-verbose\fP
|
||||
Print more details, useful for debugging. Given two times (or more)
|
||||
enables even more verbose debug statements.
|
||||
.TP
|
||||
\fB\-\-version\fP
|
||||
Print the version and exit
|
||||
.TP
|
||||
\fB\-\-with\-editing\-perms\fP
|
||||
Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>
|
||||
.TP
|
||||
\fB\-h \-\-help\fP
|
||||
This help information.
|
||||
.PP
|
||||
This is a free Microsoft OneDrive Client designed to work with OneDrive Personal, OneDrive for Business, Office365 OneDrive, and SharePoint Libraries. It's fully compatible with most major Linux distributions and FreeBSD, and can be containerised using Docker or Podman. The client offers secure one-way and two-way synchronisation capabilities, making it easy to connect to Microsoft OneDrive services across various platforms.
|
||||
|
||||
.SH FEATURES
|
||||
|
||||
State caching
|
||||
|
||||
Real-Time file monitoring with Inotify
|
||||
|
||||
File upload / download validation to ensure data integrity
|
||||
|
||||
Resumable uploads
|
||||
|
||||
Support OneDrive for Business (part of Office 365)
|
||||
|
||||
Shared Folder support for OneDrive Personal and OneDrive Business accounts
|
||||
|
||||
SharePoint / Office365 Shared Libraries
|
||||
|
||||
Desktop notifications via libnotify
|
||||
|
||||
Dry-run capability to test configuration changes
|
||||
|
||||
Prevent major OneDrive accidental data deletion after configuration change
|
||||
|
||||
Support for National cloud deployments (Microsoft Cloud for US Government, Microsoft Cloud Germany, Azure and Office 365 operated by 21Vianet in China)
|
||||
|
||||
.br
|
||||
* Compatible with OneDrive Personal, OneDrive for Business including accessing Microsoft SharePoint Libraries
|
||||
.br
|
||||
* Provides rules for client-side filtering to select data for syncing with Microsoft OneDrive accounts
|
||||
.br
|
||||
* Caches sync state for efficiency
|
||||
.br
|
||||
* Supports a dry-run option for safe configuration testing
|
||||
.br
|
||||
* Validates file transfers to ensure data integrity
|
||||
.br
|
||||
* Monitors local files in real-time using inotify
|
||||
.br
|
||||
* Supports interrupted uploads for completion at a later time
|
||||
.br
|
||||
* Capability to sync remote updates immediately via webhooks
|
||||
.br
|
||||
* Enhanced synchronisation speed with multi-threaded file transfers
|
||||
.br
|
||||
* Manages traffic bandwidth use with rate limiting
|
||||
.br
|
||||
* Supports seamless access to shared folders and files across both OneDrive Personal and OneDrive for Business accounts
|
||||
.br
|
||||
* Supports national cloud deployments including Microsoft Cloud for US Government, Microsoft Cloud Germany, and Azure and Office 365 operated by 21Vianet in China
|
||||
.br
|
||||
* Supports sending desktop alerts using libnotify
|
||||
.br
|
||||
* Protects against significant data loss on OneDrive after configuration changes
|
||||
.br
|
||||
* Works with both single and multi-tenant applications
|
||||
|
||||
.SH CONFIGURATION
|
||||
By default, the client will use a sensible set of default values to interact with the Microsoft OneDrive service.
|
||||
.TP
|
||||
Should you wish to change these defaults, you should copy the default config file into your home directory before making any applicable changes:
|
||||
|
||||
You should copy the default config file into your home directory before making changes:
|
||||
.nf
|
||||
\fB
|
||||
mkdir\ \-p\ ~/.config/onedrive
|
||||
|
|
@ -305,87 +66,299 @@ cp\ @DOCDIR@/config\ ~/.config/onedrive/config
|
|||
\fP
|
||||
.fi
|
||||
|
||||
For the supported options see the above list of command line options
|
||||
for the availability of a configuration key.
|
||||
.PP
|
||||
Patterns are case insensitive.
|
||||
\fB*\fP and \fB?\fP wildcards characters are supported.
|
||||
Use \fB|\fP to separate multiple patterns.
|
||||
.TP
|
||||
Please refer to the online documentation file application-config-options.md for details on all configuration file options.
|
||||
|
||||
After changing the filters (\fBskip_file\fP or \fBskip_dir\fP in your configs) you must
|
||||
execute \fBonedrive --synchronize --resync\fP.
|
||||
.SH CLIENT SIDE FILTERING
|
||||
Client Side Filtering in the context of the OneDrive Client for Linux refers to user-configured rules that determine what files and directories the client should upload or download from Microsoft OneDrive. These rules are crucial for optimising synchronisation, especially when dealing with large numbers of files or specific file types. The OneDrive Client for Linux offers several configuration options to facilitate this:
|
||||
.TP
|
||||
.B skip_dir
|
||||
Specifies directories that should not be synchronised with OneDrive. Useful for omitting large or irrelevant directories from the sync process.
|
||||
.TP
|
||||
.B skip_dotfiles
|
||||
Excludes dotfiles, usually configuration files or scripts, from the sync. Ideal for users who prefer to keep these files local.
|
||||
.TP
|
||||
.B skip_file
|
||||
Allows specifying specific files to exclude from synchronisation. Offers flexibility in selecting essential files for cloud storage.
|
||||
.TP
|
||||
.B skip_symlinks
|
||||
Prevents symlinks, which often point to files outside the OneDrive directory or to irrelevant locations, from being included in the sync.
|
||||
.PP
|
||||
Additionally, the OneDrive Client for Linux allows the implementation of Client Side Filtering rules through a 'sync_list' file. This file explicitly states which directories or files should be included in the synchronisation. By default, any item not listed in the 'sync_list' file is excluded. This approach offers granular control over synchronisation, ensuring that only necessary data is transferred to and from Microsoft OneDrive.
|
||||
.PP
|
||||
These configurable options and the 'sync_list' file provide users with the flexibility to tailor the synchronisation process to their specific needs, conserving bandwidth and storage space while ensuring that important files are always backed up and accessible.
|
||||
.TP
|
||||
.B NOTE:
|
||||
After changing any Client Side Filtering rule, a full re-synchronisation must be performed using --resync
|
||||
|
||||
.SH FIRST RUN
|
||||
|
||||
After installing the application you must run it at least once from the terminal
|
||||
to authorize it.
|
||||
|
||||
You will be asked to open a specific link using your web browser where you
|
||||
will have to login into your Microsoft Account and give the application the
|
||||
permission to access your files. After giving the permission, you will be
|
||||
redirected to a blank page. Copy the URI of the blank page into the application.
|
||||
|
||||
|
||||
.SH SYSTEMD INTEGRATION
|
||||
|
||||
Service files are installed into user and system directories.
|
||||
Once you've installed the application, you'll need to authorise it using your Microsoft OneDrive Account. This can be done by simply running the application without any additional command switches.
|
||||
.TP
|
||||
OneDrive service running as root user
|
||||
To enable this mode, run as root user
|
||||
.nf
|
||||
\fB
|
||||
systemctl enable onedrive
|
||||
systemctl start onedrive
|
||||
\fP
|
||||
.fi
|
||||
Please be aware that some companies may require you to explicitly add this app to the Microsoft MyApps portal. To add an approved app to your apps, click on the ellipsis in the top-right corner and select "Request new apps." On the next page, you can add this app. If it's not listed, you should make a request through your IT department.
|
||||
.TP
|
||||
When you run the application for the first time, you'll be prompted to open a specific URL using your web browser, where you'll need to log in to your Microsoft Account and grant the application permission to access your files. After granting permission to the application, you'll be redirected to a blank page. Simply copy the URI from the blank page and paste it into the application.
|
||||
.TP
|
||||
This process authenticates your application with your account information, and it is now ready to use to sync your data between your local system and Microsoft OneDrive.
|
||||
|
||||
.SH GUI NOTIFICATIONS
|
||||
If the client has been compiled with support for notifications, the client will send notifications about client activity via libnotify to the GUI via DBus when the client is being run in --monitor mode.
|
||||
|
||||
.SH APPLICATION LOGGING
|
||||
When running onedrive all actions can be logged to a separate log file. This can be enabled by using the \fB--enable-logging\fP flag. By default, log files will be written to \fB/var/log/onedrive\fP. All logfiles will be in the format of \fB%username%.onedrive.log\fP, where \fB%username%\fP represents the user who ran the client.
|
||||
|
||||
.SH ALL CLI OPTIONS
|
||||
The options below allow you to control the behavior of the onedrive client from the CLI. Without any specific option, if the client is already authenticated, the client will exit without any further action.
|
||||
|
||||
.TP
|
||||
OneDrive service running as root user for a non-root user
|
||||
This mode allows starting the OneDrive service automatically with
|
||||
system start for multiple users. For each \fB<username>\fP run:
|
||||
.nf
|
||||
\fB
|
||||
systemctl enable onedrive@<username>
|
||||
systemctl start onedrive@<username>
|
||||
\fP
|
||||
.fi
|
||||
\fB\-\-sync\fR
|
||||
Do a one-time synchronisation with OneDrive.
|
||||
|
||||
.TP
|
||||
OneDrive service running as non-root user
|
||||
In this mode the service will be started when the user logs in.
|
||||
Run as user
|
||||
.nf
|
||||
\fB
|
||||
systemctl --user enable onedrive
|
||||
systemctl --user start onedrive
|
||||
\fP
|
||||
.fi
|
||||
\fB\-\-monitor\fR
|
||||
Monitor filesystem for changes and sync regularly.
|
||||
|
||||
.SH LOGGING OUTPUT
|
||||
.TP
|
||||
\fB\-\-display-config\fR
|
||||
Display the currently used configuration for the onedrive client.
|
||||
|
||||
When running onedrive all actions can be logged to a separate log file.
|
||||
This can be enabled by using the \fB--enable-logging\fP flag.
|
||||
By default, log files will be written to \fB/var/log/onedrive\fP.
|
||||
.TP
|
||||
\fB\-\-display-sync-status\fR
|
||||
Query OneDrive service and report on pending changes.
|
||||
|
||||
All logfiles will be in the format of \fB%username%.onedrive.log\fP,
|
||||
where \fB%username%\fP represents the user who ran the client.
|
||||
.TP
|
||||
\fB\-\-auth-files\fR \fIARG\fR
|
||||
Perform authentication not via interactive dialog but via files that are read/written when using this option. The two files are passed in as \fBARG\fP in the format \fBauthUrl:responseUrl\fP.
|
||||
The authorisation URL is written to the \fBauthUrl\fP file, then \fBonedrive\fP waits for the file \fBresponseUrl\fP to be present, and reads the response from that file.
|
||||
.br
|
||||
Always specify the full path when using this option, otherwise the application will default to using the default configuration path for these files (~/.config/onedrive/)
|
||||
|
||||
.TP
|
||||
\fB\-\-auth-response\fR \fIARG\fR
|
||||
Perform authentication not via interactive dialog but via providing the response URL directly.
|
||||
|
||||
.SH NOTIFICATIONS
|
||||
.TP
|
||||
\fB\-\-check-for-nomount\fR
|
||||
Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
|
||||
If OneDrive has been compiled with support for notifications, a running
|
||||
\fBonedrive\fP in monitor mode will send notifications about
|
||||
initialization and errors via libnotify to the dbus.
|
||||
.TP
|
||||
\fB\-\-check-for-nosync\fR
|
||||
Check for the presence of .nosync in each directory. If found, skip directory from sync.
|
||||
|
||||
.TP
|
||||
\fB\-\-classify-as-big-delete\fR \fIARG\fR
|
||||
Number of children in a path that is locally removed which will be classified as a 'big data delete'.
|
||||
|
||||
.TP
|
||||
\fB\-\-cleanup-local-files\fR
|
||||
Cleanup additional local files when using --download-only. This will remove local data.
|
||||
|
||||
.TP
|
||||
\fB\-\-confdir\fR \fIARG\fR
|
||||
Set the directory used to store the configuration files.
|
||||
|
||||
.TP
|
||||
\fB\-\-create-directory\fR \fIARG\fR
|
||||
Create a directory on OneDrive - no sync will be performed.
|
||||
|
||||
.TP
|
||||
\fB\-\-create-share-link\fR \fIARG\fR
|
||||
Create a shareable link for an existing file on OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-debug-https\fR
|
||||
Debug OneDrive HTTPS communication.
|
||||
|
||||
.TP
|
||||
\fB\-\-destination-directory\fR \fIARG\fR
|
||||
Destination directory for renamed or moved items on OneDrive - no sync will be performed.
|
||||
|
||||
.TP
|
||||
\fB\-\-disable-download-validation\fR
|
||||
Disable download validation when downloading from OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-disable-notifications\fR
|
||||
Do not use desktop notifications in monitor mode.
|
||||
|
||||
.TP
|
||||
\fB\-\-disable-upload-validation\fR
|
||||
Disable upload validation when uploading to OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-display-quota\fR
|
||||
Display the quota status of the client - no sync will be performed.
|
||||
|
||||
.TP
|
||||
\fB\-\-display-running-config\fR
|
||||
Display what options the client has been configured to use on application startup.
|
||||
|
||||
.TP
|
||||
\fB\-\-download-only\fR
|
||||
Replicate the OneDrive online state locally, by only downloading changes from OneDrive. Do not upload local changes to OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-dry-run\fR
|
||||
Perform a trial sync with no changes made.
|
||||
|
||||
.TP
|
||||
\fB\-\-enable-logging\fR
|
||||
Enable client activity to a separate log file.
|
||||
|
||||
.TP
|
||||
\fB\-\-force\fR
|
||||
Force the deletion of data when a 'big delete' is detected.
|
||||
|
||||
.TP
|
||||
\fB\-\-force-http-11\fR
|
||||
Force the use of HTTP 1.1 for all operations.
|
||||
|
||||
.TP
|
||||
\fB\-\-force-sync\fR
|
||||
Force a synchronisation of a specific folder, only when using --sync --single-directory and ignore all non-default skip_dir and skip_file rules.
|
||||
|
||||
.TP
|
||||
\fB\-\-get-O365-drive-id\fR \fIARG\fR
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library (DEPRECATED).
|
||||
|
||||
.TP
|
||||
\fB\-\-get-file-link\fR \fIARG\fR
|
||||
Display the file link of a synced file.
|
||||
|
||||
.TP
|
||||
\fB\-\-get-sharepoint-drive-id\fR
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library.
|
||||
|
||||
.TP
|
||||
\fB\-\-help\fR, \fB\-h\fR
|
||||
Display application help.
|
||||
|
||||
.TP
|
||||
\fB\-\-list-shared-items\fR
|
||||
List OneDrive Business Shared Items.
|
||||
|
||||
.TP
|
||||
\fB\-\-local-first\fR
|
||||
Synchronise from the local directory source first, before downloading changes from OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-log-dir\fR \fIARG\fR
|
||||
Directory where logging output is saved to, needs to end with a slash.
|
||||
|
||||
.TP
|
||||
\fB\-\-logout\fR
|
||||
Logout the current user.
|
||||
|
||||
.TP
|
||||
\fB\-\-modified-by\fR \fIARG\fR
|
||||
Display the last modified by details of a given path.
|
||||
|
||||
.TP
|
||||
\fB\-\-monitor-interval\fR \fIARG\fR
|
||||
Number of seconds by which each sync operation is undertaken when idle under monitor mode.
|
||||
|
||||
.TP
|
||||
\fB\-\-monitor-log-frequency\fR \fIARG\fR
|
||||
Frequency of logging in monitor mode.
|
||||
|
||||
.TP
|
||||
\fB\-\-no-remote-delete\fR
|
||||
Do not delete local file 'deletes' from OneDrive when using --upload-only.
|
||||
|
||||
.TP
|
||||
\fB\-\-print-access-token\fR
|
||||
Print the access token, useful for debugging.
|
||||
|
||||
.TP
|
||||
\fB\-\-reauth\fR
|
||||
Reauthenticate the client with OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-remove-directory\fR \fIARG\fR
|
||||
Remove a directory on OneDrive - no sync will be performed.
|
||||
|
||||
.TP
|
||||
\fB\-\-remove-source-files\fR
|
||||
Remove source file after successful transfer to OneDrive when using --upload-only.
|
||||
|
||||
.TP
|
||||
\fB\-\-resync\fR
|
||||
Forget the last saved state, perform a full sync.
|
||||
|
||||
.TP
|
||||
\fB\-\-resync-auth\fR
|
||||
Approve the use of performing a --resync action.
|
||||
|
||||
.TP
|
||||
\fB\-\-single-directory\fR \fIARG\fR
|
||||
Specify a single local directory within the OneDrive root to sync.
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-dir\fR \fIARG\fR
|
||||
Skip any directories that match this pattern from syncing.
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-dir-strict-match\fR
|
||||
When matching skip_dir directories, only match explicit matches.
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-dot-files\fR
|
||||
Skip dot files and folders from syncing.
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-file\fR \fIARG\fR
|
||||
Skip any files that match this pattern from syncing.
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-size\fR \fIARG\fR
|
||||
Skip new files larger than this size (in MB).
|
||||
|
||||
.TP
|
||||
\fB\-\-skip-symlinks\fR
|
||||
Skip syncing of symlinks.
|
||||
|
||||
.TP
|
||||
\fB\-\-source-directory\fR \fIARG\fR
|
||||
Source directory to rename or move on OneDrive - no sync will be performed.
|
||||
|
||||
.TP
|
||||
\fB\-\-space-reservation\fR \fIARG\fR
|
||||
The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation.
|
||||
|
||||
.TP
|
||||
\fB\-\-sync-root-files\fR
|
||||
Sync all files in sync_dir root when using sync_list.
|
||||
|
||||
.TP
|
||||
\fB\-\-sync-shared-files\fR
|
||||
Sync OneDrive Business Shared Files to the local filesystem.
|
||||
|
||||
.TP
|
||||
\fB\-\-syncdir\fR \fIARG\fR
|
||||
Specify the local directory used for synchronisation to OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-synchronize\fR
|
||||
Perform a synchronisation with Microsoft OneDrive (DEPRECATED).
|
||||
|
||||
.TP
|
||||
\fB\-\-upload-only\fR
|
||||
Replicate the locally configured sync_dir state to OneDrive, by only uploading local changes to OneDrive. Do not download changes from OneDrive.
|
||||
|
||||
.TP
|
||||
\fB\-\-verbose\fR, \fB\-v+\fR
|
||||
Print more details, useful for debugging (repeat for extra debugging).
|
||||
|
||||
.TP
|
||||
\fB\-\-version\fR
|
||||
Print the version and exit.
|
||||
|
||||
.TP
|
||||
\fB\-\-with-editing-perms\fR
|
||||
Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>.
|
||||
|
||||
.SH DOCUMENTATION
|
||||
All documentation is available on GitHub: https://github.com/abraunegg/onedrive/tree/master/docs/
|
||||
|
||||
Note that this does not work if \fBonedrive\fP is started as root
|
||||
for a user via the \fBonedrive@<username>\fP service.
|
||||
|
||||
.SH SEE ALSO
|
||||
|
||||
Further examples and documentation is available in
|
||||
\f[C]README.md\f[]
|
||||
\f[C]docs/USAGE.md\f[]
|
||||
\f[C]docs/advanced-usage.md\f[]
|
||||
\f[C]docs/BusinessSharedFolders.md\f[]
|
||||
\f[C]docs/SharePoint-Shared-Libraries.md\f[]
|
||||
\f[C]docs/national-cloud-deployments.md\f[]
|
||||
.BR curl(1),
|
||||
|
|
|
|||
94
readme.md
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
# OneDrive Client for Linux
|
||||
[](https://github.com/abraunegg/onedrive/releases)
|
||||
[](https://github.com/abraunegg/onedrive/releases)
|
||||
[](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml)
|
||||
[](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml)
|
||||
[](https://hub.docker.com/r/driveone/onedrive)
|
||||
|
||||
Introducing a free Microsoft OneDrive Client that seamlessly supports OneDrive Personal, OneDrive for Business, OneDrive for Office365, and SharePoint Libraries.
|
||||
|
||||
This robust and highly customisable client is compatible with all major Linux distributions and FreeBSD, and can also be deployed as a container using Docker or Podman. It offers both one-way and two-way synchronisation capabilities while ensuring a secure connection to Microsoft OneDrive services.
|
||||
|
||||
Originally derived as a 'fork' from the [skilion](https://github.com/skilion/onedrive) client, it's worth noting that the developer of the original client has explicitly stated they have no intention of maintaining or supporting their work ([reference](https://github.com/skilion/onedrive/issues/518#issuecomment-717604726)).
|
||||
|
||||
This client represents a 100% re-imagining of the original work, addressing numerous notable bugs and issues while incorporating a significant array of new features. This client has been under active development since mid-2018.
|
||||
|
||||
## Features
|
||||
* Compatible with OneDrive Personal, OneDrive for Business including accessing Microsoft SharePoint Libraries
|
||||
* Provides rules for client-side filtering to select data for syncing with Microsoft OneDrive accounts
|
||||
* Caches sync state for efficiency
|
||||
* Supports a dry-run option for safe configuration testing
|
||||
* Validates file transfers to ensure data integrity
|
||||
* Monitors local files in real-time using inotify
|
||||
* Supports interrupted uploads for completion at a later time
|
||||
* Capability to sync remote updates immediately via webhooks
|
||||
* Enhanced synchronisation speed with multi-threaded file transfers
|
||||
* Manages traffic bandwidth use with rate limiting
|
||||
* Supports seamless access to shared folders and files across both OneDrive Personal and OneDrive for Business accounts
|
||||
* Supports national cloud deployments including Microsoft Cloud for US Government, Microsoft Cloud Germany and Azure and Office 365 operated by VNET in China
|
||||
* Supports sending desktop alerts using libnotify
|
||||
* Protects against significant data loss on OneDrive after configuration changes
|
||||
* Works with both single and multi-tenant applications
|
||||
|
||||
## What's missing
|
||||
* Ability to encrypt/decrypt files on-the-fly when uploading/downloading files from OneDrive
|
||||
* Support for Windows 'On-Demand' functionality so file is only downloaded when accessed locally
|
||||
|
||||
## External Enhancements
|
||||
* A GUI for configuration management: [OneDrive Client for Linux GUI](https://github.com/bpozdena/OneDriveGUI)
|
||||
* Colorful log output terminal modification: [OneDrive Client for Linux Colorful log Output](https://github.com/zzzdeb/dotfiles/blob/master/scripts/tools/onedrive_log)
|
||||
* System Tray Icon: [OneDrive Client for Linux System Tray Icon](https://github.com/DanielBorgesOliveira/onedrive_tray)
|
||||
|
||||
## Frequently Asked Questions
|
||||
Refer to [Frequently Asked Questions](https://github.com/abraunegg/onedrive/wiki/Frequently-Asked-Questions)
|
||||
|
||||
## Have a question
|
||||
If you have a question or need something clarified, please raise a new discussion post [here](https://github.com/abraunegg/onedrive/discussions)
|
||||
|
||||
## Basic Troubleshooting Steps
|
||||
If you are encountering any issue running the application please follow these steps first:
|
||||
1. Check the version of the application you are using `onedrive --version` and ensure that you are running either the latest [release](https://github.com/abraunegg/onedrive/releases) or built from master.
|
||||
2. Configure the application to only use IPv4 network connectivity, and then retest.
|
||||
3. Configure the application to only use HTTP/1.1 operations with IPv4 network connectivity, and then retest.
|
||||
4. If the above points do not resolve your issue, upgrade your 'curl' version to the latest available by the curl developers. Refer to https://curl.se/docs/releases.html for details.
|
||||
|
||||
|
||||
## Reporting an Issue or Bug
|
||||
> [!IMPORTANT]
|
||||
> Please ensure that issues reported as bugs are indeed software bugs. For installation problems, distribution package/version issues, or package dependency concerns, please start a [Discussion](https://github.com/abraunegg/onedrive/discussions) instead of filing a bug report.
|
||||
|
||||
If you encounter any bugs you can report them here on Github. Before filing an issue be sure to:
|
||||
|
||||
1. Fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md)
|
||||
2. Generate a debug log for support using the following [process](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support)
|
||||
* If you are in *any* way concerned regarding the sensitivity of the data contained within the verbose debug log file, create a new OneDrive account, configure the client to use that, use *dummy* data to simulate your environment and then replicate your original issue
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
3. Upload the debug log to [pastebin](https://pastebin.com/) or archive and email to support@mynas.com.au
|
||||
* If you are concerned regarding the sensitivity of your debug data, encrypt + password protect the archive file and provide the decryption password via an out-of-band (OOB) mechanism. Email support@mynas.com.au for an OOB method for the password to be sent.
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
|
||||
## Known issues
|
||||
Refer to [docs/known-issues.md](https://github.com/abraunegg/onedrive/blob/master/docs/known-issues.md)
|
||||
|
||||
## Documentation and Configuration Assistance
|
||||
### Installing from Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
Refer to [docs/install.md](https://github.com/abraunegg/onedrive/blob/master/docs/install.md)
|
||||
|
||||
### Configuration and Usage
|
||||
Refer to [docs/usage.md](https://github.com/abraunegg/onedrive/blob/master/docs/usage.md)
|
||||
|
||||
### Configure OneDrive Business Shared Items
|
||||
Refer to [docs/business-shared-items.md](https://github.com/abraunegg/onedrive/blob/master/docs/business-shared-items.md)
|
||||
|
||||
### Configure SharePoint / Office 365 Shared Libraries (Business or Education)
|
||||
Refer to [docs/sharepoint-libraries.md](https://github.com/abraunegg/onedrive/blob/master/docs/sharepoint-libraries.md)
|
||||
|
||||
### Configure National Cloud support
|
||||
Refer to [docs/national-cloud-deployments.md](https://github.com/abraunegg/onedrive/blob/master/docs/national-cloud-deployments.md)
|
||||
|
||||
### Docker support
|
||||
Refer to [docs/docker.md](https://github.com/abraunegg/onedrive/blob/master/docs/docker.md)
|
||||
|
||||
### Podman support
|
||||
Refer to [docs/podman.md](https://github.com/abraunegg/onedrive/blob/master/docs/podman.md)
|
||||
|
||||
|
|
@ -683,6 +683,7 @@ enum long defaultMaxContentLength = 5_000_000;
|
|||
public import std.string;
|
||||
public import std.stdio;
|
||||
public import std.conv;
|
||||
import std.concurrency;
|
||||
import std.uri;
|
||||
import std.uni;
|
||||
import std.algorithm.comparison;
|
||||
|
|
@ -763,7 +764,7 @@ class ConnectionClosedException : Exception {
|
|||
version(Windows) {
|
||||
// FIXME: ugly hack to solve stdin exception problems on Windows:
|
||||
// reading stdin results in StdioException (Bad file descriptor)
|
||||
// this is probably due to http://d.puremagic.com/issues/show_bug.cgi?id=3425
|
||||
// this is probably due to https://issues.dlang.org/show_bug.cgi?id=3425
|
||||
private struct stdin {
|
||||
struct ByChunk { // Replicates std.stdio.ByChunk
|
||||
private:
|
||||
|
|
@ -1100,7 +1101,7 @@ class Cgi {
|
|||
const(ubyte)[] delegate() readdata = null,
|
||||
// finally, use this to do custom output if needed
|
||||
void delegate(const(ubyte)[]) _rawDataOutput = null,
|
||||
// to flush teh custom output
|
||||
// to flush the custom output
|
||||
void delegate() _flush = null
|
||||
)
|
||||
{
|
||||
|
|
@ -2226,7 +2227,7 @@ class Cgi {
|
|||
uri ~= "s";
|
||||
uri ~= "://";
|
||||
uri ~= host;
|
||||
/+ // the host has the port so p sure this never needed, cgi on apache and embedded http all do the right hting now
|
||||
/+ // the host has the port so p sure this never needed, cgi on apache and embedded http all do the right thing now
|
||||
version(none)
|
||||
if(!(!port || port == defaultPort)) {
|
||||
uri ~= ":";
|
||||
|
|
@ -2316,7 +2317,7 @@ class Cgi {
|
|||
|
||||
/// This is like setResponseExpires, but it can be called multiple times. The setting most in the past is the one kept.
|
||||
/// If you have multiple functions, they all might call updateResponseExpires about their own return value. The program
|
||||
/// output as a whole is as cacheable as the least cachable part in the chain.
|
||||
/// output as a whole is as cacheable as the least cacheable part in the chain.
|
||||
|
||||
/// setCache(false) always overrides this - it is, by definition, the strictest anti-cache statement available. If your site outputs sensitive user data, you should probably call setCache(false) when you do, to ensure no other functions will cache the content, as it may be a privacy risk.
|
||||
/// Conversely, setting here overrides setCache(true), since any expiration date is in the past of infinity.
|
||||
|
|
@ -2328,7 +2329,7 @@ class Cgi {
|
|||
}
|
||||
|
||||
/*
|
||||
/// Set to true if you want the result to be cached publically - that is, is the content shared?
|
||||
/// Set to true if you want the result to be cached publicly - that is, is the content shared?
|
||||
/// Should generally be false if the user is logged in. It assumes private cache only.
|
||||
/// setCache(true) also turns on public caching, and setCache(false) sets to private.
|
||||
void setPublicCaching(bool allowPublicCaches) {
|
||||
|
|
@ -3910,14 +3911,16 @@ struct RequestServer {
|
|||
|
||||
If you want the forking worker process server, you do need to compile with the embedded_httpd_processes config though.
|
||||
+/
|
||||
void serveEmbeddedHttp(alias fun, CustomCgi = Cgi, long maxContentLength = defaultMaxContentLength)(ThisFor!fun _this) {
|
||||
shared void serveEmbeddedHttp(alias fun, T, CustomCgi = Cgi, long maxContentLength = defaultMaxContentLength)(shared T _this) {
|
||||
globalStopFlag = false;
|
||||
static if(__traits(isStaticFunction, fun))
|
||||
alias funToUse = fun;
|
||||
void funToUse(CustomCgi cgi) {
|
||||
fun(_this, cgi);
|
||||
}
|
||||
else
|
||||
void funToUse(CustomCgi cgi) {
|
||||
static if(__VERSION__ > 2097)
|
||||
__traits(child, _this, fun)(cgi);
|
||||
__traits(child, _inst_this, fun)(_inst_this, cgi);
|
||||
else static assert(0, "Not implemented in your compiler version!");
|
||||
}
|
||||
auto manager = new ListeningConnectionManager(listeningHost, listeningPort, &doThreadHttpConnection!(CustomCgi, funToUse), null, useFork, numberOfThreads);
|
||||
|
|
@ -6275,7 +6278,7 @@ ByChunkRange byChunk(BufferedInputRange ir, size_t atMost) {
|
|||
}
|
||||
|
||||
version(cgi_with_websocket) {
|
||||
// http://tools.ietf.org/html/rfc6455
|
||||
// https://tools.ietf.org/html/rfc6455
|
||||
|
||||
/**
|
||||
WEBSOCKET SUPPORT:
|
||||
|
|
@ -7289,7 +7292,7 @@ private void serialize(T)(scope void delegate(scope ubyte[]) sink, T t) {
|
|||
} else static assert(0, T.stringof);
|
||||
}
|
||||
|
||||
// all may be stack buffers, so use cautio
|
||||
// all may be stack buffers, so use caution
|
||||
private void deserialize(T)(scope ubyte[] delegate(int sz) get, scope void delegate(T) dg) {
|
||||
static if(is(T == struct)) {
|
||||
T t;
|
||||
|
|
@ -10178,7 +10181,7 @@ struct Redirection {
|
|||
/++
|
||||
Serves a class' methods, as a kind of low-state RPC over the web. To be used with [dispatcher].
|
||||
|
||||
Usage of this function will add a dependency on [arsd.dom] and [arsd.jsvar] unless you have overriden
|
||||
Usage of this function will add a dependency on [arsd.dom] and [arsd.jsvar] unless you have overridden
|
||||
the presenter in the dispatcher.
|
||||
|
||||
FIXME: explain this better
|
||||
|
|
@ -10618,7 +10621,7 @@ template urlNamesForMethod(alias method, string default_) {
|
|||
enum AccessCheck {
|
||||
allowed,
|
||||
denied,
|
||||
nonExistant,
|
||||
nonExistent,
|
||||
}
|
||||
|
||||
enum Operation {
|
||||
|
|
@ -11807,4 +11810,4 @@ Authors: Adam D. Ruppe
|
|||
Distributed under the Boost Software License, Version 1.0.
|
||||
(See accompanying file LICENSE_1_0.txt or copy at
|
||||
http://www.boost.org/LICENSE_1_0.txt)
|
||||
*/
|
||||
*/
|
||||
|
|
|
|||
581
src/clientSideFiltering.d
Normal file
|
|
@ -0,0 +1,581 @@
|
|||
// What is this module called?
|
||||
module clientSideFiltering;
|
||||
|
||||
// What does this module require to function?
|
||||
import std.algorithm;
|
||||
import std.array;
|
||||
import std.file;
|
||||
import std.path;
|
||||
import std.regex;
|
||||
import std.stdio;
|
||||
import std.string;
|
||||
import std.conv;
|
||||
|
||||
// What other modules that we have created do we need to import?
|
||||
import config;
|
||||
import util;
|
||||
import log;
|
||||
|
||||
class ClientSideFiltering {
|
||||
// Class variables
|
||||
ApplicationConfig appConfig;
|
||||
string[] syncListRules;
|
||||
Regex!char fileMask;
|
||||
Regex!char directoryMask;
|
||||
bool skipDirStrictMatch = false;
|
||||
bool skipDotfiles = false;
|
||||
|
||||
// Construct the client side filtering engine, binding it to the supplied
// application configuration instance for later rule evaluation.
this(ApplicationConfig appConfig) {
	this.appConfig = appConfig;
}
|
||||
|
||||
// Initialise the required items
|
||||
// Initialise Client Side Filtering (Selective Sync) from the application
// configuration: load 'sync_list' rules when the file exists, then apply
// the skip_dir, skip_dir_strict_match, skip_dotfiles and skip_file settings.
// Returns: false only when an invalid skip_file entry ('.*') is detected,
// true when all filtering components were configured successfully.
bool initialise() {
	addLogEntry("Configuring Client Side Filtering (Selective Sync)", ["debug"]);
	
	// Consume 'sync_list' rules when that file is present on disk
	if (exists(appConfig.syncListFilePath)) {
		loadSyncList(appConfig.syncListFilePath);
	}
	
	// skip_dir configuration
	addLogEntry("Configuring skip_dir ...", ["debug"]);
	addLogEntry("skip_dir: " ~ to!string(appConfig.getValueString("skip_dir")), ["debug"]);
	setDirMask(appConfig.getValueString("skip_dir"));
	
	// Honour --skip-dir-strict-match when set
	addLogEntry("Configuring skip_dir_strict_match ...", ["debug"]);
	addLogEntry("skip_dir_strict_match: " ~ to!string(appConfig.getValueBool("skip_dir_strict_match")), ["debug"]);
	if (appConfig.getValueBool("skip_dir_strict_match")) {
		setSkipDirStrictMatch();
	}
	
	// Honour --skip-dot-files when set
	addLogEntry("Configuring skip_dotfiles ...", ["debug"]);
	addLogEntry("skip_dotfiles: " ~ to!string(appConfig.getValueBool("skip_dotfiles")), ["debug"]);
	if (appConfig.getValueBool("skip_dotfiles")) {
		setSkipDotfiles();
	}
	
	// skip_file configuration - reject the catch-all '.*' pattern, as that
	// would prevent correct searching of local changes to process
	addLogEntry("Configuring skip_file ...", ["debug"]);
	if (appConfig.getValueString("skip_file").split("|").canFind(".*")) {
		// invalid entry element detected
		addLogEntry("ERROR: Invalid skip_file entry '.*' detected");
		return false;
	}
	
	// All skip_file entries are valid - compile the mask
	addLogEntry("skip_file: " ~ appConfig.getValueString("skip_file"), ["debug"]);
	setFileMask(appConfig.getValueString("skip_file"));
	
	// All filtering components configured
	return true;
}
|
||||
|
||||
// Shutdown components
|
||||
// Release all filtering state: drop the stored sync_list rules and reset
// both compiled skip masks back to empty expressions.
void shutdown() {
	syncListRules = null;
	fileMask = regex("");
	directoryMask = regex("");
}
|
||||
|
||||
// Load sync_list file if it exists
|
||||
// Read the 'sync_list' file at 'filepath' and store each usable rule.
// Blank lines and lines starting with ';' or '#' are treated as comments.
// The legacy '/*' rule is rejected, directing the user to sync_root_files.
void loadSyncList(string filepath) {
	// open file as read only
	auto file = File(filepath, "r");
	foreach (line; file.byLine()) {
		// ignore blank lines and comment lines
		if (line.length == 0 || line[0] == ';' || line[0] == '#') continue;
		
		if (strip(line) == "/*") {
			// legacy 'include all root files' lazy rule - not supported
			addLogEntry();
			addLogEntry("ERROR: Invalid sync_list rule '/*' detected. Please use 'sync_root_files = \"true\"' or --sync-root-files option to sync files in the root path.", ["info", "notify"]);
			addLogEntry();
		} else {
			// store the normalised rule
			syncListRules ~= buildNormalizedPath(line);
		}
	}
	// Close reading the 'sync_list' file
	file.close();
}
|
||||
|
||||
// Configure the regex that will be used for 'skip_file'
|
||||
// Compile the wildcard 'skip_file' pattern into the regex used for
// file-name exclusion matching.
void setFileMask(const(char)[] mask) {
	fileMask = wild2regex(mask);
	addLogEntry("Selective Sync File Mask: " ~ to!string(fileMask), ["debug"]);
}
|
||||
|
||||
// Configure the regex that will be used for 'skip_dir'
|
||||
// Compile the wildcard 'skip_dir' pattern into the regex used for
// directory exclusion matching.
void setDirMask(const(char)[] dirmask) {
	directoryMask = wild2regex(dirmask);
	addLogEntry("Selective Sync Directory Mask: " ~ to!string(directoryMask), ["debug"]);
}
|
||||
|
||||
// Configure skipDirStrictMatch if function is called
|
||||
// By default, skipDirStrictMatch = false;
|
||||
// Enable strict 'skip_dir' matching (default is disabled); once enabled,
// only the full path is tested against the directory mask.
void setSkipDirStrictMatch() {
	skipDirStrictMatch = true;
}
|
||||
|
||||
// Configure skipDotfiles if function is called
|
||||
// By default, skipDotfiles = false;
|
||||
// Enable dotfile skipping (default is disabled).
void setSkipDotfiles() {
	skipDotfiles = true;
}
|
||||
|
||||
// return value of skipDotfiles
|
||||
// Report whether dotfile skipping is currently enabled.
bool getSkipDotfiles() {
	return skipDotfiles;
}
|
||||
|
||||
// Match against sync_list only
|
||||
// Evaluate 'path' against the configured 'sync_list' rules only
// (no skip_dir / skip_file checks are performed here).
bool isPathExcludedViaSyncList(string path) {
	return isPathExcluded(path);
}
|
||||
|
||||
// config file skip_dir parameter
|
||||
// Test a directory name against the 'skip_dir' configuration.
// Returns: true when the name matches a skip_dir entry, false otherwise.
// Unless strict matching is enforced, the path is also rebuilt segment by
// segment from the child upwards and each accumulated suffix is tested.
bool isDirNameExcluded(string name) {
	addLogEntry("skip_dir evaluation for: " ~ name, ["debug"]);
	
	// Full path match against the compiled directory mask first
	if (!name.matchFirst(directoryMask).empty) {
		addLogEntry("'!name.matchFirst(directoryMask).empty' returned true = matched", ["debug"]);
		return true;
	}
	
	// Strict matching: the full-path test above is the only test performed
	if (skipDirStrictMatch) {
		addLogEntry("Strict Matching Enforced - No Match", ["debug"]);
		return false;
	}
	
	addLogEntry("No Strict Matching Enforced", ["debug"]);
	
	// Walk the path backwards from the child, accumulating a suffix path
	// and testing it (with its leading '/' stripped) against the mask
	string accumulated;
	foreach_reverse (segment; pathSplitter(buildNormalizedPath(name))) {
		if (segment == "/") continue;
		accumulated = "/" ~ segment ~ accumulated;
		if (!accumulated.strip('/').matchFirst(directoryMask).empty) {
			addLogEntry("'!checkPath.matchFirst(directoryMask).empty' returned true = matched", ["debug"]);
			return true;
		}
	}
	
	// no match
	return false;
}
|
||||
|
||||
// config file skip_file parameter
|
||||
// Test a file name against the 'skip_file' configuration.
// Returns: true when the name matches a skip_file entry, false otherwise.
// The full path is tested first, then the bare base name on its own.
bool isFileNameExcluded(string name) {
	addLogEntry("skip_file evaluation for: " ~ name, ["debug"]);
	
	// Full path match against the compiled file mask
	if (!name.matchFirst(fileMask).empty) {
		return true;
	}
	
	// Fall back to testing just the base file name
	if (!baseName(name).matchFirst(fileMask).empty) {
		return true;
	}
	
	// no match
	return false;
}
|
||||
|
||||
// test if the given path is not included in the allowed syncListRules
|
||||
// if there are no allowed syncListRules always return false
|
||||
private bool isPathExcluded(string path) {
|
||||
// function variables
|
||||
bool exclude = false;
|
||||
bool exludeDirectMatch = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool excludeMatched = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool finalResult = true; // will get updated to false, if pattern match to sync_list entry
|
||||
bool anywhereRuleMatched = false; // will get updated if the 'anywhere' rule matches
|
||||
bool excludeAnywhereMatched = false; // will get updated if the 'anywhere' rule matches
|
||||
bool wildcardRuleMatched = false; // will get updated if the 'wildcard' rule matches
|
||||
bool excludeWildcardMatched = false; // will get updated if the 'wildcard' rule matches
|
||||
int offset;
|
||||
string wildcard = "*";
|
||||
string globbing = "**";
|
||||
|
||||
// always allow the root
|
||||
if (path == ".") return false;
|
||||
// if there are no allowed syncListRules always return false
|
||||
if (syncListRules.empty) return false;
|
||||
|
||||
// To ensure we are checking the 'right' path, build the path
|
||||
path = buildPath("/", buildNormalizedPath(path));
|
||||
|
||||
// Evaluation start point
|
||||
addLogEntry("Evaluation against 'sync_list' rules for this input path: " ~ path, ["debug"]);
|
||||
addLogEntry("[S]exclude = " ~ to!string(exclude), ["debug"]);
|
||||
addLogEntry("[S]exludeDirectMatch = " ~ to!string(exludeDirectMatch), ["debug"]);
|
||||
addLogEntry("[S]excludeAnywhereMatched = " ~ to!string(excludeAnywhereMatched), ["debug"]);
|
||||
addLogEntry("[S]excludeWildcardMatched = " ~ to!string(excludeWildcardMatched), ["debug"]);
|
||||
addLogEntry("[S]excludeMatched = " ~ to!string(excludeMatched), ["debug"]);
|
||||
|
||||
// Unless path is an exact match, entire sync_list entries need to be processed to ensure negative matches are also correctly detected
|
||||
foreach (syncListRuleEntry; syncListRules) {
|
||||
|
||||
// There are several matches we need to think of here
|
||||
// Exclusions:
|
||||
// !foldername/* = As there is no preceding '/' (after the !) .. this is a rule that should exclude 'foldername' and all its children ANYWHERE
|
||||
// !*.extention = As there is no preceding '/' (after the !) .. this is a rule that should exclude any item that has the specified extention ANYWHERE
|
||||
// !/path/to/foldername/* = As there IS a preceding '/' (after the !) .. this is a rule that should exclude this specific path and all its children
|
||||
// !/path/to/foldername/*.extention = As there IS a preceding '/' (after the !) .. this is a rule that should exclude any item that has the specified extention in this path ONLY
|
||||
// !/path/to/foldername/*/specific_target/* = As there IS a preceding '/' (after the !) .. this excludes 'specific_target' in any subfolder of '/path/to/foldername/'
|
||||
//
|
||||
// Inclusions:
|
||||
// foldername/* = As there is no preceding '/' .. this is a rule that should INCLUDE 'foldername' and all its children ANYWHERE
|
||||
// *.extention = As there is no preceding '/' .. this is a rule that should INCLUDE any item that has the specified extention ANYWHERE
|
||||
// /path/to/foldername/* = As there IS a preceding '/' .. this is a rule that should INCLUDE this specific path and all its children
|
||||
// /path/to/foldername/*.extention = As there IS a preceding '/' .. this is a rule that should INCLUDE any item that has the specified extention in this path ONLY
|
||||
// /path/to/foldername/*/specific_target/* = As there IS a preceding '/' .. this INCLUDES 'specific_target' in any subfolder of '/path/to/foldername/'
|
||||
|
||||
// Is this rule an 'exclude' or 'include' rule?
|
||||
bool thisIsAnExcludeRule = false;
|
||||
|
||||
// Switch based on first character of rule to determine rule type
|
||||
switch (syncListRuleEntry[0]) {
|
||||
case '-':
|
||||
// sync_list path starts with '-', this user wants to exclude this path
|
||||
exclude = true; // default exclude
|
||||
thisIsAnExcludeRule = true; // exclude rule
|
||||
offset = 1; // To negate the '-' in the rule entry
|
||||
break;
|
||||
case '!':
|
||||
// sync_list path starts with '!', this user wants to exclude this path
|
||||
exclude = true; // default exclude
|
||||
thisIsAnExcludeRule = true; // exclude rule
|
||||
offset = 1; // To negate the '!' in the rule entry
|
||||
break;
|
||||
case '/':
|
||||
// sync_list path starts with '/', this user wants to include this path
|
||||
// but a '/' at the start causes matching issues, so use the offset for comparison
|
||||
exclude = false; // DO NOT EXCLUDE
|
||||
thisIsAnExcludeRule = false; // INCLUDE rule
|
||||
offset = 0;
|
||||
break;
|
||||
default:
|
||||
// no negative pattern, default is to not exclude
|
||||
exclude = false; // DO NOT EXCLUDE
|
||||
thisIsAnExcludeRule = false; // INCLUDE rule
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
// Update syncListRuleEntry to remove the offset
|
||||
syncListRuleEntry = syncListRuleEntry[offset..$];
|
||||
|
||||
// What 'sync_list' rule are we comparing against?
|
||||
if (thisIsAnExcludeRule) {
|
||||
addLogEntry("Evaluation against EXCLUSION 'sync_list' rule: !" ~ syncListRuleEntry, ["debug"]);
|
||||
} else {
|
||||
addLogEntry("Evaluation against INCLUSION 'sync_list' rule: " ~ syncListRuleEntry, ["debug"]);
|
||||
}
|
||||
|
||||
// Generate the common path prefix from the input path vs the 'sync_list' rule
|
||||
auto comm = commonPrefix(path, syncListRuleEntry);
|
||||
|
||||
// Is path is an exact match of the 'sync_list' rule?
|
||||
if (comm.length == path.length) {
|
||||
// we have a potential exact match
|
||||
// strip any potential '/*' from the sync_list rule, to avoid a potential lesser common match
|
||||
string strippedAllowedPath = strip(syncListRuleEntry, "/*");
|
||||
|
||||
if (path == strippedAllowedPath) {
|
||||
// we have an exact path match
|
||||
addLogEntry("Exact path match with 'sync_list' entry", ["debug"]);
|
||||
|
||||
if (!thisIsAnExcludeRule) {
|
||||
// Include Rule
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: direct match", ["debug"]);
|
||||
// final result
|
||||
finalResult = false;
|
||||
// direct match, break and search rules no more
|
||||
break;
|
||||
} else {
|
||||
// Exclude rule
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: direct match - path to be excluded", ["debug"]);
|
||||
// do not set excludeMatched = true here, otherwise parental path also gets excluded
|
||||
// flag exludeDirectMatch so that a 'wildcard match' will not override this exclude
|
||||
exludeDirectMatch = true;
|
||||
// final result
|
||||
finalResult = true;
|
||||
}
|
||||
} else {
|
||||
// no exact path match, but something common does match
|
||||
addLogEntry("Something 'common' matches the 'sync_list' input path", ["debug"]);
|
||||
|
||||
// do a search for potential common match
|
||||
auto splitAllowedPaths = pathSplitter(strippedAllowedPath);
|
||||
string pathToEvaluate = "";
|
||||
foreach(base; splitAllowedPaths) {
|
||||
pathToEvaluate ~= base;
|
||||
if (path == pathToEvaluate) {
|
||||
// The input path matches what we want to evaluate against as a direct match
|
||||
if (!thisIsAnExcludeRule) {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: direct match for parental path item", ["debug"]);
|
||||
finalResult = false;
|
||||
// direct match, break and search rules no more
|
||||
break;
|
||||
} else {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: direct match for parental path item but to be excluded", ["debug"]);
|
||||
finalResult = true;
|
||||
// do not set excludeMatched = true here, otherwise parental path also gets excluded
|
||||
}
|
||||
}
|
||||
pathToEvaluate ~= dirSeparator;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Is path is a subitem/sub-folder of the 'sync_list' rule?
|
||||
if (comm.length == syncListRuleEntry.length) {
|
||||
// The given path is potentially a subitem of an allowed path
|
||||
|
||||
// We want to capture sub-folders / files of allowed syncListRules here, but not explicitly match other items
|
||||
// if there is no wildcard
|
||||
auto subItemPathCheck = syncListRuleEntry ~ "/";
|
||||
if (canFind(path, subItemPathCheck)) {
|
||||
// The 'path' includes the allowed path, and is 'most likely' a sub-path item
|
||||
if (!exclude) {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: parental path match", ["debug"]);
|
||||
finalResult = false;
|
||||
// parental path matches, break and search rules no more
|
||||
break;
|
||||
} else {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: parental path match but must be excluded", ["debug"]);
|
||||
finalResult = true;
|
||||
excludeMatched = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Is the 'sync_list' rule an 'anywhere' rule?
|
||||
// EXCLUSION
|
||||
// !foldername/*
|
||||
// !*.extention
|
||||
// INCLUSION
|
||||
// foldername/*
|
||||
// *.extention
|
||||
if (to!string(syncListRuleEntry[0]) != "/") {
|
||||
// reset anywhereRuleMatched
|
||||
anywhereRuleMatched = false;
|
||||
|
||||
// what sort of rule
|
||||
if (thisIsAnExcludeRule) {
|
||||
addLogEntry("anywhere 'sync_list' exclusion rule: !" ~ syncListRuleEntry, ["debug"]);
|
||||
} else {
|
||||
addLogEntry("anywhere 'sync_list' inclusion rule: " ~ syncListRuleEntry, ["debug"]);
|
||||
}
|
||||
|
||||
// this is an 'anywhere' rule
|
||||
string anywhereRuleStripped;
|
||||
// If this 'sync_list' rule end in '/*' - if yes, remove it to allow for easier comparison
|
||||
if (syncListRuleEntry.endsWith("/*")) {
|
||||
// strip '/*' from the end of the rule
|
||||
anywhereRuleStripped = syncListRuleEntry.stripRight("/*");
|
||||
} else {
|
||||
// keep rule 'as-is'
|
||||
anywhereRuleStripped = syncListRuleEntry;
|
||||
}
|
||||
|
||||
if (canFind(path, anywhereRuleStripped)) {
|
||||
// we matched the path to the rule
|
||||
addLogEntry("anywhere rule 'canFind' MATCH", ["debug"]);
|
||||
anywhereRuleMatched = true;
|
||||
} else {
|
||||
// no 'canFind' match, try via regex
|
||||
addLogEntry("No anywhere rule 'canFind' MATCH .. trying a regex match", ["debug"]);
|
||||
|
||||
// create regex from 'syncListRuleEntry'
|
||||
auto allowedMask = regex(createRegexCompatiblePath(syncListRuleEntry));
|
||||
|
||||
// perform regex match attempt
|
||||
if (matchAll(path, allowedMask)) {
|
||||
// we regex matched the path to the rule
|
||||
addLogEntry("anywhere rule 'matchAll via regex' MATCH", ["debug"]);
|
||||
anywhereRuleMatched = true;
|
||||
}
|
||||
}
|
||||
|
||||
// is this rule matched?
|
||||
if (anywhereRuleMatched) {
|
||||
// Is this an exclude rule?
|
||||
if (thisIsAnExcludeRule) {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: anywhere rule matched and must be excluded", ["debug"]);
|
||||
finalResult = true;
|
||||
excludeAnywhereMatched = true;
|
||||
// anywhere match, break and search rules no more
|
||||
break;
|
||||
} else {
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: anywhere rule matched and must be included", ["debug"]);
|
||||
finalResult = false;
|
||||
excludeAnywhereMatched = false;
|
||||
// anywhere match, break and search rules no more
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Does the 'sync_list' rule contain a wildcard (*) or globbing (**) reference
|
||||
if (canFind(syncListRuleEntry, wildcard)) {
|
||||
// reset the applicable flag
|
||||
wildcardRuleMatched = false;
|
||||
|
||||
// sync_list rule contains some sort of wildcard sequence
|
||||
if (thisIsAnExcludeRule) {
|
||||
addLogEntry("wildcard (* or **) exclusion rule: !" ~ syncListRuleEntry, ["debug"]);
|
||||
} else {
|
||||
addLogEntry("wildcard (* or **) inclusion rule: " ~ syncListRuleEntry, ["debug"]);
|
||||
}
|
||||
|
||||
// Is this a globbing rule (**) or just a single wildcard (*) entries
|
||||
if (canFind(syncListRuleEntry, globbing)) {
|
||||
// globbing (**) rule processing
|
||||
if (matchPathAgainstRule(path, syncListRuleEntry)) {
|
||||
// set the applicable flag
|
||||
wildcardRuleMatched = true;
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: globbing pattern match", ["debug"]);
|
||||
}
|
||||
} else {
|
||||
// wildcard (*) rule processing
|
||||
// create regex from 'syncListRuleEntry'
|
||||
auto allowedMask = regex(createRegexCompatiblePath(syncListRuleEntry));
|
||||
if (matchAll(path, allowedMask)) {
|
||||
// set the applicable flag
|
||||
wildcardRuleMatched = true;
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: wildcard pattern match", ["debug"]);
|
||||
} else {
|
||||
// matchAll no match ... try another way just to be sure
|
||||
if (matchPathAgainstRule(path, syncListRuleEntry)) {
|
||||
// set the applicable flag
|
||||
wildcardRuleMatched = true;
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: wildcard pattern match using segment matching", ["debug"]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Was the rule matched?
|
||||
if (wildcardRuleMatched) {
|
||||
// Is this an exclude rule?
|
||||
if (thisIsAnExcludeRule) {
|
||||
// Yes exclude rule
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: wildcard|globbing rule matched and must be excluded", ["debug"]);
|
||||
finalResult = true;
|
||||
excludeWildcardMatched = true;
|
||||
} else {
|
||||
// include rule
|
||||
addLogEntry("Evaluation against 'sync_list' rule result: wildcard|globbing pattern matched and must be included", ["debug"]);
|
||||
finalResult = false;
|
||||
excludeWildcardMatched = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Interim results after checking each 'sync_list' rule against the input path
|
||||
addLogEntry("[F]exclude = " ~ to!string(exclude), ["debug"]);
|
||||
addLogEntry("[F]exludeDirectMatch = " ~ to!string(exludeDirectMatch), ["debug"]);
|
||||
addLogEntry("[F]excludeAnywhereMatched = " ~ to!string(excludeAnywhereMatched), ["debug"]);
|
||||
addLogEntry("[F]excludeWildcardMatched = " ~ to!string(excludeWildcardMatched), ["debug"]);
|
||||
addLogEntry("[F]excludeMatched = " ~ to!string(excludeMatched), ["debug"]);
|
||||
|
||||
// If exclude or excludeMatched is true, then finalResult has to be true
|
||||
if ((exclude) || (exludeDirectMatch) || (excludeAnywhereMatched) || (excludeWildcardMatched) || (excludeMatched)) {
|
||||
finalResult = true;
|
||||
}
|
||||
|
||||
// results
|
||||
if (finalResult) {
|
||||
addLogEntry("Evaluation against 'sync_list' final result: EXCLUDED", ["debug"]);
|
||||
} else {
|
||||
addLogEntry("Evaluation against 'sync_list' final result: included for sync", ["debug"]);
|
||||
}
|
||||
return finalResult;
|
||||
}
|
||||
|
||||
// Convert a 'sync_list' rule into a regex-compatible pattern string.
// Literal dots and spaces are escaped first, then the shell-style '*'
// wildcard is expanded into the regex 'match any characters' form.
string createRegexCompatiblePath(string regexCompatiblePath) {
	// Escape literal dots so '.' only matches an actual '.' character
	string pattern = regexCompatiblePath.replace(".", "\\.");
	// Represent spaces with the '\s' whitespace class
	pattern = pattern.replace(" ", "\\s");
	// Expand '*' into '.*' (must be done last so the inserted '.' is not escaped)
	pattern = pattern.replace("*", ".*");
	return pattern;
}
|
||||
|
||||
// Check whether a single path segment matches a single rule segment.
// The rule segment may contain the '*' wildcard; the pattern is anchored so the
// whole segment must match, not just a substring.
bool matchSegment(string ruleSegment, string pathSegment) {
	// Escape literal dots FIRST so '.' in a rule matches only an actual '.',
	// consistent with createRegexCompatiblePath(). Previously 'file.txt' would
	// also match 'fileXtxt' because the unescaped dot acted as a regex wildcard.
	ruleSegment = ruleSegment.replace(".", "\\.");
	ruleSegment = ruleSegment.replace("*", ".*"); // Replace * with '.*' to match any characters
	ruleSegment = ruleSegment.replace(" ", "\\s"); // Escape spaces if present
	auto pattern = regex("^" ~ ruleSegment ~ "$");
	// Check if there's a match
	return !match(pathSegment, pattern).empty;
}
|
||||
|
||||
// Function to handle path matching when using globbing (**)
// Walks 'path' and 'rule' segment-by-segment with two cursors (i over the path,
// j over the rule). '**' consumes any number of path segments; other rule
// segments must match one path segment each via matchSegment() (which supports '*').
bool matchPathAgainstRule(string path, string rule) {
	// Split both the path and rule into segments, dropping empties
	// (leading '/' or doubled separators produce empty splitter entries)
	auto pathSegments = pathSplitter(path).filter!(s => !s.empty).array;
	auto ruleSegments = pathSplitter(rule).filter!(s => !s.empty).array;
	
	bool lastSegmentMatchesRule = false;
	size_t i = 0, j = 0;
	
	while (i < pathSegments.length && j < ruleSegments.length) {
		if (ruleSegments[j] == "**") {
			if (j == ruleSegments.length - 1) {
				return true; // '**' at the end matches everything
			}
			
			// Find next matching part after '**'
			// (advances i until the segment AFTER '**' matches; i may run off the
			// end of the path, which the post-loop handling below deals with)
			while (i < pathSegments.length && !matchSegment(ruleSegments[j + 1], pathSegments[i])) {
				i++;
			}
			j++; // Move past the '**' in the rule
		} else {
			if (!matchSegment(ruleSegments[j], pathSegments[i])) {
				return false;
			} else {
				// increment to next set of values
				i++;
				j++;
			}
		}
	}
	
	// Ensure that we handle the last segments gracefully
	// Here the path is exhausted but the rule still has segments left: the path
	// may be an ancestor directory of what the rule describes.
	if (i >= pathSegments.length && j < ruleSegments.length) {
		// A sole trailing '*' still counts as a match
		if (j == ruleSegments.length - 1 && ruleSegments[j] == "*") {
			return true;
		}
		// NOTE(review): j-1/i-1 assume at least one segment was consumed on each
		// side; an empty path or a rule starting with an unmatched segment would
		// index out of range here - confirm callers never pass empty paths
		if (ruleSegments[j - 1] == pathSegments[i - 1]) {
			lastSegmentMatchesRule = true;
		}
	}
	
	// Match when: the whole rule was consumed, OR only a trailing '**' remains,
	// OR the ancestor-directory check above succeeded
	return j == ruleSegments.length || (j == ruleSegments.length - 1 && ruleSegments[j] == "**") || lastSegmentMatchesRule;
}
|
||||
}
|
||||
2781
src/config.d
555
src/curlEngine.d
Normal file
|
|
@ -0,0 +1,555 @@
|
|||
// What is this module called?
|
||||
module curlEngine;
|
||||
|
||||
// What does this module require to function?
|
||||
import std.net.curl;
|
||||
import etc.c.curl;
|
||||
import std.datetime;
|
||||
import std.conv;
|
||||
import std.file;
|
||||
import std.json;
|
||||
import std.stdio;
|
||||
import std.range;
|
||||
import core.memory;
|
||||
|
||||
// What other modules that we have created do we need to import?
|
||||
import log;
|
||||
import util;
|
||||
|
||||
// Shared pool of CurlEngine instances accessible across all threads
|
||||
__gshared CurlEngine[] curlEnginePool; // __gshared is used to declare a variable that is shared across all threads
|
||||
|
||||
// Holds the full request/response state of a single HTTP operation performed
// by a CurlEngine: what was sent (method, URL, headers, body) and what came
// back (status line, headers, content), plus helpers to parse and debug-dump it.
class CurlResponse {
	// Request details
	HTTP.Method method;
	const(char)[] url;
	const(char)[][const(char)[]] requestHeaders;
	const(char)[] postBody;

	// Response details - hasResponse flips to true once update() has been called
	bool hasResponse;
	string[string] responseHeaders;
	HTTP.StatusLine statusLine;
	char[] content;

	this() {
		reset();
	}

	~this() {
		reset();
	}

	// Reset all request and response state back to defaults so the instance can be reused
	void reset() {
		method = HTTP.Method.undefined;
		url = "";
		requestHeaders = null;
		postBody = [];
		hasResponse = false;
		responseHeaders = null;
		statusLine.reset();
		content = [];
	}

	// Record a request header that was sent (stored for later debug dumping)
	void addRequestHeader(const(char)[] name, const(char)[] value) {
		requestHeaders[to!string(name)] = to!string(value);
	}

	// Record the method and URL of the request being performed
	void connect(HTTP.Method method, const(char)[] url) {
		this.method = method;
		this.url = url;
	}

	// Parse the response body as JSON; returns JSONValue.init when parsing fails
	const JSONValue json() {
		JSONValue json;
		try {
			json = content.parseJSON();
		} catch (JSONException e) {
			// Log that a JSON Exception was caught, don't output the HTML response from OneDrive
			addLogEntry("JSON Exception caught when performing HTTP operations - use --debug-https to diagnose further", ["debug"]);
		}
		return json;
	}

	// Capture the status line and response headers from a completed HTTP request
	void update(HTTP *http) {
		hasResponse = true;
		this.responseHeaders = http.responseHeaders();
		this.statusLine = http.statusLine;
		addLogEntry("HTTP Response Headers: " ~ to!string(this.responseHeaders), ["debug"]);
		addLogEntry("HTTP Status Line: " ~ to!string(this.statusLine), ["debug"]);
	}

	@safe pure HTTP.StatusLine getStatus() {
		return this.statusLine;
	}

	// Return the number of seconds to wait before retrying, derived from the
	// 'retry-after' response header. Falls back to a preconfigured default of
	// 120 seconds when the header is missing or not plain delta-seconds.
	int getRetryAfterValue() {
		int delayBeforeRetry;
		// Is 'retry-after' in the response headers
		if ("retry-after" in responseHeaders) {
			// Set the retry-after value
			addLogEntry("curlEngine.http.perform() => Received a 'Retry-After' Header Response with the following value: " ~ to!string(responseHeaders["retry-after"]), ["debug"]);
			addLogEntry("curlEngine.http.perform() => Setting retryAfterValue to: " ~ responseHeaders["retry-after"], ["debug"]);
			try {
				delayBeforeRetry = to!int(responseHeaders["retry-after"]);
			} catch (ConvException e) {
				// RFC 9110 permits 'Retry-After' to carry an HTTP-date rather than
				// delta-seconds; a non-numeric value previously threw ConvException
				// here - fall back to the preconfigured default instead of crashing
				delayBeforeRetry = 120;
				addLogEntry("HTTP Response Header retry-after value was not numeric - Using a preconfigured default of: " ~ to!string(delayBeforeRetry), ["debug"]);
			}
		} else {
			// Use a 120 second delay as a default given header value was zero
			// This value is based on log files and data when determining correct process for 429 response handling
			delayBeforeRetry = 120;
			// Update that we are over-riding the provided value with a default
			addLogEntry("HTTP Response Header retry-after value was missing - Using a preconfigured default of: " ~ to!string(delayBeforeRetry), ["debug"]);
		}
		return delayBeforeRetry;
	}

	// Render the recorded request headers for debug output, skipping 'Authorization'
	// so credentials never appear in logs
	const string parseRequestHeaders(const(const(char)[][const(char)[]]) headers) {
		string requestHeadersStr = "";
		// Ensure response headers is not null and iterate over keys safely.
		if (headers !is null) {
			foreach (string header; headers.byKey()) {
				if (header == "Authorization") {
					continue;
				}
				// Use the 'in' operator to safely check if the key exists in the associative array.
				if (auto val = header in headers) {
					requestHeadersStr ~= "< " ~ header ~ ": " ~ *val ~ "\n";
				}
			}
		}
		return requestHeadersStr;
	}

	// Render the received response headers for debug output
	const string parseResponseHeaders(const(string[string]) headers) {
		string responseHeadersStr = "";
		// Ensure response headers is not null and iterate over keys safely.
		if (headers !is null) {
			foreach (string header; headers.byKey()) {
				// Check if the key actually exists before accessing it to avoid RangeError.
				if (auto val = header in headers) { // 'in' checks for the key and returns a pointer to the value if found.
					responseHeadersStr ~= "> " ~ header ~ ": " ~ *val ~ "\n"; // Dereference pointer to get the value.
				}
			}
		}
		return responseHeadersStr;
	}

	// Build a debug dump of the request/response exchange (headers + status line)
	const string dumpDebug() {
		import std.range;
		import std.format : format;

		string str = "";
		str ~= format("< %s %s\n", method, url);
		if (!requestHeaders.empty) {
			str ~= parseRequestHeaders(requestHeaders);
		}
		if (!postBody.empty) {
			str ~= format("\n----\n%s\n----\n", postBody);
		}
		str ~= format("< %s\n", statusLine);
		if (!responseHeaders.empty) {
			str ~= parseResponseHeaders(responseHeaders);
		}
		return str;
	}

	// Build a debug dump of the response body content
	const string dumpResponse() {
		import std.range;
		import std.format : format;

		string str = "";
		if (!content.empty) {
			str ~= format("\n----\n%s\n----\n", content);
		}
		return str;
	}

	override string toString() const {
		string str = "Curl debugging: \n";
		str ~= dumpDebug();
		if (hasResponse) {
			str ~= "Curl response: \n";
			str ~= dumpResponse();
		}
		return str;
	}
}
|
||||
|
||||
// Wraps a std.net.curl HTTP instance with the configuration, upload/download
// plumbing and lifecycle management this client needs. Instances are pooled in
// the module-level 'curlEnginePool' (see getCurlInstance()/releaseEngine()) so
// sockets can be reused across API calls.
class CurlEngine {
	
	HTTP http;
	File uploadFile;            // file handle used by setFile() for uploads
	CurlResponse response;      // holder for the current request/response pair
	bool keepAlive;             // whether to reuse the connection across requests
	ulong dnsTimeout;           // cached copy of the configured DNS cache timeout (seconds)
	string internalThreadId;    // unique ID for tracing this engine in debug logs
	
	this() {
		http = HTTP();	// Directly initializes HTTP using its default constructor
		response = null; // Initialize as null
		internalThreadId = generateAlphanumericString(); // Give this CurlEngine instance a unique ID
	}
	
	// The destructor should only clean up resources owned directly by this CurlEngine instance
	// NOTE(review): this destructor touches GC-managed members ('response', the File)
	// which is generally unsafe in D class finalizers - presumably safe here because
	// instances are destroyed explicitly via object.destroy(); confirm
	~this() {
		// Is the file still open?
		if (uploadFile.isOpen()) {
			uploadFile.close();
		}
		// Is 'response' cleared?
		object.destroy(response); // Destroy, then set to null
		response = null;
		// Is the actual http instance is stopped?
		if (!http.isStopped) {
			http.shutdown();
		}
		// Make sure this HTTP instance is destroyed
		object.destroy(http);
		// ThreadId needs to be set to null
		internalThreadId = null;
	}
	
	// We are releasing a curl instance back to the pool
	void releaseEngine() {
		// Log that we are releasing this engine back to the pool
		addLogEntry("CurlEngine releaseEngine() called on instance id: " ~ to!string(internalThreadId), ["debug"]);
		addLogEntry("CurlEngine curlEnginePool size before release: " ~ to!string(curlEnginePool.length), ["debug"]);
		
		// cleanup this curl instance before putting it back in the pool
		cleanup(true); // Cleanup instance by resetting values and flushing cookie cache
		// Appending to the shared pool must be serialised across threads
		synchronized (CurlEngine.classinfo) {
			curlEnginePool ~= this;
			addLogEntry("CurlEngine curlEnginePool size after release: " ~ to!string(curlEnginePool.length), ["debug"]);
		}
		// Perform Garbage Collection
		GC.collect();
		// Return free memory to the OS
		GC.minimize();
	}
	
	// Initialise this curl instance with all configured timeouts, protocol and
	// rate-limit options before it performs any requests
	void initialise(ulong dnsTimeout, ulong connectTimeout, ulong dataTimeout, ulong operationTimeout, int maxRedirects, bool httpsDebug, string userAgent, bool httpProtocol, ulong userRateLimit, ulong protocolVersion, bool keepAlive=true) {
		// Setting this to false ensures that when we close the curl instance, any open sockets are closed - which we need to do when running
		// multiple threads and API instances at the same time otherwise we run out of local files | sockets pretty quickly
		this.keepAlive = keepAlive;
		this.dnsTimeout = dnsTimeout;
		
		// Curl Timeout Handling
		
		// libcurl dns_cache_timeout timeout
		// https://curl.se/libcurl/c/CURLOPT_DNS_CACHE_TIMEOUT.html
		// https://dlang.org/library/std/net/curl/http.dns_timeout.html
		http.dnsTimeout = (dur!"seconds"(dnsTimeout));
		
		// Timeout for HTTPS connections
		// https://curl.se/libcurl/c/CURLOPT_CONNECTTIMEOUT.html
		// https://dlang.org/library/std/net/curl/http.connect_timeout.html
		http.connectTimeout = (dur!"seconds"(connectTimeout));
		
		// Timeout for activity on connection
		// This is a DMD | DLANG specific item, not a libcurl item
		// https://dlang.org/library/std/net/curl/http.data_timeout.html
		// https://raw.githubusercontent.com/dlang/phobos/master/std/net/curl.d - private enum _defaultDataTimeout = dur!"minutes"(2);
		http.dataTimeout = (dur!"seconds"(dataTimeout));
		
		// Maximum time any operation is allowed to take
		// This includes dns resolution, connecting, data transfer, etc.
		// https://curl.se/libcurl/c/CURLOPT_TIMEOUT_MS.html
		// https://dlang.org/library/std/net/curl/http.operation_timeout.html
		http.operationTimeout = (dur!"seconds"(operationTimeout));
		
		// Specify how many redirects should be allowed
		http.maxRedirects(maxRedirects);
		// Debug HTTPS
		http.verbose = httpsDebug;
		// Use the configured 'user_agent' value
		http.setUserAgent = userAgent;
		// What IP protocol version should be used when using Curl - IPv4 & IPv6, IPv4 or IPv6
		http.handle.set(CurlOption.ipresolve,protocolVersion); // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
		
		// What version of HTTP protocol do we use?
		// Curl >= 7.62.0 defaults to http2 for a significant number of operations
		if (httpProtocol) {
			// Downgrade to HTTP 1.1 - yes version = 2 is HTTP 1.1
			http.handle.set(CurlOption.http_version,2);
		}
		
		// Configure upload / download rate limits if configured
		// 131072 = 128 KB/s - minimum for basic application operations to prevent timeouts
		// A 0 value means rate is unlimited, and is the curl default
		if (userRateLimit > 0) {
			// set rate limit
			http.handle.set(CurlOption.max_send_speed_large,userRateLimit);
			http.handle.set(CurlOption.max_recv_speed_large,userRateLimit);
		}
		
		// Explicitly set libcurl options to avoid using signal handlers in a multi-threaded environment
		// See: https://curl.se/libcurl/c/CURLOPT_NOSIGNAL.html
		// The CURLOPT_NOSIGNAL option is intended for use in multi-threaded programs to ensure that libcurl does not use any signal handling.
		// Set CURLOPT_NOSIGNAL to 1 to prevent libcurl from using signal handlers, thus avoiding interference with the application's signal handling which could lead to issues such as unstable behavior or application crashes.
		http.handle.set(CurlOption.nosignal,1);
		
		// https://curl.se/libcurl/c/CURLOPT_TCP_NODELAY.html
		// Ensure that TCP_NODELAY is set to 0 to ensure that TCP NAGLE is enabled
		http.handle.set(CurlOption.tcp_nodelay,0);
		
		// https://curl.se/libcurl/c/CURLOPT_FORBID_REUSE.html
		// CURLOPT_FORBID_REUSE - make connection get closed at once after use
		// Setting this to 0 ensures that we ARE reusing connections (we did this in v2.4.xx) to ensure connections remained open and usable
		// Setting this to 1 ensures that when we close the curl instance, any open sockets are forced closed when the API curl instance is destroyed
		// The libcurl default is 0 as per the documentation (to REUSE connections) - ensure we are configuring to reuse sockets
		http.handle.set(CurlOption.forbid_reuse,0);
		
		if (httpsDebug) {
			// Output what options we are using so that in the debug log this can be tracked
			addLogEntry("http.dnsTimeout = " ~ to!string(dnsTimeout), ["debug"]);
			addLogEntry("http.connectTimeout = " ~ to!string(connectTimeout), ["debug"]);
			addLogEntry("http.dataTimeout = " ~ to!string(dataTimeout), ["debug"]);
			addLogEntry("http.operationTimeout = " ~ to!string(operationTimeout), ["debug"]);
			addLogEntry("http.maxRedirects = " ~ to!string(maxRedirects), ["debug"]);
			addLogEntry("http.CurlOption.ipresolve = " ~ to!string(protocolVersion), ["debug"]);
			addLogEntry("http.header.Connection.keepAlive = " ~ to!string(keepAlive), ["debug"]);
		}
	}
	
	// Ensure a CurlResponse holder exists: passing null lazily creates one,
	// passing an instance installs it as the current holder
	void setResponseHolder(CurlResponse response) {
		if (response is null) {
			// Create a response instance if it doesn't already exist
			if (this.response is null)
				this.response = new CurlResponse();
		} else {
			this.response = response;
		}
	}
	
	// Add a request header to both the live HTTP instance and the response
	// holder (the latter keeps a copy for debug dumping)
	void addRequestHeader(const(char)[] name, const(char)[] value) {
		setResponseHolder(null);
		http.addRequestHeader(name, value);
		response.addRequestHeader(name, value);
	}
	
	// Prepare the HTTP instance for a request to 'url' using 'method'
	void connect(HTTP.Method method, const(char)[] url) {
		setResponseHolder(null);
		if (!keepAlive)
			addRequestHeader("Connection", "close");
		http.method = method;
		http.url = url;
		response.connect(method, url);
	}
	
	// Attach an in-memory request body; the onSend callback feeds libcurl
	// successive slices of 'sendData' until it is exhausted
	void setContent(const(char)[] contentType, const(char)[] sendData) {
		setResponseHolder(null);
		addRequestHeader("Content-Type", contentType);
		if (sendData) {
			http.contentLength = sendData.length;
			http.onSend = (void[] buf) {
				import std.algorithm: min;
				size_t minLen = min(buf.length, sendData.length);
				if (minLen == 0) return 0;
				buf[0 .. minLen] = cast(void[]) sendData[0 .. minLen];
				sendData = sendData[minLen .. $];
				return minLen;
			};
			response.postBody = sendData;
		}
	}
	
	// Attach a file (or a byte range of it, when contentRange is supplied) as
	// the request body for an upload
	void setFile(string filepath, string contentRange, ulong offset, ulong offsetSize) {
		setResponseHolder(null);
		// open file as read-only in binary mode
		uploadFile = File(filepath, "rb");
		
		if (contentRange.empty) {
			// whole-file upload: send everything from the start
			offsetSize = uploadFile.size();
		} else {
			// ranged upload: advertise the range and seek to its start
			addRequestHeader("Content-Range", contentRange);
			uploadFile.seek(offset);
		}
		
		// Setup progress bar to display
		http.onProgress = delegate int(size_t dltotal, size_t dlnow, size_t ultotal, size_t ulnow) {
			return 0;
		};
		
		addRequestHeader("Content-Type", "application/octet-stream");
		http.onSend = data => uploadFile.rawRead(data).length;
		http.contentLength = offsetSize;
	}
	
	// Perform the configured request, buffering the body into the response
	// holder; cleanup() always runs afterwards, even on exception
	CurlResponse execute() {
		scope(exit) {
			cleanup();
		}
		setResponseHolder(null);
		http.onReceive = (ubyte[] data) {
			response.content ~= data;
			// HTTP Server Response Code Debugging if --https-debug is being used
			return data.length;
		};
		http.perform();
		response.update(&http);
		return response;
	}
	
	// Perform the configured request, streaming the body to 'downloadFilename'
	// and renaming it to 'originalFilename' once the transfer completes
	CurlResponse download(string originalFilename, string downloadFilename) {
		setResponseHolder(null);
		// open downloadFilename as write in binary mode
		auto file = File(downloadFilename, "wb");
		
		// function scopes
		scope(exit) {
			cleanup();
			if (file.isOpen()){
				// close open file
				file.close();
			}
		}
		
		http.onReceive = (ubyte[] data) {
			file.rawWrite(data);
			return data.length;
		};
		
		http.perform();
		
		// Rename downloaded file
		// NOTE(review): the rename happens unconditionally after perform(); a
		// non-2xx response would still replace the original file - confirm callers
		// validate the status before relying on the file contents
		rename(downloadFilename, originalFilename);
		
		response.update(&http);
		return response;
	}
	
	// Cleanup this instance internal variables that may have been set
	void cleanup(bool flushCookies = false) {
		// Reset any values to defaults, freeing any set objects
		addLogEntry("CurlEngine cleanup() called on instance id: " ~ to!string(internalThreadId), ["debug"]);
		
		// Is the instance is stopped?
		if (!http.isStopped) {
			// A stopped instance is not usable, these cannot be reset
			http.clearRequestHeaders();
			http.onSend = null;
			http.onReceive = null;
			http.onReceiveHeader = null;
			http.onReceiveStatusLine = null;
			http.onProgress = delegate int(size_t dltotal, size_t dlnow, size_t ultotal, size_t ulnow) {
				return 0;
			};
			http.contentLength = 0;
			
			// We only do this if we are pushing the curl engine back to the curl pool
			if (flushCookies) {
				// Flush the cookie cache as well
				http.flushCookieJar();
				http.clearSessionCookies();
				http.clearAllCookies();
			}
		}
		
		// set the response to null
		response = null;
		
		// close file if open
		if (uploadFile.isOpen()){
			// close open file
			uploadFile.close();
		}
	}
	
	// Shut down the curl instance & close any open sockets
	void shutdownCurlHTTPInstance() {
		// Log that we are attempting to shutdown this curl instance
		addLogEntry("CurlEngine shutdownCurlHTTPInstance() called on instance id: " ~ to!string(internalThreadId), ["debug"]);
		
		// Is this curl instance is stopped?
		if (!http.isStopped) {
			addLogEntry("HTTP instance still active: " ~ to!string(internalThreadId), ["debug"]);
			addLogEntry("HTTP instance isStopped state before http.shutdown(): " ~ to!string(http.isStopped), ["debug"]);
			http.shutdown();
			addLogEntry("HTTP instance isStopped state post http.shutdown(): " ~ to!string(http.isStopped), ["debug"]);
			object.destroy(http); // Destroy, however we cant set to null
			addLogEntry("HTTP instance shutdown and destroyed: " ~ to!string(internalThreadId), ["debug"]);
			
		} else {
			// Already stopped .. destroy it
			object.destroy(http); // Destroy, however we cant set to null
			addLogEntry("Stopped HTTP instance shutdown and destroyed: " ~ to!string(internalThreadId), ["debug"]);
		}
		// Perform Garbage Collection
		GC.collect();
		// Return free memory to the OS
		GC.minimize();
	}
}
|
||||
|
||||
// Methods to control obtaining and releasing a CurlEngine instance from the curlEnginePool
|
||||
|
||||
// Get a curl instance for the OneDrive API to use.
// Pops the most recently released engine from the shared pool (LIFO); a popped
// engine whose HTTP instance has been stopped cannot be reused, so it is
// destroyed and a fresh engine is constructed instead.
CurlEngine getCurlInstance() {
	addLogEntry("CurlEngine getCurlInstance() called", ["debug"]);
	
	synchronized (CurlEngine.classinfo) {
		// What is the current pool size
		addLogEntry("CurlEngine curlEnginePool current size: " ~ to!string(curlEnginePool.length), ["debug"]);
	
		if (curlEnginePool.empty) {
			addLogEntry("CurlEngine curlEnginePool is empty - constructing a new CurlEngine instance", ["debug"]);
			return new CurlEngine; // Constructs a new CurlEngine with a fresh HTTP instance
		} else {
			CurlEngine curlEngine = curlEnginePool[$ - 1];
			curlEnginePool.popBack(); // assumes a LIFO (last-in, first-out) usage pattern
			
			// Is this engine stopped?
			if (curlEngine.http.isStopped) {
				// A stopped engine is not usable - previously it was silently dropped,
				// leaking its resources until GC; destroy it explicitly before replacing it
				addLogEntry("CurlEngine was in a stopped state (not usable) - constructing a new CurlEngine instance", ["debug"]);
				object.destroy(curlEngine);
				return new CurlEngine; // Constructs a new CurlEngine with a fresh HTTP instance
			} else {
				// return an existing curl engine
				addLogEntry("CurlEngine was in a valid state - returning existing CurlEngine instance", ["debug"]);
				addLogEntry("CurlEngine instance ID: " ~ curlEngine.internalThreadId, ["debug"]);
				return curlEngine;
			}
		}
	}
}
|
||||
|
||||
// Release all CurlEngine instances held in the shared pool: cleanup and shut
// down each engine, destroy it, then empty the pool and return memory to the OS.
// Called at application shutdown.
void releaseAllCurlInstances() {
	addLogEntry("CurlEngine releaseAllCurlInstances() called", ["debug"]);
	synchronized (CurlEngine.classinfo) {
		// What is the current pool size
		addLogEntry("CurlEngine curlEnginePool size to release: " ~ to!string(curlEnginePool.length), ["debug"]);
		if (curlEnginePool.length > 0) {
			// Safely iterate and clean up each CurlEngine instance
			foreach (curlEngineInstance; curlEnginePool) {
				try {
					curlEngineInstance.cleanup(true); // Cleanup instance by resetting values and flushing cookie cache
					curlEngineInstance.shutdownCurlHTTPInstance(); // Assume proper cleanup of any resources used by HTTP
				} catch (Exception e) {
					// Log the failure rather than swallowing it silently, so cleanup or
					// shutdown problems remain visible in the debug output
					addLogEntry("Error during CurlEngine cleanup/shutdown: " ~ e.msg, ["debug"]);
				}
				
				// It's safe to destroy the object here assuming no other references exist
				object.destroy(curlEngineInstance); // Destroy, then set to null
				curlEngineInstance = null;
				// Perform Garbage Collection on this destroyed curl engine
				GC.collect();
				// Log release
				addLogEntry("CurlEngine released", ["debug"]);
			}
			
			// Clear the array after all instances have been handled
			curlEnginePool.length = 0; // More explicit than curlEnginePool = [];
		}
	}
	// Perform Garbage Collection on the destroyed curl engines
	GC.collect();
	// Return free memory to the OS
	GC.minimize();
	// Log that all curl engines have been released
	addLogEntry("CurlEngine releaseAllCurlInstances() completed", ["debug"]);
}
|
||||
|
||||
// Report the current number of CurlEngine instances sitting in the shared pool
ulong curlEnginePoolLength() {
	ulong poolSize = curlEnginePool.length;
	return poolSize;
}
|
||||