Merge branch 'master' into Issue-#49

abraunegg 2018-08-03 11:37:14 +10:00 committed by GitHub
commit 60738dc1d4
10 changed files with 313 additions and 46 deletions

.travis-ci.sh Normal file

@@ -0,0 +1,161 @@
#!/bin/bash
# Based on a test script from avsm/ocaml repo https://github.com/avsm/ocaml
# Adapted from https://www.tomaz.me/2013/12/02/running-travis-ci-tests-on-arm.html
# Adapted from https://github.com/PJK/libcbor/blob/master/.travis-qemu.sh
# Adapted from https://gist.github.com/oznu/b5efd7784e5a820ec3746820f2183dc0
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-armhf-bootstrap.html
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-stretch-arm64.html
set -e
# CHROOT Directory
CHROOT_DIR=/tmp/chroot
# Debian package dependencies for the host to run ARM under QEMU
DEBIAN_MIRROR="http://ftp.debian.org/debian"
HOST_DEPENDENCIES="qemu-user-static binfmt-support debootstrap sbuild wget"
# Debian package dependencies for the chrooted environment
GUEST_DEPENDENCIES="build-essential libcurl4-openssl-dev libsqlite3-dev libgnutls-openssl27 git"
function setup_arm32_chroot {
# Update apt repository details
sudo apt-get update
# 32Bit Variables
VERSION=jessie
CHROOT_ARCH=armhf
# Host dependencies
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
# Download LDC compiler
wget https://github.com/ldc-developers/ldc/releases/download/v1.10.0/ldc2-1.10.0-linux-armhf.tar.xz
tar -xf ldc2-1.10.0-linux-armhf.tar.xz
mv ldc2-1.10.0-linux-armhf dlang-${ARCH}
rm -rf ldc2-1.10.0-linux-armhf.tar.xz
# Create chrooted environment
sudo mkdir ${CHROOT_DIR}
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
sudo cp /usr/bin/qemu-arm-static ${CHROOT_DIR}/usr/bin/
sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
sudo sbuild-createchroot --arch=${CHROOT_ARCH} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
configure_chroot
}
function setup_arm64_chroot {
# Update apt repository details
sudo apt-get update
# 64Bit Variables
VERSION64=stretch
CHROOT_ARCH64=arm64
# Host dependencies
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
# Download LDC compiler
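# NOTE: this fetches the same armhf LDC tarball as the arm32 path; the arm64
# entry in the .travis.yml build matrix is currently commented out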
wget https://github.com/ldc-developers/ldc/releases/download/v1.10.0/ldc2-1.10.0-linux-armhf.tar.xz
tar -xf ldc2-1.10.0-linux-armhf.tar.xz
mv ldc2-1.10.0-linux-armhf dlang-${ARCH}
rm -rf ldc2-1.10.0-linux-armhf.tar.xz
# Create chrooted environment
sudo mkdir ${CHROOT_DIR}
sudo qemu-debootstrap --arch=${CHROOT_ARCH64} ${VERSION64} ${CHROOT_DIR} ${DEBIAN_MIRROR}
configure_chroot
}
function setup_x32_chroot {
# Update apt repository details
sudo apt-get update
# 32Bit Variables
VERSION=jessie
CHROOT_ARCH32=i386
# Host dependencies
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
# Download DMD compiler
wget http://downloads.dlang.org/releases/2.x/2.081.1/dmd.2.081.1.linux.tar.xz
tar -xf dmd.2.081.1.linux.tar.xz
mv dmd2 dlang-${ARCH}
rm -rf dmd.2.081.1.linux.tar.xz
# Create chrooted environment
sudo mkdir ${CHROOT_DIR}
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH32} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
sudo cp /usr/bin/qemu-i386-static ${CHROOT_DIR}/usr/bin/
sudo cp /usr/bin/qemu-x86_64-static ${CHROOT_DIR}/usr/bin/
sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
sudo sbuild-createchroot --arch=${CHROOT_ARCH32} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
configure_chroot
}
function configure_chroot {
# Create file with environment variables which will be used inside chrooted environment
echo "export ARCH=${ARCH}" > envvars.sh
echo "export TRAVIS_BUILD_DIR=${TRAVIS_BUILD_DIR}" >> envvars.sh
chmod a+x envvars.sh
# Install dependencies inside chroot
sudo chroot ${CHROOT_DIR} apt-get update
sudo chroot ${CHROOT_DIR} apt-get --allow-unauthenticated install -qq -y ${GUEST_DEPENDENCIES}
# Create build dir and copy travis build files to our chroot environment
sudo mkdir -p ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}
sudo rsync -a ${TRAVIS_BUILD_DIR}/ ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}/
# Indicate chroot environment has been set up
sudo touch ${CHROOT_DIR}/.chroot_is_done
# Re-run this same script inside the chroot; the /.chroot_is_done marker makes it take the build path
sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && chmod a+x ./.travis-ci.sh"
sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && ./.travis-ci.sh"
}
function build_onedrive {
# Depending on architecture, build onedrive using applicable tool
uname -a
HOMEDIR=$(pwd)
if [ "${ARCH}" = "x64" ]; then
# Build on x86_64 as normal
make clean; make;
elif [ "${ARCH}" = "x32" ]; then
# 32Bit DMD Build
make clean;
make DC=${HOMEDIR}/dlang-${ARCH}/linux/bin32/dmd
else
# LDC Build - ARM32, ARM64
make clean;
make DC=${HOMEDIR}/dlang-${ARCH}/bin/ldmd2
fi
# Functional testing of built application
test_onedrive
}
function test_onedrive {
# Testing onedrive client
./onedrive --version
}
if [ "${ARCH}" = "arm32" ] || [ "${ARCH}" = "arm64" ] || [ "${ARCH}" = "x32" ]; then
if [ -e "/.chroot_is_done" ]; then
# We are inside ARM chroot
echo "Running inside chrooted QEMU ${ARCH} environment"
. ./envvars.sh
export PATH="$PATH:/usr/sbin:/sbin:/bin"
build_onedrive
else
# Need to set up chrooted environment first
echo "Setting up chrooted ${ARCH} build environment"
if [ "${ARCH}" = "x32" ]; then
# 32Bit i386 Environment
setup_x32_chroot
elif [ "${ARCH}" = "arm32" ]; then
# 32Bit ARM Environment
setup_arm32_chroot
else
# 64Bit ARM Environment
setup_arm64_chroot
fi
fi
else
# Proceed as normal
echo "Running an x86_64 Build"
build_onedrive
fi
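For local debugging outside of Travis, the script can be exercised by hand. This is a minimal sketch, assuming `qemu-user-static`, `binfmt-support` and `debootstrap` are installed and that the two environment variables the script consumes are exported manually rather than supplied by Travis:

```
# Hypothetical local invocation - Travis normally provides these variables
export ARCH=arm32              # one of: x64, x32, arm32, arm64
export TRAVIS_BUILD_DIR=$PWD   # the checked-out onedrive source tree
sudo -E bash -ex .travis-ci.sh # -E preserves the exported variables for root
```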

.travis.yml

@@ -1,8 +1,17 @@
# latest dmd
# sudo access is required
sudo: required
# Compilation language
language: d
# Use latest DMD
d:
- dmd
language: d
# Which build architectures to build for
env:
- ARCH=x64
- ARCH=x32
- ARCH=arm32
# - ARCH=arm64
script: make
script:
- "bash -ex .travis-ci.sh"

CHANGELOG.md

@@ -3,7 +3,7 @@
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [2.0.1] - 2018-07-18
## [2.0.2] - 2018-07-18
### Fixed
- Fix systemd service install for builds with DESTDIR defined
- Fix 'HTTP 412 - Precondition Failed' error handling

README.md

@@ -26,7 +26,7 @@ sudo apt install libsqlite3-dev
curl -fsS https://dlang.org/install.sh | bash -s dmd
```
### Dependencies: Ubuntu/Debian - i386 / i686
### Dependencies: Ubuntu - i386 / i686
**Note:** Validated with `Linux ubuntu-i386-vm 4.13.0-36-generic #40~16.04.1-Ubuntu SMP Fri Feb 16 23:26:51 UTC 2018 i686 i686 i686 GNU/Linux` and DMD 2.081.1
```
sudo apt install build-essential
@@ -35,6 +35,27 @@ sudo apt install libsqlite3-dev
curl -fsS https://dlang.org/install.sh | bash -s dmd
```
### Dependencies: Debian - i386 / i686
**Note:** Validated with `Linux debian-i386 4.9.0-7-686-pae #1 SMP Debian 4.9.110-1 (2018-07-05) i686 GNU/Linux` and LDC - the LLVM D compiler (1.8.0).
First, install the development dependencies as per below:
```
sudo apt install build-essential
sudo apt install libcurl4-openssl-dev
sudo apt install libsqlite3-dev
sudo apt install git
```
Second, install the LDC compiler as per below:
```
mkdir ldc && cd ldc
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/ldc_1.8.0-3_i386.deb
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/libphobos2-ldc-shared-dev_1.8.0-3_i386.deb
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/libphobos2-ldc-shared78_1.8.0-3_i386.deb
wget http://ftp.us.debian.org/debian/pool/main/l/llvm-toolchain-5.0/libllvm5.0_5.0.1-2~bpo9+1_i386.deb
wget http://ftp.us.debian.org/debian/pool/main/n/ncurses/libtinfo6_6.1+20180714-1_i386.deb
sudo dpkg -i ./*.deb
```
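A quick sanity check that the compiler installed correctly (assuming the packages above installed cleanly and `/usr/bin` is on your `PATH`):
```
ldc2 --version
```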
### Dependencies: Fedora < Version 18 / CentOS / RHEL
```
sudo yum install libcurl-devel
@@ -82,6 +103,14 @@ sudo make install
```
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC)):
#### Debian - i386 / i686
```
git clone https://github.com/abraunegg/onedrive.git
cd onedrive
make DC=/usr/bin/ldmd2
sudo make install
```
#### ARM Architecture
```
git clone https://github.com/abraunegg/onedrive.git
@@ -191,6 +220,7 @@ This file does not get created by default, and should only be created if you wan
Available options:
* `sync_dir`: directory where the files will be synced
* `skip_file`: any files or directories that match this pattern will be skipped during sync.
* `skip_symlinks`: any files or directories that are symlinked will be skipped during sync
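Putting these together, a minimal `~/.config/onedrive/config` might look like the following; the values are illustrative, with `skip_file = "~*"` mirroring the shipped default:
```
sync_dir = "~/MyDirToSync"
skip_file = "~*"
skip_symlinks = "true"
```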
### sync_dir
Example: `sync_dir="~/MyDirToSync"`
@@ -207,6 +237,11 @@ Patterns are case insensitive. `*` and `?` [wildcard characters](https://techne
**Note:** after changing `skip_file`, you must perform a full synchronization by executing `onedrive --resync`
### skip_symlinks
Example: `skip_symlinks = "true"`
Setting this to `"true"` will skip all symlinks while syncing.
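The same behaviour can also be requested for a single run via the new command line switch (wired up in src/main.d below), without editing the config file:
```
onedrive --synchronize --skip-symlinks
```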
### Selective sync
Selective sync allows you to sync only specific files and directories.
To enable selective sync, create a file named `sync_list` in `~/.config/onedrive`.
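For example, a `sync_list` along these lines (directory names are purely illustrative, one entry per line) would restrict syncing to just the listed paths:
```
Backup
Documents/Work
```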

src/config.d

@@ -30,6 +30,9 @@ final class Config
// Configure to skip ONLY temp files (~*.doc etc) by default
// Prior configuration was: .*|~*
setValue("skip_file", "~*");
// By default symlinks are not skipped (using string type
// instead of boolean because hashmap only stores string types)
setValue("skip_symlinks", "false");
if (!load(userConfigFilePath)) {
log.vlog("No config file found, using defaults");
}

src/itemdb.d

@@ -2,7 +2,9 @@ import std.datetime;
import std.exception;
import std.path;
import std.string;
import core.stdc.stdlib;
import sqlite;
static import log;
enum ItemType {
file,
@@ -29,7 +31,7 @@ struct Item {
final class ItemDatabase
{
// increment this for every change in the db schema
immutable int itemDatabaseVersion = 6;
immutable int itemDatabaseVersion = 7;
Database db;
Statement insertItemStmt;
@@ -41,33 +43,21 @@ final class ItemDatabase
this(const(char)[] filename)
{
db = Database(filename);
if (db.getVersion() == 0) {
db.exec("CREATE TABLE item (
driveId TEXT NOT NULL,
id TEXT NOT NULL,
name TEXT NOT NULL,
type TEXT NOT NULL,
eTag TEXT,
cTag TEXT,
mtime TEXT NOT NULL,
parentId TEXT,
crc32Hash TEXT,
sha1Hash TEXT,
quickXorHash TEXT,
remoteDriveId TEXT,
remoteId TEXT,
deltaLink TEXT,
PRIMARY KEY (driveId, id),
FOREIGN KEY (driveId, parentId)
REFERENCES item (driveId, id)
ON DELETE CASCADE
ON UPDATE RESTRICT
)");
db.exec("CREATE INDEX name_idx ON item (name)");
db.exec("CREATE INDEX remote_idx ON item (remoteDriveId, remoteId)");
db.setVersion(itemDatabaseVersion);
int dbVersion;
try {
dbVersion = db.getVersion();
} catch (SqliteException e) {
// An error was generated - what was the error?
log.error("\nAn internal database error occurred: " ~ e.msg ~ "\n");
exit(-1);
}
if (dbVersion == 0) {
createTable();
} else if (dbVersion != itemDatabaseVersion) {
throw new Exception("The item database is incompatible, please resync manually");
log.log("The item database is incompatible, re-creating database table structures");
db.exec("DROP TABLE item");
createTable();
}
db.exec("PRAGMA foreign_keys = ON");
db.exec("PRAGMA recursive_triggers = ON");
@@ -91,6 +81,34 @@ final class ItemDatabase
deleteItemByIdStmt = db.prepare("DELETE FROM item WHERE driveId = ? AND id = ?");
}
void createTable()
{
db.exec("CREATE TABLE item (
driveId TEXT NOT NULL,
id TEXT NOT NULL,
name TEXT NOT NULL,
type TEXT NOT NULL,
eTag TEXT,
cTag TEXT,
mtime TEXT NOT NULL,
parentId TEXT,
crc32Hash TEXT,
sha1Hash TEXT,
quickXorHash TEXT,
remoteDriveId TEXT,
remoteId TEXT,
deltaLink TEXT,
PRIMARY KEY (driveId, id),
FOREIGN KEY (driveId, parentId)
REFERENCES item (driveId, id)
ON DELETE CASCADE
ON UPDATE RESTRICT
)");
db.exec("CREATE INDEX name_idx ON item (name)");
db.exec("CREATE INDEX remote_idx ON item (remoteDriveId, remoteId)");
db.setVersion(itemDatabaseVersion);
}
void insert(const ref Item item)
{
bindItem(item, insertItemStmt);

src/main.d

@@ -58,7 +58,9 @@ int main(string[] args)
bool uploadOnly;
// Add a check mounts option to resolve https://github.com/abraunegg/onedrive/issues/8
bool checkMount;
// Add option for no remote delete
// Add option to skip symlinks
bool skipSymlinks;
// Add option for no remote delete
bool noRemoteDelete;
try {
@@ -80,6 +82,7 @@
"resync", "Forget the last saved state, perform a full sync", &resync,
"remove-directory", "Remove a directory on OneDrive - no sync will be performed.", &removeDirectory,
"single-directory", "Specify a single local directory within the OneDrive root to sync.", &singleDirectory,
"skip-symlinks", "Skip syncing of symlinks", &skipSymlinks,
"source-directory", "Source directory to rename or move on OneDrive - no sync will be performed.", &sourceDirectory,
"syncdir", "Set the directory used to sync the files that are synced", &syncDirName,
"synchronize", "Perform a synchronization", &synchronize,
@@ -122,10 +125,11 @@
// command line parameters override the config
if (syncDirName) cfg.setValue("sync_dir", syncDirName.expandTilde().absolutePath());
if (skipSymlinks) cfg.setValue("skip_symlinks", "true");
// we should only set noRemoteDelete in an upload-only scenario
if ((uploadOnly)&&(noRemoteDelete)) cfg.setValue("no-remote-delete", "true");
// upgrades
if (exists(configDirName ~ "/items.db")) {
remove(configDirName ~ "/items.db");

src/onedrive.d

@@ -2,6 +2,7 @@ import std.net.curl: CurlException, HTTP;
import std.datetime, std.exception, std.file, std.json, std.path;
import std.stdio, std.string, std.uni, std.uri;
import config;
import core.stdc.stdlib;
static import log;
shared bool debugResponse = false;
@@ -54,6 +55,7 @@ final class OneDriveApi
this.cfg = cfg;
http = HTTP();
http.dnsTimeout = (dur!"seconds"(5));
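// allow up to an hour without data activity before curl treats a (potentially large) transfer as timed out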
http.dataTimeout = (dur!"seconds"(3600));
if (debugHttp) {
http.verbose = true;
.debugResponse = true;
@@ -423,7 +425,15 @@
content ~= data;
return data.length;
};
http.perform();
try {
http.perform();
} catch (CurlException e) {
// the transfer likely timed out (see dataTimeout above) or connectivity was lost
log.error("\nAccess to the Microsoft OneDrive service timed out - Internet connectivity issue?\n");
exit(-1);
}
JSONValue json;
try {
json = content.parseJSON();

src/sqlite.d

@@ -2,6 +2,8 @@ module sqlite;
import std.stdio;
import etc.c.sqlite3;
import std.string: fromStringz, toStringz;
import core.stdc.stdlib;
static import log;
extern (C) immutable(char)* sqlite3_errstr(int); // missing from the std library
@@ -48,9 +50,16 @@ struct Database
{
// https://www.sqlite.org/c3ref/open.html
int rc = sqlite3_open(toStringz(filename), &pDb);
if (rc != SQLITE_OK) {
if (rc == SQLITE_CANTOPEN) {
// Database cannot be opened
log.error("\nThe database cannot be opened. Please check the permissions of ~/.config/onedrive/items.sqlite3\n");
close();
throw new SqliteException(ifromStringz(sqlite3_errstr(rc)));
exit(-1);
}
if (rc != SQLITE_OK) {
log.error("\nA database access error occurred: " ~ getErrorMessage() ~ "\n");
close();
exit(-1);
}
sqlite3_extended_result_codes(pDb, 1); // always use extended result codes
}
@@ -60,7 +69,9 @@
// https://www.sqlite.org/c3ref/exec.html
int rc = sqlite3_exec(pDb, toStringz(sql), null, null, null);
if (rc != SQLITE_OK) {
throw new SqliteException(ifromStringz(sqlite3_errmsg(pDb)));
log.error("\nA database execution error occurred: "~ getErrorMessage() ~ "\n");
close();
exit(-1);
}
}
@@ -79,6 +90,11 @@
return userVersion;
}
string getErrorMessage()
{
return ifromStringz(sqlite3_errmsg(pDb));
}
void setVersion(int userVersion)
{
import std.conv: to;
@@ -135,7 +151,7 @@ struct Statement
int rc = sqlite3_step(pStmt);
if (rc == SQLITE_BUSY) {
// Database is locked by another onedrive process
writeln("The database is currently locked by another process - cannot sync");
log.error("The database is currently locked by another process - cannot sync");
return;
}
if (rc == SQLITE_DONE) {
@@ -149,7 +165,9 @@
column = fromStringz(sqlite3_column_text(pStmt, i));
}
} else {
throw new SqliteException(ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt))));
string errorMessage = ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt)));
log.error("\nA database statement execution error occurred: "~ errorMessage ~ "\n");
exit(-1);
}
}
}

src/sync.d

@@ -938,10 +938,18 @@ final class SyncEngine
if(encodeComponent(path).length < maxPathLength){
// path is less than maxPathLength
// skip broken symbolic links
if (isSymlink(path) && !exists(readLink(path))) {
log.vlog("Skipping item - symbolic link: ", path);
return;
if (isSymlink(path)) {
// if config says so we skip all symlinked items
if (cfg.getValue("skip_symlinks") == "true") {
log.vlog("Skipping item - skip symbolic links configured: ", path);
return;
}
// skip symbolic links whose target does not exist
else if (!exists(readLink(path))) {
log.vlog("Skipping item - invalid symbolic link: ", path);
return;
}
}
// Restrictions and limitations around Windows file naming
@@ -1194,6 +1202,7 @@
// Is the local file newer than the uploaded file?
SysTime localFileModifiedTime = timeLastModified(path).toUTC();
SysTime remoteFileModifiedTime = SysTime.fromISOExtString(fileDetailsFromOneDrive["fileSystemInfo"]["lastModifiedDateTime"].str);
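// OneDrive reports lastModifiedDateTime at whole-second precision; zero the local fractional seconds so equal timestamps compare as equal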
localFileModifiedTime.fracSecs = Duration.zero;
if (localFileModifiedTime > remoteFileModifiedTime){
// local file is newer