Merge branch 'master' of github.com:skilion/onedrive

This may fix some upload problems.
lanhin 2017-04-24 13:45:52 +08:00
commit 79139600ac
13 changed files with 552 additions and 283 deletions

Makefile

@@ -1,7 +1,6 @@
 DC = dmd
 DFLAGS = -ofonedrive -L-lcurl -L-lsqlite3 -L-ldl
 DESTDIR = /usr/local/bin
-CONFDIR = /usr/local/etc
 SOURCES = \
     src/config.d \
@@ -10,6 +9,7 @@ SOURCES = \
     src/main.d \
     src/monitor.d \
     src/onedrive.d \
+    src/selective.d \
     src/sqlite.d \
     src/sync.d \
     src/upload.d \
@@ -27,12 +27,10 @@ unittest: $(SOURCES)
 clean:
     rm -f onedrive.o onedrive
-install: onedrive onedrive.conf
+install: onedrive onedrive.service
     install onedrive $(DESTDIR)/onedrive
-    install -m 644 onedrive.conf $(CONFDIR)/onedrive.conf
     install -m 644 onedrive.service /usr/lib/systemd/user
 uninstall:
     rm -f $(DESTDIR)/onedrive
-    rm -f $(CONFDIR)/onedrive.conf
     rm -f /usr/lib/systemd/user/onedrive.service

README.md

@@ -26,31 +26,44 @@ sudo apt-get update && sudo apt-get install dmd-bin
 ```
 ### Installation
-1. `make`
-2. `sudo make install`
+```
+git clone git@github.com:skilion/onedrive.git
+cd onedrive
+make
+sudo make install
+```
 ### Configuration:
 You should copy the default config file into your home directory before making changes:
 ```
 mkdir -p ~/.config/onedrive
-cp /usr/local/etc/onedrive.conf ~/.config/onedrive/config
+cp ./config ~/.config/onedrive/config
 ```
 Available options:
-* `client_id`: application identifier necessary for the [authentication][2]
 * `sync_dir`: directory where the files will be synced
-* `skip_file`: any files that match this pattern will be skipped during sync
-* `skip_dir`: any directories that match this pattern will be skipped during sync
+* `skip_file`: any files or directories that match this pattern will be skipped during sync
 Patterns are case insensitive.
-`*` and `?` [wildcard characters][3] are supported.
+`*` and `?` [wildcard characters][1] are supported.
 Use `|` to separate multiple patterns.
-[2]: https://dev.onedrive.com/auth/msa_oauth.htm
-[3]: https://technet.microsoft.com/en-us/library/bb490639.aspx
+[1]: https://technet.microsoft.com/en-us/library/bb490639.aspx
+### Selective sync
+Selective sync allows you to sync only specific files and directories.
+To enable selective sync create a file named `sync_list` in `~/.config/onedrive`.
+Each line represents a path to a file or directory relative to your `sync_dir`.
+```
+$ cat ~/.config/onedrive/sync_list
+Backup
+Documents/report.odt
+Work/ProjectX
+notes.txt
+```
 ### First run
-The first time you run the program you will be asked to sign in. The procedure require a web browser.
+The first time you run the program you will be asked to sign in. The procedure requires a web browser.
 ### Service
 If you want to sync your files automatically, enable and start the systemd service:
@@ -68,16 +81,17 @@ journalctl --user-unit onedrive -f
 ```
 onedrive [OPTION]...
+no option    Sync and exit.
 -m --monitor Keep monitoring for local and remote changes.
 --resync     Forget the last saved state, perform a full sync.
+--logout     Logout the current user.
+--confdir    Set the directory used to store the configuration files.
 -v --verbose Print more details, useful for debugging.
 -h --help    This help information.
 ```
 ### Notes:
-* After changing the filters (`skip_file` or `skip_dir` in your configs) you must execute `onedrive --resync`
+* After changing `skip_file` in your configs or the sync list, you must execute `onedrive --resync`
-* [Windows naming conventions][4] apply
+* [Windows naming conventions][2] apply
 * Use `make debug` to generate an executable for debugging
-[4]: https://msdn.microsoft.com/en-us/library/aa365247
+[2]: https://msdn.microsoft.com/en-us/library/aa365247
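For illustration only, a hypothetical `~/.config/onedrive/config` entry using the pattern syntax described in the README above (wildcards plus `|` as a separator); the specific patterns are made up and not part of this commit:
```
# illustrative only: skip hidden files, editor backups, and temporary files
skip_file = ".*|~*|*.tmp"
```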

config (new file, +4 lines)

@ -0,0 +1,4 @@
# Directory where the files will be synced
sync_dir = "~/OneDrive"
# Skip files and directories that match this pattern
skip_file = ".*|~*"

onedrive.conf (deleted)

@ -1,4 +0,0 @@
client_id = "000000004C15842F"
sync_dir = "~/OneDrive"
skip_file = ".*|~*"
skip_dir = ".*"

src/config.d

@ -1,4 +1,5 @@
import std.file, std.regex, std.stdio; import std.file, std.string, std.regex, std.stdio;
import selective;
static import log; static import log;
final class Config final class Config
@ -7,6 +8,7 @@ final class Config
public string statusTokenFilePath; public string statusTokenFilePath;
public string databaseFilePath; public string databaseFilePath;
public string uploadStateFilePath; public string uploadStateFilePath;
public string syncListFilePath;
private string userConfigFilePath; private string userConfigFilePath;
// hashmap for the values found in the user config file // hashmap for the values found in the user config file
@ -16,18 +18,19 @@ final class Config
{ {
refreshTokenFilePath = configDirName ~ "/refresh_token"; refreshTokenFilePath = configDirName ~ "/refresh_token";
statusTokenFilePath = configDirName ~ "/status_token"; statusTokenFilePath = configDirName ~ "/status_token";
databaseFilePath = configDirName ~ "/items.db"; databaseFilePath = configDirName ~ "/items.sqlite3";
uploadStateFilePath = configDirName ~ "/resume_upload"; uploadStateFilePath = configDirName ~ "/resume_upload";
userConfigFilePath = configDirName ~ "/config"; userConfigFilePath = configDirName ~ "/config";
syncListFilePath = configDirName ~ "/sync_list";
} }
void init() void init()
{ {
bool found = false; setValue("sync_dir", "~/OneDrive");
found |= load("/etc/onedrive.conf"); setValue("skip_file", ".*|~*");
found |= load("/usr/local/etc/onedrive.conf"); if (!load(userConfigFilePath)) {
found |= load(userConfigFilePath); log.vlog("No config file found, using defaults");
if (!found) throw new Exception("No config file found"); }
} }
string getValue(string key) string getValue(string key)
@ -40,12 +43,29 @@ final class Config
} }
} }
string getValue(string key, string value)
{
auto p = key in values;
if (p) {
return *p;
} else {
return value;
}
}
void setValue(string key, string value)
{
values[key] = value;
}
private bool load(string filename) private bool load(string filename)
{ {
scope(failure) return false; scope(failure) return false;
auto file = File(filename, "r"); auto file = File(filename, "r");
auto r = regex(`^\s*(\w+)\s*=\s*"(.*)"\s*$`); auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`);
foreach (line; file.byLine()) { foreach (line; file.byLine()) {
line = stripLeft(line);
if (line.length == 0 || line[0] == ';' || line[0] == '#') continue;
auto c = line.matchFirst(r); auto c = line.matchFirst(r);
if (!c.empty) { if (!c.empty) {
c.popFront(); // skip the whole match c.popFront(); // skip the whole match
@ -63,6 +83,7 @@ final class Config
unittest unittest
{ {
auto cfg = new Config(""); auto cfg = new Config("");
cfg.load("onedrive.conf"); cfg.load("config");
assert(cfg.getValue("sync_dir") == "~/OneDrive"); assert(cfg.getValue("sync_dir") == "~/OneDrive");
assert(cfg.getValue("empty", "default") == "default");
} }
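A minimal usage sketch for the reworked Config API above, assuming the module as shown: `init()` now seeds defaults with `setValue()` and then loads the user config if present, and the new two-argument `getValue()` supplies a fallback for missing keys. The config directory and the `monitor_interval` key below are made up for illustration:
```d
import std.stdio;
import config;

void main()
{
    auto cfg = new Config("/home/user/.config/onedrive"); // hypothetical config directory
    cfg.init(); // seeds sync_dir / skip_file defaults, then loads the "config" file if it exists

    // keys seeded by init() or read from the file come back directly
    writeln("sync_dir = ", cfg.getValue("sync_dir"));

    // keys absent from the config fall back to the supplied default
    writeln("interval = ", cfg.getValue("monitor_interval", "45"));
}
```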

src/itemdb.d

@ -1,4 +1,4 @@
import std.datetime, std.path, std.string; import std.datetime, std.path, std.exception, std.string;
import sqlite; import sqlite;
enum ItemType enum ItemType
@ -21,6 +21,9 @@ struct Item
final class ItemDatabase final class ItemDatabase
{ {
// increment this for every change in the db schema
immutable int itemDatabaseVersion = 3;
Database db; Database db;
Statement insertItemStmt; Statement insertItemStmt;
Statement updateItemStmt; Statement updateItemStmt;
@ -30,20 +33,26 @@ final class ItemDatabase
this(const(char)[] filename) this(const(char)[] filename)
{ {
db = Database(filename); db = Database(filename);
db.exec("CREATE TABLE IF NOT EXISTS item ( if (db.getVersion() == 0) {
id TEXT PRIMARY KEY, db.exec("CREATE TABLE item (
name TEXT NOT NULL, id TEXT NOT NULL PRIMARY KEY,
type TEXT NOT NULL, name TEXT NOT NULL,
eTag TEXT NOT NULL, type TEXT NOT NULL,
cTag TEXT NOT NULL, eTag TEXT,
mtime TEXT NOT NULL, cTag TEXT,
parentId TEXT, mtime TEXT NOT NULL,
crc32 TEXT, parentId TEXT,
FOREIGN KEY (parentId) REFERENCES item (id) ON DELETE CASCADE crc32 TEXT,
)"); FOREIGN KEY (parentId) REFERENCES item (id) ON DELETE CASCADE
db.exec("CREATE INDEX IF NOT EXISTS name_idx ON item (name)"); )");
db.exec("CREATE INDEX name_idx ON item (name)");
db.setVersion(itemDatabaseVersion);
} else if (db.getVersion() != itemDatabaseVersion) {
throw new Exception("The item database is incompatible, please resync manually");
}
db.exec("PRAGMA foreign_keys = ON"); db.exec("PRAGMA foreign_keys = ON");
db.exec("PRAGMA recursive_triggers = ON"); db.exec("PRAGMA recursive_triggers = ON");
insertItemStmt = db.prepare("INSERT OR REPLACE INTO item (id, name, type, eTag, cTag, mtime, parentId, crc32) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"); insertItemStmt = db.prepare("INSERT OR REPLACE INTO item (id, name, type, eTag, cTag, mtime, parentId, crc32) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
updateItemStmt = db.prepare(" updateItemStmt = db.prepare("
UPDATE item UPDATE item
@ -54,70 +63,28 @@ final class ItemDatabase
selectItemByParentIdStmt = db.prepare("SELECT id FROM item WHERE parentId = ?"); selectItemByParentIdStmt = db.prepare("SELECT id FROM item WHERE parentId = ?");
} }
void insert(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) void insert(const ref Item item)
{ {
with (insertItemStmt) { bindItem(item, insertItemStmt);
bind(1, id); insertItemStmt.exec();
bind(2, name);
string typeStr = void;
final switch (type) {
case ItemType.file: typeStr = "file"; break;
case ItemType.dir: typeStr = "dir"; break;
}
bind(3, typeStr);
bind(4, eTag);
bind(5, cTag);
bind(6, mtime);
bind(7, parentId);
bind(8, crc32);
exec();
}
} }
void update(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) void update(const ref Item item)
{ {
with (updateItemStmt) { bindItem(item, updateItemStmt);
bind(1, id); updateItemStmt.exec();
bind(2, name);
string typeStr = void;
final switch (type) {
case ItemType.file: typeStr = "file"; break;
case ItemType.dir: typeStr = "dir"; break;
}
bind(3, typeStr);
bind(4, eTag);
bind(5, cTag);
bind(6, mtime);
bind(7, parentId);
bind(8, crc32);
exec();
}
} }
void upsert(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) void upsert(const ref Item item)
{ {
auto s = db.prepare("SELECT COUNT(*) FROM item WHERE id = ?"); auto s = db.prepare("SELECT COUNT(*) FROM item WHERE id = ?");
s.bind(1, id); s.bind(1, item.id);
auto r = s.exec(); auto r = s.exec();
Statement* p; Statement* stmt;
if (r.front[0] == "0") p = &insertItemStmt; if (r.front[0] == "0") stmt = &insertItemStmt;
else p = &updateItemStmt; else stmt = &updateItemStmt;
with (p) { bindItem(item, *stmt);
bind(1, id); stmt.exec();
bind(2, name);
string typeStr = void;
final switch (type) {
case ItemType.file: typeStr = "file"; break;
case ItemType.dir: typeStr = "dir"; break;
}
bind(3, typeStr);
bind(4, eTag);
bind(5, cTag);
bind(6, mtime);
bind(7, parentId);
bind(8, crc32);
exec();
}
} }
Item[] selectChildren(const(char)[] id) Item[] selectChildren(const(char)[] id)
@ -147,7 +114,8 @@ final class ItemDatabase
bool selectByPath(const(char)[] path, out Item item) bool selectByPath(const(char)[] path, out Item item)
{ {
path = "root/" ~ path.chompPrefix("."); // HACK // prefix with the root dir
path = "root/" ~ path.chompPrefix(".");
// initialize the search // initialize the search
string[2][] candidates; // [id, parentId] string[2][] candidates; // [id, parentId]
@ -218,6 +186,25 @@ final class ItemDatabase
return false; return false;
} }
private void bindItem(const ref Item item, ref Statement stmt)
{
with (stmt) with (item) {
bind(1, id);
bind(2, name);
string typeStr = null;
final switch (type) with (ItemType) {
case file: typeStr = "file"; break;
case dir: typeStr = "dir"; break;
}
bind(3, typeStr);
bind(4, eTag);
bind(5, cTag);
bind(6, mtime.toISOExtString());
bind(7, parentId);
bind(8, crc32);
}
}
private Item buildItem(Statement.Result result) private Item buildItem(Statement.Result result)
{ {
assert(!result.empty && result.front.length == 8); assert(!result.empty && result.front.length == 8);
@ -231,31 +218,30 @@ final class ItemDatabase
crc32: result.front[7].dup crc32: result.front[7].dup
}; };
switch (result.front[2]) { switch (result.front[2]) {
case "file": item.type = ItemType.file; break; case "file": item.type = ItemType.file; break;
case "dir": item.type = ItemType.dir; break; case "dir": item.type = ItemType.dir; break;
default: assert(0); default: assert(0);
} }
return item; return item;
} }
// computes the path of the given item id
// the path is relative to the sync directory ex: "./Music/Turbo Killer.mp3"
// a trailing slash is never added
string computePath(const(char)[] id) string computePath(const(char)[] id)
{ {
if (!id) return null;
string path; string path;
auto s = db.prepare("SELECT name, parentId FROM item WHERE id = ?"); auto s = db.prepare("SELECT name, parentId FROM item WHERE id = ?");
while (true) { while (true) {
s.bind(1, id); s.bind(1, id);
auto r = s.exec(); auto r = s.exec();
if (r.empty) { enforce(!r.empty, "Unknow item id");
// no results if (r.front[1]) {
break;
} else if (r.front[1]) {
if (path) path = r.front[0].idup ~ "/" ~ path; if (path) path = r.front[0].idup ~ "/" ~ path;
else path = r.front[0].idup; else path = r.front[0].idup;
} else { } else {
// root // root
if (path) path = "./" ~ path; if (!path) path = ".";
else path = ".";
break; break;
} }
id = r.front[1].dup; id = r.front[1].dup;
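A rough usage sketch for the refactored ItemDatabase above: items are now passed around as `Item` structs and bound in one place (`bindItem`), and `computePath()` walks parent links up to the root. The ids, names and timestamps below are fabricated for illustration:
```d
import std.datetime;
import std.stdio;
import itemdb;

void main()
{
    // opens (or creates) the item database; the constructor also checks the schema version
    auto db = new ItemDatabase("items.sqlite3");

    // fabricated items: a root folder and one file below it
    Item root = {
        id: "ROOT0000",
        name: "root",
        type: ItemType.dir,
        eTag: null,
        cTag: null,
        mtime: SysTime.fromISOExtString("2017-04-24T05:45:52Z"),
        parentId: null,
        crc32: null
    };
    Item doc = {
        id: "ITEM0001",
        name: "report.odt",
        type: ItemType.file,
        eTag: "etag-1",
        cTag: "ctag-1",
        mtime: SysTime.fromISOExtString("2017-04-24T05:45:52Z"),
        parentId: "ROOT0000",
        crc32: null
    };

    db.upsert(root); // INSERT, since the id is not in the table yet
    db.upsert(doc);  // a second upsert of the same id would turn into an UPDATE

    // computePath walks parentId links up to the root and returns the
    // item's path relative to the sync directory
    writeln(db.computePath(doc.id));
}
```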

src/main.d

@ -1,7 +1,7 @@
import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE; import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE;
import core.memory, core.time, core.thread; import core.memory, core.time, core.thread;
import std.getopt, std.file, std.path, std.process; import std.getopt, std.file, std.path, std.process;
import config, itemdb, monitor, onedrive, sync, util; import config, itemdb, monitor, onedrive, selective, sync, util;
static import log; static import log;
int main(string[] args) int main(string[] args)
@ -46,6 +46,14 @@ int main(string[] args)
if (!exists(configDirName)) mkdir(configDirName); if (!exists(configDirName)) mkdir(configDirName);
auto cfg = new config.Config(configDirName); auto cfg = new config.Config(configDirName);
cfg.init(); cfg.init();
// upgrades
if (exists(configDirName ~ "/items.db")) {
remove(configDirName ~ "/items.db");
log.log("Database schema changed, resync needed");
resync = true;
}
if (resync || logout) { if (resync || logout) {
log.log("Deleting the saved status ..."); log.log("Deleting the saved status ...");
safeRemove(cfg.databaseFilePath); safeRemove(cfg.databaseFilePath);
@ -79,13 +87,16 @@ int main(string[] args)
chdir(syncDir); chdir(syncDir);
log.vlog("Initializing the Synchronization Engine ..."); log.vlog("Initializing the Synchronization Engine ...");
auto sync = new SyncEngine(cfg, onedrive, itemdb); auto selectiveSync = new SelectiveSync();
selectiveSync.load(cfg.syncListFilePath);
selectiveSync.setMask(cfg.getValue("skip_file"));
auto sync = new SyncEngine(cfg, onedrive, itemdb, selectiveSync);
sync.init(); sync.init();
if (online) performSync(sync); if (online) performSync(sync);
if (monitor) { if (monitor) {
log.vlog("Initializing monitor ..."); log.vlog("Initializing monitor ...");
Monitor m; Monitor m = new Monitor(selectiveSync);
m.onDirCreated = delegate(string path) { m.onDirCreated = delegate(string path) {
log.vlog("[M] Directory created: ", path); log.vlog("[M] Directory created: ", path);
try { try {

src/monitor.d

@ -2,7 +2,9 @@ import core.sys.linux.sys.inotify;
import core.stdc.errno; import core.stdc.errno;
import core.sys.posix.poll, core.sys.posix.unistd; import core.sys.posix.poll, core.sys.posix.unistd;
import std.exception, std.file, std.path, std.regex, std.stdio, std.string; import std.exception, std.file, std.path, std.regex, std.stdio, std.string;
import config, util; import config;
import selective;
import util;
static import log; static import log;
// relevant inotify events // relevant inotify events
@ -17,11 +19,9 @@ class MonitorException: ErrnoException
} }
} }
struct Monitor final class Monitor
{ {
bool verbose; bool verbose;
// regex that match files/dirs to skip
private Regex!char skipDir, skipFile;
// inotify file descriptor // inotify file descriptor
private int fd; private int fd;
// map every inotify watch descriptor to its directory // map every inotify watch descriptor to its directory
@ -31,18 +31,23 @@ struct Monitor
// buffer to receive the inotify events // buffer to receive the inotify events
private void[] buffer; private void[] buffer;
private SelectiveSync selectiveSync;
void delegate(string path) onDirCreated; void delegate(string path) onDirCreated;
void delegate(string path) onFileChanged; void delegate(string path) onFileChanged;
void delegate(string path) onDelete; void delegate(string path) onDelete;
void delegate(string from, string to) onMove; void delegate(string from, string to) onMove;
@disable this(this); this(SelectiveSync selectiveSync)
{
assert(selectiveSync);
this.selectiveSync = selectiveSync;
}
void init(Config cfg, bool verbose) void init(Config cfg, bool verbose)
{ {
this.verbose = verbose; this.verbose = verbose;
skipDir = wild2regex(cfg.getValue("skip_dir"));
skipFile = wild2regex(cfg.getValue("skip_file"));
fd = inotify_init(); fd = inotify_init();
if (fd == -1) throw new MonitorException("inotify_init failed"); if (fd == -1) throw new MonitorException("inotify_init failed");
if (!buffer) buffer = new void[4096]; if (!buffer) buffer = new void[4096];
@ -57,12 +62,20 @@ struct Monitor
private void addRecursive(string dirname) private void addRecursive(string dirname)
{ {
if (matchFirst(dirname, skipDir).empty) { // skip filtered items
add(dirname); if (dirname != ".") {
foreach(DirEntry entry; dirEntries(dirname, SpanMode.shallow, false)) { if (selectiveSync.isNameExcluded(baseName(dirname))) {
if (entry.isDir) { return;
addRecursive(entry.name); }
} if (selectiveSync.isPathExcluded(buildNormalizedPath(dirname))) {
return;
}
}
add(dirname);
foreach(DirEntry entry; dirEntries(dirname, SpanMode.shallow, false)) {
if (entry.isDir) {
addRecursive(entry.name);
} }
} }
} }
@ -85,7 +98,7 @@ struct Monitor
} }
throw new MonitorException("inotify_add_watch failed"); throw new MonitorException("inotify_add_watch failed");
} }
wdToDirName[wd] = dirname ~ "/"; wdToDirName[wd] = buildNormalizedPath(dirname) ~ "/";
log.vlog("Monitor directory: ", dirname); log.vlog("Monitor directory: ", dirname);
} }
@ -152,14 +165,11 @@ struct Monitor
// skip filtered items // skip filtered items
path = getPath(event); path = getPath(event);
if (event.mask & IN_ISDIR) { if (selectiveSync.isNameExcluded(baseName(path))) {
if (!matchFirst(path, skipDir).empty) { goto skip;
goto skip; }
} if (selectiveSync.isPathExcluded(path)) {
} else { goto skip;
if (!matchFirst(path, skipFile).empty) {
goto skip;
}
} }
if (event.mask & IN_MOVED_FROM) { if (event.mask & IN_MOVED_FROM) {

src/onedrive.d

@ -6,27 +6,39 @@ static import log;
private immutable { private immutable {
string authUrl = "https://login.live.com/oauth20_authorize.srf"; string clientId = "22c49a0d-d21c-4792-aed1-8f163c982546";
string redirectUrl = "https://login.live.com/oauth20_desktop.srf"; // "urn:ietf:wg:oauth:2.0:oob"; string authUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize";
string tokenUrl = "https://login.live.com/oauth20_token.srf"; string redirectUrl = "https://login.microsoftonline.com/common/oauth2/nativeclient";
string itemByIdUrl = "https://api.onedrive.com/v1.0/drive/items/"; string tokenUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/token";
string itemByPathUrl = "https://api.onedrive.com/v1.0/drive/root:/"; string driveUrl = "https://graph.microsoft.com/v1.0/me/drive";
string itemByIdUrl = "https://graph.microsoft.com/v1.0/me/drive/items/";
string itemByPathUrl = "https://graph.microsoft.com/v1.0/me/drive/root:/";
} }
class OneDriveException: Exception class OneDriveException: Exception
{ {
// HTTP status code int httpStatusCode;
int code; // https://dev.onedrive.com/misc/errors.htm
JSONValue error;
@nogc @safe pure nothrow this(string msg, Throwable next, string file = __FILE__, size_t line = __LINE__) @nogc @safe pure nothrow this(string msg, Throwable next, string file = __FILE__, size_t line = __LINE__)
{ {
super(msg, file, line, next); super(msg, file, line, next);
} }
@safe pure this(int code, string reason, string file = __FILE__, size_t line = __LINE__) @safe pure this(int httpStatusCode, string reason, string file = __FILE__, size_t line = __LINE__)
{ {
this.code = code; this.httpStatusCode = httpStatusCode;
string msg = format("HTTP request returned status code %d (%s)", code, reason); this.error = error;
string msg = format("HTTP request returned status code %d (%s)", httpStatusCode, reason);
super(msg, file, line, next);
}
this(int httpStatusCode, string reason, ref const JSONValue error, string file = __FILE__, size_t line = __LINE__)
{
this.httpStatusCode = httpStatusCode;
this.error = error;
string msg = format("HTTP request returned status code %d (%s)\n%s", httpStatusCode, reason, toJSON(error, true));
super(msg, file, line, next); super(msg, file, line, next);
} }
} }
@ -34,7 +46,6 @@ class OneDriveException: Exception
final class OneDriveApi final class OneDriveApi
{ {
private Config cfg; private Config cfg;
private string clientId;
private string refreshToken, accessToken; private string refreshToken, accessToken;
private SysTime accessTokenExpiration; private SysTime accessTokenExpiration;
/* private */ HTTP http; /* private */ HTTP http;
@ -42,7 +53,6 @@ final class OneDriveApi
this(Config cfg) this(Config cfg)
{ {
this.cfg = cfg; this.cfg = cfg;
this.clientId = cfg.getValue("client_id");
http = HTTP(); http = HTTP();
//http.verbose = true; //http.verbose = true;
} }
@ -51,8 +61,15 @@ final class OneDriveApi
{ {
try { try {
refreshToken = readText(cfg.refreshTokenFilePath); refreshToken = readText(cfg.refreshTokenFilePath);
getDefaultDrive();
} catch (FileException e) { } catch (FileException e) {
return authorize(); return authorize();
} catch (OneDriveException e) {
if (e.httpStatusCode == 400 || e.httpStatusCode == 401) {
log.log("Refresh token invalid");
return authorize();
}
throw e;
} }
return true; return true;
} }
@ -61,12 +78,12 @@ final class OneDriveApi
{ {
import std.stdio, std.regex; import std.stdio, std.regex;
char[] response; char[] response;
string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=onedrive.readwrite%20offline_access&response_type=code&redirect_uri=" ~ redirectUrl; string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=files.readwrite%20files.readwrite.all%20offline_access&response_type=code&redirect_uri=" ~ redirectUrl;
log.log("Authorize this app visiting:\n"); log.log("Authorize this app visiting:\n");
write(url, "\n\n", "Enter the response uri: "); write(url, "\n\n", "Enter the response uri: ");
readln(response); readln(response);
// match the authorization code // match the authorization code
auto c = matchFirst(response, r"(?:code=)(([\w\d]+-){4}[\w\d]+)"); auto c = matchFirst(response, r"(?:code=)([\w\d-]+)");
if (c.empty) { if (c.empty) {
log.log("Invalid uri"); log.log("Invalid uri");
return false; return false;
@ -76,13 +93,20 @@ final class OneDriveApi
return true; return true;
} }
// https://dev.onedrive.com/drives/default.htm
JSONValue getDefaultDrive()
{
checkAccessTokenExpired();
return get(driveUrl);
}
// https://dev.onedrive.com/items/view_delta.htm // https://dev.onedrive.com/items/view_delta.htm
JSONValue viewChangesById(const(char)[] id, const(char)[] statusToken) JSONValue viewChangesById(const(char)[] id, const(char)[] statusToken)
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
const(char)[] url = itemByIdUrl ~ id ~ "/view.delta"; const(char)[] url = itemByIdUrl ~ id ~ "/delta";
url ~= "?select=id,name,eTag,cTag,deleted,file,folder,fileSystemInfo,remoteItem,parentReference"; url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference";
if (statusToken) url ~= "?token=" ~ statusToken; if (statusToken) url ~= "&token=" ~ statusToken;
return get(url); return get(url);
} }
@ -90,8 +114,10 @@ final class OneDriveApi
JSONValue viewChangesByPath(const(char)[] path, const(char)[] statusToken) JSONValue viewChangesByPath(const(char)[] path, const(char)[] statusToken)
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
string url = itemByPathUrl ~ encodeComponent(path) ~ ":/view.delta"; string url = itemByPathUrl ~ encodeComponent(path) ~ ":/delta";
url ~= "?select=id,name,eTag,cTag,deleted,file,folder,fileSystemInfo,remoteItem,parentReference"; // HACK
if (path == ".") url = driveUrl ~ "/root/delta";
url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference";
if (statusToken) url ~= "&token=" ~ statusToken; if (statusToken) url ~= "&token=" ~ statusToken;
return get(url); return get(url);
} }
@ -117,7 +143,6 @@ final class OneDriveApi
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
string url = itemByPathUrl ~ encodeComponent(remotePath) ~ ":/content"; string url = itemByPathUrl ~ encodeComponent(remotePath) ~ ":/content";
http.addRequestHeader("Content-Type", "application/octet-stream");
if (eTag) http.addRequestHeader("If-Match", eTag); if (eTag) http.addRequestHeader("If-Match", eTag);
else url ~= "?@name.conflictBehavior=fail"; else url ~= "?@name.conflictBehavior=fail";
return upload(localPath, url); return upload(localPath, url);
@ -138,14 +163,18 @@ final class OneDriveApi
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
char[] url = itemByIdUrl ~ id; char[] url = itemByIdUrl ~ id;
if (eTag) http.addRequestHeader("If-Match", eTag); //TODO: investigate why this always fail with 412 (Precondition Failed)
//if (eTag) http.addRequestHeader("If-Match", eTag);
del(url); del(url);
} }
// https://dev.onedrive.com/items/create.htm // https://dev.onedrive.com/items/create.htm
JSONValue createByPath(const(char)[] parentPath, JSONValue item) JSONValue createByPath(const(char)[] parentPath, JSONValue item)
{ {
checkAccessTokenExpired();
string url = itemByPathUrl ~ encodeComponent(parentPath) ~ ":/children"; string url = itemByPathUrl ~ encodeComponent(parentPath) ~ ":/children";
// HACK
if (parentPath == ".") url = driveUrl ~ "/root/children";
http.addRequestHeader("Content-Type", "application/json"); http.addRequestHeader("Content-Type", "application/json");
return post(url, item.toString()); return post(url, item.toString());
} }
@ -154,7 +183,7 @@ final class OneDriveApi
JSONValue createUploadSession(const(char)[] path, const(char)[] eTag = null) JSONValue createUploadSession(const(char)[] path, const(char)[] eTag = null)
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
string url = itemByPathUrl ~ encodeComponent(path) ~ ":/upload.createSession"; string url = itemByPathUrl ~ encodeComponent(path) ~ ":/createUploadSession";
if (eTag) http.addRequestHeader("If-Match", eTag); if (eTag) http.addRequestHeader("If-Match", eTag);
return post(url, null); return post(url, null);
} }
@ -169,7 +198,8 @@ final class OneDriveApi
} }
http.method = HTTP.Method.put; http.method = HTTP.Method.put;
http.url = uploadUrl; http.url = uploadUrl;
addAccessTokenHeader(); // when using microsoft graph the auth code is different
//addAccessTokenHeader();
import std.conv; import std.conv;
string contentRange = "bytes " ~ to!string(offset) ~ "-" ~ to!string(offset + offsetSize - 1) ~ "/" ~ to!string(fileSize); string contentRange = "bytes " ~ to!string(offset) ~ "-" ~ to!string(offset + offsetSize - 1) ~ "/" ~ to!string(fileSize);
http.addRequestHeader("Content-Range", contentRange); http.addRequestHeader("Content-Range", contentRange);
@ -187,7 +217,8 @@ final class OneDriveApi
JSONValue requestUploadStatus(const(char)[] uploadUrl) JSONValue requestUploadStatus(const(char)[] uploadUrl)
{ {
checkAccessTokenExpired(); checkAccessTokenExpired();
return get(uploadUrl); // when using microsoft graph the auth code is different
return get(uploadUrl, true);
} }
private void redeemToken(const(char)[] authCode) private void redeemToken(const(char)[] authCode)
@ -231,14 +262,14 @@ final class OneDriveApi
http.addRequestHeader("Authorization", accessToken); http.addRequestHeader("Authorization", accessToken);
} }
private JSONValue get(const(char)[] url) private JSONValue get(const(char)[] url, bool skipToken = false)
{ {
scope(exit) http.clearRequestHeaders(); scope(exit) http.clearRequestHeaders();
http.method = HTTP.Method.get; http.method = HTTP.Method.get;
http.url = url; http.url = url;
addAccessTokenHeader(); if (!skipToken) addAccessTokenHeader(); // HACK: requestUploadStatus
auto response = perform(); auto response = perform();
checkHttpCode(); checkHttpCode(response);
return response; return response;
} }
@ -248,8 +279,8 @@ final class OneDriveApi
http.method = HTTP.Method.del; http.method = HTTP.Method.del;
http.url = url; http.url = url;
addAccessTokenHeader(); addAccessTokenHeader();
perform(); auto response = perform();
checkHttpCode(); checkHttpCode(response);
} }
private void download(const(char)[] url, string filename) private void download(const(char)[] url, string filename)
@ -274,7 +305,7 @@ final class OneDriveApi
http.url = url; http.url = url;
addAccessTokenHeader(); addAccessTokenHeader();
auto response = perform(patchData); auto response = perform(patchData);
checkHttpCode(); checkHttpCode(response);
return response; return response;
} }
@ -285,7 +316,7 @@ final class OneDriveApi
http.url = url; http.url = url;
addAccessTokenHeader(); addAccessTokenHeader();
auto response = perform(postData); auto response = perform(postData);
checkHttpCode(); checkHttpCode(response);
return response; return response;
} }
@ -304,7 +335,7 @@ final class OneDriveApi
http.onSend = data => file.rawRead(data).length; http.onSend = data => file.rawRead(data).length;
http.contentLength = file.size; http.contentLength = file.size;
auto response = perform(); auto response = perform();
checkHttpCode(); checkHttpCode(response);
return response; return response;
} }
@ -343,7 +374,15 @@ final class OneDriveApi
} catch (CurlException e) { } catch (CurlException e) {
throw new OneDriveException(e.msg, e); throw new OneDriveException(e.msg, e);
} }
return content.parseJSON(); JSONValue json;
try {
json = content.parseJSON();
} catch (JSONException e) {
e.msg ~= "\n";
e.msg ~= content;
throw e;
}
return json;
} }
private void checkHttpCode() private void checkHttpCode()
@ -352,4 +391,45 @@ final class OneDriveApi
throw new OneDriveException(http.statusLine.code, http.statusLine.reason); throw new OneDriveException(http.statusLine.code, http.statusLine.reason);
} }
} }
private void checkHttpCode(ref const JSONValue response)
{
if (http.statusLine.code / 100 != 2) {
throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response);
}
}
}
unittest
{
string configDirName = expandTilde("~/.config/onedrive");
auto cfg = new config.Config(configDirName);
cfg.init();
OneDriveApi onedrive = new OneDriveApi(cfg);
onedrive.init();
std.file.write("/tmp/test", "test");
// simpleUpload
auto item = onedrive.simpleUpload("/tmp/test", "/test");
try {
item = onedrive.simpleUpload("/tmp/test", "/test");
} catch (OneDriveException e) {
assert(e.httpStatusCode == 409);
}
try {
item = onedrive.simpleUpload("/tmp/test", "/test", "123");
} catch (OneDriveException e) {
assert(e.httpStatusCode == 412);
}
item = onedrive.simpleUpload("/tmp/test", "/test", item["eTag"].str);
// deleteById
try {
onedrive.deleteById(item["id"].str, "123");
} catch (OneDriveException e) {
assert(e.httpStatusCode == 412);
}
onedrive.deleteById(item["id"].str, item["eTag"].str);
onedrive.http.shutdown();
} }
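A small sketch of how the reworked error type above is meant to be consumed by callers, mirroring the status-code checks added in `init()` and in the sync engine; the call and the handling shown are illustrative, not part of the commit:
```d
import std.json;
import std.stdio;
import onedrive;

void example(OneDriveApi api)
{
    try {
        // any Graph call can now surface the HTTP status and the JSON error body
        auto drive = api.getDefaultDrive();
        writeln(drive["id"].str);
    } catch (OneDriveException e) {
        if (e.httpStatusCode == 401) {
            // expired or invalid token: the caller is expected to re-authorize
            writeln("authorization required");
        } else {
            // e.error holds the parsed JSON error body, when the server returned one
            writeln("request failed: ", e.msg);
        }
    }
}
```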

src/selective.d (new file, +72 lines)

@ -0,0 +1,72 @@
import std.algorithm;
import std.array;
import std.file;
import std.path;
import std.regex;
import std.stdio;
import util;
final class SelectiveSync
{
private string[] paths;
private Regex!char mask;
void load(string filepath)
{
if (exists(filepath)) {
paths = File(filepath)
.byLine()
.map!(a => buildNormalizedPath(a))
.filter!(a => a.length > 0)
.array;
}
}
void setMask(const(char)[] mask)
{
this.mask = wild2regex(mask);
}
bool isNameExcluded(string name)
{
return !name.matchFirst(mask).empty;
}
bool isPathExcluded(string path)
{
return .isPathExcluded(path, paths);
}
}
// test if the given path is not included in the allowed paths
// if there are no allowed paths always return false
private bool isPathExcluded(string path, string[] allowedPaths)
{
// always allow the root
if (path == ".") return false;
// if there are no allowed paths always return false
if (allowedPaths.empty) return false;
path = buildNormalizedPath(path);
foreach (allowed; allowedPaths) {
auto comm = commonPrefix(path, allowed);
if (comm.length == path.length) {
// the given path is contained in an allowed path
return false;
}
if (comm.length == allowed.length && path[comm.length] == '/') {
// the given path is a subitem of an allowed path
return false;
}
}
return true;
}
unittest
{
assert(isPathExcluded("Documents2", ["Documents"]));
assert(!isPathExcluded("Documents", ["Documents"]));
assert(!isPathExcluded("Documents/a.txt", ["Documents"]));
assert(isPathExcluded("Hello/World", ["Hello/John"]));
assert(!isPathExcluded(".", ["Documents"]));
}
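A brief sketch of how SelectiveSync is wired up, mirroring the calls added in src/main.d above; the sync_list path and the file names are illustrative:
```d
import std.stdio;
import selective;

void main()
{
    auto sel = new SelectiveSync();
    sel.load("/home/user/.config/onedrive/sync_list"); // no-op if the file does not exist
    sel.setMask(".*|~*");                              // same pattern syntax as skip_file

    // name-based filtering uses the wildcard mask
    writeln(sel.isNameExcluded(".hidden"));    // expected true: matches ".*"
    writeln(sel.isNameExcluded("report.odt")); // expected false

    // path-based filtering only applies when a sync_list was loaded;
    // with no allowed paths, nothing is excluded by path
    writeln(sel.isPathExcluded("Documents/report.odt"));
}
```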

src/sqlite.d

@ -64,6 +64,27 @@ struct Database
} }
} }
int getVersion()
{
int userVersion;
extern (C) int callback(void* user_version, int count, char** column_text, char** column_name) {
import std.c.stdlib: atoi;
*(cast(int*) user_version) = atoi(*column_text);
return 0;
}
int rc = sqlite3_exec(pDb, "PRAGMA user_version", &callback, &userVersion, null);
if (rc != SQLITE_OK) {
throw new SqliteException(ifromStringz(sqlite3_errmsg(pDb)));
}
return userVersion;
}
void setVersion(int userVersion)
{
import std.conv: to;
exec("PRAGMA user_version=" ~ to!string(userVersion));
}
Statement prepare(const(char)[] zSql) Statement prepare(const(char)[] zSql)
{ {
Statement s; Statement s;
@ -170,6 +191,10 @@ unittest
value TEXT value TEXT
)"); )");
assert(db.getVersion() == 0);
db.setVersion(1);
assert(db.getVersion() == 1);
auto s = db.prepare("INSERT INTO test VALUES (?, ?)"); auto s = db.prepare("INSERT INTO test VALUES (?, ?)");
s.bind(1, "key1"); s.bind(1, "key1");
s.bind(2, "value1"); s.bind(2, "value1");
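A minimal sketch of the schema-versioning pattern introduced above, using the Database wrapper's new `getVersion()`/`setVersion()` helpers (backed by SQLite's `PRAGMA user_version`); the table and version number are only examples:
```d
import sqlite;

void openOrMigrate(string filename)
{
    // the version expected by the current code; bump it on every schema change
    immutable int expectedVersion = 1;

    auto db = Database(filename);
    if (db.getVersion() == 0) {
        // a fresh database: create the schema and stamp it
        db.exec("CREATE TABLE example (key TEXT PRIMARY KEY, value TEXT)");
        db.setVersion(expectedVersion);
    } else if (db.getVersion() != expectedVersion) {
        // an incompatible schema: ItemDatabase asks for a manual resync instead of migrating
        throw new Exception("incompatible database schema");
    }
}
```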

src/sync.d

@ -1,25 +1,34 @@
import std.algorithm;
import std.net.curl: CurlTimeoutException;
import std.exception: ErrnoException; import std.exception: ErrnoException;
import std.algorithm, std.datetime, std.file, std.json, std.path, std.regex; import std.datetime, std.file, std.json, std.path;
import std.regex;
import std.stdio, std.string; import std.stdio, std.string;
import config, itemdb, onedrive, upload, util; import config, itemdb, onedrive, selective, upload, util;
static import log; static import log;
// threshold after which files will be uploaded using an upload session // threshold after which files will be uploaded using an upload session
private long thresholdFileSize = 10 * 2^^20; // 10 MiB private long thresholdFileSize = 4 * 2^^20; // 4 MiB
private bool isItemFolder(const ref JSONValue item) private bool isItemFolder(const ref JSONValue item)
{ {
return (("folder" in item.object) !is null); return ("folder" in item) != null;
} }
private bool isItemFile(const ref JSONValue item) private bool isItemFile(const ref JSONValue item)
{ {
return (("file" in item.object) !is null); return ("file" in item) != null;
} }
private bool isItemDeleted(const ref JSONValue item) private bool isItemDeleted(const ref JSONValue item)
{ {
return (("deleted" in item.object) !is null); // HACK: fix for https://github.com/skilion/onedrive/issues/157
return ("deleted" in item) || ("fileSystemInfo" !in item);
}
private bool isItemRoot(const ref JSONValue item)
{
return ("root" in item) != null;
} }
private bool testCrc32(string path, const(char)[] crc32) private bool testCrc32(string path, const(char)[] crc32)
@ -49,23 +58,22 @@ final class SyncEngine
private Config cfg; private Config cfg;
private OneDriveApi onedrive; private OneDriveApi onedrive;
private ItemDatabase itemdb; private ItemDatabase itemdb;
private Regex!char skipDir, skipFile;
private UploadSession session; private UploadSession session;
private SelectiveSync selectiveSync;
// token representing the last status correctly synced // token representing the last status correctly synced
private string statusToken; private string statusToken;
// list of items to skip while applying the changes // list of items to skip while applying the changes
private string[] skippedItems; private string[] skippedItems;
// list of items to delete after the changes has been downloaded // list of items to delete after the changes has been downloaded
private string[] pathsToDelete; private string[] idsToDelete;
this(Config cfg, OneDriveApi onedrive, ItemDatabase itemdb) this(Config cfg, OneDriveApi onedrive, ItemDatabase itemdb, SelectiveSync selectiveSync)
{ {
assert(onedrive && itemdb); assert(onedrive && itemdb && selectiveSync);
this.cfg = cfg; this.cfg = cfg;
this.onedrive = onedrive; this.onedrive = onedrive;
this.itemdb = itemdb; this.itemdb = itemdb;
skipDir = wild2regex(cfg.getValue("skip_dir")); this.selectiveSync = selectiveSync;
skipFile = wild2regex(cfg.getValue("skip_file"));
session = UploadSession(onedrive, cfg.uploadStateFilePath); session = UploadSession(onedrive, cfg.uploadStateFilePath);
} }
@ -91,22 +99,42 @@ final class SyncEngine
try { try {
JSONValue changes; JSONValue changes;
do { do {
changes = onedrive.viewChangesByPath("/", statusToken); // get changes from the server
try {
changes = onedrive.viewChangesByPath(".", statusToken);
} catch (OneDriveException e) {
if (e.httpStatusCode == 410) {
log.log("Status token expired, resyncing");
statusToken = null;
continue;
}
else {
throw e;
}
}
foreach (item; changes["value"].array) { foreach (item; changes["value"].array) {
applyDifference(item); applyDifference(item);
} }
statusToken = changes["@delta.token"].str; // hack to reuse old code
string url;
if ("@odata.nextLink" in changes) url = changes["@odata.nextLink"].str;
if ("@odata.deltaLink" in changes) url = changes["@odata.deltaLink"].str;
auto c = matchFirst(url, r"(?:token=)([\w\d]+)");
c.popFront(); // skip the whole match
statusToken = c.front;
std.file.write(cfg.statusTokenFilePath, statusToken); std.file.write(cfg.statusTokenFilePath, statusToken);
} while (("@odata.nextLink" in changes.object) !is null); } while ("@odata.nextLink" in changes);
} catch (ErrnoException e) { } catch (ErrnoException e) {
throw new SyncException(e.msg, e); throw new SyncException(e.msg, e);
} catch (FileException e) { } catch (FileException e) {
throw new SyncException(e.msg, e); throw new SyncException(e.msg, e);
} catch (CurlTimeoutException e) {
throw new SyncException(e.msg, e);
} catch (OneDriveException e) { } catch (OneDriveException e) {
throw new SyncException(e.msg, e); throw new SyncException(e.msg, e);
} }
// delete items in pathsToDelete // delete items in idsToDelete
if (pathsToDelete.length > 0) deleteItems(); if (idsToDelete.length > 0) deleteItems();
// empty the skipped items // empty the skipped items
skippedItems.length = 0; skippedItems.length = 0;
assumeSafeAppend(skippedItems); assumeSafeAppend(skippedItems);
@ -116,21 +144,27 @@ final class SyncEngine
{ {
string id = item["id"].str; string id = item["id"].str;
string name = item["name"].str; string name = item["name"].str;
string eTag = item["eTag"].str;
string parentId = item["parentReference"]["id"].str;
// HACK: recognize the root directory log.vlog(id, " ", name);
if (name == "root" && parentId[$ - 1] == '0' && parentId[$ - 2] == '!') {
parentId = null; // eTag and parentId do not exists for the root in OneDrive Biz
string eTag, parentId;
if (!isItemRoot(item)) {
eTag = item["eTag"].str;
parentId = item["parentReference"]["id"].str;
} }
// skip unwanted items early // skip unwanted items early
if (skippedItems.find(parentId).length != 0) { if (skippedItems.find(parentId).length != 0) {
log.vlog("Filtered out");
skippedItems ~= id;
return;
}
if (selectiveSync.isNameExcluded(name)) {
log.vlog("Filtered out");
skippedItems ~= id; skippedItems ~= id;
return; return;
} }
log.vlog(id, " ", name);
// rename the local item if it is unsynced and there is a new version of it // rename the local item if it is unsynced and there is a new version of it
Item oldItem; Item oldItem;
@ -145,50 +179,36 @@ final class SyncEngine
} }
} }
// check if the item is to be deleted
if (isItemDeleted(item)) {
log.vlog("The item is marked for deletion");
if (cached) idsToDelete ~= id;
return;
}
// compute the path of the item // compute the path of the item
string path = "."; string path = ".";
if (parentId) { if (parentId) {
path = itemdb.computePath(parentId) ~ "/" ~ name; path = itemdb.computePath(parentId) ~ "/" ~ name;
} // selective sync
if (selectiveSync.isPathExcluded(path)) {
ItemType type; log.vlog("Filtered out: ", path);
if (isItemDeleted(item)) {
log.vlog("The item is marked for deletion");
if (cached) {
itemdb.deleteById(id);
pathsToDelete ~= oldPath;
}
return;
} else if (isItemFile(item)) {
type = ItemType.file;
if (!path.matchFirst(skipFile).empty) {
log.vlog("Filtered out");
return;
}
} else if (isItemFolder(item)) {
type = ItemType.dir;
if (!path.matchFirst(skipDir).empty) {
log.vlog("Filtered out");
skippedItems ~= id; skippedItems ~= id;
return; return;
} }
}
ItemType type;
if (isItemFile(item)) {
type = ItemType.file;
} else if (isItemFolder(item)) {
type = ItemType.dir;
} else { } else {
log.vlog("The item is neither a file nor a directory, skipping"); log.vlog("The item is neither a file nor a directory, skipping");
skippedItems ~= id; skippedItems ~= id;
return; return;
} }
string cTag;
try {
cTag = item["cTag"].str;
} catch (JSONException e) {
// cTag is not returned if the Item is a folder
// https://dev.onedrive.com/resources/item.htm
cTag = "";
}
string mtime = item["fileSystemInfo"]["lastModifiedDateTime"].str;
string crc32; string crc32;
if (type == ItemType.file) { if (type == ItemType.file) {
try { try {
@ -203,8 +223,8 @@ final class SyncEngine
name: name, name: name,
type: type, type: type,
eTag: eTag, eTag: eTag,
cTag: cTag, cTag: "cTag" in item ? item["cTag"].str : null,
mtime: SysTime.fromISOExtString(mtime), mtime: SysTime.fromISOExtString(item["fileSystemInfo"]["lastModifiedDateTime"].str),
parentId: parentId, parentId: parentId,
crc32: crc32 crc32: crc32
}; };
@ -217,9 +237,9 @@ final class SyncEngine
// save the item in the db // save the item in the db
if (oldItem.id) { if (oldItem.id) {
itemdb.update(id, name, type, eTag, cTag, mtime, parentId, crc32); itemdb.update(newItem);
} else { } else {
itemdb.insert(id, name, type, eTag, cTag, mtime, parentId, crc32); itemdb.insert(newItem);
} }
} }
@ -283,8 +303,9 @@ final class SyncEngine
case ItemType.file: case ItemType.file:
if (isFile(path)) { if (isFile(path)) {
SysTime localModifiedTime = timeLastModified(path); SysTime localModifiedTime = timeLastModified(path);
import core.time: Duration; // HACK: reduce time resolution to seconds before comparing
item.mtime.fracSecs = Duration.zero; // HACK item.mtime.fracSecs = Duration.zero;
localModifiedTime.fracSecs = Duration.zero;
if (localModifiedTime == item.mtime) { if (localModifiedTime == item.mtime) {
return true; return true;
} else { } else {
@ -313,7 +334,9 @@ final class SyncEngine
private void deleteItems() private void deleteItems()
{ {
log.vlog("Deleting files ..."); log.vlog("Deleting files ...");
foreach_reverse (path; pathsToDelete) { foreach_reverse (id; idsToDelete) {
string path = itemdb.computePath(id);
itemdb.deleteById(id);
if (exists(path)) { if (exists(path)) {
if (isFile(path)) { if (isFile(path)) {
remove(path); remove(path);
@ -328,8 +351,8 @@ final class SyncEngine
} }
} }
} }
pathsToDelete.length = 0; idsToDelete.length = 0;
assumeSafeAppend(pathsToDelete); assumeSafeAppend(idsToDelete);
} }
// scan the given directory for differences // scan the given directory for differences
@ -355,20 +378,23 @@ final class SyncEngine
private void uploadDifferences(Item item) private void uploadDifferences(Item item)
{ {
log.vlog(item.id, " ", item.name); log.vlog(item.id, " ", item.name);
// skip filtered items
if (selectiveSync.isNameExcluded(item.name)) {
log.vlog("Filtered out");
return;
}
string path = itemdb.computePath(item.id); string path = itemdb.computePath(item.id);
if (selectiveSync.isPathExcluded(path)) {
log.vlog("Filtered out: ", path);
return;
}
final switch (item.type) { final switch (item.type) {
case ItemType.dir: case ItemType.dir:
if (!path.matchFirst(skipDir).empty) {
log.vlog("Filtered out");
break;
}
uploadDirDifferences(item, path); uploadDirDifferences(item, path);
break; break;
case ItemType.file: case ItemType.file:
if (!path.matchFirst(skipFile).empty) {
log.vlog("Filtered out");
break;
}
uploadFileDifferences(item, path); uploadFileDifferences(item, path);
break; break;
} }
@ -401,8 +427,9 @@ final class SyncEngine
if (exists(path)) { if (exists(path)) {
if (isFile(path)) { if (isFile(path)) {
SysTime localModifiedTime = timeLastModified(path); SysTime localModifiedTime = timeLastModified(path);
import core.time: Duration; // HACK: reduce time resolution to seconds before comparing
item.mtime.fracSecs = Duration.zero; // HACK item.mtime.fracSecs = Duration.zero;
localModifiedTime.fracSecs = Duration.zero;
if (localModifiedTime != item.mtime) { if (localModifiedTime != item.mtime) {
log.vlog("The file last modified time has changed"); log.vlog("The file last modified time has changed");
string id = item.id; string id = item.id;
@ -440,26 +467,35 @@ final class SyncEngine
private void uploadNewItems(string path) private void uploadNewItems(string path)
{ {
// skip unexisting symbolic links
if (isSymlink(path) && !exists(readLink(path))) { if (isSymlink(path) && !exists(readLink(path))) {
return; return;
} }
// skip filtered items
if (path != ".") {
if (selectiveSync.isNameExcluded(baseName(path))) {
return;
}
if (selectiveSync.isPathExcluded(path)) {
return;
}
}
if (isDir(path)) { if (isDir(path)) {
if (path.matchFirst(skipDir).empty) { Item item;
Item item; if (!itemdb.selectByPath(path, item)) {
if (!itemdb.selectByPath(path, item)) { uploadCreateDir(path);
uploadCreateDir(path); }
} // recursively traverse children
auto entries = dirEntries(path, SpanMode.shallow, false); auto entries = dirEntries(path, SpanMode.shallow, false);
foreach (DirEntry entry; entries) { foreach (DirEntry entry; entries) {
uploadNewItems(entry.name); uploadNewItems(entry.name);
}
} }
} else { } else {
if (path.matchFirst(skipFile).empty) { Item item;
Item item; if (!itemdb.selectByPath(path, item)) {
if (!itemdb.selectByPath(path, item)) { uploadNewFile(path);
uploadNewFile(path);
}
} }
} }
} }
@ -469,7 +505,7 @@ final class SyncEngine
log.log("Creating remote directory: ", path); log.log("Creating remote directory: ", path);
JSONValue item = ["name": baseName(path).idup]; JSONValue item = ["name": baseName(path).idup];
item["folder"] = parseJSON("{}"); item["folder"] = parseJSON("{}");
auto res = onedrive.createByPath(path.dirName ~ "/", item); auto res = onedrive.createByPath(path.dirName, item);
saveItem(res); saveItem(res);
} }
@ -482,7 +518,6 @@ final class SyncEngine
} else { } else {
response = session.upload(path, path); response = session.upload(path, path);
} }
saveItem(response);
string id = response["id"].str; string id = response["id"].str;
string cTag = response["cTag"].str; string cTag = response["cTag"].str;
SysTime mtime = timeLastModified(path).toUTC(); SysTime mtime = timeLastModified(path).toUTC();
@ -498,7 +533,7 @@ final class SyncEngine
try { try {
onedrive.deleteById(item.id, item.eTag); onedrive.deleteById(item.id, item.eTag);
} catch (OneDriveException e) { } catch (OneDriveException e) {
if (e.code == 404) log.log(e.msg); if (e.httpStatusCode == 404) log.log(e.msg);
else throw e; else throw e;
} }
itemdb.deleteById(item.id); itemdb.deleteById(item.id);
@ -515,31 +550,33 @@ final class SyncEngine
saveItem(res); saveItem(res);
} }
private void saveItem(JSONValue item) private void saveItem(JSONValue jsonItem)
{ {
string id = item["id"].str;
ItemType type; ItemType type;
if (isItemFile(item)) { if (isItemFile(jsonItem)) {
type = ItemType.file; type = ItemType.file;
} else if (isItemFolder(item)) { } else if (isItemFolder(jsonItem)) {
type = ItemType.dir; type = ItemType.dir;
} else { } else {
assert(0); assert(0);
} }
string name = item["name"].str; Item item = {
string eTag = item["eTag"].str; id: jsonItem["id"].str,
string cTag = item["cTag"].str; name: jsonItem["name"].str,
string mtime = item["fileSystemInfo"]["lastModifiedDateTime"].str; type: type,
string parentId = item["parentReference"]["id"].str; eTag: jsonItem["eTag"].str,
string crc32; cTag: "cTag" in jsonItem ? jsonItem["cTag"].str : null,
mtime: SysTime.fromISOExtString(jsonItem["fileSystemInfo"]["lastModifiedDateTime"].str),
parentId: jsonItem["parentReference"]["id"].str
};
if (type == ItemType.file) { if (type == ItemType.file) {
try { try {
crc32 = item["file"]["hashes"]["crc32Hash"].str; item.crc32 = jsonItem["file"]["hashes"]["crc32Hash"].str;
} catch (JSONException e) { } catch (JSONException e) {
// swallow exception log.vlog("The hash is not available");
} }
} }
itemdb.upsert(id, name, type, eTag, cTag, mtime, parentId, crc32); itemdb.upsert(item);
} }
void uploadMoveItem(string from, string to) void uploadMoveItem(string from, string to)
@ -576,7 +613,7 @@ final class SyncEngine
try { try {
uploadDeleteItem(item, path); uploadDeleteItem(item, path);
} catch (OneDriveException e) { } catch (OneDriveException e) {
if (e.code == 404) log.log(e.msg); if (e.httpStatusCode == 404) log.log(e.msg);
else throw e; else throw e;
} }
} }
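A self-contained sketch of the token extraction used in `applyDifferences()` above: the Graph delta endpoint returns the next status token only inside the `@odata.nextLink`/`@odata.deltaLink` URLs, so it is pulled out with a regex. The URL below is fabricated:
```d
import std.regex;
import std.stdio;

void main()
{
    // fabricated example of an @odata.deltaLink value returned by the delta endpoint
    string url = "https://graph.microsoft.com/v1.0/me/drive/root/delta?token=1230919903419";

    auto c = matchFirst(url, r"(?:token=)([\w\d]+)");
    assert(!c.empty);
    c.popFront();            // skip the whole match, leaving the captured group
    string statusToken = c.front;
    writeln(statusToken);    // prints "1230919903419"
}
```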

src/util.d

@ -1,6 +1,7 @@
import std.conv; import std.conv;
import std.digest.crc; import std.digest.crc;
import std.file; import std.file;
import std.net.curl;
import std.path; import std.path;
import std.regex; import std.regex;
import std.socket; import std.socket;
@ -54,7 +55,7 @@ Regex!char wild2regex(const(char)[] pattern)
{ {
string str; string str;
str.reserve(pattern.length + 2); str.reserve(pattern.length + 2);
str ~= "/"; str ~= "^";
foreach (c; pattern) { foreach (c; pattern) {
switch (c) { switch (c) {
case '*': case '*':
@ -67,7 +68,7 @@ Regex!char wild2regex(const(char)[] pattern)
str ~= "[^/]"; str ~= "[^/]";
break; break;
case '|': case '|':
str ~= "$|/"; str ~= "$|^";
break; break;
default: default:
str ~= c; str ~= c;
@ -81,11 +82,25 @@ Regex!char wild2regex(const(char)[] pattern)
// return true if the network connection is available // return true if the network connection is available
bool testNetwork() bool testNetwork()
{ {
try { HTTP http = HTTP("https://login.microsoftonline.com");
auto addr = new InternetAddress("login.live.com", 443); http.method = HTTP.Method.head;
auto socket = new TcpSocket(addr); return http.perform(ThrowOnError.no) == 0;
return socket.isAlive(); }
} catch (SocketException) {
return false; // call globMatch for each string in pattern separated by '|'
} bool multiGlobMatch(const(char)[] path, const(char)[] pattern)
{
foreach (glob; pattern.split('|')) {
if (globMatch!(std.path.CaseSensitive.yes)(path, glob)) {
return true;
}
}
return false;
}
unittest
{
assert(multiGlobMatch(".hidden", ".*"));
assert(multiGlobMatch(".hidden", "file|.*"));
assert(!multiGlobMatch("foo.bar", "foo|bar"));
} }
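A short sketch of what the anchoring change to `wild2regex` above is intended to do: each alternative is now anchored with `^` rather than to a path separator, so a skip mask is matched against a bare file or directory name (as `SelectiveSync.isNameExcluded` does). The expected match results are assumptions based on the visible cases:
```d
import std.regex;
import util;

unittest
{
    // with the new "^" anchoring, the mask is matched against a bare name
    auto mask = wild2regex(".*|~*");
    assert(!matchFirst(".hidden", mask).empty);     // hidden files match ".*"
    assert(!matchFirst("~backup.odt", mask).empty); // editor backups match "~*"
    assert(matchFirst("report.odt", mask).empty);   // ordinary names do not match
}
```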