diff --git a/Makefile b/Makefile index 006d1a0e..363ff7fe 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,6 @@ DC = dmd DFLAGS = -ofonedrive -L-lcurl -L-lsqlite3 -L-ldl DESTDIR = /usr/local/bin -CONFDIR = /usr/local/etc SOURCES = \ src/config.d \ @@ -10,6 +9,7 @@ SOURCES = \ src/main.d \ src/monitor.d \ src/onedrive.d \ + src/selective.d \ src/sqlite.d \ src/sync.d \ src/upload.d \ @@ -27,12 +27,10 @@ unittest: $(SOURCES) clean: rm -f onedrive.o onedrive -install: onedrive onedrive.conf +install: onedrive onedrive.service install onedrive $(DESTDIR)/onedrive - install -m 644 onedrive.conf $(CONFDIR)/onedrive.conf install -m 644 onedrive.service /usr/lib/systemd/user uninstall: rm -f $(DESTDIR)/onedrive - rm -f $(CONFDIR)/onedrive.conf rm -f /usr/lib/systemd/user/onedrive.service diff --git a/README.md b/README.md index 70338d56..882c0225 100644 --- a/README.md +++ b/README.md @@ -26,31 +26,44 @@ sudo apt-get update && sudo apt-get install dmd-bin ``` ### Installation -1. `make` -2. `sudo make install` +``` +git clone git@github.com:skilion/onedrive.git +cd onedrive +make +sudo make install +``` ### Configuration: You should copy the default config file into your home directory before making changes: ``` mkdir -p ~/.config/onedrive -cp /usr/local/etc/onedrive.conf ~/.config/onedrive/config +cp ./config ~/.config/onedrive/config ``` Available options: -* `client_id`: application identifier necessary for the [authentication][2] * `sync_dir`: directory where the files will be synced -* `skip_file`: any files that match this pattern will be skipped during sync -* `skip_dir`: any directories that match this pattern will be skipped during sync +* `skip_file`: any files or directories that match this pattern will be skipped during sync Pattern are case insensitive. -`*` and `?` [wildcards characters][3] are supported. +`*` and `?` [wildcards characters][1] are supported. Use `|` to separate multiple patterns. -[2]: https://dev.onedrive.com/auth/msa_oauth.htm -[3]: https://technet.microsoft.com/en-us/library/bb490639.aspx +[1]: https://technet.microsoft.com/en-us/library/bb490639.aspx + +### Selective sync +Selective sync allows you to sync only specific files and directories. +To enable selective sync create a file named `sync_list` in `~/.config/onedrive`. +Each line represents a path to a file or directory relative from your `sync_dir`. +``` +$ cat ~/.config/onedrive/sync_list +Backup +Documents/report.odt +Work/ProjectX +notes.txt +``` ### First run -The first time you run the program you will be asked to sign in. The procedure require a web browser. +The first time you run the program you will be asked to sign in. The procedure requires a web browser. ### Service If you want to sync your files automatically, enable and start the systemd service: @@ -68,16 +81,17 @@ journalctl --user-unit onedrive -f ``` onedrive [OPTION]... -no option Sync and exit. -m --monitor Keep monitoring for local and remote changes. - --resync Forget the last saved state, perform a full sync. + --resync Forget the last saved state, perform a full sync. + --logout Logout the current user. + --confdir Set the directory to use to store the configuration files. -v --verbose Print more details, useful for debugging. -h --help This help information. 
``` ### Notes: -* After changing the filters (`skip_file` or `skip_dir` in your configs) you must execute `onedrive --resync` -* [Windows naming conventions][4] apply +* After changing `skip_file` in your configs or the sync list, you must execute `onedrive --resync` +* [Windows naming conventions][2] apply * Use `make debug` to generate an executable for debugging -[4]: https://msdn.microsoft.com/en-us/library/aa365247 +[2]: https://msdn.microsoft.com/en-us/library/aa365247 diff --git a/config b/config new file mode 100644 index 00000000..cc4b5b5a --- /dev/null +++ b/config @@ -0,0 +1,4 @@ +# Directory where the files will be synced +sync_dir = "~/OneDrive" +# Skip files and directories that match this pattern +skip_file = ".*|~*" diff --git a/onedrive.conf b/onedrive.conf deleted file mode 100644 index ae7f2282..00000000 --- a/onedrive.conf +++ /dev/null @@ -1,4 +0,0 @@ -client_id = "000000004C15842F" -sync_dir = "~/OneDrive" -skip_file = ".*|~*" -skip_dir = ".*" diff --git a/src/config.d b/src/config.d index 797c372a..653982d9 100644 --- a/src/config.d +++ b/src/config.d @@ -1,4 +1,5 @@ -import std.file, std.regex, std.stdio; +import std.file, std.string, std.regex, std.stdio; +import selective; static import log; final class Config @@ -7,6 +8,7 @@ final class Config public string statusTokenFilePath; public string databaseFilePath; public string uploadStateFilePath; + public string syncListFilePath; private string userConfigFilePath; // hashmap for the values found in the user config file @@ -16,18 +18,19 @@ final class Config { refreshTokenFilePath = configDirName ~ "/refresh_token"; statusTokenFilePath = configDirName ~ "/status_token"; - databaseFilePath = configDirName ~ "/items.db"; + databaseFilePath = configDirName ~ "/items.sqlite3"; uploadStateFilePath = configDirName ~ "/resume_upload"; userConfigFilePath = configDirName ~ "/config"; + syncListFilePath = configDirName ~ "/sync_list"; } void init() { - bool found = false; - found |= load("/etc/onedrive.conf"); - found |= load("/usr/local/etc/onedrive.conf"); - found |= load(userConfigFilePath); - if (!found) throw new Exception("No config file found"); + setValue("sync_dir", "~/OneDrive"); + setValue("skip_file", ".*|~*"); + if (!load(userConfigFilePath)) { + log.vlog("No config file found, using defaults"); + } } string getValue(string key) @@ -40,12 +43,29 @@ final class Config } } + string getValue(string key, string value) + { + auto p = key in values; + if (p) { + return *p; + } else { + return value; + } + } + + void setValue(string key, string value) + { + values[key] = value; + } + private bool load(string filename) { scope(failure) return false; auto file = File(filename, "r"); - auto r = regex(`^\s*(\w+)\s*=\s*"(.*)"\s*$`); + auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`); foreach (line; file.byLine()) { + line = stripLeft(line); + if (line.length == 0 || line[0] == ';' || line[0] == '#') continue; auto c = line.matchFirst(r); if (!c.empty) { c.popFront(); // skip the whole match @@ -63,6 +83,7 @@ final class Config unittest { auto cfg = new Config(""); - cfg.load("onedrive.conf"); + cfg.load("config"); assert(cfg.getValue("sync_dir") == "~/OneDrive"); + assert(cfg.getValue("empty", "default") == "default"); } diff --git a/src/itemdb.d b/src/itemdb.d index 1510a6d2..556aa0de 100644 --- a/src/itemdb.d +++ b/src/itemdb.d @@ -1,4 +1,4 @@ -import std.datetime, std.path, std.string; +import std.datetime, std.path, std.exception, std.string; import sqlite; enum ItemType @@ -21,6 +21,9 @@ struct Item final class ItemDatabase { + 
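A note on the src/config.d hunk above: `init()` now seeds defaults with `setValue`, and `load()` skips blank lines and `#`/`;` comment lines before applying the quoted key/value regex. Below is a stand-alone sketch of that parsing loop, not the module itself; the regex and the default values are taken from the diff, while the file name and `main` are illustrative only.
```
import std.file, std.regex, std.stdio, std.string;

// stand-alone illustration of the parsing loop in Config.load()
string[string] parseConfig(string filename)
{
    string[string] values;
    // defaults, seeded the same way Config.init() now does with setValue()
    values["sync_dir"] = "~/OneDrive";
    values["skip_file"] = ".*|~*";
    if (!exists(filename)) return values; // keep the defaults when there is no user config
    auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`);
    foreach (line; File(filename, "r").byLine()) {
        line = stripLeft(line);
        if (line.length == 0 || line[0] == ';' || line[0] == '#') continue; // comment lines
        auto c = line.matchFirst(r);
        if (c.empty) continue;
        c.popFront(); // skip the whole match
        string key = c.front.idup;
        c.popFront();
        values[key] = c.front.idup;
    }
    return values;
}

void main()
{
    auto values = parseConfig("config");
    writeln(values["sync_dir"]); // "~/OneDrive" unless overridden; "~" is expanded elsewhere
}
```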
// increment this for every change in the db schema + immutable int itemDatabaseVersion = 3; + Database db; Statement insertItemStmt; Statement updateItemStmt; @@ -30,20 +33,26 @@ final class ItemDatabase this(const(char)[] filename) { db = Database(filename); - db.exec("CREATE TABLE IF NOT EXISTS item ( - id TEXT PRIMARY KEY, - name TEXT NOT NULL, - type TEXT NOT NULL, - eTag TEXT NOT NULL, - cTag TEXT NOT NULL, - mtime TEXT NOT NULL, - parentId TEXT, - crc32 TEXT, - FOREIGN KEY (parentId) REFERENCES item (id) ON DELETE CASCADE - )"); - db.exec("CREATE INDEX IF NOT EXISTS name_idx ON item (name)"); + if (db.getVersion() == 0) { + db.exec("CREATE TABLE item ( + id TEXT NOT NULL PRIMARY KEY, + name TEXT NOT NULL, + type TEXT NOT NULL, + eTag TEXT, + cTag TEXT, + mtime TEXT NOT NULL, + parentId TEXT, + crc32 TEXT, + FOREIGN KEY (parentId) REFERENCES item (id) ON DELETE CASCADE + )"); + db.exec("CREATE INDEX name_idx ON item (name)"); + db.setVersion(itemDatabaseVersion); + } else if (db.getVersion() != itemDatabaseVersion) { + throw new Exception("The item database is incompatible, please resync manually"); + } db.exec("PRAGMA foreign_keys = ON"); db.exec("PRAGMA recursive_triggers = ON"); + insertItemStmt = db.prepare("INSERT OR REPLACE INTO item (id, name, type, eTag, cTag, mtime, parentId, crc32) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"); updateItemStmt = db.prepare(" UPDATE item @@ -54,70 +63,28 @@ final class ItemDatabase selectItemByParentIdStmt = db.prepare("SELECT id FROM item WHERE parentId = ?"); } - void insert(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) + void insert(const ref Item item) { - with (insertItemStmt) { - bind(1, id); - bind(2, name); - string typeStr = void; - final switch (type) { - case ItemType.file: typeStr = "file"; break; - case ItemType.dir: typeStr = "dir"; break; - } - bind(3, typeStr); - bind(4, eTag); - bind(5, cTag); - bind(6, mtime); - bind(7, parentId); - bind(8, crc32); - exec(); - } + bindItem(item, insertItemStmt); + insertItemStmt.exec(); } - void update(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) + void update(const ref Item item) { - with (updateItemStmt) { - bind(1, id); - bind(2, name); - string typeStr = void; - final switch (type) { - case ItemType.file: typeStr = "file"; break; - case ItemType.dir: typeStr = "dir"; break; - } - bind(3, typeStr); - bind(4, eTag); - bind(5, cTag); - bind(6, mtime); - bind(7, parentId); - bind(8, crc32); - exec(); - } + bindItem(item, updateItemStmt); + updateItemStmt.exec(); } - void upsert(const(char)[] id, const(char)[] name, ItemType type, const(char)[] eTag, const(char)[] cTag, const(char)[] mtime, const(char)[] parentId, const(char)[] crc32) + void upsert(const ref Item item) { auto s = db.prepare("SELECT COUNT(*) FROM item WHERE id = ?"); - s.bind(1, id); + s.bind(1, item.id); auto r = s.exec(); - Statement* p; - if (r.front[0] == "0") p = &insertItemStmt; - else p = &updateItemStmt; - with (p) { - bind(1, id); - bind(2, name); - string typeStr = void; - final switch (type) { - case ItemType.file: typeStr = "file"; break; - case ItemType.dir: typeStr = "dir"; break; - } - bind(3, typeStr); - bind(4, eTag); - bind(5, cTag); - bind(6, mtime); - bind(7, parentId); - bind(8, crc32); - exec(); - } + Statement* stmt; + if (r.front[0] == "0") stmt = &insertItemStmt; + else stmt = 
&updateItemStmt; + bindItem(item, *stmt); + stmt.exec(); } Item[] selectChildren(const(char)[] id) @@ -147,7 +114,8 @@ final class ItemDatabase bool selectByPath(const(char)[] path, out Item item) { - path = "root/" ~ path.chompPrefix("."); // HACK + // prefix with the root dir + path = "root/" ~ path.chompPrefix("."); // initialize the search string[2][] candidates; // [id, parentId] @@ -218,6 +186,25 @@ final class ItemDatabase return false; } + private void bindItem(const ref Item item, ref Statement stmt) + { + with (stmt) with (item) { + bind(1, id); + bind(2, name); + string typeStr = null; + final switch (type) with (ItemType) { + case file: typeStr = "file"; break; + case dir: typeStr = "dir"; break; + } + bind(3, typeStr); + bind(4, eTag); + bind(5, cTag); + bind(6, mtime.toISOExtString()); + bind(7, parentId); + bind(8, crc32); + } + } + private Item buildItem(Statement.Result result) { assert(!result.empty && result.front.length == 8); @@ -231,31 +218,30 @@ final class ItemDatabase crc32: result.front[7].dup }; switch (result.front[2]) { - case "file": item.type = ItemType.file; break; - case "dir": item.type = ItemType.dir; break; - default: assert(0); + case "file": item.type = ItemType.file; break; + case "dir": item.type = ItemType.dir; break; + default: assert(0); } return item; } + // computes the path of the given item id + // the path is relative to the sync directory ex: "./Music/Turbo Killer.mp3" + // a trailing slash is never added string computePath(const(char)[] id) { - if (!id) return null; string path; auto s = db.prepare("SELECT name, parentId FROM item WHERE id = ?"); while (true) { s.bind(1, id); auto r = s.exec(); - if (r.empty) { - // no results - break; - } else if (r.front[1]) { + enforce(!r.empty, "Unknow item id"); + if (r.front[1]) { if (path) path = r.front[0].idup ~ "/" ~ path; else path = r.front[0].idup; } else { // root - if (path) path = "./" ~ path; - else path = "."; + if (!path) path = "."; break; } id = r.front[1].dup; diff --git a/src/main.d b/src/main.d index 241a096f..0de12d25 100644 --- a/src/main.d +++ b/src/main.d @@ -1,7 +1,7 @@ import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE; import core.memory, core.time, core.thread; import std.getopt, std.file, std.path, std.process; -import config, itemdb, monitor, onedrive, sync, util; +import config, itemdb, monitor, onedrive, selective, sync, util; static import log; int main(string[] args) @@ -46,6 +46,14 @@ int main(string[] args) if (!exists(configDirName)) mkdir(configDirName); auto cfg = new config.Config(configDirName); cfg.init(); + + // upgrades + if (exists(configDirName ~ "/items.db")) { + remove(configDirName ~ "/items.db"); + log.log("Database schema changed, resync needed"); + resync = true; + } + if (resync || logout) { log.log("Deleting the saved status ..."); safeRemove(cfg.databaseFilePath); @@ -79,13 +87,16 @@ int main(string[] args) chdir(syncDir); log.vlog("Initializing the Synchronization Engine ..."); - auto sync = new SyncEngine(cfg, onedrive, itemdb); + auto selectiveSync = new SelectiveSync(); + selectiveSync.load(cfg.syncListFilePath); + selectiveSync.setMask(cfg.getValue("skip_file")); + auto sync = new SyncEngine(cfg, onedrive, itemdb, selectiveSync); sync.init(); if (online) performSync(sync); if (monitor) { log.vlog("Initializing monitor ..."); - Monitor m; + Monitor m = new Monitor(selectiveSync); m.onDirCreated = delegate(string path) { log.vlog("[M] Directory created: ", path); try { diff --git a/src/monitor.d b/src/monitor.d index cc4dd684..5dec10d4 100644 
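On the src/itemdb.d changes above: `insert`, `update` and `upsert` now take a `const ref Item` and share a single `bindItem` helper instead of repeating eight `bind()` calls each, and the schema is stamped with `PRAGMA user_version` so an incompatible database is rejected (main.d deletes the old `items.db` and forces a resync). A rough usage sketch, compiled against the project sources; the ids, names and the in-memory database path are hypothetical, and it assumes the `sqlite.Database` wrapper hands the file name straight to `sqlite3_open`.
```
import std.datetime;
import std.stdio;
import itemdb; // Item, ItemType, ItemDatabase from src/itemdb.d above

void example()
{
    // throw-away in-memory database, for illustration only
    auto itemdb = new ItemDatabase(":memory:");
    Item root = {
        id: "root!0",                  // hypothetical ids, only to show the call shape
        name: "root",
        type: ItemType.dir,
        mtime: Clock.currTime().toUTC()
    };
    itemdb.upsert(root);               // one call instead of eight positional bind() arguments
    Item file = {
        id: "item!1",
        name: "report.odt",
        type: ItemType.file,
        mtime: Clock.currTime().toUTC(),
        parentId: "root!0",            // must reference an existing row: foreign_keys is ON
        crc32: "A45C54FB"
    };
    itemdb.upsert(file);
    writeln(itemdb.computePath(file.id));
}
```
Keeping the column order in a single place (`bindItem`) is the point of the refactor.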
--- a/src/monitor.d +++ b/src/monitor.d @@ -2,7 +2,9 @@ import core.sys.linux.sys.inotify; import core.stdc.errno; import core.sys.posix.poll, core.sys.posix.unistd; import std.exception, std.file, std.path, std.regex, std.stdio, std.string; -import config, util; +import config; +import selective; +import util; static import log; // relevant inotify events @@ -17,11 +19,9 @@ class MonitorException: ErrnoException } } -struct Monitor +final class Monitor { bool verbose; - // regex that match files/dirs to skip - private Regex!char skipDir, skipFile; // inotify file descriptor private int fd; // map every inotify watch descriptor to its directory @@ -31,18 +31,23 @@ struct Monitor // buffer to receive the inotify events private void[] buffer; + private SelectiveSync selectiveSync; + void delegate(string path) onDirCreated; void delegate(string path) onFileChanged; void delegate(string path) onDelete; void delegate(string from, string to) onMove; - @disable this(this); + this(SelectiveSync selectiveSync) + { + assert(selectiveSync); + this.selectiveSync = selectiveSync; + } void init(Config cfg, bool verbose) { this.verbose = verbose; - skipDir = wild2regex(cfg.getValue("skip_dir")); - skipFile = wild2regex(cfg.getValue("skip_file")); + fd = inotify_init(); if (fd == -1) throw new MonitorException("inotify_init failed"); if (!buffer) buffer = new void[4096]; @@ -57,12 +62,20 @@ struct Monitor private void addRecursive(string dirname) { - if (matchFirst(dirname, skipDir).empty) { - add(dirname); - foreach(DirEntry entry; dirEntries(dirname, SpanMode.shallow, false)) { - if (entry.isDir) { - addRecursive(entry.name); - } + // skip filtered items + if (dirname != ".") { + if (selectiveSync.isNameExcluded(baseName(dirname))) { + return; + } + if (selectiveSync.isPathExcluded(buildNormalizedPath(dirname))) { + return; + } + } + + add(dirname); + foreach(DirEntry entry; dirEntries(dirname, SpanMode.shallow, false)) { + if (entry.isDir) { + addRecursive(entry.name); } } } @@ -85,7 +98,7 @@ struct Monitor } throw new MonitorException("inotify_add_watch failed"); } - wdToDirName[wd] = dirname ~ "/"; + wdToDirName[wd] = buildNormalizedPath(dirname) ~ "/"; log.vlog("Monitor directory: ", dirname); } @@ -152,14 +165,11 @@ struct Monitor // skip filtered items path = getPath(event); - if (event.mask & IN_ISDIR) { - if (!matchFirst(path, skipDir).empty) { - goto skip; - } - } else { - if (!matchFirst(path, skipFile).empty) { - goto skip; - } + if (selectiveSync.isNameExcluded(baseName(path))) { + goto skip; + } + if (selectiveSync.isPathExcluded(path)) { + goto skip; } if (event.mask & IN_MOVED_FROM) { diff --git a/src/onedrive.d b/src/onedrive.d index 53009215..ec6dfe99 100644 --- a/src/onedrive.d +++ b/src/onedrive.d @@ -6,27 +6,39 @@ static import log; private immutable { - string authUrl = "https://login.live.com/oauth20_authorize.srf"; - string redirectUrl = "https://login.live.com/oauth20_desktop.srf"; // "urn:ietf:wg:oauth:2.0:oob"; - string tokenUrl = "https://login.live.com/oauth20_token.srf"; - string itemByIdUrl = "https://api.onedrive.com/v1.0/drive/items/"; - string itemByPathUrl = "https://api.onedrive.com/v1.0/drive/root:/"; + string clientId = "22c49a0d-d21c-4792-aed1-8f163c982546"; + string authUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"; + string redirectUrl = "https://login.microsoftonline.com/common/oauth2/nativeclient"; + string tokenUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/token"; + string driveUrl = 
"https://graph.microsoft.com/v1.0/me/drive"; + string itemByIdUrl = "https://graph.microsoft.com/v1.0/me/drive/items/"; + string itemByPathUrl = "https://graph.microsoft.com/v1.0/me/drive/root:/"; } class OneDriveException: Exception { - // HTTP status code - int code; + int httpStatusCode; + // https://dev.onedrive.com/misc/errors.htm + JSONValue error; @nogc @safe pure nothrow this(string msg, Throwable next, string file = __FILE__, size_t line = __LINE__) { super(msg, file, line, next); } - @safe pure this(int code, string reason, string file = __FILE__, size_t line = __LINE__) + @safe pure this(int httpStatusCode, string reason, string file = __FILE__, size_t line = __LINE__) { - this.code = code; - string msg = format("HTTP request returned status code %d (%s)", code, reason); + this.httpStatusCode = httpStatusCode; + this.error = error; + string msg = format("HTTP request returned status code %d (%s)", httpStatusCode, reason); + super(msg, file, line, next); + } + + this(int httpStatusCode, string reason, ref const JSONValue error, string file = __FILE__, size_t line = __LINE__) + { + this.httpStatusCode = httpStatusCode; + this.error = error; + string msg = format("HTTP request returned status code %d (%s)\n%s", httpStatusCode, reason, toJSON(error, true)); super(msg, file, line, next); } } @@ -34,7 +46,6 @@ class OneDriveException: Exception final class OneDriveApi { private Config cfg; - private string clientId; private string refreshToken, accessToken; private SysTime accessTokenExpiration; /* private */ HTTP http; @@ -42,7 +53,6 @@ final class OneDriveApi this(Config cfg) { this.cfg = cfg; - this.clientId = cfg.getValue("client_id"); http = HTTP(); //http.verbose = true; } @@ -51,8 +61,15 @@ final class OneDriveApi { try { refreshToken = readText(cfg.refreshTokenFilePath); + getDefaultDrive(); } catch (FileException e) { return authorize(); + } catch (OneDriveException e) { + if (e.httpStatusCode == 400 || e.httpStatusCode == 401) { + log.log("Refresh token invalid"); + return authorize(); + } + throw e; } return true; } @@ -61,12 +78,12 @@ final class OneDriveApi { import std.stdio, std.regex; char[] response; - string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=onedrive.readwrite%20offline_access&response_type=code&redirect_uri=" ~ redirectUrl; + string url = authUrl ~ "?client_id=" ~ clientId ~ "&scope=files.readwrite%20files.readwrite.all%20offline_access&response_type=code&redirect_uri=" ~ redirectUrl; log.log("Authorize this app visiting:\n"); write(url, "\n\n", "Enter the response uri: "); readln(response); // match the authorization code - auto c = matchFirst(response, r"(?:code=)(([\w\d]+-){4}[\w\d]+)"); + auto c = matchFirst(response, r"(?:code=)([\w\d-]+)"); if (c.empty) { log.log("Invalid uri"); return false; @@ -76,13 +93,20 @@ final class OneDriveApi return true; } + // https://dev.onedrive.com/drives/default.htm + JSONValue getDefaultDrive() + { + checkAccessTokenExpired(); + return get(driveUrl); + } + // https://dev.onedrive.com/items/view_delta.htm JSONValue viewChangesById(const(char)[] id, const(char)[] statusToken) { checkAccessTokenExpired(); - const(char)[] url = itemByIdUrl ~ id ~ "/view.delta"; - url ~= "?select=id,name,eTag,cTag,deleted,file,folder,fileSystemInfo,remoteItem,parentReference"; - if (statusToken) url ~= "?token=" ~ statusToken; + const(char)[] url = itemByIdUrl ~ id ~ "/delta"; + url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference"; + if (statusToken) url ~= "&token=" ~ statusToken; 
return get(url); } @@ -90,8 +114,10 @@ final class OneDriveApi JSONValue viewChangesByPath(const(char)[] path, const(char)[] statusToken) { checkAccessTokenExpired(); - string url = itemByPathUrl ~ encodeComponent(path) ~ ":/view.delta"; - url ~= "?select=id,name,eTag,cTag,deleted,file,folder,fileSystemInfo,remoteItem,parentReference"; + string url = itemByPathUrl ~ encodeComponent(path) ~ ":/delta"; + // HACK + if (path == ".") url = driveUrl ~ "/root/delta"; + url ~= "?select=id,name,eTag,cTag,deleted,file,folder,root,fileSystemInfo,remoteItem,parentReference"; if (statusToken) url ~= "&token=" ~ statusToken; return get(url); } @@ -117,7 +143,6 @@ final class OneDriveApi { checkAccessTokenExpired(); string url = itemByPathUrl ~ encodeComponent(remotePath) ~ ":/content"; - http.addRequestHeader("Content-Type", "application/octet-stream"); if (eTag) http.addRequestHeader("If-Match", eTag); else url ~= "?@name.conflictBehavior=fail"; return upload(localPath, url); @@ -138,14 +163,18 @@ final class OneDriveApi { checkAccessTokenExpired(); char[] url = itemByIdUrl ~ id; - if (eTag) http.addRequestHeader("If-Match", eTag); + //TODO: investigate why this always fail with 412 (Precondition Failed) + //if (eTag) http.addRequestHeader("If-Match", eTag); del(url); } // https://dev.onedrive.com/items/create.htm JSONValue createByPath(const(char)[] parentPath, JSONValue item) { + checkAccessTokenExpired(); string url = itemByPathUrl ~ encodeComponent(parentPath) ~ ":/children"; + // HACK + if (parentPath == ".") url = driveUrl ~ "/root/children"; http.addRequestHeader("Content-Type", "application/json"); return post(url, item.toString()); } @@ -154,7 +183,7 @@ final class OneDriveApi JSONValue createUploadSession(const(char)[] path, const(char)[] eTag = null) { checkAccessTokenExpired(); - string url = itemByPathUrl ~ encodeComponent(path) ~ ":/upload.createSession"; + string url = itemByPathUrl ~ encodeComponent(path) ~ ":/createUploadSession"; if (eTag) http.addRequestHeader("If-Match", eTag); return post(url, null); } @@ -169,7 +198,8 @@ final class OneDriveApi } http.method = HTTP.Method.put; http.url = uploadUrl; - addAccessTokenHeader(); + // when using microsoft graph the auth code is different + //addAccessTokenHeader(); import std.conv; string contentRange = "bytes " ~ to!string(offset) ~ "-" ~ to!string(offset + offsetSize - 1) ~ "/" ~ to!string(fileSize); http.addRequestHeader("Content-Range", contentRange); @@ -187,7 +217,8 @@ final class OneDriveApi JSONValue requestUploadStatus(const(char)[] uploadUrl) { checkAccessTokenExpired(); - return get(uploadUrl); + // when using microsoft graph the auth code is different + return get(uploadUrl, true); } private void redeemToken(const(char)[] authCode) @@ -231,14 +262,14 @@ final class OneDriveApi http.addRequestHeader("Authorization", accessToken); } - private JSONValue get(const(char)[] url) + private JSONValue get(const(char)[] url, bool skipToken = false) { scope(exit) http.clearRequestHeaders(); http.method = HTTP.Method.get; http.url = url; - addAccessTokenHeader(); + if (!skipToken) addAccessTokenHeader(); // HACK: requestUploadStatus auto response = perform(); - checkHttpCode(); + checkHttpCode(response); return response; } @@ -248,8 +279,8 @@ final class OneDriveApi http.method = HTTP.Method.del; http.url = url; addAccessTokenHeader(); - perform(); - checkHttpCode(); + auto response = perform(); + checkHttpCode(response); } private void download(const(char)[] url, string filename) @@ -274,7 +305,7 @@ final class OneDriveApi http.url = 
url; addAccessTokenHeader(); auto response = perform(patchData); - checkHttpCode(); + checkHttpCode(response); return response; } @@ -285,7 +316,7 @@ final class OneDriveApi http.url = url; addAccessTokenHeader(); auto response = perform(postData); - checkHttpCode(); + checkHttpCode(response); return response; } @@ -304,7 +335,7 @@ final class OneDriveApi http.onSend = data => file.rawRead(data).length; http.contentLength = file.size; auto response = perform(); - checkHttpCode(); + checkHttpCode(response); return response; } @@ -343,7 +374,15 @@ final class OneDriveApi } catch (CurlException e) { throw new OneDriveException(e.msg, e); } - return content.parseJSON(); + JSONValue json; + try { + json = content.parseJSON(); + } catch (JSONException e) { + e.msg ~= "\n"; + e.msg ~= content; + throw e; + } + return json; } private void checkHttpCode() @@ -352,4 +391,45 @@ final class OneDriveApi throw new OneDriveException(http.statusLine.code, http.statusLine.reason); } } + + private void checkHttpCode(ref const JSONValue response) + { + if (http.statusLine.code / 100 != 2) { + throw new OneDriveException(http.statusLine.code, http.statusLine.reason, response); + } + } +} + +unittest +{ + string configDirName = expandTilde("~/.config/onedrive"); + auto cfg = new config.Config(configDirName); + cfg.init(); + OneDriveApi onedrive = new OneDriveApi(cfg); + onedrive.init(); + std.file.write("/tmp/test", "test"); + + // simpleUpload + auto item = onedrive.simpleUpload("/tmp/test", "/test"); + try { + item = onedrive.simpleUpload("/tmp/test", "/test"); + } catch (OneDriveException e) { + assert(e.httpStatusCode == 409); + } + try { + item = onedrive.simpleUpload("/tmp/test", "/test", "123"); + } catch (OneDriveException e) { + assert(e.httpStatusCode == 412); + } + item = onedrive.simpleUpload("/tmp/test", "/test", item["eTag"].str); + + // deleteById + try { + onedrive.deleteById(item["id"].str, "123"); + } catch (OneDriveException e) { + assert(e.httpStatusCode == 412); + } + onedrive.deleteById(item["id"].str, item["eTag"].str); + + onedrive.http.shutdown(); } diff --git a/src/selective.d b/src/selective.d new file mode 100644 index 00000000..733a6aa6 --- /dev/null +++ b/src/selective.d @@ -0,0 +1,72 @@ +import std.algorithm; +import std.array; +import std.file; +import std.path; +import std.regex; +import std.stdio; +import util; + +final class SelectiveSync +{ + private string[] paths; + private Regex!char mask; + + void load(string filepath) + { + if (exists(filepath)) { + paths = File(filepath) + .byLine() + .map!(a => buildNormalizedPath(a)) + .filter!(a => a.length > 0) + .array; + } + } + + void setMask(const(char)[] mask) + { + this.mask = wild2regex(mask); + } + + bool isNameExcluded(string name) + { + return !name.matchFirst(mask).empty; + } + + bool isPathExcluded(string path) + { + return .isPathExcluded(path, paths); + } +} + +// test if the given path is not included in the allowed paths +// if there are no allowed paths always return false +private bool isPathExcluded(string path, string[] allowedPaths) +{ + // always allow the root + if (path == ".") return false; + // if there are no allowed paths always return false + if (allowedPaths.empty) return false; + + path = buildNormalizedPath(path); + foreach (allowed; allowedPaths) { + auto comm = commonPrefix(path, allowed); + if (comm.length == path.length) { + // the given path is contained in an allowed path + return false; + } + if (comm.length == allowed.length && path[comm.length] == '/') { + // the given path is a subitem of 
an allowed path + return false; + } + } + return true; +} + +unittest +{ + assert(isPathExcluded("Documents2", ["Documents"])); + assert(!isPathExcluded("Documents", ["Documents"])); + assert(!isPathExcluded("Documents/a.txt", ["Documents"])); + assert(isPathExcluded("Hello/World", ["Hello/John"])); + assert(!isPathExcluded(".", ["Documents"])); +} diff --git a/src/sqlite.d b/src/sqlite.d index 7df5c82d..e42b7dcd 100644 --- a/src/sqlite.d +++ b/src/sqlite.d @@ -64,6 +64,27 @@ struct Database } } + int getVersion() + { + int userVersion; + extern (C) int callback(void* user_version, int count, char** column_text, char** column_name) { + import std.c.stdlib: atoi; + *(cast(int*) user_version) = atoi(*column_text); + return 0; + } + int rc = sqlite3_exec(pDb, "PRAGMA user_version", &callback, &userVersion, null); + if (rc != SQLITE_OK) { + throw new SqliteException(ifromStringz(sqlite3_errmsg(pDb))); + } + return userVersion; + } + + void setVersion(int userVersion) + { + import std.conv: to; + exec("PRAGMA user_version=" ~ to!string(userVersion)); + } + Statement prepare(const(char)[] zSql) { Statement s; @@ -170,6 +191,10 @@ unittest value TEXT )"); + assert(db.getVersion() == 0); + db.setVersion(1); + assert(db.getVersion() == 1); + auto s = db.prepare("INSERT INTO test VALUES (?, ?)"); s.bind(1, "key1"); s.bind(2, "value1"); diff --git a/src/sync.d b/src/sync.d index 634b2dd7..6093539b 100644 --- a/src/sync.d +++ b/src/sync.d @@ -1,25 +1,34 @@ +import std.algorithm; +import std.net.curl: CurlTimeoutException; import std.exception: ErrnoException; -import std.algorithm, std.datetime, std.file, std.json, std.path, std.regex; +import std.datetime, std.file, std.json, std.path; +import std.regex; import std.stdio, std.string; -import config, itemdb, onedrive, upload, util; +import config, itemdb, onedrive, selective, upload, util; static import log; // threshold after which files will be uploaded using an upload session -private long thresholdFileSize = 10 * 2^^20; // 10 MiB +private long thresholdFileSize = 4 * 2^^20; // 4 MiB private bool isItemFolder(const ref JSONValue item) { - return (("folder" in item.object) !is null); + return ("folder" in item) != null; } private bool isItemFile(const ref JSONValue item) { - return (("file" in item.object) !is null); + return ("file" in item) != null; } private bool isItemDeleted(const ref JSONValue item) { - return (("deleted" in item.object) !is null); + // HACK: fix for https://github.com/skilion/onedrive/issues/157 + return ("deleted" in item) || ("fileSystemInfo" !in item); +} + +private bool isItemRoot(const ref JSONValue item) +{ + return ("root" in item) != null; } private bool testCrc32(string path, const(char)[] crc32) @@ -49,23 +58,22 @@ final class SyncEngine private Config cfg; private OneDriveApi onedrive; private ItemDatabase itemdb; - private Regex!char skipDir, skipFile; private UploadSession session; + private SelectiveSync selectiveSync; // token representing the last status correctly synced private string statusToken; // list of items to skip while applying the changes private string[] skippedItems; // list of items to delete after the changes has been downloaded - private string[] pathsToDelete; + private string[] idsToDelete; - this(Config cfg, OneDriveApi onedrive, ItemDatabase itemdb) + this(Config cfg, OneDriveApi onedrive, ItemDatabase itemdb, SelectiveSync selectiveSync) { - assert(onedrive && itemdb); + assert(onedrive && itemdb && selectiveSync); this.cfg = cfg; this.onedrive = onedrive; this.itemdb = itemdb; - skipDir = 
wild2regex(cfg.getValue("skip_dir")); - skipFile = wild2regex(cfg.getValue("skip_file")); + this.selectiveSync = selectiveSync; session = UploadSession(onedrive, cfg.uploadStateFilePath); } @@ -91,22 +99,42 @@ final class SyncEngine try { JSONValue changes; do { - changes = onedrive.viewChangesByPath("/", statusToken); + // get changes from the server + try { + changes = onedrive.viewChangesByPath(".", statusToken); + } catch (OneDriveException e) { + if (e.httpStatusCode == 410) { + log.log("Status token expired, resyncing"); + statusToken = null; + continue; + } + else { + throw e; + } + } foreach (item; changes["value"].array) { applyDifference(item); } - statusToken = changes["@delta.token"].str; + // hack to reuse old code + string url; + if ("@odata.nextLink" in changes) url = changes["@odata.nextLink"].str; + if ("@odata.deltaLink" in changes) url = changes["@odata.deltaLink"].str; + auto c = matchFirst(url, r"(?:token=)([\w\d]+)"); + c.popFront(); // skip the whole match + statusToken = c.front; std.file.write(cfg.statusTokenFilePath, statusToken); - } while (("@odata.nextLink" in changes.object) !is null); + } while ("@odata.nextLink" in changes); } catch (ErrnoException e) { throw new SyncException(e.msg, e); } catch (FileException e) { throw new SyncException(e.msg, e); + } catch (CurlTimeoutException e) { + throw new SyncException(e.msg, e); } catch (OneDriveException e) { throw new SyncException(e.msg, e); } - // delete items in pathsToDelete - if (pathsToDelete.length > 0) deleteItems(); + // delete items in idsToDelete + if (idsToDelete.length > 0) deleteItems(); // empty the skipped items skippedItems.length = 0; assumeSafeAppend(skippedItems); @@ -116,21 +144,27 @@ final class SyncEngine { string id = item["id"].str; string name = item["name"].str; - string eTag = item["eTag"].str; - string parentId = item["parentReference"]["id"].str; - // HACK: recognize the root directory - if (name == "root" && parentId[$ - 1] == '0' && parentId[$ - 2] == '!') { - parentId = null; + log.vlog(id, " ", name); + + // eTag and parentId do not exists for the root in OneDrive Biz + string eTag, parentId; + if (!isItemRoot(item)) { + eTag = item["eTag"].str; + parentId = item["parentReference"]["id"].str; } // skip unwanted items early if (skippedItems.find(parentId).length != 0) { + log.vlog("Filtered out"); + skippedItems ~= id; + return; + } + if (selectiveSync.isNameExcluded(name)) { + log.vlog("Filtered out"); skippedItems ~= id; return; } - - log.vlog(id, " ", name); // rename the local item if it is unsynced and there is a new version of it Item oldItem; @@ -145,50 +179,36 @@ final class SyncEngine } } + // check if the item is to be deleted + if (isItemDeleted(item)) { + log.vlog("The item is marked for deletion"); + if (cached) idsToDelete ~= id; + return; + } + // compute the path of the item string path = "."; if (parentId) { path = itemdb.computePath(parentId) ~ "/" ~ name; - } - - ItemType type; - if (isItemDeleted(item)) { - log.vlog("The item is marked for deletion"); - if (cached) { - itemdb.deleteById(id); - pathsToDelete ~= oldPath; - } - return; - } else if (isItemFile(item)) { - type = ItemType.file; - if (!path.matchFirst(skipFile).empty) { - log.vlog("Filtered out"); - return; - } - } else if (isItemFolder(item)) { - type = ItemType.dir; - if (!path.matchFirst(skipDir).empty) { - log.vlog("Filtered out"); + // selective sync + if (selectiveSync.isPathExcluded(path)) { + log.vlog("Filtered out: ", path); skippedItems ~= id; return; } + } + + ItemType type; + if 
(isItemFile(item)) { + type = ItemType.file; + } else if (isItemFolder(item)) { + type = ItemType.dir; } else { log.vlog("The item is neither a file nor a directory, skipping"); skippedItems ~= id; return; } - string cTag; - try { - cTag = item["cTag"].str; - } catch (JSONException e) { - // cTag is not returned if the Item is a folder - // https://dev.onedrive.com/resources/item.htm - cTag = ""; - } - - string mtime = item["fileSystemInfo"]["lastModifiedDateTime"].str; - string crc32; if (type == ItemType.file) { try { @@ -203,8 +223,8 @@ final class SyncEngine name: name, type: type, eTag: eTag, - cTag: cTag, - mtime: SysTime.fromISOExtString(mtime), + cTag: "cTag" in item ? item["cTag"].str : null, + mtime: SysTime.fromISOExtString(item["fileSystemInfo"]["lastModifiedDateTime"].str), parentId: parentId, crc32: crc32 }; @@ -217,9 +237,9 @@ final class SyncEngine // save the item in the db if (oldItem.id) { - itemdb.update(id, name, type, eTag, cTag, mtime, parentId, crc32); + itemdb.update(newItem); } else { - itemdb.insert(id, name, type, eTag, cTag, mtime, parentId, crc32); + itemdb.insert(newItem); } } @@ -283,8 +303,9 @@ final class SyncEngine case ItemType.file: if (isFile(path)) { SysTime localModifiedTime = timeLastModified(path); - import core.time: Duration; - item.mtime.fracSecs = Duration.zero; // HACK + // HACK: reduce time resolution to seconds before comparing + item.mtime.fracSecs = Duration.zero; + localModifiedTime.fracSecs = Duration.zero; if (localModifiedTime == item.mtime) { return true; } else { @@ -313,7 +334,9 @@ final class SyncEngine private void deleteItems() { log.vlog("Deleting files ..."); - foreach_reverse (path; pathsToDelete) { + foreach_reverse (id; idsToDelete) { + string path = itemdb.computePath(id); + itemdb.deleteById(id); if (exists(path)) { if (isFile(path)) { remove(path); @@ -328,8 +351,8 @@ final class SyncEngine } } } - pathsToDelete.length = 0; - assumeSafeAppend(pathsToDelete); + idsToDelete.length = 0; + assumeSafeAppend(idsToDelete); } // scan the given directory for differences @@ -355,20 +378,23 @@ final class SyncEngine private void uploadDifferences(Item item) { log.vlog(item.id, " ", item.name); + + // skip filtered items + if (selectiveSync.isNameExcluded(item.name)) { + log.vlog("Filtered out"); + return; + } string path = itemdb.computePath(item.id); + if (selectiveSync.isPathExcluded(path)) { + log.vlog("Filtered out: ", path); + return; + } + final switch (item.type) { case ItemType.dir: - if (!path.matchFirst(skipDir).empty) { - log.vlog("Filtered out"); - break; - } uploadDirDifferences(item, path); break; case ItemType.file: - if (!path.matchFirst(skipFile).empty) { - log.vlog("Filtered out"); - break; - } uploadFileDifferences(item, path); break; } @@ -401,8 +427,9 @@ final class SyncEngine if (exists(path)) { if (isFile(path)) { SysTime localModifiedTime = timeLastModified(path); - import core.time: Duration; - item.mtime.fracSecs = Duration.zero; // HACK + // HACK: reduce time resolution to seconds before comparing + item.mtime.fracSecs = Duration.zero; + localModifiedTime.fracSecs = Duration.zero; if (localModifiedTime != item.mtime) { log.vlog("The file last modified time has changed"); string id = item.id; @@ -440,26 +467,35 @@ final class SyncEngine private void uploadNewItems(string path) { + // skip unexisting symbolic links if (isSymlink(path) && !exists(readLink(path))) { return; } + + // skip filtered items + if (path != ".") { + if (selectiveSync.isNameExcluded(baseName(path))) { + return; + } + if 
(selectiveSync.isPathExcluded(path)) { + return; + } + } + if (isDir(path)) { - if (path.matchFirst(skipDir).empty) { - Item item; - if (!itemdb.selectByPath(path, item)) { - uploadCreateDir(path); - } - auto entries = dirEntries(path, SpanMode.shallow, false); - foreach (DirEntry entry; entries) { - uploadNewItems(entry.name); - } + Item item; + if (!itemdb.selectByPath(path, item)) { + uploadCreateDir(path); + } + // recursively traverse children + auto entries = dirEntries(path, SpanMode.shallow, false); + foreach (DirEntry entry; entries) { + uploadNewItems(entry.name); } } else { - if (path.matchFirst(skipFile).empty) { - Item item; - if (!itemdb.selectByPath(path, item)) { - uploadNewFile(path); - } + Item item; + if (!itemdb.selectByPath(path, item)) { + uploadNewFile(path); } } } @@ -469,7 +505,7 @@ final class SyncEngine log.log("Creating remote directory: ", path); JSONValue item = ["name": baseName(path).idup]; item["folder"] = parseJSON("{}"); - auto res = onedrive.createByPath(path.dirName ~ "/", item); + auto res = onedrive.createByPath(path.dirName, item); saveItem(res); } @@ -482,7 +518,6 @@ final class SyncEngine } else { response = session.upload(path, path); } - saveItem(response); string id = response["id"].str; string cTag = response["cTag"].str; SysTime mtime = timeLastModified(path).toUTC(); @@ -498,7 +533,7 @@ final class SyncEngine try { onedrive.deleteById(item.id, item.eTag); } catch (OneDriveException e) { - if (e.code == 404) log.log(e.msg); + if (e.httpStatusCode == 404) log.log(e.msg); else throw e; } itemdb.deleteById(item.id); @@ -515,31 +550,33 @@ final class SyncEngine saveItem(res); } - private void saveItem(JSONValue item) + private void saveItem(JSONValue jsonItem) { - string id = item["id"].str; ItemType type; - if (isItemFile(item)) { + if (isItemFile(jsonItem)) { type = ItemType.file; - } else if (isItemFolder(item)) { + } else if (isItemFolder(jsonItem)) { type = ItemType.dir; } else { assert(0); } - string name = item["name"].str; - string eTag = item["eTag"].str; - string cTag = item["cTag"].str; - string mtime = item["fileSystemInfo"]["lastModifiedDateTime"].str; - string parentId = item["parentReference"]["id"].str; - string crc32; + Item item = { + id: jsonItem["id"].str, + name: jsonItem["name"].str, + type: type, + eTag: jsonItem["eTag"].str, + cTag: "cTag" in jsonItem ? 
jsonItem["cTag"].str : null, + mtime: SysTime.fromISOExtString(jsonItem["fileSystemInfo"]["lastModifiedDateTime"].str), + parentId: jsonItem["parentReference"]["id"].str + }; if (type == ItemType.file) { try { - crc32 = item["file"]["hashes"]["crc32Hash"].str; + item.crc32 = jsonItem["file"]["hashes"]["crc32Hash"].str; } catch (JSONException e) { - // swallow exception + log.vlog("The hash is not available"); } } - itemdb.upsert(id, name, type, eTag, cTag, mtime, parentId, crc32); + itemdb.upsert(item); } void uploadMoveItem(string from, string to) @@ -576,7 +613,7 @@ final class SyncEngine try { uploadDeleteItem(item, path); } catch (OneDriveException e) { - if (e.code == 404) log.log(e.msg); + if (e.httpStatusCode == 404) log.log(e.msg); else throw e; } } diff --git a/src/util.d b/src/util.d index af51d0a2..f7512f22 100644 --- a/src/util.d +++ b/src/util.d @@ -1,6 +1,7 @@ import std.conv; import std.digest.crc; import std.file; +import std.net.curl; import std.path; import std.regex; import std.socket; @@ -54,7 +55,7 @@ Regex!char wild2regex(const(char)[] pattern) { string str; str.reserve(pattern.length + 2); - str ~= "/"; + str ~= "^"; foreach (c; pattern) { switch (c) { case '*': @@ -67,7 +68,7 @@ Regex!char wild2regex(const(char)[] pattern) str ~= "[^/]"; break; case '|': - str ~= "$|/"; + str ~= "$|^"; break; default: str ~= c; @@ -81,11 +82,25 @@ Regex!char wild2regex(const(char)[] pattern) // return true if the network connection is available bool testNetwork() { - try { - auto addr = new InternetAddress("login.live.com", 443); - auto socket = new TcpSocket(addr); - return socket.isAlive(); - } catch (SocketException) { - return false; - } + HTTP http = HTTP("https://login.microsoftonline.com"); + http.method = HTTP.Method.head; + return http.perform(ThrowOnError.no) == 0; +} + +// call globMatch for each string in pattern separated by '|' +bool multiGlobMatch(const(char)[] path, const(char)[] pattern) +{ + foreach (glob; pattern.split('|')) { + if (globMatch!(std.path.CaseSensitive.yes)(path, glob)) { + return true; + } + } + return false; +} + +unittest +{ + assert(multiGlobMatch(".hidden", ".*")); + assert(multiGlobMatch(".hidden", "file|.*")); + assert(!multiGlobMatch("foo.bar", "foo|bar")); }
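A few closing notes on the source changes. First, src/onedrive.d: the client now targets the Microsoft Graph endpoints, and `OneDriveException` carries the HTTP status code plus the JSON error body instead of a bare `code` field. A hedged sketch of how a caller can branch on it; the 400/401 handling mirrors what `init()` does in the diff, the rest is illustrative.
```
import std.json;
import std.stdio;
import onedrive; // OneDriveApi, OneDriveException from src/onedrive.d above

void probe(OneDriveApi onedrive)
{
    try {
        auto drive = onedrive.getDefaultDrive();
        writeln("default drive id: ", drive["id"].str);
    } catch (OneDriveException e) {
        if (e.httpStatusCode == 400 || e.httpStatusCode == 401) {
            // same condition init() uses above to fall back to authorize()
            writeln("Refresh token invalid, re-authorization needed");
        } else {
            writeln(e.error); // JSON error body from the service, when one was returned
        }
    }
}
```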
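Second, src/sync.d: the Graph delta responses no longer expose `@delta.token`, so the token is pulled out of `@odata.nextLink`/`@odata.deltaLink` with a regex, as the diff's own comment admits ("hack to reuse old code"). A small stand-alone illustration of that extraction; the URL is made up, real links come from the service.
```
import std.regex, std.stdio;

void main()
{
    // hypothetical deltaLink, shaped like the ones returned by the delta API
    string url = "https://graph.microsoft.com/v1.0/me/drive/root/delta?token=aTestToken123";
    auto c = matchFirst(url, r"(?:token=)([\w\d]+)");
    assert(!c.empty);
    c.popFront();      // skip the whole match, as applyDifferences() does above
    writeln(c.front);  // prints: aTestToken123
}
```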
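Third, src/selective.d: `sync_list` entries behave as allowed prefixes, so an entry keeps the path itself, its parents (so they can still be traversed) and everything beneath it, while unrelated paths are filtered out. A few extra cases in the style of the module's own unittest; the paths are hypothetical.
```
// extra cases for the private isPathExcluded(path, allowedPaths) helper above
unittest
{
    auto allowed = ["Documents", "Work/ProjectX"];
    assert(!isPathExcluded("Documents/report.odt", allowed)); // inside an allowed directory
    assert(!isPathExcluded("Work", allowed));                 // parent of an allowed path
    assert(isPathExcluded("Music", allowed));                 // not listed, skipped
    assert(isPathExcluded("WorkX", allowed));                 // a common prefix alone is not enough
}
```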