Compare commits
935 commits
Author | SHA1 | Date | |
---|---|---|---|
1a88d33be3 | |||
6282ec9e3b | |||
4a60654e3f | |||
a74ff589f8 | |||
651aa16844 | |||
85f99cb899 | |||
d712dd6093 | |||
43b0bed4cb | |||
fc5cbaf2e9 | |||
50d80d333e | |||
99271a45de | |||
fb0a5f0796 | |||
94ccb7a6d0 | |||
b2bf6aa475 | |||
268dcf9120 | |||
f5edb8b394 | |||
4b32dbf12d | |||
628a85dc93 | |||
3340fcca6a | |||
25cc361277 | |||
32a702eaaf | |||
30b108027d | |||
dfd1d1aa4a | |||
06420c9a0a | |||
c9fe8ad051 | |||
8045002c87 | |||
b0eb9ffcdf | |||
aa294b32f8 | |||
669a21747b | |||
3b8d7b3b1a | |||
64a3bfb033 | |||
16b751971f | |||
d960febb18 | |||
77684452aa | |||
4e5a32c210 | |||
56149c285c | |||
ae33616d0b | |||
3ef4a27fb7 | |||
b6d477b04e | |||
5945edc060 | |||
3aa191e3e4 | |||
6324f915fc | |||
8c6bdb984d | |||
d48bb34036 | |||
ce9b781c34 | |||
00ce7eed0e | |||
0838651327 | |||
cfb4933778 | |||
6607ba537b | |||
52b1276282 | |||
5b14157b09 | |||
00ba377c30 | |||
537b4338a6 | |||
eaeebbde8c | |||
c92d8470f4 | |||
b3829c1ef3 | |||
3acb6c13d2 | |||
c10870abd4 | |||
b9a239ace1 | |||
d043d5584f | |||
54e1ab0c16 | |||
ad20628dbe | |||
15233e485f | |||
7d5a5d33fd | |||
6bf0ea5deb | |||
3bca35d345 | |||
678add91f8 | |||
4db2ec02fc | |||
c22010095f | |||
44937abeb8 | |||
b2cea4b1dd | |||
72898345e7 | |||
817656ba90 | |||
1d8e0204bf | |||
19727d9c57 | |||
0b89a1fea0 | |||
87003c5923 | |||
f853b129cd | |||
62859609b7 | |||
5a7da61cbb | |||
12d54db1e4 | |||
c35ff20f5b | |||
95982c374e | |||
0ec1c95e4a | |||
941e1e215d | |||
361c3cf0a1 | |||
812f1a8d31 | |||
39a0048445 | |||
4a6c78a7a6 | |||
c73c0aa608 | |||
591b13f892 | |||
e3ce0ef6a8 | |||
8baec3c3fd | |||
ef8e5bed59 | |||
1eda30d29d | |||
7daf8797f8 | |||
4f15c35256 | |||
da967b06bc | |||
eaf097f7b0 | |||
fd0a028276 | |||
e08c89ad0b | |||
7fe242dfc0 | |||
1fdee4caaf | |||
f2b5a5543e | |||
1f4e0e143c | |||
d1642dab30 | |||
514df625b9 | |||
993770f49a | |||
a35c7c72f5 | |||
e2ed2d2b58 | |||
7a5cf8dd46 | |||
d035ee13cd | |||
a348750ec6 | |||
371b87f62a | |||
19308541eb | |||
738be2d150 | |||
5288f94ac4 | |||
3b7a06cdcd | |||
33423ee441 | |||
abc8b58ee7 | |||
d169dfc642 | |||
b16fe173e0 | |||
de701629a8 | |||
eba676c600 | |||
9b8a25f34c | |||
d4595beaba | |||
7b4f1a4d09 | |||
fd3a849d45 | |||
76752d0352 | |||
64b706f7c1 | |||
f561bd79a3 | |||
d57a695632 | |||
cc3b83afcb | |||
5ee30ff623 | |||
935ab764a4 | |||
b6c28fd2b3 | |||
c06e8ccfb6 | |||
aaf8505205 | |||
b2991fded9 | |||
496ba5fc3e | |||
11361c841f | |||
031c82922d | |||
a17a667e44 | |||
e84b16d7fc | |||
c82e90a140 | |||
90f9b31a8b | |||
87de4cf79c | |||
e422adb477 | |||
3591eededc | |||
0ab2955bd7 | |||
0e0fdacf7c | |||
ca984eba70 | |||
042949f1c1 | |||
2bab99b62c | |||
af86b55b35 | |||
dc8327e4b8 | |||
1935def140 | |||
c7eabab27b | |||
83a79077c6 | |||
39fba9394f | |||
bc5ad62fc0 | |||
2cdd7f4c63 | |||
7ebe0d7949 | |||
bc2728322a | |||
352a02052a | |||
4e1deec933 | |||
5958c54938 | |||
6c2e881314 | |||
cdea464952 | |||
a9aafabb79 | |||
c49446712b | |||
e008468686 | |||
0ebe66412a | |||
b7c7caa0b5 | |||
22fda0a286 | |||
8711074b57 | |||
2995dc66ae | |||
62701e5ef3 | |||
0fffb8fbc0 | |||
767352686f | |||
b6fee40939 | |||
ff8d137057 | |||
ddb9f1eae7 | |||
40743b5a86 | |||
de2bbb5564 | |||
9650ac2418 | |||
69d4a64456 | |||
ee2da78446 | |||
b38fd5c200 | |||
ee2c14c6cc | |||
b7aede6d30 | |||
0c8a24205d | |||
7139578af1 | |||
04c65f9b48 | |||
e08792da42 | |||
d4b733ae89 | |||
8a4931ccda | |||
680090a680 | |||
5506efb313 | |||
10448efd9a | |||
a134fa4a73 | |||
a83af761eb | |||
66a7b15feb | |||
473e273d13 | |||
446be3743c | |||
a52de36d63 | |||
834dc3fbc4 | |||
0db230f538 | |||
37bce6f82d | |||
f08039f511 | |||
8b30c00c81 | |||
3418a4a504 | |||
29b0ef0b98 | |||
5d320505c7 | |||
6bdc5bd3b7 | |||
937f7eaa2b | |||
cf92bc06f9 | |||
f26f03b0a9 | |||
3f0d606f5b | |||
8271adc6a7 | |||
566251da29 | |||
31fcf21f6d | |||
1843a9146d | |||
a5bd4e9133 | |||
3124b2dcf2 | |||
8dc0723640 | |||
07e9cedaed | |||
c4a7c1edbe | |||
d68c27fb4f | |||
21736f4029 | |||
8687deec18 | |||
ff78e84efd | |||
bb889f130e | |||
ba7e4905ff | |||
76bd6e26a5 | |||
7ff077efd8 | |||
c63b390f14 | |||
75405afa28 | |||
dfc87bd7d9 | |||
3c119d3b20 | |||
0158f356db | |||
c0e40e857e | |||
220a00bbdf | |||
e1faa90351 | |||
1ac5d87d98 | |||
8ba49fd24c | |||
2c71e9461e | |||
326fee1053 | |||
33a2de1888 | |||
c30fa4b681 | |||
f72f6c9262 | |||
6f53580583 | |||
61c8652449 | |||
9e87e870ab | |||
198f52394d | |||
9c01ac3151 | |||
8870603939 | |||
58d40c5174 | |||
a2d2ea93cb | |||
f4c0a79425 | |||
89d2085c69 | |||
df3d137772 | |||
dafc37e677 | |||
ecfd391b86 | |||
b210dcd12e | |||
1e2827ad1c | |||
2f47beab60 | |||
ac2e07526a | |||
2901534171 | |||
5d7e4532ef | |||
bfeeae91de | |||
540c2369f3 | |||
b0a1baeaf4 | |||
cceb3cccdd | |||
6febb13408 | |||
2f660ff7b0 | |||
4d25e68c65 | |||
afc5acbe96 | |||
f3f04be2dc | |||
413d703084 | |||
d99bd5a6ea | |||
b7fdff8e2a | |||
0bd9fe03ea | |||
43932e604a | |||
77a351360c | |||
b7781419ce | |||
350fae1fa7 | |||
a73c911a74 | |||
23e5af814f | |||
107c187602 | |||
30e66c0ed0 | |||
7857a5057f | |||
7120aee869 | |||
2b32c3a497 | |||
606ee52ad2 | |||
8a20275dd2 | |||
87393e639b | |||
398e790a5a | |||
f99f284791 | |||
3a1899cbd2 | |||
b08690f1ac | |||
72905b951f | |||
6d92bc8210 | |||
4b56a6103e | |||
1fbe1d34a7 | |||
34f7a379f6 | |||
7171b83f5b | |||
54a6575709 | |||
fc5d7f9327 | |||
e236c7cf12 | |||
65d289d2cd | |||
13b37094f7 | |||
6201938750 | |||
1d4a206683 | |||
761cf3eb87 | |||
16fdd928b6 | |||
1a5732a6b5 | |||
6a9715dc14 | |||
f246dbc6b1 | |||
3b6e38ad9d | |||
994b528dce | |||
6213f48778 | |||
e701ab009d | |||
bd56abc004 | |||
8b88a9a9d0 | |||
c8dfba2ae7 | |||
7143b27296 | |||
ca053213ec | |||
80d433d982 | |||
1e864f201d | |||
cdf5d5eac6 | |||
b0771f79e1 | |||
c9ca00466a | |||
b7eedbd8cd | |||
068f8a6917 | |||
3d5957986a | |||
615aa05b42 | |||
97a468e606 | |||
6c64bec287 | |||
94cc6ee3d6 | |||
1203aebeb7 | |||
b1b814f10a | |||
5c8083001a | |||
fd04d632d6 | |||
71601d3240 | |||
6b20478635 | |||
0dcc76f9f1 | |||
037a6b43e1 | |||
6bec0ddc64 | |||
10606293f7 | |||
b8717fbc53 | |||
cefb3169fa | |||
13d6522be8 | |||
7786a17915 | |||
4ef6c7e331 | |||
42ef8d72b5 | |||
f7eb9f9001 | |||
f107427480 | |||
81e40ba76d | |||
1078795d6f | |||
5837042a26 | |||
424454b292 | |||
cdc6b0b17c | |||
a890951aa4 | |||
0774a8fd06 | |||
e1e20ce3df | |||
38892e763a | |||
e10073ae00 | |||
07a9a94969 | |||
42b7945d9c | |||
cd7b7b0b68 | |||
3ea16b619f | |||
3ba8bb165a | |||
f37fdda836 | |||
1bfd995153 | |||
99a0f4bfed | |||
a971dd0844 | |||
66a172d13a | |||
e163d570d7 | |||
2c253362bf | |||
2babba5b6c | |||
3b87602a81 | |||
6b0b6a66f7 | |||
525504f91a | |||
15a84627e1 | |||
1c6dd424d7 | |||
cef00e74b2 | |||
bbba32cc49 | |||
96d3e68498 | |||
be1757148f | |||
44b95f595a | |||
f24cdf470b | |||
6417810cc0 | |||
62a2b4f393 | |||
8c6d1d19b8 | |||
771a93242f | |||
3341e30ffa | |||
ba94b92d8a | |||
464398dd04 | |||
9b44bb1e96 | |||
5349340818 | |||
3b18638a63 | |||
335eab326d | |||
4a68eb40b7 | |||
545fdcd692 | |||
f21c6f1312 | |||
88f78c163f | |||
48fc64a247 | |||
a4a5a5f267 | |||
ac4da2e2ba | |||
8c23efc6c5 | |||
b163e8eff2 | |||
0d5b004696 | |||
17d5f97cb0 | |||
550ab4b6dd | |||
aa533ef5a4 | |||
b59596523e | |||
b57bb3fde5 | |||
dd989b73d1 | |||
ac02b768d3 | |||
cc2b8085f5 | |||
68cc51ab4d | |||
aa47f1119f | |||
cb57213fb9 | |||
6405c9daeb | |||
313af18671 | |||
2c80033c89 | |||
7a4abfff40 | |||
57d6753f80 | |||
d2193c97f5 | |||
b4c3da6e10 | |||
363f64df36 | |||
ff25371b68 | |||
106e114929 | |||
b365ec6322 | |||
cf4ee5017f | |||
e81de988ee | |||
fe86dd1809 | |||
9ca2f6ae26 | |||
a69e405435 | |||
9f60b95eed | |||
11e094c0cf | |||
a3c4ba1b3b | |||
fed9a393a7 | |||
1ddf622e2b | |||
b4328db2ef | |||
2f35a33f4f | |||
098d6c44e2 | |||
d6509e77b3 | |||
d64f0f5b00 | |||
23f6223816 | |||
e4d7821026 | |||
2d15e97a41 | |||
a3a27266c6 | |||
e44e6b4233 | |||
7684c74600 | |||
3604dc6984 | |||
38e9cc11fd | |||
bfa6ff41bf | |||
6b7f276bb5 | |||
19cf656c19 | |||
0c12ed7df4 | |||
4809f91655 | |||
1364f36aba | |||
0c770efa96 | |||
4ddad0c350 | |||
bcd710a087 | |||
1fb6c66453 | |||
05fc734440 | |||
bc1ce1192b | |||
fa3eb8bc1a | |||
65202fdb04 | |||
431678753d | |||
2099ec2994 | |||
74d7198025 | |||
355689e986 | |||
2a5af086a3 | |||
ff80e99735 | |||
03f7afe156 | |||
0f8921769b | |||
880631982c | |||
5907d9eb0a | |||
786446f79b | |||
2fe0574bcf | |||
928ad9512f | |||
a87897ba3f | |||
76ce1036a4 | |||
beb737e903 | |||
f6002311cd | |||
2b7e36d57f | |||
25fd9a1b00 | |||
810c47fcfe | |||
8c0d43183d | |||
48292a0858 | |||
e23e06305a | |||
ab5599b1d1 | |||
790c75b5cc | |||
578d560a62 | |||
d7348ed868 | |||
e91c131cfb | |||
e3e4e449ef | |||
41d92ddab5 | |||
d653fc7ee2 | |||
35e1800aff | |||
57abd74114 | |||
a117ddbeff | |||
9cc72c2396 | |||
650cb97d3f | |||
d8bfe846c2 | |||
93163051f5 | |||
1c10effb9b | |||
cb352b1329 | |||
3b85d574e0 | |||
9e020f05cc | |||
6fcc5f8196 | |||
42bc0bfd82 | |||
7275b2febb | |||
e321c373fd | |||
46c91bac6e | |||
203da2dfb8 | |||
df361b261b | |||
b7f69e2541 | |||
8376a360cc | |||
ed1d13bc1a | |||
d33443cb4f | |||
fa5c5f79a5 | |||
45dba95557 | |||
e5c1c92f77 | |||
f0c5ca95c6 | |||
aee2052f1e | |||
f9c3ccc311 | |||
0096e7efce | |||
b9dd8c754d | |||
cd07414e7b | |||
fc90ccb425 | |||
060a42ab77 | |||
206ab8f516 | |||
9933d459ed | |||
2f22d38cb0 | |||
2529b99265 | |||
503f360c87 | |||
cb06395a46 | |||
eda3d3c4bb | |||
4a4d44667e | |||
403cfa8d85 | |||
dcc0322cac | |||
bc9f2cf893 | |||
a61d291340 | |||
73f71f6d52 | |||
c05243444f | |||
a0f4f9ddab | |||
628c58ad58 | |||
7f267a5a74 | |||
1b28dc6040 | |||
41ad6f231f | |||
77b1e4bf6e | |||
f14c068ebe | |||
ccbeae4942 | |||
183567eba6 | |||
f9ea31476f | |||
2dc5aa3632 | |||
476ea51373 | |||
2a7e48b823 | |||
d3aa0032bc | |||
7ca1ff95a8 | |||
1c555c3180 | |||
7a18393ed0 | |||
31629e697f | |||
c41adda7a0 | |||
9ae6f8e808 | |||
0170884bd1 | |||
bed6e6bfe1 | |||
50fb2d8bfa | |||
3708828806 | |||
caf51886cf | |||
7170498a95 | |||
e1c81861e8 | |||
feb3d96a7e | |||
6f85d539da | |||
2d450a3a8f | |||
aea2786be5 | |||
896a4676f5 | |||
edd365d21b | |||
1e8395fb73 | |||
4cb0ebbfde | |||
b1fcb814c3 | |||
63ce6a7d10 | |||
1bb4ed765c | |||
2741d66076 | |||
a4d72fc73c | |||
27d0b9641e | |||
76b80e953e | |||
601a3630e7 | |||
afff84c2e4 | |||
fc6e2d9cfe | |||
1f6c568a44 | |||
2321518faa | |||
14cd47b56f | |||
28b37007f2 | |||
f2fe718389 | |||
8c7f664199 | |||
b81b9692c7 | |||
38aa7f063c | |||
262817a630 | |||
a7722229b1 | |||
5c2ad041bd | |||
2217fc2b97 | |||
cf22ec2eb5 | |||
02a7781aaa | |||
8549719df3 | |||
217d72486d | |||
aaa26a4e7e | |||
07c0d16ee9 | |||
c50ad4bfce | |||
aebcffc79a | |||
5228a30799 | |||
d15d71eeb4 | |||
b4d76435dd | |||
4fcab8ff56 | |||
8ecd1989b4 | |||
133c45e545 | |||
db5ceccffb | |||
387bd53438 | |||
34ed1608e8 | |||
0445ae03d3 | |||
c876b9c575 | |||
10adb43a09 | |||
b301ba07e3 | |||
7b4ba2e5d2 | |||
7ba59e0b1d | |||
df3fba1be0 | |||
12947d160f | |||
9b3179540f | |||
e1be2b1e55 | |||
fec892b038 | |||
e99872eaa2 | |||
585b35724f | |||
1ab36f4014 | |||
cfdb295823 | |||
44576ea4ca | |||
cdad8631ec | |||
9d0a6539bd | |||
6a8d575941 | |||
808a2f331b | |||
5da97d0757 | |||
57cf29783a | |||
8b969deae8 | |||
94df009b7d | |||
2ef4f88458 | |||
0fef942071 | |||
ca9a6b6ac0 | |||
a1f4fec3cf | |||
3aa4d4965e | |||
fd4547376c | |||
72467e32ca | |||
2abc05af1d | |||
e32b8b3c7f | |||
230be5a23d | |||
42d8c454e2 | |||
ef006b3c09 | |||
69f343b5ed | |||
25b030ec01 | |||
0424caaaf8 | |||
90a28e0f6c | |||
e8c3c236ef | |||
8ca71142df | |||
ba80dbb4c3 | |||
5cd860a398 | |||
219fce2109 | |||
eb04ad5ffa | |||
ad0daf2df5 | |||
bc3853bc4f | |||
488f756ecd | |||
af07dea6f9 | |||
437fe4d9e4 | |||
893e235151 | |||
92c4c81bf8 | |||
2ef503f9e3 | |||
4e97f8810d | |||
a59490a79e | |||
100e644ae0 | |||
8f71043f23 | |||
3171318503 | |||
f111eab71a | |||
79f4b22f3e | |||
21e504751b | |||
b63be18f35 | |||
b41c5eb05d | |||
b23390c880 | |||
3f5d82c63e | |||
56644c4cbf | |||
66764f279d | |||
170198c557 | |||
68c6c5d75d | |||
c16ba1ba2f | |||
4d62bd53d5 | |||
54f0ccdca6 | |||
ab7e321726 | |||
705f78bec8 | |||
c8a08689ce | |||
64d91b844a | |||
712a101069 | |||
f39621c2da | |||
80675aa8b4 | |||
920ea5905c | |||
ed19175e2c | |||
1d02b3cb7a | |||
366f7a33d1 | |||
1b233ae48a | |||
4a4611ccea | |||
2f3804a3c0 | |||
15a2749382 | |||
14fb1ca637 | |||
7588774efb | |||
d939d38c4f | |||
bb2986b222 | |||
cc7bcb1be8 | |||
db07f4eb6c | |||
096120270d | |||
10dd0300e3 | |||
97ee66e9c0 | |||
749c52651a | |||
687080d435 | |||
3d7843b237 | |||
f1211a20d1 | |||
bbf35f12ea | |||
56f63439e8 | |||
2f90ee2f92 | |||
4aa8c1e267 | |||
33de495ec4 | |||
284e272e8f | |||
7cf2912d2d | |||
aa2e2245f0 | |||
e4ab9e7316 | |||
76dfb910ca | |||
ef6af8e5bc | |||
c617185c14 | |||
8d4a35e34d | |||
bac9ab41ae | |||
239085f1fc | |||
a989fbfedf | |||
20eda60fdf | |||
f779881ff9 | |||
3e35e20cc5 | |||
0280625898 | |||
caec1cb8cb | |||
bac5d6f1a9 | |||
af43b77100 | |||
fbad4b4835 | |||
daec620c1a | |||
2c7fec1cf1 | |||
c8e47a47ff | |||
3b533894a2 | |||
ea26e4d830 | |||
c6923cdf29 | |||
25d2f7284f | |||
521cf33001 | |||
a2889098e9 | |||
ea22d8fef5 | |||
e5a9747d7b | |||
df39646120 | |||
f12bcfb650 | |||
92d2f14f15 | |||
ea2e4a98bf | |||
7c2b3e853d | |||
f93ed007bc | |||
6a6b8f128f | |||
abb82868e4 | |||
f38b13dd00 | |||
4444b9cd8a | |||
770456fe13 | |||
de0f36007e | |||
62c5179653 | |||
0bd714d8a8 | |||
ebad4aed89 | |||
b7cc540c2c | |||
a433b44b56 | |||
7aad5903e2 | |||
79cc599057 | |||
002f9b7aec | |||
f5c3e7643d | |||
8f28daec8a | |||
de98fe8492 | |||
0dfc165534 | |||
f2e007a1d7 | |||
6b8b51a7cc | |||
58c2805237 | |||
3aff2e8251 | |||
ab918dd82d | |||
362bf35688 | |||
99c0267c7a | |||
659283d7f9 | |||
c98d096f25 | |||
ac7789df14 | |||
a9ba0204c5 | |||
4c3b959bf6 | |||
e849eb3de4 | |||
98624267c5 | |||
3f08e271af | |||
efaaaadce2 | |||
b0dac7bf42 | |||
ba834368ca | |||
2d54f3e4d1 | |||
e5b500a041 | |||
388a3bb102 | |||
09f328e63f | |||
b668ecef71 | |||
8cabe38e49 | |||
8b7d772680 | |||
d02be44b9a | |||
d49a598f4b | |||
a71b9bf789 | |||
97193107ee | |||
7a47df7a38 | |||
f2f272a8f8 | |||
a635cab0ad | |||
25b09775ab | |||
7c9ea56293 | |||
18611c833f | |||
0d3b891177 | |||
808e0254e4 | |||
4e12496dfe | |||
460596dd36 | |||
51615bff0b | |||
442e3dee16 | |||
ff822a9ab9 | |||
b15948ed6a | |||
493520ad71 | |||
1f955c82be | |||
8b8cf9c777 | |||
1c6fd5df39 | |||
61c40c6cb1 | |||
a61009bae1 | |||
04c1346578 | |||
8d1818d5a2 | |||
ebef1d88f3 | |||
5d3d46b39b | |||
7719371cdd | |||
b437b9607f | |||
10475c9d2c | |||
27cc2c7922 | |||
54f0279a5c | |||
1172432390 | |||
14c36d6c91 | |||
16da1b4994 | |||
af2ee9e540 | |||
5372d8a9fa | |||
76803dc3d1 | |||
7532c9d8cc | |||
c27ff936fb | |||
83cc2e419b | |||
6959fe86ae | |||
c91dcc3e49 | |||
77b32b6a5e | |||
1d902a5beb | |||
d527b7ef26 | |||
bf43ecda9e | |||
a31fdc6a6f | |||
b75163245f | |||
e8c8e525dd | |||
1d3bf0d360 | |||
0759767916 | |||
bd0790eece | |||
6836f0c744 | |||
79a9815183 | |||
84c69a5234 | |||
9d0c0aea39 | |||
58541f73dc | |||
2e8bc828b6 | |||
8d5ba88c38 | |||
7d6858d86a | |||
1fa7e5f20c | |||
ddc5d602da | |||
46ef8ed376 | |||
388cc1ded5 | |||
bc059eaf0a | |||
f14fbf22a7 | |||
b9890ae2e5 | |||
f7a50c0d97 | |||
fcf1ef2e1f | |||
2553366a89 | |||
7a6b507367 | |||
2ec6aa3df6 | |||
5d388581b1 | |||
1060d85118 | |||
0193f37fed | |||
594bfcec54 | |||
37131ba697 | |||
dfd3045ce4 | |||
025a3b2878 | |||
bbe672bff1 | |||
ddda7e0527 | |||
7d522583ef | |||
53df30810e | |||
b46d3d62b1 | |||
91aaf91aa6 | |||
6a62406db7 | |||
3608dcc4c2 | |||
87cd18379d | |||
06aa42f6d1 | |||
0562c52492 | |||
cc6cbf5ac7 | |||
487cfab8d1 | |||
026809d4d1 | |||
b10e641fe9 | |||
29808c905c | |||
56b63acede | |||
33dc260fd2 | |||
cdb11326b9 | |||
9817104ca1 | |||
c24e4b0b1e | |||
90df7e4c9c | |||
0d05274f32 | |||
11138602ed | |||
859b5510a0 | |||
a26a69ce9f | |||
c5ee62efd8 | |||
23e79d39e1 | |||
cbd3ecbf71 | |||
fc162b94e2 | |||
eaedfac34a | |||
aeee9c8d16 | |||
e70722862c | |||
f785396643 | |||
df7ff4f7b7 | |||
8a98bf3e30 | |||
7f5b5d981b | |||
86ea576144 | |||
11e477f045 | |||
eb2cbfd0a8 | |||
76ac8d3a24 | |||
79d86e0cb9 | |||
1c23d01007 | |||
9680127409 | |||
d9e10799ce |
2
.github/FUNDING.yml
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
---
|
||||
github: abraunegg
|
49
.github/ISSUE_TEMPLATE/bug_report.md
vendored
|
@ -1,49 +0,0 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
|
||||
---
|
||||
|
||||
### Bug Report Details ###
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**Application and Operating System Details:**
|
||||
- OS: Output of `uname -a` & provide your OS & version (CentOS 6.x, Ubuntu 18.x etc)
|
||||
- Are you using a headless system (no gui) or with a gui installed?
|
||||
- Application version: Output of `onedrive --version`
|
||||
- OneDrive Account Type
|
||||
- DMD or LDC compiler version `dmd --version` or `ldmd2 --version`
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior if not causing an application crash:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
If issue is replicated by a specific 'file' or 'path' please archive the file and path tree & email to support@mynas.com.au
|
||||
|
||||
**Complete Verbose Log Output**
|
||||
A clear and full log of the problem when running the application in the following manner (ie, not in monitor mode):
|
||||
```
|
||||
onedrive --synchronize --verbose <any of your other needed options>
|
||||
```
|
||||
|
||||
Run the application in a separate terminal window or SSH session and provide the entire application output including the error & crash. When posing the logs, Please format log output to make it easier to read. See https://guides.github.com/features/mastering-markdown/ for more details.
|
||||
|
||||
Application Log Output:
|
||||
```
|
||||
Verbose console log output goes here
|
||||
```
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
|
||||
### Bug Report Checklist ###
|
||||
- [ ] Detailed description
|
||||
- [ ] Reproduction steps (if applicable)
|
||||
- [ ] Verbose Log Output
|
176
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
|
@ -0,0 +1,176 @@
|
|||
name: "Bug Report"
|
||||
description: Create a Bug Report to help us fix your issue
|
||||
title: "Bug: "
|
||||
labels: ["Bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Note:** Before submitting a bug report, please ensure you are running the latest 'onedrive' client as built from 'master' and compile by using the latest available DMD or LDC compiler. Refer to the the [INSTALL](https://github.com/abraunegg/onedrive/blob/master/docs/INSTALL.md) document on how to build the client for your system.
|
||||
|
||||
- type: textarea
|
||||
id: bugDescription
|
||||
attributes:
|
||||
label: Describe the bug
|
||||
description: |
|
||||
Add a clear and concise description of what you think the bug is.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: operatingSystemDetails
|
||||
attributes:
|
||||
label: Operating System Details
|
||||
description: |
|
||||
* What is your Operating System (`uname -a`)
|
||||
* Output of: (`cat /etc/redhat-release`) or (`lsb_release -a`)
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: installMethod
|
||||
attributes:
|
||||
label: Client Installation Method
|
||||
description: |
|
||||
How did you install the client?
|
||||
multiple: false
|
||||
options:
|
||||
- From Source
|
||||
- From Distribution Package
|
||||
- From 3rd Party Source (PPA, OpenSuSE Build Service etc)
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: accountType
|
||||
attributes:
|
||||
label: OneDrive Account Type
|
||||
description: |
|
||||
What is your OneDrive Account Type?
|
||||
multiple: false
|
||||
options:
|
||||
- Personal
|
||||
- Business | Office365
|
||||
- SharePoint
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: applicationVersion
|
||||
attributes:
|
||||
label: What is your OneDrive Application Version
|
||||
description: |
|
||||
* What is your 'onedrive' client version (`onedrive --version`)?
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: applicationConfig
|
||||
attributes:
|
||||
label: What is your OneDrive Application Configuration
|
||||
description: |
|
||||
* What is your Application Configuration (`onedrive --display-config`)?
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: curlVersion
|
||||
attributes:
|
||||
label: What is your 'curl' version
|
||||
description: |
|
||||
* What is your output of (`curl --version`)?
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: syncdirLocation
|
||||
attributes:
|
||||
label: Where is your 'sync_dir' located
|
||||
description: |
|
||||
Is your 'sync_dir' a local directory or on a network mount point?
|
||||
multiple: false
|
||||
options:
|
||||
- Local
|
||||
- Network
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: mountPoints
|
||||
attributes:
|
||||
label: What are all your system 'mount points'
|
||||
description: |
|
||||
* What is your output of (`mount`)?
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: partitionTypes
|
||||
attributes:
|
||||
label: What are all your local file system partition types
|
||||
description: |
|
||||
* What is your output of (`lsblk -f`)?
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: usageDetails
|
||||
attributes:
|
||||
label: How do you use 'onedrive'
|
||||
description: |
|
||||
Explain your entire configuration setup - is the OneDrive folder shared with any other system, shared with any other platform at the same time, is the OneDrive account you use shared across multiple systems / platforms / Operating Systems and in use at the same time
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: howToReproduce
|
||||
attributes:
|
||||
label: Steps to reproduce the behaviour
|
||||
description: |
|
||||
List all the steps required to reproduce the issue.
|
||||
|
||||
If issue is replicated by a specific 'file' or 'path' please archive the file and path tree & email to support@mynas.com.au
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: applicationVerboseLog
|
||||
attributes:
|
||||
label: Complete Verbose Log Output
|
||||
description: |
|
||||
A clear and full log of the problem when running the application in the following manner (ie, not in monitor mode): (`onedrive --synchronize --verbose <any of your other needed options>`)
|
||||
|
||||
Run the application in a separate terminal window or SSH session and provide the entire application output including the error & crash.
|
||||
|
||||
Please also generate a full debug log whilst reproducing the issue as per [https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support) and email to support@mynas.com.au
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: |
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
- type: textarea
|
||||
id: otherLogs
|
||||
attributes:
|
||||
label: Other Log Information or Details
|
||||
description: |
|
||||
If applicable, add the relevant output from `dmesg` or similar.
|
||||
render: shell
|
||||
|
||||
- type: textarea
|
||||
id: additionalContext
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other relevant additional context for the problem.
|
||||
|
5
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: "Have a question?"
|
||||
url: https://github.com/abraunegg/onedrive/discussions
|
||||
about: "Please do not raise a GitHub issue for asking questions - please post your question under GitHub Discussions. When opening a new discussion, please include all relevant details such as including your application version and how you installed the client. Thanks in advance for helping us keep the issue tracker clean!"
|
17
.github/ISSUE_TEMPLATE/feature_request.md
vendored
|
@ -1,17 +0,0 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
45
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
|
@ -0,0 +1,45 @@
|
|||
name: "Feature Request"
|
||||
description: Suggest an idea for this project
|
||||
title: "Feature Request: "
|
||||
labels: ["Feature Request"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Suggest an idea for this project
|
||||
|
||||
- type: textarea
|
||||
id: featureProblem
|
||||
attributes:
|
||||
label: Is your feature request related to a problem? Please describe.
|
||||
description: |
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when ...
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: featureSolution
|
||||
attributes:
|
||||
label: Describe the solution you'd like
|
||||
description: |
|
||||
A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: featureAlternatives
|
||||
attributes:
|
||||
label: Describe alternatives you've considered
|
||||
description: |
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: additionalContext
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context or information about the feature request here.
|
||||
validations:
|
||||
required: false
|
96
.github/workflows/docker.yaml
vendored
Normal file
|
@ -0,0 +1,96 @@
|
|||
name: Build Docker Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
tags: [ 'v*' ]
|
||||
pull_request:
|
||||
# Comment these out to force a test build on a PR
|
||||
branches:
|
||||
- master
|
||||
types: [closed]
|
||||
|
||||
env:
|
||||
DOCKER_HUB_SLUG: driveone/onedrive
|
||||
|
||||
jobs:
|
||||
build:
|
||||
# Comment this out to force a test build on a PR
|
||||
if: (!(github.event.action == 'closed' && github.event.pull_request.merged != true))
|
||||
|
||||
# Build runs on
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
flavor: [ fedora, debian, alpine ]
|
||||
include:
|
||||
- flavor: fedora
|
||||
dockerfile: ./contrib/docker/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- flavor: debian
|
||||
dockerfile: ./contrib/docker/Dockerfile-debian
|
||||
platforms: linux/386,linux/amd64,linux/arm64,linux/arm/v7
|
||||
- flavor: alpine
|
||||
dockerfile: ./contrib/docker/Dockerfile-alpine
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: marcelcoding/ghaction-docker-meta@v2
|
||||
with:
|
||||
tag-edge: true
|
||||
images: |
|
||||
${{ env.DOCKER_HUB_SLUG }}
|
||||
tag-semver: |
|
||||
{{version}}
|
||||
{{major}}.{{minor}}
|
||||
flavor: ${{ matrix.flavor }}
|
||||
main-flavor: ${{ matrix.flavor == 'debian' }}
|
||||
|
||||
- uses: docker/setup-qemu-action@v2
|
||||
with:
|
||||
image: tonistiigi/binfmt:latest
|
||||
platforms: all
|
||||
if: matrix.platforms != 'linux/amd64'
|
||||
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ matrix.flavor }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-${{ matrix.flavor }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
if: github.event_name != 'pull_request'
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Build and Push to Docker
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta.outputs.labels }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-new
|
||||
|
||||
- name: Move cache
|
||||
run: |
|
||||
rm -rf /tmp/.buildx-cache
|
||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
33
.github/workflows/lock.yml
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
name: 'Lock Threads'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '19 0 * * *'
|
||||
|
||||
jobs:
|
||||
lock:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Lock Threads
|
||||
uses: dessant/lock-threads@v2.0.3
|
||||
with:
|
||||
github-token: ${{ secrets.LOCK_THREADS }}
|
||||
issue-lock-inactive-days: '7'
|
||||
issue-exclude-created-before: ''
|
||||
issue-exclude-labels: ''
|
||||
issue-lock-labels: ''
|
||||
issue-lock-comment: >
|
||||
This issue has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new issue for related bugs.
|
||||
issue-lock-reason: 'resolved'
|
||||
pr-lock-inactive-days: '7'
|
||||
pr-exclude-created-before: ''
|
||||
pr-exclude-labels: ''
|
||||
pr-lock-labels: ''
|
||||
pr-lock-comment: >
|
||||
This pull request has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new issue for related bugs.
|
||||
pr-lock-reason: ''
|
||||
process-only: ''
|
43
.github/workflows/testbuild.yaml
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
name: Test Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master" ]
|
||||
pull_request:
|
||||
branches: [ "master" ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
#runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update Image
|
||||
run: |
|
||||
sudo apt-get clean
|
||||
sudo apt-get update -y
|
||||
|
||||
- name: Install build-essential
|
||||
run: sudo apt install -y build-essential
|
||||
|
||||
- name: Install build-dependencies
|
||||
run: sudo apt install -y libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
|
||||
- name: Configure
|
||||
run: ./configure
|
||||
|
||||
- name: Compile
|
||||
run: make clean; make;
|
||||
|
||||
- name: Install
|
||||
run: sudo make install
|
||||
|
||||
- name: Run
|
||||
run: onedrive --version
|
9
.gitignore
vendored
|
@ -1,6 +1,11 @@
|
|||
.*
|
||||
onedrive
|
||||
onedrive.1
|
||||
onedrive.o
|
||||
onedrive.service
|
||||
onedrive@.service
|
||||
version
|
||||
Makefile
|
||||
config.log
|
||||
config.status
|
||||
autom4te.cache/
|
||||
contrib/pacman/PKGBUILD
|
||||
contrib/spec/onedrive.spec
|
||||
|
|
181
.travis-ci.sh
|
@ -1,181 +0,0 @@
|
|||
#!/bin/bash
|
||||
# Based on a test script from avsm/ocaml repo https://github.com/avsm/ocaml
|
||||
# Adapted from https://www.tomaz.me/2013/12/02/running-travis-ci-tests-on-arm.html
|
||||
# Adapted from https://github.com/PJK/libcbor/blob/master/.travis-qemu.sh
|
||||
# Adapted from https://gist.github.com/oznu/b5efd7784e5a820ec3746820f2183dc0
|
||||
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-armhf-bootstrap.html
|
||||
# Adapted from https://blog.lazy-evaluation.net/posts/linux/debian-stretch-arm64.html
|
||||
|
||||
set -e
|
||||
|
||||
# CHROOT Directory
|
||||
CHROOT_DIR=/tmp/chroot
|
||||
|
||||
# Debian package dependencies for the host to run ARM under QEMU
|
||||
DEBIAN_MIRROR="http://ftp.us.debian.org/debian"
|
||||
HOST_DEPENDENCIES="qemu-user-static binfmt-support debootstrap sbuild wget"
|
||||
|
||||
# Debian package dependencies for the chrooted environment
|
||||
GUEST_DEPENDENCIES="build-essential libcurl4-openssl-dev libsqlite3-dev libgnutls-openssl27 git"
|
||||
|
||||
function setup_arm32_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 32Bit Variables
|
||||
VERSION=jessie
|
||||
CHROOT_ARCH=armhf
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
|
||||
# Download LDC compiler
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.10.0/ldc2-1.10.0-linux-armhf.tar.xz
|
||||
tar -xf ldc2-1.10.0-linux-armhf.tar.xz
|
||||
mv ldc2-1.10.0-linux-armhf dlang-${ARCH}
|
||||
rm -rf ldc2-1.10.0-linux-armhf.tar.xz
|
||||
# Create chrooted environment
|
||||
sudo mkdir ${CHROOT_DIR}
|
||||
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
sudo cp /usr/bin/qemu-arm-static ${CHROOT_DIR}/usr/bin/
|
||||
sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
|
||||
sudo sbuild-createchroot --arch=${CHROOT_ARCH} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function setup_arm64_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 64Bit Variables
|
||||
VERSION64=stretch
|
||||
CHROOT_ARCH64=arm64
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
|
||||
# Download LDC compiler
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.11.0/ldc2-1.11.0-linux-aarch64.tar.xz
|
||||
tar -xf ldc2-1.11.0-linux-aarch64.tar.xz
|
||||
mv ldc2-1.11.0-linux-aarch64 dlang-${ARCH}
|
||||
rm -rf ldc2-1.11.0-linux-aarch64.tar.xz
|
||||
|
||||
# ARM64 qemu-debootstrap needs to be 1.0.78, Trusty is 1.0.59
|
||||
sudo echo "deb http://archive.ubuntu.com/ubuntu xenial main restricted universe multiverse" >> /etc/apt/sources.list
|
||||
sudo apt-get update
|
||||
sudo apt-get install -t xenial debootstrap
|
||||
|
||||
# Create chrooted environment
|
||||
sudo mkdir ${CHROOT_DIR}
|
||||
sudo qemu-debootstrap --arch=${CHROOT_ARCH64} ${VERSION64} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function setup_x32_chroot {
|
||||
# Update apt repository details
|
||||
sudo apt-get update
|
||||
# 32Bit Variables
|
||||
VERSION=jessie
|
||||
CHROOT_ARCH32=i386
|
||||
# Host dependencies
|
||||
sudo apt-get install -qq -y ${HOST_DEPENDENCIES}
|
||||
# Download DMD compiler
|
||||
wget http://downloads.dlang.org/releases/2.x/2.081.1/dmd.2.081.1.linux.tar.xz
|
||||
tar -xf dmd.2.081.1.linux.tar.xz
|
||||
mv dmd2 dlang-${ARCH}
|
||||
rm -rf dmd.2.081.1.linux.tar.xz
|
||||
# Create chrooted environment
|
||||
sudo mkdir ${CHROOT_DIR}
|
||||
sudo debootstrap --foreign --no-check-gpg --variant=buildd --arch=${CHROOT_ARCH32} ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
sudo cp /usr/bin/qemu-i386-static ${CHROOT_DIR}/usr/bin/
|
||||
sudo cp /usr/bin/qemu-x86_64-static ${CHROOT_DIR}/usr/bin/
|
||||
sudo chroot ${CHROOT_DIR} /debootstrap/debootstrap --second-stage
|
||||
sudo sbuild-createchroot --arch=${CHROOT_ARCH32} --foreign --setup-only ${VERSION} ${CHROOT_DIR} ${DEBIAN_MIRROR}
|
||||
configure_chroot
|
||||
}
|
||||
|
||||
function configure_chroot {
|
||||
# Create file with environment variables which will be used inside chrooted environment
|
||||
echo "export ARCH=${ARCH}" > envvars.sh
|
||||
echo "export TRAVIS_BUILD_DIR=${TRAVIS_BUILD_DIR}" >> envvars.sh
|
||||
chmod a+x envvars.sh
|
||||
|
||||
# Install dependencies inside chroot
|
||||
sudo chroot ${CHROOT_DIR} apt-get update
|
||||
sudo chroot ${CHROOT_DIR} apt-get --allow-unauthenticated install -qq -y ${GUEST_DEPENDENCIES}
|
||||
|
||||
# Create build dir and copy travis build files to our chroot environment
|
||||
sudo mkdir -p ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}
|
||||
sudo rsync -a ${TRAVIS_BUILD_DIR}/ ${CHROOT_DIR}/${TRAVIS_BUILD_DIR}/
|
||||
|
||||
# Indicate chroot environment has been set up
|
||||
sudo touch ${CHROOT_DIR}/.chroot_is_done
|
||||
|
||||
# Call ourselves again which will cause tests to run
|
||||
sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && chmod a+x ./.travis-ci.sh"
|
||||
sudo chroot ${CHROOT_DIR} bash -c "cd ${TRAVIS_BUILD_DIR} && ./.travis-ci.sh"
|
||||
}
|
||||
|
||||
function build_onedrive {
|
||||
# Depending on architecture, build onedrive using applicable tool
|
||||
echo `uname -a`
|
||||
HOMEDIR=`pwd`
|
||||
if [ "${ARCH}" = "x64" ]; then
|
||||
# Build on x86_64 as normal
|
||||
make clean; make;
|
||||
else
|
||||
if [ "${ARCH}" = "x32" ]; then
|
||||
# 32Bit DMD Build
|
||||
make clean;
|
||||
make DC=${HOMEDIR}/dlang-${ARCH}/linux/bin32/dmd
|
||||
else
|
||||
# LDC Build - ARM32, ARM64
|
||||
make clean;
|
||||
make DC=${HOMEDIR}/dlang-${ARCH}/bin/ldmd2
|
||||
fi
|
||||
fi
|
||||
# Functional testing of built application
|
||||
test_onedrive
|
||||
}
|
||||
|
||||
function test_onedrive {
|
||||
# Testing onedrive client - does the built application execute?
|
||||
./onedrive --version
|
||||
|
||||
# Functional testing on x64 only
|
||||
if [ "${ARCH}" = "x64" ]; then
|
||||
chmod a+x ./tests/makefiles.sh
|
||||
cd ./tests/
|
||||
./makefiles.sh
|
||||
cd ..
|
||||
mkdir -p ~/.config/onedrive/
|
||||
echo $ODP > ~/.config/onedrive/refresh_token
|
||||
./onedrive --synchronize --verbose --syncdir=~/OneDriveALT
|
||||
# OneDrive Cleanup
|
||||
rm -rf ~/OneDriveALT/*
|
||||
./onedrive --synchronize --verbose --syncdir=~/OneDriveALT
|
||||
fi
|
||||
}
|
||||
|
||||
if [ "${ARCH}" = "arm32" ] || [ "${ARCH}" = "arm64" ] || [ "${ARCH}" = "x32" ]; then
|
||||
if [ -e "/.chroot_is_done" ]; then
|
||||
# We are inside ARM chroot
|
||||
echo "Running inside chrooted QEMU ${ARCH} environment"
|
||||
. ./envvars.sh
|
||||
export PATH="$PATH:/usr/sbin:/sbin:/bin"
|
||||
build_onedrive
|
||||
else
|
||||
# Need to set up chrooted environment first
|
||||
echo "Setting up chrooted ${ARCH} build environment"
|
||||
if [ "${ARCH}" = "x32" ]; then
|
||||
# 32Bit i386 Environment
|
||||
setup_x32_chroot
|
||||
else
|
||||
if [ "${ARCH}" = "arm32" ]; then
|
||||
# 32Bit ARM Environment
|
||||
setup_arm32_chroot
|
||||
else
|
||||
# 64Bit ARM Environment
|
||||
setup_arm64_chroot
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
# Proceed as normal
|
||||
echo "Running an x86_64 Build"
|
||||
build_onedrive
|
||||
fi
|
17
.travis.yml
|
@ -1,17 +0,0 @@
|
|||
# sudo access is required
|
||||
sudo: required
|
||||
# Compilation language
|
||||
language: d
|
||||
# Use latest DMD
|
||||
d:
|
||||
- dmd
|
||||
|
||||
# What build architectures will we build on
|
||||
env:
|
||||
- ARCH=x64
|
||||
- ARCH=x32
|
||||
- ARCH=arm32
|
||||
- ARCH=arm64
|
||||
|
||||
script:
|
||||
- "bash -ex .travis-ci.sh"
|
1118
CHANGELOG.md
74
Makefile
|
@ -1,74 +0,0 @@
|
|||
DC = dmd
|
||||
DFLAGS = -g -ofonedrive -O -L-lcurl -L-lsqlite3 -L-ldl -J.
|
||||
PREFIX = /usr/local
|
||||
|
||||
ifneq ("$(wildcard /etc/redhat-release)","")
|
||||
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux Server|CentOS Linux)" | wc -l)
|
||||
else
|
||||
RHEL = 0
|
||||
endif
|
||||
|
||||
SOURCES = \
|
||||
src/config.d \
|
||||
src/itemdb.d \
|
||||
src/log.d \
|
||||
src/main.d \
|
||||
src/monitor.d \
|
||||
src/onedrive.d \
|
||||
src/qxor.d \
|
||||
src/selective.d \
|
||||
src/sqlite.d \
|
||||
src/sync.d \
|
||||
src/upload.d \
|
||||
src/util.d \
|
||||
src/progress.d
|
||||
|
||||
all: onedrive onedrive.service
|
||||
|
||||
clean:
|
||||
rm -f onedrive onedrive.o onedrive.service onedrive@.service
|
||||
|
||||
install: all
|
||||
mkdir -p $(DESTDIR)/var/log/onedrive
|
||||
chown root.users $(DESTDIR)/var/log/onedrive
|
||||
chmod 0775 $(DESTDIR)/var/log/onedrive
|
||||
install -D onedrive $(DESTDIR)$(PREFIX)/bin/onedrive
|
||||
install -D -m 644 logrotate/onedrive.logrotate $(DESTDIR)/etc/logrotate.d/onedrive
|
||||
ifeq ($(RHEL),1)
|
||||
mkdir -p $(DESTDIR)/usr/lib/systemd/system/
|
||||
chown root.root $(DESTDIR)/usr/lib/systemd/system/
|
||||
chmod 0755 $(DESTDIR)/usr/lib/systemd/system/
|
||||
cp -raf *.service $(DESTDIR)/usr/lib/systemd/system/
|
||||
chmod 0644 $(DESTDIR)/usr/lib/systemd/system/onedrive*.service
|
||||
else
|
||||
mkdir -p $(DESTDIR)/usr/lib/systemd/user/
|
||||
chown root.root $(DESTDIR)/usr/lib/systemd/user/
|
||||
chmod 0755 $(DESTDIR)/usr/lib/systemd/user/
|
||||
cp -raf onedrive.service $(DESTDIR)/usr/lib/systemd/user/
|
||||
chmod 0644 $(DESTDIR)/usr/lib/systemd/user/onedrive.service
|
||||
mkdir -p $(DESTDIR)/usr/lib/systemd/system/
|
||||
chown root.root $(DESTDIR)/usr/lib/systemd/system/
|
||||
chmod 0755 $(DESTDIR)/usr/lib/systemd/system/
|
||||
cp -raf onedrive@.service $(DESTDIR)/usr/lib/systemd/system/
|
||||
chmod 0644 $(DESTDIR)/usr/lib/systemd/system/onedrive@.service
|
||||
endif
|
||||
|
||||
onedrive: version $(SOURCES)
|
||||
$(DC) $(DFLAGS) $(SOURCES)
|
||||
|
||||
onedrive.service:
|
||||
sed "s|@PREFIX@|$(PREFIX)|g" systemd.units/onedrive.service.in > onedrive.service
|
||||
sed "s|@PREFIX@|$(PREFIX)|g" systemd.units/onedrive@.service.in > onedrive@.service
|
||||
|
||||
uninstall:
|
||||
rm -f $(DESTDIR)$(PREFIX)/bin/onedrive
|
||||
rm -f $(DESTDIR)/etc/logrotate.d/onedrive
|
||||
ifeq ($(RHEL),1)
|
||||
rm -f $(DESTDIR)/usr/lib/systemd/system/onedrive*.service
|
||||
else
|
||||
rm -f $(DESTDIR)/usr/lib/systemd/user/onedrive.service
|
||||
rm -f $(DESTDIR)/usr/lib/systemd/system/onedrive@.service
|
||||
endif
|
||||
|
||||
version: .git/HEAD .git/index
|
||||
echo $(shell git describe --tags) >version
|
160
Makefile.in
Normal file
|
@ -0,0 +1,160 @@
|
|||
package = @PACKAGE_NAME@
|
||||
version = @PACKAGE_VERSION@
|
||||
prefix = @prefix@
|
||||
# we don't use @exec_prefix@ because it usually contains '${prefix}' literally
|
||||
# but we use @prefix@/bin/onedrive in the systemd unit files which are generated
|
||||
# from the configure script.
|
||||
# Thus, set exec_prefix unconditionally to prefix
|
||||
# Alternative approach would be add dep on sed, and do manual generation in the Makefile.
|
||||
# exec_prefix = @exec_prefix@
|
||||
exec_prefix = @prefix@
|
||||
datarootdir = @datarootdir@
|
||||
datadir = @datadir@
|
||||
srcdir = @srcdir@
|
||||
bindir = @bindir@
|
||||
mandir = @mandir@
|
||||
sysconfdir = @sysconfdir@
|
||||
docdir = $(datadir)/doc/$(package)
|
||||
VPATH = @srcdir@
|
||||
INSTALL = @INSTALL@
|
||||
|
||||
NOTIFICATIONS = @NOTIFICATIONS@
|
||||
HAVE_SYSTEMD = @HAVE_SYSTEMD@
|
||||
systemduserunitdir = @systemduserunitdir@
|
||||
systemdsystemunitdir = @systemdsystemunitdir@
|
||||
curl_LIBS = @curl_LIBS@
|
||||
sqlite_LIBS = @sqlite_LIBS@
|
||||
notify_LIBS = @notify_LIBS@
|
||||
COMPLETIONS = @COMPLETIONS@
|
||||
BASH_COMPLETION_DIR = @BASH_COMPLETION_DIR@
|
||||
ZSH_COMPLETION_DIR = @ZSH_COMPLETION_DIR@
|
||||
FISH_COMPLETION_DIR = @FISH_COMPLETION_DIR@
|
||||
DEBUG = @DEBUG@
|
||||
|
||||
DC = @DC@
|
||||
DC_TYPE = @DC_TYPE@
|
||||
DCFLAGS = @DCFLAGS@
|
||||
DCFLAGS += -w -g -O -J.
|
||||
ifeq ($(DEBUG),yes)
|
||||
ifeq ($(DC_TYPE),dmd)
|
||||
DCFLAGS += -debug -gs
|
||||
else
|
||||
DCFLAGS += -d-debug -gc
|
||||
endif
|
||||
endif
|
||||
|
||||
ifeq ($(NOTIFICATIONS),yes)
|
||||
NOTIF_VERSIONS=-version=NoPragma -version=NoGdk -version=Notifications
|
||||
# support ldc2 which needs -d prefix for version specification
|
||||
ifeq ($(DC_TYPE),ldc)
|
||||
NOTIF_VERSIONS := $(addprefix -d,$(NOTIF_VERSIONS))
|
||||
endif
|
||||
DCFLAGS += $(NOTIF_VERSIONS)
|
||||
endif
|
||||
|
||||
system_unit_files = contrib/systemd/onedrive@.service
|
||||
user_unit_files = contrib/systemd/onedrive.service
|
||||
|
||||
DOCFILES = README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md docs/application-security.md
|
||||
|
||||
ifneq ("$(wildcard /etc/redhat-release)","")
|
||||
RHEL = $(shell cat /etc/redhat-release | grep -E "(Red Hat Enterprise Linux|CentOS)" | wc -l)
|
||||
RHEL_VERSION = $(shell rpm --eval "%{rhel}")
|
||||
else
|
||||
RHEL = 0
|
||||
RHEL_VERSION = 0
|
||||
endif
|
||||
|
||||
SOURCES = \
|
||||
src/config.d \
|
||||
src/itemdb.d \
|
||||
src/log.d \
|
||||
src/main.d \
|
||||
src/monitor.d \
|
||||
src/onedrive.d \
|
||||
src/qxor.d \
|
||||
src/selective.d \
|
||||
src/sqlite.d \
|
||||
src/sync.d \
|
||||
src/upload.d \
|
||||
src/util.d \
|
||||
src/progress.d \
|
||||
src/arsd/cgi.d
|
||||
|
||||
ifeq ($(NOTIFICATIONS),yes)
|
||||
SOURCES += src/notifications/notify.d src/notifications/dnotify.d
|
||||
endif
|
||||
|
||||
all: onedrive
|
||||
|
||||
clean:
|
||||
rm -f onedrive onedrive.o version
|
||||
rm -rf autom4te.cache
|
||||
rm -f config.log config.status
|
||||
|
||||
# also remove files generated via ./configure
|
||||
distclean: clean
|
||||
rm -f Makefile contrib/pacman/PKGBUILD contrib/spec/onedrive.spec onedrive.1 \
|
||||
$(system_unit_files) $(user_unit_files)
|
||||
|
||||
onedrive: $(SOURCES)
|
||||
if [ -f .git/HEAD ] ; then \
|
||||
git describe --tags > version ; \
|
||||
else \
|
||||
echo $(version) > version ; \
|
||||
fi
|
||||
$(DC) $(DCFLAGS) $(addprefix -L,$(curl_LIBS)) $(addprefix -L,$(sqlite_LIBS)) $(addprefix -L,$(notify_LIBS)) -L-ldl $(SOURCES) -of$@
|
||||
|
||||
install: all
|
||||
$(INSTALL) -D onedrive $(DESTDIR)$(bindir)/onedrive
|
||||
$(INSTALL) -D -m 0644 onedrive.1 $(DESTDIR)$(mandir)/man1/onedrive.1
|
||||
$(INSTALL) -D -m 0644 contrib/logrotate/onedrive.logrotate $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive
|
||||
mkdir -p $(DESTDIR)$(docdir)
|
||||
$(INSTALL) -D -m 0644 $(DOCFILES) $(DESTDIR)$(docdir)
|
||||
ifeq ($(HAVE_SYSTEMD),yes)
|
||||
$(INSTALL) -d -m 0755 $(DESTDIR)$(systemduserunitdir) $(DESTDIR)$(systemdsystemunitdir)
|
||||
ifeq ($(RHEL),1)
|
||||
$(INSTALL) -m 0644 $(system_unit_files) $(DESTDIR)$(systemdsystemunitdir)
|
||||
$(INSTALL) -m 0644 $(user_unit_files) $(DESTDIR)$(systemdsystemunitdir)
|
||||
else
|
||||
$(INSTALL) -m 0644 $(system_unit_files) $(DESTDIR)$(systemdsystemunitdir)
|
||||
$(INSTALL) -m 0644 $(user_unit_files) $(DESTDIR)$(systemduserunitdir)
|
||||
endif
|
||||
else
|
||||
ifeq ($(RHEL_VERSION),6)
|
||||
install -D contrib/init.d/onedrive.init $(DESTDIR)/etc/init.d/onedrive
|
||||
install -D contrib/init.d/onedrive_service.sh $(DESTDIR)$(bindir)/onedrive_service.sh
|
||||
endif
|
||||
endif
|
||||
ifeq ($(COMPLETIONS),yes)
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.zsh $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.bash $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive
|
||||
$(INSTALL) -D -m 0644 contrib/completions/complete.fish $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
endif
|
||||
|
||||
|
||||
uninstall:
|
||||
rm -f $(DESTDIR)$(bindir)/onedrive
|
||||
rm -f $(DESTDIR)$(mandir)/man1/onedrive.1
|
||||
rm -f $(DESTDIR)$(sysconfdir)/logrotate.d/onedrive
|
||||
ifeq ($(HAVE_SYSTEMD),yes)
|
||||
ifeq ($(RHEL),1)
|
||||
rm -f $(DESTDIR)$(systemdsystemunitdir)/onedrive*.service
|
||||
else
|
||||
rm -f $(DESTDIR)$(systemdsystemunitdir)/onedrive*.service
|
||||
rm -f $(DESTDIR)$(systemduserunitdir)/onedrive*.service
|
||||
endif
|
||||
else
|
||||
ifeq ($(RHEL_VERSION),6)
|
||||
rm -f $(DESTDIR)/etc/init.d/onedrive
|
||||
rm -f $(DESTDIR)$(bindir)/onedrive_service.sh
|
||||
endif
|
||||
endif
|
||||
for i in $(DOCFILES) ; do rm -f $(DESTDIR)$(docdir)/$$i ; done
|
||||
ifeq ($(COMPLETIONS),yes)
|
||||
rm -f $(DESTDIR)$(ZSH_COMPLETION_DIR)/_onedrive
|
||||
rm -f $(DESTDIR)$(BASH_COMPLETION_DIR)/onedrive
|
||||
rm -f $(DESTDIR)$(FISH_COMPLETION_DIR)/onedrive.fish
|
||||
endif
|
||||
|
||||
|
473
README.md
|
@ -1,429 +1,92 @@
|
|||
# OneDrive Free Client
|
||||
###### A complete tool to interact with OneDrive on Linux. Built following the UNIX philosophy.
|
||||
# OneDrive Client for Linux
|
||||
[![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
[![Release Date](https://img.shields.io/github/release-date/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
[![Test Build](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/testbuild.yaml)
|
||||
[![Build Docker Images](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml/badge.svg)](https://github.com/abraunegg/onedrive/actions/workflows/docker.yaml)
|
||||
[![Docker Pulls](https://img.shields.io/docker/pulls/driveone/onedrive)](https://hub.docker.com/r/driveone/onedrive)
|
||||
|
||||
### Features:
|
||||
* State caching
|
||||
* Real-Time file monitoring with Inotify
|
||||
* Resumable uploads
|
||||
* Support OneDrive for Business (part of Office 365)
|
||||
* Shared folders (not Business)
|
||||
A free Microsoft OneDrive Client which supports OneDrive Personal, OneDrive for Business, OneDrive for Office365 and SharePoint.
|
||||
|
||||
### What's missing:
|
||||
* While local changes are uploaded right away, remote changes are delayed
|
||||
* No GUI
|
||||
This powerful and highly configurable client can run on all major Linux distributions, FreeBSD, or as a Docker container. It supports one-way and two-way sync capabilities and securely connects to Microsoft OneDrive services.
|
||||
|
||||
## Build Requirements
|
||||
* Build environment must have at least 1GB of memory & 1GB swap space
|
||||
* [libcurl](http://curl.haxx.se/libcurl/)
|
||||
* [SQLite 3](https://www.sqlite.org/)
|
||||
* [Digital Mars D Compiler (DMD)](http://dlang.org/download.html)
|
||||
This client is a 'fork' of the [skilion](https://github.com/skilion/onedrive) client, which the developer has confirmed he has no desire to maintain or support the client ([reference](https://github.com/skilion/onedrive/issues/518#issuecomment-717604726)). This fork has been in active development since mid 2018.
|
||||
|
||||
### Dependencies: Ubuntu/Debian - x86_64
|
||||
```
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
## Features
|
||||
* State caching
|
||||
* Real-Time local file monitoring with inotify
|
||||
* Real-Time syncing of remote updates via webhooks
|
||||
* File upload / download validation to ensure data integrity
|
||||
* Resumable uploads
|
||||
* Support OneDrive for Business (part of Office 365)
|
||||
* Shared Folder support for OneDrive Personal and OneDrive Business accounts
|
||||
* SharePoint / Office365 Shared Libraries
|
||||
* Desktop notifications via libnotify
|
||||
* Dry-run capability to test configuration changes
|
||||
* Prevent major OneDrive accidental data deletion after configuration change
|
||||
* Support for National cloud deployments (Microsoft Cloud for US Government, Microsoft Cloud Germany, Azure and Office 365 operated by 21Vianet in China)
|
||||
* Supports single & multi-tenanted applications
|
||||
* Supports rate limiting of traffic
|
||||
|
||||
### Dependencies: Ubuntu - i386 / i686
|
||||
**Note:** Validated with `Linux ubuntu-i386-vm 4.13.0-36-generic #40~16.04.1-Ubuntu SMP Fri Feb 16 23:26:51 UTC 2018 i686 i686 i686 GNU/Linux` and DMD 2.081.1
|
||||
```
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
## What's missing
|
||||
* Ability to encrypt/decrypt files on-the-fly when uploading/downloading files from OneDrive
|
||||
* Support for Windows 'On-Demand' functionality so file is only downloaded when accessed locally
|
||||
|
||||
### Dependencies: Debian - i386 / i686
|
||||
**Note:** Validated with `Linux debian-i386 4.9.0-7-686-pae #1 SMP Debian 4.9.110-1 (2018-07-05) i686 GNU/Linux` and LDC - the LLVM D compiler (1.8.0).
|
||||
## External Enhancements
|
||||
* A GUI for configuration management: [OneDrive Client for Linux GUI](https://github.com/bpozdena/OneDriveGUI)
|
||||
* Colorful log output terminal modification: [OneDrive Client for Linux Colorful log Output](https://github.com/zzzdeb/dotfiles/blob/master/scripts/tools/onedrive_log)
|
||||
* System Tray Icon: [OneDrive Client for Linux System Tray Icon](https://github.com/DanielBorgesOliveira/onedrive_tray)
|
||||
|
||||
First install development dependancies as per below:
|
||||
```
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev
|
||||
sudo apt install libsqlite3-dev
|
||||
sudo apt install git
|
||||
```
|
||||
Second, install the LDC compiler as per below:
|
||||
```
|
||||
mkdir ldc && cd ldc
|
||||
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/ldc_1.8.0-3_i386.deb
|
||||
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/libphobos2-ldc-shared-dev_1.8.0-3_i386.deb
|
||||
wget http://ftp.us.debian.org/debian/pool/main/l/ldc/libphobos2-ldc-shared78_1.8.0-3_i386.deb
|
||||
wget http://ftp.us.debian.org/debian/pool/main/l/llvm-toolchain-5.0/libllvm5.0_5.0.1-2~bpo9+1_i386.deb
|
||||
wget http://ftp.us.debian.org/debian/pool/main/n/ncurses/libtinfo6_6.1+20180714-1_i386.deb
|
||||
sudo dpkg -i ./*.deb
|
||||
```
|
||||
## Supported Application Version
|
||||
Only the current application release version or greater is supported.
|
||||
|
||||
### Dependencies: Fedora < Version 18 / CentOS / RHEL
|
||||
```
|
||||
sudo yum groupinstall 'Development Tools'
|
||||
sudo yum install libcurl-devel
|
||||
sudo yum install sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
The current application release version is: [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
### Dependencies: Fedora > Version 18
|
||||
```
|
||||
sudo dnf groupinstall 'Development Tools'
|
||||
sudo dnf install libcurl-devel
|
||||
sudo dnf install sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
Check the version of the application you are using `onedrive --version` and ensure that you are running either the current release or compile the application yourself from master to get the latest version.
|
||||
|
||||
### Dependencies: Arch Linux
|
||||
```
|
||||
sudo pacman -S curl sqlite dmd
|
||||
```
|
||||
If you are not using the above application version or greater, you must upgrade your application to obtain support.
|
||||
|
||||
### Dependencies: Raspbian (ARMHF)
|
||||
```
|
||||
sudo apt-get install libcurl4-openssl-dev
|
||||
sudo apt-get install libsqlite3-dev
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.10.0/ldc2-1.10.0-linux-armhf.tar.xz
|
||||
tar -xvf ldc2-1.10.0-linux-armhf.tar.xz
|
||||
```
|
||||
## Have a Question
|
||||
If you have a question or need something clarified, please raise a new disscussion post [here](https://github.com/abraunegg/onedrive/discussions)
|
||||
|
||||
### Dependencies: Debian (ARM64)
|
||||
```
|
||||
sudo apt-get install libcurl4-openssl-dev
|
||||
sudo apt-get install libsqlite3-dev
|
||||
wget https://github.com/ldc-developers/ldc/releases/download/v1.11.0/ldc2-1.11.0-linux-aarch64.tar.xz
|
||||
tar -xvf ldc2-1.11.0-linux-aarch64.tar.xz
|
||||
```
|
||||
Be sure to review the Frequently Asked Questions as well before raising a new discussion post.
|
||||
|
||||
### Dependencies: Gentoo
|
||||
```
|
||||
sudo emerge app-portage/layman
|
||||
sudo layman -a dlang
|
||||
```
|
||||
Add ebuild from contrib/gentoo to a local overlay to use.
|
||||
## Frequently Asked Questions
|
||||
Refer to [Frequently Asked Questions](https://github.com/abraunegg/onedrive/wiki/Frequently-Asked-Questions)
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.0
|
||||
```
|
||||
sudo zypper addrepo --check --refresh --name "D" http://download.opensuse.org/repositories/devel:/languages:/D/openSUSE_Leap_15.0/devel:languages:D.repo
|
||||
sudo zypper install git libcurl-devel sqlite3-devel D:dmd D:libphobos2-0_81 D:phobos-devel D:phobos-devel-static
|
||||
```
|
||||
## Reporting an Issue or Bug
|
||||
If you encounter any bugs you can report them here on GitHub. Before filing an issue be sure to:
|
||||
|
||||
## Compilation & Installation
|
||||
### Building using DMD Reference Compiler:
|
||||
Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below:
|
||||
```
|
||||
Run `source ~/dlang/dmd-2.081.1/activate` in your shell to use dmd-2.081.1.
|
||||
This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1.
|
||||
Run `deactivate` later on to restore your environment.
|
||||
```
|
||||
Without performing this step, the compilation process will fail.
|
||||
1. Check the version of the application you are using `onedrive --version` and ensure that you are running a supported application version. If you are not using a supported application version, you must first upgrade your application to a supported version and then re-test for your issue.
|
||||
2. If you are using a supported applcation version, fill in a new bug report using the [issue template](https://github.com/abraunegg/onedrive/issues/new?template=bug_report.md)
|
||||
3. Generate a debug log for support using the following [process](https://github.com/abraunegg/onedrive/wiki/Generate-debug-log-for-support)
|
||||
* If you are in *any* way concerned regarding the sensitivity of the data contained with in the verbose debug log file, create a new OneDrive account, configure the client to use that, use *dummy* data to simulate your environment and then replicate your original issue
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
4. Upload the debug log to [pastebin](https://pastebin.com/) or archive and email to support@mynas.com.au
|
||||
* If you are concerned regarding the sensitivity of your debug data, encrypt + password protect the archive file and provide the decryption password via an out-of-band (OOB) mechanism. Email support@mynas.com.au for an OOB method for the password to be sent.
|
||||
* If you are still concerned, provide an NDA or confidentiality document to sign
|
||||
|
||||
**Note:** Depending on your DMD version, substitute `2.081.1` above with your DMD version that is installed.
|
||||
## Known issues
|
||||
Refer to [docs/known-issues.md](https://github.com/abraunegg/onedrive/blob/master/docs/known-issues.md)
|
||||
|
||||
```
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
make
|
||||
sudo make install
|
||||
```
|
||||
## Documentation and Configuration Assistance
|
||||
### Installing from Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
Refer to [docs/INSTALL.md](https://github.com/abraunegg/onedrive/blob/master/docs/INSTALL.md)
|
||||
|
||||
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC)):
|
||||
#### Debian - i386 / i686
|
||||
```
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
make DC=/usr/bin/ldmd2
|
||||
sudo make install
|
||||
```
|
||||
### Configuration and Usage
|
||||
Refer to [docs/USAGE.md](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md)
|
||||
|
||||
#### ARMHF Architecture
|
||||
```
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
make DC=~/ldc2-1.10.0-linux-armhf/bin/ldmd2
|
||||
sudo make install
|
||||
```
|
||||
### Configure OneDrive Business Shared Folders
|
||||
Refer to [docs/BusinessSharedFolders.md](https://github.com/abraunegg/onedrive/blob/master/docs/BusinessSharedFolders.md)
|
||||
|
||||
#### ARM64 Architecture
|
||||
```
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
make DC=~/ldc2-1.11.0-linux-aarch64/bin/ldmd2
|
||||
sudo make install
|
||||
```
|
||||
### Configure SharePoint / Office 365 Shared Libraries (Business or Education)
|
||||
Refer to [docs/SharePoint-Shared-Libraries.md](https://github.com/abraunegg/onedrive/blob/master/docs/SharePoint-Shared-Libraries.md)
|
||||
|
||||
## Using the client
|
||||
### Upgrading from 'skilion' client
|
||||
The 'skilion' version contains a significant number of defect's in how the local sync state is managed. When upgrading from the 'skilion' version to this version, you may encounter the following error:
|
||||
```
|
||||
Skipping uploading this new file as parent path is not in the database:
|
||||
```
|
||||
In this instance it is advisable to stop any service / onedrive process from running and then remove any `items.sqlite3` file from your configuration directory (`~/.config/onedrive/`) as this will force the creation of a new local cache file.
|
||||
### Configure National Cloud support
|
||||
Refer to [docs/national-cloud-deployments.md](https://github.com/abraunegg/onedrive/blob/master/docs/national-cloud-deployments.md)
|
||||
|
||||
Alternativly add `--resync` to your existing command line onetime only - for example: `onedrive --synchronize --resync` to force a full resync.
|
||||
### Docker support
|
||||
Refer to [docs/Docker.md](https://github.com/abraunegg/onedrive/blob/master/docs/Docker.md)
|
||||
|
||||
### First run :zap:
|
||||
After installing the application you must run it at least once from the terminal to authorize it.
|
||||
### Podman support
|
||||
Refer to [docs/Podman.md](https://github.com/abraunegg/onedrive/blob/master/docs/Podman.md)
|
||||
|
||||
You will be asked to open a specific link using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving the permission, you will be redirected to a blank page. Copy the URI of the blank page into the application.
|
||||
|
||||
### Performing a sync
|
||||
By default all files are downloaded in `~/OneDrive`. After authorizing the application, a sync of your data can be performed by running:
|
||||
```
|
||||
onedrive --synchronize
|
||||
```
|
||||
This will synchronize files from your OneDrive account to your `~/OneDrive` local directory.
|
||||
|
||||
If you prefer to use your local files as stored in `~/OneDrive` as the 'source of truth' use the following sync command:
|
||||
```
|
||||
onedrive --synchronize --local-first
|
||||
```
|
||||
|
||||
### Performing a selective directory sync
|
||||
In some cases it may be desirable to sync a single directory under ~/OneDrive without having to change your client configuration. To do this use the following command:
|
||||
```
|
||||
onedrive --synchronize --single-directory '<dir_name>'
|
||||
```
|
||||
|
||||
Example: If the full path is `~/OneDrive/mydir`, the command would be `onedrive --synchronize --single-directory 'mydir'`
|
||||
|
||||
### Performing a 'one-way' sync
|
||||
In some cases it may be desirable to 'upload only' to OneDrive. To do this use the following command:
|
||||
```
|
||||
onedrive --synchronize --upload-only
|
||||
```
|
||||
|
||||
### Increasing logging level
|
||||
When running a sync it may be desirable to see additional information as to the progress and operation of the client. To do this, use the following command:
|
||||
```
|
||||
onedrive --synchronize --verbose
|
||||
```
|
||||
|
||||
### Client Activity Log
|
||||
When running onedrive all actions are logged to `/var/log/onedrive/`
|
||||
|
||||
All logfiles will be in the format of `%username%.onedrive.log`
|
||||
|
||||
An example of the log file is below:
|
||||
```
|
||||
2018-Apr-07 17:09:32.1162837 Loading config ...
|
||||
2018-Apr-07 17:09:32.1167908 No config file found, using defaults
|
||||
2018-Apr-07 17:09:32.1170626 Initializing the OneDrive API ...
|
||||
2018-Apr-07 17:09:32.5359143 Opening the item database ...
|
||||
2018-Apr-07 17:09:32.5515295 All operations will be performed in: /root/OneDrive
|
||||
2018-Apr-07 17:09:32.5518387 Initializing the Synchronization Engine ...
|
||||
2018-Apr-07 17:09:36.6701351 Applying changes of Path ID: <redacted>
|
||||
2018-Apr-07 17:09:37.4434282 Adding OneDrive Root to the local database
|
||||
2018-Apr-07 17:09:37.4478342 The item is already present
|
||||
2018-Apr-07 17:09:37.4513752 The item is already present
|
||||
2018-Apr-07 17:09:37.4550062 The item is already present
|
||||
2018-Apr-07 17:09:37.4586444 The item is already present
|
||||
2018-Apr-07 17:09:37.7663571 Adding OneDrive Root to the local database
|
||||
2018-Apr-07 17:09:37.7739451 Fetching details for OneDrive Root
|
||||
2018-Apr-07 17:09:38.0211861 OneDrive Root exists in the database
|
||||
2018-Apr-07 17:09:38.0215375 Uploading differences of .
|
||||
2018-Apr-07 17:09:38.0220464 Processing <redacted>
|
||||
2018-Apr-07 17:09:38.0224884 The directory has not changed
|
||||
2018-Apr-07 17:09:38.0229369 Processing <redacted>
|
||||
2018-Apr-07 17:09:38.02338 The directory has not changed
|
||||
2018-Apr-07 17:09:38.0237678 Processing <redacted>
|
||||
2018-Apr-07 17:09:38.0242285 The directory has not changed
|
||||
2018-Apr-07 17:09:38.0245977 Processing <redacted>
|
||||
2018-Apr-07 17:09:38.0250788 The directory has not changed
|
||||
2018-Apr-07 17:09:38.0254657 Processing <redacted>
|
||||
2018-Apr-07 17:09:38.0259923 The directory has not changed
|
||||
2018-Apr-07 17:09:38.0263547 Uploading new items of .
|
||||
2018-Apr-07 17:09:38.5708652 Applying changes of Path ID: <redacted>
|
||||
```
|
||||
|
||||
### Uninstall
|
||||
```sh
|
||||
sudo make uninstall
|
||||
# delete the application state
|
||||
rm -rf ~/.config/onedrive
|
||||
```
|
||||
If you are using the `--confdir` option, substitute `~/.config/onedrive` above for that directory.
|
||||
|
||||
If you want to just delete the application key, but keep the items database:
|
||||
```
|
||||
rm -f ~/.config/onedrive/refresh_token
|
||||
```
|
||||
|
||||
## Additional Configuration
|
||||
Additional configuration is optional.
|
||||
If you want to change the defaults, you can copy and edit the included config file into your `~/.config/onedrive` directory:
|
||||
```sh
|
||||
mkdir -p ~/.config/onedrive
|
||||
cp ./config ~/.config/onedrive/config
|
||||
nano ~/.config/onedrive/config
|
||||
```
|
||||
This file does not get created by default, and should only be created if you want to change the 'default' operational parameters.
|
||||
|
||||
Available options:
|
||||
* `sync_dir`: directory where the files will be synced
|
||||
* `skip_file`: any files or directories that match this pattern will be skipped during sync
|
||||
* `skip_symlinks`: any files or directories that are symlinked will be skipped during sync
|
||||
* `monitor_interval`: time interval in seconds by which the monitor process will process local and remote changes
|
||||
|
||||
### sync_dir
|
||||
Example: `sync_dir="~/MyDirToSync"`
|
||||
|
||||
**Please Note:**
|
||||
Proceed with caution here when changing the default sync dir from ~/OneDrive to ~/MyDirToSync
|
||||
|
||||
The issue here is around how the client stores the sync_dir path in the database. If the config file is missing, or you don't use the `--syncdir` parameter - what will happen is the client will default back to `~/OneDrive` and 'think' that either all your data has been deleted - thus delete the content on OneDrive, or will start downloading all data from OneDrive into the default location.
|
||||
|
||||
### skip_file
|
||||
Example: `skip_file = ".*|~*|Desktop|Documents/OneNote*|Documents/IISExpress|Documents/SQL Server Management Studio|Documents/Visual Studio*|Documents/config.xlaunch|Documents/WindowsPowerShell"`
|
||||
|
||||
Patterns are case insensitive. `*` and `?` [wildcards characters](https://technet.microsoft.com/en-us/library/bb490639.aspx) are supported. Use `|` to separate multiple patterns.
|
||||
|
||||
**Note:** after changing `skip_file`, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync`
|
||||
|
||||
### skip_symlinks
|
||||
Example: `skip_symlinks = "true"`
|
||||
|
||||
Setting this to `"true"` will skip all symlinks while syncing.
|
||||
|
||||
### monitor_interval
|
||||
Example: `monitor_interval = "300"`
|
||||
|
||||
The monitor interval is defined as the wait time 'between' syncs when running in monitor mode. By default without configuration, the monitor_interval is set to 45 seconds. Setting this value to 300 will run the sync process every 5 minutes.
|
||||
|
||||
### Selective sync
|
||||
Selective sync allows you to sync only specific files and directories.
|
||||
To enable selective sync create a file named `sync_list` in `~/.config/onedrive`.
|
||||
Each line of the file represents a relative path from your `sync_dir`. All files and directories not matching any line of the file will be skipped during all operations.
|
||||
Here is an example of `sync_list`:
|
||||
```text
|
||||
Backup
|
||||
Documents/latest_report.docx
|
||||
Work/ProjectX
|
||||
notes.txt
|
||||
Blender
|
||||
Cinema Soc
|
||||
Codes
|
||||
Textbooks
|
||||
Year 2
|
||||
```
|
||||
**Note:** after changing the sync_list, you must perform a full re-synchronization by adding `--resync` to your existing command line - for example: `onedrive --synchronize --resync`
|
||||
|
||||
### Shared folders
|
||||
Folders shared with you can be synced by adding them to your OneDrive. To do that open your OneDrive, go to the Shared files list, right click on the folder you want to sync and then click on "Add to my OneDrive".
|
||||
|
||||
### OneDrive service running as root user
|
||||
There are two ways that onedrive can be used as a service
|
||||
* via init.d
|
||||
* via systemd
|
||||
|
||||
**Note:** If using the service files, you may need to increase the `fs.inotify.max_user_watches` value on your system to handle the number of files in the directory you are monitoring as the initial value may be too low.
|
||||
|
||||
**init.d**
|
||||
|
||||
```
|
||||
chkconfig onedrive on
|
||||
service onedrive start
|
||||
```
|
||||
To see the logs run:
|
||||
```
|
||||
tail -f /var/log/onedrive/<username>.onedrive.log
|
||||
```
|
||||
To change what 'user' the client runs under (by default root), manually edit the init.d service file and modify `daemon --user root onedrive_service.sh` for the correct user.
|
||||
|
||||
**systemd - Arch, Ubuntu, Debian, OpenSuSE, Fedora**
|
||||
```sh
|
||||
systemctl --user enable onedrive
|
||||
systemctl --user start onedrive
|
||||
```
|
||||
|
||||
To see the logs run:
|
||||
```sh
|
||||
journalctl --user-unit onedrive -f
|
||||
```
|
||||
|
||||
**systemd - Red Hat Enterprise Linux, CentOS Linux**
|
||||
```sh
|
||||
systemctl enable onedrive
|
||||
systemctl start onedrive
|
||||
```
|
||||
|
||||
To see the logs run:
|
||||
```sh
|
||||
journalctl onedrive -f
|
||||
```
|
||||
|
||||
### OneDrive service running as a non-root user via systemd
|
||||
|
||||
In some cases it is desirable to run the OneDrive client as a service, but not running as the 'root' user. In this case, follow the directions below to configure the service for a non-root user.
|
||||
|
||||
1. As the user, who will be running the service, run the application in standalone mode, authorize the application for use & validate that the synchronization is working as expected:
|
||||
```
|
||||
onedrive --synchronize --verbose
|
||||
```
|
||||
2. Once the application is validated and working for your user, as the 'root' user run the following commands, where `<username>` is your username from step 1 above:
|
||||
```
|
||||
systemctl enable onedrive@<username>.service
|
||||
systemctl start onedrive@<username>.service
|
||||
```
|
||||
|
||||
3. To view the status of the service running for the user, use the following:
|
||||
```
|
||||
systemctl status onedrive@username.service
|
||||
```
|
||||
|
||||
### Using multiple OneDrive accounts
|
||||
You can run multiple instances of the application specifying a different config directory in order to handle multiple OneDrive accounts.
|
||||
To do this you can use the `--confdir` parameter.
|
||||
Here is an example:
|
||||
```sh
|
||||
onedrive --synchronize --monitor --confdir="~/.config/onedrivePersonal" &
|
||||
onedrive --synchronize --monitor --confdir="~/.config/onedriveWork" &
|
||||
```
|
||||
|
||||
`--monitor` keeps the application running and monitoring for changes
|
||||
|
||||
`&` puts the application in background and leaves the terminal interactive
|
||||
|
||||
## Extra
|
||||
|
||||
### Reporting issues
|
||||
If you encounter any bugs you can report them here on Github. Before filing an issue be sure to:
|
||||
|
||||
1. Check the version of the application you are using `onedrive --version`
|
||||
2. Run the application in verbose mode `onedrive --verbose`
|
||||
3. Have the log of the error (preferably uploaded on an external website such as [pastebin](https://pastebin.com/))
|
||||
4. Collect any information that you think is relevant to the error
|
||||
- The steps to trigger the error
|
||||
- What you have already done to try to solve it
|
||||
- ...
|
||||
|
||||
### All available commands:
|
||||
```text
|
||||
Usage: onedrive [OPTION]...
|
||||
|
||||
no option No Sync and exit
|
||||
--check-for-nomount Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
--confdir Set the directory used to store the configuration files
|
||||
--create-directory Create a directory on OneDrive - no sync will be performed.
|
||||
--destination-directory Destination directory for renamed or move on OneDrive - no sync will be performed.
|
||||
--debug-http Debug OneDrive HTTP communication.
|
||||
-d --download Only download remote changes
|
||||
--local-first Synchronize from the local directory source first, before downloading changes from OneDrive.
|
||||
--logout Logout the current user
|
||||
-m --monitor Keep monitoring for local and remote changes
|
||||
--no-remote-delete Do not delete local file 'deletes' from OneDrive when using --upload-only
|
||||
--print-token Print the access token, useful for debugging
|
||||
--resync Forget the last saved state, perform a full sync
|
||||
--remove-directory Remove a directory on OneDrive - no sync will be performed.
|
||||
--single-directory Specify a single local directory within the OneDrive root to sync.
|
||||
--skip-symlinks Skip syncing of symlinks
|
||||
--source-directory Source directory to rename or move on OneDrive - no sync will be performed.
|
||||
--syncdir Set the directory used to sync the files that are synced
|
||||
--synchronize Perform a synchronization
|
||||
--upload-only Only upload to OneDrive, do not sync changes from OneDrive locally
|
||||
-v --verbose Print more details, useful for debugging
|
||||
--version Print the version and exit
|
||||
-h --help This help information.
|
||||
```
|
||||
|
||||
### File naming
|
||||
The files and directories in the synchronization directory must follow the [Windows naming conventions](https://msdn.microsoft.com/en-us/library/aa365247).
|
||||
The application will crash for example if you have two files with the same name but different case. This is expected behavior and won't be fixed.
|
||||
|
|
290
aclocal.m4
vendored
Normal file
|
@ -0,0 +1,290 @@
|
|||
# generated automatically by aclocal 1.16.1 -*- Autoconf -*-
|
||||
|
||||
# Copyright (C) 1996-2018 Free Software Foundation, Inc.
|
||||
|
||||
# This file is free software; the Free Software Foundation
|
||||
# gives unlimited permission to copy and/or distribute it,
|
||||
# with or without modifications, as long as this notice is preserved.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
|
||||
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
# PARTICULAR PURPOSE.
|
||||
|
||||
m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])])
|
||||
dnl pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*-
|
||||
dnl serial 11 (pkg-config-0.29)
|
||||
dnl
|
||||
dnl Copyright © 2004 Scott James Remnant <scott@netsplit.com>.
|
||||
dnl Copyright © 2012-2015 Dan Nicholson <dbn.lists@gmail.com>
|
||||
dnl
|
||||
dnl This program is free software; you can redistribute it and/or modify
|
||||
dnl it under the terms of the GNU General Public License as published by
|
||||
dnl the Free Software Foundation; either version 2 of the License, or
|
||||
dnl (at your option) any later version.
|
||||
dnl
|
||||
dnl This program is distributed in the hope that it will be useful, but
|
||||
dnl WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
dnl General Public License for more details.
|
||||
dnl
|
||||
dnl You should have received a copy of the GNU General Public License
|
||||
dnl along with this program; if not, write to the Free Software
|
||||
dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
|
||||
dnl 02111-1307, USA.
|
||||
dnl
|
||||
dnl As a special exception to the GNU General Public License, if you
|
||||
dnl distribute this file as part of a program that contains a
|
||||
dnl configuration script generated by Autoconf, you may include it under
|
||||
dnl the same distribution terms that you use for the rest of that
|
||||
dnl program.
|
||||
|
||||
dnl PKG_PREREQ(MIN-VERSION)
|
||||
dnl -----------------------
|
||||
dnl Since: 0.29
|
||||
dnl
|
||||
dnl Verify that the version of the pkg-config macros are at least
|
||||
dnl MIN-VERSION. Unlike PKG_PROG_PKG_CONFIG, which checks the user's
|
||||
dnl installed version of pkg-config, this checks the developer's version
|
||||
dnl of pkg.m4 when generating configure.
|
||||
dnl
|
||||
dnl To ensure that this macro is defined, also add:
|
||||
dnl m4_ifndef([PKG_PREREQ],
|
||||
dnl [m4_fatal([must install pkg-config 0.29 or later before running autoconf/autogen])])
|
||||
dnl
|
||||
dnl See the "Since" comment for each macro you use to see what version
|
||||
dnl of the macros you require.
|
||||
m4_defun([PKG_PREREQ],
|
||||
[m4_define([PKG_MACROS_VERSION], [0.29])
|
||||
m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1,
|
||||
[m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])])
|
||||
])dnl PKG_PREREQ
|
||||
|
||||
dnl PKG_PROG_PKG_CONFIG([MIN-VERSION])
|
||||
dnl ----------------------------------
|
||||
dnl Since: 0.16
|
||||
dnl
|
||||
dnl Search for the pkg-config tool and set the PKG_CONFIG variable to
|
||||
dnl first found in the path. Checks that the version of pkg-config found
|
||||
dnl is at least MIN-VERSION. If MIN-VERSION is not specified, 0.9.0 is
|
||||
dnl used since that's the first version where most current features of
|
||||
dnl pkg-config existed.
|
||||
AC_DEFUN([PKG_PROG_PKG_CONFIG],
|
||||
[m4_pattern_forbid([^_?PKG_[A-Z_]+$])
|
||||
m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$])
|
||||
m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$])
|
||||
AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility])
|
||||
AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path])
|
||||
AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path])
|
||||
|
||||
if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then
|
||||
AC_PATH_TOOL([PKG_CONFIG], [pkg-config])
|
||||
fi
|
||||
if test -n "$PKG_CONFIG"; then
|
||||
_pkg_min_version=m4_default([$1], [0.9.0])
|
||||
AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version])
|
||||
if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then
|
||||
AC_MSG_RESULT([yes])
|
||||
else
|
||||
AC_MSG_RESULT([no])
|
||||
PKG_CONFIG=""
|
||||
fi
|
||||
fi[]dnl
|
||||
])dnl PKG_PROG_PKG_CONFIG
|
||||
|
||||
dnl PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
|
||||
dnl -------------------------------------------------------------------
|
||||
dnl Since: 0.18
|
||||
dnl
|
||||
dnl Check to see whether a particular set of modules exists. Similar to
|
||||
dnl PKG_CHECK_MODULES(), but does not set variables or print errors.
|
||||
dnl
|
||||
dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG])
|
||||
dnl only at the first occurence in configure.ac, so if the first place
|
||||
dnl it's called might be skipped (such as if it is within an "if", you
|
||||
dnl have to call PKG_CHECK_EXISTS manually
|
||||
AC_DEFUN([PKG_CHECK_EXISTS],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
if test -n "$PKG_CONFIG" && \
|
||||
AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then
|
||||
m4_default([$2], [:])
|
||||
m4_ifvaln([$3], [else
|
||||
$3])dnl
|
||||
fi])
|
||||
|
||||
dnl _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES])
|
||||
dnl ---------------------------------------------
|
||||
dnl Internal wrapper calling pkg-config via PKG_CONFIG and setting
|
||||
dnl pkg_failed based on the result.
|
||||
m4_define([_PKG_CONFIG],
|
||||
[if test -n "$$1"; then
|
||||
pkg_cv_[]$1="$$1"
|
||||
elif test -n "$PKG_CONFIG"; then
|
||||
PKG_CHECK_EXISTS([$3],
|
||||
[pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null`
|
||||
test "x$?" != "x0" && pkg_failed=yes ],
|
||||
[pkg_failed=yes])
|
||||
else
|
||||
pkg_failed=untried
|
||||
fi[]dnl
|
||||
])dnl _PKG_CONFIG
|
||||
|
||||
dnl _PKG_SHORT_ERRORS_SUPPORTED
|
||||
dnl ---------------------------
|
||||
dnl Internal check to see if pkg-config supports short errors.
|
||||
AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])
|
||||
if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then
|
||||
_pkg_short_errors_supported=yes
|
||||
else
|
||||
_pkg_short_errors_supported=no
|
||||
fi[]dnl
|
||||
])dnl _PKG_SHORT_ERRORS_SUPPORTED
|
||||
|
||||
|
||||
dnl PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
|
||||
dnl [ACTION-IF-NOT-FOUND])
|
||||
dnl --------------------------------------------------------------
|
||||
dnl Since: 0.4.0
|
||||
dnl
|
||||
dnl Note that if there is a possibility the first call to
|
||||
dnl PKG_CHECK_MODULES might not happen, you should be sure to include an
|
||||
dnl explicit call to PKG_PROG_PKG_CONFIG in your configure.ac
|
||||
AC_DEFUN([PKG_CHECK_MODULES],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl
|
||||
AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl
|
||||
|
||||
pkg_failed=no
|
||||
AC_MSG_CHECKING([for $1])
|
||||
|
||||
_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2])
|
||||
_PKG_CONFIG([$1][_LIBS], [libs], [$2])
|
||||
|
||||
m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS
|
||||
and $1[]_LIBS to avoid the need to call pkg-config.
|
||||
See the pkg-config man page for more details.])
|
||||
|
||||
if test $pkg_failed = yes; then
|
||||
AC_MSG_RESULT([no])
|
||||
_PKG_SHORT_ERRORS_SUPPORTED
|
||||
if test $_pkg_short_errors_supported = yes; then
|
||||
$1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1`
|
||||
else
|
||||
$1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1`
|
||||
fi
|
||||
# Put the nasty error message in config.log where it belongs
|
||||
echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD
|
||||
|
||||
m4_default([$4], [AC_MSG_ERROR(
|
||||
[Package requirements ($2) were not met:
|
||||
|
||||
$$1_PKG_ERRORS
|
||||
|
||||
Consider adjusting the PKG_CONFIG_PATH environment variable if you
|
||||
installed software in a non-standard prefix.
|
||||
|
||||
_PKG_TEXT])[]dnl
|
||||
])
|
||||
elif test $pkg_failed = untried; then
|
||||
AC_MSG_RESULT([no])
|
||||
m4_default([$4], [AC_MSG_FAILURE(
|
||||
[The pkg-config script could not be found or is too old. Make sure it
|
||||
is in your PATH or set the PKG_CONFIG environment variable to the full
|
||||
path to pkg-config.
|
||||
|
||||
_PKG_TEXT
|
||||
|
||||
To get pkg-config, see <http://pkg-config.freedesktop.org/>.])[]dnl
|
||||
])
|
||||
else
|
||||
$1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS
|
||||
$1[]_LIBS=$pkg_cv_[]$1[]_LIBS
|
||||
AC_MSG_RESULT([yes])
|
||||
$3
|
||||
fi[]dnl
|
||||
])dnl PKG_CHECK_MODULES
|
||||
|
||||
|
||||
dnl PKG_CHECK_MODULES_STATIC(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND],
|
||||
dnl [ACTION-IF-NOT-FOUND])
|
||||
dnl ---------------------------------------------------------------------
|
||||
dnl Since: 0.29
|
||||
dnl
|
||||
dnl Checks for existence of MODULES and gathers its build flags with
|
||||
dnl static libraries enabled. Sets VARIABLE-PREFIX_CFLAGS from --cflags
|
||||
dnl and VARIABLE-PREFIX_LIBS from --libs.
|
||||
dnl
|
||||
dnl Note that if there is a possibility the first call to
|
||||
dnl PKG_CHECK_MODULES_STATIC might not happen, you should be sure to
|
||||
dnl include an explicit call to PKG_PROG_PKG_CONFIG in your
|
||||
dnl configure.ac.
|
||||
AC_DEFUN([PKG_CHECK_MODULES_STATIC],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
_save_PKG_CONFIG=$PKG_CONFIG
|
||||
PKG_CONFIG="$PKG_CONFIG --static"
|
||||
PKG_CHECK_MODULES($@)
|
||||
PKG_CONFIG=$_save_PKG_CONFIG[]dnl
|
||||
])dnl PKG_CHECK_MODULES_STATIC
|
||||
|
||||
|
||||
dnl PKG_INSTALLDIR([DIRECTORY])
|
||||
dnl -------------------------
|
||||
dnl Since: 0.27
|
||||
dnl
|
||||
dnl Substitutes the variable pkgconfigdir as the location where a module
|
||||
dnl should install pkg-config .pc files. By default the directory is
|
||||
dnl $libdir/pkgconfig, but the default can be changed by passing
|
||||
dnl DIRECTORY. The user can override through the --with-pkgconfigdir
|
||||
dnl parameter.
|
||||
AC_DEFUN([PKG_INSTALLDIR],
|
||||
[m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])])
|
||||
m4_pushdef([pkg_description],
|
||||
[pkg-config installation directory @<:@]pkg_default[@:>@])
|
||||
AC_ARG_WITH([pkgconfigdir],
|
||||
[AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],,
|
||||
[with_pkgconfigdir=]pkg_default)
|
||||
AC_SUBST([pkgconfigdir], [$with_pkgconfigdir])
|
||||
m4_popdef([pkg_default])
|
||||
m4_popdef([pkg_description])
|
||||
])dnl PKG_INSTALLDIR
|
||||
|
||||
|
||||
dnl PKG_NOARCH_INSTALLDIR([DIRECTORY])
|
||||
dnl --------------------------------
|
||||
dnl Since: 0.27
|
||||
dnl
|
||||
dnl Substitutes the variable noarch_pkgconfigdir as the location where a
|
||||
dnl module should install arch-independent pkg-config .pc files. By
|
||||
dnl default the directory is $datadir/pkgconfig, but the default can be
|
||||
dnl changed by passing DIRECTORY. The user can override through the
|
||||
dnl --with-noarch-pkgconfigdir parameter.
|
||||
AC_DEFUN([PKG_NOARCH_INSTALLDIR],
|
||||
[m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])])
|
||||
m4_pushdef([pkg_description],
|
||||
[pkg-config arch-independent installation directory @<:@]pkg_default[@:>@])
|
||||
AC_ARG_WITH([noarch-pkgconfigdir],
|
||||
[AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],,
|
||||
[with_noarch_pkgconfigdir=]pkg_default)
|
||||
AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir])
|
||||
m4_popdef([pkg_default])
|
||||
m4_popdef([pkg_description])
|
||||
])dnl PKG_NOARCH_INSTALLDIR
|
||||
|
||||
|
||||
dnl PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE,
|
||||
dnl [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
|
||||
dnl -------------------------------------------
|
||||
dnl Since: 0.28
|
||||
dnl
|
||||
dnl Retrieves the value of the pkg-config variable for the given module.
|
||||
AC_DEFUN([PKG_CHECK_VAR],
|
||||
[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl
|
||||
AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl
|
||||
|
||||
_PKG_CONFIG([$1], [variable="][$3]["], [$2])
|
||||
AS_VAR_COPY([$1], [pkg_cv_][$1])
|
||||
|
||||
AS_VAR_IF([$1], [""], [$5], [$4])dnl
|
||||
])dnl PKG_CHECK_VAR
|
||||
|
67
config
|
@ -1,6 +1,61 @@
|
|||
# Directory where the files will be synced
|
||||
sync_dir = "~/OneDrive"
|
||||
# Skip files and directories that match this pattern
|
||||
skip_file = ".*|~*"
|
||||
# Wait time (seconds) between sync operations in monitor mode
|
||||
monitor_interval = "45"
|
||||
# Configuration for OneDrive Linux Client
|
||||
# This file contains the list of supported configuration fields
|
||||
# with their default values.
|
||||
# All values need to be enclosed in quotes
|
||||
# When changing a config option below, remove the '#' from the start of the line
|
||||
# For explanations of all config options below see docs/USAGE.md or the man page.
|
||||
#
|
||||
# sync_dir = "~/OneDrive"
|
||||
# skip_file = "~*|.~*|*.tmp"
|
||||
# monitor_interval = "300"
|
||||
# skip_dir = ""
|
||||
# log_dir = "/var/log/onedrive/"
|
||||
# drive_id = ""
|
||||
# upload_only = "false"
|
||||
# check_nomount = "false"
|
||||
# check_nosync = "false"
|
||||
# download_only = "false"
|
||||
# disable_notifications = "false"
|
||||
# disable_upload_validation = "false"
|
||||
# enable_logging = "false"
|
||||
# force_http_11 = "false"
|
||||
# local_first = "false"
|
||||
# no_remote_delete = "false"
|
||||
# skip_symlinks = "false"
|
||||
# debug_https = "false"
|
||||
# skip_dotfiles = "false"
|
||||
# skip_size = "1000"
|
||||
# dry_run = "false"
|
||||
# min_notify_changes = "5"
|
||||
# monitor_log_frequency = "6"
|
||||
# monitor_fullscan_frequency = "12"
|
||||
# sync_root_files = "false"
|
||||
# classify_as_big_delete = "1000"
|
||||
# user_agent = ""
|
||||
# remove_source_files = "false"
|
||||
# skip_dir_strict_match = "false"
|
||||
# application_id = ""
|
||||
# resync = "false"
|
||||
# resync_auth = "false"
|
||||
# bypass_data_preservation = "false"
|
||||
# azure_ad_endpoint = ""
|
||||
# azure_tenant_id = "common"
|
||||
# sync_business_shared_folders = "false"
|
||||
# sync_dir_permissions = "700"
|
||||
# sync_file_permissions = "600"
|
||||
# rate_limit = "131072"
|
||||
# webhook_enabled = "false"
|
||||
# webhook_public_url = ""
|
||||
# webhook_listening_host = ""
|
||||
# webhook_listening_port = "8888"
|
||||
# webhook_expiration_interval = "86400"
|
||||
# webhook_renewal_interval = "43200"
|
||||
# space_reservation = "50"
|
||||
# display_running_config = "false"
|
||||
# read_only_auth_scope = "false"
|
||||
# cleanup_local_files = "false"
|
||||
# operation_timeout = "3600"
|
||||
# dns_timeout = "60"
|
||||
# connect_timeout = "10"
|
||||
# data_timeout = "600"
|
||||
# ip_protocol_version = "0"
|
||||
|
|
288
configure.ac
Normal file
|
@ -0,0 +1,288 @@
|
|||
dnl configure.ac for OneDrive Linux Client
|
||||
dnl Copyright 2019 Norbert Preining
|
||||
dnl Licensed GPL v3 or later
|
||||
|
||||
dnl How to make a release
|
||||
dnl - increase the version number in the AC_INIT call below
|
||||
dnl - run autoconf which generates configure
|
||||
dnl - commit the changed files (configure.ac, configure)
|
||||
dnl - tag the release
|
||||
|
||||
AC_PREREQ([2.69])
|
||||
AC_INIT([onedrive],[v2.4.25], [https://github.com/abraunegg/onedrive], [onedrive])
|
||||
AC_CONFIG_SRCDIR([src/main.d])
|
||||
|
||||
|
||||
AC_ARG_VAR([DC], [D compiler executable])
|
||||
AC_ARG_VAR([DCFLAGS], [flags for D compiler])
|
||||
|
||||
dnl necessary programs: install, pkg-config
|
||||
AC_PROG_INSTALL
|
||||
PKG_PROG_PKG_CONFIG
|
||||
|
||||
dnl Determine D compiler
|
||||
dnl we check for dmd, ldmd2, and ldc2 in this order
|
||||
dnl furthermore, we set DC_TYPE to either dmd or ldc and export this into the
|
||||
dnl Makefile so that we can adjust command line arguments
|
||||
AC_CHECK_PROGS([DC], [dmd ldmd2 ldc2], NOT_FOUND)
|
||||
DC_TYPE=
|
||||
case $(basename $DC) in
|
||||
dmd) DC_TYPE=dmd ;;
|
||||
ldmd2) DC_TYPE=dmd ;;
|
||||
ldc2) DC_TYPE=ldc ;;
|
||||
NOT_FOUND) AC_MSG_ERROR(Could not find any compatible D compiler, 1)
|
||||
esac
|
||||
|
||||
dnl dash/POSIX version of version comparison
|
||||
vercomp () {
|
||||
IFS=. read -r a0 a1 a2 aa <<EOF
|
||||
$1
|
||||
EOF
|
||||
IFS=. read -r b0 b1 b2 bb <<EOF
|
||||
$2
|
||||
EOF
|
||||
# leading 0 are ignored: 01 == 1, this also
|
||||
# converts empty strings into 0: 1..2 == 1.0.2
|
||||
a0=$(expr $a0 + 0)
|
||||
a1=$(expr $a1 + 0)
|
||||
a2=$(expr $a2 + 0)
|
||||
b0=$(expr $b0 + 0)
|
||||
b1=$(expr $b1 + 0)
|
||||
b2=$(expr $b2 + 0)
|
||||
#echo "$1 parsed to a=$a0 b=$a1 c=$a2 rest=$aa"
|
||||
#echo "$2 parsed to a=$b0 b=$b1 c=$b2 rest=$bb"
|
||||
if test $a0 -lt $b0
|
||||
then
|
||||
return 2
|
||||
elif test $a0 -gt $b0
|
||||
then
|
||||
return 1
|
||||
else
|
||||
if test $a1 -lt $b1
|
||||
then
|
||||
return 2
|
||||
elif test $a1 -gt $b1
|
||||
then
|
||||
return 1
|
||||
else
|
||||
if test $a2 -lt $b2
|
||||
then
|
||||
return 2
|
||||
elif test $a2 -gt $b2
|
||||
then
|
||||
return 1
|
||||
else
|
||||
if test $aa '<' $bb
|
||||
then
|
||||
return 2
|
||||
elif test $aa '>' $bb
|
||||
then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
DO_VERSION_CHECK=1
|
||||
AC_ARG_ENABLE(version-check,
|
||||
AS_HELP_STRING([--disable-version-check], [Disable checks of compiler version during configure time]))
|
||||
AS_IF([test "x$enable_version_check" = "xno"], DO_VERSION_CHECK=0,)
|
||||
|
||||
AS_IF([test "$DO_VERSION_CHECK" = "1"],
|
||||
[ dnl do the version check
|
||||
AC_MSG_CHECKING([version of D compiler])
|
||||
# check for valid versions
|
||||
case $(basename $DC) in
|
||||
ldmd2|ldc2)
|
||||
# LDC - the LLVM D compiler (1.12.0): ...
|
||||
VERSION=`$DC --version`
|
||||
# remove everything up to first (
|
||||
VERSION=${VERSION#* (}
|
||||
# remove everything after ):
|
||||
VERSION=${VERSION%%):*}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=1.18.0
|
||||
;;
|
||||
dmd)
|
||||
# DMD64 D Compiler v2.085.1\n...
|
||||
VERSION=`$DC --version | tr '\n' ' '`
|
||||
VERSION=${VERSION#*Compiler v}
|
||||
VERSION=${VERSION%% *}
|
||||
# now version should be something like L.M.N
|
||||
MINVERSION=2.088.0
|
||||
;;
|
||||
esac
|
||||
|
||||
AC_MSG_RESULT([$VERSION])
|
||||
|
||||
vercomp $MINVERSION $VERSION
|
||||
if test $? = 1
|
||||
then
|
||||
AC_MSG_ERROR([Compiler version insufficient, current compiler version $VERSION, minimum version $MINVERSION], 1)
|
||||
fi
|
||||
#echo "MINVERSION=$MINVERSION VERSION=$VERSION"
|
||||
])
|
||||
|
||||
|
||||
|
||||
AC_SUBST([DC_TYPE])
|
||||
dnl In case the environment variable DCFLAGS is set, we export it to the
|
||||
dnl generated Makefile at configure run:
|
||||
AC_SUBST([DCFLAGS])
|
||||
|
||||
dnl The package date is only used in the man page onedrive.1.in
|
||||
dnl we generate onedrive.1 from it during configure run, but we want
|
||||
dnl to have the same date, namely the one when the configure script
|
||||
dnl was generated from the configure.ac (i.e., on release time).
|
||||
dnl this uses a call to the underlying m4 engine to call an external cmd
|
||||
PACKAGE_DATE="m4_esyscmd([date "+%B %Y" | tr -d '\n'])"
|
||||
AC_SUBST([PACKAGE_DATE])
|
||||
|
||||
dnl Check for required modules: curl and sqlite at the moment
|
||||
PKG_CHECK_MODULES([curl],[libcurl])
|
||||
PKG_CHECK_MODULES([sqlite],[sqlite3])
|
||||
|
||||
dnl
|
||||
dnl systemd and unit file directories
|
||||
dnl This is a bit tricky, because we want to allow for
|
||||
dnl --with-systemdsystemunitdir=auto
|
||||
dnl as well as =/path/to/dir
|
||||
dnl The first step is that we check whether the --with options is passed to configure run
|
||||
dnl if yes, we don't do anything (the ,, at the end of the next line), and if not, we
|
||||
dnl set with_systemdsystemunitdir=auto, meaning we will try pkg-config to find the correct
|
||||
dnl value.
|
||||
AC_ARG_WITH([systemdsystemunitdir],
|
||||
[AS_HELP_STRING([--with-systemdsystemunitdir=DIR], [Directory for systemd system service files])],,
|
||||
[with_systemdsystemunitdir=auto])
|
||||
dnl If no value is passed in (or auto/yes is passed in), then we try to find the correct
|
||||
dnl value via pkg-config and put it into $def_systemdsystemunitdir
|
||||
AS_IF([test "x$with_systemdsystemunitdir" = "xyes" -o "x$with_systemdsystemunitdir" = "xauto"],
|
||||
[ dnl true part, so try to determine with pkg-config
|
||||
def_systemdsystemunitdir=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
|
||||
dnl if we cannot find it via pkg-config, *and* the user explicitely passed it in with,
|
||||
dnl we warn, and in all cases we unset (set to no) the respective variable
|
||||
AS_IF([test "x$def_systemdsystemunitdir" = "x"],
|
||||
[ dnl we couldn't find the default value via pkg-config
|
||||
AS_IF([test "x$with_systemdsystemunitdir" = "xyes"],
|
||||
[AC_MSG_ERROR([systemd support requested but pkg-config unable to query systemd package])])
|
||||
with_systemdsystemunitdir=no
|
||||
],
|
||||
[ dnl pkg-config found the value, use it
|
||||
with_systemdsystemunitdir="$def_systemdsystemunitdir"
|
||||
]
|
||||
)
|
||||
]
|
||||
)
|
||||
dnl finally, if we found a value, put it into the generated Makefile
|
||||
AS_IF([test "x$with_systemdsystemunitdir" != "xno"],
|
||||
[AC_SUBST([systemdsystemunitdir], [$with_systemdsystemunitdir])])
|
||||
|
||||
dnl Now do the same as above for systemduserunitdir!
|
||||
AC_ARG_WITH([systemduserunitdir],
|
||||
[AS_HELP_STRING([--with-systemduserunitdir=DIR], [Directory for systemd user service files])],,
|
||||
[with_systemduserunitdir=auto])
|
||||
AS_IF([test "x$with_systemduserunitdir" = "xyes" -o "x$with_systemduserunitdir" = "xauto"],
|
||||
[
|
||||
def_systemduserunitdir=$($PKG_CONFIG --variable=systemduserunitdir systemd)
|
||||
AS_IF([test "x$def_systemduserunitdir" = "x"],
|
||||
[
|
||||
AS_IF([test "x$with_systemduserunitdir" = "xyes"],
|
||||
[AC_MSG_ERROR([systemd support requested but pkg-config unable to query systemd package])])
|
||||
with_systemduserunitdir=no
|
||||
],
|
||||
[
|
||||
with_systemduserunitdir="$def_systemduserunitdir"
|
||||
]
|
||||
)
|
||||
]
|
||||
)
|
||||
AS_IF([test "x$with_systemduserunitdir" != "xno"],
|
||||
[AC_SUBST([systemduserunitdir], [$with_systemduserunitdir])])
|
||||
|
||||
dnl We enable systemd integration only if we have found both user/system unit dirs
|
||||
AS_IF([test "x$with_systemduserunitdir" != "xno" -a "x$with_systemdsystemunitdir" != "xno"],
|
||||
[havesystemd=yes], [havesystemd=no])
|
||||
AC_SUBST([HAVE_SYSTEMD], $havesystemd)
|
||||
|
||||
|
||||
|
||||
dnl
|
||||
dnl Notification support
|
||||
dnl only check for libnotify if --enable-notifications is given
|
||||
AC_ARG_ENABLE(notifications,
|
||||
AS_HELP_STRING([--enable-notifications], [Enable desktop notifications via libnotify]))
|
||||
|
||||
AS_IF([test "x$enable_notifications" = "xyes"], [enable_notifications=yes], [enable_notifications=no])
|
||||
|
||||
dnl if --enable-notifications was given, check for libnotify, and disable if not found
|
||||
dnl otherwise substitute the notifu
|
||||
AS_IF([test "x$enable_notifications" = "xyes"],
|
||||
[PKG_CHECK_MODULES(notify,libnotify,,enable_notifications=no)],
|
||||
[AC_SUBST([notify_LIBS],"")])
|
||||
AC_SUBST([NOTIFICATIONS],$enable_notifications)
|
||||
|
||||
dnl
|
||||
dnl Completion support
|
||||
dnl First determine whether completions are requested, pass that to Makefile
|
||||
AC_ARG_ENABLE([completions],
|
||||
AS_HELP_STRING([--enable-completions], [Install shell completions for bash, zsh, and fish]))
|
||||
|
||||
AS_IF([test "x$enable_completions" = "xyes"], [enable_completions=yes], [enable_completions=no])
|
||||
|
||||
AC_SUBST([COMPLETIONS],$enable_completions)
|
||||
|
||||
|
||||
dnl if completions are enabled, search for the bash/zsh completion directory in the
|
||||
dnl similar way as we did for the systemd directories
|
||||
AS_IF([test "x$enable_completions" = "xyes"],[
|
||||
AC_ARG_WITH([bash-completion-dir],
|
||||
[AS_HELP_STRING([--with-bash-completion-dir=DIR], [Directory for bash completion files])],
|
||||
,
|
||||
[with_bash_completion_dir=auto])
|
||||
AS_IF([test "x$with_bash_completion_dir" = "xyes" -o "x$with_bash_completion_dir" = "xauto"],
|
||||
[
|
||||
PKG_CHECK_VAR(bashcompdir, [bash-completion], [completionsdir], ,
|
||||
bashcompdir="${sysconfdir}/bash_completion.d")
|
||||
with_bash_completion_dir=$bashcompdir
|
||||
])
|
||||
AC_SUBST([BASH_COMPLETION_DIR], $with_bash_completion_dir)
|
||||
|
||||
AC_ARG_WITH([zsh-completion-dir],
|
||||
[AS_HELP_STRING([--with-zsh-completion-dir=DIR], [Directory for zsh completion files])],,
|
||||
[with_zsh_completion_dir=auto])
|
||||
AS_IF([test "x$with_zsh_completion_dir" = "xyes" -o "x$with_zsh_completion_dir" = "xauto"],
|
||||
[
|
||||
with_zsh_completion_dir="/usr/local/share/zsh/site-functions"
|
||||
])
|
||||
AC_SUBST([ZSH_COMPLETION_DIR], $with_zsh_completion_dir)
|
||||
|
||||
AC_ARG_WITH([fish-completion-dir],
|
||||
[AS_HELP_STRING([--with-fish-completion-dir=DIR], [Directory for fish completion files])],,
|
||||
[with_fish_completion_dir=auto])
|
||||
AS_IF([test "x$with_fish_completion_dir" = "xyes" -o "x$with_fish_completion_dir" = "xauto"],
|
||||
[
|
||||
with_fish_completion_dir="/usr/local/share/fish/completions"
|
||||
])
|
||||
AC_SUBST([FISH_COMPLETION_DIR], $with_fish_completion_dir)
|
||||
|
||||
])
|
||||
|
||||
dnl
|
||||
dnl Debug support
|
||||
AC_ARG_ENABLE(debug,
|
||||
AS_HELP_STRING([--enable-debug], [Pass debug option to the compiler]))
|
||||
AS_IF([test "x$enable_debug" = "xyes"], AC_SUBST([DEBUG],yes), AC_SUBST([DEBUG],no))
|
||||
|
||||
dnl generate necessary files
|
||||
AC_CONFIG_FILES([
|
||||
Makefile
|
||||
contrib/pacman/PKGBUILD
|
||||
contrib/spec/onedrive.spec
|
||||
onedrive.1
|
||||
contrib/systemd/onedrive.service
|
||||
contrib/systemd/onedrive@.service
|
||||
])
|
||||
AC_OUTPUT
|
49
contrib/completions/complete.bash
Normal file
|
@ -0,0 +1,49 @@
|
|||
# BASH completion code for OneDrive Linux Client
|
||||
# (c) 2019 Norbert Preining
|
||||
# License: GPLv3+ (as with the rest of the OneDrive Linux client project)
|
||||
|
||||
_onedrive()
|
||||
{
|
||||
local cur prev
|
||||
|
||||
COMPREPLY=()
|
||||
cur=${COMP_WORDS[COMP_CWORD]}
|
||||
prev=${COMP_WORDS[COMP_CWORD-1]}
|
||||
|
||||
options='--check-for-nomount --check-for-nosync --debug-https --disable-notifications --display-config --display-sync-status --download-only --disable-upload-validation --dry-run --enable-logging --force-http-1.1 --force-http-2 --get-file-link --local-first --logout -m --monitor --no-remote-delete --print-token --reauth --resync --skip-dot-files --skip-symlinks --synchronize --upload-only -v --verbose --version -h --help'
|
||||
argopts='--create-directory --get-O365-drive-id --operation-timeout --remove-directory --single-directory --source-directory'
|
||||
|
||||
# Loop on the arguments to manage conflicting options
|
||||
for (( i=0; i < ${#COMP_WORDS[@]}-1; i++ )); do
|
||||
#exclude some mutually exclusive options
|
||||
[[ ${COMP_WORDS[i]} == '--synchronize' ]] && options=${options/--monitor}
|
||||
[[ ${COMP_WORDS[i]} == '--monitor' ]] && options=${options/--synchronize}
|
||||
done
|
||||
|
||||
case "$prev" in
|
||||
--confdir|--syncdir)
|
||||
_filedir
|
||||
return 0
|
||||
;;
|
||||
|
||||
--get-file-link)
|
||||
if command -v sed &> /dev/null; then
|
||||
pushd "$(onedrive --display-config | sed -n "/sync_dir/s/.*= //p")" &> /dev/null
|
||||
_filedir
|
||||
popd &> /dev/null
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--create-directory|--get-O365-drive-id|--operation-timeout|--remove-directory|--single-directory|--source-directory)
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=( $( compgen -W "$options $argopts" -- "$cur"))
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
|
||||
# notreached
|
||||
return 0
|
||||
}
|
||||
complete -F _onedrive onedrive
|
38
contrib/completions/complete.fish
Normal file
|
@ -0,0 +1,38 @@
|
|||
# FISH completions for OneDrive Linux Client
|
||||
# License: GPLv3+ (as with the rest of the OneDrive Linux client project)
|
||||
|
||||
complete -c onedrive -f
|
||||
|
||||
complete -c onedrive -l check-for-nomount -d 'Check for the presence of .nosync in the syncdir root. If found, do not perform sync.'
|
||||
complete -c onedrive -l check-for-nosync -d 'Check for the presence of .nosync in each directory. If found, skip directory from sync.'
|
||||
complete -c onedrive -l create-directory -d 'Create a directory on OneDrive - no sync will be performed.'
|
||||
complete -c onedrive -l debug-https -d 'Debug OneDrive HTTPS communication.'
|
||||
complete -c onedrive -l disable-notifications -d 'Do not use desktop notifications in monitor mode.'
|
||||
complete -c onedrive -l disable-upload-validation -d 'Disable upload validation when uploading to OneDrive.'
|
||||
complete -c onedrive -l display-config -d 'Display what options the client will use as currently configured - no sync will be performed.'
|
||||
complete -c onedrive -l display-sync-status -d 'Display the sync status of the client - no sync will be performed.'
|
||||
complete -c onedrive -l download-only -d 'Only download remote changes.'
|
||||
complete -c onedrive -l dry-run -d 'Perform a trial sync with no changes made.'
|
||||
complete -c onedrive -l enable-logging -d 'Enable client activity to a separate log file.'
|
||||
complete -c onedrive -l force-http-1.1 -d 'Force the use of HTTP 1.1 for all operations.'
|
||||
complete -c onedrive -l force-http-2 -d 'Force the use of HTTP 2 for all operations.'
|
||||
complete -c onedrive -l get-file-link -d 'Display the file link of a synced file.'
|
||||
complete -c onedrive -l get-O365-drive-id -d 'Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library.'
|
||||
complete -c onedrive -s h -l help -d 'Print help information.'
|
||||
complete -c onedrive -l local-first -d 'Synchronize from the local directory source first, before downloading changes from OneDrive.'
|
||||
complete -c onedrive -l logout -d 'Logout the current user.'
|
||||
complete -c onedrive -n "not __fish_seen_subcommand_from --synchronize" -a "-m --monitor" -d 'Keep monitoring for local and remote changes.'
|
||||
complete -c onedrive -l no-remote-delete -d 'Do not delete local file deletes from OneDrive when using --upload-only.'
|
||||
complete -c onedrive -l operation-timeout -d 'Specify the maximum amount of time (in seconds) an operation is allowed to take.'
|
||||
complete -c onedrive -l print-token -d 'Print the access token, useful for debugging.'
|
||||
complete -c onedrive -l remote-directory -d 'Remove a directory on OneDrive - no sync will be performed.'
|
||||
complete -c onedrive -l reauth -d 'Reauthenticate the client with OneDrive.'
|
||||
complete -c onedrive -l resync -d 'Forget the last saved state, perform a full sync.'
|
||||
complete -c onedrive -l single-directory -d 'Specify a single local directory within the OneDrive root to sync.'
|
||||
complete -c onedrive -l skip-dot-files -d 'Skip dot files and folders from syncing.'
|
||||
complete -c onedrive -l skip-symlinks -d 'Skip syncing of symlinks.'
|
||||
complete -c onedrive -l source-directory -d 'Source directory to rename or move on OneDrive - no sync will be performed.'
|
||||
complete -c onedrive -n "not __fish_seen_subcommand_from --monitor; and not __fish_seen_subcommand_from -m" -l synchronize -d 'Perform a synchronization.'
|
||||
complete -c onedrive -l upload-only -d 'Only upload to OneDrive, do not sync changes from OneDrive locally'
|
||||
complete -c onedrive -s v -l verbose -d 'Print more details, useful for debugging (repeat for extra debugging).'
|
||||
complete -c onedrive -l version -d 'Print the version and exit.'
|
48
contrib/completions/complete.zsh
Normal file
|
@ -0,0 +1,48 @@
|
|||
#compdef onedrive
|
||||
#
|
||||
# ZSH completion code for OneDrive Linux Client
|
||||
# (c) 2019 Norbert Preining
|
||||
# License: GPLv3+ (as with the rest of the OneDrive Linux client project)
|
||||
|
||||
local -a all_opts
|
||||
all_opts=(
|
||||
'--check-for-nomount[Check for the presence of .nosync in the syncdir root. If found, do not perform sync.]'
|
||||
'--check-for-nosync[Check for the presence of .nosync in each directory. If found, skip directory from sync.]'
|
||||
'--confdir[Set the directory used to store the configuration files]:config directory:_files -/'
|
||||
'--create-directory[Create a directory on OneDrive - no sync will be performed.]:directory name:'
|
||||
'--debug-https[Debug OneDrive HTTPS communication.]'
|
||||
'--destination-directory[Destination directory for renamed or move on OneDrive - no sync will be performed.]:directory name:'
|
||||
'--disable-notifications[Do not use desktop notifications in monitor mode.]'
|
||||
'--display-config[Display what options the client will use as currently configured - no sync will be performed.]'
|
||||
'--display-sync-status[Display the sync status of the client - no sync will be performed.]'
|
||||
'--download-only[Only download remote changes]'
|
||||
'--disable-upload-validation[Disable upload validation when uploading to OneDrive]'
|
||||
'--dry-run[Perform a trial sync with no changes made]'
|
||||
'--enable-logging[Enable client activity to a separate log file]'
|
||||
'--force-http-1.1[Force the use of HTTP 1.1 for all operations]'
|
||||
'--force-http-2[Force the use of HTTP 2 for all operations]'
|
||||
'--get-file-link[Display the file link of a synced file.]:file name:'
|
||||
'--get-O365-drive-id[Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library]:'
|
||||
'--local-first[Synchronize from the local directory source first, before downloading changes from OneDrive.]'
|
||||
'--logout[Logout the current user]'
|
||||
'(-m --monitor)'{-m,--monitor}'[Keep monitoring for local and remote changes]'
|
||||
'--no-remote-delete[Do not delete local file deletes from OneDrive when using --upload-only]'
|
||||
'--operation-timeout[Specify the maximum amount of time (in seconds) an operation is allowed to take.]:seconds:'
|
||||
'--print-token[Print the access token, useful for debugging]'
|
||||
'--reauth[Reauthenticate the client with OneDrive]'
|
||||
'--resync[Forget the last saved state, perform a full sync]'
|
||||
'--remove-directory[Remove a directory on OneDrive - no sync will be performed.]:directory name:'
|
||||
'--single-directory[Specify a single local directory within the OneDrive root to sync.]:source directory:_files -/'
|
||||
'--skip-dot-files[Skip dot files and folders from syncing]'
|
||||
'--skip-symlinks[Skip syncing of symlinks]'
|
||||
'--source-directory[Source directory to rename or move on OneDrive - no sync will be performed.]:source directory:'
|
||||
'--syncdir[Specify the local directory used for synchronization to OneDrive]:sync directory:_files -/'
|
||||
'--synchronize[Perform a synchronization]'
|
||||
'--upload-only[Only upload to OneDrive, do not sync changes from OneDrive locally]'
|
||||
'(-v --verbose)'{-v,--verbose}'[Print more details, useful for debugging (repeat for extra debugging)]'
|
||||
'--version[Print the version and exit]'
|
||||
'(-h --help)'{-h,--help}'[Print help information]'
|
||||
)
|
||||
|
||||
_arguments -S "$all_opts[@]" && return 0
|
||||
|
41
contrib/docker/Dockerfile
Normal file
|
@ -0,0 +1,41 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG FEDORA_VERSION=38
|
||||
ARG DEBIAN_VERSION=bullseye
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
|
||||
FROM golang:${GO_VERSION}-${DEBIAN_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
RUN go install -ldflags "-s -w" github.com/tianon/gosu@${GOSU_VERSION}
|
||||
|
||||
FROM fedora:${FEDORA_VERSION} AS builder-onedrive
|
||||
|
||||
RUN dnf install -y ldc pkgconf libcurl-devel sqlite-devel git
|
||||
|
||||
ENV PKG_CONFIG=/usr/bin/pkgconf
|
||||
|
||||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN ./configure \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
FROM fedora:${FEDORA_VERSION}
|
||||
|
||||
RUN dnf clean all \
|
||||
&& dnf -y update
|
||||
|
||||
RUN dnf install -y libcurl sqlite ldc-libs \
|
||||
&& dnf clean all \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
||||
COPY --from=builder-gosu /go/bin/gosu /usr/local/bin/
|
||||
COPY --from=builder-onedrive /usr/local/bin/onedrive /usr/local/bin/
|
||||
|
||||
COPY contrib/docker/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
38
contrib/docker/Dockerfile-alpine
Normal file
|
@ -0,0 +1,38 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG ALPINE_VERSION=3.18
|
||||
ARG GO_VERSION=1.20
|
||||
ARG GOSU_VERSION=1.16
|
||||
|
||||
FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS builder-gosu
|
||||
ARG GOSU_VERSION
|
||||
RUN go install -ldflags "-s -w" github.com/tianon/gosu@${GOSU_VERSION}
|
||||
|
||||
FROM alpine:${ALPINE_VERSION} AS builder-onedrive
|
||||
|
||||
RUN apk add --update --no-cache alpine-sdk gnupg xz curl-dev sqlite-dev binutils-gold autoconf automake ldc
|
||||
|
||||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN autoreconf -fiv \
|
||||
&& ./configure \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
FROM alpine:${ALPINE_VERSION}
|
||||
|
||||
RUN apk add --upgrade apk-tools \
|
||||
&& apk upgrade --available
|
||||
|
||||
RUN apk add --update --no-cache bash libcurl libgcc shadow sqlite-libs ldc-runtime \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
||||
COPY --from=builder-gosu /go/bin/gosu /usr/local/bin/
|
||||
COPY --from=builder-onedrive /usr/local/bin/onedrive /usr/local/bin/
|
||||
|
||||
COPY contrib/docker/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
36
contrib/docker/Dockerfile-debian
Normal file
|
@ -0,0 +1,36 @@
|
|||
# -*-Dockerfile-*-
|
||||
|
||||
ARG DEBIAN_VERSION=stable
|
||||
|
||||
FROM debian:${DEBIAN_VERSION} AS builder-onedrive
|
||||
|
||||
RUN apt-get clean \
|
||||
&& apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends build-essential curl ca-certificates libcurl4-openssl-dev libsqlite3-dev libxml2-dev pkg-config git ldc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY . /usr/src/onedrive
|
||||
WORKDIR /usr/src/onedrive
|
||||
|
||||
RUN ./configure DC=/usr/bin/ldmd2 \
|
||||
&& make clean \
|
||||
&& make \
|
||||
&& make install
|
||||
|
||||
FROM debian:${DEBIAN_VERSION}-slim
|
||||
|
||||
RUN apt-get clean \
|
||||
&& apt-get update \
|
||||
&& apt-get upgrade -y \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends gosu libcurl4 libsqlite3-0 ca-certificates libphobos2-ldc-shared100 \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
# Fix bug with ssl on armhf: https://serverfault.com/a/1045189
|
||||
&& /usr/bin/c_rehash \
|
||||
&& mkdir -p /onedrive/conf /onedrive/data
|
||||
|
||||
COPY --from=builder-onedrive /usr/local/bin/onedrive /usr/local/bin/
|
||||
|
||||
COPY contrib/docker/entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
132
contrib/docker/entrypoint.sh
Executable file
|
@ -0,0 +1,132 @@
|
|||
#!/bin/bash -eu
|
||||
|
||||
set +H -euo pipefail
|
||||
|
||||
: ${ONEDRIVE_UID:=$(stat /onedrive/data -c '%u')}
|
||||
: ${ONEDRIVE_GID:=$(stat /onedrive/data -c '%g')}
|
||||
|
||||
# Create new group using target GID
|
||||
if ! odgroup="$(getent group "$ONEDRIVE_GID")"; then
|
||||
odgroup='onedrive'
|
||||
groupadd "${odgroup}" -g "$ONEDRIVE_GID"
|
||||
else
|
||||
odgroup=${odgroup%%:*}
|
||||
fi
|
||||
|
||||
# Create new user using target UID
|
||||
if ! oduser="$(getent passwd "$ONEDRIVE_UID")"; then
|
||||
oduser='onedrive'
|
||||
useradd -m "${oduser}" -u "$ONEDRIVE_UID" -g "$ONEDRIVE_GID"
|
||||
else
|
||||
oduser="${oduser%%:*}"
|
||||
usermod -g "${odgroup}" "${oduser}"
|
||||
grep -qv root <( groups "${oduser}" ) || { echo 'ROOT level privileges prohibited!'; exit 1; }
|
||||
fi
|
||||
|
||||
# Default parameters
|
||||
ARGS=(--monitor --confdir /onedrive/conf --syncdir /onedrive/data)
|
||||
echo "Base Args: ${ARGS}"
|
||||
|
||||
# Make Verbose output optional, based on an environment variable
|
||||
if [ "${ONEDRIVE_VERBOSE:=0}" == "1" ]; then
|
||||
echo "# We are being verbose"
|
||||
echo "# Adding --verbose"
|
||||
ARGS=(--verbose ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform debug output, based on an environment variable
|
||||
if [ "${ONEDRIVE_DEBUG:=0}" == "1" ]; then
|
||||
echo "# We are performing debug output"
|
||||
echo "# Adding --verbose --verbose"
|
||||
ARGS=(--verbose --verbose ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform HTTPS debug output, based on an environment variable
|
||||
if [ "${ONEDRIVE_DEBUG_HTTPS:=0}" == "1" ]; then
|
||||
echo "# We are performing HTTPS debug output"
|
||||
echo "# Adding --debug-https"
|
||||
ARGS=(--debug-https ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to perform a resync based on environment variable
|
||||
if [ "${ONEDRIVE_RESYNC:=0}" == "1" ]; then
|
||||
echo "# We are performing a --resync"
|
||||
echo "# Adding --resync --resync-auth"
|
||||
ARGS=(--resync --resync-auth ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in download-only mode based on environment variable
|
||||
if [ "${ONEDRIVE_DOWNLOADONLY:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in download-only mode"
|
||||
echo "# Adding --download-only"
|
||||
ARGS=(--download-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in upload-only mode based on environment variable
|
||||
if [ "${ONEDRIVE_UPLOADONLY:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in upload-only mode"
|
||||
echo "# Adding --upload-only"
|
||||
ARGS=(--upload-only ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to sync in no-remote-delete mode based on environment variable
|
||||
if [ "${ONEDRIVE_NOREMOTEDELETE:=0}" == "1" ]; then
|
||||
echo "# We are synchronizing in no-remote-delete mode"
|
||||
echo "# Adding --no-remote-delete"
|
||||
ARGS=(--no-remote-delete ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to logout based on environment variable
|
||||
if [ "${ONEDRIVE_LOGOUT:=0}" == "1" ]; then
|
||||
echo "# We are logging out"
|
||||
echo "# Adding --logout"
|
||||
ARGS=(--logout ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to re-authenticate based on environment variable
|
||||
if [ "${ONEDRIVE_REAUTH:=0}" == "1" ]; then
|
||||
echo "# We are logging out to perform a reauthentication"
|
||||
echo "# Adding --reauth"
|
||||
ARGS=(--reauth ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to utilize auth files at the provided locations based on environment variable
|
||||
if [ -n "${ONEDRIVE_AUTHFILES:=""}" ]; then
|
||||
echo "# We are using auth files to perform authentication"
|
||||
echo "# Adding --auth-files ARG"
|
||||
ARGS=(--auth-files ${ONEDRIVE_AUTHFILES} ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to utilize provided auth response based on environment variable
|
||||
if [ -n "${ONEDRIVE_AUTHRESPONSE:=""}" ]; then
|
||||
echo "# We are providing the auth response directly to perform authentication"
|
||||
echo "# Adding --auth-response ARG"
|
||||
ARGS=(--auth-response \"${ONEDRIVE_AUTHRESPONSE}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to print the running configuration at application startup
|
||||
if [ "${ONEDRIVE_DISPLAY_CONFIG:=0}" == "1" ]; then
|
||||
echo "# We are printing the application running configuration at application startup"
|
||||
echo "# Adding --display-running-config"
|
||||
ARGS=(--display-running-config ${ARGS[@]})
|
||||
fi
|
||||
|
||||
# Tell client to use sync single dir option
|
||||
if [ -n "${ONEDRIVE_SINGLE_DIRECTORY:=""}" ]; then
|
||||
echo "# We are synchronizing in single-directory mode"
|
||||
echo "# Adding --single-directory ARG"
|
||||
ARGS=(--single-directory \"${ONEDRIVE_SINGLE_DIRECTORY}\" ${ARGS[@]})
|
||||
fi
|
||||
|
||||
if [ ${#} -gt 0 ]; then
|
||||
ARGS=("${@}")
|
||||
fi
|
||||
|
||||
echo "# Launching onedrive"
|
||||
# Only switch user if not running as target uid (ie. Docker)
|
||||
if [ "$ONEDRIVE_UID" = "$(id -u)" ]; then
|
||||
/usr/local/bin/onedrive "${ARGS[@]}"
|
||||
else
|
||||
chown "${oduser}:${odgroup}" /onedrive/data /onedrive/conf
|
||||
exec gosu "${oduser}" /usr/local/bin/onedrive "${ARGS[@]}"
|
||||
fi
|
6
contrib/docker/hooks/post_push
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/bin/bash
|
||||
|
||||
BUILD_DATE=`date "+%Y%m%d%H%M"`
|
||||
|
||||
docker tag ${IMAGE_NAME} "${IMAGE_NAME}-${BUILD_DATE}"
|
||||
docker push "${IMAGE_NAME}-${BUILD_DATE}"
|
|
@ -24,7 +24,8 @@ STOP_TIMEOUT=${STOP_TIMEOUT-5}
|
|||
RETVAL=0
|
||||
|
||||
start() {
|
||||
echo -n $"Starting $APP_NAME: "
|
||||
export PATH=/usr/local/bin/:$PATH
|
||||
echo -n "Starting $APP_NAME: "
|
||||
daemon --user root onedrive_service.sh
|
||||
RETVAL=$?
|
||||
echo
|
||||
|
@ -34,7 +35,7 @@ start() {
|
|||
}
|
||||
|
||||
stop() {
|
||||
echo -n $"Shutting down $APP_NAME: "
|
||||
echo -n "Shutting down $APP_NAME: "
|
||||
killproc onedrive
|
||||
RETVAL=$?
|
||||
echo
|
||||
|
@ -74,7 +75,7 @@ case "$1" in
|
|||
rhstatus
|
||||
;;
|
||||
*)
|
||||
echo $"Usage: $0 {start|stop|restart|reload|status}"
|
||||
echo "Usage: $0 {start|stop|restart|reload|status}"
|
||||
exit 2
|
||||
esac
|
||||
|
5
contrib/init.d/onedrive_service.sh
Normal file
|
@ -0,0 +1,5 @@
|
|||
#!/bin/bash
|
||||
# This script is to assist in starting the onedrive client when using init.d
|
||||
APP_OPTIONS="--monitor --verbose --enable-logging"
|
||||
onedrive "$APP_OPTIONS" > /dev/null 2>&1 &
|
||||
exit 0
|
|
@ -1,10 +1,12 @@
|
|||
# Any OneDrive Client logs configured for here
|
||||
|
||||
/var/log/onedrive/*log {
|
||||
# What user / group should logrotate use?
|
||||
su root users
|
||||
# What user / group should logrotate use?
|
||||
# Logrotate 3.8.9 or greater required otherwise:
|
||||
# "unknown option 'su' -- ignoring line" is generated
|
||||
su root users
|
||||
|
||||
# rotate log files weekly
|
||||
# rotate log files weekly
|
||||
weekly
|
||||
|
||||
# keep 4 weeks worth of backlogs
|
30
contrib/pacman/PKGBUILD.in
Normal file
|
@ -0,0 +1,30 @@
|
|||
pkgname=onedrive
|
||||
pkgver=@PACKAGE_VERSION@
|
||||
pkgrel=1 #patch-level (Increment this when patch is applied)
|
||||
pkgdesc="A free OneDrive Client for Linux. This is a fork of the https://github.com/skilion/onedrive repository"
|
||||
license=("unknown")
|
||||
url="https://github.com/abraunegg/onedrive/"
|
||||
arch=("i686" "x86_64")
|
||||
|
||||
depends=("curl" "gcc-libs" "glibc" "sqlite")
|
||||
makedepends=("dmd" "git" "tar")
|
||||
|
||||
prepare() {
|
||||
cd "$srcdir"
|
||||
wget "https://github.com/abraunegg/onedrive/archive/v$pkgver.tar.gz" -O "$pkgname-$pkgver-patch-$pkgrel.tar.gz" #Pull last commit release
|
||||
tar -xzf "$pkgname-$pkgver-patch-$pkgrel.tar.gz" --one-top-level="$pkgname-$pkgver-patch-$pkgrel" --strip-components 1
|
||||
}
|
||||
|
||||
build() {
|
||||
cd "$pkgname-$pkgver-patch-$pkgrel"
|
||||
git init #Create .git folder required from Makefile
|
||||
git add * #Create .git/index
|
||||
git commit --allow-empty-message -m "" #Create .git/refs/heads/master
|
||||
git tag v$pkgver #Add version tag
|
||||
make PREFIX=/usr onedrive
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "$pkgname-$pkgver-patch-$pkgrel"
|
||||
make PREFIX=/usr DESTDIR="$pkgdir" install
|
||||
}
|
80
contrib/spec/onedrive.spec.in
Normal file
|
@ -0,0 +1,80 @@
|
|||
# Determine based on distribution & version what options & packages to include
|
||||
%if 0%{?fedora} || 0%{?rhel} >= 7
|
||||
%global with_systemd 1
|
||||
%else
|
||||
%global with_systemd 0
|
||||
%endif
|
||||
|
||||
%if 0%{?rhel} >= 7
|
||||
%global rhel_unitdir 1
|
||||
%else
|
||||
%global rhel_unitdir 0
|
||||
%endif
|
||||
|
||||
Name: onedrive
|
||||
Version: 2.4.25
|
||||
Release: 1%{?dist}
|
||||
Summary: Microsoft OneDrive Client
|
||||
Group: System Environment/Network
|
||||
License: GPLv3
|
||||
URL: https://github.com/abraunegg/onedrive
|
||||
Source0: v%{version}.tar.gz
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
|
||||
BuildRequires: dmd >= 2.088.0
|
||||
BuildRequires: sqlite-devel >= 3.7.15
|
||||
BuildRequires: libcurl-devel
|
||||
Requires: sqlite >= 3.7.15
|
||||
Requires: libcurl
|
||||
|
||||
%if 0%{?with_systemd}
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
%else
|
||||
Requires(post): chkconfig
|
||||
Requires(preun): chkconfig
|
||||
Requires(preun): initscripts
|
||||
Requires(postun): initscripts
|
||||
%endif
|
||||
|
||||
%define debug_package %{nil}
|
||||
|
||||
%description
|
||||
Microsoft OneDrive Client for Linux
|
||||
|
||||
%prep
|
||||
|
||||
%setup -q
|
||||
# This creates the directory %{_builddir}/%{name}-%{version}/
|
||||
|
||||
%build
|
||||
# cd %{_builddir}/%{name}-%{version}
|
||||
%configure
|
||||
make
|
||||
|
||||
%install
|
||||
%make_install PREFIX="%{buildroot}"
|
||||
|
||||
%clean
|
||||
|
||||
%files
|
||||
%doc README.md LICENSE CHANGELOG.md
|
||||
%config %{_sysconfdir}/logrotate.d/onedrive
|
||||
%{_mandir}/man1/%{name}.1.gz
|
||||
%{_docdir}/%{name}
|
||||
%{_bindir}/%{name}
|
||||
%if 0%{?with_systemd}
|
||||
%if 0%{?rhel_unitdir}
|
||||
%{_unitdir}/%{name}.service
|
||||
%{_unitdir}/%{name}@.service
|
||||
%else
|
||||
%{_userunitdir}/%{name}.service
|
||||
%{_unitdir}/%{name}@.service
|
||||
%endif
|
||||
%else
|
||||
%{_bindir}/onedrive_service.sh
|
||||
/etc/init.d/onedrive
|
||||
%endif
|
||||
|
||||
%changelog
|
27
contrib/systemd/onedrive.service.in
Normal file
|
@ -0,0 +1,27 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
# Commented out hardenings are disabled because they may not work out of the box on your distribution
|
||||
# If you know what you are doing please try to enable them.
|
||||
|
||||
ProtectSystem=full
|
||||
#PrivateUsers=true
|
||||
#PrivateDevices=true
|
||||
ProtectHostname=true
|
||||
#ProtectClock=true
|
||||
ProtectKernelTunables=true
|
||||
#ProtectKernelModules=true
|
||||
#ProtectKernelLogs=true
|
||||
ProtectControlGroups=true
|
||||
RestrictRealtime=true
|
||||
ExecStart=@prefix@/bin/onedrive --monitor
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
RestartPreventExitStatus=3
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
27
contrib/systemd/onedrive@.service.in
Normal file
|
@ -0,0 +1,27 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client for %i
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
# Commented out hardenings are disabled because they don't work out of the box.
|
||||
# If you know what you are doing please try to enable them.
|
||||
ProtectSystem=full
|
||||
#PrivateDevices=true
|
||||
ProtectHostname=true
|
||||
#ProtectClock=true
|
||||
ProtectKernelTunables=true
|
||||
#ProtectKernelModules=true
|
||||
#ProtectKernelLogs=true
|
||||
ProtectControlGroups=true
|
||||
RestrictRealtime=true
|
||||
ExecStart=@prefix@/bin/onedrive --monitor --confdir=/home/%i/.config/onedrive
|
||||
User=%i
|
||||
Group=users
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
RestartPreventExitStatus=3
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
192
docs/BusinessSharedFolders.md
Normal file
|
@ -0,0 +1,192 @@
|
|||
# How to configure OneDrive Business Shared Folder Sync
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
Syncing OneDrive Business Shared Folders requires additional configuration for your 'onedrive' client:
|
||||
1. List available shared folders to determine which folder you wish to sync & to validate that you have access to that folder
|
||||
2. Create a new file called 'business_shared_folders' in your config directory which contains a list of the shared folders you wish to sync
|
||||
3. Test the configuration using '--dry-run'
|
||||
4. Sync the OneDrive Business Shared folders as required
|
||||
|
||||
## Listing available OneDrive Business Shared Folders
|
||||
List the available OneDrive Business Shared folders with the following command:
|
||||
```text
|
||||
onedrive --list-shared-folders
|
||||
```
|
||||
This will return a listing of all OneDrive Business Shared folders which have been shared with you and by whom. This is important for conflict resolution:
|
||||
```text
|
||||
Initializing the Synchronization Engine ...
|
||||
|
||||
Listing available OneDrive Business Shared Folders:
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder0
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder1
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder2
|
||||
Shared By: Firstname Lastname
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder0
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder1
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
---------------------------------------
|
||||
Shared Folder: SharedFolder2
|
||||
Shared By: Firstname Lastname (user@domain)
|
||||
...
|
||||
```
|
||||
|
||||
## Configuring OneDrive Business Shared Folders
|
||||
1. Create a new file called 'business_shared_folders' in your config directory
|
||||
2. On each new line, list the OneDrive Business Shared Folder you wish to sync
|
||||
```text
|
||||
[alex@centos7full onedrive]$ cat ~/.config/onedrive/business_shared_folders
|
||||
# comment
|
||||
Child Shared Folder
|
||||
# Another comment
|
||||
Top Level to Share
|
||||
[alex@centos7full onedrive]$
|
||||
```
|
||||
3. Validate your configuration with `onedrive --display-config`:
|
||||
```text
|
||||
Configuration file successfully loaded
|
||||
onedrive version = v2.4.3
|
||||
Config path = /home/alex/.config/onedrive-business/
|
||||
Config file found in config path = true
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDriveBusiness
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = true
|
||||
business_shared_folders contents:
|
||||
# comment
|
||||
Child Shared Folder
|
||||
# Another comment
|
||||
Top Level to Share
|
||||
```
|
||||
|
||||
## Performing a sync of OneDrive Business Shared Folders
|
||||
Perform a standalone sync using the following command: `onedrive --synchronize --sync-shared-folders --verbose`:
|
||||
```text
|
||||
onedrive --synchronize --sync-shared-folders --verbose
|
||||
Using 'user' Config Dir: /home/alex/.config/onedrive-business/
|
||||
Using 'system' Config Dir:
|
||||
Configuration file successfully loaded
|
||||
Initializing the OneDrive API ...
|
||||
Configuring Global Azure AD Endpoints
|
||||
Opening the item database ...
|
||||
All operations will be performed in: /home/alex/OneDriveBusiness
|
||||
Application version: v2.4.3
|
||||
Account Type: business
|
||||
Default Drive ID: b!bO8V7s9SSk6r7mWHpIjURotN33W1W2tEv3OXV_oFIdQimEdOHR-1So7CqeT1MfHA
|
||||
Default Root ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Remaining Free Space: 1098316220277
|
||||
Fetching details for OneDrive Root
|
||||
OneDrive Root exists in the database
|
||||
Initializing the Synchronization Engine ...
|
||||
Syncing changes from OneDrive ...
|
||||
Applying changes of Path ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Number of items from OneDrive to process: 0
|
||||
Attempting to sync OneDrive Business Shared Folders
|
||||
Syncing this OneDrive Business Shared Folder: Child Shared Folder
|
||||
OneDrive Business Shared Folder - Shared By: test user
|
||||
Applying changes of Path ID: 01JRXHEZMREEB3EJVHNVHKNN454Q7DFXPR
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/SMPP_Shared
|
||||
Processing 11 OneDrive items to ensure consistent local state
|
||||
Syncing this OneDrive Business Shared Folder: Top Level to Share
|
||||
OneDrive Business Shared Folder - Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
Applying changes of Path ID: 01JRXHEZLRMXHKBYZNOBF3TQOPBXS3VZMA
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 3 OneDrive items for processing from /Top Level to Share/10-Files
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Images
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/JPG
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/PNG
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/SMPP
|
||||
Processing 31 OneDrive items to ensure consistent local state
|
||||
Uploading differences of ~/OneDriveBusiness
|
||||
Processing root
|
||||
The directory has not changed
|
||||
Processing SMPP_Local
|
||||
The directory has not changed
|
||||
Processing SMPP-IF-SPEC_v3_3-24858.pdf
|
||||
The file has not changed
|
||||
Processing SMPP_v3_4_Issue1_2-24857.pdf
|
||||
The file has not changed
|
||||
Processing new_local_file.txt
|
||||
The file has not changed
|
||||
Processing root
|
||||
The directory has not changed
|
||||
...
|
||||
The directory has not changed
|
||||
Processing week02-03-Combinational_Logic-v1.pptx
|
||||
The file has not changed
|
||||
Uploading new items of ~/OneDriveBusiness
|
||||
Applying changes of Path ID: 01WIXGO5V6Y2GOVW7725BZO354PWSELRRZ
|
||||
Number of items from OneDrive to process: 0
|
||||
Attempting to sync OneDrive Business Shared Folders
|
||||
Syncing this OneDrive Business Shared Folder: Child Shared Folder
|
||||
OneDrive Business Shared Folder - Shared By: test user
|
||||
Applying changes of Path ID: 01JRXHEZMREEB3EJVHNVHKNN454Q7DFXPR
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Child Shared Folder/SMPP_Shared
|
||||
Processing 11 OneDrive items to ensure consistent local state
|
||||
Syncing this OneDrive Business Shared Folder: Top Level to Share
|
||||
OneDrive Business Shared Folder - Shared By: test user (testuser@mynasau3.onmicrosoft.com)
|
||||
Applying changes of Path ID: 01JRXHEZLRMXHKBYZNOBF3TQOPBXS3VZMA
|
||||
Adding OneDrive root details for processing
|
||||
Adding OneDrive folder details for processing
|
||||
Adding 4 OneDrive items for processing from OneDrive folder
|
||||
Adding 3 OneDrive items for processing from /Top Level to Share/10-Files
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Cisco VDI Whitepaper
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/Images
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/JPG
|
||||
Adding 8 OneDrive items for processing from /Top Level to Share/10-Files/Images/PNG
|
||||
Adding 2 OneDrive items for processing from /Top Level to Share/10-Files/SMPP
|
||||
Processing 31 OneDrive items to ensure consistent local state
|
||||
```
|
||||
|
||||
**Note:** Whenever you modify the `business_shared_folders` file you must perform a `--resync` of your database to clean up stale entries due to changes in your configuration.
|
||||
|
||||
## Enable / Disable syncing of OneDrive Business Shared Folders
|
||||
Performing a sync of the configured OneDrive Business Shared Folders can be enabled / disabled via adding the following to your configuration file.
|
||||
|
||||
### Enable syncing of OneDrive Business Shared Folders via config file
|
||||
```text
|
||||
sync_business_shared_folders = "true"
|
||||
```
|
||||
|
||||
### Disable syncing of OneDrive Business Shared Folders via config file
|
||||
```text
|
||||
sync_business_shared_folders = "false"
|
||||
```
|
||||
|
||||
## Known Issues
|
||||
Shared folders, shared with you from people outside of your 'organisation' are unable to be synced. This is due to the Microsoft Graph API not presenting these folders.
|
||||
|
||||
Shared folders that match this scenario, when you view 'Shared' via OneDrive online, will have a 'world' symbol as per below:
|
||||
|
||||
![shared_with_me](./images/shared_with_me.JPG)
|
||||
|
||||
This issue is being tracked by: [#966](https://github.com/abraunegg/onedrive/issues/966)
|
396
docs/Docker.md
Normal file
|
@ -0,0 +1,396 @@
|
|||
# Run the OneDrive Client for Linux under Docker
|
||||
This client can be run as a Docker container, with 3 available container base options for you to choose from:
|
||||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Docker container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Docker container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Docker container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Docker container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Docker container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Docker container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Fedora 38
|
||||
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
The 'edge' Docker Container will align closer to all documentation and features, whereas 'latest' is the release version from a static point in time. The 'latest' tag however may contain bugs and/or issues that will have been fixed, and those fixes are contained in 'edge'.
|
||||
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
**Note:** The below instructions for docker has been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
## High Level Configuration Steps
|
||||
1. Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
2. Configure 'docker' to allow non-privileged users to run Docker commands
|
||||
3. Disable 'SELinux' as per your distribution platform's instructions
|
||||
4. Test 'docker' by running a test container without using `sudo`
|
||||
5. Prepare the required docker volumes to store the configuration and data
|
||||
6. Run the 'onedrive' container and perform authorisation
|
||||
7. Running the 'onedrive' container under 'docker'
|
||||
|
||||
## Configuration Steps
|
||||
|
||||
### 1. Install 'docker' on your platform
|
||||
Install 'docker' as per your distribution platform's instructions if not already installed.
|
||||
|
||||
### 2. Configure 'docker' to allow non-privileged users to run Docker commands
|
||||
Read https://docs.docker.com/engine/install/linux-postinstall/ to configure the 'docker' user group with your user account to allow your non 'root' user to run 'docker' commands.
|
||||
|
||||
### 3. Disable SELinux on your platform
|
||||
In order to run the Docker container, SELinux must be disabled. Without doing this, when the application is authenticated in the steps below, the following error will be presented:
|
||||
```text
|
||||
ERROR: The local file system returned an error with the following message:
|
||||
Error Message: /onedrive/conf/refresh_token: Permission denied
|
||||
|
||||
The database cannot be opened. Please check the permissions of ~/.config/onedrive/items.sqlite3
|
||||
```
|
||||
The only known work-around for the above problem at present is to disable SELinux. Please refer to your distribution platform's instructions on how to perform this step.
|
||||
|
||||
* Fedora: https://docs.fedoraproject.org/en-US/quick-docs/selinux-changing-states-and-modes/#_disabling_selinux
|
||||
* Red Hat Enterprise Linux: https://access.redhat.com/solutions/3176
|
||||
|
||||
After disabling SELinux and rebooting your system, confirm that `getenforce` returns `Disabled`:
|
||||
```text
|
||||
$ getenforce
|
||||
Disabled
|
||||
```
|
||||
|
||||
If you are still experiencing permission issues despite disabling SELinux, please read https://www.redhat.com/sysadmin/container-permission-denied-errors
|
||||
|
||||
### 4. Test 'docker' on your platform
|
||||
Ensure that 'docker' is running as a system service, and is enabled to be activated on system reboot:
|
||||
```bash
|
||||
sudo systemctl enable --now docker
|
||||
```
|
||||
|
||||
Test that 'docker' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora-38-docker-host ~]$ docker run hello-world
|
||||
Unable to find image 'hello-world:latest' locally
|
||||
latest: Pulling from library/hello-world
|
||||
719385e32844: Pull complete
|
||||
Digest: sha256:88ec0acaa3ec199d3b7eaf73588f4518c25f9d34f58ce9a0df68429c5af48e8d
|
||||
Status: Downloaded newer image for hello-world:latest
|
||||
|
||||
Hello from Docker!
|
||||
This message shows that your installation appears to be working correctly.
|
||||
|
||||
To generate this message, Docker took the following steps:
|
||||
1. The Docker client contacted the Docker daemon.
|
||||
2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
|
||||
(amd64)
|
||||
3. The Docker daemon created a new container from that image which runs the
|
||||
executable that produces the output you are currently reading.
|
||||
4. The Docker daemon streamed that output to the Docker client, which sent it
|
||||
to your terminal.
|
||||
|
||||
To try something more ambitious, you can run an Ubuntu container with:
|
||||
$ docker run -it ubuntu bash
|
||||
|
||||
Share images, automate workflows, and more with a free Docker ID:
|
||||
https://hub.docker.com/
|
||||
|
||||
For more examples and ideas, visit:
|
||||
https://docs.docker.com/get-started/
|
||||
|
||||
[alex@fedora-38-docker-host ~]$
|
||||
```
|
||||
|
||||
### 5. Configure the required docker volumes
|
||||
The 'onedrive' Docker container requires 2 docker volumes to operate:
|
||||
* Config Volume
|
||||
* Data Volume
|
||||
|
||||
The first volume is the configuration volume that stores all the applicable application configuration + current runtime state. In a non-containerised environment, this normally resides in `~/.config/onedrive` - in a containerised environment this is stored in the volume tagged as `/onedrive/conf`
|
||||
|
||||
The second volume is the data volume, where all your data from Microsoft OneDrive is stored locally. This volume is mapped to an actual directory point on your local filesystem and this is stored in the volume tagged as `/onedrive/data`
|
||||
|
||||
#### 5.1 Prepare the 'config' volume
|
||||
Create the 'config' volume with the following command:
|
||||
```bash
|
||||
docker volume create onedrive_conf
|
||||
```
|
||||
|
||||
This will create a docker volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file in this location at a later point in time if required.
|
||||
|
||||
#### 5.2 Prepare the 'data' volume
|
||||
Create the 'data' volume with the following command:
|
||||
```bash
|
||||
docker volume create onedrive_data
|
||||
```
|
||||
|
||||
This will create a docker volume labeled `onedrive_data` and will map to a path on your local filesystem. This is where your data from Microsoft OneDrive will be stored. Keep in mind that:
|
||||
|
||||
* The owner of this specified folder must not be root
|
||||
* The owner of this specified folder must have permissions for its parent directory
|
||||
* Docker will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
||||
**NOTE:** Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Docker container will fail to start with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
|
||||
### 6. First run of Docker container under docker and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running docker in interactive mode.
|
||||
|
||||
Run the docker image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the docker container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the docker volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid, you must insert a non-root UID and GID (e.g. `export ONEDRIVE_UID=1000` and `export ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
docker run -it --name onedrive -v onedrive_conf:/onedrive/conf \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data" \
|
||||
-e "ONEDRIVE_UID=${ONEDRIVE_UID}" \
|
||||
-e "ONEDRIVE_GID=${ONEDRIVE_GID}" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
When the Docker container successfully starts:
|
||||
* You will be asked to open a specific link using your web browser
|
||||
* Login to your Microsoft Account and give the application the permission
|
||||
* After giving the permission, you will be redirected to a blank page
|
||||
* Copy the URI of the blank page into the application prompt to authorise the application
|
||||
|
||||
Once the 'onedrive' application is authorised, the client will automatically start monitoring your `ONEDRIVE_DATA_DIR` for data changes to be uploaded to OneDrive. Files stored on OneDrive will be downloaded to this location.
|
||||
|
||||
If the client is working as expected, you can detach from the container with Ctrl+p, Ctrl+q.
|
||||
|
||||
### 7. Running the 'onedrive' container under 'docker'
|
||||
|
||||
#### 7.1 Check if the monitor service is running
|
||||
```bash
|
||||
docker ps -f name=onedrive
|
||||
```
|
||||
|
||||
#### 7.2 Show 'onedrive' runtime logs
|
||||
```bash
|
||||
docker logs onedrive
|
||||
```
|
||||
|
||||
#### 7.3 Stop running 'onedrive' container
|
||||
```bash
|
||||
docker stop onedrive
|
||||
```
|
||||
|
||||
#### 7.4 Start 'onedrive' container
|
||||
```bash
|
||||
docker start onedrive
|
||||
```
|
||||
|
||||
#### 7.5 Remove 'onedrive' container
|
||||
```bash
|
||||
docker rm -f onedrive
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### How to use Docker-compose
|
||||
You can utilise `docker-compose` if available on your platform if you are able to use docker compose schemas > 3.
|
||||
|
||||
In the following example it is assumed you have a `ONEDRIVE_DATA_DIR` environment variable and have already created the `onedrive_conf` volume.
|
||||
|
||||
You can also use docker bind mounts for the configuration folder, e.g. `export ONEDRIVE_CONF="${HOME}/OneDriveConfig"`.
|
||||
|
||||
```
|
||||
version: "3"
|
||||
services:
|
||||
onedrive:
|
||||
image: driveone/onedrive:edge
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
- ONEDRIVE_UID=${PUID}
|
||||
- ONEDRIVE_GID=${PGID}
|
||||
volumes:
|
||||
- onedrive_conf:/onedrive/conf
|
||||
- ${ONEDRIVE_DATA_DIR}:/onedrive/data
|
||||
```
|
||||
|
||||
Note that you still have to perform step 6: First run and authorisation.
|
||||
|
||||
### Editing the running configuration and using a 'config' file
|
||||
The 'onedrive' client should run in default configuration, however you can change this default configuration by placing a custom config file in the `onedrive_conf` docker volume. First download the default config from [here](https://raw.githubusercontent.com/abraunegg/onedrive/master/config)
|
||||
Then put it into your onedrive_conf volume path, which can be found with:
|
||||
|
||||
```bash
|
||||
docker volume inspect onedrive_conf
|
||||
```
|
||||
|
||||
Or you can map your own config folder to the config volume. Make sure to copy all files from the docker volume into your mapped folder first.
|
||||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this, the easiest is probably to do the following:
|
||||
1. Create a second docker config volume (replace `Work` with your desired name): `docker volume create onedrive_conf_Work`
|
||||
2. And start a second docker monitor container (again replace `Work` with your desired name):
|
||||
```
|
||||
export ONEDRIVE_DATA_DIR_WORK="/home/abraunegg/OneDriveWork"
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR_WORK}
|
||||
docker run -it --restart unless-stopped --name onedrive_Work -v onedrive_conf_Work:/onedrive/conf -v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
### Run or update the Docker container with one script
|
||||
If you are experienced with docker and onedrive, you can use the following script:
|
||||
|
||||
```bash
|
||||
# Update ONEDRIVE_DATA_DIR with correct OneDrive directory path
|
||||
ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
# Create directory if non-existent
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
|
||||
firstRun='-d'
|
||||
docker pull driveone/onedrive:edge
|
||||
docker inspect onedrive_conf > /dev/null 2>&1 || { docker volume create onedrive_conf; firstRun='-it'; }
|
||||
docker inspect onedrive > /dev/null 2>&1 && docker rm -f onedrive
|
||||
docker run $firstRun --restart unless-stopped --name onedrive -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Supported Docker Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
| ---------------- | --------------------------------------------------- |:--------------------------------------------------------------------------------------------------------------------------------:|
|
||||
| <B>ONEDRIVE_UID</B> | UserID (UID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_GID</B> | GroupID (GID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_VERBOSE</B> | Controls "--verbose" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DEBUG</B> | Controls "--verbose --verbose" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DEBUG_HTTPS</B> | Controls "--debug-https" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RESYNC</B> | Controls "--resync" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DOWNLOADONLY</B> | Controls "--download-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_UPLOADONLY</B> | Controls "--upload-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Debug Output:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync and --verbose:**
|
||||
```bash
|
||||
docker container run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --logout and re-authenticate:**
|
||||
```bash
|
||||
docker container run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Building a custom Docker image
|
||||
|
||||
### Build Environment Requirements
|
||||
* Build environment must have at least 1GB of memory & 2GB swap space
|
||||
|
||||
You can validate your build environment memory status with the following command:
|
||||
```text
|
||||
cat /proc/meminfo | grep -E 'MemFree|Swap'
|
||||
```
|
||||
This should result in the following similar output:
|
||||
```text
|
||||
MemFree: 3704644 kB
|
||||
SwapCached: 0 kB
|
||||
SwapTotal: 8117244 kB
|
||||
SwapFree: 8117244 kB
|
||||
```
|
||||
|
||||
If you do not have enough swap space, you can use the following script to dynamically allocate a swapfile for building the Docker container:
|
||||
|
||||
```bash
|
||||
cd /var
|
||||
sudo fallocate -l 1.5G swapfile
|
||||
sudo chmod 600 swapfile
|
||||
sudo mkswap swapfile
|
||||
sudo swapon swapfile
|
||||
# make swap permanent
|
||||
sudo nano /etc/fstab
|
||||
# add "/swapfile swap swap defaults 0 0" at the end of file
|
||||
# check it has been assigned
|
||||
swapon -s
|
||||
free -h
|
||||
```
|
||||
|
||||
If you are running a Raspberry Pi, you will need to edit your system configuration to increase your swapfile:
|
||||
|
||||
* Modify the file `/etc/dphys-swapfile` and edit the `CONF_SWAPSIZE`, for example: `CONF_SWAPSIZE=2048`.
|
||||
|
||||
A reboot of your Raspberry Pi is required to make this change effective.
|
||||
|
||||
### Building and running a custom Docker image
|
||||
You can also build your own image instead of pulling the one from [hub.docker.com](https://hub.docker.com/r/driveone/onedrive):
|
||||
```bash
|
||||
git clone https://github.com/abraunegg/onedrive
|
||||
cd onedrive
|
||||
docker build . -t local-onedrive -f contrib/docker/Dockerfile
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive:latest
|
||||
```
|
||||
|
||||
There are alternate, smaller images available by using `Dockerfile-debian` or `Dockerfile-alpine`. These [multi-stage builder pattern](https://docs.docker.com/develop/develop-images/multistage-build/) Dockerfiles require Docker version at least 17.05.
|
||||
|
||||
### How to build and run a custom Docker image based on Debian
|
||||
``` bash
|
||||
docker build . -t local-onedrive-debian -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-debian:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Docker image based on Alpine Linux
|
||||
``` bash
|
||||
docker build . -t local-onedrive-alpine -f contrib/docker/Dockerfile-alpine
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-alpine:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Docker image for ARMHF (Raspberry Pi)
|
||||
Compatible with:
|
||||
* Raspberry Pi
|
||||
* Raspberry Pi 2
|
||||
* Raspberry Pi Zero
|
||||
* Raspberry Pi 3
|
||||
* Raspberry Pi 4
|
||||
``` bash
|
||||
docker build . -t local-onedrive-armhf -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-armhf:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Docker image for AARCH64 Platforms
|
||||
``` bash
|
||||
docker build . -t local-onedrive-aarch64 -f contrib/docker/Dockerfile-debian
|
||||
docker container run -v onedrive_conf:/onedrive/conf -v "${ONEDRIVE_DATA_DIR}:/onedrive/data" local-onedrive-aarch64:latest
|
||||
```
|
||||
### How to support double-byte languages
|
||||
In some geographic regions, you may need to change and/or update the locale specification of the Docker container to better support the local language used for your local filesystem. To do this, follow the example below:
|
||||
```
|
||||
FROM driveone/onedrive
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y locales
|
||||
|
||||
RUN echo "ja_JP.UTF-8 UTF-8" > /etc/locale.gen && \
|
||||
locale-gen ja_JP.UTF-8 && \
|
||||
dpkg-reconfigure locales && \
|
||||
/usr/sbin/update-locale LANG=ja_JP.UTF-8
|
||||
|
||||
ENV LC_ALL ja_JP.UTF-8
|
||||
```
|
||||
The above example changes the Docker container to support Japanese. To support your local language, change `ja_JP.UTF-8` to the required entry.
|
277
docs/INSTALL.md
Normal file
|
@ -0,0 +1,277 @@
|
|||
# Installing or Upgrading using Distribution Packages or Building the OneDrive Client for Linux from source
|
||||
|
||||
## Installing or Upgrading using Distribution Packages
|
||||
This project has been packaged for the following Linux distributions as per below. The current client release is: [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases)
|
||||
|
||||
Only the current release version or greater is supported. Earlier versions are not supported and should not be installed or used.
|
||||
|
||||
#### Important Note:
|
||||
Distribution packages may be of an older release when compared to the latest release that is [available](https://github.com/abraunegg/onedrive/releases). If any package version indicator below is 'red' for your distribution, it is recommended that you build from source. Do not install the software from the available distribution package. If a package is out of date, please contact the package maintainer for resolution.
|
||||
|
||||
| Distribution | Package Name & Package Link | PKG_Version | i686 | x86_64 | ARMHF | AARCH64 | Extra Details |
|
||||
|---------------------------------|------------------------------------------------------------------------------|:---------------:|:----:|:------:|:-----:|:-------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Alpine Linux | [onedrive](https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge) |<a href="https://pkgs.alpinelinux.org/packages?name=onedrive&branch=edge"><img src="https://repology.org/badge/version-for-repo/alpine_edge/onedrive.svg?header=" alt="Alpine Linux Edge package" width="46" height="20"></a>|❌|✔|❌|✔ | |
|
||||
| Arch Linux<br><br>Manjaro Linux | [onedrive-abraunegg](https://aur.archlinux.org/packages/onedrive-abraunegg/) |<a href="https://aur.archlinux.org/packages/onedrive-abraunegg"><img src="https://repology.org/badge/version-for-repo/aur/onedrive-abraunegg.svg?header=" alt="AUR package" width="46" height="20"></a>|✔|✔|✔|✔ | Install via: `pamac build onedrive-abraunegg` from the Arch Linux User Repository (AUR)<br><br>**Note:** If asked regarding a provider for 'd-runtime' and 'd-compiler', select 'liblphobos' and 'ldc'<br><br>**Note:** System must have at least 1GB of memory & 1GB swap space
|
||||
| Debian 11 | [onedrive](https://packages.debian.org/bullseye/source/onedrive) |<a href="https://packages.debian.org/bullseye/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_11/onedrive.svg?header=" alt="Debian 11 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 11 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Debian 12 | [onedrive](https://packages.debian.org/bookworm/source/onedrive) |<a href="https://packages.debian.org/bookworm/source/onedrive"><img src="https://repology.org/badge/version-for-repo/debian_12/onedrive.svg?header=" alt="Debian 12 package" width="46" height="20"></a>|✔|✔|✔|✔| **Note:** Do not install from Debian Package Repositories<br><br>It is recommended that for Debian 12 that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Fedora | [onedrive](https://koji.fedoraproject.org/koji/packageinfo?packageID=26044) |<a href="https://koji.fedoraproject.org/koji/packageinfo?packageID=26044"><img src="https://repology.org/badge/version-for-repo/fedora_rawhide/onedrive.svg?header=" alt="Fedora Rawhide package" width="46" height="20"></a>|✔|✔|✔|✔| |
|
||||
| Gentoo | [onedrive](https://gpo.zugaina.org/net-misc/onedrive) | No API Available |✔|✔|❌|❌| |
|
||||
| Homebrew | [onedrive](https://formulae.brew.sh/formula/onedrive) | <a href="https://formulae.brew.sh/formula/onedrive"><img src="https://repology.org/badge/version-for-repo/homebrew/onedrive.svg?header=" alt="Homebrew package" width="46" height="20"></a> |❌|✔|❌|❌| |
|
||||
| Linux Mint 20.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Linux Mint 21.x | [onedrive](https://community.linuxmint.com/software/view/onedrive) |<a href="https://community.linuxmint.com/software/view/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Linux Mint Repositories<br><br>It is recommended that for Linux Mint that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| NixOS | [onedrive](https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive)|<a href="https://search.nixos.org/packages?channel=20.09&from=0&size=50&sort=relevance&query=onedrive"><img src="https://repology.org/badge/version-for-repo/nix_unstable/onedrive.svg?header=" alt="nixpkgs unstable package" width="46" height="20"></a>|❌|✔|❌|❌| Use package `onedrive` either by adding it to `configuration.nix` or by using the command `nix-env -iA <channel name>.onedrive`. This does not install a service. To install a service, use unstable channel (will stabilize in 20.09) and add `services.onedrive.enable=true` in `configuration.nix`. You can also add a custom package using the `services.onedrive.package` option (recommended since package lags upstream). Enabling the service installs a default package too (based on the channel). You can also add multiple onedrive accounts trivially, see [documentation](https://github.com/NixOS/nixpkgs/pull/77734#issuecomment-575874225). |
|
||||
| OpenSuSE | [onedrive](https://software.opensuse.org/package/onedrive) |<a href="https://software.opensuse.org/package/onedrive"><img src="https://repology.org/badge/version-for-repo/opensuse_network_tumbleweed/onedrive.svg?header=" alt="openSUSE Tumbleweed package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| OpenSuSE Build Service | [onedrive](https://build.opensuse.org/package/show/home:npreining:debian-ubuntu-onedrive/onedrive) | No API Available |✔|✔|✔|✔| Package Build Service for Debian and Ubuntu |
|
||||
| Raspbian | [onedrive](https://archive.raspbian.org/raspbian/pool/main/o/onedrive/) |<a href="https://archive.raspbian.org/raspbian/pool/main/o/onedrive/"><img src="https://repology.org/badge/version-for-repo/raspbian_stable/onedrive.svg?header=" alt="Raspbian Stable package" width="46" height="20"></a> |❌|❌|✔|✔| **Note:** Do not install from Raspbian Package Repositories<br><br>It is recommended that for Raspbian that you install from OpenSuSE Build Service using the Debian Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Slackware | [onedrive](https://slackbuilds.org/result/?search=onedrive&sv=) |<a href="https://slackbuilds.org/result/?search=onedrive&sv="><img src="https://repology.org/badge/version-for-repo/slackbuilds/onedrive.svg?header=" alt="SlackBuilds package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Solus | [onedrive](https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R) |<a href="https://dev.getsol.us/search/query/FB7PIf1jG9Z9/#R"><img src="https://repology.org/badge/version-for-repo/solus/onedrive.svg?header=" alt="Solus package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
| Ubuntu 20.04 | [onedrive](https://packages.ubuntu.com/focal/onedrive) |<a href="https://packages.ubuntu.com/focal/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_20_04/onedrive.svg?header=" alt="Ubuntu 20.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 22.04 | [onedrive](https://packages.ubuntu.com/jammy/onedrive) |<a href="https://packages.ubuntu.com/jammy/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_22_04/onedrive.svg?header=" alt="Ubuntu 22.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Ubuntu 23.04 | [onedrive](https://packages.ubuntu.com/lunar/onedrive) |<a href="https://packages.ubuntu.com/lunar/onedrive"><img src="https://repology.org/badge/version-for-repo/ubuntu_23_04/onedrive.svg?header=" alt="Ubuntu 23.04 package" width="46" height="20"></a> |❌|✔|✔|✔| **Note:** Do not install from Ubuntu Universe<br><br>It is recommended that for Ubuntu that you install from OpenSuSE Build Service using the Ubuntu Package Install [Instructions](ubuntu-package-install.md) |
|
||||
| Void Linux | [onedrive](https://voidlinux.org/packages/?arch=x86_64&q=onedrive) |<a href="https://voidlinux.org/packages/?arch=x86_64&q=onedrive"><img src="https://repology.org/badge/version-for-repo/void_x86_64/onedrive.svg?header=" alt="Void Linux x86_64 package" width="46" height="20"></a>|✔|✔|❌|❌| |
|
||||
|
||||
#### Important information for all Ubuntu and Ubuntu based distribution users:
|
||||
This information is specifically for the following platforms and distributions:
|
||||
* Ubuntu
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
|
||||
Whilst there are [onedrive](https://packages.ubuntu.com/search?keywords=onedrive&searchon=names&suite=all§ion=all) Universe packages available for Ubuntu, do not install 'onedrive' from these Universe packages. The default Universe packages are out-of-date and are not supported and should not be used. If you wish to use a package, it is highly recommended that you utilise the [OpenSuSE Build Service](ubuntu-package-install.md) to install packages for these platforms. If the OpenSuSE Build Service does not cater for your version, your only option is to build from source.
|
||||
|
||||
If you wish to change this situation so that you can just use the Universe packages via 'apt install onedrive', consider becoming the Ubuntu package maintainer and contribute back to your community.
|
||||
|
||||
## Building from Source - High Level Requirements
|
||||
* Build environment must have at least 1GB of memory & 1GB swap space
|
||||
* Install the required distribution package dependencies
|
||||
* [libcurl](http://curl.haxx.se/libcurl/)
|
||||
* [SQLite 3](https://www.sqlite.org/) >= 3.7.15
|
||||
* [Digital Mars D Compiler (DMD)](http://dlang.org/download.html) or [LDC – the LLVM-based D Compiler](https://github.com/ldc-developers/ldc)
|
||||
|
||||
**Note:** DMD version >= 2.088.0 or LDC version >= 1.18.0 is required to compile this application
|
||||
|
||||
### Example for installing DMD Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
|
||||
### Example for installing LDC Compiler
|
||||
```text
|
||||
curl -fsS https://dlang.org/install.sh | bash -s ldc
|
||||
```
|
||||
|
||||
## Distribution Package Dependencies
|
||||
### Dependencies: Ubuntu 16.x
|
||||
Ubuntu Linux 16.x LTS reached the end of its five-year LTS window on April 30th 2021 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 18.x / Lubuntu 18.x
|
||||
Ubuntu Linux 18.x LTS reached the end of its five-year LTS window on May 31st 2023 and is no longer supported.
|
||||
|
||||
### Dependencies: Debian 9
|
||||
Debian 9 reached the end of its five-year support window on June 30th 2022 and is no longer supported.
|
||||
|
||||
### Dependencies: Ubuntu 20.x -> Ubuntu 23.x / Debian 10 -> Debian 12 - x86_64
|
||||
These dependencies are also applicable for all Ubuntu based distributions such as:
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: CentOS 6.x / RHEL 6.x
|
||||
CentOS 6.x and RHEL 6.x reached End of Life status on November 30th 2020 and is no longer supported.
|
||||
|
||||
### Dependencies: Fedora < Version 18 / CentOS 7.x / RHEL 7.x
|
||||
```text
|
||||
sudo yum groupinstall 'Development Tools'
|
||||
sudo yum install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd-2.099.0
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo yum install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Fedora > Version 18 / CentOS 8.x / RHEL 8.x / RHEL 9.x
|
||||
```text
|
||||
sudo dnf groupinstall 'Development Tools'
|
||||
sudo dnf install libcurl-devel sqlite-devel
|
||||
curl -fsS https://dlang.org/install.sh | bash -s dmd
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo dnf install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: Arch Linux & Manjaro Linux
|
||||
```text
|
||||
sudo pacman -S make pkg-config curl sqlite ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo pacman -S libnotify
|
||||
```
|
||||
|
||||
### Dependencies: Raspbian (ARMHF) and Ubuntu 22.x / Debian 11 / Debian 12 / Raspbian (ARM64)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
These instructions were validated using:
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-armhf-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux raspberrypi 5.10.92-v8+ #1514 SMP PREEMPT Mon Jan 17 17:39:38 GMT 2022 aarch64` (2022-01-28-raspios-bullseye-arm64-lite) using Raspberry Pi 3B (revision 1.2)
|
||||
* `Linux ubuntu 5.15.0-1005-raspi #5-Ubuntu SMP PREEMPT Mon Apr 4 12:21:48 UTC 2022 aarch64 aarch64 aarch64 GNU/Linux` (ubuntu-22.04-preinstalled-server-arm64+raspi) using Raspberry Pi 3B (revision 1.2)
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
|
||||
```text
|
||||
sudo apt install build-essential
|
||||
sudo apt install libcurl4-openssl-dev libsqlite3-dev pkg-config git curl ldc
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo apt install libnotify-dev
|
||||
```
|
||||
|
||||
### Dependencies: Gentoo
|
||||
```text
|
||||
sudo emerge app-portage/layman
|
||||
sudo layman -a dlang
|
||||
```
|
||||
Add ebuild from contrib/gentoo to a local overlay to use.
|
||||
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo emerge x11-libs/libnotify
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.0
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.0/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.1
|
||||
```text
|
||||
sudo zypper addrepo https://download.opensuse.org/repositories/devel:languages:D/openSUSE_Leap_15.1/devel:languages:D.repo
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
### Dependencies: OpenSuSE Leap 15.2
|
||||
```text
|
||||
sudo zypper refresh
|
||||
sudo zypper install gcc git libcurl-devel sqlite3-devel dmd phobos-devel phobos-devel-static
|
||||
```
|
||||
For notifications the following is also necessary:
|
||||
```text
|
||||
sudo zypper install libnotify-devel
|
||||
```
|
||||
|
||||
## Compilation & Installation
|
||||
### High Level Steps
|
||||
1. Install the platform dependencies for your Linux OS
|
||||
2. Activate your DMD or LDC compiler
|
||||
3. Clone the GitHub repository, run configure and make, then install
|
||||
4. Deactivate your DMD or LDC compiler
|
||||
|
||||
### Building using DMD Reference Compiler
|
||||
Before cloning and compiling, if you have installed DMD via curl for your OS, you will need to activate DMD as per example below:
|
||||
```text
|
||||
Run `source ~/dlang/dmd-2.088.0/activate` in your shell to use dmd-2.088.0.
|
||||
This will setup PATH, LIBRARY_PATH, LD_LIBRARY_PATH, DMD, DC, and PS1.
|
||||
Run `deactivate` later on to restore your environment.
|
||||
```
|
||||
Without performing this step, the compilation process will fail.
|
||||
|
||||
**Note:** Depending on your DMD version, substitute `2.088.0` above with your DMD version that is installed.
|
||||
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure
|
||||
make clean; make;
|
||||
sudo make install
|
||||
```
|
||||
|
||||
### Build options
|
||||
Notifications can be enabled using the `configure` switch `--enable-notifications`.
|
||||
|
||||
Systemd service files are installed in the appropriate directories on the system,
|
||||
as provided by `pkg-config systemd` settings. If the need for overriding the
|
||||
deduced paths arises, the two options `--with-systemdsystemunitdir` (for
|
||||
the Systemd system unit location), and `--with-systemduserunitdir` (for the
|
||||
Systemd user unit location) can be specified. Passing in `no` to one of these
|
||||
options disables service file installation.
|
||||
|
||||
By passing `--enable-debug` to the `configure` call, `onedrive` gets built with additional debug
|
||||
information, useful (for example) to get `perf`-issued figures.
|
||||
|
||||
By passing `--enable-completions` to the `configure` call, shell completion functions are
|
||||
installed for `bash`, `zsh` and `fish`. The installation directories are determined
|
||||
as far as possible automatically, but can be overridden by passing
|
||||
`--with-bash-completion-dir=<DIR>`, `--with-zsh-completion-dir=<DIR>`, and
|
||||
`--with-fish-completion-dir=<DIR>` to `configure`.
|
||||
|
||||
### Building using a different compiler (for example [LDC](https://wiki.dlang.org/LDC))
|
||||
#### ARMHF Architecture (Raspbian) and ARM64 Architecture (Ubuntu 22.x / Debian 11 / Raspbian)
|
||||
**Note:** The minimum LDC compiler version required to compile this application is now 1.18.0, which is not available for Debian Buster or distributions based on Debian Buster. You are advised to first upgrade your platform distribution to one that is based on Debian Bullseye (Debian 11) or later.
|
||||
|
||||
**Note:** Build environment must have at least 1GB of memory & 1GB swap space. Check with `swapon`.
|
||||
```text
|
||||
git clone https://github.com/abraunegg/onedrive.git
|
||||
cd onedrive
|
||||
./configure DC=/usr/bin/ldmd2
|
||||
make clean; make
|
||||
sudo make install
|
||||
```
|
||||
|
||||
## Upgrading the client
|
||||
If you have installed the client from a distribution package, the client will be updated when the distribution package is updated by the package maintainer and will be updated to the new application version when you perform your package update.
|
||||
|
||||
If you have built the client from source, to upgrade your client, it is recommended that you first uninstall your existing 'onedrive' binary (see below), then re-install the client by re-cloning, re-compiling and re-installing the client again to install the new version.
|
||||
|
||||
**Note:** Following the uninstall process will remove all client components including *all* systemd files, including any custom files created for specific access such as SharePoint Libraries.
|
||||
|
||||
You can optionally choose to not perform this uninstallation step, and simply re-install the client by re-cloning, re-compiling and re-installing the client again - however the risk here is that you end up with two onedrive client binaries on your system, and depending on your system search path preferences, this will determine which binary is used.
|
||||
|
||||
**Important:** Before performing any upgrade, it is highly recommended for you to stop any running systemd service if applicable to ensure that these services are restarted using the updated client version.
|
||||
|
||||
Post re-install, to confirm that you have the new version of the client installed, use `onedrive --version` to determine the client version that is now installed.
|
||||
|
||||
## Uninstalling the client
|
||||
### Uninstalling the client if installed from distribution package
|
||||
Follow your distribution documentation to uninstall the package that you installed
|
||||
|
||||
### Uninstalling the client if installed and built from source
|
||||
From within your GitHub repository clone, perform the following to remove the 'onedrive' binary:
|
||||
```text
|
||||
sudo make uninstall
|
||||
```
|
||||
|
||||
If you are not upgrading your client, to remove your application state and configuration, perform the following additional step:
|
||||
```
|
||||
rm -rf ~/.config/onedrive
|
||||
```
|
||||
**Note:** If you are using the `--confdir` option, substitute `~/.config/onedrive` for the correct directory storing your client configuration.
|
||||
|
||||
If you want to just delete the application key, but keep the items database:
|
||||
```text
|
||||
rm -f ~/.config/onedrive/refresh_token
|
||||
```
|
360
docs/Podman.md
Normal file
|
@ -0,0 +1,360 @@
|
|||
# Run the OneDrive Client for Linux under Podman
|
||||
This client can be run as a Podman container, with 3 available container base options for you to choose from:
|
||||
|
||||
| Container Base | Docker Tag | Description | i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|----------------|-------------|----------------------------------------------------------------|:------:|:------:|:-----:|:-------:|
|
||||
| Alpine Linux | edge-alpine | Podman container based on Alpine 3.18 using 'master' |❌|✔|❌|✔|
|
||||
| Alpine Linux | alpine | Podman container based on Alpine 3.18 using latest release |❌|✔|❌|✔|
|
||||
| Debian | debian | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Debian | edge | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | edge-debian | Podman container based on Debian Stable using 'master' |✔|✔|✔|✔|
|
||||
| Debian | latest | Podman container based on Debian Stable using latest release |✔|✔|✔|✔|
|
||||
| Fedora | edge-fedora | Podman container based on Fedora 38 using 'master' |❌|✔|❌|✔|
|
||||
| Fedora | fedora | Podman container based on Fedora 38 using latest release |❌|✔|❌|✔|
|
||||
|
||||
These containers offer a simple monitoring-mode service for the OneDrive Client for Linux.
|
||||
|
||||
The instructions below have been validated on:
|
||||
* Fedora 38
|
||||
|
||||
The instructions below will utilise the 'edge' tag, however this can be substituted for any of the other docker tags such as 'latest' from the table above if desired.
|
||||
|
||||
The 'edge' Docker Container will align closer to all documentation and features, whereas 'latest' is the release version from a static point in time. The 'latest' tag however may contain bugs and/or issues that will have been fixed, and those fixes are contained in 'edge'.
|
||||
|
||||
Additionally there are specific version release tags for each release. Refer to https://hub.docker.com/r/driveone/onedrive/tags for any other Docker tags you may be interested in.
|
||||
|
||||
**Note:** The below instructions for podman have been tested and validated when logging into the system as an unprivileged user (non 'root' user).
|
||||
|
||||
## High Level Configuration Steps
|
||||
1. Install 'podman' as per your distribution platform's instructions if not already installed.
|
||||
2. Disable 'SELinux' as per your distribution platform's instructions
|
||||
3. Test 'podman' by running a test container
|
||||
4. Prepare the required podman volumes to store the configuration and data
|
||||
5. Run the 'onedrive' container and perform authorisation
|
||||
6. Running the 'onedrive' container under 'podman'
|
||||
|
||||
## Configuration Steps
|
||||
|
||||
### 1. Install 'podman' on your platform
|
||||
Install 'podman' as per your distribution platform's instructions if not already installed.
|
||||
|
||||
### 2. Disable SELinux on your platform
|
||||
In order to run the Docker container under 'podman', SELinux must be disabled. Without doing this, when the application is authenticated in the steps below, the following error will be presented:
|
||||
```text
|
||||
ERROR: The local file system returned an error with the following message:
|
||||
Error Message: /onedrive/conf/refresh_token: Permission denied
|
||||
|
||||
The database cannot be opened. Please check the permissions of ~/.config/onedrive/items.sqlite3
|
||||
```
|
||||
The only known work-around for the above problem at present is to disable SELinux. Please refer to your distribution platform's instructions on how to perform this step.
|
||||
|
||||
* Fedora: https://docs.fedoraproject.org/en-US/quick-docs/selinux-changing-states-and-modes/#_disabling_selinux
|
||||
* Red Hat Enterprise Linux: https://access.redhat.com/solutions/3176
|
||||
|
||||
After disabling SELinux and rebooting your system, confirm that `getenforce` returns `Disabled`:
|
||||
```text
|
||||
$ getenforce
|
||||
Disabled
|
||||
```
|
||||
|
||||
If you are still experiencing permission issues despite disabling SELinux, please read https://www.redhat.com/sysadmin/container-permission-denied-errors
|
||||
|
||||
### 3. Test 'podman' on your platform
|
||||
Test that 'podman' is operational for your 'non-root' user, as per below:
|
||||
```bash
|
||||
[alex@fedora38-podman ~]$ podman pull fedora
|
||||
Resolved "fedora" as an alias (/etc/containers/registries.conf.d/000-shortnames.conf)
|
||||
Trying to pull registry.fedoraproject.org/fedora:latest...
|
||||
Getting image source signatures
|
||||
Copying blob b30887322388 done |
|
||||
Copying config a1cd3cbf8a done |
|
||||
Writing manifest to image destination
|
||||
a1cd3cbf8adaa422629f2fcdc629fd9297138910a467b11c66e5ddb2c2753dff
|
||||
[alex@fedora38-podman ~]$ podman run fedora /bin/echo "Welcome to the Podman World"
|
||||
Welcome to the Podman World
|
||||
[alex@fedora38-podman ~]$
|
||||
```
|
||||
|
||||
### 4. Configure the required podman volumes
|
||||
The 'onedrive' Docker container requires 2 podman volumes to operate:
|
||||
* Config Volume
|
||||
* Data Volume
|
||||
|
||||
The first volume is the configuration volume that stores all the applicable application configuration + current runtime state. In a non-containerised environment, this normally resides in `~/.config/onedrive` - in a containerised environment this is stored in the volume tagged as `/onedrive/conf`
|
||||
|
||||
The second volume is the data volume, where all your data from Microsoft OneDrive is stored locally. This volume is mapped to an actual directory point on your local filesystem and this is stored in the volume tagged as `/onedrive/data`
|
||||
|
||||
#### 4.1 Prepare the 'config' volume
|
||||
Create the 'config' volume with the following command:
|
||||
```bash
|
||||
podman volume create onedrive_conf
|
||||
```
|
||||
|
||||
This will create a podman volume labeled `onedrive_conf`, where all configuration of your onedrive account will be stored. You can add a custom config file in this location at a later point in time if required.
|
||||
|
||||
#### 4.2 Prepare the 'data' volume
|
||||
Create the 'data' volume with the following command:
|
||||
```bash
|
||||
podman volume create onedrive_data
|
||||
```
|
||||
|
||||
This will create a podman volume labeled `onedrive_data` and will map to a path on your local filesystem. This is where your data from Microsoft OneDrive will be stored. Keep in mind that:
|
||||
|
||||
* The owner of this specified folder must not be root
|
||||
* Podman will attempt to change the permissions of the volume to the user the container is configured to run as
|
||||
|
||||
**NOTE:** Issues occur when this target folder is a mounted folder of an external system (NAS, SMB mount, USB Drive etc) as the 'mount' itself is owned by 'root'. If this is your use case, you *must* ensure your normal user can mount your desired target without having the target mounted by 'root'. If you do not fix this, your Podman container will fail to start with the following error message:
|
||||
```bash
|
||||
ROOT level privileges prohibited!
|
||||
```
|
||||
|
||||
### 5. First run of Docker container under podman and performing authorisation
|
||||
The 'onedrive' client within the container first needs to be authorised with your Microsoft account. This is achieved by initially running podman in interactive mode.
|
||||
|
||||
Run the podman image with the commands below and make sure to change the value of `ONEDRIVE_DATA_DIR` to the actual onedrive data directory on your filesystem that you wish to use (e.g. `export ONEDRIVE_DATA_DIR="/home/abraunegg/OneDrive"`).
|
||||
|
||||
**Important:** The 'target' folder of `ONEDRIVE_DATA_DIR` must exist before running the podman container. The script below will create 'ONEDRIVE_DATA_DIR' so that it exists locally for the podman volume mapping to occur.
|
||||
|
||||
It is also a requirement that the container be run using a non-root uid and gid, you must insert a non-root UID and GID (e.g. `export ONEDRIVE_UID=1000` and `export ONEDRIVE_GID=1000`). The script below will use `id` to evaluate your system environment to use the correct values.
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
**Important:** In some scenarios, 'podman' sets the configuration and data directories to a different UID & GID as specified. To resolve this situation, you must run 'podman' with the `--userns=keep-id` flag to ensure 'podman' uses the UID and GID as specified. The updated script example when using `--userns=keep-id` is below:
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
|
||||
**Important:** If you plan to use the 'podman' built in auto-updating of container images described in 'Systemd Service & Auto Updating' below, you must pass an additional argument to set a label during the first run. The updated script example to support auto-updating of container images is below:
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR="${HOME}/OneDrive"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR}
|
||||
podman run -it --name onedrive --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" \
|
||||
-e PODMAN=1 \
|
||||
--label "io.containers.autoupdate=image" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
When the Podman container successfully starts:
|
||||
* You will be asked to open a specific link using your web browser
|
||||
* Login to your Microsoft Account and give the application the permission
|
||||
* After giving the permission, you will be redirected to a blank page
|
||||
* Copy the URI of the blank page into the application prompt to authorise the application
|
||||
|
||||
Once the 'onedrive' application is authorised, the client will automatically start monitoring your `ONEDRIVE_DATA_DIR` for data changes to be uploaded to OneDrive. Files stored on OneDrive will be downloaded to this location.
|
||||
|
||||
If the client is working as expected, you can detach from the container with Ctrl+p, Ctrl+q.
|
||||
|
||||
### 6. Running the 'onedrive' container under 'podman'
|
||||
|
||||
#### 6.1 Check if the monitor service is running
|
||||
```bash
|
||||
podman ps -f name=onedrive
|
||||
```
|
||||
|
||||
#### 6.2 Show 'onedrive' runtime logs
|
||||
```bash
|
||||
podman logs onedrive
|
||||
```
|
||||
|
||||
#### 6.3 Stop running 'onedrive' container
|
||||
```bash
|
||||
podman stop onedrive
|
||||
```
|
||||
|
||||
#### 6.4 Start 'onedrive' container
|
||||
```bash
|
||||
podman start onedrive
|
||||
```
|
||||
|
||||
#### 6.5 Remove 'onedrive' container
|
||||
```bash
|
||||
podman rm -f onedrive
|
||||
```
|
||||
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Systemd Service & Auto Updating
|
||||
|
||||
Podman supports running containers as a systemd service and also auto updating of the container images. Using the existing running container you can generate a systemd unit file to be installed by the **root** user. To have your container image auto-update with podman, it must first be created with the label `"io.containers.autoupdate=image"` mentioned in step 5 above.
|
||||
|
||||
```
|
||||
cd /tmp
|
||||
podman generate systemd --new --restart-policy on-failure --name -f onedrive
|
||||
/tmp/container-onedrive.service
|
||||
|
||||
# copy the generated systemd unit file to the systemd path and reload the daemon
|
||||
|
||||
cp -Z /tmp/container-onedrive.service /usr/lib/systemd/system
|
||||
systemctl daemon-reload
|
||||
|
||||
#optionally enable it to startup on boot
|
||||
|
||||
systemctl enable container-onedrive.service
|
||||
|
||||
#check status
|
||||
|
||||
systemctl status container-onedrive
|
||||
|
||||
#start/stop/restart container as a systemd service
|
||||
|
||||
systemctl stop container-onedrive
|
||||
systemctl start container-onedrive
|
||||
```
|
||||
|
||||
To update the image using podman (Ad-hoc)
|
||||
```
|
||||
podman auto-update
|
||||
```
|
||||
|
||||
To update the image using systemd (Automatic/Scheduled)
|
||||
```
|
||||
# Enable the podman-auto-update.timer service at system start:
|
||||
|
||||
systemctl enable podman-auto-update.timer
|
||||
|
||||
# Start the service
|
||||
|
||||
systemctl start podman-auto-update.timer
|
||||
|
||||
# Containers with the autoupdate label will be updated on the next scheduled timer
|
||||
|
||||
systemctl list-timers --all
|
||||
```
|
||||
|
||||
### Editing the running configuration and using a 'config' file
|
||||
The 'onedrive' client should run in default configuration, however you can change this default configuration by placing a custom config file in the `onedrive_conf` podman volume. First download the default config from [here](https://raw.githubusercontent.com/abraunegg/onedrive/master/config)
|
||||
Then put it into your onedrive_conf volume path, which can be found with:
|
||||
|
||||
```bash
|
||||
podman volume inspect onedrive_conf
|
||||
```
|
||||
Or you can map your own config folder to the config volume. Make sure to copy all files from the volume into your mapped folder first.
|
||||
|
||||
The detailed document for the config can be found here: [Configuration](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#configuration)
|
||||
|
||||
### Syncing multiple accounts
|
||||
There are many ways to do this, the easiest is probably to do the following:
|
||||
1. Create a second podman config volume (replace `work` with your desired name): `podman volume create onedrive_conf_work`
|
||||
2. And start a second podman monitor container (again replace `work` with your desired name):
|
||||
|
||||
```bash
|
||||
export ONEDRIVE_DATA_DIR_WORK="/home/abraunegg/OneDriveWork"
|
||||
export ONEDRIVE_UID=`id -u`
|
||||
export ONEDRIVE_GID=`id -g`
|
||||
mkdir -p ${ONEDRIVE_DATA_DIR_WORK}
|
||||
podman run -it --name onedrive_work --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" \
|
||||
--userns=keep-id \
|
||||
-v onedrive_conf_work:/onedrive/conf:U,Z \
|
||||
-v "${ONEDRIVE_DATA_DIR_WORK}:/onedrive/data:U,Z" \
|
||||
-e PODMAN=1 \
|
||||
--label "io.containers.autoupdate=image" \
|
||||
driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Supported Podman Environment Variables
|
||||
| Variable | Purpose | Sample Value |
|
||||
| ---------------- | --------------------------------------------------- |:-------------:|
|
||||
| <B>ONEDRIVE_UID</B> | UserID (UID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_GID</B> | GroupID (GID) to run as | 1000 |
|
||||
| <B>ONEDRIVE_VERBOSE</B> | Controls "--verbose" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DEBUG</B> | Controls "--verbose --verbose" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DEBUG_HTTPS</B> | Controls "--debug-https" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_RESYNC</B> | Controls "--resync" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_DOWNLOADONLY</B> | Controls "--download-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_UPLOADONLY</B> | Controls "--upload-only" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_NOREMOTEDELETE</B> | Controls "--no-remote-delete" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_LOGOUT</B> | Controls "--logout" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_REAUTH</B> | Controls "--reauth" switch. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_AUTHFILES</B> | Controls "--auth-files" option. Default is "" | "authUrl:responseUrl" |
|
||||
| <B>ONEDRIVE_AUTHRESPONSE</B> | Controls "--auth-response" option. Default is "" | See [here](https://github.com/abraunegg/onedrive/blob/master/docs/USAGE.md#authorize-the-application-with-your-onedrive-account) |
|
||||
| <B>ONEDRIVE_DISPLAY_CONFIG</B> | Controls "--display-running-config" switch on onedrive sync. Default is 0 | 1 |
|
||||
| <B>ONEDRIVE_SINGLE_DIRECTORY</B> | Controls "--single-directory" option. Default = "" | "mydir" |
|
||||
|
||||
### Environment Variables Usage Examples
|
||||
**Verbose Output:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Debug Output:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_DEBUG=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_RESYNC=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --resync and --verbose:**
|
||||
```bash
|
||||
podman run -e ONEDRIVE_RESYNC=1 -e ONEDRIVE_VERBOSE=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
**Perform a --logout and re-authenticate:**
|
||||
```bash
|
||||
podman run -it -e ONEDRIVE_LOGOUT=1 -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" driveone/onedrive:edge
|
||||
```
|
||||
|
||||
## Building a custom Podman image
|
||||
You can also build your own image instead of pulling the one from [hub.docker.com](https://hub.docker.com/r/driveone/onedrive):
|
||||
```bash
|
||||
git clone https://github.com/abraunegg/onedrive
|
||||
cd onedrive
|
||||
podman build . -t local-onedrive -f contrib/docker/Dockerfile
|
||||
```
|
||||
|
||||
There are alternate, smaller images available by building
|
||||
Dockerfile-debian or Dockerfile-alpine. These [multi-stage builder pattern](https://docs.docker.com/develop/develop-images/multistage-build/)
|
||||
Dockerfiles require Docker version at least 17.05.
|
||||
|
||||
### How to build and run a custom Podman image based on Debian
|
||||
``` bash
|
||||
podman build . -t local-onedrive-debian -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-debian:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Podman image based on Alpine Linux
|
||||
``` bash
|
||||
podman build . -t local-onedrive-alpine -f contrib/docker/Dockerfile-alpine
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-alpine:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Podman image for ARMHF (Raspberry Pi)
|
||||
Compatible with:
|
||||
* Raspberry Pi
|
||||
* Raspberry Pi 2
|
||||
* Raspberry Pi Zero
|
||||
* Raspberry Pi 3
|
||||
* Raspberry Pi 4
|
||||
``` bash
|
||||
podman build . -t local-onedrive-armhf -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-armhf:latest
|
||||
```
|
||||
|
||||
### How to build and run a custom Podman image for AARCH64 Platforms
|
||||
``` bash
|
||||
podman build . -t local-onedrive-aarch64 -f contrib/docker/Dockerfile-debian
|
||||
podman run -v onedrive_conf:/onedrive/conf:U,Z -v "${ONEDRIVE_DATA_DIR}:/onedrive/data:U,Z" --user "${ONEDRIVE_UID}:${ONEDRIVE_GID}" --userns=keep-id local-onedrive-aarch64:latest
|
||||
```
|
228
docs/SharePoint-Shared-Libraries.md
Normal file
|
@ -0,0 +1,228 @@
|
|||
# How to configure OneDrive SharePoint Shared Library sync
|
||||
**WARNING:** Several users have reported files being overwritten causing data loss as a result of using this client with SharePoint Libraries when running as a systemd service.
|
||||
|
||||
When this has been investigated, the following has been noted as potential root causes:
|
||||
* File indexing application such as Baloo File Indexer or Tracker3 constantly indexing your OneDrive data
|
||||
* The use of WPS Office and how it 'saves' files by deleting the existing item and replaces it with the saved data
|
||||
|
||||
Additionally there could be a yet unknown bug with the client, however all debugging and data provided previously shows that an 'external' process to the 'onedrive' application modifies the files triggering the undesirable upload to occur.
|
||||
|
||||
**Possible Preventative Actions:**
|
||||
* Disable all File Indexing for your SharePoint Library data. It is out of scope to detail on how you should do this.
|
||||
* Disable using a systemd service for syncing your SharePoint Library data.
|
||||
* Do not use WPS Office to edit your documents. Use OpenOffice or LibreOffice as these do not exhibit the same 'delete to save' action that WPS Office has.
|
||||
|
||||
Additionally, please use caution when using this client with SharePoint.
|
||||
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
Syncing a OneDrive SharePoint library requires additional configuration for your 'onedrive' client:
|
||||
1. Login to OneDrive and under 'Shared Libraries' obtain the shared library name
|
||||
2. Query that shared library name using the client to obtain the required configuration details
|
||||
3. Create a unique local folder which will be the SharePoint Library 'root'
|
||||
4. Configure the client's config file with the required 'drive_id'
|
||||
5. Test the configuration using '--dry-run'
|
||||
6. Sync the SharePoint Library as required
|
||||
|
||||
**Note:** The `--get-O365-drive-id` process below requires a fully configured 'onedrive' configuration so that the applicable Drive ID for the given Office 365 SharePoint Shared Library can be determined. It is highly recommended that you do not use the application 'default' configuration directory for any SharePoint Site, and configure separate items for each site you wish to use.
|
||||
|
||||
## 1. Listing available OneDrive SharePoint Libraries
|
||||
Login to the OneDrive web interface and determine which shared library you wish to configure the client for:
|
||||
![shared_libraries](./images/SharedLibraries.jpg)
|
||||
|
||||
## 2. Query OneDrive API to obtain required configuration details
|
||||
Run the following command using the 'onedrive' client to query the OneDrive API to obtain the required 'drive_id' of the SharePoint Library that you wish to sync:
|
||||
```text
|
||||
onedrive --get-O365-drive-id '<your site name to search>'
|
||||
```
|
||||
This will return something similar to the following:
|
||||
```text
|
||||
Configuration file successfully loaded
|
||||
Configuring Global Azure AD Endpoints
|
||||
Initializing the Synchronization Engine ...
|
||||
Office 365 Library Name Query: <your site name to search>
|
||||
-----------------------------------------------
|
||||
Site Name: <your site name>
|
||||
Library Name: <your library name>
|
||||
drive_id: b!6H_y8B...xU5
|
||||
Library URL: <your library URL>
|
||||
-----------------------------------------------
|
||||
```
|
||||
If there are no matches to the site you are attempting to search, the following will be displayed:
|
||||
```text
|
||||
Configuration file successfully loaded
|
||||
Configuring Global Azure AD Endpoints
|
||||
Initializing the Synchronization Engine ...
|
||||
Office 365 Library Name Query: blah
|
||||
|
||||
ERROR: The requested SharePoint site could not be found. Please check it's name and your permissions to access the site.
|
||||
|
||||
The following SharePoint site names were returned:
|
||||
* <site name 1>
|
||||
* <site name 2>
|
||||
...
|
||||
* <site name X>
|
||||
```
|
||||
This list of returned site names can be used to help identify the correct site name to search for
|
||||
|
||||
## 3. Create a new configuration directory and sync location for this SharePoint Library
|
||||
Create a new configuration directory for this SharePoint Library in the following manner:
|
||||
```text
|
||||
mkdir ~/.config/SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
Create a new local folder to store the SharePoint Library data in:
|
||||
```text
|
||||
mkdir ~/SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
**Note:** Do not use spaces in the directory name, use '_' as a replacement
|
||||
|
||||
## 4. Configure SharePoint Library config file with the required 'drive_id' & 'sync_dir' options
|
||||
Download a copy of the default configuration file by downloading this file from GitHub and saving this file in the directory created above:
|
||||
```text
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/config -O ~/.config/SharePoint_My_Library_Name/config
|
||||
```
|
||||
|
||||
Update your 'onedrive' configuration file (`~/.config/SharePoint_My_Library_Name/config`) with the local folder where you will store your data:
|
||||
```text
|
||||
sync_dir = "~/SharePoint_My_Library_Name"
|
||||
```
|
||||
|
||||
Update your 'onedrive' configuration file (`~/.config/SharePoint_My_Library_Name/config`) with the 'drive_id' value obtained in the steps above:
|
||||
```text
|
||||
drive_id = "insert the drive_id value from above here"
|
||||
```
|
||||
The OneDrive client will now be configured to sync this SharePoint shared library to your local system and the location you have configured.
|
||||
|
||||
**Note:** After changing `drive_id`, you must perform a full re-synchronization by adding `--resync` to your existing command line.
|
||||
|
||||
## 5. Validate and Test the configuration
|
||||
Validate your new configuration using the `--display-config` option to validate you have configured the application correctly:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --display-config
|
||||
```
|
||||
|
||||
Test your new configuration using the `--dry-run` option to validate the application configuration:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbose --dry-run
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 6. Sync the SharePoint Library as required
|
||||
Sync the SharePoint Library to your system with either `--synchronize` or `--monitor` operations:
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --synchronize --verbose
|
||||
```
|
||||
|
||||
```text
|
||||
onedrive --confdir="~/.config/SharePoint_My_Library_Name" --monitor --verbose
|
||||
```
|
||||
|
||||
**Note:** As this is a *new* configuration, the application will be required to be re-authorised the first time this command is run with the new configuration.
|
||||
|
||||
## 7. Enable custom systemd service for SharePoint Library
|
||||
Systemd can be used to automatically run this configuration in the background, however, a unique systemd service will need to be set up for this SharePoint Library instance
|
||||
|
||||
In order to automatically start syncing each SharePoint Library, you will need to create a service file for each SharePoint Library. From the applicable 'systemd folder' where the applicable systemd service file exists:
|
||||
* RHEL / CentOS: `/usr/lib/systemd/system`
|
||||
* Others: `/usr/lib/systemd/user` and `/lib/systemd/system`
|
||||
|
||||
### Step 1: Create a new systemd service file
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive.service /usr/lib/systemd/system/onedrive-SharePoint_My_Library_Name.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive@.service /usr/lib/systemd/system/onedrive-SharePoint_My_Library_Name@.service
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/user/onedrive.service /usr/lib/systemd/user/onedrive-SharePoint_My_Library_Name.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /lib/systemd/system/onedrive@.service /lib/systemd/system/onedrive-SharePoint_My_Library_Name@.service
|
||||
```
|
||||
|
||||
### Step 2: Edit new systemd service file
|
||||
Edit the new systemd file, updating the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/full/path/to/config/dir"
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/SharePoint_My_Library_Name"
|
||||
```
|
||||
|
||||
**Note:** When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl enable onedrive-SharePoint_My_Library_Name
|
||||
systemctl start onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user enable onedrive-SharePoint_My_Library_Name
|
||||
systemctl --user start onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
or
|
||||
```text
|
||||
systemctl --user enable onedrive-SharePoint_My_Library_Name@myusername.service
|
||||
systemctl --user start onedrive-SharePoint_My_Library_Name@myusername.service
|
||||
```
|
||||
|
||||
### Step 4: Viewing systemd status and logs for the custom service
|
||||
#### Viewing systemd service status - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl status onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Viewing systemd service status - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user status onedrive-SharePoint_My_Library_Name
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
journalctl --unit=onedrive-SharePoint_My_Library_Name -f
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
journalctl --user --unit=onedrive-SharePoint_My_Library_Name -f
|
||||
```
|
||||
|
||||
### Step 5: (Optional) Run custom systemd service at boot without user login
|
||||
In some cases it may be desirable for the systemd service to start without having to login as your 'user'
|
||||
|
||||
All the systemd steps above that utilise the `--user` option will run the systemd service as your particular user. As such, the systemd service will not start unless you actually login to your system.
|
||||
|
||||
To avoid this issue, you need to reconfigure your 'user' account so that the systemd services you have created will startup without you having to login to your system:
|
||||
```text
|
||||
loginctl enable-linger <your_user_name>
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
alex@ubuntu-headless:~$ loginctl enable-linger alex
|
||||
```
|
||||
|
||||
## 8. Configuration for a SharePoint Library is complete
|
||||
The 'onedrive' client configuration for this particular SharePoint Library is now complete.
|
||||
|
||||
# How to configure multiple OneDrive SharePoint Shared Library sync
|
||||
Create a new configuration as per the process above. Repeat these steps for each SharePoint Library that you wish to use.
|
1469
docs/USAGE.md
Normal file
302
docs/advanced-usage.md
Normal file
|
@ -0,0 +1,302 @@
|
|||
# Advanced Configuration of the OneDrive Free Client
|
||||
This document covers the following scenarios:
|
||||
* [Configuring the client to use multiple OneDrive accounts / configurations](#configuring-the-client-to-use-multiple-onedrive-accounts--configurations)
|
||||
* [Configuring the client to use multiple OneDrive accounts / configurations using Docker](#configuring-the-client-to-use-multiple-onedrive-accounts--configurations-using-docker)
|
||||
* [Configuring the client for use in dual-boot (Windows / Linux) situations](#configuring-the-client-for-use-in-dual-boot-windows--linux-situations)
|
||||
* [Configuring the client for use when 'sync_dir' is a mounted directory](#configuring-the-client-for-use-when-sync_dir-is-a-mounted-directory)
|
||||
* [Upload data from the local ~/OneDrive folder to a specific location on OneDrive](#upload-data-from-the-local-onedrive-folder-to-a-specific-location-on-onedrive)
|
||||
|
||||
## Configuring the client to use multiple OneDrive accounts / configurations
|
||||
Essentially, each OneDrive account or SharePoint Shared Library which you require to be synced needs to have its own and unique configuration, local sync directory and service files. To do this, the following steps are needed:
|
||||
1. Create a unique configuration folder for each onedrive client configuration that you need
|
||||
2. Copy to this folder a copy of the default configuration file
|
||||
3. Update the default configuration file as required, changing the required minimum config options and any additional options as needed to support your multi-account configuration
|
||||
4. Authenticate the client using the new configuration directory
|
||||
5. Test the configuration using '--display-config' and '--dry-run'
|
||||
6. Sync the OneDrive account data as required using `--synchronize` or `--monitor`
|
||||
7. Configure a unique systemd service file for this account configuration
|
||||
|
||||
### 1. Create a unique configuration folder for each onedrive client configuration that you need
|
||||
Make the configuration folder as required for this new configuration, for example:
|
||||
```text
|
||||
mkdir ~/.config/my-new-config
|
||||
```
|
||||
|
||||
### 2. Copy to this folder a copy of the default configuration file
|
||||
Copy to this folder a copy of the default configuration file by downloading this file from GitHub and saving this file in the directory created above:
|
||||
```text
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/config -O ~/.config/my-new-config/config
|
||||
```
|
||||
|
||||
### 3. Update the default configuration file
|
||||
The following config options *must* be updated to ensure that individual account data is not cross populated with other OneDrive accounts or other configurations:
|
||||
* sync_dir
|
||||
|
||||
Other options that may require to be updated, depending on the OneDrive account that is being configured:
|
||||
* drive_id
|
||||
* application_id
|
||||
* sync_business_shared_folders
|
||||
* skip_dir
|
||||
* skip_file
|
||||
* Creation of a 'sync_list' file if required
|
||||
* Creation of a 'business_shared_folders' file if required
|
||||
|
||||
### 4. Authenticate the client
|
||||
Authenticate the client using the specific configuration file:
|
||||
```text
|
||||
onedrive --confdir="~/.config/my-new-config"
|
||||
```
|
||||
You will be asked to open a specific URL by using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving permission to the application, you will be redirected to a blank page. Copy the URI of the blank page into the application.
|
||||
```text
|
||||
[user@hostname ~]$ onedrive --confdir="~/.config/my-new-config"
|
||||
Configuration file successfully loaded
|
||||
Configuring Global Azure AD Endpoints
|
||||
Authorize this app visiting:
|
||||
|
||||
https://.....
|
||||
|
||||
Enter the response uri:
|
||||
|
||||
```
|
||||
|
||||
### 5. Display and Test the configuration
|
||||
Test the configuration using '--display-config' and '--dry-run'. By doing so, this allows you to test any configuration that you have currently made, enabling you to fix this configuration before using the configuration.
|
||||
|
||||
#### Display the configuration
|
||||
```text
|
||||
onedrive --confdir="~/.config/my-new-config" --display-config
|
||||
```
|
||||
|
||||
#### Test the configuration by performing a dry-run
|
||||
```text
|
||||
onedrive --confdir="~/.config/my-new-config" --synchronize --verbose --dry-run
|
||||
```
|
||||
|
||||
If both of these operate as per your expectation, the configuration of this client setup is complete and validated. If not, amend your configuration as required.
|
||||
|
||||
### 6. Sync the OneDrive account data as required
|
||||
Sync the data for the new account configuration as required:
|
||||
```text
|
||||
onedrive --confdir="~/.config/my-new-config" --synchronize --verbose
|
||||
```
|
||||
or
|
||||
```text
|
||||
onedrive --confdir="~/.config/my-new-config" --monitor --verbose
|
||||
```
|
||||
|
||||
* `--synchronize` does a one-time sync
|
||||
* `--monitor` keeps the application running and monitoring for changes both local and remote
|
||||
|
||||
### 7. Automatic syncing of new OneDrive configuration
|
||||
In order to automatically start syncing your OneDrive accounts, you will need to create a service file for each account. From the applicable 'systemd folder' where the applicable systemd service file exists:
|
||||
* RHEL / CentOS: `/usr/lib/systemd/system`
|
||||
* Others: `/usr/lib/systemd/user` and `/lib/systemd/system`
|
||||
|
||||
### Step 1: Create a new systemd service file
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive.service /usr/lib/systemd/system/onedrive-my-new-config.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/system/onedrive@.service /usr/lib/systemd/system/onedrive-my-new-config@.service
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
Copy the required service file to a new name:
|
||||
```text
|
||||
sudo cp /usr/lib/systemd/user/onedrive.service /usr/lib/systemd/user/onedrive-my-new-config.service
|
||||
```
|
||||
or
|
||||
```text
|
||||
sudo cp /lib/systemd/system/onedrive@.service /lib/systemd/system/onedrive-my-new-config@.service
|
||||
```
|
||||
|
||||
### Step 2: Edit new systemd service file
|
||||
Edit the new systemd file, updating the line beginning with `ExecStart` so that the confdir mirrors the one you used above:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/full/path/to/config/dir"
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
ExecStart=/usr/local/bin/onedrive --monitor --confdir="/home/myusername/.config/my-new-config"
|
||||
```
|
||||
|
||||
**Note:** When running the client manually, `--confdir="~/.config/......` is acceptable. In a systemd configuration file, the full path must be used. The `~` must be expanded.
|
||||
|
||||
### Step 3: Enable the new systemd service
|
||||
Once the file is correctly edited, you can enable the new systemd service using the following commands.
|
||||
|
||||
#### Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl enable onedrive-my-new-config
|
||||
systemctl start onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user enable onedrive-my-new-config
|
||||
systemctl --user start onedrive-my-new-config
|
||||
```
|
||||
or
|
||||
```text
|
||||
systemctl --user enable onedrive-my-new-config@myusername.service
|
||||
systemctl --user start onedrive-my-new-config@myusername.service
|
||||
```
|
||||
|
||||
### Step 4: Viewing systemd status and logs for the custom service
|
||||
#### Viewing systemd service status - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
systemctl status onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Viewing systemd service status - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
systemctl --user status onedrive-my-new-config
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Red Hat Enterprise Linux, CentOS Linux
|
||||
```text
|
||||
journalctl --unit=onedrive-my-new-config -f
|
||||
```
|
||||
|
||||
#### Viewing journalctl systemd logs - Others such as Arch, Ubuntu, Debian, OpenSuSE, Fedora
|
||||
```text
|
||||
journalctl --user --unit=onedrive-my-new-config -f
|
||||
```
|
||||
|
||||
### Step 5: (Optional) Run custom systemd service at boot without user login
|
||||
In some cases it may be desirable for the systemd service to start without having to login as your 'user'
|
||||
|
||||
All the systemd steps above that utilise the `--user` option, will run the systemd service as your particular user. As such, the systemd service will not start unless you actually login to your system.
|
||||
|
||||
To avoid this issue, you need to reconfigure your 'user' account so that the systemd services you have created will startup without you having to login to your system:
|
||||
```text
|
||||
loginctl enable-linger <your_user_name>
|
||||
```
|
||||
|
||||
Example:
|
||||
```text
|
||||
alex@ubuntu-headless:~$ loginctl enable-linger alex
|
||||
```
|
||||
|
||||
Repeat these steps for each OneDrive new account that you wish to use.
|
||||
|
||||
## Configuring the client to use multiple OneDrive accounts / configurations using Docker
|
||||
In some situations it may be desirable to run multiple Docker containers at the same time, each with their own configuration.
|
||||
|
||||
To run the Docker container successfully, it needs two unique Docker volumes to operate:
|
||||
* Your configuration Docker volumes
|
||||
* Your data Docker volume
|
||||
|
||||
When running multiple Docker containers, this is no different - each Docker container must have its own configuration and data volume.
|
||||
|
||||
### High level steps:
|
||||
1. Create the required unique Docker volumes for the configuration volume
|
||||
2. Create the required unique local path used for the Docker data volume
|
||||
3. Start the multiple Docker containers with the required configuration for each container
|
||||
|
||||
#### Create the required unique Docker volumes for the configuration volume
|
||||
Create the required unique Docker volumes for the configuration volume(s):
|
||||
```text
|
||||
docker volume create onedrive_conf_sharepoint_site1
|
||||
docker volume create onedrive_conf_sharepoint_site2
|
||||
docker volume create onedrive_conf_sharepoint_site3
|
||||
...
|
||||
docker volume create onedrive_conf_sharepoint_site50
|
||||
```
|
||||
|
||||
#### Create the required unique local path used for the Docker data volume
|
||||
Create the required unique local path used for the Docker data volume
|
||||
```text
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite1
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite2
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite3
|
||||
...
|
||||
mkdir -p /use/full/local/path/no/tilda/SharePointSite50
|
||||
```
|
||||
|
||||
#### Start the Docker container with the required configuration (example)
|
||||
```text
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site1:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite1:/onedrive/data" driveone/onedrive:latest
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site2:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite2:/onedrive/data" driveone/onedrive:latest
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site3:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite3:/onedrive/data" driveone/onedrive:latest
|
||||
...
|
||||
docker run -it --name onedrive -v onedrive_conf_sharepoint_site50:/onedrive/conf -v "/use/full/local/path/no/tilda/SharePointSite50:/onedrive/data" driveone/onedrive:latest
|
||||
```
|
||||
|
||||
#### TIP
|
||||
To avoid 're-authenticating' and 'authorising' each individual Docker container, if all the Docker containers are using the 'same' OneDrive credentials, you can re-use the 'refresh_token' from one Docker container to another by copying this file to the configuration Docker volume of each Docker container.
|
||||
|
||||
If the account credentials are different .. you will need to re-authenticate each Docker container individually.
|
||||
|
||||
## Configuring the client for use in dual-boot (Windows / Linux) situations
|
||||
When dual booting Windows and Linux, depending on the Windows OneDrive account configuration, the 'Files On-Demand' option may be enabled when running OneDrive within your Windows environment.
|
||||
|
||||
When this option is enabled in Windows, if you are sharing this location between your Windows and Linux systems, all files will be a 0 byte link, and cannot be used under Linux.
|
||||
|
||||
To fix the problem of windows turning all files (that should be kept offline) into links, you have to uncheck a specific option in the onedrive settings window. The option in question is `Save space and download files as you use them`.
|
||||
|
||||
To find this setting, open the onedrive pop-up window from the taskbar, click "Help & Settings" > "Settings". This opens a new window. Go to the tab "Settings" and look for the section "Files On-Demand".
|
||||
|
||||
After unchecking the option and clicking "OK", the Windows OneDrive client should restart itself and start actually downloading your files so they will truly be available on your disk when offline. These files will then be fully accessible under Linux and the Linux OneDrive client.
|
||||
|
||||
| OneDrive Personal | Onedrive Business<br>SharePoint |
|
||||
|---|---|
|
||||
| ![Uncheck-Personal](./images/personal-files-on-demand.png) | ![Uncheck-Business](./images/business-files-on-demand.png) |
|
||||
|
||||
## Configuring the client for use when 'sync_dir' is a mounted directory
|
||||
In some environments, your setup might be that your configured 'sync_dir' is pointing to another mounted file system - an NFS|CIFS location, an external drive (USB stick, eSATA etc). As such, you configure your 'sync_dir' as follows:
|
||||
```text
|
||||
sync_dir = "/path/to/mountpoint/OneDrive"
|
||||
```
|
||||
|
||||
The issue here is - how does the client react if the mount point gets removed - network loss, device removal?
|
||||
|
||||
The client has zero knowledge of any event that causes a mountpoint to become unavailable, thus, the client (if you are running as a service) will assume that you deleted the files, thus, will go ahead and delete all your files on OneDrive. This is most certainly an undesirable action.
|
||||
|
||||
There are a few options here which you can configure in your 'config' file to assist you to prevent this sort of item from occurring:
|
||||
1. classify_as_big_delete
|
||||
2. check_nomount
|
||||
3. check_nosync
|
||||
|
||||
**Note:** Before making any change to your configuration, stop any sync process & stop any onedrive systemd service from running.
|
||||
|
||||
### classify_as_big_delete
|
||||
By default, this uses a value of 1000 files|folders. In the event of an undesirable unmount, if you have more than 1000 files, this default level will prevent the client from executing the online delete. Modify this value up or down as desired.
|
||||
|
||||
### check_nomount & check_nosync
|
||||
These two options are really the right safe guards to use.
|
||||
|
||||
In your 'mount point', *before* you mount your external folder|device, create an empty `.nosync` file, so that this is the *only* file present in the mount location before you mount your data to your mount point. When you mount your data, this '.nosync' file will not be visible, but, if the device you are mounting goes away - this '.nosync' file is the only file visible.
|
||||
|
||||
Next, in your 'config' file, configure the following options: `check_nomount = "true"` and `check_nosync = "true"`
|
||||
|
||||
What this will do is tell the client, if at *any* point you see this file - stop syncing - thus, protecting your online data from being deleted by the mounted device being suddenly unavailable.
|
||||
|
||||
After making this sort of change - test with `--dry-run` so you can see the impacts of your mount point being unavailable, and how the client is now reacting. Once you are happy with how the system will react, restart your sync processes.
|
||||
|
||||
|
||||
## Upload data from the local ~/OneDrive folder to a specific location on OneDrive
|
||||
In some environments, you may not want your local ~/OneDrive folder to be uploaded directly to the root of your OneDrive account online.
|
||||
|
||||
Unfortunately, the OneDrive API lacks any facility to perform a re-direction of data during upload.
|
||||
|
||||
The workaround for this is to structure your local filesystem and reconfigure your client to achieve the desired goal.
|
||||
|
||||
### High level steps:
|
||||
1. Create a new folder, for example `/opt/OneDrive`
|
||||
2. Configure your application config 'sync_dir' to look at this folder
|
||||
3. Inside `/opt/OneDrive` create the folder you wish to sync the data online to, for example: `/opt/OneDrive/RemoteOnlineDestination`
|
||||
4. Configure the application to only sync `/opt/OneDrive/RemoteOnlineDestination` via 'sync_list'
|
||||
5. Symbolically link `~/OneDrive` -> `/opt/OneDrive/RemoteOnlineDestination`
|
||||
|
||||
### Outcome:
|
||||
* Your `~/OneDrive` will look / feel as per normal
|
||||
* The data will be stored online under `/RemoteOnlineDestination`
|
||||
|
||||
### Testing:
|
||||
* Validate your configuration with `onedrive --display-config`
|
||||
* Test your configuration with `onedrive --dry-run`
|
97
docs/application-security.md
Normal file
|
@ -0,0 +1,97 @@
|
|||
# OneDrive Client for Linux Application Security
|
||||
This document details the following information:
|
||||
|
||||
* Why is this application an 'unverified publisher'?
|
||||
* Application Security and Permission Scopes
|
||||
* How to change Permission Scopes
|
||||
* How to review your existing application access consent
|
||||
|
||||
## Why is this application an 'unverified publisher'?
|
||||
Publisher Verification, as per the Microsoft [process](https://learn.microsoft.com/en-us/azure/active-directory/develop/publisher-verification-overview) has actually been configured, and, actually has been verified!
|
||||
|
||||
### Verified Publisher Configuration Evidence
|
||||
As per the image below, the Azure portal shows that the 'Publisher Domain' has actually been verified:
|
||||
![confirmed_verified_publisher](./images/confirmed_verified_publisher.jpg)
|
||||
|
||||
* The 'Publisher Domain' is: https://abraunegg.github.io/
|
||||
* The required 'Microsoft Identity Association' is: https://abraunegg.github.io/.well-known/microsoft-identity-association.json
|
||||
|
||||
## Application Security and Permission Scopes
|
||||
There are 2 main components regarding security for this application:
|
||||
* Azure Application Permissions
|
||||
* User Authentication Permissions
|
||||
|
||||
Keeping this in mind, security options should follow the security principle of 'least privilege':
|
||||
> The principle that a security architecture should be designed so that each entity
|
||||
> is granted the minimum system resources and authorizations that the entity needs
|
||||
> to perform its function.
|
||||
|
||||
Reference: [https://csrc.nist.gov/glossary/term/least_privilege](https://csrc.nist.gov/glossary/term/least_privilege)
|
||||
|
||||
As such, the following API permissions are used by default:
|
||||
|
||||
### Default Azure Application Permissions
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.Read | Delegated | Have read-only access to user files | No |
|
||||
| Files.Read.All | Delegated | Have read-only access to all files user can access | No |
|
||||
| Sites.Read.All | Delegated | Have read-only access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
![default_authentication_scopes](./images/default_authentication_scopes.jpg)
|
||||
|
||||
### Default User Authentication Permissions
|
||||
|
||||
When a user authenticates with Microsoft OneDrive, additional account permissions are provided by the service to give the user specific access to their data. These are delegated permissions provided by the platform:
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
When these delegated API permissions are combined, these provide the effective authentication scope for the OneDrive Client for Linux to access your data. The resulting effective 'default' permissions will be:
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
These 'default' permissions will allow the OneDrive Client for Linux to read, write and delete data associated with your OneDrive Account.
|
||||
|
||||
## Configuring read-only access to your OneDrive data
|
||||
In some situations, it may be desirable to configure the OneDrive Client for Linux totally in read-only operation.
|
||||
|
||||
To change the application to 'read-only' access, add the following to your configuration file:
|
||||
```text
|
||||
read_only_auth_scope = "true"
|
||||
```
|
||||
This will change the user authentication scope request to use read-only access.
|
||||
|
||||
**Note:** When changing this value, you *must* re-authenticate the client using the `--reauth` option to utilise the change in authentication scopes.
|
||||
|
||||
When using read-only authentication scopes, the uploading of any data or local change to OneDrive will fail with the following error:
|
||||
```
|
||||
2022-Aug-06 13:16:45.3349625 ERROR: Microsoft OneDrive API returned an error with the following message:
|
||||
2022-Aug-06 13:16:45.3351661 Error Message: HTTP request returned status code 403 (Forbidden)
|
||||
2022-Aug-06 13:16:45.3352467 Error Reason: Access denied
|
||||
2022-Aug-06 13:16:45.3352838 Error Timestamp: 2022-06-12T13:16:45
|
||||
2022-Aug-06 13:16:45.3353171 API Request ID: <redacted>
|
||||
```
|
||||
|
||||
As such, it is also advisable for you to add the following to your configuration file so that 'uploads' are prevented:
|
||||
```text
|
||||
download_only = "true"
|
||||
```
|
||||
|
||||
**Important:** Additionally when using 'read_only_auth_scope' you also will need to remove your existing application access consent otherwise old authentication consent will be valid and will be used. This will mean the application will technically have the consent to upload data. See below on how to remove your prior application consent.
|
||||
|
||||
## Reviewing your existing application access consent
|
||||
|
||||
To review your existing application access consent, you need to access the following URL: https://account.live.com/consent/Manage
|
||||
|
||||
From here, you are able to review what applications have been given what access to your data, and remove application access as required.
|
379
docs/build-rpm-howto.md
Normal file
|
@ -0,0 +1,379 @@
|
|||
# RPM Package Build Process
|
||||
The instructions below have been tested on the following systems:
|
||||
* CentOS 7 x86_64
|
||||
* CentOS 8 x86_64
|
||||
|
||||
These instructions should also be applicable for RedHat & Fedora platforms, or any other RedHat RPM based distribution.
|
||||
|
||||
## Prepare Package Development Environment (CentOS 7, 8)
|
||||
Install the following dependencies on your build system:
|
||||
```text
|
||||
sudo yum groupinstall -y 'Development Tools'
|
||||
sudo yum install -y libcurl-devel
|
||||
sudo yum install -y sqlite-devel
|
||||
sudo yum install -y libnotify-devel
|
||||
sudo yum install -y wget
|
||||
sudo yum install -y http://downloads.dlang.org/releases/2.x/2.088.0/dmd-2.088.0-0.fedora.x86_64.rpm
|
||||
mkdir -p ~/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
|
||||
```
|
||||
|
||||
## Build RPM from spec file
|
||||
Build the RPM from the provided spec file:
|
||||
```text
|
||||
wget https://github.com/abraunegg/onedrive/archive/refs/tags/v2.4.22.tar.gz -O ~/rpmbuild/SOURCES/v2.4.22.tar.gz
|
||||
wget https://raw.githubusercontent.com/abraunegg/onedrive/master/contrib/spec/onedrive.spec.in -O ~/rpmbuild/SPECS/onedrive.spec
|
||||
rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
```
|
||||
|
||||
## RPM Build Example Results
|
||||
Below are example output results of building, installing and running the RPM package on the respective platforms:
|
||||
|
||||
### CentOS 7
|
||||
```text
|
||||
[alex@localhost ~]$ rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.wi6Tdz
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ rm -rf onedrive-2.4.15
|
||||
+ /usr/bin/tar -xf -
|
||||
+ /usr/bin/gzip -dc /home/alex/rpmbuild/SOURCES/v2.4.15.tar.gz
|
||||
+ STATUS=0
|
||||
+ '[' 0 -ne 0 ']'
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
|
||||
+ exit 0
|
||||
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.dyeEuM
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ CFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic'
|
||||
+ export CFLAGS
|
||||
+ CXXFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic'
|
||||
+ export CXXFLAGS
|
||||
+ FFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic -I/usr/lib64/gfortran/modules'
|
||||
+ export FFLAGS
|
||||
+ FCFLAGS='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic -I/usr/lib64/gfortran/modules'
|
||||
+ export FCFLAGS
|
||||
+ LDFLAGS='-Wl,-z,relro '
|
||||
+ export LDFLAGS
|
||||
+ '[' 1 == 1 ']'
|
||||
+ '[' x86_64 == ppc64le ']'
|
||||
++ find . -name config.guess -o -name config.sub
|
||||
+ ./configure --build=x86_64-redhat-linux-gnu --host=x86_64-redhat-linux-gnu --program-prefix= --disable-dependency-tracking --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --sysconfdir=/etc --datadir=/usr/share --includedir=/usr/include --libdir=/usr/lib64 --libexecdir=/usr/libexec --localstatedir=/var --sharedstatedir=/var/lib --mandir=/usr/share/man --infodir=/usr/share/info
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking for x86_64-redhat-linux-gnu-pkg-config... no
|
||||
checking for pkg-config... /usr/bin/pkg-config
|
||||
checking pkg-config is at least version 0.9.0... yes
|
||||
checking for dmd... dmd
|
||||
checking version of D compiler... 2.087.0
|
||||
checking for curl... yes
|
||||
checking for sqlite... yes
|
||||
configure: creating ./config.status
|
||||
config.status: creating Makefile
|
||||
config.status: creating contrib/pacman/PKGBUILD
|
||||
config.status: creating contrib/spec/onedrive.spec
|
||||
config.status: creating onedrive.1
|
||||
config.status: creating contrib/systemd/onedrive.service
|
||||
config.status: creating contrib/systemd/onedrive@.service
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
+ make
|
||||
if [ -f .git/HEAD ] ; then \
|
||||
git describe --tags > version ; \
|
||||
else \
|
||||
echo v2.4.15 > version ; \
|
||||
fi
|
||||
dmd -w -g -O -J. -L-lcurl -L-lsqlite3 -L-ldl src/config.d src/itemdb.d src/log.d src/main.d src/monitor.d src/onedrive.d src/qxor.d src/selective.d src/sqlite.d src/sync.d src/upload.d src/util.d src/progress.d src/arsd/cgi.d -ofonedrive
|
||||
+ exit 0
|
||||
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.L3JbHy
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ '[' /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64 '!=' / ']'
|
||||
+ rm -rf /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
++ dirname /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
+ mkdir -p /home/alex/rpmbuild/BUILDROOT
|
||||
+ mkdir /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/make install DESTDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64 PREFIX=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
/usr/bin/install -c -D onedrive /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/bin/onedrive
|
||||
/usr/bin/install -c -D onedrive.1 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/man/man1/onedrive.1
|
||||
/usr/bin/install -c -D -m 644 contrib/logrotate/onedrive.logrotate /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/etc/logrotate.d/onedrive
|
||||
mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -c -D -m 644 README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -c -d -m 0755 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/user /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -c -m 0644 contrib/systemd/onedrive@.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -c -m 0644 contrib/systemd/onedrive.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/lib/systemd/system
|
||||
+ /usr/lib/rpm/check-buildroot
|
||||
+ /usr/lib/rpm/redhat/brp-compress
|
||||
+ /usr/lib/rpm/redhat/brp-strip /usr/bin/strip
|
||||
+ /usr/lib/rpm/redhat/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
|
||||
+ /usr/lib/rpm/redhat/brp-strip-static-archive /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-python-bytecompile /usr/bin/python 1
|
||||
+ /usr/lib/rpm/redhat/brp-python-hardlink
|
||||
+ /usr/lib/rpm/redhat/brp-java-repack-jars
|
||||
Processing files: onedrive-2.4.15-1.el7.x86_64
|
||||
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.cpSXho
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ DOCDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ export DOCDIR
|
||||
+ /usr/bin/mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr README.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr LICENSE /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ cp -pr CHANGELOG.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64/usr/share/doc/onedrive-2.4.15
|
||||
+ exit 0
|
||||
Provides: config(onedrive) = 2.4.15-1.el7 onedrive = 2.4.15-1.el7 onedrive(x86-64) = 2.4.15-1.el7
|
||||
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
Requires: ld-linux-x86-64.so.2()(64bit) ld-linux-x86-64.so.2(GLIBC_2.3)(64bit) libc.so.6()(64bit) libc.so.6(GLIBC_2.14)(64bit) libc.so.6(GLIBC_2.15)(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libc.so.6(GLIBC_2.3.2)(64bit) libc.so.6(GLIBC_2.3.4)(64bit) libc.so.6(GLIBC_2.4)(64bit) libc.so.6(GLIBC_2.6)(64bit) libc.so.6(GLIBC_2.8)(64bit) libc.so.6(GLIBC_2.9)(64bit) libcurl.so.4()(64bit) libdl.so.2()(64bit) libdl.so.2(GLIBC_2.2.5)(64bit) libgcc_s.so.1()(64bit) libgcc_s.so.1(GCC_3.0)(64bit) libgcc_s.so.1(GCC_4.2.0)(64bit) libm.so.6()(64bit) libm.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) libpthread.so.0(GLIBC_2.3.4)(64bit) librt.so.1()(64bit) librt.so.1(GLIBC_2.2.5)(64bit) libsqlite3.so.0()(64bit) rtld(GNU_HASH)
|
||||
Checking for unpackaged file(s): /usr/lib/rpm/check-files /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el7.x86_64
|
||||
Wrote: /home/alex/rpmbuild/SRPMS/onedrive-2.4.15-1.el7.src.rpm
|
||||
Wrote: /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm
|
||||
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.nWoW33
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ exit 0
|
||||
[alex@localhost ~]$ sudo yum -y install /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm
|
||||
Loaded plugins: fastestmirror
|
||||
Examining /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm: onedrive-2.4.15-1.el7.x86_64
|
||||
Marking /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el7.x86_64.rpm to be installed
|
||||
Resolving Dependencies
|
||||
--> Running transaction check
|
||||
---> Package onedrive.x86_64 0:2.4.15-1.el7 will be installed
|
||||
--> Finished Dependency Resolution
|
||||
|
||||
Dependencies Resolved
|
||||
|
||||
==============================================================================================================================================================================================
|
||||
Package Arch Version Repository Size
|
||||
==============================================================================================================================================================================================
|
||||
Installing:
|
||||
onedrive x86_64 2.4.15-1.el7 /onedrive-2.4.15-1.el7.x86_64 7.2 M
|
||||
|
||||
Transaction Summary
|
||||
==============================================================================================================================================================================================
|
||||
Install 1 Package
|
||||
|
||||
Total size: 7.2 M
|
||||
Installed size: 7.2 M
|
||||
Downloading packages:
|
||||
Running transaction check
|
||||
Running transaction test
|
||||
Transaction test succeeded
|
||||
Running transaction
|
||||
Installing : onedrive-2.4.15-1.el7.x86_64 1/1
|
||||
Verifying : onedrive-2.4.15-1.el7.x86_64 1/1
|
||||
|
||||
Installed:
|
||||
onedrive.x86_64 0:2.4.15-1.el7
|
||||
|
||||
Complete!
|
||||
[alex@localhost ~]$ which onedrive
|
||||
/usr/bin/onedrive
|
||||
[alex@localhost ~]$ onedrive --version
|
||||
onedrive v2.4.15
|
||||
[alex@localhost ~]$ onedrive --display-config
|
||||
onedrive version = v2.4.15
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'upload_only' = false
|
||||
Config option 'no_remote_delete' = false
|
||||
Config option 'remove_source_files' = false
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = false
|
||||
[alex@localhost ~]$
|
||||
```
|
||||
|
||||
### CentOS 8
|
||||
```text
|
||||
[alex@localhost ~]$ rpmbuild -ba ~/rpmbuild/SPECS/onedrive.spec
|
||||
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.UINFyE
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ rm -rf onedrive-2.4.15
|
||||
+ /usr/bin/gzip -dc /home/alex/rpmbuild/SOURCES/v2.4.15.tar.gz
|
||||
+ /usr/bin/tar -xof -
|
||||
+ STATUS=0
|
||||
+ '[' 0 -ne 0 ']'
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
|
||||
+ exit 0
|
||||
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.cX1WQa
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ CFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ export CFLAGS
|
||||
+ CXXFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection'
|
||||
+ export CXXFLAGS
|
||||
+ FFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ export FFLAGS
|
||||
+ FCFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -I/usr/lib64/gfortran/modules'
|
||||
+ export FCFLAGS
|
||||
+ LDFLAGS='-Wl,-z,relro -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld'
|
||||
+ export LDFLAGS
|
||||
+ '[' 1 = 1 ']'
|
||||
+++ dirname ./configure
|
||||
++ find . -name config.guess -o -name config.sub
|
||||
+ '[' 1 = 1 ']'
|
||||
+ '[' x '!=' 'x-Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld' ']'
|
||||
++ find . -name ltmain.sh
|
||||
+ ./configure --build=x86_64-redhat-linux-gnu --host=x86_64-redhat-linux-gnu --program-prefix= --disable-dependency-tracking --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --sysconfdir=/etc --datadir=/usr/share --includedir=/usr/include --libdir=/usr/lib64 --libexecdir=/usr/libexec --localstatedir=/var --sharedstatedir=/var/lib --mandir=/usr/share/man --infodir=/usr/share/info
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
checking for a BSD-compatible install... /usr/bin/install -c
|
||||
checking for x86_64-redhat-linux-gnu-pkg-config... /usr/bin/x86_64-redhat-linux-gnu-pkg-config
|
||||
checking pkg-config is at least version 0.9.0... yes
|
||||
checking for dmd... dmd
|
||||
checking version of D compiler... 2.087.0
|
||||
checking for curl... yes
|
||||
checking for sqlite... yes
|
||||
configure: creating ./config.status
|
||||
config.status: creating Makefile
|
||||
config.status: creating contrib/pacman/PKGBUILD
|
||||
config.status: creating contrib/spec/onedrive.spec
|
||||
config.status: creating onedrive.1
|
||||
config.status: creating contrib/systemd/onedrive.service
|
||||
config.status: creating contrib/systemd/onedrive@.service
|
||||
configure: WARNING: unrecognized options: --disable-dependency-tracking
|
||||
+ make
|
||||
if [ -f .git/HEAD ] ; then \
|
||||
git describe --tags > version ; \
|
||||
else \
|
||||
echo v2.4.15 > version ; \
|
||||
fi
|
||||
dmd -w -g -O -J. -L-lcurl -L-lsqlite3 -L-ldl src/config.d src/itemdb.d src/log.d src/main.d src/monitor.d src/onedrive.d src/qxor.d src/selective.d src/sqlite.d src/sync.d src/upload.d src/util.d src/progress.d src/arsd/cgi.d -ofonedrive
|
||||
+ exit 0
|
||||
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.dNFPdx
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ '[' /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64 '!=' / ']'
|
||||
+ rm -rf /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
++ dirname /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
+ mkdir -p /home/alex/rpmbuild/BUILDROOT
|
||||
+ mkdir /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
+ cd onedrive-2.4.15
|
||||
+ /usr/bin/make install DESTDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64 'INSTALL=/usr/bin/install -p' PREFIX=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
/usr/bin/install -p -D onedrive /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/bin/onedrive
|
||||
/usr/bin/install -p -D onedrive.1 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/man/man1/onedrive.1
|
||||
/usr/bin/install -p -D -m 644 contrib/logrotate/onedrive.logrotate /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/etc/logrotate.d/onedrive
|
||||
mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -D -m 644 README.md config LICENSE CHANGELOG.md docs/Docker.md docs/INSTALL.md docs/SharePoint-Shared-Libraries.md docs/USAGE.md docs/BusinessSharedFolders.md docs/advanced-usage.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
/usr/bin/install -p -d -m 0755 /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/user /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive@.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
/usr/bin/install -p -m 0644 contrib/systemd/onedrive.service /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/lib/systemd/system
|
||||
+ /usr/lib/rpm/check-buildroot
|
||||
+ /usr/lib/rpm/redhat/brp-ldconfig
|
||||
/sbin/ldconfig: Warning: ignoring configuration file that cannot be opened: /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/etc/ld.so.conf: No such file or directory
|
||||
+ /usr/lib/rpm/brp-compress
|
||||
+ /usr/lib/rpm/brp-strip /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-strip-comment-note /usr/bin/strip /usr/bin/objdump
|
||||
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
|
||||
+ /usr/lib/rpm/brp-python-bytecompile 1
|
||||
+ /usr/lib/rpm/brp-python-hardlink
|
||||
+ PYTHON3=/usr/libexec/platform-python
|
||||
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
|
||||
Processing files: onedrive-2.4.15-1.el8.x86_64
|
||||
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.TnFKbZ
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ DOCDIR=/home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ export LC_ALL=C
|
||||
+ LC_ALL=C
|
||||
+ export DOCDIR
|
||||
+ /usr/bin/mkdir -p /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr README.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr LICENSE /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ cp -pr CHANGELOG.md /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64/usr/share/doc/onedrive
|
||||
+ exit 0
|
||||
warning: File listed twice: /usr/share/doc/onedrive
|
||||
warning: File listed twice: /usr/share/doc/onedrive/CHANGELOG.md
|
||||
warning: File listed twice: /usr/share/doc/onedrive/LICENSE
|
||||
warning: File listed twice: /usr/share/doc/onedrive/README.md
|
||||
Provides: config(onedrive) = 2.4.15-1.el8 onedrive = 2.4.15-1.el8 onedrive(x86-64) = 2.4.15-1.el8
|
||||
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
Requires: ld-linux-x86-64.so.2()(64bit) ld-linux-x86-64.so.2(GLIBC_2.3)(64bit) libc.so.6()(64bit) libc.so.6(GLIBC_2.14)(64bit) libc.so.6(GLIBC_2.15)(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libc.so.6(GLIBC_2.3.2)(64bit) libc.so.6(GLIBC_2.3.4)(64bit) libc.so.6(GLIBC_2.4)(64bit) libc.so.6(GLIBC_2.6)(64bit) libc.so.6(GLIBC_2.8)(64bit) libc.so.6(GLIBC_2.9)(64bit) libcurl.so.4()(64bit) libdl.so.2()(64bit) libdl.so.2(GLIBC_2.2.5)(64bit) libgcc_s.so.1()(64bit) libgcc_s.so.1(GCC_3.0)(64bit) libgcc_s.so.1(GCC_4.2.0)(64bit) libm.so.6()(64bit) libm.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) libpthread.so.0(GLIBC_2.3.4)(64bit) librt.so.1()(64bit) librt.so.1(GLIBC_2.2.5)(64bit) libsqlite3.so.0()(64bit) rtld(GNU_HASH)
|
||||
Checking for unpackaged file(s): /usr/lib/rpm/check-files /home/alex/rpmbuild/BUILDROOT/onedrive-2.4.15-1.el8.x86_64
|
||||
Wrote: /home/alex/rpmbuild/SRPMS/onedrive-2.4.15-1.el8.src.rpm
|
||||
Wrote: /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el8.x86_64.rpm
|
||||
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.FAMTFz
|
||||
+ umask 022
|
||||
+ cd /home/alex/rpmbuild/BUILD
|
||||
+ cd onedrive-2.4.15
|
||||
+ exit 0
|
||||
[alex@localhost ~]$ sudo yum -y install /home/alex/rpmbuild/RPMS/x86_64/onedrive-2.4.15-1.el8.x86_64.rpm
|
||||
Last metadata expiration check: 0:04:07 ago on Fri 14 Jan 2022 14:22:13 EST.
|
||||
Dependencies resolved.
|
||||
==============================================================================================================================================================================================
|
||||
Package Architecture Version Repository Size
|
||||
==============================================================================================================================================================================================
|
||||
Installing:
|
||||
onedrive x86_64 2.4.15-1.el8 @commandline 1.5 M
|
||||
|
||||
Transaction Summary
|
||||
==============================================================================================================================================================================================
|
||||
Install 1 Package
|
||||
|
||||
Total size: 1.5 M
|
||||
Installed size: 7.1 M
|
||||
Downloading Packages:
|
||||
Running transaction check
|
||||
Transaction check succeeded.
|
||||
Running transaction test
|
||||
Transaction test succeeded.
|
||||
Running transaction
|
||||
Preparing : 1/1
|
||||
Installing : onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
Running scriptlet: onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
Verifying : onedrive-2.4.15-1.el8.x86_64 1/1
|
||||
|
||||
Installed:
|
||||
onedrive-2.4.15-1.el8.x86_64
|
||||
|
||||
Complete!
|
||||
[alex@localhost ~]$ which onedrive
|
||||
/usr/bin/onedrive
|
||||
[alex@localhost ~]$ onedrive --version
|
||||
onedrive v2.4.15
|
||||
[alex@localhost ~]$ onedrive --display-config
|
||||
onedrive version = v2.4.15
|
||||
Config path = /home/alex/.config/onedrive
|
||||
Config file found in config path = false
|
||||
Config option 'check_nosync' = false
|
||||
Config option 'sync_dir' = /home/alex/OneDrive
|
||||
Config option 'skip_dir' =
|
||||
Config option 'skip_file' = ~*|.~*|*.tmp
|
||||
Config option 'skip_dotfiles' = false
|
||||
Config option 'skip_symlinks' = false
|
||||
Config option 'monitor_interval' = 300
|
||||
Config option 'min_notify_changes' = 5
|
||||
Config option 'log_dir' = /var/log/onedrive/
|
||||
Config option 'classify_as_big_delete' = 1000
|
||||
Config option 'upload_only' = false
|
||||
Config option 'no_remote_delete' = false
|
||||
Config option 'remove_source_files' = false
|
||||
Config option 'sync_root_files' = false
|
||||
Selective sync 'sync_list' configured = false
|
||||
Business Shared Folders configured = false
|
||||
[alex@localhost ~]$
|
||||
```
|
BIN
docs/images/SharedLibraries.jpg
Normal file
After Width: | Height: | Size: 17 KiB |
BIN
docs/images/application_registration.jpg
Normal file
After Width: | Height: | Size: 65 KiB |
BIN
docs/images/application_registration_done.jpg
Normal file
After Width: | Height: | Size: 72 KiB |
BIN
docs/images/authentication_response_uri.jpg
Normal file
After Width: | Height: | Size: 89 KiB |
BIN
docs/images/authentication_scopes.jpg
Normal file
After Width: | Height: | Size: 86 KiB |
BIN
docs/images/business-files-on-demand.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
docs/images/confirmed_verified_publisher.jpg
Normal file
After Width: | Height: | Size: 45 KiB |
BIN
docs/images/default_authentication_scopes.jpg
Normal file
After Width: | Height: | Size: 86 KiB |
BIN
docs/images/personal-files-on-demand.png
Normal file
After Width: | Height: | Size: 17 KiB |
BIN
docs/images/shared_with_me.JPG
Normal file
After Width: | Height: | Size: 45 KiB |
54
docs/known-issues.md
Normal file
|
@ -0,0 +1,54 @@
|
|||
# Known Issues
|
||||
The below are known issues with this client:
|
||||
|
||||
## Moving files into different folders should not cause data to delete and be re-uploaded
|
||||
**Issue Tracker:** [#876](https://github.com/abraunegg/onedrive/issues/876)
|
||||
|
||||
**Description:**
|
||||
|
||||
When running the client in standalone mode (`--synchronize`) moving folders that are successfully synced around between subsequent standalone syncs causes a deletion & re-upload of data to occur.
|
||||
|
||||
**Explanation:**
|
||||
|
||||
Technically, the client is 'working' correctly, as, when moving files, you are 'deleting' them from the current location, but copying them to the 'new location'. As the client is running in standalone sync mode, there is no way to track what OS operations have been done when the client is not running - thus, this is why the 'delete and upload' is occurring.
|
||||
|
||||
**Workaround:**
|
||||
|
||||
If the tracking of moving data to new local directories is required, it is better to run the client in service mode (`--monitor`) rather than in standalone mode, as the 'move' of files can then be handled at the point when it occurs, so that the data is moved to the new location on OneDrive without the need to be deleted and re-uploaded.
|
||||
|
||||
## Application 'stops' running without any visible reason
|
||||
**Issue Tracker:** [#494](https://github.com/abraunegg/onedrive/issues/494), [#753](https://github.com/abraunegg/onedrive/issues/753), [#792](https://github.com/abraunegg/onedrive/issues/792), [#884](https://github.com/abraunegg/onedrive/issues/884), [#1162](https://github.com/abraunegg/onedrive/issues/1162), [#1408](https://github.com/abraunegg/onedrive/issues/1408), [#1520](https://github.com/abraunegg/onedrive/issues/1520), [#1526](https://github.com/abraunegg/onedrive/issues/1526)
|
||||
|
||||
**Description:**
|
||||
|
||||
When running the client and performing an upload or download operation, the application just stops working without any reason or explanation. If `echo $?` is used after the application has exited without visible reason, an error level of 141 may be provided.
|
||||
|
||||
Additionally, this issue has mainly been seen when the client is operating against Microsoft's European Data Centres.
|
||||
|
||||
**Explanation:**
|
||||
|
||||
The client is heavily dependent on Curl and OpenSSL to perform the activities with the Microsoft OneDrive service. Generally, when this issue occurs, the following is found in the HTTPS Debug Log:
|
||||
```
|
||||
OpenSSL SSL_read: SSL_ERROR_SYSCALL, errno 104
|
||||
```
|
||||
The only way to determine that this is the cause of the application ceasing to work is to generate a HTTPS debug log using the following additional flags:
|
||||
```
|
||||
--verbose --verbose --debug-https
|
||||
```
|
||||
|
||||
This is indicative of the following:
|
||||
* Some sort of flaky Internet connection somewhere between you and the OneDrive service
|
||||
* Some sort of 'broken' HTTPS transparent inspection service inspecting your traffic somewhere between you and the OneDrive service
|
||||
|
||||
**How to resolve:**
|
||||
|
||||
The best avenues of action here are:
|
||||
* Ensure your OS is as up-to-date as possible
|
||||
* Get support from your OS vendor
|
||||
* Speak to your ISP or Help Desk for assistance
|
||||
* Open a ticket with OpenSSL and/or Curl teams to better handle this sort of connection failure
|
||||
* Generate a HTTPS Debug Log for this application and open a new support request with Microsoft and provide the debug log file for their analysis.
|
||||
|
||||
If you wish to diagnose this issue further, refer to the following:
|
||||
|
||||
https://maulwuff.de/research/ssl-debugging.html
|
145
docs/national-cloud-deployments.md
Normal file
|
@ -0,0 +1,145 @@
|
|||
# How to configure access to specific Microsoft Azure deployments
|
||||
## Application Version
|
||||
Before reading this document, please ensure you are running application version [![Version](https://img.shields.io/github/v/release/abraunegg/onedrive)](https://github.com/abraunegg/onedrive/releases) or greater. Use `onedrive --version` to determine what application version you are using and upgrade your client if required.
|
||||
|
||||
## Process Overview
|
||||
In some cases it is a requirement to utilise specific Microsoft Azure cloud deployments to conform with data and security requirements that require data to reside within the geographic borders of that country.
|
||||
Current national clouds that are supported are:
|
||||
* Microsoft Cloud for US Government
|
||||
* Microsoft Cloud Germany
|
||||
* Azure and Office365 operated by 21Vianet in China
|
||||
|
||||
In order to successfully use these specific Microsoft Azure deployments, the following steps are required:
|
||||
1. Register an application with the Microsoft identity platform using the Azure portal
|
||||
2. Configure the new application with the appropriate authentication scopes
|
||||
3. Validate that the authentication / redirect URI is correct for your application registration
|
||||
4. Configure the onedrive client to use the new application id as provided during application registration
|
||||
5. Configure the onedrive client to use the right Microsoft Azure deployment region that your application was registered with
|
||||
6. Authenticate the client
|
||||
|
||||
## Step 1: Register a new application with Microsoft Azure
|
||||
1. Log into your applicable Microsoft Azure Portal with your applicable Office365 identity:
|
||||
|
||||
| National Cloud Environment | Microsoft Azure Portal |
|
||||
|---|---|
|
||||
| Microsoft Cloud for US Government | https://portal.azure.com/ |
|
||||
| Microsoft Cloud Germany | https://portal.azure.com/ |
|
||||
| Azure and Office365 operated by 21Vianet | https://portal.azure.cn/ |
|
||||
|
||||
2. Select 'Azure Active Directory' as the service you wish to configure
|
||||
3. Under 'Manage', select 'App registrations' to register a new application
|
||||
4. Click 'New registration'
|
||||
5. Type in the appropriate details required as per below:
|
||||
|
||||
![application_registration](./images/application_registration.jpg)
|
||||
|
||||
6. To save the application registration, click 'Register' and something similar to the following will be displayed:
|
||||
|
||||
![application_registration_done](./images/application_registration_done.jpg)
|
||||
|
||||
**Note:** The Application (client) ID UUID as displayed after client registration, is what is required as the 'application_id' for Step 4 below.
|
||||
|
||||
## Step 2: Configure application authentication scopes
|
||||
Configure the API permissions as per the following:
|
||||
|
||||
| API / Permissions name | Type | Description | Admin consent required |
|
||||
|---|---|---|---|
|
||||
| Files.ReadWrite | Delegated | Have full access to user files | No |
|
||||
| Files.ReadWrite.All | Delegated | Have full access to all files user can access | No |
|
||||
| Sites.ReadWrite.All | Delegated | Have full access to all items in all site collections | No |
|
||||
| offline_access | Delegated | Maintain access to data you have given it access to | No |
|
||||
|
||||
![authentication_scopes](./images/authentication_scopes.jpg)
|
||||
|
||||
## Step 3: Validate that the authentication / redirect URI is correct
|
||||
Add the appropriate redirect URI for your Azure deployment:
|
||||
|
||||
![authentication_response_uri](./images/authentication_response_uri.jpg)
|
||||
|
||||
A valid entry for the response URI should be one of:
|
||||
* https://login.microsoftonline.us/common/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/common/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/common/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
For a single-tenant application, it may be necessary to use your specific tenant id instead of "common":
|
||||
* https://login.microsoftonline.us/example.onmicrosoft.us/oauth2/nativeclient (Microsoft Cloud for US Government)
|
||||
* https://login.microsoftonline.de/example.onmicrosoft.de/oauth2/nativeclient (Microsoft Cloud Germany)
|
||||
* https://login.chinacloudapi.cn/example.onmicrosoft.cn/oauth2/nativeclient (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
## Step 4: Configure the onedrive client to use new application registration
|
||||
Update to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
```text
|
||||
application_id = "insert valid entry here"
|
||||
```
|
||||
|
||||
This will reconfigure the client to use the new application registration you have created.
|
||||
|
||||
**Example:**
|
||||
```text
|
||||
application_id = "22c49a0d-d21c-4792-aed1-8f163c982546"
|
||||
```
|
||||
|
||||
## Step 5: Configure the onedrive client to use the specific Microsoft Azure deployment
|
||||
Update to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
```text
|
||||
azure_ad_endpoint = "insert valid entry here"
|
||||
```
|
||||
|
||||
Valid entries are:
|
||||
* USL4 (Microsoft Cloud for US Government)
|
||||
* USL5 (Microsoft Cloud for US Government - DOD)
|
||||
* DE (Microsoft Cloud Germany)
|
||||
* CN (Azure and Office365 operated by 21Vianet in China)
|
||||
|
||||
This will configure your client to use the correct Azure AD and Graph endpoints as per [https://docs.microsoft.com/en-us/graph/deployments](https://docs.microsoft.com/en-us/graph/deployments)
|
||||
|
||||
**Example:**
|
||||
```text
|
||||
azure_ad_endpoint = "USL4"
|
||||
```
|
||||
|
||||
If the Microsoft Azure deployment does not support multi-tenant applications, update to your 'onedrive' configuration file (`~/.config/onedrive/config`) the following:
|
||||
```text
|
||||
azure_tenant_id = "insert valid entry here"
|
||||
```
|
||||
|
||||
This will configure your client to use the specified tenant id in its Azure AD and Graph endpoint URIs, instead of "common".
|
||||
The tenant id may be the GUID Directory ID (formatted "00000000-0000-0000-0000-000000000000"), or the fully qualified tenant name (e.g. "example.onmicrosoft.us").
|
||||
The GUID Directory ID may be located in the Azure administration page as per [https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id](https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id). Note that you may need to go to your national-deployment-specific administration page, rather than following the links within that document.
|
||||
The tenant name may be obtained by following the PowerShell instructions on [https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id](https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id); it is shown as the "TenantDomain" upon completion of the "Connect-AzureAD" command.
|
||||
|
||||
**Example:**
|
||||
```text
|
||||
azure_tenant_id = "example.onmicrosoft.us"
|
||||
# or
|
||||
azure_tenant_id = "0c4be462-a1ab-499b-99e0-da08ce52a2cc"
|
||||
```
|
||||
|
||||
## Step 6: Authenticate the client
|
||||
Run the application without any additional command switches.
|
||||
|
||||
You will be asked to open a specific URL by using your web browser where you will have to login into your Microsoft Account and give the application the permission to access your files. After giving permission to the application, you will be redirected to a blank page. Copy the URI of the blank page into the application.
|
||||
```text
|
||||
[user@hostname ~]$ onedrive
|
||||
|
||||
Authorize this app visiting:
|
||||
|
||||
https://.....
|
||||
|
||||
Enter the response uri:
|
||||
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```
|
||||
[user@hostname ~]$ onedrive
|
||||
Authorize this app visiting:
|
||||
|
||||
https://login.microsoftonline.com/common/oauth2/v2.0/authorize?client_id=22c49a0d-d21c-4792-aed1-8f163c982546&scope=Files.ReadWrite%20Files.ReadWrite.all%20Sites.ReadWrite.All%20offline_access&response_type=code&redirect_uri=https://login.microsoftonline.com/common/oauth2/nativeclient
|
||||
|
||||
Enter the response uri: https://login.microsoftonline.com/common/oauth2/nativeclient?code=<redacted>
|
||||
|
||||
Application has been successfully authorised, however no additional command switches were provided.
|
||||
|
||||
Please use --help for further assistance in regards to running this application.
|
||||
```
|
65
docs/privacy-policy.md
Normal file
|
@ -0,0 +1,65 @@
|
|||
# Privacy Policy
|
||||
Effective Date: May 16 2018
|
||||
|
||||
## Introduction
|
||||
|
||||
This Privacy Policy outlines how OneDrive Client for Linux ("we," "our," or "us") collects, uses, and protects information when you use our software ("OneDrive Client for Linux"). We respect your privacy and are committed to ensuring the confidentiality and security of any information you provide while using the Software.
|
||||
|
||||
## Information We Do Not Collect
|
||||
|
||||
We want to be transparent about the fact that we do not collect any personal data, usage data, or tracking data through the Software. This means:
|
||||
|
||||
1. **No Personal Data**: We do not collect any information that can be used to personally identify you, such as your name, email address, phone number, or physical address.
|
||||
|
||||
2. **No Usage Data**: We do not collect data about how you use the Software, such as the features you use, the duration of your sessions, or any interactions within the Software.
|
||||
|
||||
3. **No Tracking Data**: We do not use cookies or similar tracking technologies to monitor your online behavior or track your activities across websites or apps.
|
||||
|
||||
## How We Use Your Information
|
||||
|
||||
Since we do not collect any personal, usage, or tracking data, there is no information for us to use for any purpose.
|
||||
|
||||
## Third-Party Services
|
||||
|
||||
The Software may include links to third-party websites or services, but we do not have control over the privacy practices or content of these third-party services. We encourage you to review the privacy policies of any third-party services you access through the Software.
|
||||
|
||||
## Children's Privacy
|
||||
|
||||
Since we do not collect any personal, usage, or tracking data, there is no restriction on the use of this application by anyone under the age of 18.
|
||||
|
||||
## Information You Choose to Share
|
||||
|
||||
While we do not collect personal data, usage data, or tracking data through the Software, there may be instances where you voluntarily choose to share information with us, particularly when submitting bug reports. These bug reports may contain sensitive information such as account details, file names, and directory names. It's important to note that these details are included in the logs and debug logs solely for the purpose of diagnosing and resolving technical issues with the Software.
|
||||
|
||||
We want to emphasize that, even in these cases, we do not have access to your actual data. The logs and debug logs provided in bug reports are used exclusively for technical troubleshooting and debugging purposes. We take measures to treat this information with the utmost care, and it is only accessible to our technical support and development teams. We do not use this information for any other purpose, and we have strict security measures in place to protect it.
|
||||
|
||||
## Protecting Your Sensitive Data
|
||||
|
||||
We are committed to safeguarding your sensitive data and maintaining its confidentiality. To ensure its protection:
|
||||
|
||||
1. **Limited Access**: Only authorized personnel within our technical support and development teams have access to the logs and debug logs containing sensitive data, and they are trained in handling this information securely.
|
||||
|
||||
2. **Data Encryption**: We use industry-standard encryption protocols to protect the transmission and storage of sensitive data.
|
||||
|
||||
3. **Data Retention**: We retain bug report data for a limited time necessary for resolving the reported issue. Once the issue is resolved, we promptly delete or anonymize the data.
|
||||
|
||||
4. **Security Measures**: We employ robust security measures to prevent unauthorized access, disclosure, or alteration of sensitive data.
|
||||
|
||||
By submitting a bug report, you acknowledge and consent to the inclusion of sensitive information in logs and debug logs for the sole purpose of addressing technical issues with the Software.
|
||||
|
||||
## Your Responsibilities
|
||||
|
||||
While we take measures to protect your sensitive data, it is essential for you to exercise caution when submitting bug reports. Please refrain from including any sensitive or personally identifiable information that is not directly related to the technical issue you are reporting. You have the option to redact or obfuscate sensitive details in bug reports to further protect your data.
|
||||
|
||||
## Changes to this Privacy Policy
|
||||
|
||||
We may update this Privacy Policy from time to time to reflect changes in our practices or for other operational, legal, or regulatory reasons. We will notify you of any material changes by posting the updated Privacy Policy on our website or through the Software. We encourage you to review this Privacy Policy periodically.
|
||||
|
||||
## Contact Us
|
||||
|
||||
If you have any questions or concerns about this Privacy Policy or our privacy practices, please contact us at support@mynas.com.au or via GitHub (https://github.com/abraunegg/onedrive)
|
||||
|
||||
## Conclusion
|
||||
|
||||
By using the Software, you agree to the terms outlined in this Privacy Policy. If you do not agree with any part of this policy, please discontinue the use of the Software.
|
||||
|
54
docs/terms-of-service.md
Normal file
|
@ -0,0 +1,54 @@
|
|||
# OneDrive Client for Linux - Software Service Terms of Service
|
||||
|
||||
## 1. Introduction
|
||||
|
||||
These Terms of Service ("Terms") govern your use of the OneDrive Client for Linux ("Application") software and related Microsoft OneDrive services ("Service") provided by Microsoft. By accessing or using the Service, you agree to comply with and be bound by these Terms. If you do not agree to these Terms, please do not use the Service.
|
||||
|
||||
## 2. License Compliance
|
||||
|
||||
The OneDrive Client for Linux software is licensed under the GNU General Public License, version 3.0 (the "GPLv3"). Your use of the software must comply with the terms and conditions of the GPLv3. A copy of the GPLv3 can be found here: https://www.gnu.org/licenses/gpl-3.0.en.html
|
||||
|
||||
## 3. Use of the Service
|
||||
|
||||
### 3.1. Access and Accounts
|
||||
|
||||
You may need to create an account or provide personal information to access certain features of the Service. You are responsible for maintaining the confidentiality of your account information and are solely responsible for all activities that occur under your account.
|
||||
|
||||
### 3.2. Prohibited Activities
|
||||
|
||||
You agree not to:
|
||||
|
||||
- Use the Service in any way that violates applicable laws or regulations.
|
||||
- Use the Service to engage in any unlawful, harmful, or fraudulent activity.
|
||||
- Use the Service in any manner that disrupts, damages, or impairs the Service.
|
||||
|
||||
## 4. Intellectual Property
|
||||
|
||||
The OneDrive Client for Linux software is subject to the GPLv3, and you must respect all copyrights, trademarks, and other intellectual property rights associated with the software. Any contributions you make to the software must also comply with the GPLv3.
|
||||
|
||||
## 5. Disclaimer of Warranties
|
||||
|
||||
The OneDrive Client for Linux software is provided "as is" without any warranties, either expressed or implied. We do not guarantee that the use of the Application will be error-free or uninterrupted.
|
||||
|
||||
Microsoft is not responsible for OneDrive Client for Linux. Any issues or problems with OneDrive Client for Linux should be raised on GitHub at https://github.com/abraunegg/onedrive or email support@mynas.com.au
|
||||
|
||||
OneDrive Client for Linux is not responsible for the Microsoft OneDrive Service or the Microsoft Graph API Service that this Application utilizes. Any issue with either Microsoft OneDrive or Microsoft Graph API should be raised with Microsoft via their support channel in your country.
|
||||
|
||||
## 6. Limitation of Liability
|
||||
|
||||
To the fullest extent permitted by law, we shall not be liable for any direct, indirect, incidental, special, consequential, or punitive damages, or any loss of profits or revenues, whether incurred directly or indirectly, or any loss of data, use, goodwill, or other intangible losses, resulting from (a) your use or inability to use the Service, or (b) any other matter relating to the Service.
|
||||
|
||||
This limitation of liability explicitly relates to the use of the OneDrive Client for Linux software and does not affect your rights under the GPLv3.
|
||||
|
||||
## 7. Changes to Terms
|
||||
|
||||
We reserve the right to update or modify these Terms at any time without prior notice. Any changes will be effective immediately upon posting on GitHub. Your continued use of the Service after the posting of changes constitutes your acceptance of such changes. Changes can be reviewed on GitHub.
|
||||
|
||||
## 8. Governing Law
|
||||
|
||||
These Terms shall be governed by and construed in accordance with the laws of Australia, without regard to its conflict of law principles.
|
||||
|
||||
## 9. Contact Us
|
||||
|
||||
If you have any questions or concerns about these Terms, please contact us at https://github.com/abraunegg/onedrive or email support@mynas.com.au
|
||||
|
414
docs/ubuntu-package-install.md
Normal file
|
@ -0,0 +1,414 @@
|
|||
# Installation of 'onedrive' package on Debian and Ubuntu
|
||||
|
||||
This document covers the appropriate steps to install the 'onedrive' client using the provided packages for Debian and Ubuntu.
|
||||
|
||||
#### Important information for all Ubuntu and Ubuntu based distribution users:
|
||||
This information is specifically for the following platforms and distributions:
|
||||
|
||||
* Lubuntu
|
||||
* Linux Mint
|
||||
* POP OS
|
||||
* Peppermint OS
|
||||
* Raspbian
|
||||
* Ubuntu
|
||||
|
||||
Whilst there are [onedrive](https://packages.ubuntu.com/search?keywords=onedrive&searchon=names&suite=all§ion=all) Universe packages available for Ubuntu, do not install 'onedrive' from these Universe packages. The default Ubuntu Universe packages are out-of-date and are not supported and should not be used.
|
||||
|
||||
## Determine which instructions to use
|
||||
Ubuntu and its clones are based on various different releases, thus, you must use the correct instructions below, otherwise you may run into package dependency issues and will be unable to install the client.
|
||||
|
||||
### Step 1: Remove any configured PPA and associated 'onedrive' package and systemd service files
|
||||
Many Internet 'help' pages provide inconsistent details on how to install the OneDrive Client for Linux. A number of these websites continue to point users to install the client via the yann1ck PPA repository however this PPA no longer exists and should not be used.
|
||||
|
||||
To remove the PPA repository and the older client, perform the following actions:
|
||||
```text
|
||||
sudo apt remove onedrive
|
||||
sudo add-apt-repository --remove ppa:yann1ck/onedrive
|
||||
```
|
||||
|
||||
Additionally, Ubuntu and its clones have a bad habit of creating a 'default' systemd service file when installing the 'onedrive' package, so that the client will automatically run after being authenticated. This systemd entry is erroneous and needs to be removed.
|
||||
```
|
||||
Created symlink /etc/systemd/user/default.target.wants/onedrive.service → /usr/lib/systemd/user/onedrive.service.
|
||||
```
|
||||
To remove this symbolic link, run the following command:
|
||||
```
|
||||
sudo rm /etc/systemd/user/default.target.wants/onedrive.service
|
||||
```
|
||||
|
||||
### Step 2: Ensure your system is up-to-date
|
||||
Use a script, similar to the following to ensure your system is updated correctly:
|
||||
```text
|
||||
#!/bin/bash
|
||||
rm -rf /var/lib/dpkg/lock-frontend
|
||||
rm -rf /var/lib/dpkg/lock
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
apt-get dist-upgrade -y
|
||||
apt-get autoremove -y
|
||||
apt-get autoclean -y
|
||||
```
|
||||
|
||||
Run this script as 'root' by using `su -` to elevate to 'root'. Example below:
|
||||
```text
|
||||
Welcome to Ubuntu 20.04.1 LTS (GNU/Linux 5.4.0-48-generic x86_64)
|
||||
|
||||
* Documentation: https://help.ubuntu.com
|
||||
* Management: https://landscape.canonical.com
|
||||
* Support: https://ubuntu.com/advantage
|
||||
|
||||
425 updates can be installed immediately.
|
||||
208 of these updates are security updates.
|
||||
To see these additional updates run: apt list --upgradable
|
||||
|
||||
Your Hardware Enablement Stack (HWE) is supported until April 2025.
|
||||
Last login: Thu Jan 20 14:21:48 2022 from my.ip.address
|
||||
alex@ubuntu-20-LTS:~$ su -
|
||||
Password:
|
||||
root@ubuntu-20-LTS:~# ls -la
|
||||
total 28
|
||||
drwx------ 3 root root 4096 Oct 10 2020 .
|
||||
drwxr-xr-x 20 root root 4096 Oct 10 2020 ..
|
||||
-rw------- 1 root root 175 Jan 20 14:23 .bash_history
|
||||
-rw-r--r-- 1 root root 3106 Dec 6 2019 .bashrc
|
||||
drwx------ 2 root root 4096 Apr 23 2020 .cache
|
||||
-rw-r--r-- 1 root root 161 Dec 6 2019 .profile
|
||||
-rwxr-xr-x 1 root root 174 Oct 10 2020 update-os.sh
|
||||
root@ubuntu-20-LTS:~# cat update-os.sh
|
||||
#!/bin/bash
|
||||
rm -rf /var/lib/dpkg/lock-frontend
|
||||
rm -rf /var/lib/dpkg/lock
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
apt-get dist-upgrade -y
|
||||
apt-get autoremove -y
|
||||
apt-get autoclean -y
|
||||
root@ubuntu-20-LTS:~# ./update-os.sh
|
||||
Hit:1 http://au.archive.ubuntu.com/ubuntu focal InRelease
|
||||
Hit:2 http://au.archive.ubuntu.com/ubuntu focal-updates InRelease
|
||||
Hit:3 http://au.archive.ubuntu.com/ubuntu focal-backports InRelease
|
||||
Hit:4 http://security.ubuntu.com/ubuntu focal-security InRelease
|
||||
Reading package lists... 96%
|
||||
...
|
||||
Sourcing file `/etc/default/grub'
|
||||
Sourcing file `/etc/default/grub.d/init-select.cfg'
|
||||
Generating grub configuration file ...
|
||||
Found linux image: /boot/vmlinuz-5.13.0-27-generic
|
||||
Found initrd image: /boot/initrd.img-5.13.0-27-generic
|
||||
Found linux image: /boot/vmlinuz-5.4.0-48-generic
|
||||
Found initrd image: /boot/initrd.img-5.4.0-48-generic
|
||||
Found memtest86+ image: /boot/memtest86+.elf
|
||||
Found memtest86+ image: /boot/memtest86+.bin
|
||||
done
|
||||
Removing linux-modules-5.4.0-26-generic (5.4.0-26.30) ...
|
||||
Processing triggers for libc-bin (2.31-0ubuntu9.2) ...
|
||||
Reading package lists... Done
|
||||
Building dependency tree
|
||||
Reading state information... Done
|
||||
root@ubuntu-20-LTS:~#
|
||||
```
|
||||
|
||||
Reboot your system after running this process before continuing with Step 3.
|
||||
```text
|
||||
reboot
|
||||
```
|
||||
|
||||
### Step 3: Determine what your OS is based on
|
||||
Determine what your OS is based on. To do this, run the following command:
|
||||
```text
|
||||
lsb_release -a
|
||||
```
|
||||
**Example:**
|
||||
```text
|
||||
alex@ubuntu-system:~$ lsb_release -a
|
||||
No LSB modules are available.
|
||||
Distributor ID: Ubuntu
|
||||
Description: Ubuntu 22.04 LTS
|
||||
Release: 22.04
|
||||
Codename: jammy
|
||||
```
|
||||
|
||||
### Step 4: Pick the correct instructions to use
|
||||
If required, review the table below based on your 'lsb_release' information to pick the appropriate instructions to use:
|
||||
|
||||
| Release & Codename | Instructions to use |
|
||||
|--------------------|---------------------|
|
||||
| Linux Mint 19.x | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Linux Mint 21.x |
|
||||
| Linux Mint 20.x | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Linux Mint 21.x | Use [Ubuntu 22.04](#distribution-ubuntu-2204) instructions below |
|
||||
| Linux Mint Debian Edition (LMDE) 5 / Elsie | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Linux Mint Debian Edition (LMDE) 6 / Faye | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Debian 9 | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Debian 12 |
|
||||
| Debian 10 | You must build from source or upgrade your Operating System to Debian 12 |
|
||||
| Debian 11 | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Debian 12 | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Raspbian GNU/Linux 10 | You must build from source or upgrade your Operating System to Raspbian GNU/Linux 12 |
|
||||
| Raspbian GNU/Linux 11 | Use [Debian 11](#distribution-debian-11) instructions below |
|
||||
| Raspbian GNU/Linux 12 | Use [Debian 12](#distribution-debian-12) instructions below |
|
||||
| Ubuntu 18.04 / Bionic | This platform is End-of-Life (EOL) and no longer supported. You must upgrade to Ubuntu 22.04 |
|
||||
| Ubuntu 20.04 / Focal | Use [Ubuntu 20.04](#distribution-ubuntu-2004) instructions below |
|
||||
| Ubuntu 21.04 / Hirsute | Use [Ubuntu 21.04](#distribution-ubuntu-2104) instructions below |
|
||||
| Ubuntu 21.10 / Impish | Use [Ubuntu 21.10](#distribution-ubuntu-2110) instructions below |
|
||||
| Ubuntu 22.04 / Jammy | Use [Ubuntu 22.04](#distribution-ubuntu-2204) instructions below |
|
||||
| Ubuntu 22.10 / Kinetic | Use [Ubuntu 22.10](#distribution-ubuntu-2210) instructions below |
|
||||
| Ubuntu 23.04 / Lunar | Use [Ubuntu 23.04](#distribution-ubuntu-2304) instructions below |
|
||||
| Ubuntu 23.10 / Mantic | Use [Ubuntu 23.10](#distribution-ubuntu-2310) instructions below |
|
||||
|
||||
## Distribution Package Install Instructions
|
||||
|
||||
### Distribution: Debian 11
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|✔|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_11/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_11/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Debian 12
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|✔|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_12/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/Debian_12/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 20.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_20.04/Release.key | sudo apt-key add -
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo 'deb https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_20.04/ ./' | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 21.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_21.04/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_21.04/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 21.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_21.10/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_21.10/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 22.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.04/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 22.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.10/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_22.10/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 23.04
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|✔|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.04/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.04/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
### Distribution: Ubuntu 23.10
|
||||
The packages support the following platform architectures:
|
||||
| i686 | x86_64 | ARMHF | AARCH64 |
|
||||
|:----:|:------:|:-----:|:-------:|
|
||||
|❌|✔|❌|✔|
|
||||
|
||||
#### Step 1: Add the OpenSuSE Build Service repository release key
|
||||
Add the OpenSuSE Build Service repository release key using the following command:
|
||||
```text
|
||||
wget -qO - https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.10/Release.key | gpg --dearmor | sudo tee /usr/share/keyrings/obs-onedrive.gpg > /dev/null
|
||||
```
|
||||
|
||||
#### Step 2: Add the OpenSuSE Build Service repository
|
||||
Add the OpenSuSE Build Service repository using the following command:
|
||||
```text
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/obs-onedrive.gpg] https://download.opensuse.org/repositories/home:/npreining:/debian-ubuntu-onedrive/xUbuntu_23.10/ ./" | sudo tee /etc/apt/sources.list.d/onedrive.list
|
||||
```
|
||||
|
||||
#### Step 3: Update your apt package cache
|
||||
Run: `sudo apt-get update`
|
||||
|
||||
#### Step 4: Install 'onedrive'
|
||||
Run: `sudo apt install --no-install-recommends --no-install-suggests onedrive`
|
||||
|
||||
#### Step 5: Read 'Known Issues' with these packages
|
||||
Read and understand the [known issues](#known-issues-with-installing-from-the-above-packages) with these packages below, taking any action that is needed.
|
||||
|
||||
|
||||
## Known Issues with Installing from the above packages
|
||||
|
||||
### 1. The client may segfault | core-dump when exiting
|
||||
When the client is run in `--monitor` mode manually, or when using the systemd service, the client may segfault on exit.
|
||||
|
||||
This issue is caused by the way the 'onedrive' packages are built using the distribution LDC package & the default distribution compiler options which is the root cause for this issue. Refer to: https://bugs.launchpad.net/ubuntu/+source/ldc/+bug/1895969
|
||||
|
||||
**Additional references:**
|
||||
* https://github.com/abraunegg/onedrive/issues/1053
|
||||
* https://github.com/abraunegg/onedrive/issues/1609
|
||||
|
||||
**Resolution Options:**
|
||||
* Uninstall the package and build client from source
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/bash
|
||||
# This script is to assist in starting the onedrive client when using init.d
|
||||
APP_OPTIONS="--monitor --verbose"
|
||||
onedrive $APP_OPTIONS > /dev/null 2>&1 &
|
||||
exit 0
|
518
install-sh
Executable file
|
@ -0,0 +1,518 @@
|
|||
#!/bin/sh
|
||||
# install - install a program, script, or datafile
|
||||
|
||||
scriptversion=2018-03-11.20; # UTC
|
||||
|
||||
# This originates from X11R5 (mit/util/scripts/install.sh), which was
|
||||
# later released in X11R6 (xc/config/util/install.sh) with the
|
||||
# following copyright and license.
|
||||
#
|
||||
# Copyright (C) 1994 X Consortium
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to
|
||||
# deal in the Software without restriction, including without limitation the
|
||||
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
# sell copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
|
||||
# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
|
||||
# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
#
|
||||
# Except as contained in this notice, the name of the X Consortium shall not
|
||||
# be used in advertising or otherwise to promote the sale, use or other deal-
|
||||
# ings in this Software without prior written authorization from the X Consor-
|
||||
# tium.
|
||||
#
|
||||
#
|
||||
# FSF changes to this file are in the public domain.
|
||||
#
|
||||
# Calling this script install-sh is preferred over install.sh, to prevent
|
||||
# 'make' implicit rules from creating a file called install from it
|
||||
# when there is no Makefile.
|
||||
#
|
||||
# This script is compatible with the BSD install script, but was written
|
||||
# from scratch.
|
||||
|
||||
tab=' '
|
||||
nl='
|
||||
'
|
||||
IFS=" $tab$nl"
|
||||
|
||||
# Set DOITPROG to "echo" to test this script.
|
||||
|
||||
doit=${DOITPROG-}
|
||||
doit_exec=${doit:-exec}
|
||||
|
||||
# Put in absolute file names if you don't have them in your path;
|
||||
# or use environment vars.
|
||||
|
||||
chgrpprog=${CHGRPPROG-chgrp}
|
||||
chmodprog=${CHMODPROG-chmod}
|
||||
chownprog=${CHOWNPROG-chown}
|
||||
cmpprog=${CMPPROG-cmp}
|
||||
cpprog=${CPPROG-cp}
|
||||
mkdirprog=${MKDIRPROG-mkdir}
|
||||
mvprog=${MVPROG-mv}
|
||||
rmprog=${RMPROG-rm}
|
||||
stripprog=${STRIPPROG-strip}
|
||||
|
||||
posix_mkdir=
|
||||
|
||||
# Desired mode of installed file.
|
||||
mode=0755
|
||||
|
||||
chgrpcmd=
|
||||
chmodcmd=$chmodprog
|
||||
chowncmd=
|
||||
mvcmd=$mvprog
|
||||
rmcmd="$rmprog -f"
|
||||
stripcmd=
|
||||
|
||||
src=
|
||||
dst=
|
||||
dir_arg=
|
||||
dst_arg=
|
||||
|
||||
copy_on_change=false
|
||||
is_target_a_directory=possibly
|
||||
|
||||
usage="\
|
||||
Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
|
||||
or: $0 [OPTION]... SRCFILES... DIRECTORY
|
||||
or: $0 [OPTION]... -t DIRECTORY SRCFILES...
|
||||
or: $0 [OPTION]... -d DIRECTORIES...
|
||||
|
||||
In the 1st form, copy SRCFILE to DSTFILE.
|
||||
In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
|
||||
In the 4th, create DIRECTORIES.
|
||||
|
||||
Options:
|
||||
--help display this help and exit.
|
||||
--version display version info and exit.
|
||||
|
||||
-c (ignored)
|
||||
-C install only if different (preserve the last data modification time)
|
||||
-d create directories instead of installing files.
|
||||
-g GROUP $chgrpprog installed files to GROUP.
|
||||
-m MODE $chmodprog installed files to MODE.
|
||||
-o USER $chownprog installed files to USER.
|
||||
-s $stripprog installed files.
|
||||
-t DIRECTORY install into DIRECTORY.
|
||||
-T report an error if DSTFILE is a directory.
|
||||
|
||||
Environment variables override the default commands:
|
||||
CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
|
||||
RMPROG STRIPPROG
|
||||
"
|
||||
|
||||
while test $# -ne 0; do
|
||||
case $1 in
|
||||
-c) ;;
|
||||
|
||||
-C) copy_on_change=true;;
|
||||
|
||||
-d) dir_arg=true;;
|
||||
|
||||
-g) chgrpcmd="$chgrpprog $2"
|
||||
shift;;
|
||||
|
||||
--help) echo "$usage"; exit $?;;
|
||||
|
||||
-m) mode=$2
|
||||
case $mode in
|
||||
*' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*)
|
||||
echo "$0: invalid mode: $mode" >&2
|
||||
exit 1;;
|
||||
esac
|
||||
shift;;
|
||||
|
||||
-o) chowncmd="$chownprog $2"
|
||||
shift;;
|
||||
|
||||
-s) stripcmd=$stripprog;;
|
||||
|
||||
-t)
|
||||
is_target_a_directory=always
|
||||
dst_arg=$2
|
||||
# Protect names problematic for 'test' and other utilities.
|
||||
case $dst_arg in
|
||||
-* | [=\(\)!]) dst_arg=./$dst_arg;;
|
||||
esac
|
||||
shift;;
|
||||
|
||||
-T) is_target_a_directory=never;;
|
||||
|
||||
--version) echo "$0 $scriptversion"; exit $?;;
|
||||
|
||||
--) shift
|
||||
break;;
|
||||
|
||||
-*) echo "$0: invalid option: $1" >&2
|
||||
exit 1;;
|
||||
|
||||
*) break;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# We allow the use of options -d and -T together, by making -d
|
||||
# take the precedence; this is for compatibility with GNU install.
|
||||
|
||||
if test -n "$dir_arg"; then
|
||||
if test -n "$dst_arg"; then
|
||||
echo "$0: target directory not allowed when installing a directory." >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
|
||||
# When -d is used, all remaining arguments are directories to create.
|
||||
# When -t is used, the destination is already specified.
|
||||
# Otherwise, the last argument is the destination. Remove it from $@.
|
||||
for arg
|
||||
do
|
||||
if test -n "$dst_arg"; then
|
||||
# $@ is not empty: it contains at least $arg.
|
||||
set fnord "$@" "$dst_arg"
|
||||
shift # fnord
|
||||
fi
|
||||
shift # arg
|
||||
dst_arg=$arg
|
||||
# Protect names problematic for 'test' and other utilities.
|
||||
case $dst_arg in
|
||||
-* | [=\(\)!]) dst_arg=./$dst_arg;;
|
||||
esac
|
||||
done
|
||||
fi
|
||||
|
||||
if test $# -eq 0; then
|
||||
if test -z "$dir_arg"; then
|
||||
echo "$0: no input file specified." >&2
|
||||
exit 1
|
||||
fi
|
||||
# It's OK to call 'install-sh -d' without argument.
|
||||
# This can happen when creating conditional directories.
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if test -z "$dir_arg"; then
|
||||
if test $# -gt 1 || test "$is_target_a_directory" = always; then
|
||||
if test ! -d "$dst_arg"; then
|
||||
echo "$0: $dst_arg: Is not a directory." >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if test -z "$dir_arg"; then
|
||||
do_exit='(exit $ret); exit $ret'
|
||||
trap "ret=129; $do_exit" 1
|
||||
trap "ret=130; $do_exit" 2
|
||||
trap "ret=141; $do_exit" 13
|
||||
trap "ret=143; $do_exit" 15
|
||||
|
||||
# Set umask so as not to create temps with too-generous modes.
|
||||
# However, 'strip' requires both read and write access to temps.
|
||||
case $mode in
|
||||
# Optimize common cases.
|
||||
*644) cp_umask=133;;
|
||||
*755) cp_umask=22;;
|
||||
|
||||
*[0-7])
|
||||
if test -z "$stripcmd"; then
|
||||
u_plus_rw=
|
||||
else
|
||||
u_plus_rw='% 200'
|
||||
fi
|
||||
cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
|
||||
*)
|
||||
if test -z "$stripcmd"; then
|
||||
u_plus_rw=
|
||||
else
|
||||
u_plus_rw=,u+rw
|
||||
fi
|
||||
cp_umask=$mode$u_plus_rw;;
|
||||
esac
|
||||
fi
|
||||
|
||||
for src
|
||||
do
|
||||
# Protect names problematic for 'test' and other utilities.
|
||||
case $src in
|
||||
-* | [=\(\)!]) src=./$src;;
|
||||
esac
|
||||
|
||||
if test -n "$dir_arg"; then
|
||||
dst=$src
|
||||
dstdir=$dst
|
||||
test -d "$dstdir"
|
||||
dstdir_status=$?
|
||||
else
|
||||
|
||||
# Waiting for this to be detected by the "$cpprog $src $dsttmp" command
|
||||
# might cause directories to be created, which would be especially bad
|
||||
# if $src (and thus $dsttmp) contains '*'.
|
||||
if test ! -f "$src" && test ! -d "$src"; then
|
||||
echo "$0: $src does not exist." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if test -z "$dst_arg"; then
|
||||
echo "$0: no destination specified." >&2
|
||||
exit 1
|
||||
fi
|
||||
dst=$dst_arg
|
||||
|
||||
# If destination is a directory, append the input filename.
|
||||
if test -d "$dst"; then
|
||||
if test "$is_target_a_directory" = never; then
|
||||
echo "$0: $dst_arg: Is a directory" >&2
|
||||
exit 1
|
||||
fi
|
||||
dstdir=$dst
|
||||
dstbase=`basename "$src"`
|
||||
case $dst in
|
||||
*/) dst=$dst$dstbase;;
|
||||
*) dst=$dst/$dstbase;;
|
||||
esac
|
||||
dstdir_status=0
|
||||
else
|
||||
dstdir=`dirname "$dst"`
|
||||
test -d "$dstdir"
|
||||
dstdir_status=$?
|
||||
fi
|
||||
fi
|
||||
|
||||
case $dstdir in
|
||||
*/) dstdirslash=$dstdir;;
|
||||
*) dstdirslash=$dstdir/;;
|
||||
esac
|
||||
|
||||
obsolete_mkdir_used=false
|
||||
|
||||
if test $dstdir_status != 0; then
|
||||
case $posix_mkdir in
|
||||
'')
|
||||
# Create intermediate dirs using mode 755 as modified by the umask.
|
||||
# This is like FreeBSD 'install' as of 1997-10-28.
|
||||
umask=`umask`
|
||||
case $stripcmd.$umask in
|
||||
# Optimize common cases.
|
||||
*[2367][2367]) mkdir_umask=$umask;;
|
||||
.*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
|
||||
|
||||
*[0-7])
|
||||
mkdir_umask=`expr $umask + 22 \
|
||||
- $umask % 100 % 40 + $umask % 20 \
|
||||
- $umask % 10 % 4 + $umask % 2
|
||||
`;;
|
||||
*) mkdir_umask=$umask,go-w;;
|
||||
esac
|
||||
|
||||
# With -d, create the new directory with the user-specified mode.
|
||||
# Otherwise, rely on $mkdir_umask.
|
||||
if test -n "$dir_arg"; then
|
||||
mkdir_mode=-m$mode
|
||||
else
|
||||
mkdir_mode=
|
||||
fi
|
||||
|
||||
posix_mkdir=false
|
||||
case $umask in
|
||||
*[123567][0-7][0-7])
|
||||
# POSIX mkdir -p sets u+wx bits regardless of umask, which
|
||||
# is incompatible with FreeBSD 'install' when (umask & 300) != 0.
|
||||
;;
|
||||
*)
|
||||
# Note that $RANDOM variable is not portable (e.g. dash); Use it
|
||||
# here however when possible just to lower collision chance.
|
||||
tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
|
||||
|
||||
trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0
|
||||
|
||||
# Because "mkdir -p" follows existing symlinks and we likely work
|
||||
# directly in world-writeable /tmp, make sure that the '$tmpdir'
|
||||
# directory is successfully created first before we actually test
|
||||
# 'mkdir -p' feature.
|
||||
if (umask $mkdir_umask &&
|
||||
$mkdirprog $mkdir_mode "$tmpdir" &&
|
||||
exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1
|
||||
then
|
||||
if test -z "$dir_arg" || {
|
||||
# Check for POSIX incompatibilities with -m.
|
||||
# HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
|
||||
# other-writable bit of parent directory when it shouldn't.
|
||||
# FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
|
||||
test_tmpdir="$tmpdir/a"
|
||||
ls_ld_tmpdir=`ls -ld "$test_tmpdir"`
|
||||
case $ls_ld_tmpdir in
|
||||
d????-?r-*) different_mode=700;;
|
||||
d????-?--*) different_mode=755;;
|
||||
*) false;;
|
||||
esac &&
|
||||
$mkdirprog -m$different_mode -p -- "$test_tmpdir" && {
|
||||
ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"`
|
||||
test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
|
||||
}
|
||||
}
|
||||
then posix_mkdir=:
|
||||
fi
|
||||
rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir"
|
||||
else
|
||||
# Remove any dirs left behind by ancient mkdir implementations.
|
||||
rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null
|
||||
fi
|
||||
trap '' 0;;
|
||||
esac;;
|
||||
esac
|
||||
|
||||
if
|
||||
$posix_mkdir && (
|
||||
umask $mkdir_umask &&
|
||||
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
|
||||
)
|
||||
then :
|
||||
else
|
||||
|
||||
# The umask is ridiculous, or mkdir does not conform to POSIX,
|
||||
# or it failed possibly due to a race condition. Create the
|
||||
# directory the slow way, step by step, checking for races as we go.
|
||||
|
||||
case $dstdir in
|
||||
/*) prefix='/';;
|
||||
[-=\(\)!]*) prefix='./';;
|
||||
*) prefix='';;
|
||||
esac
|
||||
|
||||
oIFS=$IFS
|
||||
IFS=/
|
||||
set -f
|
||||
set fnord $dstdir
|
||||
shift
|
||||
set +f
|
||||
IFS=$oIFS
|
||||
|
||||
prefixes=
|
||||
|
||||
for d
|
||||
do
|
||||
test X"$d" = X && continue
|
||||
|
||||
prefix=$prefix$d
|
||||
if test -d "$prefix"; then
|
||||
prefixes=
|
||||
else
|
||||
if $posix_mkdir; then
|
||||
(umask=$mkdir_umask &&
|
||||
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
|
||||
# Don't fail if two instances are running concurrently.
|
||||
test -d "$prefix" || exit 1
|
||||
else
|
||||
case $prefix in
|
||||
*\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
|
||||
*) qprefix=$prefix;;
|
||||
esac
|
||||
prefixes="$prefixes '$qprefix'"
|
||||
fi
|
||||
fi
|
||||
prefix=$prefix/
|
||||
done
|
||||
|
||||
if test -n "$prefixes"; then
|
||||
# Don't fail if two instances are running concurrently.
|
||||
(umask $mkdir_umask &&
|
||||
eval "\$doit_exec \$mkdirprog $prefixes") ||
|
||||
test -d "$dstdir" || exit 1
|
||||
obsolete_mkdir_used=true
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if test -n "$dir_arg"; then
|
||||
{ test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
|
||||
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
|
||||
{ test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
|
||||
test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
|
||||
else
|
||||
|
||||
# Make a couple of temp file names in the proper directory.
|
||||
dsttmp=${dstdirslash}_inst.$$_
|
||||
rmtmp=${dstdirslash}_rm.$$_
|
||||
|
||||
# Trap to clean up those temp files at exit.
|
||||
trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
|
||||
|
||||
# Copy the file name to the temp name.
|
||||
(umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
|
||||
|
||||
# and set any options; do chmod last to preserve setuid bits.
|
||||
#
|
||||
# If any of these fail, we abort the whole thing. If we want to
|
||||
# ignore errors from any of these, just make sure not to ignore
|
||||
# errors from the above "$doit $cpprog $src $dsttmp" command.
|
||||
#
|
||||
{ test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
|
||||
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
|
||||
{ test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
|
||||
{ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
|
||||
|
||||
# If -C, don't bother to copy if it wouldn't change the file.
|
||||
if $copy_on_change &&
|
||||
old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
|
||||
new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
|
||||
set -f &&
|
||||
set X $old && old=:$2:$4:$5:$6 &&
|
||||
set X $new && new=:$2:$4:$5:$6 &&
|
||||
set +f &&
|
||||
test "$old" = "$new" &&
|
||||
$cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
|
||||
then
|
||||
rm -f "$dsttmp"
|
||||
else
|
||||
# Rename the file to the real destination.
|
||||
$doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
|
||||
|
||||
# The rename failed, perhaps because mv can't rename something else
|
||||
# to itself, or perhaps because mv is so ancient that it does not
|
||||
# support -f.
|
||||
{
|
||||
# Now remove or move aside any old file at destination location.
|
||||
# We try this two ways since rm can't unlink itself on some
|
||||
# systems and the destination file might be busy for other
|
||||
# reasons. In this case, the final cleanup might fail but the new
|
||||
# file should still install successfully.
|
||||
{
|
||||
test ! -f "$dst" ||
|
||||
$doit $rmcmd -f "$dst" 2>/dev/null ||
|
||||
{ $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
|
||||
{ $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
|
||||
} ||
|
||||
{ echo "$0: cannot unlink or rename $dst" >&2
|
||||
(exit 1); exit 1
|
||||
}
|
||||
} &&
|
||||
|
||||
# Now rename the file to the real destination.
|
||||
$doit $mvcmd "$dsttmp" "$dst"
|
||||
}
|
||||
fi || exit 1
|
||||
|
||||
trap '' 0
|
||||
fi
|
||||
done
|
||||
|
||||
# Local variables:
|
||||
# eval: (add-hook 'before-save-hook 'time-stamp)
|
||||
# time-stamp-start: "scriptversion="
|
||||
# time-stamp-format: "%:y-%02m-%02d.%02H"
|
||||
# time-stamp-time-zone: "UTC0"
|
||||
# time-stamp-end: "; # UTC"
|
||||
# End:
|
391
onedrive.1.in
Normal file
|
@ -0,0 +1,391 @@
|
|||
.TH ONEDRIVE "1" "@PACKAGE_DATE@" "@PACKAGE_VERSION@" "User Commands"
|
||||
.SH NAME
|
||||
onedrive \- folder synchronization with OneDrive
|
||||
.SH SYNOPSIS
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-synchronize
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-monitor
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-display-config
|
||||
.br
|
||||
.B onedrive
|
||||
[\fI\,OPTION\/\fR] \-\-display-sync-status
|
||||
.SH DESCRIPTION
|
||||
A complete tool to interact with OneDrive on Linux.
|
||||
.SH OPTIONS
|
||||
Without any option given, no sync is done and the program exits.
|
||||
.TP
|
||||
\fB\-\-auth\-files\fP ARG
|
||||
Perform authorization via two files passed in as \fBARG\fP in the format \fBauthUrl:responseUrl\fP.
|
||||
The authorization URL is written to the \fBauthUrl\fP, then \fBonedrive\fP waits for
|
||||
the file \fBresponseUrl\fP to be present, and reads the response from that file.
|
||||
.TP
|
||||
\fB\-\-auth\-response\fP ARG
|
||||
Perform authentication not via interactive dialog but via providing the response url directly.
|
||||
.TP
|
||||
\fB\-\-check\-for\-nomount\fP
|
||||
Check for the presence of .nosync in the syncdir root. If found, do not perform sync.
|
||||
.br
|
||||
Configuration file key: \fBcheck_nomount\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-check\-for\-nosync\fP
|
||||
Check for the presence of .nosync in each directory. If found, skip directory from sync.
|
||||
.br
|
||||
Configuration file key: \fBcheck_nosync\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-classify\-as\-big\-delete\fP
|
||||
Number of children in a path that is locally removed which will be classified as a 'big data delete'
|
||||
.br
|
||||
Configuration file key: \fBclassify_as_big_delete\fP (default: \fB1000\fP)
|
||||
.TP
|
||||
\fB\-\-cleanup\-local\-files\fP
|
||||
Cleanup additional local files when using \-\-download-only. This will remove local data.
|
||||
.br
|
||||
Configuration file key: \fBcleanup_local_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-confdir\fP ARG
|
||||
Set the directory used to store the configuration files
|
||||
.TP
|
||||
\fB\-\-create\-directory\fP ARG
|
||||
Create a directory on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-create\-share\-link\fP ARG
|
||||
Create a shareable link for an existing file on OneDrive
|
||||
.TP
|
||||
\fB\-\-debug\-https\fP
|
||||
Debug OneDrive HTTPS communication.
|
||||
.br
|
||||
Configuration file key: \fBdebug_https\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-destination\-directory\fP ARG
|
||||
Destination directory for renamed or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-disable\-download\-validation\fP
|
||||
Disable download validation when downloading from OneDrive
|
||||
.br
|
||||
Configuration file key: \fBdisable_download_validation\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-disable\-notifications\fP
|
||||
Do not use desktop notifications in monitor mode
|
||||
.br
|
||||
Configuration file key: \fBdisable_notifications\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-disable\-upload\-validation\fP
|
||||
Disable upload validation when uploading to OneDrive
|
||||
.br
|
||||
Configuration file key: \fBdisable_upload_validation\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-display\-config\fP
|
||||
Display what options the client will use as currently configured \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-display\-running\-config\fP
|
||||
Display what options the client has been configured to use on application startup.
|
||||
.TP
|
||||
\fB\-\-display\-sync\-status\fP
|
||||
Display the sync status of the client \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-download\-only\fP
|
||||
Replicate the OneDrive online state locally, by only downloading changes from OneDrive. Do not upload local changes to OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBdownload_only\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-dry\-run\fP
|
||||
Perform a trial sync with no changes made. Can ONLY be used with --synchronize. Will be ignored for --monitor
|
||||
.br
|
||||
Configuration file key: \fBdry_run\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-enable\-logging\fP
|
||||
Enable client activity to a separate log file
|
||||
.br
|
||||
Configuration file key: \fBenable_logging\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-force\fP
|
||||
Force the deletion of data when a 'big delete' is detected
|
||||
.TP
|
||||
\fB\-\-force\-http\-11\fP
|
||||
Force the use of HTTP 1.1 for all operations
|
||||
.br
|
||||
Configuration file key: \fBforce_http_11\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-force\-sync\fP
|
||||
Force a synchronization of a specific folder, only when using --synchronize --single-directory and ignore
|
||||
.br
|
||||
all non-default skip_dir and skip_file rules
|
||||
.TP
|
||||
\fB\-\-get\-O365\-drive\-id\fP ARG
|
||||
Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library
|
||||
.TP
|
||||
\fB\-\-get\-file\-link\fP ARG
|
||||
Display the file link of a synced file
|
||||
.TP
|
||||
\fB\-\-list\-shared\-folders\fP
|
||||
List OneDrive Business Shared Folders
|
||||
.TP
|
||||
\fB\-\-local\-first\fP
|
||||
Synchronize from the local directory source first, before downloading changes from OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBlocal_first\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-logout\fP
|
||||
Logout the current user
|
||||
.TP
|
||||
\fB\-\-log\-dir\fP ARG
|
||||
defines the directory where logging output is saved to, needs to end with a slash
|
||||
.br
|
||||
Configuration file key: \fBlog_dir\fP (default: \fB/var/log/onedrive/\fP)
|
||||
.TP
|
||||
\fB\-\-min\-notify\-changes\fP
|
||||
the minimum number of pending incoming changes necessary to trigger
|
||||
a desktop notification
|
||||
.br
|
||||
Configuration file key: \fBmin_notify_changes\fP (default: \fB5\fP)
|
||||
.TP
|
||||
\fB\-m \-\-modified\-by\fP ARG
|
||||
Display the last modified by details of a given path
|
||||
.TP
|
||||
\fB\-m \-\-monitor\fP
|
||||
Keep monitoring for local and remote changes
|
||||
.TP
|
||||
\fB\-\-monitor\-interval\fP ARG
|
||||
The number of seconds by which each sync operation is undertaken when
|
||||
idle under monitor mode
|
||||
.br
|
||||
Configuration file key: \fBmonitor_interval\fP (default: \fB300\fP)
|
||||
.TP
|
||||
\fB\-\-monitor\-fullscan-frequency\fP ARG
|
||||
Number of sync runs before performing a full local scan of the synced directory
|
||||
.br
|
||||
Configuration file key: \fBmonitor_fullscan_frequency\fP (default: \fB10\fP)
|
||||
.TP
|
||||
\fB\-\-monitor\-log\-frequency\fP ARG
|
||||
Frequency of logging in monitor mode
|
||||
.br
|
||||
Configuration file key: \fBmonitor_log_frequency\fP (default: \fB5\fP)
|
||||
.TP
|
||||
\fB\-\-no\-remote\-delete\fP
|
||||
Do not delete local file 'deletes' from OneDrive when using \fB\-\-upload\-only\fP
|
||||
.br
|
||||
Configuration file key: \fBno_remote_delete\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-operation\-timeout\fP ARG
|
||||
Set the maximum amount of time (seconds) a file operation is allowed to take. This includes DNS resolution, connecting, data transfer, etc.
|
||||
.br
|
||||
Configuration file key: \fBoperation_timeout\fP (default: \fB3600\fP)
|
||||
.TP
|
||||
\fB\-\-print\-token\fP
|
||||
Print the access token, useful for debugging
|
||||
.TP
|
||||
\fB\-\-reauth\fP
|
||||
Reauthenticate the client with OneDrive
|
||||
.TP
|
||||
\fB\-\-remove\-directory\fP ARG
|
||||
Remove a directory on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-remove\-source\-files\fP
|
||||
Remove source file after successful transfer to OneDrive when using \fB\-\-upload\-only\fP
|
||||
.br
|
||||
Configuration file key: \fBremove_source_files\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-resync\fP
|
||||
Forget the last saved state, perform a full sync
|
||||
.TP
|
||||
\fB\-\-resync\-auth\fP
|
||||
Approve the use of performing a --resync action without needing CLI authorization
|
||||
.TP
|
||||
\fB\-\-single\-directory\fP ARG
|
||||
Specify a single local directory within the OneDrive root to sync.
|
||||
.TP
|
||||
\fB\-\-skip\-dir\fP ARG
|
||||
Skip any directories that match this pattern from syncing
|
||||
.TP
|
||||
\fB\-\-skip\-dir\-strict\-match\fP
|
||||
When matching skip_dir directories, only match explicit matches
|
||||
.br
|
||||
Configuration file key: \fBskip_dir_strict_match\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-dot\-files\fP
|
||||
Skip dot files and folders from syncing
|
||||
.br
|
||||
Configuration file key: \fBskip_dotfiles\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-file\fP ARG
|
||||
Skip any files that match this pattern from syncing
|
||||
.br
|
||||
Configuration file key: \fBskip_file\fP (default: \fB~*|.~*|*.tmp\fP)
|
||||
.TP
|
||||
\fB\-\-skip\-size\fP ARG
|
||||
Skip new files larger than this size (in MB)
|
||||
.TP
|
||||
\fB\-\-skip\-symlinks\fP
|
||||
Skip syncing of symlinks
|
||||
.br
|
||||
Configuration file key: \fBskip_symlinks\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-source\-directory\fP ARG
|
||||
Source directory to rename or move on OneDrive \- no sync will be performed.
|
||||
.TP
|
||||
\fB\-\-space\-reservation\fP ARG
|
||||
The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation
|
||||
.TP
|
||||
\fB\-\-sync\-root\-files\fP
|
||||
Sync all files in sync_dir root when using sync_list.
|
||||
.TP
|
||||
\fB\-\-sync\-shared\-folders\fP
|
||||
Sync OneDrive Business Shared Folders
|
||||
.br
|
||||
Configuration file key: \fBsync_business_shared_folders\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-syncdir\fP ARG
|
||||
Set the directory used to sync the files that are synced
|
||||
.br
|
||||
Configuration file key: \fBsync_dir\fP (default: \fB~/OneDrive\fP)
|
||||
.TP
|
||||
\fB\-\-synchronize\fP
|
||||
Perform a synchronization
|
||||
.TP
|
||||
\fB\-\-upload\-only\fP
|
||||
Replicate the locally configured sync_dir state to OneDrive, by only uploading local changes to OneDrive. Do not download changes from OneDrive.
|
||||
.br
|
||||
Configuration file key: \fBupload_only\fP (default: \fBfalse\fP)
|
||||
.TP
|
||||
\fB\-\-user\-agent\fP ARG
|
||||
Set the used User Agent identifier
|
||||
.br
|
||||
Configuration file key: \fBuser_agent\fP (default: don't change)
|
||||
.TP
|
||||
\fB\-v \-\-verbose\fP
|
||||
Print more details, useful for debugging. When given two times (or more),
|
||||
enables even more verbose debug statements.
|
||||
.TP
|
||||
\fB\-\-version\fP
|
||||
Print the version and exit
|
||||
.TP
|
||||
\fB\-\-with\-editing\-perms\fP
|
||||
Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>
|
||||
.TP
|
||||
\fB\-h \-\-help\fP
|
||||
This help information.
|
||||
.PP
|
||||
|
||||
.SH FEATURES
|
||||
|
||||
State caching
|
||||
|
||||
Real-Time file monitoring with Inotify
|
||||
|
||||
File upload / download validation to ensure data integrity
|
||||
|
||||
Resumable uploads
|
||||
|
||||
Support OneDrive for Business (part of Office 365)
|
||||
|
||||
Shared Folder support for OneDrive Personal and OneDrive Business accounts
|
||||
|
||||
SharePoint / Office365 Shared Libraries
|
||||
|
||||
Desktop notifications via libnotify
|
||||
|
||||
Dry-run capability to test configuration changes
|
||||
|
||||
Prevent major OneDrive accidental data deletion after configuration change
|
||||
|
||||
Support for National cloud deployments (Microsoft Cloud for US Government, Microsoft Cloud Germany, Azure and Office 365 operated by 21Vianet in China)
|
||||
|
||||
|
||||
.SH CONFIGURATION
|
||||
|
||||
You should copy the default config file into your home directory before making changes:
|
||||
.nf
|
||||
\fB
|
||||
mkdir\ \-p\ ~/.config/onedrive
|
||||
cp\ @DOCDIR@/config\ ~/.config/onedrive/config
|
||||
\fP
|
||||
.fi
|
||||
|
||||
For the supported options see the above list of command line options
|
||||
for the availability of a configuration key.
|
||||
.PP
|
||||
Patterns are case insensitive.
|
||||
\fB*\fP and \fB?\fP wildcards characters are supported.
|
||||
Use \fB|\fP to separate multiple patterns.
|
||||
|
||||
After changing the filters (\fBskip_file\fP or \fBskip_dir\fP in your configs) you must
|
||||
execute \fBonedrive --synchronize --resync\fP.
|
||||
|
||||
.SH FIRST RUN
|
||||
|
||||
After installing the application you must run it at least once from the terminal
|
||||
to authorize it.
|
||||
|
||||
You will be asked to open a specific link using your web browser where you
|
||||
will have to login into your Microsoft Account and give the application the
|
||||
permission to access your files. After giving the permission, you will be
|
||||
redirected to a blank page. Copy the URI of the blank page into the application.
|
||||
|
||||
|
||||
.SH SYSTEMD INTEGRATION
|
||||
|
||||
Service files are installed into user and system directories.
|
||||
.TP
|
||||
OneDrive service running as root user
|
||||
To enable this mode, run as root user
|
||||
.nf
|
||||
\fB
|
||||
systemctl enable onedrive
|
||||
systemctl start onedrive
|
||||
\fP
|
||||
.fi
|
||||
|
||||
.TP
|
||||
OneDrive service running as root user for a non-root user
|
||||
This mode allows starting the OneDrive service automatically with
|
||||
system start for multiple users. For each \fB<username>\fP run:
|
||||
.nf
|
||||
\fB
|
||||
systemctl enable onedrive@<username>
|
||||
systemctl start onedrive@<username>
|
||||
\fP
|
||||
.fi
|
||||
|
||||
.TP
|
||||
OneDrive service running as non-root user
|
||||
In this mode the service will be started when the user logs in.
|
||||
Run as user
|
||||
.nf
|
||||
\fB
|
||||
systemctl --user enable onedrive
|
||||
systemctl --user start onedrive
|
||||
\fP
|
||||
.fi
|
||||
|
||||
.SH LOGGING OUTPUT
|
||||
|
||||
When running onedrive all actions can be logged to a separate log file.
|
||||
This can be enabled by using the \fB--enable-logging\fP flag.
|
||||
By default, log files will be written to \fB/var/log/onedrive\fP.
|
||||
|
||||
All logfiles will be in the format of \fB%username%.onedrive.log\fP,
|
||||
where \fB%username%\fP represents the user who ran the client.
|
||||
|
||||
|
||||
.SH NOTIFICATIONS
|
||||
|
||||
If OneDrive has been compiled with support for notifications, a running
|
||||
\fBonedrive\fP in monitor mode will send notifications about
|
||||
initialization and errors via libnotify to the dbus.
|
||||
|
||||
Note that this does not work if \fBonedrive\fP is started as root
|
||||
for a user via the \fBonedrive@<username>\fP service.
|
||||
|
||||
.SH SEE ALSO
|
||||
|
||||
Further examples and documentation is available in
|
||||
\f[C]README.md\f[]
|
||||
\f[C]docs/USAGE.md\f[]
|
||||
\f[C]docs/advanced-usage.md\f[]
|
||||
\f[C]docs/BusinessSharedFolders.md\f[]
|
||||
\f[C]docs/SharePoint-Shared-Libraries.md\f[]
|
||||
\f[C]docs/national-cloud-deployments.md\f[]
|
|
@ -1,112 +0,0 @@
|
|||
# Determine based on distribution & version what options & packages to include
|
||||
%if 0%{?fedora} || 0%{?rhel} >= 7
|
||||
%global with_systemd 1
|
||||
%else
|
||||
%global with_systemd 0
|
||||
%endif
|
||||
|
||||
Name: onedrive
|
||||
Version: 1.1.1
|
||||
Release: 1%{?dist}
|
||||
Summary: Microsoft OneDrive Client
|
||||
Group: System Environment/Network
|
||||
License: GPLv3
|
||||
URL: https://github.com/abraunegg/onedrive
|
||||
#Source0: %{name}-%{version}.tar.gz
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
|
||||
BuildRequires: git
|
||||
BuildRequires: dmd >= 2.079.0
|
||||
BuildRequires: sqlite-devel >= 3.7.15
|
||||
BuildRequires: libcurl-devel
|
||||
Requires: sqlite >= 3.7.15
|
||||
Requires: libcurl
|
||||
|
||||
%if 0%{?with_systemd}
|
||||
Requires(post): systemd
|
||||
Requires(preun): systemd
|
||||
Requires(postun): systemd
|
||||
%else
|
||||
Requires(post): chkconfig
|
||||
Requires(preun): chkconfig
|
||||
Requires(preun): initscripts
|
||||
Requires(postun): initscripts
|
||||
%endif
|
||||
|
||||
%define debug_package %{nil}
|
||||
|
||||
%description
|
||||
Microsoft OneDrive Client for Linux
|
||||
|
||||
%prep
|
||||
|
||||
%setup -c -D -T
|
||||
# This creates cd %{_builddir}/%{name}-%{version}/
|
||||
# clone the repository
|
||||
git clone https://github.com/abraunegg/onedrive.git .
|
||||
# We should now have %{_builddir}/%{name}-%{version} with the git clone
|
||||
|
||||
%build
|
||||
cd %{_builddir}/%{name}-%{version}
|
||||
make
|
||||
|
||||
%install
|
||||
# Make the destination directories
|
||||
%{__mkdir_p} %{buildroot}/etc/
|
||||
%{__mkdir_p} %{buildroot}/usr/bin/
|
||||
%{__mkdir_p} %{buildroot}/etc/logrotate.d
|
||||
cp %{_builddir}/%{name}-%{version}/onedrive %{buildroot}/usr/bin/onedrive
|
||||
cp %{_builddir}/%{name}-%{version}/logrotate/onedrive.logrotate %{buildroot}/etc/logrotate.d/onedrive
|
||||
%if 0%{?with_systemd}
|
||||
%{__mkdir_p} %{buildroot}/%{_unitdir}
|
||||
cp %{_builddir}/%{name}-%{version}/onedrive.service %{buildroot}/%{_unitdir}/onedrive.service
|
||||
cp %{_builddir}/%{name}-%{version}/onedrive.service %{buildroot}/%{_unitdir}/onedrive@.service
|
||||
%else
|
||||
%{__mkdir_p} %{buildroot}%{_initrddir}
|
||||
cp %{_builddir}/%{name}-%{version}/init.d/onedrive_service.sh %{buildroot}/usr/bin/onedrive_service.sh
|
||||
cp %{_builddir}/%{name}-%{version}/init.d/onedrive.init %{buildroot}%{_initrddir}/onedrive
|
||||
%endif
|
||||
|
||||
%clean
|
||||
|
||||
%files
|
||||
%defattr(0444,root,root,0755)
|
||||
%attr(0555,root,root) /usr/bin/onedrive
|
||||
%attr(0644,root,root) /etc/logrotate.d/onedrive
|
||||
%if 0%{?with_systemd}
|
||||
%attr(0555,root,root) %{_unitdir}/onedrive.service
|
||||
%attr(0555,root,root) %{_unitdir}/onedrive@.service
|
||||
%else
|
||||
%attr(0555,root,root) /usr/bin/onedrive_service.sh
|
||||
%attr(0555,root,root) %{_initrddir}/onedrive
|
||||
%endif
|
||||
|
||||
%pre
|
||||
rm -f /root/.config/onedrive/items.db
|
||||
rm -f /root/.config/onedrive/items.sqlite3
|
||||
rm -f /root/.config/onedrive/resume_upload
|
||||
|
||||
%post
|
||||
mkdir -p /root/.config/onedrive
|
||||
mkdir -p /root/OneDrive
|
||||
mkdir -p /var/log/onedrive
|
||||
chown root.users /var/log/onedrive
|
||||
chmod 0775 /var/log/onedrive
|
||||
%if 0%{?with_systemd}
|
||||
%systemd_post onedrive.service
|
||||
%else
|
||||
chkconfig --add onedrive
|
||||
chkconfig onedrive off
|
||||
%endif
|
||||
|
||||
%preun
|
||||
%if 0%{?with_systemd}
|
||||
%systemd_preun onedrive.service
|
||||
%else
|
||||
if [ $1 -eq 0 ] ; then
|
||||
service onedrive stop &> /dev/null
|
||||
chkconfig --del onedrive &> /dev/null
|
||||
fi
|
||||
%endif
|
||||
|
||||
%changelog
|
8
src/arsd/README.md
Normal file
|
@ -0,0 +1,8 @@
|
|||
The files in this directory have been obtained from the following places:
|
||||
|
||||
cgi.d
|
||||
https://github.com/adamdruppe/arsd/blob/a870179988b8881b04126856105f0fad2cc0018d/cgi.d
|
||||
License: Boost Software License - Version 1.0
|
||||
|
||||
Copyright 2008-2021, Adam D. Ruppe
|
||||
see https://github.com/adamdruppe/arsd/blob/a870179988b8881b04126856105f0fad2cc0018d/LICENSE
|
11810
src/arsd/cgi.d
Normal file
900
src/config.d
|
@ -1,51 +1,593 @@
|
|||
import std.file, std.string, std.regex, std.stdio;
|
||||
import core.stdc.stdlib: EXIT_SUCCESS, EXIT_FAILURE, exit;
|
||||
import std.file, std.string, std.regex, std.stdio, std.process, std.algorithm.searching, std.getopt, std.conv, std.path;
|
||||
import std.algorithm.sorting: sort;
|
||||
import selective;
|
||||
static import log;
|
||||
|
||||
final class Config
|
||||
{
|
||||
public string refreshTokenFilePath;
|
||||
public string deltaLinkFilePath;
|
||||
public string databaseFilePath;
|
||||
public string uploadStateFilePath;
|
||||
public string syncListFilePath;
|
||||
|
||||
private string userConfigFilePath;
|
||||
// application defaults
|
||||
public string defaultSyncDir = "~/OneDrive";
|
||||
public string defaultSkipFile = "~*|.~*|*.tmp";
|
||||
public string defaultSkipDir = "";
|
||||
public string defaultLogFileDir = "/var/log/onedrive/";
|
||||
// application set items
|
||||
public string refreshTokenFilePath = "";
|
||||
public string deltaLinkFilePath = "";
|
||||
public string databaseFilePath = "";
|
||||
public string databaseFilePathDryRun = "";
|
||||
public string uploadStateFilePath = "";
|
||||
public string syncListFilePath = "";
|
||||
public string homePath = "";
|
||||
public string configDirName = "";
|
||||
public string systemConfigDirName = "";
|
||||
public string configFileSyncDir = "";
|
||||
public string configFileSkipFile = "";
|
||||
public string configFileSkipDir = "";
|
||||
public string businessSharedFolderFilePath = "";
|
||||
private string userConfigFilePath = "";
|
||||
private string systemConfigFilePath = "";
|
||||
// was the application just authorised - paste of response uri
|
||||
public bool applicationAuthorizeResponseUri = false;
|
||||
// hashmap for the values found in the user config file
|
||||
private string[string] values;
|
||||
// ARGGGG D is stupid and cannot make hashmap initializations!!!
|
||||
// private string[string] foobar = [ "aa": "bb" ] does NOT work!!!
|
||||
private string[string] stringValues;
|
||||
private bool[string] boolValues;
|
||||
private long[string] longValues;
|
||||
// Compile time regex - this does not change
|
||||
public auto configRegex = ctRegex!(`^(\w+)\s*=\s*"(.*)"\s*$`);
|
||||
// Default directory permission mode
|
||||
public long defaultDirectoryPermissionMode = 700;
|
||||
public int configuredDirectoryPermissionMode;
|
||||
// Default file permission mode
|
||||
public long defaultFilePermissionMode = 600;
|
||||
public int configuredFilePermissionMode;
|
||||
|
||||
this(string configDirName)
|
||||
// Bring in v2.5.0 config items
|
||||
|
||||
// HTTP Struct items, used for configuring HTTP()
|
||||
// Curl Timeout Handling
|
||||
// libcurl dns_cache_timeout timeout
|
||||
immutable int defaultDnsTimeout = 60;
|
||||
// Connect timeout for HTTP|HTTPS connections
|
||||
immutable int defaultConnectTimeout = 10;
|
||||
// With the following settings we force
|
||||
// - if there is no data flow for 10min, abort
|
||||
// - if the download time for one item exceeds 1h, abort
|
||||
//
|
||||
// Timeout for activity on connection
|
||||
// this translates into Curl's CURLOPT_LOW_SPEED_TIME
|
||||
// which says:
|
||||
// It contains the time in number seconds that the
|
||||
// transfer speed should be below the CURLOPT_LOW_SPEED_LIMIT
|
||||
// for the library to consider it too slow and abort.
|
||||
immutable int defaultDataTimeout = 600;
|
||||
// Maximum time any operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
immutable int defaultOperationTimeout = 3600;
|
||||
// Specify how many redirects should be allowed
|
||||
immutable int defaultMaxRedirects = 5;
|
||||
// Specify what IP protocol version should be used when communicating with OneDrive
|
||||
immutable int defaultIpProtocol = 0; // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
|
||||
|
||||
this(string confdirOption)
|
||||
{
|
||||
refreshTokenFilePath = configDirName ~ "/refresh_token";
|
||||
deltaLinkFilePath = configDirName ~ "/delta_link";
|
||||
databaseFilePath = configDirName ~ "/items.sqlite3";
|
||||
uploadStateFilePath = configDirName ~ "/resume_upload";
|
||||
userConfigFilePath = configDirName ~ "/config";
|
||||
syncListFilePath = configDirName ~ "/sync_list";
|
||||
// default configuration - entries in config file ~/.config/onedrive/config
|
||||
// an entry here means it can be set via the config file if there is a coresponding entry, read from config and set via update_from_args()
|
||||
stringValues["sync_dir"] = defaultSyncDir;
|
||||
stringValues["skip_file"] = defaultSkipFile;
|
||||
stringValues["skip_dir"] = defaultSkipDir;
|
||||
stringValues["log_dir"] = defaultLogFileDir;
|
||||
stringValues["drive_id"] = "";
|
||||
stringValues["user_agent"] = "";
|
||||
boolValues["upload_only"] = false;
|
||||
boolValues["check_nomount"] = false;
|
||||
boolValues["check_nosync"] = false;
|
||||
boolValues["download_only"] = false;
|
||||
boolValues["disable_notifications"] = false;
|
||||
boolValues["disable_download_validation"] = false;
|
||||
boolValues["disable_upload_validation"] = false;
|
||||
boolValues["enable_logging"] = false;
|
||||
boolValues["force_http_11"] = false;
|
||||
boolValues["local_first"] = false;
|
||||
boolValues["no_remote_delete"] = false;
|
||||
boolValues["skip_symlinks"] = false;
|
||||
boolValues["debug_https"] = false;
|
||||
boolValues["skip_dotfiles"] = false;
|
||||
boolValues["dry_run"] = false;
|
||||
boolValues["sync_root_files"] = false;
|
||||
longValues["verbose"] = log.verbose; // might be initialized by the first getopt call!
|
||||
// The amount of time (seconds) between monitor sync loops
|
||||
longValues["monitor_interval"] = 300;
|
||||
longValues["skip_size"] = 0;
|
||||
longValues["min_notify_changes"] = 5;
|
||||
longValues["monitor_log_frequency"] = 6;
|
||||
// Number of N sync runs before performing a full local scan of sync_dir
|
||||
// By default 12 which means every ~60 minutes a full disk scan of sync_dir will occur
|
||||
// 'monitor_interval' * 'monitor_fullscan_frequency' = 3600 = 1 hour
|
||||
longValues["monitor_fullscan_frequency"] = 12;
|
||||
// Number of children in a path that is locally removed which will be classified as a 'big data delete'
|
||||
longValues["classify_as_big_delete"] = 1000;
|
||||
// Delete source after successful transfer
|
||||
boolValues["remove_source_files"] = false;
|
||||
// Strict matching for skip_dir
|
||||
boolValues["skip_dir_strict_match"] = false;
|
||||
// Allow for a custom Client ID / Application ID to be used to replace the inbuilt default
|
||||
// This is a config file option ONLY
|
||||
stringValues["application_id"] = "";
|
||||
// allow for resync to be set via config file
|
||||
boolValues["resync"] = false;
|
||||
// resync now needs to be acknowledged based on the 'risk' of using it
|
||||
boolValues["resync_auth"] = false;
|
||||
// Ignore data safety checks and overwrite local data rather than preserve & rename
|
||||
// This is a config file option ONLY
|
||||
boolValues["bypass_data_preservation"] = false;
|
||||
// Support National Azure AD endpoints as per https://docs.microsoft.com/en-us/graph/deployments
|
||||
// By default, if empty, use standard Azure AD URL's
|
||||
// Will support the following options:
|
||||
// - USL4
|
||||
// AD Endpoint: https://login.microsoftonline.us
|
||||
// Graph Endpoint: https://graph.microsoft.us
|
||||
// - USL5
|
||||
// AD Endpoint: https://login.microsoftonline.us
|
||||
// Graph Endpoint: https://dod-graph.microsoft.us
|
||||
// - DE
|
||||
// AD Endpoint: https://portal.microsoftazure.de
|
||||
// Graph Endpoint: https://graph.microsoft.de
|
||||
// - CN
|
||||
// AD Endpoint: https://login.chinacloudapi.cn
|
||||
// Graph Endpoint: https://microsoftgraph.chinacloudapi.cn
|
||||
stringValues["azure_ad_endpoint"] = "";
|
||||
// Support single-tenant applications that are not able to use the "common" multiplexer
|
||||
stringValues["azure_tenant_id"] = "common";
|
||||
// Allow enable / disable of the syncing of OneDrive Business Shared Folders via configuration file
|
||||
boolValues["sync_business_shared_folders"] = false;
|
||||
// Configure the default folder permission attributes for newly created folders
|
||||
longValues["sync_dir_permissions"] = defaultDirectoryPermissionMode;
|
||||
// Configure the default file permission attributes for newly created file
|
||||
longValues["sync_file_permissions"] = defaultFilePermissionMode;
|
||||
// Configure download / upload rate limits
|
||||
longValues["rate_limit"] = 0;
|
||||
// To ensure we do not fill up the load disk, how much disk space should be reserved by default
|
||||
longValues["space_reservation"] = 50 * 2^^20; // 50 MB as Bytes
|
||||
// Webhook options
|
||||
boolValues["webhook_enabled"] = false;
|
||||
stringValues["webhook_public_url"] = "";
|
||||
stringValues["webhook_listening_host"] = "";
|
||||
longValues["webhook_listening_port"] = 8888;
|
||||
longValues["webhook_expiration_interval"] = 3600 * 24;
|
||||
longValues["webhook_renewal_interval"] = 3600 * 12;
|
||||
// Log to application output running configuration values
|
||||
boolValues["display_running_config"] = false;
|
||||
// Configure read-only authentication scope
|
||||
boolValues["read_only_auth_scope"] = false;
|
||||
// Flag to cleanup local files when using --download-only
|
||||
boolValues["cleanup_local_files"] = false;
|
||||
|
||||
// DEVELOPER OPTIONS
|
||||
// display_memory = true | false
|
||||
// - It may be desirable to display the memory usage of the application to assist with diagnosing memory issues with the application
|
||||
// - This is especially beneficial when debugging or performing memory tests with Valgrind
|
||||
boolValues["display_memory"] = false;
|
||||
// monitor_max_loop = long value
|
||||
// - It may be desirable to, when running in monitor mode, force monitor mode to 'quit' after X number of loops
|
||||
// - This is especially beneficial when debugging or performing memory tests with Valgrind
|
||||
longValues["monitor_max_loop"] = 0;
|
||||
// display_sync_options = true | false
|
||||
// - It may be desirable to see what options are being passed in to performSync() without enabling the full verbose debug logging
|
||||
boolValues["display_sync_options"] = false;
|
||||
// force_children_scan = true | false
|
||||
// - Force client to use /children rather than /delta to query changes on OneDrive
|
||||
// - This option flags nationalCloudDeployment as true, forcing the client to act like it is using a National Cloud Deployment
|
||||
boolValues["force_children_scan"] = false;
|
||||
// display_processing_time = true | false
|
||||
// - Enabling this option will add function processing times to the console output
|
||||
// - This then enables tracking of where the application is spending most amount of time when processing data when users have questions re performance
|
||||
boolValues["display_processing_time"] = false;
|
||||
|
||||
// HTTPS & CURL Operation Settings
|
||||
// - Maximum time an operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
longValues["operation_timeout"] = defaultOperationTimeout;
|
||||
// libcurl dns_cache_timeout timeout
|
||||
longValues["dns_timeout"] = defaultDnsTimeout;
|
||||
// Timeout for HTTPS connections
|
||||
longValues["connect_timeout"] = defaultConnectTimeout;
|
||||
// Timeout for activity on a HTTPS connection
|
||||
longValues["data_timeout"] = defaultDataTimeout;
|
||||
// What IP protocol version should be used when communicating with OneDrive
|
||||
longValues["ip_protocol_version"] = defaultIpProtocol; // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
// EXPAND USERS HOME DIRECTORY
|
||||
// Determine the users home directory.
|
||||
// Need to avoid using ~ here as expandTilde() below does not interpret correctly when running under init.d or systemd scripts
|
||||
// Check for HOME environment variable
|
||||
if (environment.get("HOME") != ""){
|
||||
// Use HOME environment variable
|
||||
log.vdebug("homePath: HOME environment variable set");
|
||||
homePath = environment.get("HOME");
|
||||
} else {
|
||||
if ((environment.get("SHELL") == "") && (environment.get("USER") == "")){
|
||||
// No shell is set or username - observed case when running as systemd service under CentOS 7.x
|
||||
log.vdebug("homePath: WARNING - no HOME environment variable set");
|
||||
log.vdebug("homePath: WARNING - no SHELL environment variable set");
|
||||
log.vdebug("homePath: WARNING - no USER environment variable set");
|
||||
homePath = "/root";
|
||||
} else {
|
||||
// A shell & valid user is set, but no HOME is set, use ~ which can be expanded
|
||||
log.vdebug("homePath: WARNING - no HOME environment variable set");
|
||||
homePath = "~";
|
||||
}
|
||||
}
|
||||
|
||||
// Output homePath calculation
|
||||
log.vdebug("homePath: ", homePath);
|
||||
|
||||
// Determine the correct configuration directory to use
|
||||
string configDirBase;
|
||||
string systemConfigDirBase;
|
||||
if (confdirOption != "") {
|
||||
// A CLI 'confdir' was passed in
|
||||
// Clean up any stray " .. these should not be there ...
|
||||
confdirOption = strip(confdirOption,"\"");
|
||||
log.vdebug("configDirName: CLI override to set configDirName to: ", confdirOption);
|
||||
if (canFind(confdirOption,"~")) {
|
||||
// A ~ was found
|
||||
log.vdebug("configDirName: A '~' was found in configDirName, using the calculated 'homePath' to replace '~'");
|
||||
configDirName = homePath ~ strip(confdirOption,"~","~");
|
||||
} else {
|
||||
configDirName = confdirOption;
|
||||
}
|
||||
} else {
|
||||
// Determine the base directory relative to which user specific configuration files should be stored.
|
||||
if (environment.get("XDG_CONFIG_HOME") != ""){
|
||||
log.vdebug("configDirBase: XDG_CONFIG_HOME environment variable set");
|
||||
configDirBase = environment.get("XDG_CONFIG_HOME");
|
||||
} else {
|
||||
// XDG_CONFIG_HOME does not exist on systems where X11 is not present - ie - headless systems / servers
|
||||
log.vdebug("configDirBase: WARNING - no XDG_CONFIG_HOME environment variable set");
|
||||
configDirBase = homePath ~ "/.config";
|
||||
// Also set up a path to pre-shipped shared configs (which can be overridden by supplying a config file in userspace)
|
||||
systemConfigDirBase = "/etc";
|
||||
}
|
||||
|
||||
// Output configDirBase calculation
|
||||
log.vdebug("configDirBase: ", configDirBase);
|
||||
// Set the default application configuration directory
|
||||
log.vdebug("configDirName: Configuring application to use default config path");
|
||||
// configDirBase contains the correct path so we do not need to check for presence of '~'
|
||||
configDirName = configDirBase ~ "/onedrive";
|
||||
// systemConfigDirBase contains the correct path so we do not need to check for presence of '~'
|
||||
systemConfigDirName = systemConfigDirBase ~ "/onedrive";
|
||||
}
|
||||
|
||||
// Config directory options all determined
|
||||
if (!exists(configDirName)) {
|
||||
// create the directory
|
||||
mkdirRecurse(configDirName);
|
||||
// Configure the applicable permissions for the folder
|
||||
configDirName.setAttributes(returnRequiredDirectoryPermisions());
|
||||
} else {
|
||||
// The config path exists
|
||||
// The path that exists must be a directory, not a file
|
||||
if (!isDir(configDirName)) {
|
||||
if (!confdirOption.empty) {
|
||||
// the configuration path was passed in by the user .. user error
|
||||
writeln("ERROR: --confdir entered value is an existing file instead of an existing directory");
|
||||
} else {
|
||||
// other error
|
||||
writeln("ERROR: ~/.config/onedrive is a file rather than a directory");
|
||||
}
|
||||
// Must exit
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
// configDirName has a trailing /
|
||||
if (!configDirName.empty) log.vlog("Using 'user' Config Dir: ", configDirName);
|
||||
if (!systemConfigDirName.empty) log.vlog("Using 'system' Config Dir: ", systemConfigDirName);
|
||||
|
||||
// Update application set variables based on configDirName
|
||||
refreshTokenFilePath = buildNormalizedPath(configDirName ~ "/refresh_token");
|
||||
deltaLinkFilePath = buildNormalizedPath(configDirName ~ "/delta_link");
|
||||
databaseFilePath = buildNormalizedPath(configDirName ~ "/items.sqlite3");
|
||||
databaseFilePathDryRun = buildNormalizedPath(configDirName ~ "/items-dryrun.sqlite3");
|
||||
uploadStateFilePath = buildNormalizedPath(configDirName ~ "/resume_upload");
|
||||
userConfigFilePath = buildNormalizedPath(configDirName ~ "/config");
|
||||
syncListFilePath = buildNormalizedPath(configDirName ~ "/sync_list");
|
||||
systemConfigFilePath = buildNormalizedPath(systemConfigDirName ~ "/config");
|
||||
businessSharedFolderFilePath = buildNormalizedPath(configDirName ~ "/business_shared_folders");
|
||||
|
||||
// Debug Output for application set variables based on configDirName
|
||||
log.vdebug("refreshTokenFilePath = ", refreshTokenFilePath);
|
||||
log.vdebug("deltaLinkFilePath = ", deltaLinkFilePath);
|
||||
log.vdebug("databaseFilePath = ", databaseFilePath);
|
||||
log.vdebug("databaseFilePathDryRun = ", databaseFilePathDryRun);
|
||||
log.vdebug("uploadStateFilePath = ", uploadStateFilePath);
|
||||
log.vdebug("userConfigFilePath = ", userConfigFilePath);
|
||||
log.vdebug("syncListFilePath = ", syncListFilePath);
|
||||
log.vdebug("systemConfigFilePath = ", systemConfigFilePath);
|
||||
log.vdebug("businessSharedFolderFilePath = ", businessSharedFolderFilePath);
|
||||
}
|
||||
|
||||
void init()
|
||||
bool initialize()
|
||||
{
|
||||
// Default configuration directory
|
||||
setValue("sync_dir", "~/OneDrive");
|
||||
// Configure to skip ONLY temp files (~*.doc etc) by default
|
||||
// Prior configuration was: .*|~*
|
||||
setValue("skip_file", "~*");
|
||||
// By default symlinks are not skipped (using string type
|
||||
// instead of boolean because hashmap only stores string types)
|
||||
setValue("skip_symlinks", "false");
|
||||
// Configure the monitor mode loop - the number of seconds by which
|
||||
// each sync operation is undertaken when idle under monitor mode
|
||||
setValue("monitor_interval", "45");
|
||||
|
||||
if (!load(userConfigFilePath)) {
|
||||
log.vlog("No config file found, using defaults");
|
||||
// Initialise the application
|
||||
if (!exists(userConfigFilePath)) {
|
||||
// 'user' configuration file does not exist
|
||||
// Is there a system configuration file?
|
||||
if (!exists(systemConfigFilePath)) {
|
||||
// 'system' configuration file does not exist
|
||||
log.vlog("No user or system config file found, using application defaults");
|
||||
return true;
|
||||
} else {
|
||||
// 'system' configuration file exists
|
||||
// can we load the configuration file without error?
|
||||
if (load(systemConfigFilePath)) {
|
||||
// configuration file loaded without error
|
||||
log.log("System configuration file successfully loaded");
|
||||
return true;
|
||||
} else {
|
||||
// there was a problem loading the configuration file
|
||||
log.log("System configuration file has errors - please check your configuration");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// 'user' configuration file exists
|
||||
// can we load the configuration file without error?
|
||||
if (load(userConfigFilePath)) {
|
||||
// configuration file loaded without error
|
||||
log.log("Configuration file successfully loaded");
|
||||
return true;
|
||||
} else {
|
||||
// there was a problem loading the configuration file
|
||||
log.log("Configuration file has errors - please check your configuration");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
string getValue(string key)
|
||||
void update_from_args(string[] args)
|
||||
{
|
||||
auto p = key in values;
|
||||
// Add additional options that are NOT configurable via config file
|
||||
stringValues["create_directory"] = "";
|
||||
stringValues["create_share_link"] = "";
|
||||
stringValues["destination_directory"] = "";
|
||||
stringValues["get_file_link"] = "";
|
||||
stringValues["modified_by"] = "";
|
||||
stringValues["get_o365_drive_id"] = "";
|
||||
stringValues["remove_directory"] = "";
|
||||
stringValues["single_directory"] = "";
|
||||
stringValues["source_directory"] = "";
|
||||
stringValues["auth_files"] = "";
|
||||
stringValues["auth_response"] = "";
|
||||
boolValues["display_config"] = false;
|
||||
boolValues["display_sync_status"] = false;
|
||||
boolValues["print_token"] = false;
|
||||
boolValues["logout"] = false;
|
||||
boolValues["reauth"] = false;
|
||||
boolValues["monitor"] = false;
|
||||
boolValues["synchronize"] = false;
|
||||
boolValues["force"] = false;
|
||||
boolValues["list_business_shared_folders"] = false;
|
||||
boolValues["force_sync"] = false;
|
||||
boolValues["with_editing_perms"] = false;
|
||||
|
||||
// Application Startup option validation
|
||||
try {
|
||||
string tmpStr;
|
||||
bool tmpBol;
|
||||
long tmpVerb;
|
||||
// duplicated from main.d to get full help output!
|
||||
auto opt = getopt(
|
||||
|
||||
args,
|
||||
std.getopt.config.bundling,
|
||||
std.getopt.config.caseSensitive,
|
||||
"auth-files",
|
||||
"Perform authentication not via interactive dialog but via files read/writes to these files.",
|
||||
&stringValues["auth_files"],
|
||||
"auth-response",
|
||||
"Perform authentication not via interactive dialog but via providing the response url directly.",
|
||||
&stringValues["auth_response"],
|
||||
"check-for-nomount",
|
||||
"Check for the presence of .nosync in the syncdir root. If found, do not perform sync.",
|
||||
&boolValues["check_nomount"],
|
||||
"check-for-nosync",
|
||||
"Check for the presence of .nosync in each directory. If found, skip directory from sync.",
|
||||
&boolValues["check_nosync"],
|
||||
"classify-as-big-delete",
|
||||
"Number of children in a path that is locally removed which will be classified as a 'big data delete'",
|
||||
&longValues["classify_as_big_delete"],
|
||||
"cleanup-local-files",
|
||||
"Cleanup additional local files when using --download-only. This will remove local data.",
|
||||
&boolValues["cleanup_local_files"],
|
||||
"create-directory",
|
||||
"Create a directory on OneDrive - no sync will be performed.",
|
||||
&stringValues["create_directory"],
|
||||
"create-share-link",
|
||||
"Create a shareable link for an existing file on OneDrive",
|
||||
&stringValues["create_share_link"],
|
||||
"debug-https",
|
||||
"Debug OneDrive HTTPS communication.",
|
||||
&boolValues["debug_https"],
|
||||
"destination-directory",
|
||||
"Destination directory for renamed or move on OneDrive - no sync will be performed.",
|
||||
&stringValues["destination_directory"],
|
||||
"disable-notifications",
|
||||
"Do not use desktop notifications in monitor mode.",
|
||||
&boolValues["disable_notifications"],
|
||||
"disable-download-validation",
|
||||
"Disable download validation when downloading from OneDrive",
|
||||
&boolValues["disable_download_validation"],
|
||||
"disable-upload-validation",
|
||||
"Disable upload validation when uploading to OneDrive",
|
||||
&boolValues["disable_upload_validation"],
|
||||
"display-config",
|
||||
"Display what options the client will use as currently configured - no sync will be performed.",
|
||||
&boolValues["display_config"],
|
||||
"display-running-config",
|
||||
"Display what options the client has been configured to use on application startup.",
|
||||
&boolValues["display_running_config"],
|
||||
"display-sync-status",
|
||||
"Display the sync status of the client - no sync will be performed.",
|
||||
&boolValues["display_sync_status"],
|
||||
"download-only",
|
||||
"Replicate the OneDrive online state locally, by only downloading changes from OneDrive. Do not upload local changes to OneDrive.",
|
||||
&boolValues["download_only"],
|
||||
"dry-run",
|
||||
"Perform a trial sync with no changes made",
|
||||
&boolValues["dry_run"],
|
||||
"enable-logging",
|
||||
"Enable client activity to a separate log file",
|
||||
&boolValues["enable_logging"],
|
||||
"force-http-11",
|
||||
"Force the use of HTTP 1.1 for all operations",
|
||||
&boolValues["force_http_11"],
|
||||
"force",
|
||||
"Force the deletion of data when a 'big delete' is detected",
|
||||
&boolValues["force"],
|
||||
"force-sync",
|
||||
"Force a synchronization of a specific folder, only when using --synchronize --single-directory and ignore all non-default skip_dir and skip_file rules",
|
||||
&boolValues["force_sync"],
|
||||
"get-file-link",
|
||||
"Display the file link of a synced file",
|
||||
&stringValues["get_file_link"],
|
||||
"get-O365-drive-id",
|
||||
"Query and return the Office 365 Drive ID for a given Office 365 SharePoint Shared Library",
|
||||
&stringValues["get_o365_drive_id"],
|
||||
"local-first",
|
||||
"Synchronize from the local directory source first, before downloading changes from OneDrive.",
|
||||
&boolValues["local_first"],
|
||||
"log-dir",
|
||||
"Directory where logging output is saved to, needs to end with a slash.",
|
||||
&stringValues["log_dir"],
|
||||
"logout",
|
||||
"Logout the current user",
|
||||
&boolValues["logout"],
|
||||
"min-notify-changes",
|
||||
"Minimum number of pending incoming changes necessary to trigger a desktop notification",
|
||||
&longValues["min_notify_changes"],
|
||||
"modified-by",
|
||||
"Display the last modified by details of a given path",
|
||||
&stringValues["modified_by"],
|
||||
"monitor|m",
|
||||
"Keep monitoring for local and remote changes",
|
||||
&boolValues["monitor"],
|
||||
"monitor-interval",
|
||||
"Number of seconds by which each sync operation is undertaken when idle under monitor mode.",
|
||||
&longValues["monitor_interval"],
|
||||
"monitor-fullscan-frequency",
|
||||
"Number of sync runs before performing a full local scan of the synced directory",
|
||||
&longValues["monitor_fullscan_frequency"],
|
||||
"monitor-log-frequency",
|
||||
"Frequency of logging in monitor mode",
|
||||
&longValues["monitor_log_frequency"],
|
||||
"no-remote-delete",
|
||||
"Do not delete local file 'deletes' from OneDrive when using --upload-only",
|
||||
&boolValues["no_remote_delete"],
|
||||
"print-token",
|
||||
"Print the access token, useful for debugging",
|
||||
&boolValues["print_token"],
|
||||
"reauth",
|
||||
"Reauthenticate the client with OneDrive",
|
||||
&boolValues["reauth"],
|
||||
"resync",
|
||||
"Forget the last saved state, perform a full sync",
|
||||
&boolValues["resync"],
|
||||
"resync-auth",
|
||||
"Approve the use of performing a --resync action",
|
||||
&boolValues["resync_auth"],
|
||||
"remove-directory",
|
||||
"Remove a directory on OneDrive - no sync will be performed.",
|
||||
&stringValues["remove_directory"],
|
||||
"remove-source-files",
|
||||
"Remove source file after successful transfer to OneDrive when using --upload-only",
|
||||
&boolValues["remove_source_files"],
|
||||
"single-directory",
|
||||
"Specify a single local directory within the OneDrive root to sync.",
|
||||
&stringValues["single_directory"],
|
||||
"skip-dot-files",
|
||||
"Skip dot files and folders from syncing",
|
||||
&boolValues["skip_dotfiles"],
|
||||
"skip-file",
|
||||
"Skip any files that match this pattern from syncing",
|
||||
&stringValues["skip_file"],
|
||||
"skip-dir",
|
||||
"Skip any directories that match this pattern from syncing",
|
||||
&stringValues["skip_dir"],
|
||||
"skip-size",
|
||||
"Skip new files larger than this size (in MB)",
|
||||
&longValues["skip_size"],
|
||||
"skip-dir-strict-match",
|
||||
"When matching skip_dir directories, only match explicit matches",
|
||||
&boolValues["skip_dir_strict_match"],
|
||||
"skip-symlinks",
|
||||
"Skip syncing of symlinks",
|
||||
&boolValues["skip_symlinks"],
|
||||
"source-directory",
|
||||
"Source directory to rename or move on OneDrive - no sync will be performed.",
|
||||
&stringValues["source_directory"],
|
||||
"space-reservation",
|
||||
"The amount of disk space to reserve (in MB) to avoid 100% disk space utilisation",
|
||||
&longValues["space_reservation"],
|
||||
"syncdir",
|
||||
"Specify the local directory used for synchronization to OneDrive",
|
||||
&stringValues["sync_dir"],
|
||||
"synchronize",
|
||||
"Perform a synchronization",
|
||||
&boolValues["synchronize"],
|
||||
"sync-root-files",
|
||||
"Sync all files in sync_dir root when using sync_list.",
|
||||
&boolValues["sync_root_files"],
|
||||
"upload-only",
|
||||
"Replicate the locally configured sync_dir state to OneDrive, by only uploading local changes to OneDrive. Do not download changes from OneDrive.",
|
||||
&boolValues["upload_only"],
|
||||
"user-agent",
|
||||
"Specify a User Agent string to the http client",
|
||||
&stringValues["user_agent"],
|
||||
"confdir",
|
||||
"Set the directory used to store the configuration files",
|
||||
&tmpStr,
|
||||
"verbose|v+",
|
||||
"Print more details, useful for debugging (repeat for extra debugging)",
|
||||
&tmpVerb,
|
||||
"version",
|
||||
"Print the version and exit",
|
||||
&tmpBol,
|
||||
"list-shared-folders",
|
||||
"List OneDrive Business Shared Folders",
|
||||
&boolValues["list_business_shared_folders"],
|
||||
"sync-shared-folders",
|
||||
"Sync OneDrive Business Shared Folders",
|
||||
&boolValues["sync_business_shared_folders"],
|
||||
"with-editing-perms",
|
||||
"Create a read-write shareable link for an existing file on OneDrive when used with --create-share-link <file>",
|
||||
&boolValues["with_editing_perms"]
|
||||
);
|
||||
if (opt.helpWanted) {
|
||||
outputLongHelp(opt.options);
|
||||
exit(EXIT_SUCCESS);
|
||||
}
|
||||
} catch (GetOptException e) {
|
||||
log.error(e.msg);
|
||||
log.error("Try 'onedrive -h' for more information");
|
||||
exit(EXIT_FAILURE);
|
||||
} catch (Exception e) {
|
||||
// error
|
||||
log.error(e.msg);
|
||||
log.error("Try 'onedrive -h' for more information");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
string getValueString(string key)
|
||||
{
|
||||
auto p = key in stringValues;
|
||||
if (p) {
|
||||
return *p;
|
||||
} else {
|
||||
|
@ -53,47 +595,307 @@ final class Config
|
|||
}
|
||||
}
|
||||
|
||||
string getValue(string key, string value)
|
||||
long getValueLong(string key)
|
||||
{
|
||||
auto p = key in values;
|
||||
auto p = key in longValues;
|
||||
if (p) {
|
||||
return *p;
|
||||
} else {
|
||||
return value;
|
||||
throw new Exception("Missing config value: " ~ key);
|
||||
}
|
||||
}
|
||||
|
||||
void setValue(string key, string value)
|
||||
bool getValueBool(string key)
|
||||
{
|
||||
values[key] = value;
|
||||
auto p = key in boolValues;
|
||||
if (p) {
|
||||
return *p;
|
||||
} else {
|
||||
throw new Exception("Missing config value: " ~ key);
|
||||
}
|
||||
}
|
||||
|
||||
void setValueBool(string key, bool value)
|
||||
{
|
||||
boolValues[key] = value;
|
||||
}
|
||||
|
||||
void setValueString(string key, string value)
|
||||
{
|
||||
stringValues[key] = value;
|
||||
}
|
||||
|
||||
void setValueLong(string key, long value)
|
||||
{
|
||||
longValues[key] = value;
|
||||
}
|
||||
|
||||
// load a configuration file
|
||||
private bool load(string filename)
|
||||
{
|
||||
scope(failure) return false;
|
||||
// configure function variables
|
||||
try {
|
||||
readText(filename);
|
||||
} catch (std.file.FileException e) {
|
||||
// Unable to access required file
|
||||
log.error("ERROR: Unable to access ", e.msg);
|
||||
// Use exit scopes to shutdown API
|
||||
return false;
|
||||
}
|
||||
|
||||
// We were able to readText the config file - so, we should be able to open and read it
|
||||
auto file = File(filename, "r");
|
||||
auto r = regex(`^(\w+)\s*=\s*"(.*)"\s*$`);
|
||||
foreach (line; file.byLine()) {
|
||||
line = stripLeft(line);
|
||||
if (line.length == 0 || line[0] == ';' || line[0] == '#') continue;
|
||||
auto c = line.matchFirst(r);
|
||||
string lineBuffer;
|
||||
|
||||
// configure scopes
|
||||
// - failure
|
||||
scope(failure) {
|
||||
// close file if open
|
||||
if (file.isOpen()){
|
||||
// close open file
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
// - exit
|
||||
scope(exit) {
|
||||
// close file if open
|
||||
if (file.isOpen()){
|
||||
// close open file
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
|
||||
// read file line by line
|
||||
auto range = file.byLine();
|
||||
foreach (line; range) {
|
||||
lineBuffer = stripLeft(line).to!string;
|
||||
if (lineBuffer.length == 0 || lineBuffer[0] == ';' || lineBuffer[0] == '#') continue;
|
||||
auto c = lineBuffer.matchFirst(configRegex);
|
||||
if (!c.empty) {
|
||||
c.popFront(); // skip the whole match
|
||||
string key = c.front.dup;
|
||||
c.popFront();
|
||||
values[key] = c.front.dup;
|
||||
auto p = key in boolValues;
|
||||
if (p) {
|
||||
c.popFront();
|
||||
// only accept "true" as true value. TODO Should we support other formats?
|
||||
setValueBool(key, c.front.dup == "true" ? true : false);
|
||||
} else {
|
||||
auto pp = key in stringValues;
|
||||
if (pp) {
|
||||
c.popFront();
|
||||
setValueString(key, c.front.dup);
|
||||
// detect need for --resync for these:
|
||||
// --syncdir ARG
|
||||
// --skip-file ARG
|
||||
// --skip-dir ARG
|
||||
if (key == "sync_dir") configFileSyncDir = c.front.dup;
|
||||
if (key == "skip_file") {
|
||||
// Handle multiple entries of skip_file
|
||||
if (configFileSkipFile.empty) {
|
||||
// currently no entry exists
|
||||
configFileSkipFile = c.front.dup;
|
||||
} else {
|
||||
// add to existing entry
|
||||
configFileSkipFile = configFileSkipFile ~ "|" ~ to!string(c.front.dup);
|
||||
setValueString("skip_file", configFileSkipFile);
|
||||
}
|
||||
}
|
||||
if (key == "skip_dir") {
|
||||
// Handle multiple entries of skip_dir
|
||||
if (configFileSkipDir.empty) {
|
||||
// currently no entry exists
|
||||
configFileSkipDir = c.front.dup;
|
||||
} else {
|
||||
// add to existing entry
|
||||
configFileSkipDir = configFileSkipDir ~ "|" ~ to!string(c.front.dup);
|
||||
setValueString("skip_dir", configFileSkipDir);
|
||||
}
|
||||
}
|
||||
// --single-directory Strip quotation marks from path
|
||||
// This is an issue when using ONEDRIVE_SINGLE_DIRECTORY with Docker
|
||||
if (key == "single_directory") {
|
||||
// Strip quotation marks from provided path
|
||||
string configSingleDirectory = strip(to!string(c.front.dup), "\"");
|
||||
setValueString("single_directory", configSingleDirectory);
|
||||
}
|
||||
// Azure AD Configuration
|
||||
if (key == "azure_ad_endpoint") {
|
||||
string azureConfigValue = c.front.dup;
|
||||
switch(azureConfigValue) {
|
||||
case "":
|
||||
log.log("Using config option for Global Azure AD Endpoints");
|
||||
break;
|
||||
case "USL4":
|
||||
log.log("Using config option for Azure AD for US Government Endpoints");
|
||||
break;
|
||||
case "USL5":
|
||||
log.log("Using config option for Azure AD for US Government Endpoints (DOD)");
|
||||
break;
|
||||
case "DE":
|
||||
log.log("Using config option for Azure AD Germany");
|
||||
break;
|
||||
case "CN":
|
||||
log.log("Using config option for Azure AD China operated by 21Vianet");
|
||||
break;
|
||||
// Default - all other entries
|
||||
default:
|
||||
log.log("Unknown Azure AD Endpoint - using Global Azure AD Endpoints");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
auto ppp = key in longValues;
|
||||
if (ppp) {
|
||||
c.popFront();
|
||||
setValueLong(key, to!long(c.front.dup));
|
||||
// if key is space_reservation we have to calculate MB -> bytes
|
||||
if (key == "space_reservation") {
|
||||
// temp value
|
||||
ulong tempValue = to!long(c.front.dup);
|
||||
// a value of 0 needs to be made at least 1MB ..
|
||||
if (tempValue == 0) {
|
||||
tempValue = 1;
|
||||
}
|
||||
setValueLong("space_reservation", to!long(tempValue * 2^^20));
|
||||
}
|
||||
} else {
|
||||
log.log("Unknown key in config file: ", key);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.log("Malformed config line: ", line);
|
||||
log.log("Malformed config line: ", lineBuffer);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void configureRequiredDirectoryPermisions() {
|
||||
// return the directory permission mode required
|
||||
// - return octal!defaultDirectoryPermissionMode; ... cant be used .. which is odd
|
||||
// Error: variable defaultDirectoryPermissionMode cannot be read at compile time
|
||||
if (getValueLong("sync_dir_permissions") != defaultDirectoryPermissionMode) {
|
||||
// return user configured permissions as octal integer
|
||||
string valueToConvert = to!string(getValueLong("sync_dir_permissions"));
|
||||
auto convertedValue = parse!long(valueToConvert, 8);
|
||||
configuredDirectoryPermissionMode = to!int(convertedValue);
|
||||
} else {
|
||||
// return default as octal integer
|
||||
string valueToConvert = to!string(defaultDirectoryPermissionMode);
|
||||
auto convertedValue = parse!long(valueToConvert, 8);
|
||||
configuredDirectoryPermissionMode = to!int(convertedValue);
|
||||
}
|
||||
}
|
||||
|
||||
void configureRequiredFilePermisions() {
|
||||
// return the file permission mode required
|
||||
// - return octal!defaultFilePermissionMode; ... cant be used .. which is odd
|
||||
// Error: variable defaultFilePermissionMode cannot be read at compile time
|
||||
if (getValueLong("sync_file_permissions") != defaultFilePermissionMode) {
|
||||
// return user configured permissions as octal integer
|
||||
string valueToConvert = to!string(getValueLong("sync_file_permissions"));
|
||||
auto convertedValue = parse!long(valueToConvert, 8);
|
||||
configuredFilePermissionMode = to!int(convertedValue);
|
||||
} else {
|
||||
// return default as octal integer
|
||||
string valueToConvert = to!string(defaultFilePermissionMode);
|
||||
auto convertedValue = parse!long(valueToConvert, 8);
|
||||
configuredFilePermissionMode = to!int(convertedValue);
|
||||
}
|
||||
}
|
||||
|
||||
int returnRequiredDirectoryPermisions() {
|
||||
// read the configuredDirectoryPermissionMode and return
|
||||
if (configuredDirectoryPermissionMode == 0) {
|
||||
// the configured value is zero, this means that directories would get
|
||||
// values of d---------
|
||||
configureRequiredDirectoryPermisions();
|
||||
}
|
||||
return configuredDirectoryPermissionMode;
|
||||
}
|
||||
|
||||
int returnRequiredFilePermisions() {
|
||||
// read the configuredFilePermissionMode and return
|
||||
if (configuredFilePermissionMode == 0) {
|
||||
// the configured value is zero
|
||||
configureRequiredFilePermisions();
|
||||
}
|
||||
return configuredFilePermissionMode;
|
||||
}
|
||||
|
||||
void resetSkipToDefaults() {
|
||||
// reset skip_file and skip_dir to application defaults
|
||||
// skip_file
|
||||
log.vdebug("original skip_file: ", getValueString("skip_file"));
|
||||
log.vdebug("resetting skip_file");
|
||||
setValueString("skip_file", defaultSkipFile);
|
||||
log.vdebug("reset skip_file: ", getValueString("skip_file"));
|
||||
// skip_dir
|
||||
log.vdebug("original skip_dir: ", getValueString("skip_dir"));
|
||||
log.vdebug("resetting skip_dir");
|
||||
setValueString("skip_dir", defaultSkipDir);
|
||||
log.vdebug("reset skip_dir: ", getValueString("skip_dir"));
|
||||
}
|
||||
}
|
||||
|
||||
void outputLongHelp(Option[] opt)
|
||||
{
|
||||
auto argsNeedingOptions = [
|
||||
"--auth-files",
|
||||
"--auth-response",
|
||||
"--confdir",
|
||||
"--create-directory",
|
||||
"--create-share-link",
|
||||
"--destination-directory",
|
||||
"--get-file-link",
|
||||
"--get-O365-drive-id",
|
||||
"--log-dir",
|
||||
"--min-notify-changes",
|
||||
"--modified-by",
|
||||
"--monitor-interval",
|
||||
"--monitor-log-frequency",
|
||||
"--monitor-fullscan-frequency",
|
||||
"--operation-timeout",
|
||||
"--remove-directory",
|
||||
"--single-directory",
|
||||
"--skip-dir",
|
||||
"--skip-file",
|
||||
"--skip-size",
|
||||
"--source-directory",
|
||||
"--space-reservation",
|
||||
"--syncdir",
|
||||
"--user-agent" ];
|
||||
writeln(`OneDrive - a client for OneDrive Cloud Services
|
||||
|
||||
Usage:
|
||||
onedrive [options] --synchronize
|
||||
Do a one time synchronization
|
||||
onedrive [options] --monitor
|
||||
Monitor filesystem and sync regularly
|
||||
onedrive [options] --display-config
|
||||
Display the currently used configuration
|
||||
onedrive [options] --display-sync-status
|
||||
Query OneDrive service and report on pending changes
|
||||
onedrive -h | --help
|
||||
Show this help screen
|
||||
onedrive --version
|
||||
Show version
|
||||
|
||||
Options:
|
||||
`);
|
||||
foreach (it; opt.sort!("a.optLong < b.optLong")) {
|
||||
writefln(" %s%s%s%s\n %s",
|
||||
it.optLong,
|
||||
it.optShort == "" ? "" : " " ~ it.optShort,
|
||||
argsNeedingOptions.canFind(it.optLong) ? " ARG" : "",
|
||||
it.required ? " (required)" : "", it.help);
|
||||
}
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto cfg = new Config("");
|
||||
cfg.load("config");
|
||||
assert(cfg.getValue("sync_dir") == "~/OneDrive");
|
||||
assert(cfg.getValue("empty", "default") == "default");
|
||||
assert(cfg.getValueString("sync_dir") == "~/OneDrive");
|
||||
}
|
||||
|
|
282
src/itemdb.d
|
@ -2,6 +2,8 @@ import std.datetime;
|
|||
import std.exception;
|
||||
import std.path;
|
||||
import std.string;
|
||||
import std.stdio;
|
||||
import std.algorithm.searching;
|
||||
import core.stdc.stdlib;
|
||||
import sqlite;
|
||||
static import log;
|
||||
|
@ -21,24 +23,25 @@ struct Item {
|
|||
string cTag;
|
||||
SysTime mtime;
|
||||
string parentId;
|
||||
string crc32Hash;
|
||||
string sha1Hash;
|
||||
string quickXorHash;
|
||||
string sha256Hash;
|
||||
string remoteDriveId;
|
||||
string remoteId;
|
||||
string syncStatus;
|
||||
}
|
||||
|
||||
final class ItemDatabase
|
||||
{
|
||||
// increment this for every change in the db schema
|
||||
immutable int itemDatabaseVersion = 7;
|
||||
immutable int itemDatabaseVersion = 11;
|
||||
|
||||
Database db;
|
||||
Statement insertItemStmt;
|
||||
Statement updateItemStmt;
|
||||
Statement selectItemByIdStmt;
|
||||
Statement selectItemByParentIdStmt;
|
||||
Statement deleteItemByIdStmt;
|
||||
string insertItemStmt;
|
||||
string updateItemStmt;
|
||||
string selectItemByIdStmt;
|
||||
string selectItemByParentIdStmt;
|
||||
string deleteItemByIdStmt;
|
||||
bool databaseInitialised = false;
|
||||
|
||||
this(const(char)[] filename)
|
||||
{
|
||||
|
@ -48,8 +51,17 @@ final class ItemDatabase
|
|||
dbVersion = db.getVersion();
|
||||
} catch (SqliteException e) {
|
||||
// An error was generated - what was the error?
|
||||
log.error("\nAn internal database error occurred: " ~ e.msg ~ "\n");
|
||||
exit(-1);
|
||||
if (e.msg == "database is locked") {
|
||||
writeln();
|
||||
log.error("ERROR: onedrive application is already running - check system process list for active application instances");
|
||||
log.vlog(" - Use 'sudo ps aufxw | grep onedrive' to potentially determine acive running process");
|
||||
writeln();
|
||||
} else {
|
||||
writeln();
|
||||
log.error("ERROR: An internal database error occurred: " ~ e.msg);
|
||||
writeln();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (dbVersion == 0) {
|
||||
|
@ -59,26 +71,57 @@ final class ItemDatabase
|
|||
db.exec("DROP TABLE item");
|
||||
createTable();
|
||||
}
|
||||
db.exec("PRAGMA foreign_keys = ON");
|
||||
db.exec("PRAGMA recursive_triggers = ON");
|
||||
// Set the enforcement of foreign key constraints.
|
||||
// https://www.sqlite.org/pragma.html#pragma_foreign_keys
|
||||
// PRAGMA foreign_keys = boolean;
|
||||
db.exec("PRAGMA foreign_keys = TRUE");
|
||||
// Set the recursive trigger capability
|
||||
// https://www.sqlite.org/pragma.html#pragma_recursive_triggers
|
||||
// PRAGMA recursive_triggers = boolean;
|
||||
db.exec("PRAGMA recursive_triggers = TRUE");
|
||||
// Set the journal mode for databases associated with the current connection
|
||||
// https://www.sqlite.org/pragma.html#pragma_journal_mode
|
||||
db.exec("PRAGMA journal_mode = WAL");
|
||||
// Automatic indexing is enabled by default as of version 3.7.17
|
||||
// https://www.sqlite.org/pragma.html#pragma_automatic_index
|
||||
// PRAGMA automatic_index = boolean;
|
||||
db.exec("PRAGMA automatic_index = FALSE");
|
||||
// Tell SQLite to store temporary tables in memory. This will speed up many read operations that rely on temporary tables, indices, and views.
|
||||
// https://www.sqlite.org/pragma.html#pragma_temp_store
|
||||
db.exec("PRAGMA temp_store = MEMORY");
|
||||
// Tell SQlite to cleanup database table size
|
||||
// https://www.sqlite.org/pragma.html#pragma_auto_vacuum
|
||||
// PRAGMA schema.auto_vacuum = 0 | NONE | 1 | FULL | 2 | INCREMENTAL;
|
||||
db.exec("PRAGMA auto_vacuum = FULL");
|
||||
// This pragma sets or queries the database connection locking-mode. The locking-mode is either NORMAL or EXCLUSIVE.
|
||||
// https://www.sqlite.org/pragma.html#pragma_locking_mode
|
||||
// PRAGMA schema.locking_mode = NORMAL | EXCLUSIVE
|
||||
db.exec("PRAGMA locking_mode = EXCLUSIVE");
|
||||
|
||||
insertItemStmt = db.prepare("
|
||||
INSERT OR REPLACE INTO item (driveId, id, name, type, eTag, cTag, mtime, parentId, crc32Hash, sha1Hash, quickXorHash, remoteDriveId, remoteId)
|
||||
insertItemStmt = "
|
||||
INSERT OR REPLACE INTO item (driveId, id, name, type, eTag, cTag, mtime, parentId, quickXorHash, sha256Hash, remoteDriveId, remoteId, syncStatus)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13)
|
||||
");
|
||||
updateItemStmt = db.prepare("
|
||||
";
|
||||
updateItemStmt = "
|
||||
UPDATE item
|
||||
SET name = ?3, type = ?4, eTag = ?5, cTag = ?6, mtime = ?7, parentId = ?8, crc32Hash = ?9, sha1Hash = ?10, quickXorHash = ?11, remoteDriveId = ?12, remoteId = ?13
|
||||
SET name = ?3, type = ?4, eTag = ?5, cTag = ?6, mtime = ?7, parentId = ?8, quickXorHash = ?9, sha256Hash = ?10, remoteDriveId = ?11, remoteId = ?12, syncStatus = ?13
|
||||
WHERE driveId = ?1 AND id = ?2
|
||||
");
|
||||
selectItemByIdStmt = db.prepare("
|
||||
";
|
||||
selectItemByIdStmt = "
|
||||
SELECT *
|
||||
FROM item
|
||||
WHERE driveId = ?1 AND id = ?2
|
||||
");
|
||||
selectItemByParentIdStmt = db.prepare("SELECT * FROM item WHERE driveId = ? AND parentId = ?");
|
||||
deleteItemByIdStmt = db.prepare("DELETE FROM item WHERE driveId = ? AND id = ?");
|
||||
";
|
||||
selectItemByParentIdStmt = "SELECT * FROM item WHERE driveId = ? AND parentId = ?";
|
||||
deleteItemByIdStmt = "DELETE FROM item WHERE driveId = ? AND id = ?";
|
||||
|
||||
// flag that the database is accessible and we have control
|
||||
databaseInitialised = true;
|
||||
}
|
||||
|
||||
bool isDatabaseInitialised()
|
||||
{
|
||||
return databaseInitialised;
|
||||
}
|
||||
|
||||
void createTable()
|
||||
|
@ -92,12 +135,12 @@ final class ItemDatabase
|
|||
cTag TEXT,
|
||||
mtime TEXT NOT NULL,
|
||||
parentId TEXT,
|
||||
crc32Hash TEXT,
|
||||
sha1Hash TEXT,
|
||||
quickXorHash TEXT,
|
||||
sha256Hash TEXT,
|
||||
remoteDriveId TEXT,
|
||||
remoteId TEXT,
|
||||
deltaLink TEXT,
|
||||
syncStatus TEXT,
|
||||
PRIMARY KEY (driveId, id),
|
||||
FOREIGN KEY (driveId, parentId)
|
||||
REFERENCES item (driveId, id)
|
||||
|
@ -106,19 +149,33 @@ final class ItemDatabase
|
|||
)");
|
||||
db.exec("CREATE INDEX name_idx ON item (name)");
|
||||
db.exec("CREATE INDEX remote_idx ON item (remoteDriveId, remoteId)");
|
||||
db.exec("CREATE INDEX item_children_idx ON item (driveId, parentId)");
|
||||
db.exec("CREATE INDEX selectByPath_idx ON item (name, driveId, parentId)");
|
||||
db.setVersion(itemDatabaseVersion);
|
||||
}
|
||||
|
||||
void insert(const ref Item item)
|
||||
{
|
||||
bindItem(item, insertItemStmt);
|
||||
insertItemStmt.exec();
|
||||
auto p = db.prepare(insertItemStmt);
|
||||
bindItem(item, p);
|
||||
p.exec();
|
||||
}
|
||||
|
||||
void update(const ref Item item)
|
||||
{
|
||||
bindItem(item, updateItemStmt);
|
||||
updateItemStmt.exec();
|
||||
auto p = db.prepare(updateItemStmt);
|
||||
bindItem(item, p);
|
||||
p.exec();
|
||||
}
|
||||
|
||||
void dump_open_statements()
|
||||
{
|
||||
db.dump_open_statements();
|
||||
}
|
||||
|
||||
int db_checkpoint()
|
||||
{
|
||||
return db.db_checkpoint();
|
||||
}
|
||||
|
||||
void upsert(const ref Item item)
|
||||
|
@ -127,18 +184,19 @@ final class ItemDatabase
|
|||
s.bind(1, item.driveId);
|
||||
s.bind(2, item.id);
|
||||
auto r = s.exec();
|
||||
Statement* stmt;
|
||||
if (r.front[0] == "0") stmt = &insertItemStmt;
|
||||
else stmt = &updateItemStmt;
|
||||
bindItem(item, *stmt);
|
||||
Statement stmt;
|
||||
if (r.front[0] == "0") stmt = db.prepare(insertItemStmt);
|
||||
else stmt = db.prepare(updateItemStmt);
|
||||
bindItem(item, stmt);
|
||||
stmt.exec();
|
||||
}
|
||||
|
||||
Item[] selectChildren(const(char)[] driveId, const(char)[] id)
|
||||
{
|
||||
selectItemByParentIdStmt.bind(1, driveId);
|
||||
selectItemByParentIdStmt.bind(2, id);
|
||||
auto res = selectItemByParentIdStmt.exec();
|
||||
auto p = db.prepare(selectItemByParentIdStmt);
|
||||
p.bind(1, driveId);
|
||||
p.bind(2, id);
|
||||
auto res = p.exec();
|
||||
Item[] items;
|
||||
while (!res.empty) {
|
||||
items ~= buildItem(res);
|
||||
|
@ -149,9 +207,10 @@ final class ItemDatabase
|
|||
|
||||
bool selectById(const(char)[] driveId, const(char)[] id, out Item item)
|
||||
{
|
||||
selectItemByIdStmt.bind(1, driveId);
|
||||
selectItemByIdStmt.bind(2, id);
|
||||
auto r = selectItemByIdStmt.exec();
|
||||
auto p = db.prepare(selectItemByIdStmt);
|
||||
p.bind(1, driveId);
|
||||
p.bind(2, id);
|
||||
auto r = p.exec();
|
||||
if (!r.empty) {
|
||||
item = buildItem(r);
|
||||
return true;
|
||||
|
@ -159,12 +218,13 @@ final class ItemDatabase
|
|||
return false;
|
||||
}
|
||||
|
||||
// returns if an item id is in the database
|
||||
// returns true if an item id is in the database
|
||||
bool idInLocalDatabase(const(string) driveId, const(string)id)
|
||||
{
|
||||
selectItemByIdStmt.bind(1, driveId);
|
||||
selectItemByIdStmt.bind(2, id);
|
||||
auto r = selectItemByIdStmt.exec();
|
||||
auto p = db.prepare(selectItemByIdStmt);
|
||||
p.bind(1, driveId);
|
||||
p.bind(2, id);
|
||||
auto r = p.exec();
|
||||
if (!r.empty) {
|
||||
return true;
|
||||
}
|
||||
|
@ -176,7 +236,16 @@ final class ItemDatabase
|
|||
bool selectByPath(const(char)[] path, string rootDriveId, out Item item)
|
||||
{
|
||||
Item currItem = { driveId: rootDriveId };
|
||||
path = "root/" ~ path.chompPrefix(".");
|
||||
|
||||
// Issue https://github.com/abraunegg/onedrive/issues/578
|
||||
if (startsWith(path, "./") || path == ".") {
|
||||
// Need to remove the . from the path prefix
|
||||
path = "root/" ~ path.chompPrefix(".");
|
||||
} else {
|
||||
// Leave path as it is
|
||||
path = "root/" ~ path;
|
||||
}
|
||||
|
||||
auto s = db.prepare("SELECT * FROM item WHERE name = ?1 AND driveId IS ?2 AND parentId IS ?3");
|
||||
foreach (name; pathSplitter(path)) {
|
||||
s.bind(1, name);
|
||||
|
@ -199,10 +268,19 @@ final class ItemDatabase
|
|||
}
|
||||
|
||||
// same as selectByPath() but it does not traverse remote folders
|
||||
bool selectByPathNoRemote(const(char)[] path, string rootDriveId, out Item item)
|
||||
bool selectByPathWithoutRemote(const(char)[] path, string rootDriveId, out Item item)
|
||||
{
|
||||
Item currItem = { driveId: rootDriveId };
|
||||
path = "root/" ~ path.chompPrefix(".");
|
||||
|
||||
// Issue https://github.com/abraunegg/onedrive/issues/578
|
||||
if (startsWith(path, "./") || path == ".") {
|
||||
// Need to remove the . from the path prefix
|
||||
path = "root/" ~ path.chompPrefix(".");
|
||||
} else {
|
||||
// Leave path as it is
|
||||
path = "root/" ~ path;
|
||||
}
|
||||
|
||||
auto s = db.prepare("SELECT * FROM item WHERE name IS ?1 AND driveId IS ?2 AND parentId IS ?3");
|
||||
foreach (name; pathSplitter(path)) {
|
||||
s.bind(1, name);
|
||||
|
@ -218,9 +296,10 @@ final class ItemDatabase
|
|||
|
||||
void deleteById(const(char)[] driveId, const(char)[] id)
|
||||
{
|
||||
deleteItemByIdStmt.bind(1, driveId);
|
||||
deleteItemByIdStmt.bind(2, id);
|
||||
deleteItemByIdStmt.exec();
|
||||
auto p = db.prepare(deleteItemByIdStmt);
|
||||
p.bind(1, driveId);
|
||||
p.bind(2, id);
|
||||
p.exec();
|
||||
}
|
||||
|
||||
private void bindItem(const ref Item item, ref Statement stmt)
|
||||
|
@ -240,11 +319,11 @@ final class ItemDatabase
|
|||
bind(6, cTag);
|
||||
bind(7, mtime.toISOExtString());
|
||||
bind(8, parentId);
|
||||
bind(9, crc32Hash);
|
||||
bind(10, sha1Hash);
|
||||
bind(11, quickXorHash);
|
||||
bind(12, remoteDriveId);
|
||||
bind(13, remoteId);
|
||||
bind(9, quickXorHash);
|
||||
bind(10, sha256Hash);
|
||||
bind(11, remoteDriveId);
|
||||
bind(12, remoteId);
|
||||
bind(13, syncStatus);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -260,11 +339,11 @@ final class ItemDatabase
|
|||
cTag: result.front[5].dup,
|
||||
mtime: SysTime.fromISOExtString(result.front[6]),
|
||||
parentId: result.front[7].dup,
|
||||
crc32Hash: result.front[8].dup,
|
||||
sha1Hash: result.front[9].dup,
|
||||
quickXorHash: result.front[10].dup,
|
||||
remoteDriveId: result.front[11].dup,
|
||||
remoteId: result.front[12].dup
|
||||
quickXorHash: result.front[8].dup,
|
||||
sha256Hash: result.front[9].dup,
|
||||
remoteDriveId: result.front[10].dup,
|
||||
remoteId: result.front[11].dup,
|
||||
syncStatus: result.front[12].dup
|
||||
};
|
||||
switch (result.front[3]) {
|
||||
case "file": item.type = ItemType.file; break;
|
||||
|
@ -309,9 +388,14 @@ final class ItemDatabase
|
|||
if (r2.empty) {
|
||||
// root reached
|
||||
assert(path.length >= 4);
|
||||
// remove "root"
|
||||
if (path.length >= 5) path = path[5 .. $];
|
||||
else path = path[4 .. $];
|
||||
// remove "root/" from path string if it exists
|
||||
if (path.length >= 5) {
|
||||
if (canFind(path, "root/")){
|
||||
path = path[5 .. $];
|
||||
}
|
||||
} else {
|
||||
path = path[4 .. $];
|
||||
}
|
||||
// special case of computing the path of the root itself
|
||||
if (path.length == 0) path = ".";
|
||||
break;
|
||||
|
@ -322,6 +406,9 @@ final class ItemDatabase
|
|||
}
|
||||
} else {
|
||||
// broken tree
|
||||
log.vdebug("The following generated a broken tree query:");
|
||||
log.vdebug("Drive ID: ", driveId);
|
||||
log.vdebug("Item ID: ", id);
|
||||
assert(0);
|
||||
}
|
||||
}
|
||||
|
@ -362,4 +449,77 @@ final class ItemDatabase
|
|||
stmt.bind(3, deltaLink);
|
||||
stmt.exec();
|
||||
}
|
||||
|
||||
// National Cloud Deployments (US and DE) do not support /delta as a query
|
||||
// We need to track in the database that this item is in sync
|
||||
// As we query /children to get all children from OneDrive, update anything in the database
|
||||
// to be flagged as not-in-sync, thus, we can use that flag to determing what was previously
|
||||
// in-sync, but now deleted on OneDrive
|
||||
void downgradeSyncStatusFlag(const(char)[] driveId, const(char)[] id)
|
||||
{
|
||||
assert(driveId);
|
||||
auto stmt = db.prepare("UPDATE item SET syncStatus = 'N' WHERE driveId = ?1 AND id = ?2");
|
||||
stmt.bind(1, driveId);
|
||||
stmt.bind(2, id);
|
||||
stmt.exec();
|
||||
}
|
||||
|
||||
// National Cloud Deployments (US and DE) do not support /delta as a query
|
||||
// Select items that have a out-of-sync flag set
|
||||
Item[] selectOutOfSyncItems(const(char)[] driveId)
|
||||
{
|
||||
assert(driveId);
|
||||
Item[] items;
|
||||
auto stmt = db.prepare("SELECT * FROM item WHERE syncStatus = 'N' AND driveId = ?1");
|
||||
stmt.bind(1, driveId);
|
||||
auto res = stmt.exec();
|
||||
while (!res.empty) {
|
||||
items ~= buildItem(res);
|
||||
res.step();
|
||||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
// OneDrive Business Folders are stored in the database potentially without a root | parentRoot link
|
||||
// Select items associated with the provided driveId
|
||||
Item[] selectByDriveId(const(char)[] driveId)
|
||||
{
|
||||
assert(driveId);
|
||||
Item[] items;
|
||||
auto stmt = db.prepare("SELECT * FROM item WHERE driveId = ?1 AND parentId IS NULL");
|
||||
stmt.bind(1, driveId);
|
||||
auto res = stmt.exec();
|
||||
while (!res.empty) {
|
||||
items ~= buildItem(res);
|
||||
res.step();
|
||||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
// Perform a vacuum on the database, commit WAL / SHM to file
|
||||
void performVacuum()
|
||||
{
|
||||
try {
|
||||
auto stmt = db.prepare("VACUUM;");
|
||||
stmt.exec();
|
||||
} catch (SqliteException e) {
|
||||
writeln();
|
||||
log.error("ERROR: Unable to perform a database vacuum: " ~ e.msg);
|
||||
writeln();
|
||||
}
|
||||
}
|
||||
|
||||
// Select distinct driveId items from database
|
||||
string[] selectDistinctDriveIds()
|
||||
{
|
||||
string[] driveIdArray;
|
||||
auto stmt = db.prepare("SELECT DISTINCT driveId FROM item;");
|
||||
auto res = stmt.exec();
|
||||
if (res.empty) return driveIdArray;
|
||||
while (!res.empty) {
|
||||
driveIdArray ~= res.front[0].dup;
|
||||
res.step();
|
||||
}
|
||||
return driveIdArray;
|
||||
}
|
||||
}
|
||||
|
|
189
src/log.d
|
@ -2,22 +2,31 @@ import std.stdio;
|
|||
import std.file;
|
||||
import std.datetime;
|
||||
import std.process;
|
||||
import std.conv;
|
||||
import core.memory;
|
||||
import core.sys.posix.pwd, core.sys.posix.unistd, core.stdc.string : strlen;
|
||||
import std.algorithm : splitter;
|
||||
version(Notifications) {
|
||||
import dnotify;
|
||||
}
|
||||
|
||||
// enable verbose logging
|
||||
long verbose;
|
||||
bool writeLogFile = false;
|
||||
bool logFileWriteFailFlag = false;
|
||||
|
||||
private bool doNotifications;
|
||||
|
||||
// shared string variable for username
|
||||
string username;
|
||||
string logFilePath;
|
||||
static this() {
|
||||
username = getUserName();
|
||||
logFilePath = "/var/log/onedrive/";
|
||||
}
|
||||
|
||||
// enable verbose logging
|
||||
bool verbose;
|
||||
|
||||
void init()
|
||||
void init(string logDir)
|
||||
{
|
||||
writeLogFile = true;
|
||||
username = getUserName();
|
||||
logFilePath = logDir;
|
||||
|
||||
if (!exists(logFilePath)){
|
||||
// logfile path does not exist
|
||||
try {
|
||||
|
@ -25,45 +34,128 @@ void init()
|
|||
}
|
||||
catch (std.file.FileException e) {
|
||||
// we got an error ..
|
||||
writeln("\nUnable to create /var/log/onedrive/ ");
|
||||
writeln("Please manually create /var/log/onedrive/ and set appropriate permissions to allow write access");
|
||||
writeln("The client activity log will be located in the users home directory\n");
|
||||
writeln("\nUnable to access ", logFilePath);
|
||||
writeln("Please manually create '",logFilePath, "' and set appropriate permissions to allow write access");
|
||||
writeln("The requested client activity log will instead be located in your users home directory");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setNotifications(bool value)
|
||||
{
|
||||
version(Notifications) {
|
||||
// if we try to enable notifications, check for server availability
|
||||
// and disable in case dbus server is not reachable
|
||||
if (value) {
|
||||
auto serverAvailable = dnotify.check_availability();
|
||||
if (!serverAvailable) {
|
||||
log("Notification (dbus) server not available, disabling");
|
||||
value = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
doNotifications = value;
|
||||
}
|
||||
|
||||
void log(T...)(T args)
|
||||
{
|
||||
writeln(args);
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
}
|
||||
}
|
||||
|
||||
void logAndNotify(T...)(T args)
|
||||
{
|
||||
notify(args);
|
||||
log(args);
|
||||
}
|
||||
|
||||
void fileOnly(T...)(T args)
|
||||
{
|
||||
// Write to log file only
|
||||
logfileWriteLine(args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
}
|
||||
}
|
||||
|
||||
void vlog(T...)(T args)
|
||||
{
|
||||
if (verbose) {
|
||||
if (verbose >= 1) {
|
||||
writeln(args);
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void vdebug(T...)(T args)
|
||||
{
|
||||
if (verbose >= 2) {
|
||||
writeln("[DEBUG] ", args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine("[DEBUG] ", args);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void vdebugNewLine(T...)(T args)
|
||||
{
|
||||
if (verbose >= 2) {
|
||||
writeln("\n[DEBUG] ", args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine("\n[DEBUG] ", args);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void error(T...)(T args)
|
||||
{
|
||||
stderr.writeln(args);
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
if(writeLogFile){
|
||||
// Write to log file
|
||||
logfileWriteLine(args);
|
||||
}
|
||||
}
|
||||
|
||||
void errorAndNotify(T...)(T args)
|
||||
{
|
||||
notify(args);
|
||||
error(args);
|
||||
}
|
||||
|
||||
void notify(T...)(T args)
|
||||
{
|
||||
version(Notifications) {
|
||||
if (doNotifications) {
|
||||
string result;
|
||||
foreach (index, arg; args) {
|
||||
result ~= to!string(arg);
|
||||
if (index != args.length - 1)
|
||||
result ~= " ";
|
||||
}
|
||||
auto n = new Notification("OneDrive", result, "IGNORED");
|
||||
try {
|
||||
n.show();
|
||||
// Sent message to notification daemon
|
||||
if (verbose >= 2) {
|
||||
writeln("[DEBUG] Sent notification to notification service. If notification is not displayed, check dbus or notification-daemon for errors");
|
||||
}
|
||||
|
||||
} catch (Throwable e) {
|
||||
vlog("Got exception from showing notification: ", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void logfileWriteLine(T...)(T args)
|
||||
{
|
||||
static import std.exception;
|
||||
// Write to log file
|
||||
string logFileName = .logFilePath ~ .username ~ ".onedrive.log";
|
||||
auto currentTime = Clock.currTime();
|
||||
|
@ -75,26 +167,73 @@ private void logfileWriteLine(T...)(T args)
|
|||
logFile = File(logFileName, "a");
|
||||
}
|
||||
catch (std.exception.ErrnoException e) {
|
||||
// We cannot open the log file in /var/log/onedrive for writing
|
||||
// We cannot open the log file in logFilePath location for writing
|
||||
// The user is not part of the standard 'users' group (GID 100)
|
||||
// Change logfile to ~/onedrive.log putting the log file in the users home directory
|
||||
|
||||
if (!logFileWriteFailFlag) {
|
||||
// write out error message that we cant log to the requested file
|
||||
writeln("\nUnable to write activity log to ", logFileName);
|
||||
writeln("Please set appropriate permissions to allow write access to the logging directory for your user account");
|
||||
writeln("The requested client activity log will instead be located in your users home directory\n");
|
||||
|
||||
// set the flag so we dont keep printing this error message
|
||||
logFileWriteFailFlag = true;
|
||||
}
|
||||
|
||||
string homePath = environment.get("HOME");
|
||||
string logFileNameAlternate = homePath ~ "/onedrive.log";
|
||||
logFile = File(logFileNameAlternate, "a");
|
||||
}
|
||||
// Write to the log file
|
||||
logFile.writeln(timeString, " ", args);
|
||||
logFile.writeln(timeString, "\t", args);
|
||||
logFile.close();
|
||||
}
|
||||
|
||||
private string getUserName()
|
||||
{
|
||||
auto pw = getpwuid(getuid);
|
||||
auto uinfo = pw.pw_gecos[0 .. strlen(pw.pw_gecos)].splitter(',');
|
||||
if (!uinfo.empty && uinfo.front.length){
|
||||
return uinfo.front.idup;
|
||||
|
||||
// get required details
|
||||
auto runtime_pw_name = pw.pw_name[0 .. strlen(pw.pw_name)].splitter(',');
|
||||
auto runtime_pw_uid = pw.pw_uid;
|
||||
auto runtime_pw_gid = pw.pw_gid;
|
||||
|
||||
// user identifiers from process
|
||||
vdebug("Process ID: ", pw);
|
||||
vdebug("User UID: ", runtime_pw_uid);
|
||||
vdebug("User GID: ", runtime_pw_gid);
|
||||
|
||||
// What should be returned as username?
|
||||
if (!runtime_pw_name.empty && runtime_pw_name.front.length){
|
||||
// user resolved
|
||||
vdebug("User Name: ", runtime_pw_name.front.idup);
|
||||
return runtime_pw_name.front.idup;
|
||||
} else {
|
||||
// Unknown user?
|
||||
vdebug("User Name: unknown");
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
void displayMemoryUsagePreGC()
|
||||
{
|
||||
// Display memory usage
|
||||
writeln("\nMemory Usage pre GC (bytes)");
|
||||
writeln("--------------------");
|
||||
writeln("memory usedSize = ", GC.stats.usedSize);
|
||||
writeln("memory freeSize = ", GC.stats.freeSize);
|
||||
// uncomment this if required, if not using LDC 1.16 as this does not exist in that version
|
||||
//writeln("memory allocatedInCurrentThread = ", GC.stats.allocatedInCurrentThread, "\n");
|
||||
}
|
||||
|
||||
void displayMemoryUsagePostGC()
|
||||
{
|
||||
// Display memory usage
|
||||
writeln("\nMemory Usage post GC (bytes)");
|
||||
writeln("--------------------");
|
||||
writeln("memory usedSize = ", GC.stats.usedSize);
|
||||
writeln("memory freeSize = ", GC.stats.freeSize);
|
||||
// uncomment this if required, if not using LDC 1.16 as this does not exist in that version
|
||||
//writeln("memory allocatedInCurrentThread = ", GC.stats.allocatedInCurrentThread, "\n");
|
||||
}
|
||||
|
|
2200
src/main.d
205
src/monitor.d
|
@ -1,15 +1,15 @@
|
|||
import core.sys.linux.sys.inotify;
|
||||
import core.stdc.errno;
|
||||
import core.sys.posix.poll, core.sys.posix.unistd;
|
||||
import std.exception, std.file, std.path, std.regex, std.stdio, std.string;
|
||||
import std.exception, std.file, std.path, std.regex, std.stdio, std.string, std.algorithm;
|
||||
import core.stdc.stdlib;
|
||||
import config;
|
||||
import selective;
|
||||
import util;
|
||||
static import log;
|
||||
|
||||
// relevant inotify events
|
||||
private immutable uint32_t mask = IN_CLOSE_WRITE | IN_CREATE | IN_DELETE |
|
||||
IN_MOVE | IN_IGNORED | IN_Q_OVERFLOW;
|
||||
private immutable uint32_t mask = IN_CLOSE_WRITE | IN_CREATE | IN_DELETE | IN_MOVE | IN_IGNORED | IN_Q_OVERFLOW;
|
||||
|
||||
class MonitorException: ErrnoException
|
||||
{
|
||||
|
@ -32,6 +32,8 @@ final class Monitor
|
|||
private void[] buffer;
|
||||
// skip symbolic links
|
||||
bool skip_symlinks;
|
||||
// check for .nosync if enabled
|
||||
bool check_nosync;
|
||||
|
||||
private SelectiveSync selectiveSync;
|
||||
|
||||
|
@ -46,16 +48,27 @@ final class Monitor
|
|||
this.selectiveSync = selectiveSync;
|
||||
}
|
||||
|
||||
void init(Config cfg, bool verbose, bool skip_symlinks)
|
||||
void init(Config cfg, bool verbose, bool skip_symlinks, bool check_nosync)
|
||||
{
|
||||
this.verbose = verbose;
|
||||
this.skip_symlinks = skip_symlinks;
|
||||
this.check_nosync = check_nosync;
|
||||
|
||||
assert(onDirCreated && onFileChanged && onDelete && onMove);
|
||||
fd = inotify_init();
|
||||
if (fd < 0) throw new MonitorException("inotify_init failed");
|
||||
if (!buffer) buffer = new void[4096];
|
||||
addRecursive(".");
|
||||
|
||||
// from which point do we start watching for changes?
|
||||
string monitorPath;
|
||||
if (cfg.getValueString("single_directory") != ""){
|
||||
// single directory in use, monitor only this
|
||||
monitorPath = "./" ~ cfg.getValueString("single_directory");
|
||||
} else {
|
||||
// default
|
||||
monitorPath = ".";
|
||||
}
|
||||
addRecursive(monitorPath);
|
||||
}
|
||||
|
||||
void shutdown()
|
||||
|
@ -66,12 +79,38 @@ final class Monitor
|
|||
|
||||
private void addRecursive(string dirname)
|
||||
{
|
||||
// skip filtered items
|
||||
// skip non existing/disappeared items
|
||||
if (!exists(dirname)) {
|
||||
log.vlog("Not adding non-existing/disappeared directory: ", dirname);
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip the monitoring of any user filtered items
|
||||
if (dirname != ".") {
|
||||
if (selectiveSync.isNameExcluded(baseName(dirname))) {
|
||||
return;
|
||||
// Is the directory name a match to a skip_dir entry?
|
||||
// The path that needs to be checked needs to include the '/'
|
||||
// This due to if the user has specified in skip_dir an exclusive path: '/path' - that is what must be matched
|
||||
if (isDir(dirname)) {
|
||||
if (selectiveSync.isDirNameExcluded(dirname.strip('.'))) {
|
||||
// dont add a watch for this item
|
||||
log.vdebug("Skipping monitoring due to skip_dir match: ", dirname);
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (selectiveSync.isPathExcluded(buildNormalizedPath(dirname))) {
|
||||
if (isFile(dirname)) {
|
||||
// Is the filename a match to a skip_file entry?
|
||||
// The path that needs to be checked needs to include the '/'
|
||||
// This due to if the user has specified in skip_file an exclusive path: '/path/file' - that is what must be matched
|
||||
if (selectiveSync.isFileNameExcluded(dirname.strip('.'))) {
|
||||
// dont add a watch for this item
|
||||
log.vdebug("Skipping monitoring due to skip_file match: ", dirname);
|
||||
return;
|
||||
}
|
||||
}
|
||||
// is the path exluded by sync_list?
|
||||
if (selectiveSync.isPathExcludedViaSyncList(buildNormalizedPath(dirname))) {
|
||||
// dont add a watch for this item
|
||||
log.vdebug("Skipping monitoring due to sync_list match: ", dirname);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -85,10 +124,51 @@ final class Monitor
|
|||
}
|
||||
}
|
||||
|
||||
// Do we need to check for .nosync? Only if check_nosync is true
|
||||
if (check_nosync) {
|
||||
if (exists(buildNormalizedPath(dirname) ~ "/.nosync")) {
|
||||
log.vlog("Skipping watching path - .nosync found & --check-for-nosync enabled: ", buildNormalizedPath(dirname));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// passed all potential exclusions
|
||||
// add inotify watch for this path / directory / file
|
||||
log.vdebug("Calling add() for this dirname: ", dirname);
|
||||
add(dirname);
|
||||
foreach(DirEntry entry; dirEntries(dirname, SpanMode.shallow, false)) {
|
||||
if (entry.isDir) {
|
||||
addRecursive(entry.name);
|
||||
|
||||
// if this is a directory, recursivly add this path
|
||||
if (isDir(dirname)) {
|
||||
// try and get all the directory entities for this path
|
||||
try {
|
||||
auto pathList = dirEntries(dirname, SpanMode.shallow, false);
|
||||
foreach(DirEntry entry; pathList) {
|
||||
if (entry.isDir) {
|
||||
log.vdebug("Calling addRecursive() for this directory: ", entry.name);
|
||||
addRecursive(entry.name);
|
||||
}
|
||||
}
|
||||
// catch any error which is generated
|
||||
} catch (std.file.FileException e) {
|
||||
// Standard filesystem error
|
||||
displayFileSystemErrorMessage(e.msg, getFunctionName!({}));
|
||||
return;
|
||||
} catch (Exception e) {
|
||||
// Issue #1154 handling
|
||||
// Need to check for: Failed to stat file in error message
|
||||
if (canFind(e.msg, "Failed to stat file")) {
|
||||
// File system access issue
|
||||
log.error("ERROR: The local file system returned an error with the following message:");
|
||||
log.error(" Error Message: ", e.msg);
|
||||
log.error("ACCESS ERROR: Please check your UID and GID access to this file, as the permissions on this file is preventing this application to read it");
|
||||
log.error("\nFATAL: Exiting application to avoid deleting data due to local file system access issues\n");
|
||||
// Must exit here
|
||||
exit(-1);
|
||||
} else {
|
||||
// some other error
|
||||
displayFileSystemErrorMessage(e.msg, getFunctionName!({}));
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -104,10 +184,35 @@ final class Monitor
|
|||
log.log("To change the current max number of watches to 524288 run:");
|
||||
log.log("sudo sysctl fs.inotify.max_user_watches=524288");
|
||||
}
|
||||
throw new MonitorException("inotify_add_watch failed");
|
||||
if (errno() == 13) {
|
||||
if ((selectiveSync.getSkipDotfiles()) && (selectiveSync.isDotFile(pathname))) {
|
||||
// no misleading output that we could not add a watch due to permission denied
|
||||
return;
|
||||
} else {
|
||||
log.vlog("WARNING: inotify_add_watch failed - permission denied: ", pathname);
|
||||
return;
|
||||
}
|
||||
}
|
||||
// Flag any other errors
|
||||
log.error("ERROR: inotify_add_watch failed: ", pathname);
|
||||
return;
|
||||
}
|
||||
|
||||
// Add path to inotify watch - required regardless if a '.folder' or 'folder'
|
||||
wdToDirName[wd] = buildNormalizedPath(pathname) ~ "/";
|
||||
log.vlog("Monitor directory: ", pathname);
|
||||
log.vdebug("inotify_add_watch successfully added for: ", pathname);
|
||||
|
||||
// Do we log that we are monitoring this directory?
|
||||
if (isDir(pathname)) {
|
||||
// This is a directory
|
||||
// is the path exluded if skip_dotfiles configured and path is a .folder?
|
||||
if ((selectiveSync.getSkipDotfiles()) && (selectiveSync.isDotFile(pathname))) {
|
||||
// no misleading output that we are monitoring this directory
|
||||
return;
|
||||
}
|
||||
// Log that this is directory is being monitored
|
||||
log.vlog("Monitor directory: ", pathname);
|
||||
}
|
||||
}
|
||||
|
||||
// remove a watch descriptor
|
||||
|
@ -139,6 +244,7 @@ final class Monitor
|
|||
{
|
||||
string path = wdToDirName[event.wd];
|
||||
if (event.len > 0) path ~= fromStringz(event.name.ptr);
|
||||
log.vdebug("inotify path event for: ", path);
|
||||
return path;
|
||||
}
|
||||
|
||||
|
@ -161,7 +267,38 @@ final class Monitor
|
|||
while (i < length) {
|
||||
inotify_event *event = cast(inotify_event*) &buffer[i];
|
||||
string path;
|
||||
string evalPath;
|
||||
// inotify event debug
|
||||
log.vdebug("inotify event wd: ", event.wd);
|
||||
log.vdebug("inotify event mask: ", event.mask);
|
||||
log.vdebug("inotify event cookie: ", event.cookie);
|
||||
log.vdebug("inotify event len: ", event.len);
|
||||
log.vdebug("inotify event name: ", event.name);
|
||||
if (event.mask & IN_ACCESS) log.vdebug("inotify event flag: IN_ACCESS");
|
||||
if (event.mask & IN_MODIFY) log.vdebug("inotify event flag: IN_MODIFY");
|
||||
if (event.mask & IN_ATTRIB) log.vdebug("inotify event flag: IN_ATTRIB");
|
||||
if (event.mask & IN_CLOSE_WRITE) log.vdebug("inotify event flag: IN_CLOSE_WRITE");
|
||||
if (event.mask & IN_CLOSE_NOWRITE) log.vdebug("inotify event flag: IN_CLOSE_NOWRITE");
|
||||
if (event.mask & IN_MOVED_FROM) log.vdebug("inotify event flag: IN_MOVED_FROM");
|
||||
if (event.mask & IN_MOVED_TO) log.vdebug("inotify event flag: IN_MOVED_TO");
|
||||
if (event.mask & IN_CREATE) log.vdebug("inotify event flag: IN_CREATE");
|
||||
if (event.mask & IN_DELETE) log.vdebug("inotify event flag: IN_DELETE");
|
||||
if (event.mask & IN_DELETE_SELF) log.vdebug("inotify event flag: IN_DELETE_SELF");
|
||||
if (event.mask & IN_MOVE_SELF) log.vdebug("inotify event flag: IN_MOVE_SELF");
|
||||
if (event.mask & IN_UNMOUNT) log.vdebug("inotify event flag: IN_UNMOUNT");
|
||||
if (event.mask & IN_Q_OVERFLOW) log.vdebug("inotify event flag: IN_Q_OVERFLOW");
|
||||
if (event.mask & IN_IGNORED) log.vdebug("inotify event flag: IN_IGNORED");
|
||||
if (event.mask & IN_CLOSE) log.vdebug("inotify event flag: IN_CLOSE");
|
||||
if (event.mask & IN_MOVE) log.vdebug("inotify event flag: IN_MOVE");
|
||||
if (event.mask & IN_ONLYDIR) log.vdebug("inotify event flag: IN_ONLYDIR");
|
||||
if (event.mask & IN_DONT_FOLLOW) log.vdebug("inotify event flag: IN_DONT_FOLLOW");
|
||||
if (event.mask & IN_EXCL_UNLINK) log.vdebug("inotify event flag: IN_EXCL_UNLINK");
|
||||
if (event.mask & IN_MASK_ADD) log.vdebug("inotify event flag: IN_MASK_ADD");
|
||||
if (event.mask & IN_ISDIR) log.vdebug("inotify event flag: IN_ISDIR");
|
||||
if (event.mask & IN_ONESHOT) log.vdebug("inotify event flag: IN_ONESHOT");
|
||||
if (event.mask & IN_ALL_EVENTS) log.vdebug("inotify event flag: IN_ALL_EVENTS");
|
||||
|
||||
// skip events that need to be ignored
|
||||
if (event.mask & IN_IGNORED) {
|
||||
// forget the directory associated to the watch descriptor
|
||||
wdToDirName.remove(event.wd);
|
||||
|
@ -170,18 +307,45 @@ final class Monitor
|
|||
throw new MonitorException("Inotify overflow, events missing");
|
||||
}
|
||||
|
||||
// skip filtered items
|
||||
// if the event is not to be ignored, obtain path
|
||||
path = getPath(event);
|
||||
if (selectiveSync.isNameExcluded(baseName(path))) {
|
||||
goto skip;
|
||||
// configure the skip_dir & skip skip_file comparison item
|
||||
evalPath = path.strip('.');
|
||||
|
||||
// Skip events that should be excluded based on application configuration
|
||||
// We cant use isDir or isFile as this information is missing from the inotify event itself
|
||||
// Thus this causes a segfault when attempting to query this - https://github.com/abraunegg/onedrive/issues/995
|
||||
|
||||
// Based on the 'type' of event & object type (directory or file) check that path against the 'right' user exclusions
|
||||
// Directory events should only be compared against skip_dir and file events should only be compared against skip_file
|
||||
if (event.mask & IN_ISDIR) {
|
||||
// The event in question contains IN_ISDIR event mask, thus highly likely this is an event on a directory
|
||||
// This due to if the user has specified in skip_dir an exclusive path: '/path' - that is what must be matched
|
||||
if (selectiveSync.isDirNameExcluded(evalPath)) {
|
||||
// The path to evaluate matches a path that the user has configured to skip
|
||||
goto skip;
|
||||
}
|
||||
} else {
|
||||
// The event in question missing the IN_ISDIR event mask, thus highly likely this is an event on a file
|
||||
// This due to if the user has specified in skip_file an exclusive path: '/path/file' - that is what must be matched
|
||||
if (selectiveSync.isFileNameExcluded(evalPath)) {
|
||||
// The path to evaluate matches a file that the user has configured to skip
|
||||
goto skip;
|
||||
}
|
||||
}
|
||||
if (selectiveSync.isPathExcluded(path)) {
|
||||
|
||||
// is the path, excluded via sync_list
|
||||
if (selectiveSync.isPathExcludedViaSyncList(path)) {
|
||||
// The path to evaluate matches a directory or file that the user has configured not to include in the sync
|
||||
goto skip;
|
||||
}
|
||||
|
||||
// handle the inotify events
|
||||
if (event.mask & IN_MOVED_FROM) {
|
||||
log.vdebug("event IN_MOVED_FROM: ", path);
|
||||
cookieToPath[event.cookie] = path;
|
||||
} else if (event.mask & IN_MOVED_TO) {
|
||||
log.vdebug("event IN_MOVED_TO: ", path);
|
||||
if (event.mask & IN_ISDIR) addRecursive(path);
|
||||
auto from = event.cookie in cookieToPath;
|
||||
if (from) {
|
||||
|
@ -196,15 +360,19 @@ final class Monitor
|
|||
}
|
||||
}
|
||||
} else if (event.mask & IN_CREATE) {
|
||||
log.vdebug("event IN_CREATE: ", path);
|
||||
if (event.mask & IN_ISDIR) {
|
||||
addRecursive(path);
|
||||
if (useCallbacks) onDirCreated(path);
|
||||
}
|
||||
} else if (event.mask & IN_DELETE) {
|
||||
log.vdebug("event IN_DELETE: ", path);
|
||||
if (useCallbacks) onDelete(path);
|
||||
} else if ((event.mask & IN_CLOSE_WRITE) && !(event.mask & IN_ISDIR)) {
|
||||
log.vdebug("event IN_CLOSE_WRITE and ...: ", path);
|
||||
if (useCallbacks) onFileChanged(path);
|
||||
} else {
|
||||
log.vdebug("event unhandled: ", path);
|
||||
assert(0);
|
||||
}
|
||||
|
||||
|
@ -213,6 +381,7 @@ final class Monitor
|
|||
}
|
||||
// assume that the items moved outside the watched directory have been deleted
|
||||
foreach (cookie, path; cookieToPath) {
|
||||
log.vdebug("deleting (post loop): ", path);
|
||||
if (useCallbacks) onDelete(path);
|
||||
remove(path);
|
||||
cookieToPath.remove(cookie);
|
||||
|
|
10
src/notifications/README
Normal file
|
@ -0,0 +1,10 @@
|
|||
The files in this directory have been obtained form the following places:
|
||||
|
||||
dnotify.d
|
||||
https://github.com/Dav1dde/dnotify/blob/master/dnotify.d
|
||||
License: Creative Commons Zro 1.0 Universal
|
||||
see https://github.com/Dav1dde/dnotify/blob/master/LICENSE
|
||||
|
||||
notify.d
|
||||
https://github.com/D-Programming-Deimos/libnotify/blob/master/deimos/notify/notify.d
|
||||
License: GNU Lesser General Public License (LGPL) 2.1 or upwards, see file
|
323
src/notifications/dnotify.d
Normal file
|
@ -0,0 +1,323 @@
|
|||
module dnotify;
|
||||
|
||||
private {
|
||||
import std.string : toStringz;
|
||||
import std.conv : to;
|
||||
import std.traits : isPointer, isArray;
|
||||
import std.variant : Variant;
|
||||
import std.array : appender;
|
||||
|
||||
import deimos.notify.notify;
|
||||
}
|
||||
|
||||
public import deimos.notify.notify : NOTIFY_EXPIRES_DEFAULT, NOTIFY_EXPIRES_NEVER,
|
||||
NotifyUrgency;
|
||||
|
||||
|
||||
version(NoPragma) {
|
||||
} else {
|
||||
pragma(lib, "notify");
|
||||
pragma(lib, "gmodule");
|
||||
pragma(lib, "glib-2.0");
|
||||
}
|
||||
|
||||
extern (C) {
|
||||
private void g_free(void* mem);
|
||||
private void g_list_free(GList* glist);
|
||||
}
|
||||
|
||||
version(NoGdk) {
|
||||
} else {
|
||||
version(NoPragma) {
|
||||
} else {
|
||||
pragma(lib, "gdk_pixbuf");
|
||||
}
|
||||
|
||||
private:
|
||||
extern (C) {
|
||||
GdkPixbuf* gdk_pixbuf_new_from_file(const(char)* filename, GError **error);
|
||||
}
|
||||
}
|
||||
|
||||
class NotificationError : Exception {
|
||||
string message;
|
||||
GError* gerror;
|
||||
|
||||
this(GError* gerror) {
|
||||
this.message = to!(string)(gerror.message);
|
||||
this.gerror = gerror;
|
||||
|
||||
super(this.message);
|
||||
}
|
||||
|
||||
this(string message) {
|
||||
this.message = message;
|
||||
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
|
||||
bool check_availability() {
|
||||
// notify_init might return without dbus server actually started
|
||||
// try to check for running dbus server
|
||||
char **ret_name;
|
||||
char **ret_vendor;
|
||||
char **ret_version;
|
||||
char **ret_spec_version;
|
||||
bool ret;
|
||||
try {
|
||||
return notify_get_server_info(ret_name, ret_vendor, ret_version, ret_spec_version);
|
||||
} catch (NotificationError e) {
|
||||
throw new NotificationError("Cannot find dbus server!");
|
||||
}
|
||||
}
|
||||
|
||||
void init(in char[] name) {
|
||||
notify_init(name.toStringz());
|
||||
}
|
||||
|
||||
alias notify_is_initted is_initted;
|
||||
alias notify_uninit uninit;
|
||||
|
||||
static this() {
|
||||
init(__FILE__);
|
||||
}
|
||||
|
||||
static ~this() {
|
||||
uninit();
|
||||
}
|
||||
|
||||
string get_app_name() {
|
||||
return to!(string)(notify_get_app_name());
|
||||
}
|
||||
|
||||
void set_app_name(in char[] app_name) {
|
||||
notify_set_app_name(app_name.toStringz());
|
||||
}
|
||||
|
||||
string[] get_server_caps() {
|
||||
auto result = appender!(string[])();
|
||||
|
||||
GList* list = notify_get_server_caps();
|
||||
if(list !is null) {
|
||||
for(GList* c = list; c !is null; c = c.next) {
|
||||
result.put(to!(string)(cast(char*)c.data));
|
||||
g_free(c.data);
|
||||
}
|
||||
|
||||
g_list_free(list);
|
||||
}
|
||||
|
||||
return result.data;
|
||||
}
|
||||
|
||||
struct ServerInfo {
|
||||
string name;
|
||||
string vendor;
|
||||
string version_;
|
||||
string spec_version;
|
||||
}
|
||||
|
||||
ServerInfo get_server_info() {
|
||||
char* name;
|
||||
char* vendor;
|
||||
char* version_;
|
||||
char* spec_version;
|
||||
notify_get_server_info(&name, &vendor, &version_, &spec_version);
|
||||
|
||||
scope(exit) {
|
||||
g_free(name);
|
||||
g_free(vendor);
|
||||
g_free(version_);
|
||||
g_free(spec_version);
|
||||
}
|
||||
|
||||
return ServerInfo(to!string(name), to!string(vendor), to!string(version_), to!string(spec_version));
|
||||
}
|
||||
|
||||
|
||||
struct Action {
|
||||
const(char[]) id;
|
||||
const(char[]) label;
|
||||
NotifyActionCallback callback;
|
||||
void* user_ptr;
|
||||
}
|
||||
|
||||
|
||||
class Notification {
|
||||
NotifyNotification* notify_notification;
|
||||
|
||||
const(char)[] summary;
|
||||
const(char)[] body_;
|
||||
const(char)[] icon;
|
||||
|
||||
bool closed = true;
|
||||
|
||||
private int _timeout = NOTIFY_EXPIRES_DEFAULT;
|
||||
const(char)[] _category;
|
||||
NotifyUrgency _urgency;
|
||||
GdkPixbuf* _image;
|
||||
Variant[const(char)[]] _hints;
|
||||
const(char)[] _app_name;
|
||||
Action[] _actions;
|
||||
|
||||
this(in char[] summary, in char[] body_, in char[] icon="")
|
||||
in { assert(is_initted(), "call dnotify.init() before using Notification"); }
|
||||
do {
|
||||
this.summary = summary;
|
||||
this.body_ = body_;
|
||||
this.icon = icon;
|
||||
notify_notification = notify_notification_new(summary.toStringz(), body_.toStringz(), icon.toStringz());
|
||||
}
|
||||
|
||||
bool update(in char[] summary, in char[] body_, in char[] icon="") {
|
||||
this.summary = summary;
|
||||
this.body_ = body_;
|
||||
this.icon = icon;
|
||||
return notify_notification_update(notify_notification, summary.toStringz(), body_.toStringz(), icon.toStringz());
|
||||
}
|
||||
|
||||
void show() {
|
||||
GError* ge;
|
||||
|
||||
if(!notify_notification_show(notify_notification, &ge)) {
|
||||
throw new NotificationError(ge);
|
||||
}
|
||||
}
|
||||
|
||||
@property int timeout() { return _timeout; }
|
||||
@property void timeout(int timeout) {
|
||||
this._timeout = timeout;
|
||||
notify_notification_set_timeout(notify_notification, timeout);
|
||||
}
|
||||
|
||||
@property const(char[]) category() { return _category; }
|
||||
@property void category(in char[] category) {
|
||||
this._category = category;
|
||||
notify_notification_set_category(notify_notification, category.toStringz());
|
||||
}
|
||||
|
||||
@property NotifyUrgency urgency() { return _urgency; }
|
||||
@property void urgency(NotifyUrgency urgency) {
|
||||
this._urgency = urgency;
|
||||
notify_notification_set_urgency(notify_notification, urgency);
|
||||
}
|
||||
|
||||
|
||||
void set_image(GdkPixbuf* pixbuf) {
|
||||
notify_notification_set_image_from_pixbuf(notify_notification, pixbuf);
|
||||
//_image = pixbuf;
|
||||
}
|
||||
|
||||
version(NoGdk) {
|
||||
} else {
|
||||
void set_image(in char[] filename) {
|
||||
GError* ge;
|
||||
// TODO: free pixbuf
|
||||
GdkPixbuf* pixbuf = gdk_pixbuf_new_from_file(filename.toStringz(), &ge);
|
||||
|
||||
if(pixbuf is null) {
|
||||
if(ge is null) {
|
||||
throw new NotificationError("Unable to load file: " ~ filename.idup);
|
||||
} else {
|
||||
throw new NotificationError(ge);
|
||||
}
|
||||
}
|
||||
assert(notify_notification !is null);
|
||||
notify_notification_set_image_from_pixbuf(notify_notification, pixbuf); // TODO: fix segfault
|
||||
//_image = pixbuf;
|
||||
}
|
||||
}
|
||||
|
||||
@property GdkPixbuf* image() { return _image; }
|
||||
|
||||
// using deprecated set_hint_* functions (GVariant is an opaque structure, which needs the glib)
|
||||
void set_hint(T)(in char[] key, T value) {
|
||||
static if(is(T == int)) {
|
||||
notify_notification_set_hint_int32(notify_notification, key, value);
|
||||
} else static if(is(T == uint)) {
|
||||
notify_notification_set_hint_uint32(notify_notification, key, value);
|
||||
} else static if(is(T == double)) {
|
||||
notify_notification_set_hint_double(notify_notification, key, value);
|
||||
} else static if(is(T : const(char)[])) {
|
||||
notify_notification_set_hint_string(notify_notification, key, value.toStringz());
|
||||
} else static if(is(T == ubyte)) {
|
||||
notify_notification_set_hint_byte(notify_notification, key, value);
|
||||
} else static if(is(T == ubyte[])) {
|
||||
notify_notification_set_hint_byte_array(notify_notification, key, value.ptr, value.length);
|
||||
} else {
|
||||
static assert(false, "unsupported value for Notification.set_hint");
|
||||
}
|
||||
|
||||
_hints[key] = Variant(value);
|
||||
}
|
||||
|
||||
// unset hint?
|
||||
|
||||
Variant get_hint(in char[] key) {
|
||||
return _hints[key];
|
||||
}
|
||||
|
||||
@property const(char)[] app_name() { return _app_name; }
|
||||
@property void app_name(in char[] name) {
|
||||
this._app_name = app_name;
|
||||
notify_notification_set_app_name(notify_notification, app_name.toStringz());
|
||||
}
|
||||
|
||||
void add_action(T)(in char[] action, in char[] label, NotifyActionCallback callback, T user_data) {
|
||||
static if(isPointer!T) {
|
||||
void* user_ptr = cast(void*)user_data;
|
||||
} else static if(isArray!T) {
|
||||
void* user_ptr = cast(void*)user_data.ptr;
|
||||
} else {
|
||||
void* user_ptr = cast(void*)&user_data;
|
||||
}
|
||||
|
||||
notify_notification_add_action(notify_notification, action.toStringz(), label.toStringz(),
|
||||
callback, user_ptr, null);
|
||||
|
||||
_actions ~= Action(action, label, callback, user_ptr);
|
||||
}
|
||||
|
||||
void add_action()(Action action) {
|
||||
notify_notification_add_action(notify_notification, action.id.toStringz(), action.label.toStringz(),
|
||||
action.callback, action.user_ptr, null);
|
||||
|
||||
_actions ~= action;
|
||||
}
|
||||
|
||||
@property Action[] actions() { return _actions; }
|
||||
|
||||
void clear_actions() {
|
||||
notify_notification_clear_actions(notify_notification);
|
||||
}
|
||||
|
||||
void close() {
|
||||
GError* ge;
|
||||
|
||||
if(!notify_notification_close(notify_notification, &ge)) {
|
||||
throw new NotificationError(ge);
|
||||
}
|
||||
}
|
||||
|
||||
@property int closed_reason() {
|
||||
return notify_notification_get_closed_reason(notify_notification);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
version(TestMain) {
|
||||
import std.stdio;
|
||||
|
||||
void main() {
|
||||
writeln(get_app_name());
|
||||
set_app_name("bla");
|
||||
writeln(get_app_name());
|
||||
writeln(get_server_caps());
|
||||
writeln(get_server_info());
|
||||
|
||||
auto n = new Notification("foo", "bar", "notification-message-im");
|
||||
n.timeout = 3;
|
||||
n.show();
|
||||
}
|
||||
}
|
195
src/notifications/notify.d
Normal file
|
@ -0,0 +1,195 @@
|
|||
/**
|
||||
* Copyright (C) 2004-2006 Christian Hammond
|
||||
* Copyright (C) 2010 Red Hat, Inc.
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
* Boston, MA 02111-1307, USA.
|
||||
*/
|
||||
|
||||
module deimos.notify.notify;
|
||||
|
||||
|
||||
enum NOTIFY_VERSION_MAJOR = 0;
|
||||
enum NOTIFY_VERSION_MINOR = 7;
|
||||
enum NOTIFY_VERSION_MICRO = 5;
|
||||
|
||||
template NOTIFY_CHECK_VERSION(int major, int minor, int micro) {
|
||||
enum NOTIFY_CHECK_VERSION = ((NOTIFY_VERSION_MAJOR > major) ||
|
||||
(NOTIFY_VERSION_MAJOR == major && NOTIFY_VERSION_MINOR > minor) ||
|
||||
(NOTIFY_VERSION_MAJOR == major && NOTIFY_VERSION_MINOR == minor &&
|
||||
NOTIFY_VERSION_MICRO >= micro));
|
||||
}
|
||||
|
||||
|
||||
alias ulong GType;
|
||||
alias void function(void*) GFreeFunc;
|
||||
|
||||
struct GError {
|
||||
uint domain;
|
||||
int code;
|
||||
char* message;
|
||||
}
|
||||
|
||||
struct GList {
|
||||
void* data;
|
||||
GList* next;
|
||||
GList* prev;
|
||||
}
|
||||
|
||||
// dummies
|
||||
struct GdkPixbuf {}
|
||||
struct GObject {}
|
||||
struct GObjectClass {}
|
||||
struct GVariant {}
|
||||
|
||||
GType notify_urgency_get_type();
|
||||
|
||||
/**
|
||||
* NOTIFY_EXPIRES_DEFAULT:
|
||||
*
|
||||
* The default expiration time on a notification.
|
||||
*/
|
||||
enum NOTIFY_EXPIRES_DEFAULT = -1;
|
||||
|
||||
/**
|
||||
* NOTIFY_EXPIRES_NEVER:
|
||||
*
|
||||
* The notification never expires. It stays open until closed by the calling API
|
||||
* or the user.
|
||||
*/
|
||||
enum NOTIFY_EXPIRES_NEVER = 0;
|
||||
|
||||
// #define NOTIFY_TYPE_NOTIFICATION (notify_notification_get_type ())
|
||||
// #define NOTIFY_NOTIFICATION(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), NOTIFY_TYPE_NOTIFICATION, NotifyNotification))
|
||||
// #define NOTIFY_NOTIFICATION_CLASS(k) (G_TYPE_CHECK_CLASS_CAST((k), NOTIFY_TYPE_NOTIFICATION, NotifyNotificationClass))
|
||||
// #define NOTIFY_IS_NOTIFICATION(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), NOTIFY_TYPE_NOTIFICATION))
|
||||
// #define NOTIFY_IS_NOTIFICATION_CLASS(k) (G_TYPE_CHECK_CLASS_TYPE ((k), NOTIFY_TYPE_NOTIFICATION))
|
||||
// #define NOTIFY_NOTIFICATION_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), NOTIFY_TYPE_NOTIFICATION, NotifyNotificationClass))
|
||||
|
||||
extern (C) {
|
||||
struct NotifyNotificationPrivate;
|
||||
|
||||
struct NotifyNotification {
|
||||
/*< private >*/
|
||||
GObject parent_object;
|
||||
|
||||
NotifyNotificationPrivate *priv;
|
||||
}
|
||||
|
||||
struct NotifyNotificationClass {
|
||||
GObjectClass parent_class;
|
||||
|
||||
/* Signals */
|
||||
void function(NotifyNotification *notification) closed;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* NotifyUrgency:
|
||||
* @NOTIFY_URGENCY_LOW: Low urgency. Used for unimportant notifications.
|
||||
* @NOTIFY_URGENCY_NORMAL: Normal urgency. Used for most standard notifications.
|
||||
* @NOTIFY_URGENCY_CRITICAL: Critical urgency. Used for very important notifications.
|
||||
*
|
||||
* The urgency level of the notification.
|
||||
*/
|
||||
enum NotifyUrgency {
|
||||
NOTIFY_URGENCY_LOW,
|
||||
NOTIFY_URGENCY_NORMAL,
|
||||
NOTIFY_URGENCY_CRITICAL,
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* NotifyActionCallback:
|
||||
* @notification:
|
||||
* @action:
|
||||
* @user_data:
|
||||
*
|
||||
* An action callback function.
|
||||
*/
|
||||
alias void function(NotifyNotification* notification, char* action, void* user_data) NotifyActionCallback;
|
||||
|
||||
|
||||
GType notify_notification_get_type();
|
||||
|
||||
NotifyNotification* notify_notification_new(const(char)* summary, const(char)* body_, const(char)* icon);
|
||||
|
||||
bool notify_notification_update(NotifyNotification* notification, const(char)* summary, const(char)* body_, const(char)* icon);
|
||||
|
||||
bool notify_notification_show(NotifyNotification* notification, GError** error);
|
||||
|
||||
void notify_notification_set_timeout(NotifyNotification* notification, int timeout);
|
||||
|
||||
void notify_notification_set_category(NotifyNotification* notification, const(char)* category);
|
||||
|
||||
void notify_notification_set_urgency(NotifyNotification* notification, NotifyUrgency urgency);
|
||||
|
||||
void notify_notification_set_image_from_pixbuf(NotifyNotification* notification, GdkPixbuf* pixbuf);
|
||||
|
||||
void notify_notification_set_icon_from_pixbuf(NotifyNotification* notification, GdkPixbuf* icon);
|
||||
|
||||
void notify_notification_set_hint_int32(NotifyNotification* notification, const(char)* key, int value);
|
||||
void notify_notification_set_hint_uint32(NotifyNotification* notification, const(char)* key, uint value);
|
||||
|
||||
void notify_notification_set_hint_double(NotifyNotification* notification, const(char)* key, double value);
|
||||
|
||||
void notify_notification_set_hint_string(NotifyNotification* notification, const(char)* key, const(char)* value);
|
||||
|
||||
void notify_notification_set_hint_byte(NotifyNotification* notification, const(char)* key, ubyte value);
|
||||
|
||||
void notify_notification_set_hint_byte_array(NotifyNotification* notification, const(char)* key, const(ubyte)* value, ulong len);
|
||||
|
||||
void notify_notification_set_hint(NotifyNotification* notification, const(char)* key, GVariant* value);
|
||||
|
||||
void notify_notification_set_app_name(NotifyNotification* notification, const(char)* app_name);
|
||||
|
||||
void notify_notification_clear_hints(NotifyNotification* notification);
|
||||
|
||||
void notify_notification_add_action(NotifyNotification* notification, const(char)* action, const(char)* label,
|
||||
NotifyActionCallback callback, void* user_data, GFreeFunc free_func);
|
||||
|
||||
void notify_notification_clear_actions(NotifyNotification* notification);
|
||||
bool notify_notification_close(NotifyNotification* notification, GError** error);
|
||||
|
||||
int notify_notification_get_closed_reason(const NotifyNotification* notification);
|
||||
|
||||
|
||||
|
||||
bool notify_init(const(char)* app_name);
|
||||
void notify_uninit();
|
||||
bool notify_is_initted();
|
||||
|
||||
const(char)* notify_get_app_name();
|
||||
void notify_set_app_name(const(char)* app_name);
|
||||
|
||||
GList *notify_get_server_caps();
|
||||
|
||||
bool notify_get_server_info(char** ret_name, char** ret_vendor, char** ret_version, char** ret_spec_version);
|
||||
}
|
||||
|
||||
version(MainTest) {
|
||||
import std.string;
|
||||
|
||||
void main() {
|
||||
|
||||
notify_init("test".toStringz());
|
||||
|
||||
auto n = notify_notification_new("summary".toStringz(), "body".toStringz(), "none".toStringz());
|
||||
GError* ge;
|
||||
notify_notification_show(n, &ge);
|
||||
|
||||
scope(success) notify_uninit();
|
||||
}
|
||||
}
|
1427
src/onedrive.d
|
@ -22,12 +22,15 @@ class Progress
|
|||
|
||||
|
||||
size_t getTerminalWidth() {
|
||||
size_t column;
|
||||
winsize ws;
|
||||
if(ioctl(STDOUT_FILENO, TIOCGWINSZ, &ws) != -1) {
|
||||
column = ws.ws_col;
|
||||
size_t column = default_width;
|
||||
version (CRuntime_Musl) {
|
||||
} else version(Android) {
|
||||
} else {
|
||||
winsize ws;
|
||||
if(ioctl(STDOUT_FILENO, TIOCGWINSZ, &ws) != -1 && ws.ws_col > 0) {
|
||||
column = ws.ws_col;
|
||||
}
|
||||
}
|
||||
if(column == 0) column = default_width;
|
||||
|
||||
return column;
|
||||
}
|
||||
|
@ -80,7 +83,7 @@ class Progress
|
|||
|
||||
header.formattedWrite("%s %3d%% |", caption, cast(int)(ratio * 100));
|
||||
|
||||
if(counter <= 1 || ratio == 0.0) {
|
||||
if(counter <= 0 || ratio == 0.0) {
|
||||
footer.formattedWrite("| ETA --:--:--:");
|
||||
} else {
|
||||
int h, m, s;
|
||||
|
@ -112,7 +115,7 @@ class Progress
|
|||
this(size_t iterations) {
|
||||
if(iterations <= 0) iterations = 1;
|
||||
|
||||
counter = 0;
|
||||
counter = -1;
|
||||
this.iterations = iterations;
|
||||
start_time = Clock.currTime.toUnixTime;
|
||||
}
|
||||
|
@ -138,7 +141,7 @@ class Progress
|
|||
}
|
||||
|
||||
void reset() {
|
||||
counter = 0;
|
||||
counter = -1;
|
||||
start_time = Clock.currTime.toUnixTime;
|
||||
}
|
||||
|
||||
|
|
12
src/qxor.d
|
@ -1,15 +1,15 @@
|
|||
import std.algorithm;
|
||||
import std.digest.digest;
|
||||
import std.digest;
|
||||
|
||||
// implementation of the QuickXorHash algorithm in D
|
||||
// https://github.com/OneDrive/onedrive-api-docs/blob/live/docs/code-snippets/quickxorhash.md
|
||||
struct QuickXor
|
||||
{
|
||||
private immutable int widthInBits = 160;
|
||||
private immutable size_t lengthInBytes = (widthInBits - 1) / 8 + 1;
|
||||
private immutable size_t lengthInQWords = (widthInBits - 1) / 64 + 1;
|
||||
private immutable int bitsInLastCell = widthInBits % 64; // 32
|
||||
private immutable int shift = 11;
|
||||
private enum int widthInBits = 160;
|
||||
private enum size_t lengthInBytes = (widthInBits - 1) / 8 + 1;
|
||||
private enum size_t lengthInQWords = (widthInBits - 1) / 64 + 1;
|
||||
private enum int bitsInLastCell = widthInBits % 64; // 32
|
||||
private enum int shift = 11;
|
||||
|
||||
private ulong[lengthInQWords] _data;
|
||||
private ulong _lengthSoFar;
|
||||
|
|
377
src/selective.d
|
@ -4,42 +4,214 @@ import std.file;
|
|||
import std.path;
|
||||
import std.regex;
|
||||
import std.stdio;
|
||||
import std.string;
|
||||
import util;
|
||||
import log;
|
||||
|
||||
final class SelectiveSync
|
||||
{
|
||||
private string[] paths;
|
||||
private string[] businessSharedFoldersList;
|
||||
private Regex!char mask;
|
||||
private Regex!char dirmask;
|
||||
private bool skipDirStrictMatch = false;
|
||||
private bool skipDotfiles = false;
|
||||
|
||||
// load sync_list file
|
||||
void load(string filepath)
|
||||
{
|
||||
if (exists(filepath)) {
|
||||
paths = File(filepath)
|
||||
.byLine()
|
||||
.map!(a => buildNormalizedPath(a))
|
||||
.filter!(a => a.length > 0)
|
||||
.array;
|
||||
// open file as read only
|
||||
auto file = File(filepath, "r");
|
||||
auto range = file.byLine();
|
||||
foreach (line; range) {
|
||||
// Skip comments in file
|
||||
if (line.length == 0 || line[0] == ';' || line[0] == '#') continue;
|
||||
paths ~= buildNormalizedPath(line);
|
||||
}
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
|
||||
void setMask(const(char)[] mask)
|
||||
// Configure skipDirStrictMatch if function is called
|
||||
// By default, skipDirStrictMatch = false;
|
||||
void setSkipDirStrictMatch()
|
||||
{
|
||||
skipDirStrictMatch = true;
|
||||
}
|
||||
|
||||
// load business_shared_folders file
|
||||
void loadSharedFolders(string filepath)
|
||||
{
|
||||
if (exists(filepath)) {
|
||||
// open file as read only
|
||||
auto file = File(filepath, "r");
|
||||
auto range = file.byLine();
|
||||
foreach (line; range) {
|
||||
// Skip comments in file
|
||||
if (line.length == 0 || line[0] == ';' || line[0] == '#') continue;
|
||||
businessSharedFoldersList ~= buildNormalizedPath(line);
|
||||
}
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
|
||||
void setFileMask(const(char)[] mask)
|
||||
{
|
||||
this.mask = wild2regex(mask);
|
||||
}
|
||||
|
||||
// config file skip_file parameter
|
||||
bool isNameExcluded(string name)
|
||||
void setDirMask(const(char)[] dirmask)
|
||||
{
|
||||
// Does the file match skip_file config entry?
|
||||
// Returns true if the file matches a skip_file config entry
|
||||
// Returns false if no match
|
||||
return !name.matchFirst(mask).empty;
|
||||
this.dirmask = wild2regex(dirmask);
|
||||
}
|
||||
|
||||
// config sync_list file handling
|
||||
bool isPathExcluded(string path)
|
||||
// Configure skipDotfiles if function is called
|
||||
// By default, skipDotfiles = false;
|
||||
void setSkipDotfiles()
|
||||
{
|
||||
return .isPathExcluded(path, paths) || .isPathMatched(path, mask);
|
||||
skipDotfiles = true;
|
||||
}
|
||||
|
||||
// return value of skipDotfiles
|
||||
bool getSkipDotfiles()
|
||||
{
|
||||
return skipDotfiles;
|
||||
}
|
||||
|
||||
// config file skip_dir parameter
|
||||
bool isDirNameExcluded(string name)
|
||||
{
|
||||
// Does the directory name match skip_dir config entry?
|
||||
// Returns true if the name matches a skip_dir config entry
|
||||
// Returns false if no match
|
||||
log.vdebug("skip_dir evaluation for: ", name);
|
||||
|
||||
// Try full path match first
|
||||
if (!name.matchFirst(dirmask).empty) {
|
||||
log.vdebug("'!name.matchFirst(dirmask).empty' returned true = matched");
|
||||
return true;
|
||||
} else {
|
||||
// Do we check the base name as well?
|
||||
if (!skipDirStrictMatch) {
|
||||
log.vdebug("No Strict Matching Enforced");
|
||||
|
||||
// Test the entire path working backwards from child
|
||||
string path = buildNormalizedPath(name);
|
||||
string checkPath;
|
||||
auto paths = pathSplitter(path);
|
||||
|
||||
foreach_reverse(directory; paths) {
|
||||
if (directory != "/") {
|
||||
// This will add a leading '/' but that needs to be stripped to check
|
||||
checkPath = "/" ~ directory ~ checkPath;
|
||||
if(!checkPath.strip('/').matchFirst(dirmask).empty) {
|
||||
log.vdebug("'!checkPath.matchFirst(dirmask).empty' returned true = matched");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.vdebug("Strict Matching Enforced - No Match");
|
||||
}
|
||||
}
|
||||
// no match
|
||||
return false;
|
||||
}
|
||||
|
||||
// config file skip_file parameter
|
||||
bool isFileNameExcluded(string name)
|
||||
{
|
||||
// Does the file name match skip_file config entry?
|
||||
// Returns true if the name matches a skip_file config entry
|
||||
// Returns false if no match
|
||||
log.vdebug("skip_file evaluation for: ", name);
|
||||
|
||||
// Try full path match first
|
||||
if (!name.matchFirst(mask).empty) {
|
||||
return true;
|
||||
} else {
|
||||
// check just the file name
|
||||
string filename = baseName(name);
|
||||
if(!filename.matchFirst(mask).empty) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// no match
|
||||
return false;
|
||||
}
|
||||
|
||||
// Match against sync_list only
|
||||
bool isPathExcludedViaSyncList(string path)
|
||||
{
|
||||
// Debug output that we are performing a 'sync_list' inclusion / exclusion test
|
||||
return .isPathExcluded(path, paths);
|
||||
}
|
||||
|
||||
// Match against skip_dir, skip_file & sync_list entries
|
||||
bool isPathExcludedMatchAll(string path)
|
||||
{
|
||||
return .isPathExcluded(path, paths) || .isPathMatched(path, mask) || .isPathMatched(path, dirmask);
|
||||
}
|
||||
|
||||
// is the path a dotfile?
|
||||
bool isDotFile(string path)
|
||||
{
|
||||
// always allow the root
|
||||
if (path == ".") return false;
|
||||
|
||||
path = buildNormalizedPath(path);
|
||||
auto paths = pathSplitter(path);
|
||||
foreach(base; paths) {
|
||||
if (startsWith(base, ".")){
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// is business shared folder matched
|
||||
bool isSharedFolderMatched(string name)
|
||||
{
|
||||
// if there are no shared folder always return false
|
||||
if (businessSharedFoldersList.empty) return false;
|
||||
|
||||
if (!name.matchFirst(businessSharedFoldersList).empty) {
|
||||
return true;
|
||||
} else {
|
||||
// try a direct comparison just in case
|
||||
foreach (userFolder; businessSharedFoldersList) {
|
||||
if (userFolder == name) {
|
||||
// direct match
|
||||
log.vdebug("'matchFirst' failed to match, however direct comparison was matched: ", name);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// is business shared folder included
|
||||
bool isPathIncluded(string path, string[] allowedPaths)
|
||||
{
|
||||
// always allow the root
|
||||
if (path == ".") return true;
|
||||
// if there are no allowed paths always return true
|
||||
if (allowedPaths.empty) return true;
|
||||
|
||||
path = buildNormalizedPath(path);
|
||||
foreach (allowed; allowedPaths) {
|
||||
auto comm = commonPrefix(path, allowed);
|
||||
if (comm.length == path.length) {
|
||||
// the given path is contained in an allowed path
|
||||
return true;
|
||||
}
|
||||
if (comm.length == allowed.length && path[comm.length] == '/') {
|
||||
// the given path is a subitem of an allowed path
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -47,24 +219,178 @@ final class SelectiveSync
|
|||
// if there are no allowed paths always return false
|
||||
private bool isPathExcluded(string path, string[] allowedPaths)
|
||||
{
|
||||
// function variables
|
||||
bool exclude = false;
|
||||
bool exludeDirectMatch = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool excludeMatched = false; // will get updated to true, if there is a pattern match to sync_list entry
|
||||
bool finalResult = true; // will get updated to false, if pattern match to sync_list entry
|
||||
int offset;
|
||||
string wildcard = "*";
|
||||
|
||||
// always allow the root
|
||||
if (path == ".") return false;
|
||||
// if there are no allowed paths always return false
|
||||
if (allowedPaths.empty) return false;
|
||||
|
||||
path = buildNormalizedPath(path);
|
||||
foreach (allowed; allowedPaths) {
|
||||
auto comm = commonPrefix(path, allowed);
|
||||
if (comm.length == path.length) {
|
||||
// the given path is contained in an allowed path
|
||||
return false;
|
||||
log.vdebug("Evaluation against 'sync_list' for this path: ", path);
|
||||
log.vdebug("[S]exclude = ", exclude);
|
||||
log.vdebug("[S]exludeDirectMatch = ", exludeDirectMatch);
|
||||
log.vdebug("[S]excludeMatched = ", excludeMatched);
|
||||
|
||||
// unless path is an exact match, entire sync_list entries need to be processed to ensure
|
||||
// negative matches are also correctly detected
|
||||
foreach (allowedPath; allowedPaths) {
|
||||
// is this an inclusion path or finer grained exclusion?
|
||||
switch (allowedPath[0]) {
|
||||
case '-':
|
||||
// sync_list path starts with '-', this user wants to exclude this path
|
||||
exclude = true;
|
||||
// If the sync_list entry starts with '-/' offset needs to be 2, else 1
|
||||
if (startsWith(allowedPath, "-/")){
|
||||
// Offset needs to be 2
|
||||
offset = 2;
|
||||
} else {
|
||||
// Offset needs to be 1
|
||||
offset = 1;
|
||||
}
|
||||
break;
|
||||
case '!':
|
||||
// sync_list path starts with '!', this user wants to exclude this path
|
||||
exclude = true;
|
||||
// If the sync_list entry starts with '!/' offset needs to be 2, else 1
|
||||
if (startsWith(allowedPath, "!/")){
|
||||
// Offset needs to be 2
|
||||
offset = 2;
|
||||
} else {
|
||||
// Offset needs to be 1
|
||||
offset = 1;
|
||||
}
|
||||
break;
|
||||
case '/':
|
||||
// sync_list path starts with '/', this user wants to include this path
|
||||
// but a '/' at the start causes matching issues, so use the offset for comparison
|
||||
exclude = false;
|
||||
offset = 1;
|
||||
break;
|
||||
|
||||
default:
|
||||
// no negative pattern, default is to not exclude
|
||||
exclude = false;
|
||||
offset = 0;
|
||||
}
|
||||
if (comm.length == allowed.length && path[comm.length] == '/') {
|
||||
// the given path is a subitem of an allowed path
|
||||
return false;
|
||||
|
||||
// What are we comparing against?
|
||||
log.vdebug("Evaluation against 'sync_list' entry: ", allowedPath);
|
||||
|
||||
// Generate the common prefix from the path vs the allowed path
|
||||
auto comm = commonPrefix(path, allowedPath[offset..$]);
|
||||
|
||||
// Is path is an exact match of the allowed path?
|
||||
if (comm.length == path.length) {
|
||||
// we have a potential exact match
|
||||
// strip any potential '/*' from the allowed path, to avoid a potential lesser common match
|
||||
string strippedAllowedPath = strip(allowedPath[offset..$], "/*");
|
||||
|
||||
if (path == strippedAllowedPath) {
|
||||
// we have an exact path match
|
||||
log.vdebug("exact path match");
|
||||
if (!exclude) {
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match");
|
||||
finalResult = false;
|
||||
// direct match, break and go sync
|
||||
break;
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match - path to be excluded");
|
||||
// do not set excludeMatched = true here, otherwise parental path also gets excluded
|
||||
// flag exludeDirectMatch so that a 'wildcard match' will not override this exclude
|
||||
exludeDirectMatch = true;
|
||||
// final result
|
||||
finalResult = true;
|
||||
}
|
||||
} else {
|
||||
// no exact path match, but something common does match
|
||||
log.vdebug("something 'common' matches the input path");
|
||||
auto splitAllowedPaths = pathSplitter(strippedAllowedPath);
|
||||
string pathToEvaluate = "";
|
||||
foreach(base; splitAllowedPaths) {
|
||||
pathToEvaluate ~= base;
|
||||
if (path == pathToEvaluate) {
|
||||
// The input path matches what we want to evaluate against as a direct match
|
||||
if (!exclude) {
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match for parental path item");
|
||||
finalResult = false;
|
||||
// direct match, break and go sync
|
||||
break;
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' result: direct match for parental path item but to be excluded");
|
||||
finalResult = true;
|
||||
// do not set excludeMatched = true here, otherwise parental path also gets excluded
|
||||
}
|
||||
}
|
||||
pathToEvaluate ~= dirSeparator;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Is path is a subitem/sub-folder of the allowed path?
|
||||
if (comm.length == allowedPath[offset..$].length) {
|
||||
// The given path is potentially a subitem of an allowed path
|
||||
// We want to capture sub-folders / files of allowed paths here, but not explicitly match other items
|
||||
// if there is no wildcard
|
||||
auto subItemPathCheck = allowedPath[offset..$] ~ "/";
|
||||
if (canFind(path, subItemPathCheck)) {
|
||||
// The 'path' includes the allowed path, and is 'most likely' a sub-path item
|
||||
if (!exclude) {
|
||||
log.vdebug("Evaluation against 'sync_list' result: parental path match");
|
||||
finalResult = false;
|
||||
// parental path matches, break and go sync
|
||||
break;
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' result: parental path match but must be excluded");
|
||||
finalResult = true;
|
||||
excludeMatched = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Does the allowed path contain a wildcard? (*)
|
||||
if (canFind(allowedPath[offset..$], wildcard)) {
|
||||
// allowed path contains a wildcard
|
||||
// manually replace '*' for '.*' to be compatible with regex
|
||||
string regexCompatiblePath = replace(allowedPath[offset..$], "*", ".*");
|
||||
auto allowedMask = regex(regexCompatiblePath);
|
||||
if (matchAll(path, allowedMask)) {
|
||||
// regex wildcard evaluation matches
|
||||
// if we have a prior pattern match for an exclude, excludeMatched = true
|
||||
if (!exclude && !excludeMatched && !exludeDirectMatch) {
|
||||
// nothing triggered an exclusion before evaluation against wildcard match attempt
|
||||
log.vdebug("Evaluation against 'sync_list' result: wildcard pattern match");
|
||||
finalResult = false;
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' result: wildcard pattern matched but must be excluded");
|
||||
finalResult = true;
|
||||
excludeMatched = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
// Interim results
|
||||
log.vdebug("[F]exclude = ", exclude);
|
||||
log.vdebug("[F]exludeDirectMatch = ", exludeDirectMatch);
|
||||
log.vdebug("[F]excludeMatched = ", excludeMatched);
|
||||
|
||||
// If exclude or excludeMatched is true, then finalResult has to be true
|
||||
if ((exclude) || (excludeMatched) || (exludeDirectMatch)) {
|
||||
finalResult = true;
|
||||
}
|
||||
|
||||
// results
|
||||
if (finalResult) {
|
||||
log.vdebug("Evaluation against 'sync_list' final result: EXCLUDED");
|
||||
} else {
|
||||
log.vdebug("Evaluation against 'sync_list' final result: included for sync");
|
||||
}
|
||||
return finalResult;
|
||||
}
|
||||
|
||||
// test if the given path is matched by the regex expression.
|
||||
|
@ -85,6 +411,7 @@ private bool isPathMatched(string path, Regex!char mask) {
|
|||
return false;
|
||||
}
|
||||
|
||||
// unit tests
|
||||
unittest
|
||||
{
|
||||
assert(isPathExcluded("Documents2", ["Documents"]));
|
||||
|
|
26
src/sqlite.d
|
@ -3,6 +3,7 @@ import std.stdio;
|
|||
import etc.c.sqlite3;
|
||||
import std.string: fromStringz, toStringz;
|
||||
import core.stdc.stdlib;
|
||||
import std.conv;
|
||||
static import log;
|
||||
|
||||
extern (C) immutable(char)* sqlite3_errstr(int); // missing from the std library
|
||||
|
@ -46,6 +47,22 @@ struct Database
|
|||
close();
|
||||
}
|
||||
|
||||
int db_checkpoint()
|
||||
{
|
||||
return sqlite3_wal_checkpoint(pDb, null);
|
||||
}
|
||||
|
||||
void dump_open_statements()
|
||||
{
|
||||
log.log("Dumpint open statements: \n");
|
||||
auto p = sqlite3_next_stmt(pDb, null);
|
||||
while (p != null) {
|
||||
log.log (" - " ~ ifromStringz(sqlite3_sql(p)) ~ "\n");
|
||||
p = sqlite3_next_stmt(pDb, p);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void open(const(char)[] filename)
|
||||
{
|
||||
// https://www.sqlite.org/c3ref/open.html
|
||||
|
@ -70,6 +87,7 @@ struct Database
|
|||
int rc = sqlite3_exec(pDb, toStringz(sql), null, null, null);
|
||||
if (rc != SQLITE_OK) {
|
||||
log.error("\nA database execution error occurred: "~ getErrorMessage() ~ "\n");
|
||||
log.error("Please retry your command with --resync to fix any local database corruption issues.\n");
|
||||
close();
|
||||
exit(-1);
|
||||
}
|
||||
|
@ -158,15 +176,17 @@ struct Statement
|
|||
row.length = 0;
|
||||
} else if (rc == SQLITE_ROW) {
|
||||
// https://www.sqlite.org/c3ref/data_count.html
|
||||
int count = sqlite3_data_count(pStmt);
|
||||
int count = 0;
|
||||
count = sqlite3_data_count(pStmt);
|
||||
row = new const(char)[][count];
|
||||
foreach (int i, ref column; row) {
|
||||
foreach (size_t i, ref column; row) {
|
||||
// https://www.sqlite.org/c3ref/column_blob.html
|
||||
column = fromStringz(sqlite3_column_text(pStmt, i));
|
||||
column = fromStringz(sqlite3_column_text(pStmt, to!int(i)));
|
||||
}
|
||||
} else {
|
||||
string errorMessage = ifromStringz(sqlite3_errmsg(sqlite3_db_handle(pStmt)));
|
||||
log.error("\nA database statement execution error occurred: "~ errorMessage ~ "\n");
|
||||
log.error("Please retry your command with --resync to fix any local database corruption issues.\n");
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
|
|
7041
src/sync.d
246
src/upload.d
|
@ -1,5 +1,5 @@
|
|||
import std.algorithm, std.conv, std.datetime, std.file, std.json;
|
||||
import std.stdio, core.thread;
|
||||
import std.stdio, core.thread, std.string;
|
||||
import progress, onedrive, util;
|
||||
static import log;
|
||||
|
||||
|
@ -39,10 +39,20 @@ struct UploadSession
|
|||
])
|
||||
];
|
||||
|
||||
// Try to create the upload session for this file
|
||||
session = onedrive.createUploadSession(parentDriveId, parentId, filename, eTag, fileSystemInfo);
|
||||
session["localPath"] = localPath;
|
||||
save();
|
||||
return upload();
|
||||
|
||||
if ("uploadUrl" in session){
|
||||
session["localPath"] = localPath;
|
||||
save();
|
||||
return upload();
|
||||
} else {
|
||||
// there was an error
|
||||
log.vlog("Create file upload session failed ... skipping file upload");
|
||||
// return upload() will return a JSONValue response, create an empty JSONValue response to return
|
||||
JSONValue response;
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
/* Restore the previous upload session.
|
||||
|
@ -52,7 +62,16 @@ struct UploadSession
|
|||
{
|
||||
if (exists(sessionFilePath)) {
|
||||
log.vlog("Trying to restore the upload session ...");
|
||||
session = readText(sessionFilePath).parseJSON();
|
||||
// We cant use JSONType.object check, as this is currently a string
|
||||
// We cant use a try & catch block, as it does not catch std.json.JSONException
|
||||
auto sessionFileText = readText(sessionFilePath);
|
||||
if(canFind(sessionFileText,"@odata.context")) {
|
||||
session = readText(sessionFilePath).parseJSON();
|
||||
} else {
|
||||
log.vlog("Upload session resume data is invalid");
|
||||
remove(sessionFilePath);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check the session resume file for expirationDateTime
|
||||
if ("expirationDateTime" in session){
|
||||
|
@ -75,30 +94,55 @@ struct UploadSession
|
|||
try {
|
||||
response = onedrive.requestUploadStatus(session["uploadUrl"].str);
|
||||
} catch (OneDriveException e) {
|
||||
// handle any onedrive error response
|
||||
if (e.httpStatusCode == 400) {
|
||||
log.vlog("Upload session not found");
|
||||
return false;
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
session["expirationDateTime"] = response["expirationDateTime"];
|
||||
session["nextExpectedRanges"] = response["nextExpectedRanges"];
|
||||
if (session["nextExpectedRanges"].array.length == 0) {
|
||||
log.vlog("The upload session is completed");
|
||||
|
||||
// do we have a valid response from OneDrive?
|
||||
if (response.type() == JSONType.object){
|
||||
// JSON object
|
||||
if (("expirationDateTime" in response) && ("nextExpectedRanges" in response)){
|
||||
// has the elements we need
|
||||
session["expirationDateTime"] = response["expirationDateTime"];
|
||||
session["nextExpectedRanges"] = response["nextExpectedRanges"];
|
||||
if (session["nextExpectedRanges"].array.length == 0) {
|
||||
log.vlog("The upload session is completed");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// bad data
|
||||
log.vlog("Restore file upload session failed - invalid data response from OneDrive");
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// not a JSON object
|
||||
log.vlog("Restore file upload session failed - invalid response from OneDrive");
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
// unable to read the local file
|
||||
log.vlog("Restore file upload session failed - unable to read the local file");
|
||||
remove(sessionFilePath);
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// session file contains an error - cant resume
|
||||
log.vlog("Restore file upload session failed - cleaning up session resume");
|
||||
remove(sessionFilePath);
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -107,46 +151,150 @@ struct UploadSession
|
|||
|
||||
JSONValue upload()
|
||||
{
|
||||
long offset = session["nextExpectedRanges"][0].str.splitter('-').front.to!long;
|
||||
long fileSize = getSize(session["localPath"].str);
|
||||
|
||||
// Upload Progress Bar
|
||||
size_t iteration = (roundTo!int(double(fileSize)/double(fragmentSize)))+1;
|
||||
Progress p = new Progress(iteration);
|
||||
p.title = "Uploading";
|
||||
|
||||
// Response for upload
|
||||
JSONValue response;
|
||||
while (true) {
|
||||
p.next();
|
||||
long fragSize = fragmentSize < fileSize - offset ? fragmentSize : fileSize - offset;
|
||||
// If the resume upload fails, we need to check for a return code here
|
||||
try {
|
||||
response = onedrive.uploadFragment(
|
||||
session["uploadUrl"].str,
|
||||
session["localPath"].str,
|
||||
offset,
|
||||
fragSize,
|
||||
fileSize
|
||||
);
|
||||
offset += fragmentSize;
|
||||
if (offset >= fileSize) break;
|
||||
// update the session details
|
||||
session["expirationDateTime"] = response["expirationDateTime"];
|
||||
session["nextExpectedRanges"] = response["nextExpectedRanges"];
|
||||
save();
|
||||
} catch (OneDriveException e) {
|
||||
// there was an error remove session file
|
||||
remove(sessionFilePath);
|
||||
return response;
|
||||
}
|
||||
|
||||
// session JSON needs to contain valid elements
|
||||
long offset;
|
||||
long fileSize;
|
||||
|
||||
if ("nextExpectedRanges" in session){
|
||||
offset = session["nextExpectedRanges"][0].str.splitter('-').front.to!long;
|
||||
}
|
||||
|
||||
if ("localPath" in session){
|
||||
fileSize = getSize(session["localPath"].str);
|
||||
}
|
||||
|
||||
if ("uploadUrl" in session){
|
||||
// Upload file via session created
|
||||
// Upload Progress Bar
|
||||
size_t iteration = (roundTo!int(double(fileSize)/double(fragmentSize)))+1;
|
||||
Progress p = new Progress(iteration);
|
||||
p.title = "Uploading";
|
||||
long fragmentCount = 0;
|
||||
long fragSize = 0;
|
||||
|
||||
// Initialise the download bar at 0%
|
||||
p.next();
|
||||
|
||||
while (true) {
|
||||
fragmentCount++;
|
||||
log.vdebugNewLine("Fragment: ", fragmentCount, " of ", iteration);
|
||||
p.next();
|
||||
log.vdebugNewLine("fragmentSize: ", fragmentSize, "offset: ", offset, " fileSize: ", fileSize );
|
||||
fragSize = fragmentSize < fileSize - offset ? fragmentSize : fileSize - offset;
|
||||
log.vdebugNewLine("Using fragSize: ", fragSize);
|
||||
|
||||
// fragSize must not be a negative value
|
||||
if (fragSize < 0) {
|
||||
// Session upload will fail
|
||||
// not a JSON object - fragment upload failed
|
||||
log.vlog("File upload session failed - invalid calculation of fragment size");
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
// set response to null as error
|
||||
response = null;
|
||||
return response;
|
||||
}
|
||||
|
||||
// If the resume upload fails, we need to check for a return code here
|
||||
try {
|
||||
response = onedrive.uploadFragment(
|
||||
session["uploadUrl"].str,
|
||||
session["localPath"].str,
|
||||
offset,
|
||||
fragSize,
|
||||
fileSize
|
||||
);
|
||||
} catch (OneDriveException e) {
|
||||
// if a 100 response is generated, continue
|
||||
if (e.httpStatusCode == 100) {
|
||||
continue;
|
||||
}
|
||||
// there was an error response from OneDrive when uploading the file fragment
|
||||
// handle 'HTTP request returned status code 429 (Too Many Requests)' first
|
||||
if (e.httpStatusCode == 429) {
|
||||
auto retryAfterValue = onedrive.getRetryAfterValue();
|
||||
log.vdebug("Fragment upload failed - received throttle request response from OneDrive");
|
||||
log.vdebug("Using Retry-After Value = ", retryAfterValue);
|
||||
// Sleep thread as per request
|
||||
log.log("\nThread sleeping due to 'HTTP request returned status code 429' - The request has been throttled");
|
||||
log.log("Sleeping for ", retryAfterValue, " seconds");
|
||||
Thread.sleep(dur!"seconds"(retryAfterValue));
|
||||
log.log("Retrying fragment upload");
|
||||
} else {
|
||||
// insert a new line as well, so that the below error is inserted on the console in the right location
|
||||
log.vlog("\nFragment upload failed - received an exception response from OneDrive");
|
||||
// display what the error is
|
||||
displayOneDriveErrorMessage(e.msg, getFunctionName!({}));
|
||||
// retry fragment upload in case error is transient
|
||||
log.vlog("Retrying fragment upload");
|
||||
}
|
||||
|
||||
try {
|
||||
response = onedrive.uploadFragment(
|
||||
session["uploadUrl"].str,
|
||||
session["localPath"].str,
|
||||
offset,
|
||||
fragSize,
|
||||
fileSize
|
||||
);
|
||||
} catch (OneDriveException e) {
|
||||
// OneDrive threw another error on retry
|
||||
log.vlog("Retry to upload fragment failed");
|
||||
// display what the error is
|
||||
displayOneDriveErrorMessage(e.msg, getFunctionName!({}));
|
||||
// set response to null as the fragment upload was in error twice
|
||||
response = null;
|
||||
}
|
||||
}
|
||||
// was the fragment uploaded without issue?
|
||||
if (response.type() == JSONType.object){
|
||||
offset += fragmentSize;
|
||||
if (offset >= fileSize) break;
|
||||
// update the session details
|
||||
session["expirationDateTime"] = response["expirationDateTime"];
|
||||
session["nextExpectedRanges"] = response["nextExpectedRanges"];
|
||||
save();
|
||||
} else {
|
||||
// not a JSON object - fragment upload failed
|
||||
log.vlog("File upload session failed - invalid response from OneDrive");
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
// set response to null as error
|
||||
response = null;
|
||||
return response;
|
||||
}
|
||||
}
|
||||
// upload complete
|
||||
p.next();
|
||||
writeln();
|
||||
if (exists(sessionFilePath)) {
|
||||
remove(sessionFilePath);
|
||||
}
|
||||
return response;
|
||||
} else {
|
||||
// session elements were not present
|
||||
log.vlog("Session has no valid upload URL ... skipping this file upload");
|
||||
// return an empty JSON response
|
||||
response = null;
|
||||
return response;
|
||||
}
|
||||
// upload complete
|
||||
p.next();
|
||||
writeln();
|
||||
remove(sessionFilePath);
|
||||
return response;
|
||||
}
|
||||
|
||||
string getUploadSessionLocalFilePath() {
|
||||
// return the session file path
|
||||
string localPath = "";
|
||||
if ("localPath" in session){
|
||||
localPath = session["localPath"].str;
|
||||
}
|
||||
return localPath;
|
||||
}
|
||||
|
||||
// save session details to temp file
|
||||
private void save()
|
||||
{
|
||||
std.file.write(sessionFilePath, session.toString());
|
||||
|
|
388
src/util.d
|
@ -11,10 +11,15 @@ import std.stdio;
|
|||
import std.string;
|
||||
import std.algorithm;
|
||||
import std.uri;
|
||||
import std.json;
|
||||
import std.traits;
|
||||
import qxor;
|
||||
static import log;
|
||||
import core.stdc.stdlib;
|
||||
|
||||
private string deviceName;
|
||||
import log;
|
||||
import config;
|
||||
|
||||
shared string deviceName;
|
||||
|
||||
static this()
|
||||
{
|
||||
|
@ -45,28 +50,6 @@ void safeRemove(const(char)[] path)
|
|||
if (exists(path)) remove(path);
|
||||
}
|
||||
|
||||
// returns the crc32 hex string of a file
|
||||
string computeCrc32(string path)
|
||||
{
|
||||
CRC32 crc;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
crc.put(data);
|
||||
}
|
||||
return crc.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// returns the sha1 hash hex string of a file
|
||||
string computeSha1Hash(string path)
|
||||
{
|
||||
SHA1 sha;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
sha.put(data);
|
||||
}
|
||||
return sha.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// returns the quickXorHash base64 string of a file
|
||||
string computeQuickXorHash(string path)
|
||||
{
|
||||
|
@ -78,6 +61,16 @@ string computeQuickXorHash(string path)
|
|||
return Base64.encode(qxor.finish());
|
||||
}
|
||||
|
||||
// returns the SHA256 hex string of a file
|
||||
string computeSHA256Hash(string path) {
|
||||
SHA256 sha256;
|
||||
auto file = File(path, "rb");
|
||||
foreach (ubyte[] data; chunks(file, 4096)) {
|
||||
sha256.put(data);
|
||||
}
|
||||
return sha256.finish().toHexString().dup;
|
||||
}
|
||||
|
||||
// converts wildcards (*, ?) to regex
|
||||
Regex!char wild2regex(const(char)[] pattern)
|
||||
{
|
||||
|
@ -107,6 +100,12 @@ Regex!char wild2regex(const(char)[] pattern)
|
|||
case '/':
|
||||
str ~= "\\/";
|
||||
break;
|
||||
case '(':
|
||||
str ~= "\\(";
|
||||
break;
|
||||
case ')':
|
||||
str ~= "\\)";
|
||||
break;
|
||||
default:
|
||||
str ~= c;
|
||||
break;
|
||||
|
@ -117,15 +116,43 @@ Regex!char wild2regex(const(char)[] pattern)
|
|||
}
|
||||
|
||||
// returns true if the network connection is available
|
||||
bool testNetwork()
|
||||
bool testNetwork(Config cfg)
|
||||
{
|
||||
// Use low level HTTP struct
|
||||
auto http = HTTP();
|
||||
http.url = "https://login.microsoftonline.com";
|
||||
// DNS lookup timeout
|
||||
http.dnsTimeout = (dur!"seconds"(cfg.getValueLong("dns_timeout")));
|
||||
// Timeout for connecting
|
||||
http.connectTimeout = (dur!"seconds"(cfg.getValueLong("connect_timeout")));
|
||||
// Data Timeout for HTTPS connections
|
||||
http.dataTimeout = (dur!"seconds"(cfg.getValueLong("data_timeout")));
|
||||
// maximum time any operation is allowed to take
|
||||
// This includes dns resolution, connecting, data transfer, etc.
|
||||
http.operationTimeout = (dur!"seconds"(cfg.getValueLong("operation_timeout")));
|
||||
// What IP protocol version should be used when using Curl - IPv4 & IPv6, IPv4 or IPv6
|
||||
http.handle.set(CurlOption.ipresolve,cfg.getValueLong("ip_protocol_version")); // 0 = IPv4 + IPv6, 1 = IPv4 Only, 2 = IPv6 Only
|
||||
|
||||
// HTTP connection test method
|
||||
http.method = HTTP.Method.head;
|
||||
// Attempt to contact the Microsoft Online Service
|
||||
try {
|
||||
HTTP http = HTTP("https://login.microsoftonline.com");
|
||||
http.dnsTimeout = (dur!"seconds"(5));
|
||||
http.method = HTTP.Method.head;
|
||||
log.vdebug("Attempting to contact online service");
|
||||
http.perform();
|
||||
log.vdebug("Shutting down HTTP engine as successfully reached OneDrive Online Service");
|
||||
http.shutdown();
|
||||
return true;
|
||||
} catch (SocketException) {
|
||||
} catch (SocketException e) {
|
||||
// Socket issue
|
||||
log.vdebug("HTTP Socket Issue");
|
||||
log.error("Cannot connect to Microsoft OneDrive Service - Socket Issue");
|
||||
displayOneDriveErrorMessage(e.msg, getFunctionName!({}));
|
||||
return false;
|
||||
} catch (CurlException e) {
|
||||
// No network connection to OneDrive Service
|
||||
log.vdebug("No Network Connection");
|
||||
log.error("Cannot connect to Microsoft OneDrive Service - Network Connection Issue");
|
||||
displayOneDriveErrorMessage(e.msg, getFunctionName!({}));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -136,11 +163,12 @@ bool testNetwork()
|
|||
bool readLocalFile(string path)
|
||||
{
|
||||
try {
|
||||
// attempt to read the first 10MB of the file
|
||||
read(path,10000000);
|
||||
// attempt to read up to the first 1 byte of the file
|
||||
// validates we can 'read' the file based on file permissions
|
||||
read(path,1);
|
||||
} catch (std.file.FileException e) {
|
||||
// unable to read the new local file
|
||||
log.log("Skipping uploading this file as it cannot be read (file permissions or file corruption): ", path);
|
||||
displayFileSystemErrorMessage(e.msg, getFunctionName!({}));
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
@ -184,11 +212,12 @@ bool isValidName(string path)
|
|||
matched = m.empty;
|
||||
|
||||
// Additional explicit validation checks
|
||||
if (itemName == "Icon") {matched = false;}
|
||||
if (itemName == ".lock") {matched = false;}
|
||||
if (itemName == "desktop.ini") {matched = false;}
|
||||
// _vti_ cannot appear anywhere in a file or folder name
|
||||
if(canFind(itemName, "_vti_")){matched = false;}
|
||||
// Item name cannot equal '~'
|
||||
if (itemName == "~") {matched = false;}
|
||||
|
||||
// return response
|
||||
return matched;
|
||||
|
@ -235,6 +264,299 @@ bool containsASCIIHTMLCodes(string path)
|
|||
return m.empty;
|
||||
}
|
||||
|
||||
// Parse and display error message received from OneDrive
|
||||
void displayOneDriveErrorMessage(string message, string callingFunction)
|
||||
{
|
||||
writeln();
|
||||
log.error("ERROR: Microsoft OneDrive API returned an error with the following message:");
|
||||
auto errorArray = splitLines(message);
|
||||
log.error(" Error Message: ", errorArray[0]);
|
||||
// Extract 'message' as the reason
|
||||
JSONValue errorMessage = parseJSON(replace(message, errorArray[0], ""));
|
||||
// extra debug
|
||||
log.vdebug("Raw Error Data: ", message);
|
||||
log.vdebug("JSON Message: ", errorMessage);
|
||||
|
||||
// What is the reason for the error
|
||||
if (errorMessage.type() == JSONType.object) {
|
||||
// configure the error reason
|
||||
string errorReason;
|
||||
string requestDate;
|
||||
string requestId;
|
||||
|
||||
// set the reason for the error
|
||||
try {
|
||||
// Use error_description as reason
|
||||
errorReason = errorMessage["error_description"].str;
|
||||
} catch (JSONException e) {
|
||||
// we dont want to do anything here
|
||||
}
|
||||
|
||||
// set the reason for the error
|
||||
try {
|
||||
// Use ["error"]["message"] as reason
|
||||
errorReason = errorMessage["error"]["message"].str;
|
||||
} catch (JSONException e) {
|
||||
// we dont want to do anything here
|
||||
}
|
||||
|
||||
// Display the error reason
|
||||
if (errorReason.startsWith("<!DOCTYPE")) {
|
||||
// a HTML Error Reason was given
|
||||
log.error(" Error Reason: A HTML Error response was provided. Use debug logging (--verbose --verbose) to view this error");
|
||||
log.vdebug(errorReason);
|
||||
} else {
|
||||
// a non HTML Error Reason was given
|
||||
log.error(" Error Reason: ", errorReason);
|
||||
}
|
||||
|
||||
// Get the date of request if available
|
||||
try {
|
||||
// Use ["error"]["innerError"]["date"] as date
|
||||
requestDate = errorMessage["error"]["innerError"]["date"].str;
|
||||
} catch (JSONException e) {
|
||||
// we dont want to do anything here
|
||||
}
|
||||
|
||||
// Get the request-id if available
|
||||
try {
|
||||
// Use ["error"]["innerError"]["request-id"] as request-id
|
||||
requestId = errorMessage["error"]["innerError"]["request-id"].str;
|
||||
} catch (JSONException e) {
|
||||
// we dont want to do anything here
|
||||
}
|
||||
|
||||
// Display the date and request id if available
|
||||
if (requestDate != "") log.error(" Error Timestamp: ", requestDate);
|
||||
if (requestId != "") log.error(" API Request ID: ", requestId);
|
||||
}
|
||||
|
||||
// Where in the code was this error generated
|
||||
log.vlog(" Calling Function: ", callingFunction);
|
||||
}
|
||||
|
||||
// Parse and display error message received from the local file system
|
||||
void displayFileSystemErrorMessage(string message, string callingFunction)
|
||||
{
|
||||
writeln();
|
||||
log.error("ERROR: The local file system returned an error with the following message:");
|
||||
auto errorArray = splitLines(message);
|
||||
// What was the error message
|
||||
log.error(" Error Message: ", errorArray[0]);
|
||||
// Where in the code was this error generated
|
||||
log.vlog(" Calling Function: ", callingFunction);
|
||||
// If we are out of disk space (despite download reservations) we need to exit the application
|
||||
ulong localActualFreeSpace = to!ulong(getAvailableDiskSpace("."));
|
||||
if (localActualFreeSpace == 0) {
|
||||
// force exit
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
// Get the function name that is being called to assist with identifying where an error is being generated
|
||||
string getFunctionName(alias func)() {
|
||||
return __traits(identifier, __traits(parent, func)) ~ "()\n";
|
||||
}
|
||||
|
||||
// Get the latest release version from GitHub
|
||||
JSONValue getLatestReleaseDetails() {
|
||||
// Import curl just for this function
|
||||
import std.net.curl;
|
||||
char[] content;
|
||||
JSONValue githubLatest;
|
||||
JSONValue versionDetails;
|
||||
string latestTag;
|
||||
string publishedDate;
|
||||
|
||||
try {
|
||||
content = get("https://api.github.com/repos/abraunegg/onedrive/releases/latest");
|
||||
} catch (CurlException e) {
|
||||
// curl generated an error - meaning we could not query GitHub
|
||||
log.vdebug("Unable to query GitHub for latest release");
|
||||
}
|
||||
|
||||
try {
|
||||
githubLatest = content.parseJSON();
|
||||
} catch (JSONException e) {
|
||||
// unable to parse the content JSON, set to blank JSON
|
||||
log.vdebug("Unable to parse GitHub JSON response");
|
||||
githubLatest = parseJSON("{}");
|
||||
}
|
||||
|
||||
// githubLatest has to be a valid JSON object
|
||||
if (githubLatest.type() == JSONType.object){
|
||||
// use the returned tag_name
|
||||
if ("tag_name" in githubLatest) {
|
||||
// use the provided tag
|
||||
// "tag_name": "vA.B.CC" and strip 'v'
|
||||
latestTag = strip(githubLatest["tag_name"].str, "v");
|
||||
} else {
|
||||
// set to latestTag zeros
|
||||
log.vdebug("'tag_name' unavailable in JSON response. Setting GitHub 'tag_name' release version to 0.0.0");
|
||||
latestTag = "0.0.0";
|
||||
}
|
||||
// use the returned published_at date
|
||||
if ("published_at" in githubLatest) {
|
||||
// use the provided value
|
||||
publishedDate = githubLatest["published_at"].str;
|
||||
} else {
|
||||
// set to v2.0.0 release date
|
||||
log.vdebug("'published_at' unavailable in JSON response. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
} else {
|
||||
// JSONValue is not an object
|
||||
log.vdebug("Invalid JSON Object. Setting GitHub 'tag_name' release version to 0.0.0");
|
||||
latestTag = "0.0.0";
|
||||
log.vdebug("Invalid JSON Object. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
|
||||
// return the latest github version and published date as our own JSON
|
||||
versionDetails = [
|
||||
"latestTag": JSONValue(latestTag),
|
||||
"publishedDate": JSONValue(publishedDate)
|
||||
];
|
||||
|
||||
// return JSON
|
||||
return versionDetails;
|
||||
}
|
||||
|
||||
// Get the release details from the 'current' running version
|
||||
JSONValue getCurrentVersionDetails(string thisVersion) {
|
||||
// Import curl just for this function
|
||||
import std.net.curl;
|
||||
char[] content;
|
||||
JSONValue githubDetails;
|
||||
JSONValue versionDetails;
|
||||
string versionTag = "v" ~ thisVersion;
|
||||
string publishedDate;
|
||||
|
||||
try {
|
||||
content = get("https://api.github.com/repos/abraunegg/onedrive/releases");
|
||||
} catch (CurlException e) {
|
||||
// curl generated an error - meaning we could not query GitHub
|
||||
log.vdebug("Unable to query GitHub for release details");
|
||||
}
|
||||
|
||||
try {
|
||||
githubDetails = content.parseJSON();
|
||||
} catch (JSONException e) {
|
||||
// unable to parse the content JSON, set to blank JSON
|
||||
log.vdebug("Unable to parse GitHub JSON response");
|
||||
githubDetails = parseJSON("{}");
|
||||
}
|
||||
|
||||
// githubDetails has to be a valid JSON array
|
||||
if (githubDetails.type() == JSONType.array){
|
||||
foreach (searchResult; githubDetails.array) {
|
||||
// searchResult["tag_name"].str;
|
||||
if (searchResult["tag_name"].str == versionTag) {
|
||||
log.vdebug("MATCHED version");
|
||||
log.vdebug("tag_name: ", searchResult["tag_name"].str);
|
||||
log.vdebug("published_at: ", searchResult["published_at"].str);
|
||||
publishedDate = searchResult["published_at"].str;
|
||||
}
|
||||
}
|
||||
|
||||
if (publishedDate.empty) {
|
||||
// empty .. no version match ?
|
||||
// set to v2.0.0 release date
|
||||
log.vdebug("'published_at' unavailable in JSON response. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
} else {
|
||||
// JSONValue is not an Array
|
||||
log.vdebug("Invalid JSON Array. Setting GitHub 'published_at' date to 2018-07-18T18:00:00Z");
|
||||
publishedDate = "2018-07-18T18:00:00Z";
|
||||
}
|
||||
|
||||
// return the latest github version and published date as our own JSON
|
||||
versionDetails = [
|
||||
"versionTag": JSONValue(thisVersion),
|
||||
"publishedDate": JSONValue(publishedDate)
|
||||
];
|
||||
|
||||
// return JSON
|
||||
return versionDetails;
|
||||
}
|
||||
|
||||
// Check the application version versus GitHub latestTag
|
||||
void checkApplicationVersion() {
|
||||
// Get the latest details from GitHub
|
||||
JSONValue latestVersionDetails = getLatestReleaseDetails();
|
||||
string latestVersion = latestVersionDetails["latestTag"].str;
|
||||
SysTime publishedDate = SysTime.fromISOExtString(latestVersionDetails["publishedDate"].str).toUTC();
|
||||
SysTime releaseGracePeriod = publishedDate;
|
||||
SysTime currentTime = Clock.currTime().toUTC();
|
||||
|
||||
// drop fraction seconds
|
||||
publishedDate.fracSecs = Duration.zero;
|
||||
currentTime.fracSecs = Duration.zero;
|
||||
releaseGracePeriod.fracSecs = Duration.zero;
|
||||
// roll the grace period forward to allow distributions to catch up based on their release cycles
|
||||
releaseGracePeriod = releaseGracePeriod.add!"months"(1);
|
||||
|
||||
// what is this clients version?
|
||||
auto currentVersionArray = strip(strip(import("version"), "v")).split("-");
|
||||
string applicationVersion = currentVersionArray[0];
|
||||
|
||||
// debug output
|
||||
log.vdebug("applicationVersion: ", applicationVersion);
|
||||
log.vdebug("latestVersion: ", latestVersion);
|
||||
log.vdebug("publishedDate: ", publishedDate);
|
||||
log.vdebug("currentTime: ", currentTime);
|
||||
log.vdebug("releaseGracePeriod: ", releaseGracePeriod);
|
||||
|
||||
// display details if not current
|
||||
// is application version is older than available on GitHub
|
||||
if (applicationVersion != latestVersion) {
|
||||
// application version is different
|
||||
bool displayObsolete = false;
|
||||
|
||||
// what warning do we present?
|
||||
if (applicationVersion < latestVersion) {
|
||||
// go get this running version details
|
||||
JSONValue thisVersionDetails = getCurrentVersionDetails(applicationVersion);
|
||||
SysTime thisVersionPublishedDate = SysTime.fromISOExtString(thisVersionDetails["publishedDate"].str).toUTC();
|
||||
thisVersionPublishedDate.fracSecs = Duration.zero;
|
||||
log.vdebug("thisVersionPublishedDate: ", thisVersionPublishedDate);
|
||||
|
||||
// the running version grace period is its release date + 1 month
|
||||
SysTime thisVersionReleaseGracePeriod = thisVersionPublishedDate;
|
||||
thisVersionReleaseGracePeriod = thisVersionReleaseGracePeriod.add!"months"(1);
|
||||
log.vdebug("thisVersionReleaseGracePeriod: ", thisVersionReleaseGracePeriod);
|
||||
|
||||
// is this running version obsolete ?
|
||||
if (!displayObsolete) {
|
||||
// if releaseGracePeriod > currentTime
|
||||
// display an information warning that there is a new release available
|
||||
if (releaseGracePeriod.toUnixTime() > currentTime.toUnixTime()) {
|
||||
// inside release grace period ... set flag to false
|
||||
displayObsolete = false;
|
||||
} else {
|
||||
// outside grace period
|
||||
displayObsolete = true;
|
||||
}
|
||||
}
|
||||
|
||||
// display version response
|
||||
writeln();
|
||||
if (!displayObsolete) {
|
||||
// display the new version is available message
|
||||
log.logAndNotify("INFO: A new onedrive client version is available. Please upgrade your client version when possible.");
|
||||
} else {
|
||||
// display the obsolete message
|
||||
log.logAndNotify("WARNING: Your onedrive client version is now obsolete and unsupported. Please upgrade your client version.");
|
||||
}
|
||||
log.log("Current Application Version: ", applicationVersion);
|
||||
log.log("Version Available: ", latestVersion);
|
||||
writeln();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Unit Tests
|
||||
unittest
|
||||
{
|
||||
assert(multiGlobMatch(".hidden", ".*"));
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart=@PREFIX@/bin/onedrive --monitor
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
|
@ -1,15 +0,0 @@
|
|||
[Unit]
|
||||
Description=OneDrive Free Client for %i
|
||||
Documentation=https://github.com/abraunegg/onedrive
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart=@PREFIX@/bin/onedrive --monitor --confdir=/home/%i/.config/onedrive
|
||||
User=%i
|
||||
Group=users
|
||||
Restart=on-failure
|
||||
RestartSec=3
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -1,3 +1,4 @@
|
|||
#!/bin/bash
|
||||
ONEDRIVEALT=~/OneDriveALT
|
||||
if [ ! -d ${ONEDRIVEALT} ]; then
|
||||
mkdir -p ${ONEDRIVEALT}
|
||||
|
|