# Compare commits

757 commits

@@ -0,0 +1,33 @@

---
name: Bug report
about: Create a report to help us improve

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Set up osync with the following config file / the following parameters (please provide either, anonymized)
2. Run osync with the following parameters
3. Result

**Expected behavior**
A clear and concise description of what you expected to happen.

**Deviated behavior**
How does the actual result deviate from the expected behavior?

**Logs**
Please send logs of what happens.
You might also run osync with the _DEBUG=yes environment variable to get more verbose debug logs.

**Environment (please complete the following information):**
- Full osync version (including build)
- OS: [e.g. iOS]
- Bitness [e.g. x64 or x86]
- Shell (busybox or else)

**Additional context**
Add any other context about the problem here.
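For reporters unsure how to capture the verbose logs mentioned above, a minimal sketch (the config path and log destination are placeholders; only the _DEBUG=yes variable comes from the template itself):

```bash
# Hypothetical config path and log destination; adjust to your own setup.
# _DEBUG=yes is the environment variable named above for verbose debug logs.
_DEBUG=yes ./osync.sh /etc/osync/sync.conf > /tmp/osync_debug.log 2>&1
```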

@@ -0,0 +1,25 @@

# Codespell configuration is within .codespellrc
---
name: Codespell

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

permissions:
  contents: read

jobs:
  codespell:
    name: Check for spelling errors
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Annotate locations with typos
        uses: codespell-project/codespell-problem-matcher@v1
      - name: Codespell
        uses: codespell-project/actions-codespell@v2
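To reproduce the check before pushing, one possible local invocation (assuming codespell is installed via pip and that the installed release picks up the .codespellrc mentioned in the workflow comment — behavior of recent codespell versions, not guaranteed by this workflow):

```bash
# Install and run codespell locally; .codespellrc in the repo root is assumed
# to be read automatically by the installed codespell version.
pip install --user codespell
codespell
```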

@@ -0,0 +1,25 @@

name: linux-tests

on: [push, pull_request]

jobs:
  build:

    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest]
    steps:
    - uses: actions/checkout@v2
    - name: Install dependencies
      run: |
        sudo apt-get install inotify-tools acl
    - name: Execute tests and generate coverage report
      run: |
        export RUNNING_ON_GITHUB_ACTIONS=true
        export SSH_PORT=22
        echo "Running on github actions: ${RUNNING_ON_GITHUB_ACTIONS}"
        echo "Running on ssh port ${SSH_PORT}"
        sudo -E bash ./dev/tests/run_tests.sh
    - name: Upload Coverage to Codecov
      uses: codecov/codecov-action@v1
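The same test entry point can be exercised on a local Linux box; a sketch mirroring the workflow steps above (package names and the script path are taken from the workflow, while the local variable values are assumptions):

```bash
# Local equivalent of the CI steps above (Debian/Ubuntu assumed).
sudo apt-get install inotify-tools acl
export RUNNING_ON_GITHUB_ACTIONS=false   # not in CI; value is an assumption
export SSH_PORT=22                       # assumes sshd listens on the default port
sudo -E bash ./dev/tests/run_tests.sh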

@@ -0,0 +1,28 @@

name: macosx-tests

on: [push, pull_request]

jobs:
  build:

    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [macos-latest]
    steps:
    - uses: actions/checkout@v2
    - name: Install Bash 4
      run: |
        /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
        brew update

        brew install bash
        brew install fswatch
        echo "/usr/local/bin" >> $GITHUB_PATH
    - name: Execute tests and generate coverage report
      run: |
        export RUNNING_ON_GITHUB_ACTIONS=true
        export SSH_PORT=22
        sudo -E bash ./dev/tests/run_tests.sh
    - name: Upload Coverage to Codecov
      uses: codecov/codecov-action@v1
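Since this workflow depends on Homebrew's bash being found ahead of the system bash 3.2, a quick local sanity check (the expected path assumes an Intel-Mac Homebrew prefix, as in the workflow's `/usr/local/bin` line):

```bash
# Check that the Homebrew bash (4+) shadows the system bash 3.2 on macOS.
command -v bash                         # expected: /usr/local/bin/bash on Intel Homebrew
bash -c 'echo "${BASH_VERSINFO[0]}"'    # expected: 4 or higher
```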

@@ -0,0 +1,29 @@

name: windows-tests

on: [push, pull_request]

jobs:
  build:

    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest]
    steps:
    - uses: actions/checkout@v2
    - uses: Vampire/setup-wsl@v1
      with:
        additional-packages:
          dos2unix
          rsync
          openssh-server
    - name: Execute tests and generate coverage report
      shell: wsl-bash {0}
      run: |
        export RUNNING_ON_GITHUB_ACTIONS=true
        export SSH_PORT=22
        find ./ -type f ! -path "./.git/*" -print0 | xargs -0 -n 1 -P 4 dos2unix
        service ssh start
        ./dev/tests/run_tests.sh
    - name: Upload Coverage to Codecov
      uses: codecov/codecov-action@v1

@@ -1,409 +1,493 @@

RECENT CHANGES
--------------
## RECENT CHANGES

dd Mmm YYYY: osync v1.2.2 release
### Current master

! Added an option to log conflictual files
! Presence of conflictual files can trigger a special mail
! new option FORCE_CONFLICT_PREVALANCE
- Make --log-conflicts non experimental (randomly fails)
- ! new option FORCE_CONFLICT_PREVALANCE which will always use Initiator or Target, regardless of best time
- ! target-helper: destination mails etc. on target, also, no cmd after on configs

dd Mmm YYYY: osync v1.2.1 release
### 16 June 2023: osync v1.3 release (for full changelog since v1.2 branch see all v1.3-beta/RC entries)

- Added --no-resume option in order to disable resuming execution on failure
- Fixed missing options passed to subprocess in daemon mode
- Fixed bogus pgrep that could lead to segfault 11 because of recursive KillChilds
- Fixed osync deletion not working on systems with ssh banner enabled
- Added basic performance profiler to debug version
- Fixed GetRemoteOS missing GetConfFileValue, preventing OS details from being read from /etc/os-release
- Fixed low severity security issue where log and run files could be read by other users
- Minor enhancements in installer / ofunctions
- Fix for new RSYNC protocol
- New option ALWAYS_SEND_MAILS to allow sending logs regardless of execution states

25 Mar 2017: osync v1.2 release (for full changelog of v1.2 branch see all v1.2-beta/RC entries)
### 29 June 2020: osync v1.3-RC1 release

- Check for initiator directory before launching monitor mode
- Updated RPM spec file (Thanks to https://github.com/liger1978)
- Fixed remote commands being run on local runs and obviously failing
- Minor fixes in installer logic
- New option SSH_CONTROLMASTER to speed up remote sync tasks and preserve a single ssh channel (see the sketch after this list)
- New option SSH_OPTIONAL_ARGS
- Fixed a problem with macos mv not preserving ownership of files from /tmp
- Fixed very long outstanding issue with special characters in remote target handling
- Fixed an issue where STOP_ON_ERROR_CMD did not work anymore
- Fixed a remote file lock problem (thanks to https://github.com/zhangzhishan)
- Fixed various cosmetic issues with code and logs
- Improved upgrade script
- Fixed a possible bash buffer overflow when synchronizing large filesets (tested with 2M files)
- This fix truncates every string sent to Logger to no more than 16KB
- Fixed osync leaving temporary log files behind in RUN_DIR (/tmp by default)
- Updated target helper service configuration file
- Improved codacy results
- Added more debugging
- Fixed service logs being junked by spinner
- Fixed MINIMUM_SPACE=0 not stopping the disk space check anymore (Thanks to Val)
- Fixed conflict file logs to be less verbose when no conflicts happen
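A minimal sketch of what the two ssh settings above might look like in a sync config file (osync configs are plain KEY=value files sourced by bash; the exact values shown here are assumptions, not taken from the shipped example config):

```bash
# Hypothetical excerpt of a sync config file.
SSH_CONTROLMASTER=true      # reuse one ssh channel for all remote commands (speed-up described above)
SSH_OPTIONAL_ARGS=""        # extra arguments appended to ssh invocations; empty by default
```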

10 Feb 2017: osync v1.2-RC3 release
### 22 May 2019: osync v1.3-beta3 release

- Uninstaller skips ssh_filter if needed by other program (osync/obackup)
- Logger now automatically obfuscates _REMOTE_TOKEN
- Logger doesn't show failed commands in stdout, only logs them
- Config file update script fixes
- Removed old Win10 1607 bash fixes to make Win10 1809 work (breaks Win10 1607 beta bash version... yeah, nothing I can do about that)

08 Feb 2017: osync v1.2-RC2 release
### 20 May 2019: osync v1.3-beta2 release

- Tests have run on CentOS 5, 6 and 7, Debian 8, Linux Mint 18, Fedora 25, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Hugely improved ssh_filter
- Improved privilege elevation compatibility on SUDO_EXEC=yes runs
- Refactored installer logic and added --remove option
- Added optional mail body character set encoding
- Fixed log output having escaped UTF-8 characters because of LC_ALL=C
- Fixed installer statistics not reporting OS
- Minor tweaks and fixes in ofunctions
- More --summary statistics
- Config file syntax now uses booleans instead of yes / no (but still accepts old syntax)
- Added boolean update in upgrade script
- Config file revision check
- Added config file revision in upgrade script
- New option --sync-type=initiator2target|target2initiator that allows using osync as an rsync wrapper for unidirectional sync (see the sketch after this list)
- New osync target helper service
- Fixed multiple race conditions in parallel executions (which also fixes random conflict log failures)
- Fixed directory softdeletion bug
- Fixed multiple failed deletions being retried as many times as failures happened
- Fixed remote running on FreeBSD for some commands, thanks to Vladimirek
- Fixed (again) deletion propagation when file contains spaces (thanks to http://github.com/weinhold)
- Deprecated --log-conflicts for 1.3 branch (is now experimental)
- Updated ofunctions
- Has better random number generator
- IsInteger, IsNumeric and IsNumericExpand are now busybox compatible
- Multiple installer fixes
- Multiple batch fixes
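A sketch of the unidirectional mode mentioned above (the config path is a placeholder; only the --sync-type values come from the entry itself):

```bash
# One-way run, initiator -> target; "/etc/osync/sync.conf" is a hypothetical path.
./osync.sh /etc/osync/sync.conf --sync-type=initiator2target
```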

13 Dec 2016: osync v1.2-RC1 release
### 08 Aug 2018: osync v1.3-beta1 release

- Unit tests have run on CentOS 5, 6 and 7, Debian 8, Linux Mint 18, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Added optional rsync arguments configuration value
- Fixed another random error involving warns and errors triggered by earlier runs with the same PID flag files
- Added more preflight checks
- Fixed a randomly appearing issue with Sync being stopped on internet failure introduced in v1.2 rewrite
- Resuming operation will not send warnings anymore unless resumed too many times
- Spinner is less prone to move logging on screen
- Fixed daemon mode not enforcing exclusions
- Made a quick and dirty preprocessor
- ofunctions can now directly be loaded into osync via an include statement
- n_osync.sh can be assembled on the fly using bootstrap.sh
- Forced remote ssh to use bash (fixes FreeBSD 11 compatibility when default shell is csh)
- Faster execution
- Reduced number of needed sequential SSH connections for remote sync (4 connections less)
- Refactored CheckReplicaPath and CheckDiskSpace into one function CheckReplicas
- Refactored CheckDiskSpace, CheckLocks and WriteLocks into one function HandleLocks
- Removed noclobber locking in favor of a more direct method
- Improved remote logging
- Fixed directory ctime softdeletion
- Using mutt as mail program now supports multiple recipients
- osync now properly handles symlink deletions (previous bugfix didn't work properly)
- Simplified osync-batch runner (internally and for user)
- Better filename handling
- Easier to read log output
- Always passes --silent to osync
- All options that do not belong to osync-batch are automatically passed to osync
- Improved installer OS detection
- Added daemon capability on MacOS X
- Fixed upgrade script being unable to update header on BSD / MacOS X
- Fixed SendEmail function on MacOS X
- Fixed MAX_HARD_EXEC_TIME not enforced in sync function introduced with v1.2 rewrite
- Fixed MAX_SOFT_EXEC_TIME not enforced bug introduced with v1.2 rewrite
- PRESERVE_ACL and PRESERVE_XATTR are ignored when local or remote OS is MacOS or msys or Cygwin
- Fixed PRESERVE_EXECUTABILITY being voluntarily omitted on MacOS X because of rsync syntax
- Fixed failed deletion rescheduling under BSD bug introduced with v1.2 rewrite
- merge.sh is now BSD and Mac compatible
- More work on unit tests:
- Unit tests are now BSD / MacOSX / MSYS / Cygwin and Windows 10 bash compatible
- Added more ACL tests
- Added directory soft deletion tests
- Added symlink and broken symlink copy / deletion tests
- Made unit tests more robust when aborted
- Simplified config files needed by unit tests (merged travis and local config files)
- Added timed execution tests
- More code compliance
- Lots of minor fixes
- Added an option to log conflictual files
- Presence of conflictual files can trigger a special mail
- New option --initialize (see #141)
- Added OpenRC support (thanks to kozross, see #140)
- Added --no-resume option in order to disable resuming execution on failure
- Added basic performance profiler to debug version
- Fixed summary for file deletions
- Fixed an issue with filenames ending with spaces, their deletion not being propagated, and ACL / conflicts not being managed (still they got synced)
- Fixed missing options passed to subprocess in daemon mode
- Fixed bogus pgrep that could lead to segfault 11 because of recursive KillChilds
- Fixed osync deletion not working on systems with ssh banner enabled
- Improved GetLocalOS and GetRemoteOS OS detection
- Fixed GetRemoteOS missing GetConfFileValue, preventing OS details from being read from /etc/os-release
- Fixed low severity security issue where log and run files could be read by other users
- Merged Logger and QuickLogger for simplified usage
- Fixed inotifywait error in FreeBSD (see #119)
- Minor enhancements in installer / ofunctions
- Added --prefix option for installer
- Installer path fixes
- Fixed logging bug with QuickLogger
- Refactored time control and parallel execution functions into one single function
- Multiple portability improvements
- UrlEncode function is now Busybox compatible
- IsInteger function is now Busybox compatible
- Prevented non unique runtime filenames on some systems where no milliseconds exist
- Fixed bogus runtime filenames on Busybox
- Upgraded shunit2 test framework to v2.1.8pre (git commit 07bb329)
- Multiple smaller fixes and improvements

19 Nov 2016: osync v1.2-beta3 re-release
### 25 Mar 2017: osync v1.2 release (for full changelog of v1.2 branch see all v1.2-beta/RC entries)

- Fixed blocker bug where local tests tried GetRemoteOS anyway
- Fixed CentOS 5 compatibility bug for checking disk space introduced in beta3
- More Android / Busybox compatibility
- Made unit tests clean authorized_keys file after usage
- Added local unit test where remote OS connection would fail
- Check for initiator directory before launching monitor mode
- Updated RPM spec file (Thanks to https://github.com/liger1978)
- Fixed remote commands being run on local runs and obviously failing
- Minor fixes in installer logic

18 Nov 2016: osync v1.2-beta3 released
### 10 Feb 2017: osync v1.2-RC3 release

- Improved locking / unlocking replicas
- Fixed killing local pid that holds the lock (bug introduced in v1.2 rewrite)
- Allow remote unlocking when INSTANCE_ID of lock matches local INSTANCE_ID
- Fixed failed deletions re-propagation bug introduced in v1.2 rewrite
- Faster remote OS detection
- New output switches, --no-prefix, --summary, --errors-only
- Added busybox (and Android Termux) support
- More portable file size functions
- More portable compression program commands
- More paranoia checks
- Added busybox sendmail support
- Added tls and ssl support for sendmail
- Added --skip-deletion support in config and quicksync modes (see the sketch after this list)
- Added possibility to skip deletion on initiator or target replica
- Prevent lock file racing condition (thanks to https://github.com/allter)
- Added ssh password file support
- Hugely improved unit tests
- Added conflict resolution tests
- Added softdeletion tests
- Added softdeletion cleanup tests
- Added lock tests
- Added skip-deletion tests
- Added configuration file tests
- Added upgrade script test
- Added basic daemon mode tests
- Simplified logger
- All fixes from v1.1.5
- Uninstaller skips ssh_filter if needed by other program (osync/obackup)
- Logger now automatically obfuscates _REMOTE_TOKEN
- Logger doesn't show failed commands in stdout, only logs them
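A sketch combining the deletion-skipping and output switches from this entry (the --initiator/--target quicksync flag pair and the value format for --skip-deletion are assumptions; --summary and --errors-only are the switches named above):

```bash
# Quicksync run that never propagates deletions to the target replica
# and only prints a summary plus errors; paths and value format are illustrative.
./osync.sh --initiator=/data/dirA --target=/data/dirB --skip-deletion=target --summary --errors-only
```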

17 Oct 2016: osync v1.2-beta2 released
- osync now propagates symlink deletions and moves symlinks without referents to deletion dir
- Upgrade script now has the ability to add any missing value
- Improved unit tests
- Added upgrade script test
- Added deletion propagation tests
### 08 Feb 2017: osync v1.2-RC2 release

30 Aug 2016: osync v1.2-beta released
- Rendered more recent code compatible with bash 3.2+
- Added a PKGBUILD file for ArchLinux thanks to Shadowigor (https://github.com/shaodwigor). Builds available at https://aur.archlinux.org/packages/osync/
- Some more code compliance & more paranoia checks
- Added more preflight checks
- Logs sent by mail are easier to read
- Better subject (currently running or finished run)
- Fixed bogus double log sent in alert mails
- Made unix signals posix compliant
- Config file upgrade script now updates header
- Improved batch runner
- Made keep logging value configurable and not mandatory
- Fixed handling of processes in uninterruptible sleep state
- Parallelized sync functions
- Rewrite sync resume process
- Added options to ignore permissions, ownership and groups
- Refactored WaitFor... functions into one
- Improved execution speed
- Rewrite sync resume process
- Added parallel execution for most secondary functions
- Lowered sleep time in wait functions
- Removed trivial sleep and forking in remote deletion code, send the whole function to background instead
- Unlock functions no longer launched if locking failed
- Improved WaitFor... functions to accept multiple pids
- Added KillAllChilds function to accept multiple pids
- Improved logging
- Tests have run on CentOS 5, 6 and 7, Debian 8, Linux Mint 18, Fedora 25, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Hugely improved ssh_filter
- Improved privilege elevation compatibility on SUDO_EXEC=yes runs
- Refactored installer logic and added --remove option
- Added optional mail body character set encoding
- Fixed log output having escaped UTF-8 characters because of LC_ALL=C
- Fixed installer statistics not reporting OS
- Minor tweaks and fixes in ofunctions

17 Nov 2016: osync v1.1.5 released
- Backported unit tests from v1.2-beta allowing to fix the following
- Allow quicksync mode to specify rsync include / exclude patterns as environment variables
- Added default path separator char in quicksync mode for multiple includes / exclusions
- Local runs should not check for remote connectivity
- Fixed backups going into the root of the replica instead of .osync_workdir/backups
- Fixed error alerts that could not be triggered from subprocesses
- Fixed remote locked targets being unlocked in any case
### 13 Dec 2016: osync v1.2-RC1 release

10 Nov 2016: osync v1.1.4 released
- Fixed a corner case with sending alerts with logfile attachments when osync is used by multiple users
- Unit tests have run on CentOS 5, 6 and 7, Debian 8, Linux Mint 18, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Added optional rsync arguments configuration value
- Fixed another random error involving warns and errors triggered by earlier runs with the same PID flag files
- Added more preflight checks
- Fixed a randomly appearing issue with Sync being stopped on internet failure introduced in v1.2 rewrite
- Resuming operation will not send warnings anymore unless resumed too many times
- Spinner is less prone to move logging on screen
- Fixed daemon mode not enforcing exclusions
- Made a quick and dirty preprocessor
- ofunctions can now directly be loaded into osync via an include statement
- n_osync.sh can be assembled on the fly using bootstrap.sh
- Forced remote ssh to use bash (fixes FreeBSD 11 compatibility when default shell is csh)
- Faster execution
- Reduced number of needed sequential SSH connections for remote sync (4 connections less)
- Refactored CheckReplicaPath and CheckDiskSpace into one function CheckReplicas
- Refactored CheckDiskSpace, CheckLocks and WriteLocks into one function HandleLocks
- Removed noclobber locking in favor of a more direct method
- Improved remote logging
- Fixed directory ctime softdeletion
- Using mutt as mail program now supports multiple recipients
- osync now properly handles symlink deletions (previous bugfix didn't work properly)
- Simplified osync-batch runner (internally and for user)
- Better filename handling
- Easier to read log output
- Always passes --silent to osync
- All options that do not belong to osync-batch are automatically passed to osync
- Improved installer OS detection
- Added daemon capability on MacOS X
- Fixed upgrade script being unable to update header on BSD / MacOS X
- Fixed SendEmail function on MacOS X
- Fixed MAX_HARD_EXEC_TIME not enforced in sync function introduced with v1.2 rewrite
- Fixed MAX_SOFT_EXEC_TIME not enforced bug introduced with v1.2 rewrite
- PRESERVE_ACL and PRESERVE_XATTR are ignored when local or remote OS is MacOS or msys or Cygwin
- Fixed PRESERVE_EXECUTABILITY being voluntarily omitted on MacOS X because of rsync syntax
- Fixed failed deletion rescheduling under BSD bug introduced with v1.2 rewrite
- merge.sh is now BSD and Mac compatible
- More work on unit tests:
- Unit tests are now BSD / MacOSX / MSYS / Cygwin and Windows 10 bash compatible
- Added more ACL tests
- Added directory soft deletion tests
- Added symlink and broken symlink copy / deletion tests
- Made unit tests more robust when aborted
- Simplified config files needed by unit tests (merged travis and local config files)
- Added timed execution tests
- More code compliance
- Lots of minor fixes

02 Sep 2016: osync v1.1.3 released
- Fixed installer for CYGWIN / MSYS environment
### 19 Nov 2016: osync v1.2-beta3 re-release

28 Aug 2016: osync v1.1.2 released
- Renamed sync.conf to sync.conf.example (thanks to https://github.com/hortimech)
- Fixed RunAfterHook possibly being executed twice
- Fixed soft deletion when SUDO_EXEC is enabled
- Fixed blocker bug where local tests tried GetRemoteOS anyway
- Fixed CentOS 5 compatibility bug for checking disk space introduced in beta3
- More Android / Busybox compatibility
- Made unit tests clean authorized_keys file after usage
- Added local unit test where remote OS connection would fail

06 Aug 2016: osync v1.1.1 released
- Fixed bogus rsync pattern file adding
- Fixed soft deletion always enabled on target
- Fixed problem with attributes file list function
- Fixed deletion propagation code
- Fixed missing deletion / backup directories message in verbose mode
### 18 Nov 2016: osync v1.2-beta3 released

27 Jul 2016: osync v1.1 released
- More msys and cygwin compatibility
- Logging begins now before any remote checks
- Improved process killing and process time control
- Redirected ERROR and WARN messages to stderr so that systemd catches them in its journal
- Added systemd unit files
- Added an option to ignore ssh known hosts (use with caution, can lead to security risks), also updated upgrade script accordingly
- Added optional installation statistics
- Fixed a nasty bug with log writing and tree_list function
- Improved mail fallback
- Improved more logging
- Fixed conflict prevalence being target in quicksync mode
- Fixed file attributes not being updated correctly when file mtime is not altered (Big thanks to vstefanoxx)
- Better upgrade script (adding missing new config values)
- More fixes for GNU / non-GNU versions of mail command
- Added bogus config file checks & environment checks
- Added delta copies disable option
- Revamped rsync patterns to allow include and exclude patterns
- Fully merged codebase with obackup
- Passed shellCheck.net
- Simplified EscapeSpaces to simple bash substitution
- Corrected a lot of minor warnings in order to make code more bullet proof
- Added v1.0x to v1.1 upgrade script
- Added (much) more verbose debugging (and possibility to remove debug code to gain speed)
- Force tree function to overwrite earlier tree files
- Add Logger DEBUG to all eval statements
- Unlocking happens after TrapQuit has successfully killed any child processes
- Replace child_pid by $? directly, add a better sub process killer in TrapQuit
- Refactor [local master, local slave, remote slave] code to [local, remote][initiator, target] code
- Renamed a lot of code in order to prepare v2 code (master becomes initiator, slave becomes target, sync_id becomes instance_id)
- Added some automatic checks in code, for _DEBUG mode (and _PARANOIA_DEBUG now)
- Improved Logging
- Updated osync to be fully compliant with coding style
- Uploaded coding style manifest
- Added LSB info to init script for Debian based distros
- Improved locking / unlocking replicas
- Fixed killing local pid that holds the lock (bug introduced in v1.2 rewrite)
- Allow remote unlocking when INSTANCE_ID of lock matches local INSTANCE_ID
- Fixed failed deletions re-propagation bug introduced in v1.2 rewrite
- Faster remote OS detection
- New output switches, --no-prefix, --summary, --errors-only
- Added busybox (and Android Termux) support
- More portable file size functions
- More portable compression program commands
- More paranoia checks
- Added busybox sendmail support
- Added tls and ssl support for sendmail
- Added --skip-deletion support in config and quicksync modes
- Added possibility to skip deletion on initiator or target replica
- Prevent lock file racing condition (thanks to https://github.com/allter)
- Added ssh password file support
- Hugely improved unit tests
- Added conflict resolution tests
- Added softdeletion tests
- Added softdeletion cleanup tests
- Added lock tests
- Added skip-deletion tests
- Added configuration file tests
- Added upgrade script test
- Added basic daemon mode tests
- Simplified logger
- All fixes from v1.1.5

v0-v1.0x - Jun 2013 - Sep 2015
------------------------------
### 17 Oct 2016: osync v1.2-beta2 released

22 Jul. 2015: Osync v1.00a released
- Small improvements in osync-batch.sh time management
- Improved various logging on error
- Work in progress: Unit tests (initial tests written by onovy, Thanks again!)
- Small Improvements on install and ssh_filter scripts
- Improved ssh uri recognition (thanks to onovy)
- Fixed #22 (missing full path in soft deletion)
- Fixed #21 by adding portable shell readlink / realpath from https://github.com/mkropat/sh-realpath
- Added detection of osync.sh script in osync-batch.sh to overcome missing path in crontab
- Fixed osync-batch.sh script when osync is in an executable path like /usr/local/bin
- Fixed multiple keep logging messages since sleep time between commands has been lowered below a second
- Added optional checksum parameter for the paranoid :)
- Fixed typo in soft deletion code preventing logging slave deleted backup files
- Removed legacy lockfile code from init script
- Removed hardcoded program name from init script
- osync now propagates symlink deletions and moves symlinks without referents to deletion dir
- Upgrade script now has the ability to add any missing value
- Improved unit tests
- Added upgrade script test
- Added deletion propagation tests

01 Apr. 2015: Osync v1.00pre
- Improved and refactored the soft deletion routine by merging conflict backup and soft deletion
- Reworked soft deletion code to handle a case where a top level directory gets deleted even if the files contained in it are not old enough (this obviously shouldn't happen on most FS)
- Added more logging
- Merged various fixes from onovy (http://github.com/onovy) Thanks!
- Lowered sleep time between commands
- Check if master and slave directories are the same
- Check script parameters in osync.sh and osync-batch.sh
- Run sync after timeout in --on-changes mode when no changes are detected (helps propagate slave changes)
- Fix for locking in --on-changes mode (child should lock/unlock, master process shouldn't unlock)
- Remote user is now optional in quicksync mode
- Replaced default script execution storage from /dev/shm to /tmp because some rootkit detection software doesn't like this
- Fixed bogus error in DEBUG for quicksync mode where no max execution time is set
- Prevent debug mode from sending alert emails
- Fixed an infamous bug introduced with exclude pattern globbing preventing multiple excludes from being processed
- Fixed an issue with empty RSYNC_EXCLUDE_FILES
- Lowered default compression level for email alerts (for low end systems)
- Prevent exclude pattern globbing before the pattern reaches the rsync cmd
- Fixed some missing child pids for time control to work
- Prevent creation of a sync-id less log file when DEBUG is set
- Added a sequential run batch script that can rerun failed batches
- Fixed an issue where a failed task never gets resumed after a successful file replication phase
- Added experimental partial downloads support for rsync so big files can be resumed on slow links
- Added the ability to keep partial downloads that can be resumed on next run (useful for big files on slow links that reach max execution time)
- Moved msys specific code to Init(Local|Remote)OSSettings
- Added a patch by igngvs to fix some issues with Rsync Exclude files
- Added a patch by Gary Clark to fix some issues with remote deletion
- Minor fixes from obackup codebase
- Added compression method fallback (xz, lzma, pigz and gzip)
- Removed unused code
- Fixed remote OS detection when a banner is used on SSH
- Added a routine that reinjects failed deletions for next run in order to prevent bringing back files when deletion failed with permission issues
- Added treat dir symlink as dir parameter
### 30 Aug 2016: osync v1.2-beta released

27 May 2014: Osync 0.99 RC3
- Additional delete fix for *BSD and MSYS (deleted file list not created right)
- Fixed dry mode to use non dry after run treelists to create delete lists
- Added follow symlink parameter
- Minor fixes in parameter list when bandwidth parameter is used
- Added some additional checks for *BSD and MacOS environments
- Changed /bin/bash to /usr/bin/env bash for sanity on other systems, also check for bash presence before running
- Changed default behavior for quick sync tasks: Will try to resume failed sync tasks once
- Some code cleanup for state filenames and sync action names
- Fixed deletion propagation (again). Rsync is definitely not designed to delete a list of files / folders. Rsync replaced by rm function which downloads deletion list to remote system.
- Added path detection for exclude list file
- Added a simple init script and an install script
- Fixed an issue with MacOSX using rsync -E differently than other *nix (Thanks to Pierre Clement)
- Multislave asynchronous task support (Thanks to Ulrich Norbisrath)
- This breaks compatibility with older osync runs. Add the SYNC_ID suffix to older state files to keep deleted file information.
- Added an easier debug setting, i.e. DEBUG=yes ./osync.sh (Again, thanks to Ulrich Norbisrath)
- Added hardlink preservation (Thanks to Ulrich Norbisrath)
- Added external exclusion file support (Thanks to Pierre Clement)
- Fixed some typos in doc and program itself (Thanks to Pierre Clement)
- More detailed verbose status messages
- More detailed status messages
- Fixed a bug preventing propagation of empty directory deletions
- Fixed a nasty bug preventing writing lock files on remote system as superuser
- Gzipped logs are now deleted once sent
- Fixed some typos (thanks to Pavel Kiryukhin)
- Fixed a bug with double trailing slashes in certain scenarios
- Sync execution no longer fails if files vanish during execution; vanished files also get logged
- Add eventual "comm -23" replacement by "grep -F -x -v -f" to enhance compatibility with other platforms (comm is still much faster than grep, so we keep it)
- Replaced xargs rm with find -exec rm to better handle file names in soft deletion
- Fixed soft deletion not happening with relative paths
- Improved process termination behavior
- More code merging and cleanup
- Fixed a bug preventing deleted files in subdirectories propagation (Thanks to Richard Faasen for pointing that out)
- Some more function merge in sync process
- Dry mode won't create or modify state files anymore and will use dry-state files instead
- Improved file monitor mode
- Added possibility to daemonize osync in monitor mode
- Added monitor mode, which will launch a sync task upon file operations on master replica
- Changed conf file default format for ssh uri (old format is still compatible)
- Added ssh uri support for slave replicas
- Improved execution hooks logs
- Various bugfixes introduced with function merge
- Added basic MacOS X support (yet not fully tested)
- Merged tree list functions into one
- Added possibility to quick sync two local directories without any prior configuration (see the sketch after this list)
- Added time control on OS detection
- Rendered more recent code compatible with bash 3.2+
- Added a PKGBUILD file for ArchLinux thanks to Shadowigor (https://github.com/shaodwigor). Builds available at https://aur.archlinux.org/packages/osync/
- Some more code compliance & more paranoia checks
- Added more preflight checks
- Logs sent by mail are easier to read
- Better subject (currently running or finished run)
- Fixed bogus double log sent in alert mails
- Made unix signals posix compliant
- Config file upgrade script now updates header
- Improved batch runner
- Made keep logging value configurable and not mandatory
- Fixed handling of processes in uninterruptible sleep state
- Parallelized sync functions
- Rewrite sync resume process
- Added options to ignore permissions, ownership and groups
- Refactored WaitFor... functions into one
- Improved execution speed
- Rewrite sync resume process
- Added parallel execution for most secondary functions
- Lowered sleep time in wait functions
- Removed trivial sleep and forking in remote deletion code, send the whole function to background instead
- Unlock functions no longer launched if locking failed
- Improved WaitFor... functions to accept multiple pids
- Added KillAllChilds function to accept multiple pids
- Improved logging
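A sketch of the no-configuration quick sync and the easy debug toggle mentioned in the 0.99 RC3 entry above (the directory paths are placeholders; the --initiator/--target flag pair is an assumption, while DEBUG=yes ./osync.sh is quoted from the entry itself):

```bash
# Quick sync of two local directories without a config file, with debug output.
DEBUG=yes ./osync.sh --initiator=/home/user/dirA --target=/home/user/dirB
```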

02 Nov. 2013: Osync 0.99 RC2
- Minor improvement on operating system detection
- Improved RunLocalCommand execution hook
- Minor improvements on permission checks
- Made more portability improvements (mostly for FreeBSD, must be run with bash shell)
- Added local and remote operating system detection
- Added forced usage of MSYS find on remote MSYS hosts
- Updated MSYS handling
- Merged MSYS (MinGW minimal system) bash compatibility under Windows from Obackup
- Added check for /var/log directory
- Added check for shared memory directory
- Added alternative way to kill child processes for other OSes and especially for MSYS (which is a very odd way)
- Added Sendemail.exe support for Windows alerting
- Replaced which command by type -p, as it is more portable
- Added support for ping.exe from Windows
- Forced usage of MSYS find instead of Windows' find.exe on master
- Added an optional remote rsync executable path parameter
- Fixed an issue with CheckConnectivity3rdPartyHosts
- Added an option to stop execution if a local / remote command fails
- Improved forced quit command by killing all child processes
- Before / after commands are now ignored on dryruns
- Improved verbose output
- Fixed various typos
- Enforced CheckConnectivityRemoteHost and CheckConnectivity3rdPartyHosts checks (if one of these fails, osync is stopped)
### 17 Nov 2016: osync v1.1.5 released

18 Aug. 2013: Osync 0.99 RC1
- Added possibility to change default logfile
- Fixed a possible error upon master replica lock check
- Fixed exclude directories with spaces in names generating errors on master replica tree functions
- Dryruns won't create after run tree lists and therefore not prevent building real run delete lists
- Softdelete and conflict backup functions are now time controlled
- Added bandwidth limit
- Update and delete functions now run rsync with the --stats parameter
- Fixed LoadConfigFile function not warning on wrong config file
- Added --no-maxtime parameter for syncing big changes without enforcing execution time checks
- Backported unit tests from v1.2-beta allowing to fix the following
- Allow quicksync mode to specify rsync include / exclude patterns as environment variables
- Added default path separator char in quicksync mode for multiple includes / exclusions
- Local runs should not check for remote connectivity
- Fixed backups going into the root of the replica instead of .osync_workdir/backups
- Fixed error alerts that could not be triggered from subprocesses
- Fixed remote locked targets being unlocked in any case

03 Aug. 2013: beta 3 milestone
- Softdelete functions do now honor --dry switch
- Simplified sync delete functions
- Enhanced compatibility with different charsets in filenames
- Added CentOS 5 compatibility (comm v5.97 without --nocheck-order function replaced by sort)
- Tree functions now honor supplementary rsync arguments
- Tree functions now honor exclusion lists
### 10 Nov 2016: osync v1.1.4 released

01 Aug. 2013: beta 2 milestone
- Fixed an issue with spaces in directory trees
- Fixed an issue with recursive directory trees
- Revamped a bit of code to add bash 3.2 compatibility
- Fixed a corner case with sending alerts with logfile attachments when osync is used by multiple users

24 Jul. 2013: beta milestone
- Fixed some bad error handling in CheckMasterSlaveDirs and LockDirectories
- Added support for spaces in sync dirs and exclude lists
- Fixed false exit code if no remote slave lock present
- Added minimum disk space checks
- Added osync support in ssh_filter.sh
- Added support for sudo exec on remote slave
- Added support for alternative rsync executable
- Added support for spaces in sync directory names
- Added support for ACL and xattr
- Added --force-unlock parameter to bypass any existing locks on replicas
- Added full remote support for slave replica
- Improved error detection
- Made some changes in execution hook output
- Fixed an issue with task execution handling exit codes
- Added master and slave replicas lock functionality
- Added rsync exclude patterns support
- Improved backup items, can now have multiple backups of the same file
- Added maximum number of resume tries before trying a fresh stateless execution
- Added possibility to resume a sync after an error
- Improved task execution time handling
- Improved SendAlert handling
- Fixed cleanup launched even if DEBUG=yes
- Added verbose rsync output
- Added --dry and --silent parameters
- Added time control
- Added master/slave conflict prevalence option
- Added soft-deleted items
- Added backup items in case of conflict

### 02 Sep 2016: osync v1.1.3 released

19 Jun. 2013: Project begin as Obackup fork
- Fixed installer for CYGWIN / MSYS environment

### 28 Aug 2016: osync v1.1.2 released

- Renamed sync.conf to sync.conf.example (thanks to https://github.com/hortimech)
- Fixed RunAfterHook possibly being executed twice
- Fixed soft deletion when SUDO_EXEC is enabled

### 06 Aug 2016: osync v1.1.1 released

- Fixed bogus rsync pattern file adding
- Fixed soft deletion always enabled on target
- Fixed problem with attributes file list function
- Fixed deletion propagation code
- Fixed missing deletion / backup directories message in verbose mode

### 27 Jul 2016: osync v1.1 released

- More msys and cygwin compatibility
- Logging begins now before any remote checks
- Improved process killing and process time control
- Redirected ERROR and WARN messages to stderr so that systemd catches them in its journal
- Added systemd unit files
- Added an option to ignore ssh known hosts (use with caution, can lead to security risks), also updated upgrade script accordingly
- Added optional installation statistics
- Fixed a nasty bug with log writing and tree_list function
- Improved mail fallback
- Improved more logging
- Fixed conflict prevalence being target in quicksync mode
- Fixed file attributes not being updated correctly when file mtime is not altered (Big thanks to vstefanoxx)
- Better upgrade script (adding missing new config values)
- More fixes for GNU / non-GNU versions of mail command
- Added bogus config file checks & environment checks
- Added delta copies disable option
- Revamped rsync patterns to allow include and exclude patterns
- Fully merged codebase with obackup
- Passed shellCheck.net
- Simplified EscapeSpaces to simple bash substitution
- Corrected a lot of minor warnings in order to make code more bullet proof
- Added v1.0x to v1.1 upgrade script
- Added (much) more verbose debugging (and possibility to remove debug code to gain speed)
- Force tree function to overwrite earlier tree files
- Add Logger DEBUG to all eval statements
- Unlocking happens after TrapQuit has successfully killed any child processes
- Replace child_pid by $? directly, add a better sub process killer in TrapQuit
- Refactor [local master, local slave, remote slave] code to [local, remote][initiator, target] code
- Renamed a lot of code in order to prepare v2 code (master becomes initiator, slave becomes target, sync_id becomes instance_id)
- Added some automatic checks in code, for _DEBUG mode (and _PARANOIA_DEBUG now)
- Improved Logging
- Updated osync to be fully compliant with coding style
- Uploaded coding style manifest
- Added LSB info to init script for Debian based distros
|
||||
|
||||
## v0-v1.0x - Jun 2013 - Sep 2015
|
||||
|
||||
### 22 Jul. 2015: Osync v1.00a released
|
||||
|
||||
- Small improvements in osync-batch.sh time management
|
||||
- Improved various logging on error
|
||||
- Work in progress: Unit tests (initial tests written by onovy, Thanks again!)
|
||||
- Small Improvements on install and ssh_filter scripts
|
||||
- Improved ssh uri recognition (thanks to onovy)
|
||||
- Fixed #22 (missing full path in soft deletion)
|
||||
- Fixed #21 by adding portable shell readlink / realpath from https://github.com/mkropat/sh-realpath
|
||||
- Added detection of osync.sh script in osync-batch.sh to overcome missing path in crontab
|
||||
- Fixed osync-batch.sh script when osync is in executable path like /usr/local/bin
|
||||
- Fixed multiple keep logging messages since sleep time between commands has been lowered under a second
|
||||
- Added optional checksum parameter for the paranoid :)
|
||||
- Fixed typo in soft deletion code preventing logging slave deleted backup files
|
||||
- Removed legacy lockfile code from init script
|
||||
- Removed hardcoded program name from init script
|
||||
|
||||
### 01 Apr. 2015: Osync v1.00pre
|
||||
|
||||
- Improved and refactored the soft deletion routine by merging conflict backup and soft deletion
|
||||
- Reworked soft deletion code to handle a case where a top level directory gets deleted even if the files contained in it are not old enough (this obviously shouldn't happen on most FS)
|
||||
- Added more logging
|
||||
- Merged various fixes from onovy (http://github.com/onovy) Thanks!
|
||||
- Lowered sleep time between commands
|
||||
- Check if master and slave directories are the same
|
||||
- Check script parameters in osync.sh and osync-batch.sh
|
||||
- Run sync after timeout in --on-changes mode when no changes are detected (helps propagate slave changes)
|
||||
- Fix for locking in --on-changes mode (child should lock/unlock, master process shouldn't unlock)
|
||||
- Remote user is now optional in quicksync mode
|
||||
- Replaced default script execution storage from /dev/shm to /tmp because some rootkit detection software doesn't like this
|
||||
- Fixed bogus error in DEBUG for quicksync mode where no max execution time is set
|
||||
- Prevent debug mode from sending alert emails
|
||||
- Fixed an infamous bug introduced with exclude pattern globbing preventing multiple excludes from being processed
|
||||
- Fixed an issue with empty RSYNC_EXCLUDE_FILES
|
||||
- Lowered default compression level for email alerts (for low end systems)
|
||||
- Prevent exclude pattern globbing before the pattern reaches the rsync cmd
|
||||
- Fixed some missing child pids for time control to work
|
||||
- Prevent creation of a sync-id less log file when DEBUG is set
|
||||
- Added a sequential run batch script that can rerun failed batches
|
||||
- Fixed an issue where a failed task never gets resumed after a successful file replication phase
|
||||
- Added experimental partial downloads support for rsync so big files can be resumed on slow links
|
||||
- Added the ability to keep partial downloads that can be resumed on next run (useful for big files on slow links that reach max execution time)
|
||||
- Moved msys specific code to Init(Local|Remote)OSSettings
|
||||
- Added a patch by igngvs to fix some issues with Rsync Exclude files
|
||||
- Added a patch by Gary Clark to fix some issues with remote deletion
|
||||
- Minor fixes from obackup codebase
|
||||
- Added compression method fallback (xz, lzma, pigz and gzip)
|
||||
- Removed unused code
|
||||
- Fixed remote OS detection when a banner is used on SSH
|
||||
- Added a routine that reinjects failed deletions for next run in order to prevent files from being brought back when deletion failed with permission issues
|
||||
- Added treat dir symlink as dir parameter
|
||||
|
||||
### 27 May 2014: Osync 0.99 RC3
|
||||
|
||||
- Additional delete fix for *BSD and MSYS (deleted file list not created right)
|
||||
- Fixed dry mode to use non dry after run treelists to create delete lists
|
||||
- Added follow symlink parameter
|
||||
- Minor fixes in parameter list when bandwidth parameter is used
|
||||
- Added some additional checks for *BSD and MacOS environments
|
||||
- Changed /bin/bash to /usr/bin/env bash for sanity on other systems, also check for bash presence before running
|
||||
- Changed default behavior for quick sync tasks: Will try to resume failed sync tasks once
|
||||
- Some code cleanup for state filenames and sync action names
|
||||
- Fixed deletion propagation (again). Rsync is definitely not designed to delete a list of files / folders. Rsync replaced by rm function which downloads deletion list to remote system.
|
||||
- Added path detection for exclude list file
|
||||
- Added a simple init script and an install script
|
||||
- Fixed an issue with MacOSX using rsync -E differently than other *nix (Thanks to Pierre Clement)
|
||||
- Multislave asynchronous task support (Thanks to Ulrich Norbisrath)
|
||||
- This breaks compat with elder osync runs. Add the SYNC_ID suffix to elder state files to keep deleted file information.
|
||||
- Added an easier debug setting i.e DEBUG=yes ./osync.sh (Again, thanks to Ulrich Norbisrath)
|
||||
- Added hardlink preservation (Thanks to Ulrich Norbisrath)
|
||||
- Added external exclusion file support (Thanks to Pierre Clement)
|
||||
- Fixed some typos in doc and program itself (Thanks to Pierre Clement)
|
||||
- More detailed verbose status messages
|
||||
- More detailed status messages
|
||||
- Fixed a bug preventing propagation of empty directory deletions
|
||||
- Fixed a nasty bug preventing writing lock files on remote system as superuser
|
||||
- Gzipped logs are now deleted once sent
|
||||
- Fixed some typos (thanks to Pavel Kiryukhin)
|
||||
- Fixed a bug with double trailing slashes in certain scenarios
|
||||
- Sync execution no longer fails if files vanish during execution; vanished files also get logged
|
||||
- Add eventual "comm -23" replacement by "grep -F -x -v -f" to enhance compatibility with other platforms (comm is still much faster than grep, so we keep it)
|
||||
- Replaced xargs rm with find -exec rm to better handle file names in soft deletion
|
||||
- Fixed soft deletion not happening with relative paths
|
||||
- Improved process termination behavior
|
||||
- More code merging and cleanup
|
||||
- Fixed a bug preventing deleted files in subdirectories propagation (Thanks to Richard Faasen for pointing that out)
|
||||
- Some more function merge in sync process
|
||||
- Dry mode won't create or modify state files anymore and will use dry-state files instead
|
||||
- Improved file monitor mode
|
||||
- Added possibility to daemonize osync in monitor mode
|
||||
- Added monitor mode, which will launch a sync task upon file operations on master replica
|
||||
- Changed conf file default format for ssh uri (old format is still compatible)
|
||||
- Added ssh uri support for slave replicas
|
||||
- Improved execution hooks logs
|
||||
- Various bugfixes introduced with function merge
|
||||
- Added basic MacOS X support (yet not fully tested)
|
||||
- Merged tree list functions into one
|
||||
- Added possibility to quick sync two local directories without any prior configuration
|
||||
- Added time control on OS detection
|
||||
|
||||
### 02 Nov. 2013: Osync 0.99 RC2
|
||||
|
||||
- Minor improvement on operating system detection
|
||||
- Improved RunLocalCommand execution hook
|
||||
- Minor improvements on permission checks
|
||||
- Made more portability improvements (mostly for FreeBSD, must be run with bash shell)
|
||||
- Added local and remote operating system detection
|
||||
- Added forced usage of MSYS find on remote MSYS hosts
|
||||
- Updated MSYS handling
|
||||
- Merged MSYS (MinGW minimal system) bash compatibility under Windows from Obackup
|
||||
- Added check for /var/log directory
|
||||
- Added check for shared memory directory
|
||||
- Added alternative way to kill child processes for other OSes and especially for MSYS (which is a very odd way)
|
||||
- Added Sendemail.exe support for windows Alerting
|
||||
- Replaced the which command with type -p, as it is more portable
|
||||
- Added support for ping.exe from windows
|
||||
- Forced usage of MSYS find instead of Windows' find.exe on master
|
||||
- Added an optional remote rsync executable path parameter
|
||||
- Fixed an issue with CheckConnectivity3rdPartyHosts
|
||||
- Added an option to stop execution if a local / remote command fails
|
||||
- Improved forced quit command by killing all child processes
|
||||
- Before / after commands are now ignored on dryruns
|
||||
- Improved verbose output
|
||||
- Fixed various typos
|
||||
- Enforced CheckConnectivityRemoteHost and CheckConnectivity3rdPartyHosts checks (if one of these fails, osync is stopped)
|
||||
|
||||
### 18 Aug. 2013: Osync 0.99 RC1
|
||||
|
||||
- Added possibility to change default logfile
|
||||
- Fixed a possible error upon master replica lock check
|
||||
- Fixed exclude directories with spaces in names generating errors in master replica tree functions
|
||||
- Dry runs won't create after-run tree lists and therefore won't prevent building real-run delete lists
|
||||
- Softdelete and conflict backup functions are now time controlled
|
||||
- Added bandwidth limit
|
||||
- Update and delete functions now run rsync with --stats parameter
|
||||
- Fixed LoadConfigFile function not warning on wrong config file
|
||||
- Added --no-maxtime parameter to sync big changes without enforcing execution time checks
|
||||
|
||||
### 03 Aug. 2013: beta 3 milestone
|
||||
|
||||
- Softdelete functions do now honor --dry switch
|
||||
- Simplified sync delete functions
|
||||
- Enhanced compatibility with different charsets in filenames
|
||||
- Added CentOS 5 compatibility (comm v5.97 without --nocheck-order function replaced by sort)
|
||||
- Tree functions now honor supplementary rsync arguments
|
||||
- Tree functions now honor exclusion lists
|
||||
|
||||
### 01 Aug. 2013: beta 2 milestone
|
||||
|
||||
- Fixed an issue with spaces in directory trees
|
||||
- Fixed an issue with recursive directory trees
|
||||
- Revamped the code a bit to add bash 3.2 compatibility
|
||||
|
||||
### 24 Jul. 2013: beta milestone
|
||||
|
||||
- Fixed some bad error handling in CheckMasterSlaveDirs and LockDirectories
|
||||
- Added support for spaces in sync dirs and exclude lists
|
||||
- Fixed false exit code if no remote slave lock present
|
||||
- Added minimum disk space checks
|
||||
- Added osync support in ssh_filter.sh
|
||||
- Added support for sudo exec on remote slave
|
||||
- Added support for alternative rsync executable
|
||||
- Added support for spaces in sync directories names
|
||||
- Added support for ACL and xattr
|
||||
- Added --force-unlock parameter to bypass any existing locks on replicas
|
||||
- Added full remote support for slave replica
|
||||
- Improved error detection
|
||||
- Made some changes in execution hook output
|
||||
- Fixed an issue with task execution handling exit codes
|
||||
- Added master and slave replicas lock functionality
|
||||
- Added rsync exclude patterns support
|
||||
- Improved backup items, can now have multiple backups of the same file
|
||||
- Added maximum number of resume tries before trying a fresh stateless execution
|
||||
- Added possibility to resume a sync after an error
|
||||
- Improved task execution time handling
|
||||
- Improved SendAlert handling
|
||||
- Fixed cleanup launched even if DEBUG=yes
|
||||
- Added verbose rsync output
|
||||
- Added --dry and --silent parameters
|
||||
- Added time control
|
||||
- Added master/slave conflict prevalence option
|
||||
- Added soft-deleted items
|
||||
- Added backup items in case of conflict
|
||||
|
||||
### 19 Jun. 2013: Project begin as Obackup fork
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
## KNOWN ISSUES
|
||||
|
||||
- Cannot finish sync if one replica contains a directory and the other replica contains a file named the same way (Unix doesn't allow this)
|
||||
- Daemon mode monitors changes in the whole replica directories, without honoring exclusion lists
|
||||
- Soft deletion does not honor exclusion lists (i.e. soft deleted files will be cleaned regardless of any exclude pattern because they are in the deleted folder)
|
||||
- Colors don't work in mac shell
|
|
@ -1,6 +0,0 @@
|
|||
KNOWN ISSUES
|
||||
------------
|
||||
|
||||
- Cannot finish sync if one replica contains a directory and the other replica contains a file named the same way (Unix doesn't allow this)
|
||||
- Soft deletion does not honor exclusion lists (ie soft deleted files will be cleaned regardless of any exlude pattern because they are in the deleted folder)
|
||||
- Colors don't work in mac shell
|
|
@ -1,4 +1,4 @@
|
|||
Copyright (c) 2013-2016, Orsiris de Jong. ozy@netpower.fr
|
||||
Copyright (c) 2013-2023, Orsiris de Jong. ozy@netpower.fr
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
|
@ -0,0 +1,4 @@
|
|||
When submitting a pull request, please modify the files in dev directory rather than those generated on-the-fly.
|
||||
|
||||
You may find all code contained in osync.sh in n_osync.sh and ofunctions.sh
|
||||
You may run your modified code by using `merge.sh osync` in order to generate ../osync.sh
|
250
README.md
250
README.md
|
@ -1,60 +1,82 @@
|
|||
# osync [](https://travis-ci.org/deajan/osync) [](https://opensource.org/licenses/BSD-3-Clause) [](https://github.com/deajan/osync/releases/latest)
|
||||
# osync
|
||||
[](https://opensource.org/licenses/BSD-3-Clause)
|
||||
[](https://github.com/deajan/osync/releases/latest)
|
||||
[](http://isitmaintained.com/project/deajan/osync "Percentage of issues still open")
|
||||
[](https://www.codacy.com/app/ozy/osync?utm_source=github.com&utm_medium=referral&utm_content=deajan/osync&utm_campaign=Badge_Grade)
|
||||
[](https://github.com/deajan/osync/actions/workflows/linux.yml/badge.svg)
|
||||
[](https://github.com/deajan/osync/actions/workflows/windows.yml/badge.svg)
|
||||
[](https://github.com/deajan/osync/actions/workflows/macos.yml/badge.svg)
|
||||
|
||||
A two way filesync script running on bash Llinux, BSD, Android, MacOSX, Cygwin, MSYS2, Win10 bash and virtually any system supporting bash).
|
||||
File synchronization is bidirectional, and can be run manually, as scheduled task, or triggered on file changes in deamon mode.
|
||||
|
||||
A two way filesync script running on bash Linux, BSD, Android, MacOSX, Cygwin, MSYS2, Win10 bash and virtually any system supporting bash.
|
||||
File synchronization is bidirectional, and can be run manually, as scheduled task, or triggered on file changes in monitor mode.
|
||||
It is a command line rsync wrapper with a lot of additional features baked in.
|
||||
|
||||
This is a quickstart guide, you can find the full documentation on the author's site.
|
||||
This is a quickstart guide, you can find the full documentation on the [author's site](http://www.netpower.fr/osync).
|
||||
|
||||
About
|
||||
-----
|
||||
osync provides the following capabilities
|
||||
## About
|
||||
|
||||
- Local-Local and Local-Remote sync
|
||||
- Fault tolerance with resume scenarios
|
||||
- File ACL and extended attributes synchronization
|
||||
- Full script Time control
|
||||
- Soft deletions and multiple backups handling
|
||||
- Before / after run command execution
|
||||
- Email alerts
|
||||
- Logging facility
|
||||
- Directory monitoring
|
||||
- Running on schedule or as daemon
|
||||
- Batch runner for multiple sync tasks with rerun option for failed sync tasks
|
||||
osync provides the following capabilities:
|
||||
|
||||
- Local-Local and Local-Remote sync
|
||||
- Fault tolerance with resume scenarios
|
||||
- POSIX ACL and extended attributes synchronization
|
||||
- Full script time control
|
||||
- Soft deletions and multiple backups handling
|
||||
- Before/after run command execution
|
||||
- Email alerts
|
||||
- Logging facility
|
||||
- Directory monitoring
|
||||
- Running on schedule or as daemon
|
||||
- Batch runner for multiple sync tasks with rerun option for failed sync tasks
|
||||
|
||||
osync is a stateful synchronizer. This means it's agentless and doesn't have to monitor files for changes. Instead, it compares replica file lists between two runs.
|
||||
A full run takes about 2 seconds on a local-local replication and about 7 seconds on a local-remote replication.
|
||||
Disabling some features, like file attributes preservation and disk space checks, may speed up execution.
|
||||
osync uses a initiator / target sync schema. It can sync local to local or local to remote directories. By definition, initiator replica is always a local directory on the system osync runs on.
|
||||
osync uses pidlocks to prevent multiple concurrent sync processes on/to the same initiator / target replica.
|
||||
osync uses an initiator/target sync schema. It can sync local to local or local to remote directories. By definition, the initiator replica is always a local directory on the system osync runs on.
|
||||
osync uses pidlocks to prevent multiple concurrent sync processes on/to the same initiator/target replica.
|
||||
You may launch concurrent sync processes on the same system as long as the replicas to synchronize are different.
|
||||
Multiple osync tasks may be launched sequentially by the osync-batch tool.
|
||||
|
||||
Currently, it has been tested on CentOS 5.x, 6.x, 7.x, Fedora 22-25, Debian 6-8, Linux Mint 14-18, Ubuntu 12.04-12.10, FreeBSD 8.3-11, Mac OS X and pfSense 2.3x.
|
||||
Microsoft Windows is supported via MSYS or Cygwin and now via Windows 10 bash.
|
||||
Android support works via busybox (tested on Termux).
|
||||
## Tested platforms
|
||||
|
||||
| Operating system | Version |
|
||||
|------------------|------------------------|
|
||||
| AlmaLinux | 9 |
|
||||
| Android\* | Not known |
|
||||
| CentOS | 5.x, 6.x, 7.x |
|
||||
| Fedora | 22-25 |
|
||||
| FreeBSD | 8.3-11 |
|
||||
| Debian | 6-11 |
|
||||
| Linux Mint | 14-18 |
|
||||
| macOS | Not known |
|
||||
| pfSense | 2.3.x |
|
||||
| QTS (x86) | 4.5.1 |
|
||||
| Ubuntu | 12.04-22.04 |
|
||||
| Windows\*\* | 10 |
|
||||
|
||||
\* via Termux.
|
||||
\*\* via MSYS, Cygwin and WSL.
|
||||
|
||||
Some users also have successfully used osync on Gentoo and created an OpenRC init script for it.
|
||||
|
||||
## Installation
|
||||
|
||||
Installation
|
||||
------------
|
||||
osync has been designed not to delete any data, but rather to make backups of conflicting files or soft delete them.
|
||||
Nevertheless, you should always have a proper backup of your data before trying a new sync tool.
|
||||
|
||||
You may get osync on the author's site (stable version) or on GitHub (stable or latest dev snapshot).
|
||||
Getting osync via author's site on **http://www.netpower.fr/osync**
|
||||
|
||||
$ wget http://www.netpower.fr/projects/osync/osync.v1.2.tar.gz
|
||||
$ tar xvf osync.v1.2.tar.gz
|
||||
|
||||
Getting osync via github (remove the -b "stable" if you want latest dev snapshot)
|
||||
Getting osync via GitHub (remove the -b "stable" if you want latest dev snapshot)
|
||||
|
||||
$ git clone -b "stable" https://github.com/deajan/osync
|
||||
$ cd osync
|
||||
$ bash install.sh
|
||||
|
||||
There is also a RPM file that should fit RHEL/CentOS/Fedora and basically any RPM based distro, see the github release.
|
||||
Please note that RPM files will install osync to /usr/bin instead of /usr/bin in order to enforce good practices.
|
||||
The installer script accepts some parameters for automation. Launch `install.sh --help` for options.
|
||||
|
||||
osync will install itself to /usr/local/bin and an example configuration file will be installed to /etc/osync
|
||||
There is also an RPM file that should fit RHEL/CentOS/Fedora and basically any RPM based distro, see the GitHub release.
|
||||
Please note that RPM files will install osync to `/usr/bin` instead of `/usr/local/bin` in order to enforce good practices.
|
||||
|
||||
osync will install itself to `/usr/local/bin` and an example configuration file will be installed to `/etc/osync`.
|
||||
|
||||
osync needs to run with the bash shell. Using any other shell will most probably result in errors.
|
||||
If bash is not your default shell, you may invoke it using
|
||||
|
@ -65,78 +87,90 @@ On *BSD and BusyBox, be sure to have bash installed.
|
|||
|
||||
If you can't install osync, you may just copy osync.sh where you needed and run it from there.
|
||||
|
||||
Archlinux packages are available at https://aur.archlinux.org/packages/osync/ (thanks to Shadowigor, https://github.com/shadowigor)
|
||||
Arch Linux packages are available at <https://aur.archlinux.org/packages/osync/> (thanks to Shadowigor, <https://github.com/shadowigor>).
|
||||
|
||||
## Upgrade from previous configuration files
|
||||
|
||||
Since osync v1.1, the config file format has changed in semantics and new config options have been added.
|
||||
Also, master is now called initiator and slave is now called target.
|
||||
osync v1.2 also added multiple new configuration options.
|
||||
osync v1.3 also added multiple new configuration options.
|
||||
|
||||
You can upgrade all v1.0x-v1.2-dev config files by running the upgrade script
|
||||
You can upgrade all v1.0x-v1.3-dev config files by running the upgrade script
|
||||
|
||||
$ ./upgrade-v1.0x-v1.2x.sh /etc/osync/your-config-file.conf
|
||||
$ ./upgrade-v1.0x-v1.3x.sh /etc/osync/your-config-file.conf
|
||||
|
||||
The script will back up your config file, update its content and try to connect to initiator and target replicas to update the state dir.
|
||||
|
||||
Usage
|
||||
-----
|
||||
Osync can work with in three flavors: Quick sync mode, configuration file mode, and daemon mode.
|
||||
While quick sync mode is convenient to do fast syncs between some directories, a configuration file gives much more functionnality.
|
||||
Please use double quotes as path delimiters. Do not use escaped characters in path names.
|
||||
## Usage
|
||||
|
||||
Osync can work in 3 modes:
|
||||
1. [:rocket: Quick sync mode](#quick-sync-mode)
|
||||
2. [:gear: Configuration file mode](#configuration-file-mode)
|
||||
3. [:mag_right: Monitor mode](#monitor-mode)
|
||||
|
||||
> [!NOTE]
|
||||
> Please use double quotes as path delimiters. Do not use escaped characters in path names.
|
||||
|
||||
### <a id="quick-sync-mode"></a>:rocket: Quick sync mode
|
||||
|
||||
Quick sync mode is convenient to do fast syncs between some directories. However, the [configuration file mode](#configuration-file-mode) gives much more functionality.
|
||||
|
||||
QuickSync example
|
||||
-----------------
|
||||
# osync.sh --initiator="/path/to/dir1" --target="/path/to/remote dir2"
|
||||
# osync.sh --initiator="/path/to/another dir" --target="ssh://user@host.com:22//path/to/dir2" --rsakey=/home/user/.ssh/id_rsa_private_key_example.com
|
||||
|
||||
Summary mode
|
||||
------------
|
||||
osync may output only file changes and errors with the following
|
||||
#### Quick sync with minimal options
|
||||
|
||||
# osync.sh --initiator="/path/to/dir1" --target="/path/to/dir" --summary --errors-only --no-prefix
|
||||
|
||||
This also works in configuration file mode.
|
||||
|
||||
QuickSync with minimal options
|
||||
------------------------------
|
||||
In order to run osync the quickest way (without transferring file attributes, without soft deletion, without prior space checks and without remote connectivity checks), you may use the following:
|
||||
|
||||
# MINIMUM_SPACE=0 PRESERVE_ACL=no PRESERVE_XATTR=no SOFT_DELETE_DAYS=0 CONFLICT_BACKUP_DAYS=0 REMOTE_HOST_PING=no osync.sh --initiator="/path/to/another dir" --target="ssh://user@host.com:22//path/to/dir2" --rsakey=/home/user/.ssh/id_rsa_private_key_example.com
|
||||
|
||||
All the settings described here may also be configured in the conf file.
|
||||
|
||||
Running osync with a Configuration file
|
||||
---------------------------------------
|
||||
You'll have to customize the sync.conf file according to your needs.
|
||||
If you intend to sync a remote directory, osync will need a pair of private / public RSA keys to perform remote SSH connections.
|
||||
Also, running sync as superuser requires to configure /etc/sudoers file.
|
||||
Please read the documentation about remote sync setups.
|
||||
Once you've customized a sync.conf file, you may run osync with the following test run:
|
||||
### Summary mode
|
||||
|
||||
osync will output only file changes and errors with the following:
|
||||
|
||||
# osync.sh --initiator="/path/to/dir1" --target="/path/to/dir" --summary --errors-only --no-prefix
|
||||
|
||||
This also works in configuration file mode.
|
||||
|
||||
### <a id="configuration-file-mode"></a>:gear: Configuration file mode
|
||||
|
||||
You'll have to customize the `sync.conf` file according to your needs.
|
||||
|
||||
If you intend to sync a remote directory, osync will need a pair of private/public RSA keys to perform remote SSH connections. Also, running sync as superuser requires configuring the `/etc/sudoers` file.
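For instance, a minimal key setup might look like the following sketch (the key file name is only an example; any dedicated key pair will do):

$ ssh-keygen -t rsa -b 4096 -f /home/user/.ssh/id_rsa_osync
$ ssh-copy-id -i /home/user/.ssh/id_rsa_osync.pub user@host.com

The private key path is then given to osync with the `--rsakey` parameter (as in the quick sync examples above) or through your configuration file.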
|
||||
|
||||
> [!TIP]
|
||||
> Read the [example configuration file](https://github.com/deajan/osync/blob/master/sync.conf.example) for documentation about remote sync setups.
|
||||
|
||||
Once you've customized a `sync.conf` file, you may run osync with the following test run:
|
||||
|
||||
# osync.sh /path/to/your.conf --dry
|
||||
|
||||
If everything went well, you may run the actual configuration with one of the following:
|
||||
If everything went well, you may run the actual configuration with:
|
||||
|
||||
# osync.sh /path/to/your.conf
|
||||
|
||||
To display which files and attrs are actually synchronized and which files are to be soft deleted / are in conflict, use `--verbose` (you may mix it with `--silent` to output verbose input only in the log files):
|
||||
|
||||
# osync.sh /path/to/your.conf --verbose
|
||||
# osync.sh /path/to/your.conf --no-maxtime
|
||||
|
||||
Verbose option will display which files and attrs are actually synchronized and which files are to be soft deleted / are in conflict.
|
||||
You may mix "--silent" and "--verbose" parameters to output verbose input only in the log files.
|
||||
No-Maxtime option will disable execution time checks, which is usefull for big initial sync tasks that might take long time. Next runs should then only propagate changes and take much less time.
|
||||
Use `--no-maxtime` to disable execution time checks, which is useful for big initial sync tasks that might take a long time. Next runs should then only propagate changes and take much less time:
|
||||
|
||||
# osync.sh /path/to/your.conf --no-maxtime
|
||||
|
||||
Once you're confident about your fist runs, you may add osync as cron task like the following in /etc/crontab which would run osync every 30 minutes:
|
||||
Once you're confident about your first runs, you may add osync as a cron task like the following in `/etc/crontab` which would run osync every 30 minutes:
|
||||
|
||||
*/30 * * * * root /usr/local/bin/osync.sh /etc/osync/my_sync.conf --silent
|
||||
|
||||
Please note that this syntax works for RedHat / CentOS. On Debian you might want to remove the username (ie root) in order to make the crontab entry work.
|
||||
Please note that this syntax works for RedHat/CentOS. On Debian you might want to remove the username (i.e. root) in order to make the crontab entry work.
|
||||
|
||||
Batch mode
|
||||
----------
|
||||
You may want to sequentially run multiple sync sets between the same servers. In that case, osync-batch.sh is a nice tool that will run every osync conf file, and, if a task fails,
|
||||
### Batch mode
|
||||
|
||||
You may want to sequentially run multiple sync sets between the same servers. In that case, `osync-batch.sh` is a nice tool that will run every osync conf file, and, if a task fails,
|
||||
run it again if there's still some time left.
|
||||
The following example will run all .conf files found in /etc/osync, and retry 3 times every configuration that fails, if the whole sequential run took less than 2 hours.
|
||||
|
||||
To run all `.conf` files found in `/etc/osync`, and retry 3 times every configuration that fails if the whole sequential run takes less than 2 hours, use:
|
||||
|
||||
# osync-batch.sh --path=/etc/osync --max-retries=3 --max-exec-time=7200
|
||||
|
||||
|
@ -144,57 +178,75 @@ Having multiple conf files can then be run in a single cron command like
|
|||
|
||||
00 00 * * * root /usr/local/bin/osync-batch.sh --path=/etc/osync --silent
|
||||
|
||||
Daemon mode
|
||||
-----------
|
||||
Additionnaly, you may run osync in monitor mode, which means it will perform a sync upon file operations on initiator replica.
|
||||
This can be a drawback on functionnality versus scheduled mode because this mode only launches a sync task if there are file modifications on the initiator replica, without being able to monitor the target replica.
|
||||
Target replica changes are only synced when initiator replica changes occur, or when a given amount of time (default 600 seconds) passed without any changes on initiator replica.
|
||||
File monitor mode can also be launched as a daemon with an init script. Please read the documentation for more info.
|
||||
Note that monitoring changes requires inotifywait command (inotify-tools package for most Linux distributions).
|
||||
BSD, MacOS X and Windows are not yet supported for this operation mode, unless you find a inotify-tools package on these OSes.
|
||||
### <a id="monitor-mode"></a>:mag_right: Monitor mode
|
||||
|
||||
> [!NOTE]
|
||||
> Monitoring changes requires the inotifywait command (`inotify-tools` package for most Linux distributions). BSD, macOS and Windows are not yet supported for this operation mode, unless you find an `inotify-tools` package on these OSes.
|
||||
|
||||
Monitor mode will perform a sync upon file operations on the initiator replica. This can be a functional drawback compared to scheduled mode because this mode only launches a sync task if there are file modifications on the initiator replica, without being able to monitor the target replica. Target replica changes are only synced when initiator replica changes occur, or when a given amount of time (600 seconds by default) has passed without any changes on the initiator replica.
|
||||
|
||||
This mode can also be launched as a daemon with an init script. Please read the documentation for more info.
|
||||
|
||||
To use this mode, use `--on-changes`:
|
||||
|
||||
# osync.sh /etc/osync/my_sync.conf --on-changes
|
||||
|
||||
Osync file monitor mode may be run as system service with the osync-srv init script. Any configuration file found in /etc/osync will then create a osync daemon instance.
|
||||
You may run the install.sh script which should work in most cases or copy the files by hand (osync.sh to /usr/bin/local, osync-srv to /etc/init.d, sync.conf to /etc/osync).
|
||||
To run this mode as a system service with the `osync-srv` script, you can run the `install.sh` script (which should work in most cases) or copy the files by hand:
|
||||
- `osync.sh` to `/usr/local/bin`
|
||||
- `sync.conf` to `/etc/osync`
|
||||
- For InitV, `osync-srv` to `/etc/init.d`
|
||||
- For systemd, `osync-srv@.service` to `/usr/lib/systemd/system`
|
||||
- For OpenRC, `osync-srv-openrc` to `/etc/init.d/osync-srv-openrc`
|
||||
|
||||
For InitV (any configuration file found in `/etc/osync` will create an osync daemon instance when service is launched on initV):
|
||||
|
||||
$ service osync-srv start
|
||||
$ chkconfig osync-srv on
|
||||
|
||||
Systemd specific (one service per config file)
|
||||
For systemd, launch service (one service per config file to launch) with:
|
||||
|
||||
$ systemctl start osync-srv@configfile.conf
|
||||
$ systemctl enable osync-srv@configfile.conf
|
||||
|
||||
Security enhancements
|
||||
---------------------
|
||||
For OpenRC (user contrib), launch service (one service per config file to launch) with:
|
||||
|
||||
$ rc-update add osync-srv.configfile default
|
||||
|
||||
## Security enhancements
|
||||
|
||||
Remote SSH connection security can be improved by limiting what hostnames may connect, disabling some SSH options and using the ssh filter.
|
||||
Please read the full documentation in order to configure the ssh filter.
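As a rough illustration only (the exact options accepted by ssh_filter.sh are described in the full documentation, so treat the path and restrictions below as assumptions rather than the required syntax), the general idea is to prefix the synchronization key in the remote authorized_keys file with a forced command and restrictive options:

# illustrative sketch only - see the full documentation for the exact ssh_filter.sh setup
command="/usr/local/bin/ssh_filter.sh",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa AAAA...restofkey... user@initiator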
|
||||
|
||||
Contributions
|
||||
-------------
|
||||
## Contributions
|
||||
|
||||
All kinds of contributions are welcome.
|
||||
|
||||
When submitting a PR, please be sure to modify files in dev directory (dev/n_osync.sh, dev/ofunctions.sh, dev/common_install.sh etc) as most of the main files are generated via merge.sh.
|
||||
When submitting a PR, please be sure to modify files in the dev directory (`dev/n_osync.sh`, `dev/ofunctions.sh`, `dev/common_install.sh`, etc.) as most of the main files are generated via merge.sh.
|
||||
When testing your contributions, generate files via `merge.sh` or use `bootstrap.sh`, which generates a temporary version of n_osync.sh with all includes.
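For instance, assuming a test configuration file exists at `/etc/osync/test.conf` (the path is just an example), a bootstrap run from the `dev` directory might look like:

$ cd dev
$ ./bootstrap.sh --program=osync /etc/osync/test.conf --dry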
|
||||
|
||||
Unit tests are run by travis on every PR, but you may also run them manually which adds some tests that travis can't do, via dev/tests/run_tests.sh
|
||||
SSH port can be changed on the fly via environment variable SSH_PORT, eg: SSH_PORT=2222 dev/tests/run_tests.sh
|
||||
Unit tests are run by Travis on every PR, but you may also run them manually via `dev/tests/run_tests.sh`, which adds some tests that Travis can't do.
|
||||
SSH port can be changed on the fly via environment variable SSH_PORT, e.g.:
|
||||
|
||||
# SSH_PORT=2222 dev/tests/run_tests.sh
|
||||
|
||||
Consider reading CODING_CONVENTIONS.TXT before submitting a patch.
|
||||
|
||||
Troubleshooting
|
||||
---------------
|
||||
You may find osync's logs in /var/log/osync.[INSTANCE_ID].log (or current directory if /var/log is not writable).
|
||||
## Troubleshooting
|
||||
|
||||
You may find osync's logs in `/var/log/osync.[INSTANCE_ID].log` (or current directory if `/var/log` is not writable).
|
||||
Additionally, you can use the `--verbose` flag to see what actions are going on.
|
||||
|
||||
Uninstalling
|
||||
------------
|
||||
When opening an issue, please post the corresponding log files. Also, you may run osync with the _DEBUG option in order to have more precise logs, e.g.:
|
||||
|
||||
# _DEBUG=yes ./osync.sh /path/to/conf
|
||||
|
||||
## Uninstalling
|
||||
|
||||
The installer script also has an uninstall mode that will keep configuration files. Use it with:
|
||||
|
||||
$ ./install.sh --remove
|
||||
|
||||
Author
|
||||
------
|
||||
Feel free to open an issue on github or mail me for support in my spare time :)
|
||||
## Author
|
||||
|
||||
Feel free to open an issue on GitHub or mail me for support in my spare time :)
|
||||
Orsiris de Jong | ozy@netpower.fr
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
Coding style used for my bash projects (v3.0 Dec 2016)
|
||||
Coding style used for my bash projects (v3.2 Oct 2018)
|
||||
As bash is clearly an error-prone scripting language, we'll use as much standard coding as possible, including some quick and dirty debug techniques described here.
|
||||
|
||||
++++++ Header
|
||||
|
@ -162,6 +162,15 @@ if [ $retval -ne 0 ]; then
|
|||
Logger "Some error message" "ERROR" $retval
|
||||
fi
|
||||
|
||||
++++++ includes
|
||||
|
||||
Using merge.sh, the program may have includes like
|
||||
include #### RemoteLogger SUBSET ####
|
||||
All possible includes are listed in ofunctions.sh
|
||||
Mostly, includes are needed to port functions to a remote shell without writing them again.
|
||||
|
||||
++++++ Remote execution
|
||||
|
||||
Remote commands should always invoke bash (using '"'"' to escape single quotes in 'bash -c "command"'). It is preferable to use an ssh heredoc in order to use plain code.
|
||||
If local and remote code is identical, wrap remote code in a function so only minor modifications are needed.
|
||||
Remote code return code is transmitted via exit.
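A minimal sketch of the idea, with illustrative variable names (the real code passes more variables and wraps this in logging):

$SSH_CMD env RUN_DIR="$RUN_DIR" env PROGRAM="$PROGRAM" bash -s << 'ENDSSH'
# Plain code runs here on the remote end; the quoted heredoc prevents local expansion
# someRemoteCommand is a placeholder for the actual remote work
someRemoteCommand
exit $?
ENDSSH
retval=$?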
|
||||
|
@ -184,6 +193,9 @@ if [ $retval -ne 0 ]; then
|
|||
Logger "Some error message" "ERROR" $retval
|
||||
fi
|
||||
|
||||
We also need to transmit a couple of environment variables (RUN_DIR; PROGRAM; _LOGGER_VERBOSE... see current setups) in order to make standard code.
|
||||
Include works here too.
|
||||
|
||||
++++++ File variables
|
||||
|
||||
All eval commands should redirect their output to a file called "$RUNDIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID"
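A minimal sketch (someCommand is a placeholder for whatever command is being evaluated):

# someCommand is a placeholder; stderr is captured here too, which may differ from the real code
eval "$someCommand" > "$RUNDIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID" 2>&1
retval=$?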
|
||||
|
@ -197,15 +209,6 @@ Quoting happens outside the function call.
|
|||
|
||||
echo "$(myStringFunction $myStringVar)"
|
||||
|
||||
++++++ Finding code errors
|
||||
|
||||
Use shellcheck.net now and then (ignore SC2086 in our case)
|
||||
|
||||
Use a low tech approach to find uneven number of quotes per line
|
||||
|
||||
tr -cd "'\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
|
||||
tr -cd "\"\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
|
||||
|
||||
++++++ ofunctions
|
||||
|
||||
As obackup and osync share a lot of common functions, ofunctions.sh will host all shared code.
|
||||
|
@ -258,3 +261,16 @@ When launching the program with 'bash -x', add SLEEP_TIME=1 so wait functions wo
|
|||
Ex:
|
||||
|
||||
SLEEP_TIME=1 bash -x ./program.sh
|
||||
|
||||
++++++ Finding code errors
|
||||
|
||||
Before every release, shellcheck must be run
|
||||
Also a grep -Eri "TODO|WIP" osync/* must be run in order to find potential release blockers
|
||||
|
||||
Use shellcheck.net now and then (ignore SC2086 in our case)
|
||||
|
||||
Use a low-tech approach to find an uneven number of quotes per line
|
||||
|
||||
tr -cd "'\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
|
||||
tr -cd "\"\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
|
||||
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
## Releases require the following
|
||||
|
||||
- Documentation must be up to date
|
||||
- grep -Eri "TODO|WIP" osync/* must be run in order to find potential release blockers, including in unit tests and config files
|
||||
Run the program and then use declare -p to find any leaked variables that should not exist outside of the program
|
||||
- packaging files must be updated (RHEL / Arch)
|
||||
- Before every release, shellcheck must be run
|
||||
- ./shellcheck.sh -e SC2034 -e SC2068 ofunctions.sh
|
||||
- ./shellcheck.sh n_osync.sh
|
||||
- ./shellcheck.sh ../install.sh
|
||||
- ./shellcheck.sh ../osync-batch.sh
|
||||
- ./shellcheck.sh ../ssh_filter.sh
|
||||
- Unexpansion of main and subprograms must be done
|
||||
- Arch repo must be updated
|
||||
- Source must be made available for download on www.netpower.fr/osync
|
||||
- Tests must be run against all supported operating systems via run_tests.sh
|
||||
|
|
@ -1,30 +1,65 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
## dev pre-processor bootstrap rev 2016121302
|
||||
## dev pre-processor bootstrap rev 2019052001
|
||||
## Yeah !!! A really tech sounding name... In fact it's just include emulation in bash
|
||||
|
||||
function Usage {
|
||||
echo "$0 - Quick and dirty preprocessor for including ofunctions into programs"
|
||||
echo "Creates and executes $0.tmp.sh"
|
||||
echo "Usage:"
|
||||
echo ""
|
||||
echo "$0 --program=osync|obackup|pmocr [options to pass to program]"
|
||||
echo "Can also be run with BASHVERBOSE=yes environment variable in order to prefix program with bash -x"
|
||||
}
|
||||
|
||||
|
||||
if [ ! -f "./merge.sh" ]; then
|
||||
echo "Plrase run bootstrap.sh from osync/dev directory."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
bootstrapProgram=""
|
||||
opts=()
|
||||
outputFileName="$0"
|
||||
|
||||
source "merge.sh"
|
||||
__PREPROCESSOR_PROGRAM=osync
|
||||
__PREPROCESSOR_Constants
|
||||
for i in "${@}"; do
|
||||
case "$i" in
|
||||
--program=*)
|
||||
bootstrapProgram="${i##*=}"
|
||||
;;
|
||||
*)
|
||||
opts+=("$i")
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
cp "n_$__PREPROCESSOR_PROGRAM.sh" "$outputFileName.tmp.sh"
|
||||
if [ "$bootstrapProgram" == "" ]; then
|
||||
Usage
|
||||
exit 128
|
||||
else
|
||||
source "merge.sh"
|
||||
|
||||
__PREPROCESSOR_PROGRAM=$bootstrapProgram
|
||||
__PREPROCESSOR_PROGRAM_EXEC="n_$bootstrapProgram.sh"
|
||||
__PREPROCESSOR_Constants
|
||||
|
||||
if [ ! -f "$__PREPROCESSOR_PROGRAM_EXEC" ]; then
|
||||
echo "Cannot find file $__PREPROCESSOR_PROGRAM executable [n_$bootstrapProgram.sh]."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
cp "$__PREPROCESSOR_PROGRAM_EXEC" "$outputFileName.tmp.sh"
|
||||
if [ $? != 0 ]; then
|
||||
echo "Cannot copy original file [n_$__PREPROCESSOR_PROGRAM.sh] to [$outputFileName.tmp.sh]."
|
||||
echo "Cannot copy original file [$__PREPROCESSOR_PROGRAM_EXEC] to [$outputFileName.tmp.sh]."
|
||||
exit 1
|
||||
fi
|
||||
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
|
||||
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "$outputFileName.tmp.sh"
|
||||
done
|
||||
chmod +x "$0.tmp.sh"
|
||||
chmod +x "$outputFileName.tmp.sh"
|
||||
if [ $? != 0 ]; then
|
||||
echo "Cannot make [$outputFileName] executable.."
|
||||
echo "Cannot make [$outputFileName] executable."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
@ -33,4 +68,8 @@ if type termux-fix-shebang > /dev/null 2>&1; then
|
|||
termux-fix-shebang "$outputFileName.tmp.sh"
|
||||
fi
|
||||
|
||||
"$outputFileName.tmp.sh" "$@"
|
||||
if [ "$BASHVERBOSE" == "yes" ]; then
|
||||
bash -x "$outputFileName.tmp.sh" "${opts[@]}"
|
||||
else
|
||||
"$outputFileName.tmp.sh" "${opts[@]}"
|
||||
fi
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/env bash
|
||||
SUBPROGRAM=[prgname]
|
||||
PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones
|
||||
AUTHOR="(L) 2013-2017 by Orsiris de Jong"
|
||||
AUTHOR="(L) 2013-2020 by Orsiris de Jong"
|
||||
CONTACT="http://www.netpower.fr - ozy@netpower.fr"
|
||||
PROGRAM_BUILD=2016120401
|
||||
PROGRAM_BUILD=2020031502
|
||||
|
||||
## Runs an osync /obackup instance for every conf file found
|
||||
## If an instance fails, run it again if time permits
|
||||
|
@ -26,36 +26,19 @@ else
|
|||
LOG_FILE=./$SUBPROGRAM-batch.log
|
||||
fi
|
||||
|
||||
## Default directory where to store temporary run files
|
||||
if [ -w /tmp ]; then
|
||||
RUN_DIR=/tmp
|
||||
elif [ -w /var/tmp ]; then
|
||||
RUN_DIR=/var/tmp
|
||||
else
|
||||
RUN_DIR=.
|
||||
fi
|
||||
# No need to edit under this line ##############################################################
|
||||
|
||||
function _logger {
|
||||
local value="${1}" # What to log
|
||||
echo -e "$value" >> "$LOG_FILE"
|
||||
}
|
||||
|
||||
function Logger {
|
||||
local value="${1}" # What to log
|
||||
local level="${2}" # Log level: DEBUG, NOTICE, WARN, ERROR, CRITIAL
|
||||
|
||||
prefix="$(date) - "
|
||||
|
||||
if [ "$level" == "CRITICAL" ]; then
|
||||
_logger "$prefix\e[41m$value\e[0m"
|
||||
elif [ "$level" == "ERROR" ]; then
|
||||
_logger "$prefix\e[91m$value\e[0m"
|
||||
elif [ "$level" == "WARN" ]; then
|
||||
_logger "$prefix\e[93m$value\e[0m"
|
||||
elif [ "$level" == "NOTICE" ]; then
|
||||
_logger "$prefix$value"
|
||||
elif [ "$level" == "DEBUG" ]; then
|
||||
if [ "$DEBUG" == "yes" ]; then
|
||||
_logger "$prefix$value"
|
||||
fi
|
||||
else
|
||||
_logger "\e[41mLogger function called without proper loglevel.\e[0m"
|
||||
_logger "$prefix$value"
|
||||
fi
|
||||
}
|
||||
include #### Logger SUBSET ####
|
||||
include #### CleanUp SUBSET ####
|
||||
include #### GenericTrapQuit SUBSET ####
|
||||
|
||||
function CheckEnvironment {
|
||||
## osync / obackup executable full path can be set here if it cannot be found on the system
|
||||
|
@ -145,6 +128,8 @@ function Usage {
|
|||
exit 128
|
||||
}
|
||||
|
||||
trap GenericTrapQuit TERM EXIT HUP QUIT
|
||||
|
||||
opts=""
|
||||
for i in "$@"
|
||||
do
|
||||
|
|
|
@ -2,8 +2,6 @@
|
|||
|
||||
## Installer script suitable for osync / obackup / pmocr
|
||||
|
||||
include #### _OFUNCTIONS_BOOTSTRAP SUBSET ####
|
||||
|
||||
PROGRAM=[prgname]
|
||||
|
||||
PROGRAM_VERSION=$(grep "PROGRAM_VERSION=" $PROGRAM.sh)
|
||||
|
@ -12,33 +10,22 @@ PROGRAM_BINARY=$PROGRAM".sh"
|
|||
PROGRAM_BATCH=$PROGRAM"-batch.sh"
|
||||
SSH_FILTER="ssh_filter.sh"
|
||||
|
||||
SCRIPT_BUILD=2017041701
|
||||
SCRIPT_BUILD=2025012001
|
||||
INSTANCE_ID="installer-$SCRIPT_BUILD"
|
||||
|
||||
## osync / obackup / pmocr / zsnap install script
|
||||
## Tested on RHEL / CentOS 6 & 7, Fedora 23, Debian 7 & 8, Mint 17 and FreeBSD 8, 10 and 11
|
||||
## Please adapt this to fit your distro needs
|
||||
|
||||
include #### OFUNCTIONS MICRO SUBSET ####
|
||||
|
||||
# Get current install.sh path from http://stackoverflow.com/a/246128/2635443
|
||||
SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
CONF_DIR=$FAKEROOT/etc/$PROGRAM
|
||||
BIN_DIR="$FAKEROOT/usr/local/bin"
|
||||
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d
|
||||
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora
|
||||
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
|
||||
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
|
||||
|
||||
if [ "$PROGRAM" == "osync" ]; then
|
||||
SERVICE_NAME="osync-srv"
|
||||
elif [ "$PROGRAM" == "pmocr" ]; then
|
||||
SERVICE_NAME="pmocr-srv"
|
||||
fi
|
||||
|
||||
SERVICE_FILE_INIT="$SERVICE_NAME"
|
||||
SERVICE_FILE_SYSTEMD_SYSTEM="$SERVICE_NAME@.service"
|
||||
SERVICE_FILE_SYSTEMD_USER="$SERVICE_NAME@.service.user"
|
||||
|
||||
## Generic code
|
||||
_LOGGER_SILENT=false
|
||||
_STATS=1
|
||||
ACTION="install"
|
||||
FAKEROOT=""
|
||||
|
||||
## Default log file
|
||||
if [ -w "$FAKEROOT/var/log" ]; then
|
||||
|
@ -49,13 +36,15 @@ else
|
|||
LOG_FILE="./$PROGRAM-install.log"
|
||||
fi
|
||||
|
||||
include #### QuickLogger SUBSET ####
|
||||
include #### UrlEncode SUBSET ####
|
||||
include #### GetLocalOS SUBSET ####
|
||||
include #### GetConfFileValue SUBSET ####
|
||||
include #### CleanUp SUBSET ####
|
||||
include #### GenericTrapQuit SUBSET ####
|
||||
|
||||
function SetLocalOSSettings {
|
||||
USER=root
|
||||
DO_INIT=true
|
||||
|
||||
# LOCAL_OS and LOCAL_OS_FULL are global variables set at GetLocalOS
|
||||
|
||||
|
@ -65,10 +54,12 @@ function SetLocalOSSettings {
|
|||
;;
|
||||
*"MacOSX"*)
|
||||
GROUP=admin
|
||||
DO_INIT=false
|
||||
;;
|
||||
*"msys"*|*"Cygwin"*)
|
||||
*"Cygwin"*|*"Android"*|*"msys"*|*"BusyBox"*)
|
||||
USER=""
|
||||
GROUP=""
|
||||
DO_INIT=false
|
||||
;;
|
||||
*)
|
||||
GROUP=root
|
||||
|
@ -76,12 +67,12 @@ function SetLocalOSSettings {
|
|||
esac
|
||||
|
||||
if [ "$LOCAL_OS" == "Android" ] || [ "$LOCAL_OS" == "BusyBox" ]; then
|
||||
QuickLogger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly."
|
||||
Logger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ([ "$USER" != "" ] && [ "$(whoami)" != "$USER" ] && [ "$FAKEROOT" == "" ]); then
|
||||
QuickLogger "Must be run as $USER."
|
||||
Logger "Must be run as $USER." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
@ -89,66 +80,108 @@ function SetLocalOSSettings {
|
|||
}
|
||||
|
||||
function GetInit {
|
||||
if [ -f /sbin/init ]; then
|
||||
if file /sbin/init | grep systemd > /dev/null; then
|
||||
init="systemd"
|
||||
init="none"
|
||||
if [ -f /sbin/openrc-run ]; then
|
||||
init="openrc"
|
||||
Logger "Detected openrc." "NOTICE"
|
||||
elif [ -f /usr/lib/systemd/systemd ]; then
|
||||
init="systemd"
|
||||
Logger "Detected systemd." "NOTICE"
|
||||
elif [ -f /sbin/init ]; then
|
||||
if type -p file > /dev/null 2>&1; then
|
||||
if file /sbin/init | grep systemd > /dev/null; then
|
||||
init="systemd"
|
||||
Logger "Detected systemd." "NOTICE"
|
||||
else
|
||||
init="initV"
|
||||
fi
|
||||
else
|
||||
init="initV"
|
||||
fi
|
||||
|
||||
if [ $init == "initV" ]; then
|
||||
Logger "Detected initV." "NOTICE"
|
||||
fi
|
||||
else
|
||||
QuickLogger "Can't detect initV or systemd. Service files won't be installed. You can still run $PROGRAM manually or via cron."
|
||||
Logger "Can't detect initV, systemd or openRC. Service files won't be installed. You can still run $PROGRAM manually or via cron." "WARN"
|
||||
init="none"
|
||||
fi
|
||||
}
|
||||
|
||||
function CreateDir {
|
||||
local dir="${1}"
|
||||
local dirMask="${2}"
|
||||
local dirUser="${3}"
|
||||
local dirGroup="${4}"
|
||||
|
||||
if [ ! -d "$dir" ]; then
|
||||
mkdir "$dir"
|
||||
(
|
||||
if [ $(IsInteger $dirMask) -eq 1 ]; then
|
||||
umask $dirMask
|
||||
fi
|
||||
mkdir -p "$dir"
|
||||
)
|
||||
if [ $? == 0 ]; then
|
||||
QuickLogger "Created directory [$dir]."
|
||||
Logger "Created directory [$dir]." "NOTICE"
|
||||
else
|
||||
QuickLogger "Cannot create directory [$dir]."
|
||||
Logger "Cannot create directory [$dir]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$dirUser" != "" ]; then
|
||||
userGroup="$dirUser"
|
||||
if [ "$dirGroup" != "" ]; then
|
||||
userGroup="$userGroup"":$dirGroup"
|
||||
fi
|
||||
chown "$userGroup" "$dir"
|
||||
if [ $? != 0 ]; then
|
||||
Logger "Could not set directory ownership on [$dir] to [$userGroup]." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
Logger "Set file ownership on [$dir] to [$userGroup]." "NOTICE"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function CopyFile {
|
||||
local sourcePath="${1}"
|
||||
local destPath="${2}"
|
||||
local fileName="${3}"
|
||||
local fileMod="${4}"
|
||||
local fileUser="${5}"
|
||||
local fileGroup="${6}"
|
||||
local overwrite="${7:-false}"
|
||||
local sourceFileName="${3}"
|
||||
local destFileName="${4}"
|
||||
local fileMod="${5}"
|
||||
local fileUser="${6}"
|
||||
local fileGroup="${7}"
|
||||
local overwrite="${8:-false}"
|
||||
|
||||
local userGroup=""
|
||||
local oldFileName
|
||||
|
||||
if [ -f "$destPath/$fileName" ] && [ $overwrite == false ]; then
|
||||
oldFileName="$fileName"
|
||||
fileName="$oldFileName.new"
|
||||
cp "$sourcePath/$oldFileName" "$destPath/$fileName"
|
||||
else
|
||||
cp "$sourcePath/$fileName" "$destPath"
|
||||
if [ "$destFileName" == "" ]; then
|
||||
destFileName="$sourceFileName"
|
||||
fi
|
||||
|
||||
if [ -f "$destPath/$destFileName" ] && [ $overwrite == false ]; then
|
||||
destFileName="$sourceFileName.new"
|
||||
Logger "Copying [$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
|
||||
fi
|
||||
|
||||
cp "$sourcePath/$sourceFileName" "$destPath/$destFileName"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot copy [$fileName] to [$destPath]. Make sure to run install script in the directory containing all other files."
|
||||
QuickLogger "Also make sure you have permissions to write to [$BIN_DIR]."
|
||||
Logger "Cannot copy [$sourcePath/$sourceFileName] to [$destPath/$destFileName]. Make sure to run install script in the directory containing all other files." "CRITICAL"
|
||||
Logger "Also make sure you have permissions to write to [$BIN_DIR]." "ERROR"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Copied [$fileName] to [$destPath]."
|
||||
if [ "$fileMod" != "" ]; then
|
||||
chmod "$fileMod" "$destPath/$fileName"
|
||||
Logger "Copied [$sourcePath/$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
|
||||
if [ "$(IsInteger $fileMod)" -eq 1 ]; then
|
||||
chmod "$fileMod" "$destPath/$destFileName"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot set file permissions of [$destPath/$fileName] to [$fileMod]."
|
||||
Logger "Cannot set file permissions of [$destPath/$destFileName] to [$fileMod]." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Set file permissions to [$fileMod] on [$destPath/$fileName]."
|
||||
Logger "Set file permissions to [$fileMod] on [$destPath/$destFileName]." "NOTICE"
|
||||
fi
|
||||
elif [ "$fileMod" != "" ]; then
|
||||
Logger "Bogus filemod [$fileMod] for [$destPath] given." "WARN"
|
||||
fi
|
||||
|
||||
if [ "$fileUser" != "" ]; then
|
||||
|
@ -158,12 +191,12 @@ function CopyFile {
|
|||
userGroup="$userGroup"":$fileGroup"
|
||||
fi
|
||||
|
||||
chown "$userGroup" "$destPath/$fileName"
|
||||
chown "$userGroup" "$destPath/$destFileName"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Could not set file ownership on [$destPath/$fileName] to [$userGroup]."
|
||||
Logger "Could not set file ownership on [$destPath/$destFileName] to [$userGroup]." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Set file ownership on [$destPath/$fileName] to [$userGroup]."
|
||||
Logger "Set file ownership on [$destPath/$destFileName] to [$userGroup]." "NOTICE"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
@ -179,7 +212,7 @@ function CopyExampleFiles {
|
|||
|
||||
for file in "${exampleFiles[@]}"; do
|
||||
if [ -f "$SCRIPT_PATH/$file" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$CONF_DIR" "$file" "" "" "" false
|
||||
CopyFile "$SCRIPT_PATH" "$CONF_DIR" "$file" "$file" "" "" "" false
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
@ -203,48 +236,72 @@ function CopyProgram {
|
|||
fi
|
||||
|
||||
for file in "${binFiles[@]}"; do
|
||||
CopyFile "$SCRIPT_PATH" "$BIN_DIR" "$file" 755 "$user" "$group" true
|
||||
CopyFile "$SCRIPT_PATH" "$BIN_DIR" "$file" "$file" 755 "$user" "$group" true
|
||||
done
|
||||
}
|
||||
|
||||
function CopyServiceFiles {
|
||||
if ([ "$init" == "systemd" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_SYSTEMD_SYSTEM" ]); then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
|
||||
if [ -f "$SCRIPT_PATH/$SERVICE_FILE_SYSTEMD_SYSTEM_USER" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "" "" "" true
|
||||
CreateDir "$SERVICE_DIR_SYSTEMD_SYSTEM"
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$SERVICE_FILE_SYSTEMD_SYSTEM" "$SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
|
||||
if [ -f "$SCRIPT_PATH/$SERVICE_FILE_SYSTEMD_USER" ]; then
|
||||
CreateDir "$SERVICE_DIR_SYSTEMD_USER"
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "" "" "" true
|
||||
fi
|
||||
|
||||
QuickLogger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]."
|
||||
QuickLogger "Can be activated with [systemctl start SERVICE_NAME@instance.conf] where instance.conf is the name of the config file in $CONF_DIR."
|
||||
QuickLogger "Can be enabled on boot with [systemctl enable $SERVICE_NAME@instance.conf]."
|
||||
QuickLogger "In userland, active with [systemctl --user start $SERVICE_NAME@instance.conf]."
|
||||
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_INIT" ] && [ -d "$SERVICE_DIR_INIT" ]); then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$SERVICE_FILE_INIT" "755" "" "" true
|
||||
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
|
||||
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
|
||||
fi
|
||||
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "" "" "" true
|
||||
fi
|
||||
|
||||
QuickLogger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_INIT]."
|
||||
QuickLogger "Can be activated with [service $SERVICE_FILE_INIT start]."
|
||||
QuickLogger "Can be enabled on boot with [chkconfig $SERVICE_FILE_INIT on]."
|
||||
|
||||
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]." "NOTICE"
|
||||
Logger "Can be activated with [systemctl start SERVICE_NAME@instance.conf] where instance.conf is the name of the config file in $CONF_DIR." "NOTICE"
|
||||
Logger "Can be enabled on boot with [systemctl enable $SERVICE_NAME@instance.conf]." "NOTICE"
|
||||
Logger "In userland, active with [systemctl --user start $SERVICE_NAME@instance.conf]." "NOTICE"
|
||||
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_INIT" ] && [ -d "$SERVICE_DIR_INIT" ]); then
|
||||
#CreateDir "$SERVICE_DIR_INIT"
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$SERVICE_FILE_INIT" "$SERVICE_FILE_INIT" "755" "" "" true
|
||||
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_INIT" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "755" "" "" true
|
||||
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
|
||||
fi
|
||||
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_INIT]." "NOTICE"
|
||||
Logger "Can be activated with [service $SERVICE_FILE_INIT start]." "NOTICE"
|
||||
Logger "Can be enabled on boot with [chkconfig $SERVICE_FILE_INIT on]." "NOTICE"
|
||||
elif ([ "$init" == "openrc" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_OPENRC" ] && [ -d "$SERVICE_DIR_OPENRC" ]); then
|
||||
# Rename service to usual service file
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$SERVICE_FILE_OPENRC" "$SERVICE_FILE_INIT" "755" "" "" true
|
||||
if [ -f "$SCRPT_PATH/$TARGET_HELPER_SERVICE_FILE_OPENRC" ]; then
|
||||
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "755" "" "" true
|
||||
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
|
||||
fi
|
||||
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_OPENRC]." "NOTICE"
|
||||
Logger "Can be activated with [rc-update add $SERVICE_NAME.instance] where instance is a configuration file found in /etc/osync." "NOTICE"
|
||||
else
|
||||
QuickLogger "Cannot define what init style is in use on this system. Skipping service file installation."
|
||||
Logger "Cannot properly find how to deal with init on this system. Skipping service file installation." "NOTICE"
|
||||
fi
|
||||
}
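For reference, a minimal usage sketch of the unit files this function installs; the instance name sync.conf is hypothetical and stands for whatever config file sits in $CONF_DIR, and the service name assumes the osync defaults set further down (SERVICE_NAME=osync-srv):

# system-wide template unit
systemctl enable osync-srv@sync.conf
systemctl start osync-srv@sync.conf
# per-user unit copied to the systemd user directory
systemctl --user start osync-srv@sync.conf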
|
||||
|
||||
function Statistics {
|
||||
if type wget > /dev/null; then
|
||||
if type wget > /dev/null 2>&1; then
|
||||
wget -qO- "$STATS_LINK" > /dev/null 2>&1
|
||||
if [ $? == 0 ]; then
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
if type curl > /dev/null; then
|
||||
if type curl > /dev/null 2>&1; then
|
||||
curl "$STATS_LINK" -o /dev/null > /dev/null 2>&1
|
||||
if [ $? == 0 ]; then
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
QuickLogger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please."
|
||||
Logger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please." "WARN"
|
||||
return 1
|
||||
}
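A hedged sketch of the same wget-then-curl fallback run by hand when the silent call fails; the query-string values here (version, OS) are made up for illustration:

STATS_LINK="http://instcount.netpower.fr?program=osync&version=1.3.0&os=Linux&action=install"
wget -qO- "$STATS_LINK" > /dev/null 2>&1 || curl "$STATS_LINK" -o /dev/null > /dev/null 2>&1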
|
||||
|
||||
|
@ -254,12 +311,12 @@ function RemoveFile {
|
|||
if [ -f "$file" ]; then
|
||||
rm -f "$file"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Could not remove file [$file]."
|
||||
Logger "Could not remove file [$file]." "ERROR"
|
||||
else
|
||||
QuickLogger "Removed file [$file]."
|
||||
Logger "Removed file [$file]." "NOTICE"
|
||||
fi
|
||||
else
|
||||
QuickLogger "File [$file] not found. Skipping."
|
||||
Logger "File [$file] not found. Skipping." "NOTICE"
|
||||
fi
|
||||
}
|
||||
|
||||
|
@ -273,13 +330,25 @@ function RemoveAll {
|
|||
if [ ! -f "$BIN_DIR/osync.sh" ] && [ ! -f "$BIN_DIR/obackup.sh" ]; then # Check if any other program requiring ssh filter is present before removal
|
||||
RemoveFile "$BIN_DIR/$SSH_FILTER"
|
||||
else
|
||||
QuickLogger "Skipping removal of [$BIN_DIR/$SSH_FILTER] because other programs present that need it."
|
||||
Logger "Skipping removal of [$BIN_DIR/$SSH_FILTER] because other programs present that need it." "NOTICE"
|
||||
fi
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$SERVICE_FILE_SYSTEMD_SYSTEM"
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_SYSTEM"
|
||||
RemoveFile "$SERVICE_DIR_INIT/$SERVICE_FILE_INIT"
|
||||
|
||||
QuickLogger "Skipping configuration files in [$CONF_DIR]. You may remove this directory manually."
|
||||
# Try to uninstall every possible service file
|
||||
if [ $init == "systemd" ]; then
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$SERVICE_FILE_SYSTEMD_SYSTEM"
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_USER"
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM"
|
||||
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER"
|
||||
elif [ $init == "initV" ]; then
|
||||
RemoveFile "$SERVICE_DIR_INIT/$SERVICE_FILE_INIT"
|
||||
RemoveFile "$SERVICE_DIR_INIT/$TARGET_HELPER_SERVICE_FILE_INIT"
|
||||
elif [ $init == "openrc" ]; then
|
||||
RemoveFile "$SERVICE_DIR_OPENRC/$SERVICE_FILE_OPENRC"
|
||||
RemoveFile "$SERVICE_DIR_OPENRC/$TARGET_HELPER_SERVICE_FILE_OPENRC"
|
||||
else
|
||||
Logger "Can uninstall only from initV, systemd or openRC." "WARN"
|
||||
fi
|
||||
Logger "Skipping configuration files in [$CONF_DIR]. You may remove this directory manually." "NOTICE"
|
||||
}
|
||||
|
||||
function Usage {
|
||||
|
@ -288,43 +357,92 @@ function Usage {
|
|||
echo "--silent Will log and bypass user interaction."
|
||||
echo "--no-stats Used with --silent in order to refuse sending anonymous install stats."
|
||||
echo "--remove Remove the program."
|
||||
echo "--prefix=/path Use prefix to install path."
|
||||
exit 127
|
||||
}
|
||||
|
||||
_LOGGER_SILENT=false
|
||||
_STATS=1
|
||||
ACTION="install"
|
||||
############################## Script entry point
|
||||
|
||||
for i in "$@"
|
||||
do
|
||||
case $i in
|
||||
--silent)
|
||||
_LOGGER_SILENT=true
|
||||
;;
|
||||
--no-stats)
|
||||
_STATS=0
|
||||
;;
|
||||
--remove)
|
||||
ACTION="uninstall"
|
||||
;;
|
||||
--help|-h|-?)
|
||||
Usage
|
||||
esac
|
||||
done
|
||||
function GetCommandlineArguments {
|
||||
for i in "$@"; do
|
||||
case $i in
|
||||
--prefix=*)
|
||||
FAKEROOT="${i##*=}"
|
||||
;;
|
||||
--silent)
|
||||
_LOGGER_SILENT=true
|
||||
;;
|
||||
--no-stats)
|
||||
_STATS=0
|
||||
;;
|
||||
--remove)
|
||||
ACTION="uninstall"
|
||||
;;
|
||||
--help|-h|-?)
|
||||
Usage
|
||||
;;
|
||||
*)
|
||||
Logger "Unknown option '$i'" "ERROR"
|
||||
Usage
|
||||
exit
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
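A quick invocation sketch showing how the flags parsed above combine; the prefix path is hypothetical:

# unattended install into a fakeroot, without sending stats
./install.sh --silent --no-stats --prefix=/tmp/osync_fakeroot
# unattended removal from the same prefix
./install.sh --remove --silent --no-stats --prefix=/tmp/osync_fakeroot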
|
||||
|
||||
if [ "$FAKEROOT" != "" ]; then
|
||||
mkdir -p "$SERVICE_DIR_SYSTEMD_SYSTEM" "$SERVICE_DIR_SYSTEMD_USER" "$BIN_DIR"
|
||||
GetCommandlineArguments "$@"
|
||||
|
||||
CONF_DIR=$FAKEROOT/etc/$PROGRAM
|
||||
BIN_DIR="$FAKEROOT/usr/local/bin"
|
||||
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d
|
||||
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora
|
||||
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
|
||||
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
|
||||
SERVICE_DIR_OPENRC=$FAKEROOT/etc/init.d
|
||||
|
||||
if [ "$PROGRAM" == "osync" ]; then
|
||||
SERVICE_NAME="osync-srv"
|
||||
TARGET_HELPER_SERVICE_NAME="osync-target-helper-srv"
|
||||
|
||||
TARGET_HELPER_SERVICE_FILE_INIT="$TARGET_HELPER_SERVICE_NAME"
|
||||
TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM="$TARGET_HELPER_SERVICE_NAME@.service"
|
||||
TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER="$TARGET_HELPER_SERVICE_NAME@.service.user"
|
||||
TARGET_HELPER_SERVICE_FILE_OPENRC="$TARGET_HELPER_SERVICE_NAME-openrc"
|
||||
elif [ "$PROGRAM" == "pmocr" ]; then
|
||||
SERVICE_NAME="pmocr-srv"
|
||||
fi
|
||||
|
||||
SERVICE_FILE_INIT="$SERVICE_NAME"
|
||||
SERVICE_FILE_SYSTEMD_SYSTEM="$SERVICE_NAME@.service"
|
||||
SERVICE_FILE_SYSTEMD_USER="$SERVICE_NAME@.service.user"
|
||||
SERVICE_FILE_OPENRC="$SERVICE_NAME-openrc"
|
||||
|
||||
## Generic code
|
||||
|
||||
trap GenericTrapQuit TERM EXIT HUP QUIT
|
||||
|
||||
if [ ! -w "$(dirname $LOG_FILE)" ]; then
|
||||
echo "Cannot write to log [$(dirname $LOG_FILE)]."
|
||||
else
|
||||
Logger "Script begin, logging to [$LOG_FILE]." "DEBUG"
|
||||
fi
|
||||
|
||||
# Set default umask
|
||||
umask 0022
|
||||
|
||||
GetLocalOS
|
||||
SetLocalOSSettings
|
||||
GetInit
|
||||
# On Mac OS this always produces a warning which causes the installer to fail with exit code 2
|
||||
# Since we know it won't work anyway, and that's fine, just skip this step
|
||||
if $DO_INIT; then
|
||||
GetInit
|
||||
fi
|
||||
|
||||
STATS_LINK="http://instcount.netpower.fr?program=$PROGRAM&version=$PROGRAM_VERSION&os=$OS&action=$ACTION"
|
||||
|
||||
if [ "$ACTION" == "uninstall" ]; then
|
||||
RemoveAll
|
||||
QuickLogger "$PROGRAM uninstalled."
|
||||
Logger "$PROGRAM uninstalled." "NOTICE"
|
||||
else
|
||||
CreateDir "$CONF_DIR"
|
||||
CreateDir "$BIN_DIR"
|
||||
|
@ -333,11 +451,11 @@ else
|
|||
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "pmocr" ]; then
|
||||
CopyServiceFiles
|
||||
fi
|
||||
QuickLogger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM"
|
||||
Logger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM_BINARY" "NOTICE"
|
||||
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "obackup" ]; then
|
||||
QuickLogger ""
|
||||
QuickLogger "If connecting remotely, consider setup ssh filter to enhance security."
|
||||
QuickLogger ""
|
||||
echo ""
|
||||
Logger "If connecting remotely, consider setup ssh filter to enhance security." "NOTICE"
|
||||
echo ""
|
||||
fi
|
||||
fi
|
||||
|
||||
|
@ -345,7 +463,7 @@ if [ $_STATS -eq 1 ]; then
|
|||
if [ $_LOGGER_SILENT == true ]; then
|
||||
Statistics
|
||||
else
|
||||
QuickLogger "In order to make usage statistics, the script would like to connect to $STATS_LINK"
|
||||
Logger "In order to make usage statistics, the script would like to connect to $STATS_LINK" "NOTICE"
|
||||
read -r -p "No data except those in the url will be send. Allow [Y/n] " response
|
||||
case $response in
|
||||
[nN])
|
||||
|
|
dev/debug_osync.sh (4891): file diff suppressed because it is too large
dev/merge.sh (160)
|
@ -1,36 +1,38 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
## MERGE 2017040901
|
||||
## MERGE 2020031501
|
||||
|
||||
## Merges ofunctions.sh and n_program.sh into program.sh
|
||||
## Adds installer
|
||||
|
||||
function __PREPROCESSOR_Merge {
|
||||
PROGRAM=osync
|
||||
PROGRAM=merge
|
||||
INSTANCE_ID=dev
|
||||
|
||||
VERSION=$(grep "PROGRAM_VERSION=" n_$PROGRAM.sh)
|
||||
function Usage {
|
||||
echo "Merges ofunctions.sh and n_program.sh into debug_program.sh and ../program.sh"
|
||||
echo "Usage"
|
||||
echo "$0 osync|obackup|pmocr"
|
||||
}
|
||||
|
||||
function __PREPROCESSOR_Merge {
|
||||
local nPROGRAM="$1"
|
||||
|
||||
if [ -f "$nPROGRAM" ]; then
|
||||
Logger "$nPROGRAM is not found in local path." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION=$(grep "PROGRAM_VERSION=" n_$nPROGRAM.sh)
|
||||
VERSION=${VERSION#*=}
|
||||
__PREPROCESSOR_Constants
|
||||
|
||||
source "ofunctions.sh"
|
||||
if [ $? != 0 ]; then
|
||||
echo "Please run $0 in dev directory with ofunctions.sh"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
__PREPROCESSOR_Unexpand "n_$PROGRAM.sh" "debug_$PROGRAM.sh"
|
||||
__PREPROCESSOR_Unexpand "n_$nPROGRAM.sh" "debug_$nPROGRAM.sh"
|
||||
|
||||
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
|
||||
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$PROGRAM.sh"
|
||||
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$nPROGRAM.sh"
|
||||
done
|
||||
|
||||
__PREPROCESSOR_CleanDebug
|
||||
__PREPROCESSOR_CopyCommons
|
||||
rm -f tmp_$PROGRAM.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot remove tmp_$PROGRAM.sh"
|
||||
exit 1
|
||||
fi
|
||||
__PREPROCESSOR_CleanDebug "debug_$nPROGRAM.sh" "../$nPROGRAM.sh"
|
||||
}
|
||||
|
||||
function __PREPROCESSOR_Constants {
|
||||
|
@ -41,11 +43,14 @@ function __PREPROCESSOR_Constants {
|
|||
__PREPROCESSOR_SUBSETS=(
|
||||
'#### OFUNCTIONS FULL SUBSET ####'
|
||||
'#### OFUNCTIONS MINI SUBSET ####'
|
||||
'#### OFUNCTIONS MICRO SUBSET ####'
|
||||
'#### PoorMansRandomGenerator SUBSET ####'
|
||||
'#### _OFUNCTIONS_BOOTSTRAP SUBSET ####'
|
||||
'#### RUN_DIR SUBSET ####'
|
||||
'#### DEBUG SUBSET ####'
|
||||
'#### TrapError SUBSET ####'
|
||||
'#### RemoteLogger SUBSET ####'
|
||||
'#### QuickLogger SUBSET ####'
|
||||
'#### Logger SUBSET ####'
|
||||
'#### GetLocalOS SUBSET ####'
|
||||
'#### IsInteger SUBSET ####'
|
||||
'#### UrlEncode SUBSET ####'
|
||||
|
@ -54,6 +59,10 @@ function __PREPROCESSOR_Constants {
|
|||
'#### VerComp SUBSET ####'
|
||||
'#### GetConfFileValue SUBSET ####'
|
||||
'#### SetConfFileValue SUBSET ####'
|
||||
'#### CheckRFC822 SUBSET ####'
|
||||
'#### CleanUp SUBSET ####'
|
||||
'#### GenericTrapQuit SUBSET ####'
|
||||
'#### FileMove SUBSET ####'
|
||||
)
|
||||
}
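Each entry of __PREPROCESSOR_SUBSETS names a marker pair in ofunctions.sh; a minimal sketch of what such a region is assumed to look like (the function body is shortened for illustration):

#### IsInteger SUBSET ####
function IsInteger {
	local value="${1}"
	[[ $value =~ ^[0-9]+$ ]] && echo 1 || echo 0
}
#### IsInteger SUBSET END ####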
|
||||
|
||||
|
@ -63,7 +72,7 @@ function __PREPROCESSOR_Unexpand {
|
|||
|
||||
unexpand "$source" > "$destination"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot unexpand [$source] to [$destination]."
|
||||
Logger "Cannot unexpand [$source] to [$destination]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
@ -76,60 +85,75 @@ function __PREPROCESSOR_MergeSubset {
|
|||
|
||||
sed -n "/$subsetBegin/,/$subsetEnd/p" "$subsetFile" > "$subsetFile.$subsetBegin"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]."
|
||||
Logger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]." "CRTICIAL"
|
||||
exit 1
|
||||
fi
|
||||
sed "/include $subsetBegin/r $subsetFile.$subsetBegin" "$mergedFile" | grep -v -E "$subsetBegin\$|$subsetEnd\$" > "$mergedFile.tmp"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot add subset [$subsetBegin] to [$mergedFile]."
|
||||
Logger "Cannot add subset [$subsetBegin] to [$mergedFile]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
rm -f "$subsetFile.$subsetBegin"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot remove temporary subset [$subsetFile.$subsetBegin]."
|
||||
Logger "Cannot remove temporary subset [$subsetFile.$subsetBegin]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -f "$mergedFile"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot remove merged original file [$mergedFile]."
|
||||
Logger "Cannot remove merged original file [$mergedFile]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mv "$mergedFile.tmp" "$mergedFile"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot move merged tmp file to original [$mergedFile]."
|
||||
Logger "Cannot move merged tmp file to original [$mergedFile]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
}
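To make the sed pipeline above concrete, a hedged example of one merge step: n_osync.sh is assumed to carry a placeholder comment containing "include" plus the subset marker, and the call below pulls the matching region of ofunctions.sh in after it before the marker lines are stripped:

# placeholder line in dev/n_osync.sh:   include #### IsInteger SUBSET ####
__PREPROCESSOR_MergeSubset '#### IsInteger SUBSET ####' '#### IsInteger SUBSET END ####' "ofunctions.sh" "debug_osync.sh"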
|
||||
|
||||
function __PREPROCESSOR_CleanDebug {
|
||||
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' debug_$PROGRAM.sh | grep -v "$PARANOIA_DEBUG_LINE" > ../$PROGRAM.sh
|
||||
local source="${1}"
|
||||
local destination="${2:-$source}"
|
||||
|
||||
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' "$source" | grep -v "$PARANOIA_DEBUG_LINE" > "$destination.tmp"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot remove PARANOIA_DEBUG code from standard build."
|
||||
Logger "Cannot remove PARANOIA_DEBUG code from standard build." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
mv -f "$destination.tmp" "$destination"
|
||||
if [ $? -ne 0 ]; then
|
||||
Logger "Cannot move [$destination.tmp] to [$destination]." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
chmod +x "debug_$PROGRAM.sh"
|
||||
chmod +x "$source"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot chmod debug_$PROGRAM.sh"
|
||||
Logger "Cannot chmod [$source]." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Prepared ./debug_$PROGRAM.sh"
|
||||
Logger "Prepared [$source]." "NOTICE"
|
||||
fi
|
||||
chmod +x "../$PROGRAM.sh"
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot chmod $PROGRAM.sh"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Prepared ../$PROGRAM.sh"
|
||||
|
||||
if [ "$source" != "$destination" ]; then
|
||||
|
||||
chmod +x "$destination"
|
||||
if [ $? != 0 ]; then
|
||||
Logger "Cannot chmod [$destination]." "CRITICAL"
|
||||
exit 1
|
||||
else
|
||||
Logger "Prepared [$destination]." "NOTICE"
|
||||
fi
|
||||
fi
|
||||
}
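For context, a hedged sketch of a debug-only region that CleanDebug strips from the release build; the exact marker strings live in __PREPROCESSOR_Constants (not shown in this hunk), so the comments below are an assumption about their shape:

#__BEGIN_WITH_PARANOIA_DEBUG
if [ "$_PARANOIA_DEBUG" == "yes" ]; then
	Logger "Entering function." "PARANOIA_DEBUG"	#__WITH_PARANOIA_DEBUG
fi
#__END_WITH_PARANOIA_DEBUG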
|
||||
|
||||
function __PREPROCESSOR_CopyCommons {
|
||||
sed "s/\[prgname\]/$PROGRAM/g" common_install.sh > ../install.sh
|
||||
local nPROGRAM="$1"
|
||||
|
||||
sed "s/\[prgname\]/$nPROGRAM/g" common_install.sh > ../install.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot assemble install."
|
||||
Logger "Cannot assemble install." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
@ -137,40 +161,44 @@ function __PREPROCESSOR_CopyCommons {
|
|||
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../install.sh"
|
||||
done
|
||||
|
||||
#sed "s/\[version\]/$VERSION/g" ../tmp_install.sh > ../install.sh
|
||||
#if [ $? != 0 ]; then
|
||||
# QuickLogger "Cannot change install version."
|
||||
# exit 1
|
||||
#fi
|
||||
__PREPROCESSOR_CleanDebug "../install.sh"
|
||||
|
||||
if [ -f "common_batch.sh" ]; then
|
||||
sed "s/\[prgname\]/$PROGRAM/g" common_batch.sh > ../$PROGRAM-batch.sh
|
||||
sed "s/\[prgname\]/$nPROGRAM/g" common_batch.sh > ../$nPROGRAM-batch.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot assemble batch runner."
|
||||
Logger "Cannot assemble batch runner." "CRITICAL"
|
||||
exit 1
|
||||
fi
|
||||
chmod +x ../$PROGRAM-batch.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot chmod $PROGRAM-batch.sh"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Prepared ../$PROGRAM-batch.sh"
|
||||
fi
|
||||
fi
|
||||
chmod +x ../install.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot chmod install.sh"
|
||||
exit 1
|
||||
else
|
||||
QuickLogger "Prepared ../install.sh"
|
||||
fi
|
||||
rm -f ../tmp_install.sh
|
||||
if [ $? != 0 ]; then
|
||||
QuickLogger "Cannot chmod $PROGRAM.sh"
|
||||
exit 1
|
||||
|
||||
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
|
||||
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../$nPROGRAM-batch.sh"
|
||||
done
|
||||
|
||||
__PREPROCESSOR_CleanDebug "../$nPROGRAM-batch.sh"
|
||||
fi
|
||||
}
|
||||
|
||||
# If sourced don't do anything
|
||||
if [ "$(basename $0)" == "merge.sh" ]; then
|
||||
__PREPROCESSOR_Merge
|
||||
source "./ofunctions.sh"
|
||||
if [ $? != 0 ]; then
|
||||
echo "Please run $0 in dev directory with ofunctions.sh"
|
||||
exit 1
|
||||
fi
|
||||
trap GenericTrapQuit TERM EXIT HUP QUIT
|
||||
|
||||
if [ "$1" == "osync" ]; then
|
||||
__PREPROCESSOR_Merge osync
|
||||
__PREPROCESSOR_CopyCommons osync
|
||||
elif [ "$1" == "obackup" ]; then
|
||||
__PREPROCESSOR_Merge obackup
|
||||
__PREPROCESSOR_CopyCommons obackup
|
||||
elif [ "$1" == "pmocr" ]; then
|
||||
__PREPROCESSOR_Merge pmocr
|
||||
__PREPROCESSOR_CopyCommons pmocr
|
||||
else
|
||||
echo "No valid program given."
|
||||
Usage
|
||||
exit 1
|
||||
fi
|
||||
fi
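A typical build sketch, run from the dev directory; the output names follow the code above:

cd dev
./merge.sh osync    # produces debug_osync.sh, then ../osync.sh, ../install.sh and ../osync-batch.sh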
|
||||
|
|
dev/n_osync.sh (2380): file diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -4,5 +4,7 @@
|
|||
#SC1091 = not following source
|
||||
#SC2086 = quoting errors (shellcheck is way too picky about quoting)
|
||||
#SC2120 = only for debug version
|
||||
#SC2034 = unused variables (can be ignored in ofunctions.sh)
|
||||
#SC2068 = bad array usage (can be ignored in ofunctions.sh)
|
||||
|
||||
shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $1
|
||||
shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $@
|
||||
|
|
|
@ -2,9 +2,10 @@
|
|||
|
||||
###### osync - Rsync based two way sync engine with fault tolerance
|
||||
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
|
||||
###### osync v1.1x / v1.2x config file rev 2016102101
|
||||
|
||||
## ---------- GENERAL OPTIONS
|
||||
|
||||
[GENERAL]
|
||||
CONFIG_FILE_REVISION=1.3.0
|
||||
|
||||
## Sync job identification
|
||||
INSTANCE_ID="local"
|
||||
|
@ -23,8 +24,11 @@ SSH_RSA_PRIVATE_KEY="/home/backupuser/.ssh/id_rsa"
|
|||
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
|
||||
SSH_PASSWORD_FILE=""
|
||||
|
||||
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
|
||||
_REMOTE_TOKEN=SomeAlphaNumericToken9
|
||||
|
||||
## Create sync directories if they do not exist
|
||||
CREATE_DIRS=no
|
||||
CREATE_DIRS=false
|
||||
|
||||
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
|
||||
LOGFILE=""
|
||||
|
@ -36,7 +40,7 @@ MINIMUM_SPACE=10240
|
|||
BANDWIDTH=0
|
||||
|
||||
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
|
||||
SUDO_EXEC=no
|
||||
SUDO_EXEC=false
|
||||
## Paranoia option. Don't change this unless you read the documentation.
|
||||
RSYNC_EXECUTABLE=rsync
|
||||
## Remote rsync executable path. Leave this empty in most cases
|
||||
|
@ -61,46 +65,53 @@ RSYNC_EXCLUDE_FROM=""
|
|||
## List elements separator char. You may set an alternative separator char for your directories lists above.
|
||||
PATH_SEPARATOR_CHAR=";"
|
||||
|
||||
## ---------- REMOTE SYNC OPTIONS
|
||||
[REMOTE_OPTIONS]
|
||||
|
||||
## ssh compression should be used unless your remote connection is good enough (LAN)
|
||||
SSH_COMPRESSION=yes
|
||||
SSH_COMPRESSION=true
|
||||
|
||||
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
|
||||
SSH_IGNORE_KNOWN_HOSTS=no
|
||||
SSH_IGNORE_KNOWN_HOSTS=false
|
||||
SSH_CONTROLMASTER=false
|
||||
|
||||
|
||||
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
|
||||
REMOTE_HOST_PING=no
|
||||
REMOTE_HOST_PING=false
|
||||
|
||||
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
|
||||
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
|
||||
## Be aware some DNS like opendns redirect false hostnames. Also, this adds an extra execution time of a bit less than a minute.
|
||||
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
|
||||
|
||||
## ---------- MISC OPTIONS
|
||||
[MISC_OPTIONS]
|
||||
|
||||
## Optional arguments passed to rsync executable. The following are already managed by the program and should never be passed here
|
||||
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz –skip-compress –checksum –bwlimit –partial –partial-dir –no-whole-file –whole-file –backup –backup-dir –suffix
|
||||
## --exclude --exclude-from --include --include-from --list-only --stats
|
||||
RSYNC_OPTIONAL_ARGS=""
|
||||
|
||||
## Preserve basic linux permissions
|
||||
PRESERVE_PERMISSIONS=yes
|
||||
PRESERVE_OWNER=yes
|
||||
PRESERVE_GROUP=yes
|
||||
PRESERVE_PERMISSIONS=true
|
||||
PRESERVE_OWNER=true
|
||||
PRESERVE_GROUP=true
|
||||
## On MACOS X, does not work and will be ignored
|
||||
PRESERVE_EXECUTABILITY=yes
|
||||
PRESERVE_EXECUTABILITY=true
|
||||
|
||||
## Preserve ACLS. Make sure source and target FS can manage same ACLs or you'll get loads of errors.
|
||||
PRESERVE_ACL=yes
|
||||
PRESERVE_ACL=false
|
||||
## Preserve Xattr. Make sure source and target FS can manage same Xattrs or you'll get loads of errors.
|
||||
PRESERVE_XATTR=yes
|
||||
PRESERVE_XATTR=false
|
||||
## Transforms symlinks into referent files/dirs
|
||||
COPY_SYMLINKS=no
|
||||
COPY_SYMLINKS=false
|
||||
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
|
||||
KEEP_DIRLINKS=no
|
||||
KEEP_DIRLINKS=false
|
||||
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
|
||||
PRESERVE_HARDLINKS=no
|
||||
PRESERVE_HARDLINKS=false
|
||||
## Do a full checksum on all files that have identical sizes, they are checksummed to see if they actually are identical. This can take a long time.
|
||||
CHECKSUM=no
|
||||
CHECKSUM=false
|
||||
|
||||
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
|
||||
RSYNC_COMPRESS=yes
|
||||
RSYNC_COMPRESS=true
|
||||
|
||||
## Maximum execution time (in seconds) for sync process. Setting these values to zero will disable max execution times.
|
||||
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
|
||||
|
@ -117,46 +128,60 @@ MIN_WAIT=60
|
|||
## Use 0 to wait indefinitely.
|
||||
MAX_WAIT=7200
|
||||
|
||||
## ---------- BACKUP AND DELETION OPTIONS
|
||||
[BACKUP_DELETE_OPTIONS]
|
||||
|
||||
## Log a list of conflictual files
|
||||
LOG_CONFLICTS=true
|
||||
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
|
||||
ALERT_CONFLICTS=false
|
||||
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
|
||||
CONFLICT_BACKUP=yes
|
||||
CONFLICT_BACKUP=true
|
||||
## Keep multiple backup versions of the same file. Warning, This can be very space consuming.
|
||||
CONFLICT_BACKUP_MULTIPLE=no
|
||||
CONFLICT_BACKUP_MULTIPLE=false
|
||||
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
|
||||
CONFLICT_BACKUP_DAYS=30
|
||||
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winning replica.
|
||||
CONFLICT_PREVALANCE=initiator
|
||||
|
||||
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
|
||||
SOFT_DELETE=yes
|
||||
SOFT_DELETE=true
|
||||
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
|
||||
SOFT_DELETE_DAYS=30
|
||||
|
||||
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
|
||||
SKIP_DELETION=
|
||||
|
||||
## ---------- RESUME OPTIONS
|
||||
[RESUME_OPTIONS]
|
||||
|
||||
## Try to resume an aborted sync task
|
||||
RESUME_SYNC=yes
|
||||
RESUME_SYNC=true
|
||||
## Maximum number of resume tries before initiating a fresh sync.
|
||||
RESUME_TRY=2
|
||||
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
|
||||
FORCE_STRANGER_LOCK_RESUME=no
|
||||
FORCE_STRANGER_LOCK_RESUME=false
|
||||
|
||||
## Keep partial uploads that can be resumed on next run, experimental feature
|
||||
PARTIAL=no
|
||||
PARTIAL=false
|
||||
|
||||
## Use delta copy algorithm (useful when local paths are network drives), defaults to true
|
||||
DELTA_COPIES=yes
|
||||
DELTA_COPIES=true
|
||||
|
||||
## ---------- ALERT OPTIONS
|
||||
[ALERT_OPTIONS]
|
||||
|
||||
## List of alert mails separated by spaces
|
||||
## Most Unix systems (including Win10 bash) have mail support out of the box
|
||||
## Just make sure that the current user has enough privileges to use mail / mutt / sendmail and that the mail system is configured to allow outgoing mails
|
||||
## on pfSense platform, smtp support needs to be configured in System > Advanced > Notifications
|
||||
DESTINATION_MAILS=""
|
||||
|
||||
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/)
|
||||
## Optional change of mail body encoding (using iconv)
|
||||
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
|
||||
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
|
||||
MAIL_BODY_CHARSET=""
|
||||
|
||||
## Additional mail parameters needed for Android / Busybox / Cygwin / MSYS
|
||||
## Android & Busybox use sendmail (and openssl if encryption is needed)
|
||||
## MSYS & Cygwin Windows mail support relies on mailsend.exe from muquit, http://github.com/muquit/mailsend which needs to be in %PATH% environment variable
|
||||
SENDER_MAIL="alert@your.system.tld"
|
||||
SMTP_SERVER=smtp.your.isp.tld
|
||||
SMTP_PORT=25
|
||||
|
@ -165,7 +190,7 @@ SMTP_ENCRYPTION=none
|
|||
SMTP_USER=
|
||||
SMTP_PASSWORD=
|
||||
|
||||
## ---------- EXECUTION HOOKS
|
||||
[EXECUTION_HOOKS]
|
||||
|
||||
## Commands will be run before and / or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
|
||||
LOCAL_RUN_BEFORE_CMD=""
|
||||
|
@ -179,7 +204,7 @@ MAX_EXEC_TIME_PER_CMD_BEFORE=0
|
|||
MAX_EXEC_TIME_PER_CMD_AFTER=0
|
||||
|
||||
## Stops osync execution if one of the above commands fail
|
||||
STOP_ON_CMD_ERROR=yes
|
||||
STOP_ON_CMD_ERROR=true
|
||||
|
||||
## Run local and remote after sync commands even on failure
|
||||
RUN_AFTER_CMD_ON_ERROR=no
|
||||
RUN_AFTER_CMD_ON_ERROR=false
|
||||
|
|
|
@ -2,9 +2,10 @@
|
|||
|
||||
###### osync - Rsync based two way sync engine with fault tolerance
|
||||
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
|
||||
###### osync v1.1x / v1.2x config file rev 2016102101
|
||||
|
||||
## ---------- GENERAL OPTIONS
|
||||
|
||||
[GENERAL]
|
||||
CONFIG_FILE_REVISION=1.3.0
|
||||
|
||||
## Sync job identification
|
||||
INSTANCE_ID="remote"
|
||||
|
@ -15,18 +16,19 @@ INITIATOR_SYNC_DIR="${HOME}/osync-tests/initiator"
|
|||
|
||||
## Target is the system osync synchronizes to (can be the same system as the initiator in case of local sync tasks). The target directory can be a local or remote path.
|
||||
#TARGET_SYNC_DIR="${HOME}/osync-tests/target"
|
||||
TARGET_SYNC_DIR="ssh://root@localhost:49999/${HOME}/osync-tests/target"
|
||||
TARGET_SYNC_DIR="ssh://root@localhost:44999/${HOME}/osync-tests/target"
|
||||
|
||||
## If the target system is remote, you can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
|
||||
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local"
|
||||
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_osync_tests"
|
||||
|
||||
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
|
||||
SSH_PASSWORD_FILE=""
|
||||
|
||||
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
|
||||
_REMOTE_TOKEN=SomeAlphaNumericToken9
|
||||
|
||||
## Create sync directories if they do not exist
|
||||
CREATE_DIRS=no
|
||||
CREATE_DIRS=false
|
||||
|
||||
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
|
||||
LOGFILE=""
|
||||
|
@ -38,7 +40,7 @@ MINIMUM_SPACE=10240
|
|||
BANDWIDTH=0
|
||||
|
||||
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
|
||||
SUDO_EXEC=no
|
||||
SUDO_EXEC=false
|
||||
## Paranoia option. Don't change this unless you read the documentation.
|
||||
RSYNC_EXECUTABLE=rsync
|
||||
## Remote rsync executable path. Leave this empty in most cases
|
||||
|
@ -63,46 +65,53 @@ RSYNC_EXCLUDE_FROM=""
|
|||
## List elements separator char. You may set an alternative separator char for your directories lists above.
|
||||
PATH_SEPARATOR_CHAR=";"
|
||||
|
||||
## ---------- REMOTE SYNC OPTIONS
|
||||
[REMOTE_OPTIONS]
|
||||
|
||||
## ssh compression should be used unless your remote connection is good enough (LAN)
|
||||
SSH_COMPRESSION=yes
|
||||
SSH_COMPRESSION=true
|
||||
|
||||
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
|
||||
SSH_IGNORE_KNOWN_HOSTS=no
|
||||
SSH_IGNORE_KNOWN_HOSTS=false
|
||||
SSH_CONTROLMASTER=false
|
||||
|
||||
|
||||
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
|
||||
REMOTE_HOST_PING=yes
|
||||
REMOTE_HOST_PING=true
|
||||
|
||||
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
|
||||
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
|
||||
## Be aware some DNS like opendns redirect false hostnames. Also, this adds an extra execution time of a bit less than a minute.
|
||||
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
|
||||
|
||||
## ---------- MISC OPTIONS
|
||||
[MISC_OPTIONS]
|
||||
|
||||
## Optional arguments passed to rsync executable. The following are already managed by the program and should never be passed here
|
||||
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz –skip-compress –checksum –bwlimit –partial –partial-dir –no-whole-file –whole-file –backup –backup-dir –suffix
|
||||
## --exclude --exclude-from --include --include-from --list-only --stats
|
||||
RSYNC_OPTIONAL_ARGS=""
|
||||
|
||||
## Preserve basic linux permissions
|
||||
PRESERVE_PERMISSIONS=yes
|
||||
PRESERVE_OWNER=yes
|
||||
PRESERVE_GROUP=yes
|
||||
PRESERVE_PERMISSIONS=true
|
||||
PRESERVE_OWNER=true
|
||||
PRESERVE_GROUP=true
|
||||
## On MACOS X, does not work and will be ignored
|
||||
PRESERVE_EXECUTABILITY=yes
|
||||
PRESERVE_EXECUTABILITY=true
|
||||
|
||||
## Preserve ACLS. Make sure source and target FS can manage same ACLs or you'll get loads of errors.
|
||||
PRESERVE_ACL=yes
|
||||
PRESERVE_ACL=false
|
||||
## Preserve Xattr. Make sure source and target FS can manage same Xattrs or you'll get loads of errors.
|
||||
PRESERVE_XATTR=yes
|
||||
PRESERVE_XATTR=false
|
||||
## Transforms symlinks into referent files/dirs
|
||||
COPY_SYMLINKS=no
|
||||
COPY_SYMLINKS=false
|
||||
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
|
||||
KEEP_DIRLINKS=no
|
||||
KEEP_DIRLINKS=false
|
||||
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
|
||||
PRESERVE_HARDLINKS=no
|
||||
PRESERVE_HARDLINKS=false
|
||||
## Do a full checksum on all files that have identical sizes, they are checksummed to see if they actually are identical. This can take a long time.
|
||||
CHECKSUM=no
|
||||
CHECKSUM=false
|
||||
|
||||
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
|
||||
RSYNC_COMPRESS=yes
|
||||
RSYNC_COMPRESS=true
|
||||
|
||||
## Maximum execution time (in seconds) for sync process. Setting these values to zero will disable max execution times.
|
||||
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
|
||||
|
@ -119,46 +128,60 @@ MIN_WAIT=60
|
|||
## Use 0 to wait indefinitely.
|
||||
MAX_WAIT=7200
|
||||
|
||||
## ---------- BACKUP AND DELETION OPTIONS
|
||||
[BACKUP_DELETE_OPTIONS]
|
||||
|
||||
## Log a list of conflictual files
|
||||
LOG_CONFLICTS=true
|
||||
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
|
||||
ALERT_CONFLICTS=false
|
||||
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
|
||||
CONFLICT_BACKUP=yes
|
||||
CONFLICT_BACKUP=true
|
||||
## Keep multiple backup versions of the same file. Warning, This can be very space consuming.
|
||||
CONFLICT_BACKUP_MULTIPLE=no
|
||||
CONFLICT_BACKUP_MULTIPLE=false
|
||||
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
|
||||
CONFLICT_BACKUP_DAYS=30
|
||||
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winning replica.
|
||||
CONFLICT_PREVALANCE=initiator
|
||||
|
||||
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
|
||||
SOFT_DELETE=yes
|
||||
SOFT_DELETE=true
|
||||
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
|
||||
SOFT_DELETE_DAYS=30
|
||||
|
||||
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
|
||||
SKIP_DELETION=
|
||||
|
||||
## ---------- RESUME OPTIONS
|
||||
[RESUME_OPTIONS]
|
||||
|
||||
## Try to resume an aborted sync task
|
||||
RESUME_SYNC=yes
|
||||
RESUME_SYNC=true
|
||||
## Maximum number of resume tries before initiating a fresh sync.
|
||||
RESUME_TRY=2
|
||||
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
|
||||
FORCE_STRANGER_LOCK_RESUME=no
|
||||
FORCE_STRANGER_LOCK_RESUME=false
|
||||
|
||||
## Keep partial uploads that can be resumed on next run, experimental feature
|
||||
PARTIAL=no
|
||||
PARTIAL=false
|
||||
|
||||
## Use delta copy algorithm (useful when local paths are network drives), defaults to true
|
||||
DELTA_COPIES=yes
|
||||
DELTA_COPIES=true
|
||||
|
||||
## ---------- ALERT OPTIONS
|
||||
[ALERT_OPTIONS]
|
||||
|
||||
## List of alert mails separated by spaces
|
||||
## Most Unix systems (including Win10 bash) have mail support out of the box
|
||||
## Just make sure that the current user has enough privileges to use mail / mutt / sendmail and that the mail system is configured to allow outgoing mails
|
||||
## on pfSense platform, smtp support needs to be configured in System > Advanced > Notifications
|
||||
DESTINATION_MAILS=""
|
||||
|
||||
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/)
|
||||
## Optional change of mail body encoding (using iconv)
|
||||
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
|
||||
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
|
||||
MAIL_BODY_CHARSET=""
|
||||
|
||||
## Additional mail parameters needed for Android / Busybox / Cygwin / MSYS
|
||||
## Android & Busybox use sendmail (and openssl if encryption is needed)
|
||||
## MSYS & Cygwin Windows mail support relies on mailsend.exe from muquit, http://github.com/muquit/mailsend which needs to be in %PATH% environment variable
|
||||
SENDER_MAIL="alert@your.system.tld"
|
||||
SMTP_SERVER=smtp.your.isp.tld
|
||||
SMTP_PORT=25
|
||||
|
@ -167,7 +190,7 @@ SMTP_ENCRYPTION=none
|
|||
SMTP_USER=
|
||||
SMTP_PASSWORD=
|
||||
|
||||
## ---------- EXECUTION HOOKS
|
||||
[EXECUTION_HOOKS]
|
||||
|
||||
## Commands will be run before and / or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
|
||||
LOCAL_RUN_BEFORE_CMD=""
|
||||
|
@ -181,7 +204,7 @@ MAX_EXEC_TIME_PER_CMD_BEFORE=0
|
|||
MAX_EXEC_TIME_PER_CMD_AFTER=0
|
||||
|
||||
## Stops osync execution if one of the above commands fail
|
||||
STOP_ON_CMD_ERROR=yes
|
||||
STOP_ON_CMD_ERROR=true
|
||||
|
||||
## Run local and remote after sync commands even on failure
|
||||
RUN_AFTER_CMD_ON_ERROR=no
|
||||
RUN_AFTER_CMD_ON_ERROR=false
|
||||
|
|
|
@ -1,14 +1,18 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
## If this script is stopped while running, config file values and the IS_STABLE value might be left in an inconsistent state
|
||||
# osync test suite 2023061401
|
||||
|
||||
|
||||
# Allows the following environment variables
|
||||
# RUNNING_ON_GITHUB_ACTIONS=[true|false]
|
||||
# SSH_PORT=22
|
||||
# SKIP_REMOTE=[true|false]
|
||||
|
||||
## On Mac OSX, this needs to be run as root in order to use sudo without password
|
||||
## From current terminal run sudo -s in order to get a new terminal as root
|
||||
|
||||
## On CYGWIN / MSYS, ACL and extended attributes aren't supported
|
||||
|
||||
# osync test suite 2017040801
|
||||
|
||||
# 4 tests:
|
||||
# quicklocal
|
||||
# quickremote (with ssh_filter.sh)
|
||||
|
@ -18,6 +22,7 @@
|
|||
# for each test
|
||||
# files with spaces, subdirs
|
||||
# largefileset (...large ?)
|
||||
# quickremote test with controlmaster enabled
|
||||
# exclusions
|
||||
# conflict resolution initiator with backups / multiple backups
|
||||
# conflict resolution target with backups / multiple backups
|
||||
|
@ -26,6 +31,7 @@
|
|||
# replica lock checks
|
||||
# file attribute tests
|
||||
# local / remote locking resume tests
|
||||
# conflict detection
|
||||
# timed execution tests
|
||||
|
||||
# function test
|
||||
|
@ -37,28 +43,43 @@
|
|||
# setfacl needs double ':' to be compatible with both linux and BSD
|
||||
# setfacl -m o::rwx file
|
||||
|
||||
# On Windows 10 bash, we need to create host SSH keys first with ssh-keygen -A
|
||||
# Then start ssh with service ssh start
|
||||
|
||||
# TODO, use copies of config file on each test function
|
||||
|
||||
if [ "$SKIP_REMOTE" = "" ]; then
|
||||
SKIP_REMOTE=false
|
||||
REMOTE_USER=root
|
||||
fi
|
||||
|
||||
homedir=$(eval echo ~${REMOTE_USER})
|
||||
|
||||
# drupal servers are often unreachable for whatever reason or give 0-byte files
|
||||
#LARGE_FILESET_URL="http://ftp.drupal.org/files/projects/drupal-8.2.2.tar.gz"
|
||||
LARGE_FILESET_URL="http://www.netpower.fr/sites/default/files/osync-test-files-drupal-8.2.2.tar.gz"
|
||||
LARGE_FILESET_URL="https://ftp.drupal.org/files/projects/drupal-11.0.10.tar.gz"
|
||||
|
||||
OSYNC_DIR="$(pwd)"
|
||||
OSYNC_DIR=${OSYNC_DIR%%/dev*}
|
||||
DEV_DIR="$OSYNC_DIR/dev"
|
||||
TESTS_DIR="$DEV_DIR/tests"
|
||||
|
||||
# Fakeroot for install / uninstall and test of executables
|
||||
FAKEROOT="${homedir}/osync_test_install"
|
||||
|
||||
CONF_DIR="$TESTS_DIR/conf"
|
||||
LOCAL_CONF="local.conf"
|
||||
REMOTE_CONF="remote.conf"
|
||||
OLD_CONF="old.conf"
|
||||
TMP_OLD_CONF="tmp.old.conf"
|
||||
|
||||
OSYNC_EXECUTABLE="osync.sh"
|
||||
OSYNC_EXECUTABLE="$FAKEROOT/usr/local/bin/osync.sh"
|
||||
OSYNC_DEV_EXECUTABLE="dev/n_osync.sh"
|
||||
OSYNC_UPGRADE="upgrade-v1.0x-v1.2x.sh"
|
||||
OSYNC_UPGRADE="upgrade-v1.0x-v1.3x.sh"
|
||||
TMP_FILE="$DEV_DIR/tmp"
|
||||
|
||||
|
||||
OSYNC_TESTS_DIR="${HOME}/osync-tests"
|
||||
OSYNC_TESTS_DIR="${homedir}/osync-tests"
|
||||
INITIATOR_DIR="$OSYNC_TESTS_DIR/initiator"
|
||||
TARGET_DIR="$OSYNC_TESTS_DIR/target"
|
||||
OSYNC_WORKDIR=".osync_workdir"
|
||||
|
@ -71,30 +92,56 @@ OSYNC_VERSION=1.x.y
|
|||
OSYNC_MIN_VERSION=x
|
||||
OSYNC_IS_STABLE=maybe
|
||||
|
||||
PRIVKEY_NAME="id_rsa_local_osync_tests"
|
||||
PUBKEY_NAME="${PRIVKEY_NAME}.pub"
|
||||
|
||||
function SetupSSH {
|
||||
echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${HOME}/.ssh/id_rsa_local"
|
||||
if ! grep "$(cat ${HOME}/.ssh/id_rsa_local.pub)" "${HOME}/.ssh/authorized_keys"; then
|
||||
echo "from=\"*\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,command=\"/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9\" $(cat ${HOME}/.ssh/id_rsa_local.pub)" >> "${HOME}/.ssh/authorized_keys"
|
||||
echo "Setting up an ssh key to ${homedir}/.ssh/${PRIVKEY_NAME}"
|
||||
echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${homedir}/.ssh/${PRIVKEY_NAME}"
|
||||
|
||||
|
||||
|
||||
SSH_AUTH_LINE="from=\"*\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,command=\"$FAKEROOT/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9\" $(cat ${homedir}/.ssh/${PUBKEY_NAME})"
|
||||
echo "ls -alh ${homedir}"
|
||||
ls -alh "${homedir}"
|
||||
echo "ls -alh ${homedir}/.ssh"
|
||||
ls -alh "${homedir}/.ssh"
|
||||
|
||||
if [ -f "${homedir}/.ssh/authorized_keys" ]; then
|
||||
if ! grep "$(cat ${homedir}/.ssh/${PUBKEY_NAME})" "${homedir}/.ssh/authorized_keys"; then
|
||||
echo "Adding auth line in authorized_keys file ${homedir}/.ssh/authorized_keys"
|
||||
echo "$SSH_AUTH_LINE" >> "${homedir}/.ssh/authorized_keys"
|
||||
fi
|
||||
else
|
||||
echo "Creating authorized_keys file ${homedir}/.ssh/authorized_keys"
|
||||
echo "$SSH_AUTH_LINE" >> "${homedir}/.ssh/authorized_keys"
|
||||
fi
|
||||
chmod 600 "${HOME}/.ssh/authorized_keys"
|
||||
chmod 600 "${homedir}/.ssh/authorized_keys"
|
||||
|
||||
# Add localhost to known hosts so self connect works
|
||||
if [ -z "$(ssh-keygen -F localhost)" ]; then
|
||||
ssh-keyscan -H localhost >> "${HOME}/.ssh/known_hosts"
|
||||
ssh-keyscan -H localhost >> "${homedir}/.ssh/known_hosts"
|
||||
fi
|
||||
|
||||
# Update remote conf files with SSH port
|
||||
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${HOME}/osync-tests/target#' "$CONF_DIR/$REMOTE_CONF"
|
||||
# Update remote conf files with SSH port and file id location
|
||||
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/'${homedir}'/osync-tests/target#' "$CONF_DIR/$REMOTE_CONF"
|
||||
sed -i.tmp2 's#SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_osync_tests"#SSH_RSA_PRIVATE_KEY="'${homedir}'/.ssh/id_rsa_local_osync_tests"#' "$CONF_DIR/$REMOTE_CONF"
|
||||
|
||||
echo "ls -alh ${homedir}/.ssh"
|
||||
ls -alh "${homedir}/.ssh"
|
||||
echo "cat ${homedir}/.ssh/authorized_keys"
|
||||
cat "${homedir}/.ssh/authorized_keys"
|
||||
|
||||
echo "###"
|
||||
echo "END SETUP SSH"
|
||||
}
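A hedged manual check of the key and ssh_filter wiring that SetupSSH puts in place, using the same variables as the tests below (port and user are whatever the test run configured):

ssh -i "${homedir}/.ssh/${PRIVKEY_NAME}" -p "$SSH_PORT" "${REMOTE_USER}@localhost" \
	"env _REMOTE_TOKEN=SomeAlphaNumericToken9 echo \"filter accepted the command\""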
|
||||
|
||||
function RemoveSSH {
|
||||
local pubkey
|
||||
|
||||
if [ -f "${HOME}/.ssh/id_rsa_local" ]; then
|
||||
|
||||
pubkey=$(cat "${HOME}/.ssh/id_rsa_local.pub")
|
||||
sed -i.bak "s|.*$pubkey.*||g" "${HOME}/.ssh/authorized_keys"
|
||||
rm -f "${HOME}/.ssh/{id_rsa_local.pub,id_rsa_local}"
|
||||
echo "Now removing SSH keys"
|
||||
if [ -f "${homedir}/.ssh/id_rsa_local_osync_tests" ]; then
|
||||
echo "Restoring SSH authorized_keys file"
|
||||
sed -i.bak "s|.*$(cat "${homedir}/.ssh/id_rsa_local_osync_tests.pub")||g" "${homedir}/.ssh/authorized_keys"
|
||||
rm -f "${homedir}/.ssh/{id_rsa_local_osync_tests.pub,id_rsa_local_osync_tests}"
|
||||
fi
|
||||
}
|
||||
|
||||
|
@ -143,21 +190,27 @@ function CreateOldFile () {
|
|||
}
|
||||
|
||||
function PrepareLocalDirs () {
|
||||
# Remote dirs are the same as local dirs, so no problem here
|
||||
if [ -d "$INITIATOR_DIR" ]; then
|
||||
rm -rf "$INITIATOR_DIR"
|
||||
if [ -d "$OSYNC_TESTS_DIR" ]; then
|
||||
rm -rf "$OSYNC_TESTS_DIR"
|
||||
fi
|
||||
mkdir -p "$INITIATOR_DIR"
|
||||
|
||||
if [ -d "$TARGET_DIR" ]; then
|
||||
rm -rf "$TARGET_DIR"
|
||||
fi
|
||||
mkdir -p "$TARGET_DIR"
|
||||
mkdir "$OSYNC_TESTS_DIR"
|
||||
mkdir "$INITIATOR_DIR"
|
||||
mkdir "$TARGET_DIR"
|
||||
}
|
||||
|
||||
function oneTimeSetUp () {
|
||||
START_TIME=$SECONDS
|
||||
|
||||
#echo "Running forced merge"
|
||||
#cd "${DEV_DIR}"
|
||||
#$SUDO_CMD ./merge.sh osync
|
||||
echo "Setting security for files"
|
||||
$SUDO_CMD find ${OSYNC_DIR} -exec chmod 755 {} \+
|
||||
|
||||
echo "Show content of osync dir"
|
||||
ls -alh ${OSYNC_DIR}
|
||||
echo "Running install.sh from ${OSYNC_DIR}"
|
||||
$SUDO_CMD ${OSYNC_DIR}/install.sh --no-stats --prefix="${FAKEROOT}"
|
||||
source "$DEV_DIR/ofunctions.sh"
|
||||
|
||||
# Fix default umask because of ACL test that expects 0022 when creating test files
|
||||
|
@ -168,29 +221,45 @@ function oneTimeSetUp () {
|
|||
echo "Detected OS: $LOCAL_OS"
|
||||
|
||||
# Set some travis related changes
|
||||
if [ "$TRAVIS_RUN" == true ]; then
|
||||
echo "Running with travis settings"
|
||||
REMOTE_USER="travis"
|
||||
RHOST_PING="no"
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
|
||||
echo "Running with GITHUB ACTIONS settings"
|
||||
#REMOTE_USER="runner"
|
||||
REMOTE_USER="root" # WIP
|
||||
homedir=$(eval echo ~${REMOTE_USER})
|
||||
RHOST_PING=false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" false
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "no"
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" false
|
||||
|
||||
else
|
||||
echo "Running with local settings"
|
||||
REMOTE_USER="root"
|
||||
RHOST_PING="yes"
|
||||
homedir=$(eval echo ~${REMOTE_USER})
|
||||
RHOST_PING=true
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" true
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" true
|
||||
fi
|
||||
|
||||
|
||||
# Fix test directories for Github actions
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" INITIATOR_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" TARGET_SYNC_DIR "\"${homedir}/osync-tests/target\""
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" INITIATOR_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" MASTER_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
|
||||
SetConfFileValue "$CONF_DIR/$OLD_CONF" SLAVE_SYNC_DIR "\"${homedir}/osync-tests/target\""
|
||||
|
||||
|
||||
# Get default ssh port from env
|
||||
if [ "$SSH_PORT" == "" ]; then
|
||||
SSH_PORT=22
|
||||
echo "Running with SSH_PORT=${SSH_PORT}"
|
||||
fi
|
||||
|
||||
# Setup modes per test
|
||||
|
@ -200,8 +269,8 @@ function oneTimeSetUp () {
|
|||
readonly __confRemote=3
|
||||
|
||||
osyncParameters=()
|
||||
osyncParameters[$__quickLocal]="--initiator=$INITIATOR_DIR --target=$TARGET_DIR --instance-id=quicklocal"
|
||||
osyncParameters[$__confLocal]="$CONF_DIR/$LOCAL_CONF"
|
||||
osyncParameters[$__quickLocal]="--initiator=$INITIATOR_DIR --target=$TARGET_DIR --instance-id=quicklocal --non-interactive"
|
||||
osyncParameters[$__confLocal]="$CONF_DIR/$LOCAL_CONF --non-interactive"
|
||||
|
||||
osyncDaemonParameters=()
|
||||
|
||||
|
@ -210,9 +279,10 @@ function oneTimeSetUp () {
|
|||
|
||||
osyncDaemonParameters[$__local]="$CONF_DIR/$LOCAL_CONF --on-changes"
|
||||
|
||||
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ]; then
|
||||
osyncParameters[$__quickRemote]="--initiator=$INITIATOR_DIR --target=ssh://localhost:$SSH_PORT/$TARGET_DIR --rsakey=${HOME}/.ssh/id_rsa_local --instance-id=quickremote --remote-token=SomeAlphaNumericToken9"
|
||||
osyncParameters[$__confRemote]="$CONF_DIR/$REMOTE_CONF"
|
||||
# Do not check remote config on msys or cygwin since we don't have a local SSH server
|
||||
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ] && [ $SKIP_REMOTE != true ]; then
|
||||
osyncParameters[$__quickRemote]="--initiator=$INITIATOR_DIR --target=ssh://localhost:$SSH_PORT/$TARGET_DIR --rsakey=${homedir}/.ssh/id_rsa_local_osync_tests --instance-id=quickremote --remote-token=SomeAlphaNumericToken9 --non-interactive"
|
||||
osyncParameters[$__confRemote]="$CONF_DIR/$REMOTE_CONF --non-interactive"
|
||||
|
||||
osyncDaemonParameters[$__remote]="$CONF_DIR/$REMOTE_CONF --on-changes"
|
||||
|
||||
|
@ -245,14 +315,14 @@ function oneTimeSetUp () {
|
|||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "SKIP_DELETION" ""
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "SKIP_DELETION" ""
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "COPY_SYMLINKS" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "COPY_SYMLINKS" "no"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "COPY_SYMLINKS" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "COPY_SYMLINKS" false
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" false
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" false
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "SOFT_MAX_EXEC_TIME" "7200"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "HARD_MAX_EXEC_TIME" "10600"
|
||||
|
@ -260,20 +330,25 @@ function oneTimeSetUp () {
|
|||
}
|
||||
|
||||
function oneTimeTearDown () {
|
||||
# Set osync version stable flag back to origin
|
||||
SetConfFileValue "$OSYNC_DIR/$OSYNC_EXECUTABLE" "IS_STABLE" "$OSYNC_IS_STABLE"
|
||||
|
||||
RemoveSSH
|
||||
# Set osync version stable flag back to origin
|
||||
#SetConfFileValue "$OSYNC_DIR/osync.sh" "IS_STABLE" "$OSYNC_IS_STABLE"
|
||||
|
||||
if [ "$SKIP_REMOTE" != true ]; then
|
||||
RemoveSSH
|
||||
fi
|
||||
|
||||
#TODO: uncomment this when dev is done
|
||||
#rm -rf "$OSYNC_TESTS_DIR"
|
||||
rm -f "$TMP_FILE"
|
||||
|
||||
cd "$OSYNC_DIR"
|
||||
$SUDO_CMD ./install.sh --remove --no-stats
|
||||
echo ""
|
||||
echo "Uninstalling osync from $FAKEROOT"
|
||||
$SUDO_CMD ./install.sh --remove --no-stats --prefix="$FAKEROOT"
|
||||
assertEquals "Uninstall failed" "0" $?
|
||||
|
||||
ELAPSED_TIME=$(($SECONDS - $START_TIME))
|
||||
ELAPSED_TIME=$((SECONDS-START_TIME))
|
||||
echo "It took $ELAPSED_TIME seconds to run these tests."
|
||||
}
|
||||
|
||||
|
@ -282,18 +357,70 @@ function setUp () {
|
|||
rm -rf "$TARGET_DIR"
|
||||
}
|
||||
|
||||
function test_SSH {
|
||||
# Make sure we have SSH working on the test server
|
||||
# This has become kind of tricky on github actions servers
|
||||
echo "Testing SSH"
|
||||
|
||||
failure=false
|
||||
|
||||
# Testing as "remote user"
|
||||
echo "ls -alh ${homedir}/.ssh"
|
||||
ls -alh "${homedir}/.ssh"
|
||||
|
||||
echo "Running SSH test as ${REMOTE_USER}"
|
||||
# SSH_PORT and SSH_USER are set by oneTimeSetup
|
||||
$SUDO_CMD ssh -i "${homedir}/.ssh/${PRIVKEY_NAME}" -p $SSH_PORT ${REMOTE_USER}@localhost "env _REMOTE_TOKEN=SomeAlphaNumericToken9 echo \"Remotely:\"; whoami; echo \"TEST OK\""
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "SSH test failed"
|
||||
failure=true
|
||||
fi
|
||||
|
||||
# Testing as current user
|
||||
#echo "ls -alh ${homedir}/.ssh"
|
||||
#ls -alh "${homedir}/.ssh"
|
||||
|
||||
#echo "Running SSH test as $(whoami)"
|
||||
#$SUDO_CMD ssh -i "${homedir}/.ssh/${PRIVKEY_NAME}" -p $SSH_PORT $(whoami)@localhost "env _REMOTE_TOKEN=SomeAlphaNumericToken9 echo \"Remotely:\"; whoami; echo \"TEST OK\""
|
||||
#if [ $? -ne 0 ]; then
|
||||
# echo "SSH test failed"
|
||||
# failure=true
|
||||
#fi
|
||||
|
||||
if [ $failure == true ]; then
|
||||
exit 1 # Try to see if we can abort all tests
|
||||
assertEquals "Test SSH failed" false $failure
|
||||
fi
|
||||
}
|
||||
|
||||
# This test has to be done every time so that the osync executable is fresh
|
||||
function test_Merge () {
|
||||
cd "$DEV_DIR"
|
||||
./merge.sh
|
||||
./merge.sh osync
|
||||
assertEquals "Merging code" "0" $?
|
||||
|
||||
#WIP use debug code
|
||||
alias cp=cp
|
||||
cp "$DEV_DIR/debug_osync.sh" "$OSYNC_DIR/osync.sh"
|
||||
|
||||
cd "$OSYNC_DIR"
|
||||
$SUDO_CMD ./install.sh --no-stats
|
||||
assertEquals "Install failed" "0" $?
|
||||
|
||||
echo ""
|
||||
echo "Installing osync to $FAKEROOT"
|
||||
$SUDO_CMD ./install.sh --no-stats --prefix="$FAKEROOT"
|
||||
|
||||
# Set osync version to stable while testing to avoid warning message
|
||||
SetConfFileValue "$OSYNC_DIR/$OSYNC_EXECUTABLE" "IS_STABLE" "yes"
|
||||
# Don't use SetConfFileValue here since for whatever reason Travis does not like creating a sed temporary file in $FAKEROOT
|
||||
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
|
||||
$SUDO_CMD sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=true/' "$OSYNC_EXECUTABLE"
|
||||
else
|
||||
sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=true/' "$OSYNC_EXECUTABLE"
|
||||
fi
|
||||
#SetConfFileValue "$OSYNC_EXECUTABLE" "IS_STABLE" true
|
||||
|
||||
|
||||
assertEquals "Install failed" "0" $?
|
||||
}
|
||||
|
||||
function test_LargeFileSet () {
|
||||
|
@ -303,7 +430,7 @@ function test_LargeFileSet () {
|
|||
PrepareLocalDirs
|
||||
DownloadLargeFileSet "$INITIATOR_DIR"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "LargeFileSet test with parameters [$i]." "0" $?
|
||||
|
||||
[ -d "$INITIATOR_DIR/$OSYNC_STATE_DIR" ]
|
||||
|
@ -314,6 +441,15 @@ function test_LargeFileSet () {
|
|||
done
|
||||
}
|
||||
|
||||
function test_controlMaster () {
|
||||
cd "$OSYNC_DIR"
|
||||
|
||||
PrepareLocalDirs
|
||||
echo "Running with parameters ${osyncParameters[$__quickRemote]} --ssh-controlmaster"
|
||||
REMOTE_HOST_PING=$REMOTE_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]} --ssh-controlmaster
|
||||
assertEquals "Running quick remote test with controlmaster enabled." "0" $?
|
||||
}
|
||||
|
||||
function test_Exclusions () {
|
||||
# Will sync everything except php files
|
||||
# RSYNC_EXCLUDE_PATTERN="*.php" is set at runtime for quicksync and in config files for other runs
|
||||
|
@ -331,7 +467,7 @@ function test_Exclusions () {
|
|||
|
||||
numberOfPHPFiles=$(find "$INITIATOR_DIR" ! -wholename "$INITIATOR_DIR/$OSYNC_WORKDIR*" -name "*.php" | wc -l)
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING RSYNC_EXCLUDE_PATTERN="*.php" ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING RSYNC_EXCLUDE_PATTERN="*.php" $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Exclusions with parameters [$i]." "0" $?
|
||||
|
||||
numberOfInitiatorFiles=$(find "$INITIATOR_DIR" ! -wholename "$INITIATOR_DIR/$OSYNC_WORKDIR*" | wc -l)
|
||||
|
@ -343,10 +479,10 @@ function test_Exclusions () {
|
|||
}
|
||||
|
||||
function test_Deletetion () {
|
||||
local iFile1="$INITIATOR_DIR/ific"
|
||||
local iFile2="$INITIATOR_DIR/ifoc"
|
||||
local tFile1="$TARGET_DIR/tfic"
|
||||
local tFile2="$TARGET_DIR/tfoc"
|
||||
local iFile1="$INITIATOR_DIR/i fic"
|
||||
local iFile2="$INITIATOR_DIR/i foc (something)"
|
||||
local tFile1="$TARGET_DIR/t fic"
|
||||
local tFile2="$TARGET_DIR/t foc [nothing]"
|
||||
|
||||
|
||||
for i in "${osyncParameters[@]}"; do
|
||||
|
@ -358,13 +494,13 @@ function test_Deletetion () {
|
|||
touch "$tFile1"
|
||||
touch "$tFile2"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
rm -f "$iFile1"
|
||||
rm -f "$tFile1"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Second deletion run with parameters [$i]." "0" $?
|
||||
|
||||
[ -f "$TARGET_DIR/$OSYNC_DELETE_DIR/$(basename $iFile1)" ]
|
||||
|
@ -409,7 +545,7 @@ function test_deletion_failure () {
|
|||
touch "$INITIATOR_DIR/$FileA"
|
||||
touch "$TARGET_DIR/$FileB"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
rm -f "$INITIATOR_DIR/$FileA"
|
||||
|
@ -420,7 +556,7 @@ function test_deletion_failure () {
|
|||
$SUDO_CMD $IMMUTABLE_ON_CMD "$INITIATOR_DIR/$FileB"
|
||||
|
||||
# This should fail with exit code 1
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Second deletion run with parameters [$i]." "1" $?
|
||||
|
||||
# standard file tests
|
||||
|
@ -438,7 +574,7 @@ function test_deletion_failure () {
|
|||
$SUDO_CMD $IMMUTABLE_OFF_CMD "$TARGET_DIR/$FileA"
|
||||
$SUDO_CMD $IMMUTABLE_OFF_CMD "$INITIATOR_DIR/$FileB"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --verbose
|
||||
assertEquals "Third deletion run with parameters [$i]." "0" $?
|
||||
|
||||
[ ! -f "$TARGET_DIR/$FileA" ]
|
||||
|
@ -462,7 +598,7 @@ function test_skip_deletion () {
|
|||
fi
|
||||
|
||||
# TRAVIS SPECIFIC - time limitation
|
||||
if [ "$TRAVIS_RUN" != true ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" != true ]; then
|
||||
modes=('initiator' 'target' 'initiator,target')
|
||||
else
|
||||
modes=('target')
|
||||
|
@ -490,14 +626,14 @@ function test_skip_deletion () {
|
|||
touch "$TARGET_DIR/$FileB"
|
||||
|
||||
# First run
|
||||
REMOTE_HOST_PING=$RHOST_PING SKIP_DELETION="$mode" ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING SKIP_DELETION="$mode" $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
rm -f "$INITIATOR_DIR/$FileA"
|
||||
rm -f "$TARGET_DIR/$FileB"
|
||||
|
||||
# Second run
|
||||
REMOTE_HOST_PING=$RHOST_PING SKIP_DELETION="$mode" ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING SKIP_DELETION="$mode" $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
if [ "$mode" == "initiator" ]; then
|
||||
|
@ -541,7 +677,7 @@ function test_handle_symlinks () {
|
|||
fi
|
||||
|
||||
# Check with and without copySymlinks
|
||||
copySymlinks="no"
|
||||
copySymlinks=false
|
||||
|
||||
echo "Running with COPY_SYMLINKS=$copySymlinks"
|
||||
|
||||
|
@ -570,14 +706,14 @@ function test_handle_symlinks () {
|
|||
ln -s "$INITIATOR_DIR/$FileA" "$INITIATOR_DIR/$FileAL"
|
||||
ln -s "$TARGET_DIR/$FileB" "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First symlink run with parameters [$i]." "0" $?
|
||||
|
||||
# Delete symlinks
|
||||
rm -f "$INITIATOR_DIR/$FileAL"
|
||||
rm -f "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Second symlink deletion run with parameters [$i]." "0" $?
|
||||
|
||||
# symlink deletion propagation
|
||||
|
@ -596,7 +732,7 @@ function test_handle_symlinks () {
|
|||
rm -f "$INITIATOR_DIR/$FileA"
|
||||
rm -f "$TARGET_DIR/$FileB"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Third broken symlink run with parameters [$i]." "0" $?
|
||||
|
||||
[ -L "$TARGET_DIR/$FileAL" ]
|
||||
|
@ -609,7 +745,7 @@ function test_handle_symlinks () {
|
|||
rm -f "$INITIATOR_DIR/$FileAL"
|
||||
rm -f "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Fourth symlink deletion run with parameters [$i]." "0" $?
|
||||
|
||||
[ ! -L "$TARGET_DIR/$FileAL" ]
|
||||
|
@ -623,12 +759,12 @@ function test_handle_symlinks () {
|
|||
done
|
||||
|
||||
# TRAVIS SPECIFIC - time limitation
|
||||
if [ "$TRAVIS_RUN" != true ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" != true ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Check with and without copySymlinks
|
||||
copySymlinks="yes"
|
||||
copySymlinks=true
|
||||
|
||||
echo "Running with COPY_SYMLINKS=$copySymlinks"
|
||||
|
||||
|
@ -657,14 +793,14 @@ function test_handle_symlinks () {
|
|||
ln -s "$INITIATOR_DIR/$FileA" "$INITIATOR_DIR/$FileAL"
|
||||
ln -s "$TARGET_DIR/$FileB" "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First symlink run with parameters [$i]." "0" $?
|
||||
|
||||
# Delete symlinks
|
||||
rm -f "$INITIATOR_DIR/$FileAL"
|
||||
rm -f "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Second symlink deletion run with parameters [$i]." "0" $?
|
||||
|
||||
# symlink deletion propagation
|
||||
|
@ -683,7 +819,7 @@ function test_handle_symlinks () {
|
|||
rm -f "$INITIATOR_DIR/$FileA"
|
||||
rm -f "$TARGET_DIR/$FileB"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Third broken symlink run with parameters should fail [$i]." "1" $?
|
||||
|
||||
[ ! -f "$TARGET_DIR/$FileAL" ]
|
||||
|
@ -696,7 +832,7 @@ function test_handle_symlinks () {
|
|||
rm -f "$INITIATOR_DIR/$FileAL"
|
||||
rm -f "$TARGET_DIR/$FileBL"
|
||||
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
COPY_SYMLINKS=$copySymlinks REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Fourth symlink deletion run should resume with parameters [$i]." "0" $?
|
||||
|
||||
[ ! -f "$TARGET_DIR/$FileAL" ]
|
||||
|
@ -727,7 +863,7 @@ function test_softdeletion_cleanup () {
|
|||
PrepareLocalDirs
|
||||
|
||||
# First run
|
||||
#REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
#REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
#assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
# Get current drive
|
||||
|
@ -742,13 +878,13 @@ function test_softdeletion_cleanup () {
|
|||
|
||||
touch "$file.new"
|
||||
|
||||
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
echo "Skipping changing ctime on file because travis / bsd / macos / Win10 / msys / cygwin does not support debugfs"
|
||||
else
|
||||
CreateOldFile "$file.old"
|
||||
fi
|
||||
done
|
||||
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
echo "Skipping changing ctime on dir too"
|
||||
else
|
||||
CreateOldFile "$DirA" true
|
||||
|
@ -756,14 +892,14 @@ function test_softdeletion_cleanup () {
|
|||
fi
|
||||
|
||||
# Second run
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
|
||||
# Check file presence
|
||||
for file in "${files[@]}"; do
|
||||
[ -f "$file.new" ]
|
||||
assertEquals "New softdeleted / backed up file [$file.new] exists." "0" $?
|
||||
|
||||
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
[ ! -f "$file.old" ]
|
||||
assertEquals "Old softdeleted / backed up file [$file.old] is deleted permanently." "0" $?
|
||||
else
|
||||
|
@ -772,7 +908,7 @@ function test_softdeletion_cleanup () {
|
|||
fi
|
||||
done
|
||||
|
||||
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
|
||||
[ ! -d "$DirA" ]
|
||||
assertEquals "Old softdeleted / backed up directory [$dirA] is deleted permanently." "0" $?
|
||||
[ ! -d "$DirB" ]
|
||||
|
@ -789,7 +925,7 @@ function test_softdeletion_cleanup () {
|
|||
|
||||
function test_FileAttributePropagation () {
|
||||
|
||||
if [ "$TRAVIS_RUN" == true ]; then
|
||||
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
|
||||
echo "Skipping FileAttributePropagation tests as travis does not support getfacl / setfacl."
|
||||
return 0
|
||||
fi
|
||||
|
@ -799,6 +935,11 @@ function test_FileAttributePropagation () {
|
|||
return 0
|
||||
fi
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_ACL" true
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_XATTR" true
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_ACL" true
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_XATTR" true
|
||||
|
||||
for i in "${osyncParameters[@]}"; do
|
||||
cd "$OSYNC_DIR"
|
||||
PrepareLocalDirs
|
||||
|
@ -820,7 +961,7 @@ function test_FileAttributePropagation () {
|
|||
touch "$TARGET_DIR/$FileB"
|
||||
|
||||
# First run
|
||||
PRESERVE_ACL=yes PRESERVE_XATTR=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
PRESERVE_ACL=yes PRESERVE_XATTR=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
sleep 1
|
||||
|
@ -848,7 +989,7 @@ function test_FileAttributePropagation () {
|
|||
assertEquals "Set ACL on target directory" "0" $?
|
||||
|
||||
# Second run
|
||||
PRESERVE_ACL=yes PRESERVE_XATTR=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
PRESERVE_ACL=yes PRESERVE_XATTR=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
getfacl "$TARGET_DIR/$FileA" | grep "other::r-x" > /dev/null
|
||||
|
@ -863,6 +1004,11 @@ function test_FileAttributePropagation () {
|
|||
getfacl "$INITIATOR_DIR/$DirD" | grep "other::-wx" > /dev/null
|
||||
assertEquals "ACLs matched original value on initiator subdirectory." "0" $?
|
||||
done
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_ACL" false
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_XATTR" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_ACL" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_XATTR" false
|
||||
}
|
||||
|
||||
function test_ConflictBackups () {
|
||||
|
@ -883,14 +1029,14 @@ function test_ConflictBackups () {
|
|||
echo "$FileB" > "$TARGET_DIR/$FileB"
|
||||
|
||||
# First run
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
echo "$FileA+" > "$TARGET_DIR/$FileA"
|
||||
echo "$FileB+" > "$INITIATOR_DIR/$FileB"
|
||||
|
||||
# Second run
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
[ -f "$INITIATOR_DIR/$OSYNC_BACKUP_DIR/$FileA" ]
|
||||
|
@ -906,8 +1052,8 @@ function test_MultipleConflictBackups () {
|
|||
local additionalParameters
|
||||
|
||||
# modify config files
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" true
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" true
|
||||
|
||||
if [ "$OSYNC_MIN_VERSION" != "1" ]; then
|
||||
additionalParameters="--errors-only --summary --no-prefix"
|
||||
|
@ -927,28 +1073,28 @@ function test_MultipleConflictBackups () {
|
|||
echo "$FileB" > "$TARGET_DIR/$FileB"
|
||||
|
||||
# First run
|
||||
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i $additionalParameters
|
||||
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
echo "$FileA+" > "$TARGET_DIR/$FileA"
|
||||
echo "$FileB+" > "$INITIATOR_DIR/$FileB"
|
||||
|
||||
# Second run
|
||||
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i $additionalParameters
|
||||
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
echo "$FileA-" > "$TARGET_DIR/$FileA"
|
||||
echo "$FileB-" > "$INITIATOR_DIR/$FileB"
|
||||
|
||||
# Third run
|
||||
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i $additionalParameters
|
||||
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
echo "$FileA*" > "$TARGET_DIR/$FileA"
|
||||
echo "$FileB*" > "$INITIATOR_DIR/$FileB"
|
||||
|
||||
# Fourth run
|
||||
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i $additionalParameters
|
||||
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
|
||||
assertEquals "First deletion run with parameters [$i]." "0" $?
|
||||
|
||||
# This test may fail only on 31st December at 23:59 :)
|
||||
|
@ -959,8 +1105,8 @@ function test_MultipleConflictBackups () {
|
|||
assertEquals "3 Backup files are present in [$TARGET_DIR/$OSYNC_BACKUP_DIR/]." "0" $?
|
||||
done
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" false
|
||||
}
|
||||
|
||||
function test_Locking () {
|
||||
|
@ -978,12 +1124,12 @@ function test_Locking () {
|
|||
mkdir -p "$INITIATOR_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536 > "$INITIATOR_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Should be able to resume when initiator has lock without running pid." "0" $?
|
||||
|
||||
echo $$ > "$INITIATOR_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Should never be able to resume when initiator has lock with running pid." "1" $?
|
||||
done
|
||||
|
||||
|
@ -992,14 +1138,14 @@ function test_Locking () {
|
|||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@quicklocal > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__quickLocal]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickLocal]}
|
||||
assertEquals "Should be able to resume locked target with same instance_id in quickLocal mode." "0" $?
|
||||
|
||||
PrepareLocalDirs
|
||||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@local > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
assertEquals "Should be able to resume locked target with same instance_id in confLocal mode." "0" $?
|
||||
|
||||
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ]; then
|
||||
|
@ -1007,14 +1153,14 @@ function test_Locking () {
|
|||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@quickremote > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]}
|
||||
assertEquals "Should be able to resume locked target with same instance_id in quickRemote mode." "0" $?
|
||||
|
||||
PrepareLocalDirs
|
||||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@remote > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__confRemote]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__confRemote]}
|
||||
assertEquals "Should be able to resume locked target with same instance_id in confRemote mode." "0" $?
|
||||
fi
|
||||
|
||||
|
@ -1023,14 +1169,14 @@ function test_Locking () {
|
|||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@bogusinstance > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__quickLocal]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickLocal]}
|
||||
assertEquals "Should be able to resume locked local target with bogus instance id in quickLocal mode." "0" $?
|
||||
|
||||
PrepareLocalDirs
|
||||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@bogusinstance > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
assertEquals "Should be able to resume locked local target with bogus instance_id in confLocal mode." "0" $?
|
||||
|
||||
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ]; then
|
||||
|
@ -1038,21 +1184,21 @@ function test_Locking () {
|
|||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@bogusinstance > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]}
|
||||
assertEquals "Should not be able to resume remote locked target with bogus instance_id in quickRemote mode." "1" $?
|
||||
|
||||
PrepareLocalDirs
|
||||
mkdir -p "$TARGET_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@bogusinstance > "$TARGET_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__confRemote]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__confRemote]}
|
||||
assertEquals "Should not be able to resume remote locked target with bogus instance_id in confRemote mode." "1" $?
|
||||
fi
|
||||
|
||||
# Target lock present should be resumed if instance ID is NOT the same as current one but FORCE_STRANGER_UNLOCK=yes
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "yes"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" true
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" true
|
||||
|
||||
for i in "${osyncParameters[@]}"; do
|
||||
|
||||
|
@ -1062,12 +1208,63 @@ function test_Locking () {
|
|||
mkdir -p "$INITIATOR_DIR/$OSYNC_WORKDIR"
|
||||
echo 65536@bogusinstance > "$INITIATOR_DIR/$OSYNC_WORKDIR/lock"
|
||||
|
||||
FORCE_STRANGER_UNLOCK=yes REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE $i
|
||||
FORCE_STRANGER_UNLOCK=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
|
||||
assertEquals "Should be able to resume when target has lock with different instance id but FORCE_STRANGER_UNLOCK=yes." "0" $?
|
||||
done
|
||||
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
|
||||
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" false
|
||||
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" false
|
||||
}
|
||||
|
||||
function test_ConflictDetetion () {
|
||||
# Tests compatible with v1.4+
|
||||
|
||||
if [ $OSYNC_MIN_VERSION -lt 4 ]; then
|
||||
echo "Skipping conflict detection test because osync min version is $OSYNC_MIN_VERSION (must be 4 at least)."
|
||||
return 0
|
||||
fi
|
||||
|
||||
for i in "${osyncParameters[@]}"; do
|
||||
|
||||
cd "$OSYNC_DIR"
|
||||
PrepareLocalDirs
|
||||
|
||||
FileA="some file"
|
||||
FileB="some other file"
|
||||
|
||||
touch "$INITIATOR_DIR/$FileA"
|
||||
touch "$TARGET_DIR/$FileB"
|
||||
touch "$INITIATOR_DIR/$FileB"
|
||||
touch "$TARGET_DIR/$FileA"
|
||||
|
||||
# Initializing treeList
|
||||
REMOTE_HOST_PING=$RHOST_PING _PARANOIA_DEBUG=no $OSYNC_EXECUTABLE $i --initialize
|
||||
assertEquals "Initialization run with parameters [$i]." "0" $?
|
||||
|
||||
# Now modifying files on both sides
|
||||
|
||||
echo "A" > "$INITIATOR_DIR/$FileA"
|
||||
echo "B" > "$TARGET_DIR/$FileB"
|
||||
echo "BB" > "$INITIATOR_DIR/$FileB"
|
||||
echo "AA" > "$TARGET_DIR/$FileA"
|
||||
|
||||
# Now run should return conflicts
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --log-conflicts > "$FAKEROOT/output2.log" 2>&1
|
||||
assertEquals "Second run that should detect conflicts with parameters [$i]." "0" $?
|
||||
|
||||
cat "$FAKEROOT/output2.log"
|
||||
|
||||
#WIP TODO: switch output2.log back to output.log once debugging is done
|
||||
grep "$INITIATOR_DIR/$FileA << >> $TARGET_DIR/$FileA" "$FAKEROOT/output2.log"
|
||||
assertEquals "FileA conflict detect with parameters [$i]." "0" $?
|
||||
|
||||
grep "$INITIATOR_DIR/$FileB << >> $TARGET_DIR/$FileB" "$FAKEROOT/output2.log"
|
||||
assertEquals "FileB conflict detect with parameters [$i]." "0" $?
|
||||
|
||||
#TODO: Missing test for conflict prevalence (once we have FORCE_CONFLICT_PREVALANCE)
|
||||
done
|
||||
return 0
|
||||
}
|
||||
|
||||
function test_WaitForTaskCompletion () {
|
||||
|
@ -1227,7 +1424,6 @@ function test_ParallelExec () {
|
|||
|
||||
function test_timedExecution () {
|
||||
local arguments
|
||||
local warnExitCode
|
||||
|
||||
# Clever usage of indexes and exit codes
|
||||
# osync exits with 0 when no problem detected
|
||||
|
@ -1254,7 +1450,7 @@ function test_timedExecution () {
|
|||
PrepareLocalDirs
|
||||
|
||||
echo "Test with args [$i $arguments]."
|
||||
SLEEP_TIME=1 SOFT_MAX_EXEC_TIME=${softTimes[$x]} HARD_MAX_EXEC_TIME=${hardTimes[$x]} ./$OSYNC_EXECUTABLE $i
|
||||
SLEEP_TIME=1 SOFT_MAX_EXEC_TIME=${softTimes[$x]} HARD_MAX_EXEC_TIME=${hardTimes[$x]} $OSYNC_EXECUTABLE $i
|
||||
retval=$?
|
||||
if [ "$OSYNC_MIN_VERSION" -gt 1 ]; then
|
||||
assertEquals "Timed Execution test with timed SOFT_MAX_EXEC_TIME=${softTimes[$x]} and HARD_MAX_EXEC_TIME=${hardTimes[$x]}." $x $retval
|
||||
|
@ -1289,9 +1485,9 @@ function test_UpgradeConfRun () {
|
|||
assertEquals "Conf file upgrade" "0" $?
|
||||
|
||||
# Update remote conf files with SSH port
|
||||
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${HOME}/osync-tests/target#' "$CONF_DIR/$TMP_OLD_CONF"
|
||||
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${homedir}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${homedir}/osync-tests/target#' "$CONF_DIR/$TMP_OLD_CONF"
|
||||
|
||||
./$OSYNC_EXECUTABLE "$CONF_DIR/$TMP_OLD_CONF"
|
||||
$OSYNC_EXECUTABLE "$CONF_DIR/$TMP_OLD_CONF"
|
||||
assertEquals "Upgraded conf file execution test" "0" $?
|
||||
|
||||
rm -f "$CONF_DIR/$TMP_OLD_CONF"
|
||||
|
@ -1316,9 +1512,10 @@ function test_DaemonMode () {
|
|||
touch "$INITIATOR_DIR/$FileA"
|
||||
touch "$TARGET_DIR/$FileB"
|
||||
|
||||
./$OSYNC_EXECUTABLE "$CONF_DIR/$LOCAL_CONF" --on-changes &
|
||||
$OSYNC_EXECUTABLE "$CONF_DIR/$LOCAL_CONF" --on-changes &
|
||||
pid=$!
|
||||
|
||||
#TODO: Lower that value when the dispatcher is written
|
||||
# Trivial value of 2xMIN_WAIT from config files
|
||||
echo "Sleeping for 120s"
|
||||
sleep 120
|
||||
|
@ -1359,7 +1556,7 @@ function test_NoRemoteAccessTest () {
|
|||
cd "$OSYNC_DIR"
|
||||
PrepareLocalDirs
|
||||
|
||||
REMOTE_HOST_PING=$RHOST_PING ./$OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE ${osyncParameters[$__confLocal]}
|
||||
assertEquals "Basic local test without remote access." "0" $?
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,46 @@
|
|||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at kate.ward@forestent.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
|
@ -0,0 +1,147 @@
|
|||
Coding Standards
|
||||
================
|
||||
|
||||
shFlags is more than just a simple 20 line shell script. It is a pretty
|
||||
significant library of shell code that at first glance is not that easy to
|
||||
understand. To improve code readability and usability, some guidelines have been
|
||||
set down to make the code more understandable for anyone who wants to read or
|
||||
modify it.
|
||||
|
||||
Function declaration
|
||||
--------------------
|
||||
|
||||
Declare functions using the following form:
|
||||
|
||||
```sh
|
||||
doSomething() {
|
||||
  echo 'done!'
|
||||
}
|
||||
```
|
||||
|
||||
One-line functions are allowed if they can fit within the 80 char line limit.
|
||||
|
||||
```sh
|
||||
doSomething() { echo 'done!'; }
|
||||
```
|
||||
|
||||
Function documentation
|
||||
----------------------
|
||||
|
||||
Each function should be preceded by a header that provides the following:
|
||||
|
||||
1. A one-sentence summary of what the function does.
|
||||
|
||||
1. (optional) A longer description of what the function does, and perhaps some
|
||||
special information that helps convey its usage better.
|
||||
|
||||
1. Args: a one-line summary of each argument of the form:
|
||||
|
||||
`name: type: description`
|
||||
|
||||
1. Output: a one-line summary of the output provided. Only output to STDOUT
|
||||
must be documented, unless the output to STDERR is of significance (i.e. not
|
||||
just an error message). The output should be of the form:
|
||||
|
||||
`type: description`
|
||||
|
||||
1. Returns: a one-line summary of the value returned. Returns in shell are
|
||||
always integers, but if the output is a true/false for success (i.e. a
|
||||
boolean), it should be noted. The output should be of the form:
|
||||
|
||||
`type: description`
|
||||
|
||||
Here is a sample header:
|
||||
|
||||
```
|
||||
# Return valid getopt options using currently defined list of long options.
|
||||
#
|
||||
# This function builds a proper getopt option string for short (and long)
|
||||
# options, using the current list of long options for reference.
|
||||
#
|
||||
# Args:
|
||||
# _flags_optStr: integer: option string type (__FLAGS_OPTSTR_*)
|
||||
# Output:
|
||||
# string: generated option string for getopt
|
||||
# Returns:
|
||||
# boolean: success of operation (always returns True)
|
||||
```
|
||||
|
||||
Variable and function names
|
||||
---------------------------
|
||||
|
||||
All shFlags specific constants, variables, and functions will be prefixed
|
||||
appropriately with 'flags'. This is to distinguish usage in the shFlags code
|
||||
from users' own scripts so that the shell name space remains predictable to
|
||||
users. The exceptions here are the standard `assertEquals`, etc. functions.
|
||||
|
||||
All non built-in constants and variables will be surrounded with squiggle
|
||||
brackets, e.g. `${flags_someVariable}` to improve code readability.
|
||||
|
||||
Due to some shells not supporting local variables in functions, care in the
|
||||
naming and use of variables, both public and private, is very important.
|
||||
Accidental overriding of the variables can occur easily if care is not taken as
|
||||
all variables are technically global variables in some shells.
|
||||
|
||||
Type | Sample
|
||||
---- | ------
|
||||
global public constant | `FLAGS_TRUE`
|
||||
global private constant | `__FLAGS_SHELL_FLAGS`
|
||||
global public variable | `flags_variable`
|
||||
global private variable | `__flags_variable`
|
||||
global macro | `_FLAGS_SOME_MACRO_`
|
||||
public function | `flags_function`
|
||||
public function, local variable | `flags_variable_`
|
||||
private function | `_flags_function`
|
||||
private function, local variable | `_flags_variable_`
|
||||
|
||||
Where it makes sense to improve readability, variables can have the first
|
||||
letter of the second and later words capitalized. For example, the local
|
||||
variable name for the help string length is `flags_helpStrLen_`.
|
||||
|
||||
There are three special-case global public variables used. They are used to
|
||||
overcome the limitations of shell scoping or to prevent forking. The three
|
||||
variables are:
|
||||
|
||||
- `flags_error`
|
||||
- `flags_output`
|
||||
- `flags_return`
|
||||
|
||||
Local variable cleanup
|
||||
----------------------
|
||||
|
||||
As many shells do not support local variables, no support for cleanup of
|
||||
variables is present either. As such, all variables local to a function must be
|
||||
cleared up with the `unset` built-in command at the end of each function.
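For instance, here is a minimal sketch of that cleanup pattern, following the naming conventions above (the function and variable names are purely illustrative and not part of shFlags):

```sh
# Join all arguments into a single comma-separated string.
_flags_joinWithCommas() {
  _flags_joined_=''
  for _flags_arg_ in "$@"; do
    if [ -z "${_flags_joined_}" ]; then
      _flags_joined_=${_flags_arg_}
    else
      _flags_joined_="${_flags_joined_},${_flags_arg_}"
    fi
  done
  echo "${_flags_joined_}"

  # "Local" variables must be unset manually, as not every shell scopes them.
  unset _flags_joined_ _flags_arg_
}
```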
|
||||
|
||||
Indentation
|
||||
-----------
|
||||
|
||||
Code block indentation is two (2) spaces, and tabs may not be used.
|
||||
|
||||
```sh
|
||||
if [ -z 'some string' ]; then
|
||||
  someFunction
|
||||
fi
|
||||
```
|
||||
|
||||
Lines of code should be no longer than 80 characters unless absolutely
|
||||
necessary. When lines are wrapped using the backslash character '\', subsequent
|
||||
lines should be indented with four (4) spaces so as to differentiate from the
|
||||
standard spacing of two characters, and tabs may not be used.
|
||||
|
||||
```sh
|
||||
for x in some set of very long set of arguments that make for a very long \
|
||||
    line that extends much too long for one line
|
||||
do
|
||||
  echo ${x}
|
||||
done
|
||||
```
|
||||
|
||||
When a conditional expression is written using the built-in [ command, and that
|
||||
line must be wrapped, place the control || or && operators on the same line as
|
||||
the expression where possible, with the list to be executed on its own line.
|
||||
|
||||
```sh
|
||||
[ -n 'some really long expression' -a -n 'some other long expr' ] && \
|
||||
    echo 'that was actually true!'
|
||||
```
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,643 @@
|
|||
# shUnit2
|
||||
|
||||
shUnit2 is a [xUnit](http://en.wikipedia.org/wiki/XUnit) unit test framework for
|
||||
Bourne based shell scripts, and it is designed to work in a similar manner to
|
||||
[JUnit](http://www.junit.org), [PyUnit](http://pyunit.sourceforge.net), etc. If
|
||||
you have ever had the desire to write a unit test for a shell script, shUnit2
|
||||
can do the job.
|
||||
|
||||
[Build status](https://app.travis-ci.com/github/kward/shunit2)
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [Introduction](#introduction)
|
||||
* [Credits / Contributors](#credits-contributors)
|
||||
* [Feedback](#feedback)
|
||||
* [Quickstart](#quickstart)
|
||||
* [Function Reference](#function-reference)
|
||||
* [General Info](#general-info)
|
||||
* [Asserts](#asserts)
|
||||
* [Failures](#failures)
|
||||
* [Setup/Teardown](#setup-teardown)
|
||||
* [Skipping](#skipping)
|
||||
* [Suites](#suites)
|
||||
* [Advanced Usage](#advanced-usage)
|
||||
* [Some constants you can use](#some-constants-you-can-use)
|
||||
* [Error Handling](#error-handling)
|
||||
* [Including Line Numbers in Asserts (Macros)](#including-line-numbers-in-asserts-macros)
|
||||
* [Test Skipping](#test-skipping)
|
||||
* [Running specific tests from the command line](#cmd-line-args)
|
||||
* [Appendix](#appendix)
|
||||
* [Getting help](#getting-help)
|
||||
* [Zsh](#zsh)
|
||||
|
||||
---
|
||||
|
||||
## <a name="introduction"></a> Introduction
|
||||
|
||||
shUnit2 was originally developed to provide a consistent testing solution for
|
||||
[log4sh][log4sh], a shell based logging framework similar to
|
||||
[log4j](http://logging.apache.org). During the development of that product, a
|
||||
repeated problem of having things work just fine under one shell (`/bin/bash` on
|
||||
Linux to be specific), and then not working under another shell (`/bin/sh` on
|
||||
Solaris) kept coming up. Although several simple tests were run, they were not
|
||||
adequate and did not catch some corner cases. The decision was finally made to
|
||||
write a proper unit test framework after multiple brown-bag releases were made.
|
||||
_Research was done to look for an existing product that met the testing
|
||||
requirements, but no adequate product was found._
|
||||
|
||||
### Tested software
|
||||
|
||||
**Tested Operating Systems** (varies over time)
|
||||
|
||||
OS | Support | Verified
|
||||
----------------------------------- | --------- | --------
|
||||
Ubuntu Linux (14.04.05 LTS) | Travis CI | continuous
|
||||
macOS High Sierra (10.13.3) | Travis CI | continuous
|
||||
FreeBSD | user | unknown
|
||||
Solaris 8, 9, 10 (inc. OpenSolaris) | user | unknown
|
||||
Cygwin | user | unknown
|
||||
|
||||
**Tested Shells**
|
||||
|
||||
* Bourne Shell (__sh__)
|
||||
* BASH - GNU Bourne Again SHell (__bash__)
|
||||
* DASH - Debian Almquist Shell (__dash__)
|
||||
* Korn Shell - AT&T version of the Korn shell (__ksh__)
|
||||
* mksh - MirBSD Korn Shell (__mksh__)
|
||||
* zsh - Zsh (__zsh__) (since 2.1.2) _please see the Zsh shell errata for more information_
|
||||
|
||||
See the appropriate Release Notes for this release
|
||||
(`doc/RELEASE_NOTES-X.X.X.txt`) for the list of actual versions tested.
|
||||
|
||||
### <a name="credits-contributors"></a> Credits / Contributors
|
||||
|
||||
A list of contributors to shUnit2 can be found in `doc/contributors.md`. Many
|
||||
thanks go out to all those who have contributed to make this a better tool.
|
||||
|
||||
shUnit2 is the original product of many hours of work by Kate Ward, the primary
|
||||
author of the code. For related software, check out https://github.com/kward.
|
||||
|
||||
### <a name="feedback"></a> Feedback
|
||||
|
||||
Feedback is most certainly welcome for this document. Send your questions,
|
||||
comments, and criticisms via the
|
||||
[shunit2-users](https://groups.google.com/a/forestent.com/forum/#!forum/shunit2-users/new)
|
||||
forum (created 2018-12-09), or file an issue via
|
||||
https://github.com/kward/shunit2/issues.
|
||||
|
||||
---
|
||||
|
||||
## <a name="quickstart"></a> Quickstart
|
||||
|
||||
This section will give a very quick start to running unit tests with shUnit2.
|
||||
More information is located in later sections.
|
||||
|
||||
Here is a quick sample script to show how easy it is to write a unit test in
|
||||
shell. _Note: the script as it stands expects that you are running it from the
|
||||
"examples" directory._
|
||||
|
||||
```sh
|
||||
#! /bin/sh
|
||||
# file: examples/equality_test.sh
|
||||
|
||||
testEquality() {
|
||||
assertEquals 1 1
|
||||
}
|
||||
|
||||
# Load shUnit2.
|
||||
. ../shunit2
|
||||
```
|
||||
|
||||
Running the unit test should give results similar to the following.
|
||||
|
||||
```console
|
||||
$ cd examples
|
||||
$ ./equality_test.sh
|
||||
testEquality
|
||||
|
||||
Ran 1 test.
|
||||
|
||||
OK
|
||||
```
|
||||
|
||||
W00t! You've just run your first successful unit test. So, what just happened?
|
||||
Quite a bit really, and it all happened simply by sourcing the `shunit2`
|
||||
library. The basic functionality for the script above goes like this:
|
||||
|
||||
* When shUnit2 is sourced, it will walk through any functions defined whose name
|
||||
starts with the string `test`, and add those to an internal list of tests to
|
||||
execute. Once a list of test functions to be run has been determined, shunit2
|
||||
will go to work.
|
||||
* Before any tests are executed, shUnit2 again looks for a function, this time
|
||||
one named `oneTimeSetUp()`. If it exists, it will be run. This function is
|
||||
normally used to setup the environment for all tests to be run. Things like
|
||||
creating directories for output or setting environment variables are good to
|
||||
place here. Just so you know, you can also declare a corresponding function
|
||||
named `oneTimeTearDown()` that does the same thing, but once all the
|
||||
tests have been completed. It is good for removing temporary directories, etc.
|
||||
* shUnit2 is now ready to run tests. Before doing so though, it again looks for
|
||||
another function that might be declared, one named `setUp()`. If the function
|
||||
exists, it will be run before each test. It is good for resetting the
|
||||
environment so that each test starts with a clean slate. **At this stage, the
|
||||
first test is finally run.** The success of the test is recorded for a report
|
||||
that will be generated later. After the test is run, shUnit2 looks for a final
|
||||
function that might be declared, one named `tearDown()`. If it exists, it will
|
||||
be run after each test. It is a good place for cleaning up after each test,
|
||||
maybe doing things like removing files that were created, or removing
|
||||
directories. This set of steps, `setUp() > test() > tearDown()`, is repeated
|
||||
for all of the available tests.
|
||||
* Once all the work is done, shUnit2 will generate the nice report you saw
|
||||
above. A summary of all the successes and failures will be given so that you
|
||||
know how well your code is doing.
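
To make that lifecycle concrete, here is a minimal sketch (illustrative only,
not one of the shipped examples) that defines `oneTimeSetUp()`, `setUp()`, and
`tearDown()` around a single test. It uses `SHUNIT_TMPDIR`, a temporary
directory provided by shUnit2 that is described under "Advanced Usage" below.

```sh
#! /bin/sh
# Illustrative sketch only -- not one of the shipped examples.

oneTimeSetUp() {
  # Runs once, before any tests. SHUNIT_TMPDIR is a temporary directory that
  # shUnit2 provides and cleans up automatically.
  outputDir="${SHUNIT_TMPDIR}/output"
  mkdir "${outputDir}"
}

setUp() {
  # Runs before each test: give every test a fresh, empty work file.
  workF="${outputDir}/work.txt"
  : >"${workF}"
}

tearDown() {
  # Runs after each test: clean up whatever the test created.
  rm -f "${workF}"
}

testWorkFileStartsEmpty() {
  assertFalse 'work file should start empty' "[ -s '${workF}' ]"
}

# Load shUnit2.
. ../shunit2
```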
|
||||
|
||||
We should now try adding a test that fails. Change your unit test to look like
|
||||
this.
|
||||
|
||||
```sh
|
||||
#! /bin/sh
|
||||
# file: examples/party_test.sh
|
||||
|
||||
testEquality() {
|
||||
assertEquals 1 1
|
||||
}
|
||||
|
||||
testPartyLikeItIs1999() {
|
||||
year=`date '+%Y'`
|
||||
assertEquals "It's not 1999 :-(" '1999' "${year}"
|
||||
}
|
||||
|
||||
# Load shUnit2.
|
||||
. ../shunit2
|
||||
```
|
||||
|
||||
So, what did you get? I guess it told you that this isn't 1999. Bummer, eh?
|
||||
Hopefully, you noticed a couple of things that were different about the second
|
||||
test. First, we added an optional message that the user will see if the assert
|
||||
fails. Second, we did comparisons of strings instead of integers as in the first
|
||||
test. It doesn't matter whether you are testing for equality of strings or
|
||||
integers. Both work equally well with shUnit2.
|
||||
|
||||
Hopefully, this is enough to get you started with unit testing. If you want a
|
||||
ton more examples, take a look at the tests provided with [log4sh][log4sh] or
|
||||
[shFlags][shflags]. Both provide excellent examples of more advanced usage.
|
||||
shUnit2 was after all written to meet the unit testing need that
|
||||
[log4sh][log4sh] had.
|
||||
|
||||
If you are using a distribution-packaged shUnit2 that is accessible as
`/usr/bin/shunit2` (as on Debian, for example), you can load shUnit2 without
specifying its path. The last two lines of the script above can then be
replaced by:
|
||||
|
||||
```sh
|
||||
# Load shUnit2.
|
||||
. shunit2
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## <a name="function-reference"></a> Function Reference
|
||||
|
||||
### <a name="general-info"></a> General Info
|
||||
|
||||
Any string values passed should be properly quoted -- they should be
|
||||
surrounded by single-quote (`'`) or double-quote (`"`) characters -- so that the
|
||||
shell will properly parse them.
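
For example (illustrative), quoting keeps a value containing spaces together as
a single argument:

    value='abc def'
    assertEquals 'abc def' "${value}"   # Quoted: compares the full string.
    # assertEquals 'abc def' ${value}   # Unquoted: word-splits into too many arguments.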
|
||||
|
||||
### <a name="asserts"></a> Asserts
|
||||
|
||||
assertEquals [message] expected actual
|
||||
|
||||
Asserts that _expected_ and _actual_ are equal to one another. The _expected_
|
||||
and _actual_ values can be either strings or integer values as both will be
|
||||
treated as strings. The _message_ is optional, and must be quoted.
|
||||
|
||||
assertNotEquals [message] unexpected actual
|
||||
|
||||
Asserts that _unexpected_ and _actual_ are not equal to one another. The
|
||||
_unexpected_ and _actual_ values can be either strings or integer values as both
|
||||
will be treated as strings. The _message_ is optional, and must be quoted.
|
||||
|
||||
assertSame [message] expected actual
|
||||
|
||||
This function is functionally equivalent to `assertEquals`.
|
||||
|
||||
assertNotSame [message] unexpected actual
|
||||
|
||||
This function is functionally equivalent to `assertNotEquals`.
|
||||
|
||||
assertContains [message] container content
|
||||
|
||||
Asserts that _container_ contains _content_. The _container_ and _content_
|
||||
values can be either strings or integer values as both will be treated as
|
||||
strings. The _message_ is optional, and must be quoted.
|
||||
|
||||
assertNotContains [message] container content
|
||||
|
||||
Asserts that _container_ does not contain _content_. The _container_ and
|
||||
_content_ values can be either strings or integer values as both will be treated
|
||||
as strings. The _message_ is optional, and must be quoted.
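
For example (the values here are illustrative):

    output='error: file not found'
    assertContains "${output}" 'not found'
    assertNotContains 'unexpected match' "${output}" 'permission denied'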
|
||||
|
||||
assertNull [message] value
|
||||
|
||||
Asserts that _value_ is _null_, or in shell terms, a zero-length string. The
|
||||
_value_ must be a string as an integer value does not translate into a zero-
|
||||
length string. The _message_ is optional, and must be quoted.
|
||||
|
||||
assertNotNull [message] value
|
||||
|
||||
Asserts that _value_ is _not null_, or in shell terms, a non-empty string. The
|
||||
_value_ may be a string or an integer as the latter will be parsed as a non-empty
|
||||
string value. The _message_ is optional, and must be quoted.
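
These are convenient for checking that a command produced no output at all,
e.g. (illustrative):

    err=`ls / 2>&1 >/dev/null`
    assertNull 'unexpected output on STDERR' "${err}"
    assertNotNull 'expected a directory listing' "`ls /`"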
|
||||
|
||||
assertTrue [message] condition
|
||||
|
||||
Asserts that a given shell test _condition_ is _true_. The condition can be as
|
||||
simple as a shell _true_ value (the value `0` -- equivalent to
|
||||
`${SHUNIT_TRUE}`), or a more sophisticated shell conditional expression. The
|
||||
_message_ is optional, and must be quoted.
|
||||
|
||||
A sophisticated shell conditional expression is equivalent to what the __if__ or
|
||||
__while__ shell built-ins would use (more specifically, what the __test__
|
||||
command would use). For example, testing whether one value is greater than
another can be done this way.
|
||||
|
||||
assertTrue "[ 34 -gt 23 ]"
|
||||
|
||||
Testing for the ability to read a file can also be done. This particular test
|
||||
will fail.
|
||||
|
||||
assertTrue 'test failed' "[ -r /some/non-existent/file ]"
|
||||
|
||||
As the expressions are standard shell __test__ expressions, it is possible to
|
||||
string multiple expressions together with `-a` and `-o` in the standard fashion.
|
||||
This test will succeed as the entire expression evaluates to _true_.
|
||||
|
||||
assertTrue 'test failed' '[ 1 -eq 1 -a 2 -eq 2 ]'
|
||||
|
||||
<i>One word of warning: be very careful with your quoting as shell is not the
|
||||
most forgiving of bad quoting, and things will fail in strange ways.</i>
|
||||
|
||||
assertFalse [message] condition
|
||||
|
||||
Asserts that a given shell test _condition_ is _false_. The condition can be as
|
||||
simple as a shell _false_ value (the value `1` -- equivalent to
|
||||
`${SHUNIT_FALSE}`), or a more sophisticated shell conditional expression. The
|
||||
_message_ is optional, and must be quoted.
|
||||
|
||||
_For examples of more sophisticated expressions, see `assertTrue`._
|
||||
|
||||
### <a name="failures"></a> Failures
|
||||
|
||||
Just to clarify, failures __do not__ test the various arguments against one
|
||||
another. Failures simply fail, optionally with a message, and that is all they
|
||||
do. If you need to test arguments against one another, use asserts.
|
||||
|
||||
If all failures do is fail, why might one use them? There are times when you may
|
||||
have some very complicated logic that you need to test, and the simple asserts
|
||||
provided are simply not adequate. You can do your own validation of the code,
|
||||
use an `assertTrue ${SHUNIT_TRUE}` if your own tests succeeded, and use a
|
||||
failure to record a failure.
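
As a rough sketch (the command and validation logic here are invented for
illustration), such a test might look like:

    testComplexBehavior() {
      result=`my_complicated_command 2>&1`  # Hypothetical command under test.
      # Hand-rolled validation that the simple asserts cannot express.
      if echo "${result}" |grep 'ERROR' >/dev/null; then
        fail "command reported an error: ${result}"
      else
        assertTrue ${SHUNIT_TRUE}
      fi
    }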
|
||||
|
||||
fail [message]
|
||||
|
||||
Fails the test immediately. The _message_ is optional, and must be quoted.
|
||||
|
||||
failNotEquals [message] unexpected actual
|
||||
|
||||
Fails the test immediately, reporting that the _unexpected_ and _actual_ values
|
||||
are not equal to one another. The _message_ is optional, and must be quoted.
|
||||
|
||||
_Note: no actual comparison of unexpected and actual is done._
|
||||
|
||||
failSame [message] expected actual
|
||||
|
||||
Fails the test immediately, reporting that the _expected_ and _actual_ values
|
||||
are the same. The _message_ is optional, and must be quoted.
|
||||
|
||||
_Note: no actual comparison of expected and actual is done._
|
||||
|
||||
failNotSame [message] expected actual
|
||||
|
||||
Fails the test immediately, reporting that the _expected_ and _actual_ values
|
||||
are not the same. The _message_ is optional, and must be quoted.
|
||||
|
||||
_Note: no actual comparison of expected and actual is done._
|
||||
|
||||
failFound [message] content
|
||||
|
||||
Fails the test immediately, reporting that the _content_ was found. The
|
||||
_message_ is optional, and must be quoted.
|
||||
|
||||
_Note: no actual search of content is done._
|
||||
|
||||
failNotFound [message] content
|
||||
|
||||
Fails the test immediately, reporting that the _content_ was not found. The
|
||||
_message_ is optional, and must be quoted.
|
||||
|
||||
_Note: no actual search of content is done._
|
||||
|
||||
### <a name="setup-teardown"></a> Setup/Teardown
|
||||
|
||||
oneTimeSetUp
|
||||
|
||||
This function can be optionally overridden by the user in their test suite.
|
||||
|
||||
If this function exists, it will be called once before any tests are run. It is
|
||||
useful to prepare a common environment for all tests.
|
||||
|
||||
oneTimeTearDown
|
||||
|
||||
This function can be optionally overridden by the user in their test suite.
|
||||
|
||||
If this function exists, it will be called once after all tests are completed.
|
||||
It is useful to clean up the environment after all tests.
|
||||
|
||||
setUp
|
||||
|
||||
This function can be optionally overridden by the user in their test suite.
|
||||
|
||||
If this function exists, it will be called before each test is run. It is useful
|
||||
to reset the environment before each test.
|
||||
|
||||
tearDown
|
||||
|
||||
This function can be optionally overridden by the user in their test suite.
|
||||
|
||||
If this function exists, it will be called after each test completes. It is
|
||||
useful to clean up the environment after each test.
|
||||
|
||||
### <a name="skipping"></a> Skipping
|
||||
|
||||
startSkipping
|
||||
|
||||
This function forces the remaining _assert_ and _fail_ functions to be
|
||||
"skipped", i.e. they will have no effect. Each function skipped will be recorded
|
||||
so that the total of asserts and fails will not be altered.
|
||||
|
||||
endSkipping
|
||||
|
||||
This function returns calls to the _assert_ and _fail_ functions to their
|
||||
default behavior, i.e. they will be called.
|
||||
|
||||
isSkipping
|
||||
|
||||
This function returns the current state of skipping. It can be compared against
|
||||
`${SHUNIT_TRUE}` or `${SHUNIT_FALSE}` if desired.
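
For example (illustrative), a test can branch on the current skipping state:

    if isSkipping; then
      echo 'remaining asserts in this test are being skipped' >&2
    fi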
|
||||
|
||||
### <a name="suites"></a> Suites
|
||||
|
||||
The default behavior of shUnit2 is that all tests will be found dynamically. If
|
||||
you have a specific set of tests you want to run, or you don't want to use the
|
||||
standard naming scheme of prefixing your tests with `test`, these functions are
|
||||
for you. Most users will never use them though.
|
||||
|
||||
suite
|
||||
|
||||
This function can be optionally overridden by the user in their test suite.
|
||||
|
||||
If this function exists, it will be called when `shunit2` is sourced. If it does
|
||||
not exist, shUnit2 will search the parent script for all functions beginning
|
||||
with the word `test`, and they will be added dynamically to the test suite.
|
||||
|
||||
suite_addTest name
|
||||
|
||||
This function adds a function named _name_ to the list of tests scheduled for
|
||||
execution as part of this test suite. This function should only be called from
|
||||
within the `suite()` function.
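
A minimal sketch of a `suite()` (the test names here are illustrative):

    suite() {
      suite_addTest testParsing
      suite_addTest checkFormatting  # Need not start with 'test'.
    }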
|
||||
|
||||
---
|
||||
|
||||
## <a name="advanced-usage"></a> Advanced Usage
|
||||
|
||||
### <a name="some-constants-you-can-use"></a> Some constants you can use
|
||||
|
||||
There are several constants provided by shUnit2 as variables that might be of
|
||||
use to you.
|
||||
|
||||
*Predefined*
|
||||
|
||||
| Constant | Value |
|
||||
| --------------- | ----- |
|
||||
| SHUNIT\_TRUE | Standard shell `true` value (the integer value 0). |
|
||||
| SHUNIT\_FALSE | Standard shell `false` value (the integer value 1). |
|
||||
| SHUNIT\_ERROR | The integer value 2. |
|
||||
| SHUNIT\_TMPDIR | Path to temporary directory that will be automatically cleaned up upon exit of shUnit2. |
|
||||
| SHUNIT\_VERSION | The version of shUnit2 you are running. |
|
||||
|
||||
*User defined*
|
||||
|
||||
| Constant | Value |
|
||||
| ----------------- | ----- |
|
||||
| SHUNIT\_CMD\_EXPR | Override which `expr` command is used. By default `expr` is used, except on BSD systems where `gexpr` is used. |
|
||||
| SHUNIT\_COLOR | Enable colorized output. Options are 'auto', 'always', or 'none', with 'auto' being the default. |
|
||||
| SHUNIT\_PARENT | The filename of the shell script containing the tests. This is needed specifically for Zsh support. |
|
||||
| SHUNIT\_TEST\_PREFIX | Define this variable to add a prefix in front of each test name that is output in the test report. |
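
As an illustrative sketch (not one of the shipped examples), a test can use
`SHUNIT_TMPDIR` for scratch files, and `SHUNIT_COLOR` can be set before
`shunit2` is sourced:

```sh
#! /bin/sh
# Illustrative sketch only: scratch files under SHUNIT_TMPDIR, color disabled.

testScratchFile() {
  scratchF="${SHUNIT_TMPDIR}/scratch.txt"
  echo 'hello' >"${scratchF}"
  assertTrue "could not read '${scratchF}'" "[ -r '${scratchF}' ]"
}

# Constants like SHUNIT_COLOR are read when shunit2 is sourced, so set them first.
SHUNIT_COLOR='none'

# Load shUnit2.
. ../shunit2
```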
|
||||
|
||||
### <a name="error-handling"></a> Error handling
|
||||
|
||||
The constants values `SHUNIT_TRUE`, `SHUNIT_FALSE`, and `SHUNIT_ERROR` are
|
||||
returned from nearly every function to indicate the success or failure of the
|
||||
function. Additionally the variable `flags_error` is filled with a detailed
|
||||
error message if any function returns with a `SHUNIT_ERROR` value.
|
||||
|
||||
### <a name="including-line-numbers-in-asserts-macros"></a> Including Line Numbers in Asserts (Macros)
|
||||
|
||||
If you include lots of assert statements in an individual test function, it can
|
||||
become difficult to determine exactly which assert was thrown unless your
|
||||
messages are unique. To help somewhat, line numbers can be included in the
|
||||
assert messages. To enable this, a special shell "macro" must be used rather
|
||||
than the standard assert calls. _Shell doesn't actually have macros; the name is
|
||||
used here as the operation is similar to a standard macro._
|
||||
|
||||
For example, to include line numbers for a `assertEquals()` function call,
|
||||
replace the `assertEquals()` with `${_ASSERT_EQUALS_}`.
|
||||
|
||||
_**Example** -- Asserts with and without line numbers_
|
||||
|
||||
```shell
|
||||
#! /bin/sh
|
||||
# file: examples/lineno_test.sh
|
||||
|
||||
testLineNo() {
|
||||
# This assert will have line numbers included (e.g. "ASSERT:[123] ...").
|
||||
echo "ae: ${_ASSERT_EQUALS_}"
|
||||
${_ASSERT_EQUALS_} 'not equal' 1 2
|
||||
|
||||
# This assert will not have line numbers included (e.g. "ASSERT: ...").
|
||||
assertEquals 'not equal' 1 2
|
||||
}
|
||||
|
||||
# Load shUnit2.
|
||||
. ../shunit2
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
1. Due to how shell parses command-line arguments, _**all strings used with
|
||||
macros should be quoted twice**_. Namely, a single-quoted string must additionally be wrapped in double-quotes (or vice-versa).<br/>
|
||||
<br/>
|
||||
Normal `assertEquals` call.<br/>
|
||||
`assertEquals 'some message' 'x' ''`<br/>
|
||||
<br/>
|
||||
Macro `_ASSERT_EQUALS_` call. Note the extra quoting around the _message_ and
|
||||
the _null_ value.<br/>
|
||||
`_ASSERT_EQUALS_ '"some message"' 'x' '""'`
|
||||
|
||||
1. Line numbers are not supported in all shells. If a shell does not support
|
||||
them, no errors will be thrown. Supported shells include: __bash__ (>=3.0),
|
||||
__ksh__, __mksh__, and __zsh__.
|
||||
|
||||
### <a name="test-skipping"></a> Test Skipping
|
||||
|
||||
There are times where the test code you have written is just not applicable to
|
||||
the system you are running on. This section describes how to skip these tests
|
||||
but maintain the total test count.
|
||||
|
||||
Probably the easiest example would be shell code that is meant to run under the
|
||||
__bash__ shell, but the unit test is running under the Bourne shell. There are
|
||||
things that just won't work. The following test code demonstrates two sample
|
||||
functions, one that will be run under any shell, and another that will run
|
||||
only under the __bash__ shell.
|
||||
|
||||
_**Example** -- math include_
|
||||
```sh
|
||||
# file: examples/math.inc.
|
||||
|
||||
add_generic() {
|
||||
num_a=$1
|
||||
num_b=$2
|
||||
|
||||
expr $1 + $2
|
||||
}
|
||||
|
||||
add_bash() {
|
||||
num_a=$1
|
||||
num_b=$2
|
||||
|
||||
echo $(($1 + $2))
|
||||
}
|
||||
```
|
||||
|
||||
And here is a corresponding unit test that correctly skips the `add_bash()` function when the unit test is not running under the __bash__ shell.
|
||||
|
||||
_**Example** -- math unit test_
|
||||
```sh
|
||||
#! /bin/sh
|
||||
# file: examples/math_test.sh
|
||||
|
||||
testAdding() {
|
||||
result=`add_generic 1 2`
|
||||
assertEquals \
|
||||
"the result of '${result}' was wrong" \
|
||||
3 "${result}"
|
||||
|
||||
# Disable non-generic tests.
|
||||
[ -z "${BASH_VERSION:-}" ] && startSkipping
|
||||
|
||||
result=`add_bash 1 2`
|
||||
assertEquals \
|
||||
"the result of '${result}' was wrong" \
|
||||
3 "${result}"
|
||||
}
|
||||
|
||||
oneTimeSetUp() {
|
||||
# Load include to test.
|
||||
. ./math.inc
|
||||
}
|
||||
|
||||
# Load and run shUnit2.
|
||||
. ../shunit2
|
||||
```
|
||||
|
||||
Running the above test under the __bash__ shell will result in the following
|
||||
output.
|
||||
|
||||
```console
|
||||
$ /bin/bash math_test.sh
|
||||
testAdding
|
||||
|
||||
Ran 1 test.
|
||||
|
||||
OK
|
||||
```
|
||||
|
||||
But, running the test under any other Unix shell will result in the following
|
||||
output.
|
||||
|
||||
```console
|
||||
$ /bin/ksh math_test.sh
|
||||
testAdding
|
||||
|
||||
Ran 1 test.
|
||||
|
||||
OK (skipped=1)
|
||||
```
|
||||
|
||||
As you can see, the total number of tests has not changed, but the report
|
||||
indicates that some tests were skipped.
|
||||
|
||||
Skipping can be controlled with the following functions: `startSkipping()`,
|
||||
`endSkipping()`, and `isSkipping()`. Once skipping is enabled, it will remain
|
||||
enabled until the end of the current test function call, after which skipping is
|
||||
disabled.
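
For example (an illustrative variation on the math test above), a test can skip
only its shell-specific asserts and then resume:

```sh
testMixedChecks() {
  assertEquals 'generic add failed' 4 `add_generic 2 2`

  # Skip the bash-only assert when not running under bash.
  [ -z "${BASH_VERSION:-}" ] && startSkipping
  assertEquals 'bash add failed' 4 `add_bash 2 2`
  # Resume asserting. Without this, skipping would still end automatically when
  # this test function returns.
  endSkipping

  assertEquals 'generic add failed again' 6 `add_generic 3 3`
}
```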
|
||||
|
||||
### <a name="cmd-line-args"></a> Running specific tests from the command line.
|
||||
|
||||
When running a test script, you may override the default set of tests, or the suite-specified set of tests, by providing additional arguments on the command line. Each additional argument after the `--` marker is assumed to be the name of a test function to be run in the order specified. e.g.
|
||||
|
||||
```console
|
||||
test-script.sh -- testOne testTwo otherFunction
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```console
|
||||
shunit2 test-script.sh testOne testTwo otherFunction
|
||||
```
|
||||
|
||||
In either case, three functions will be run as tests, `testOne`, `testTwo`, and `otherFunction`. Note that the function `otherFunction` would not normally be run by `shunit2` as part of the implicit collection of tests as its function name does not match the test function name pattern `test*`.
|
||||
|
||||
If a specified test function does not exist, `shunit2` will still attempt to run that function and thereby cause a failure which `shunit2` will catch and mark as a failed test. All other tests will run normally.
|
||||
|
||||
The specification of tests does not affect how `shunit2` looks for and executes the setup and tear down functions, which will still run as expected.
|
||||
|
||||
---
|
||||
|
||||
## <a name="appendix"></a> Appendix
|
||||
|
||||
### <a name="getting-help"></a> Getting Help
|
||||
|
||||
For help, please send requests to either the shunit2-users@forestent.com mailing
|
||||
list (archives available on the web at
|
||||
https://groups.google.com/a/forestent.com/forum/#!forum/shunit2-users) or
|
||||
directly to Kate Ward <kate dot ward at forestent dot com>.
|
||||
|
||||
### <a name="zsh"></a> Zsh
|
||||
|
||||
For compatibility with Zsh, there is one requirement that must be met -- the
|
||||
`shwordsplit` option must be set. There are three ways to accomplish this.
|
||||
|
||||
1. In the unit-test script, add the following shell code snippet before sourcing
|
||||
the `shunit2` library.
|
||||
|
||||
```sh
|
||||
setopt shwordsplit
|
||||
```
|
||||
|
||||
2. When invoking __zsh__ from either the command-line or as a script with `#!`,
|
||||
add the `-y` parameter.
|
||||
|
||||
```sh
|
||||
#! /bin/zsh -y
|
||||
```
|
||||
|
||||
3. When invoking __zsh__ from the command-line, add `-o shwordsplit --` as
|
||||
parameters before the script name.
|
||||
|
||||
```console
|
||||
$ zsh -o shwordsplit -- some_script
|
||||
```
|
||||
|
||||
[log4sh]: https://github.com/kward/log4sh
|
||||
[shflags]: https://github.com/kward/shflags
|
|
@ -0,0 +1,88 @@
|
|||
#! /bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# This script runs the provided unit tests and sends the output to the
|
||||
# appropriate file.
|
||||
#
|
||||
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shunit2
|
||||
#
|
||||
# Source following.
|
||||
# shellcheck disable=SC1090,SC1091
|
||||
# FLAGS variables are dynamically created.
|
||||
# shellcheck disable=SC2154
|
||||
# Disagree with [ p ] && [ q ] vs [ p -a -q ] recommendation.
|
||||
# shellcheck disable=SC2166
|
||||
|
||||
# Treat unset variables as an error.
|
||||
set -u
|
||||
|
||||
die() {
|
||||
[ $# -gt 0 ] && echo "error: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
BASE_DIR=$(dirname "$0")
|
||||
LIB_DIR="${BASE_DIR}/lib"
|
||||
|
||||
### Load libraries.
|
||||
. "${LIB_DIR}/shflags" || die 'unable to load shflags library'
|
||||
. "${LIB_DIR}/shlib" || die 'unable to load shlib library'
|
||||
. "${LIB_DIR}/versions" || die 'unable to load versions library'
|
||||
|
||||
# Redefining BASE_DIR now that we have the shlib functions. We need BASE_DIR so
|
||||
# that we can properly load things, even in the event that this script is called
|
||||
# from a different directory.
|
||||
BASE_DIR=$(shlib_relToAbsPath "${BASE_DIR}")
|
||||
|
||||
# Define flags.
|
||||
os_name=$(versions_osName |sed 's/ /_/g')
|
||||
os_version=$(versions_osVersion)
|
||||
|
||||
DEFINE_boolean force false 'force overwrite' f
|
||||
DEFINE_string output_dir "${TMPDIR}" 'output dir' d
|
||||
DEFINE_string output_file "${os_name}-${os_version}.txt" 'output file' o
|
||||
DEFINE_string runner 'test_runner' 'unit test runner' r
|
||||
DEFINE_boolean dry_run false "suppress logging to a file" n
|
||||
|
||||
main() {
|
||||
# Determine output filename.
|
||||
# shellcheck disable=SC2154
|
||||
output="${FLAGS_output_dir:+${FLAGS_output_dir}/}${FLAGS_output_file}"
|
||||
output=$(shlib_relToAbsPath "${output}")
|
||||
|
||||
# Checks.
|
||||
if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" -a -f "${output}" ]; then
|
||||
if [ "${FLAGS_force}" -eq "${FLAGS_TRUE}" ]; then
|
||||
rm -f "${output}"
|
||||
else
|
||||
echo "not overwriting '${output}'" >&2
|
||||
exit "${FLAGS_ERROR}"
|
||||
fi
|
||||
fi
|
||||
if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
|
||||
touch "${output}" 2>/dev/null || die "unable to write to '${output}'"
|
||||
fi
|
||||
|
||||
# Run tests.
|
||||
(
|
||||
if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
|
||||
"./${FLAGS_runner}" |tee "${output}"
|
||||
else
|
||||
"./${FLAGS_runner}"
|
||||
fi
|
||||
)
|
||||
|
||||
if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
|
||||
echo >&2
|
||||
echo "Output written to '${output}'." >&2
|
||||
fi
|
||||
}
|
||||
|
||||
FLAGS "$@" || exit $?
|
||||
[ "${FLAGS_help}" -eq "${FLAGS_FALSE}" ] || exit
|
||||
eval set -- "${FLAGS_ARGV}"
|
||||
main "${@:-}"
|
|
@ -0,0 +1,47 @@
|
|||
#! /bin/sh
|
||||
#
|
||||
# Initialize the local git hooks for this repository.
|
||||
# https://git-scm.com/docs/githooks
|
||||
|
||||
topLevel=$(git rev-parse --show-toplevel)
|
||||
if ! cd "${topLevel}"; then
|
||||
echo "filed to cd into topLevel directory '${topLevel}'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
hooksDir="${topLevel}/.githooks"
|
||||
if ! hooksPath=$(git config core.hooksPath); then
|
||||
hooksPath="${topLevel}/.git/hooks"
|
||||
fi
|
||||
|
||||
src="${hooksDir}/generic"
|
||||
echo "linking hooks..."
|
||||
for hook in \
|
||||
applypatch-msg \
|
||||
pre-applypatch \
|
||||
post-applypatch \
|
||||
pre-commit \
|
||||
pre-merge-commit \
|
||||
prepare-commit-msg \
|
||||
commit-msg \
|
||||
post-commit \
|
||||
pre-rebase \
|
||||
post-checkout \
|
||||
post-merge \
|
||||
pre-push \
|
||||
pre-receive \
|
||||
update \
|
||||
post-receive \
|
||||
post-update \
|
||||
push-to-checkout \
|
||||
pre-auto-gc \
|
||||
post-rewrite \
|
||||
sendemail-validate \
|
||||
fsmonitor-watchman \
|
||||
p4-pre-submit \
|
||||
post-index-change
|
||||
do
|
||||
echo " ${hook}"
|
||||
dest="${hooksPath}/${hook}"
|
||||
ln -sf "${src}" "${dest}"
|
||||
done
|
File diff suppressed because it is too large
|
@ -0,0 +1,39 @@
|
|||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License).
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# Library of shell functions.
|
||||
|
||||
# Convert a relative path into its absolute equivalent.
|
||||
#
|
||||
# This function will automatically prepend the current working directory if the
|
||||
# path is not already absolute. It then removes all parent references (../) to
|
||||
# reconstruct the proper absolute path.
|
||||
#
|
||||
# Args:
|
||||
# shlib_path_: string: relative path
|
||||
# Outputs:
|
||||
# string: absolute path
|
||||
shlib_relToAbsPath()
|
||||
{
|
||||
shlib_path_=$1
|
||||
|
||||
# prepend current directory to relative paths
|
||||
echo "${shlib_path_}" |grep '^/' >/dev/null 2>&1 \
|
||||
|| shlib_path_="${PWD}/${shlib_path_}"
|
||||
|
||||
# clean up the path. if all seds supported true regular expressions, then
|
||||
# this is what it would be:
|
||||
shlib_old_=${shlib_path_}
|
||||
while true; do
|
||||
shlib_new_=`echo "${shlib_old_}" |sed 's/[^/]*\/\.\.\/*//;s/\/\.\//\//'`
|
||||
[ "${shlib_old_}" = "${shlib_new_}" ] && break
|
||||
shlib_old_=${shlib_new_}
|
||||
done
|
||||
echo "${shlib_new_}"
|
||||
|
||||
unset shlib_path_ shlib_old_ shlib_new_
|
||||
}
|
|
@ -0,0 +1,297 @@
|
|||
#! /bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Versions determines the versions of all installed shells.
|
||||
#
|
||||
# Copyright 2008-2020 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 License.
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shlib
|
||||
#
|
||||
# This library provides reusable functions that determine actual names and
|
||||
# versions of installed shells and the OS. The library can also be run as a
|
||||
# script if set executable.
|
||||
#
|
||||
# Disable checks that aren't fully portable (POSIX != portable).
|
||||
# shellcheck disable=SC2006
|
||||
|
||||
ARGV0=`basename "$0"`
|
||||
LSB_RELEASE='/etc/lsb-release'
|
||||
VERSIONS_SHELLS='ash /bin/bash /bin/dash /bin/ksh /bin/mksh /bin/pdksh /bin/zsh /usr/xpg4/bin/sh /bin/sh /sbin/sh'
|
||||
|
||||
true; TRUE=$?
|
||||
false; FALSE=$?
|
||||
ERROR=2
|
||||
|
||||
UNAME_R=`uname -r`
|
||||
UNAME_S=`uname -s`
|
||||
|
||||
__versions_haveStrings=${ERROR}
|
||||
|
||||
versions_osName() {
|
||||
os_name_='unrecognized'
|
||||
os_system_=${UNAME_S}
|
||||
os_release_=${UNAME_R}
|
||||
case ${os_system_} in
|
||||
CYGWIN_NT-*) os_name_='Cygwin' ;;
|
||||
Darwin)
|
||||
os_name_=`/usr/bin/sw_vers -productName`
|
||||
os_version_=`versions_osVersion`
|
||||
case ${os_version_} in
|
||||
10.4|10.4.[0-9]*) os_name_='Mac OS X Tiger' ;;
|
||||
10.5|10.5.[0-9]*) os_name_='Mac OS X Leopard' ;;
|
||||
10.6|10.6.[0-9]*) os_name_='Mac OS X Snow Leopard' ;;
|
||||
10.7|10.7.[0-9]*) os_name_='Mac OS X Lion' ;;
|
||||
10.8|10.8.[0-9]*) os_name_='Mac OS X Mountain Lion' ;;
|
||||
10.9|10.9.[0-9]*) os_name_='Mac OS X Mavericks' ;;
|
||||
10.10|10.10.[0-9]*) os_name_='Mac OS X Yosemite' ;;
|
||||
10.11|10.11.[0-9]*) os_name_='Mac OS X El Capitan' ;;
|
||||
10.12|10.12.[0-9]*) os_name_='macOS Sierra' ;;
|
||||
10.13|10.13.[0-9]*) os_name_='macOS High Sierra' ;;
|
||||
10.14|10.14.[0-9]*) os_name_='macOS Mojave' ;;
|
||||
10.15|10.15.[0-9]*) os_name_='macOS Catalina' ;;
|
||||
11.*) os_name_='macOS Big Sur' ;;
|
||||
12.*) os_name_='macOS Monterey' ;;
|
||||
*) os_name_='macOS' ;;
|
||||
esac
|
||||
;;
|
||||
FreeBSD) os_name_='FreeBSD' ;;
|
||||
Linux) os_name_='Linux' ;;
|
||||
SunOS)
|
||||
os_name_='SunOS'
|
||||
if [ -r '/etc/release' ]; then
|
||||
if grep 'OpenSolaris' /etc/release >/dev/null; then
|
||||
os_name_='OpenSolaris'
|
||||
else
|
||||
os_name_='Solaris'
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ${os_name_}
|
||||
unset os_name_ os_system_ os_release_ os_version_
|
||||
}
|
||||
|
||||
versions_osVersion() {
|
||||
os_version_='unrecognized'
|
||||
os_system_=${UNAME_S}
|
||||
os_release_=${UNAME_R}
|
||||
case ${os_system_} in
|
||||
CYGWIN_NT-*)
|
||||
os_version_=`expr "${os_release_}" : '\([0-9]*\.[0-9]\.[0-9]*\).*'`
|
||||
;;
|
||||
Darwin)
|
||||
os_version_=`/usr/bin/sw_vers -productVersion`
|
||||
;;
|
||||
FreeBSD)
|
||||
os_version_=`expr "${os_release_}" : '\([0-9]*\.[0-9]*\)-.*'`
|
||||
;;
|
||||
Linux)
|
||||
if [ -r '/etc/os-release' ]; then
|
||||
os_version_=`awk -F= '$1~/PRETTY_NAME/{print $2}' /etc/os-release \
|
||||
|sed 's/"//g'`
|
||||
elif [ -r '/etc/redhat-release' ]; then
|
||||
os_version_=`cat /etc/redhat-release`
|
||||
elif [ -r '/etc/SuSE-release' ]; then
|
||||
os_version_=`head -n 1 /etc/SuSE-release`
|
||||
elif [ -r "${LSB_RELEASE}" ]; then
|
||||
if grep -q 'DISTRIB_ID=Ubuntu' "${LSB_RELEASE}"; then
|
||||
# shellcheck disable=SC2002
|
||||
os_version_=`cat "${LSB_RELEASE}" \
|
||||
|awk -F= '$1~/DISTRIB_DESCRIPTION/{print $2}' \
|
||||
|sed 's/"//g;s/ /-/g'`
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
SunOS)
|
||||
if [ -r '/etc/release' ]; then
|
||||
if grep 'OpenSolaris' /etc/release >/dev/null; then # OpenSolaris
|
||||
os_version_=`grep 'OpenSolaris' /etc/release |awk '{print $2"("$3")"}'`
|
||||
else # Solaris
|
||||
major_=`echo "${os_release_}" |sed 's/[0-9]*\.\([0-9]*\)/\1/'`
|
||||
minor_=`grep Solaris /etc/release |sed 's/[^u]*\(u[0-9]*\).*/\1/'`
|
||||
os_version_="${major_}${minor_}"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "${os_version_}"
|
||||
unset os_release_ os_system_ os_version_ major_ minor_
|
||||
}
|
||||
|
||||
versions_shellVersion() {
|
||||
shell_=$1
|
||||
|
||||
shell_present_=${FALSE}
|
||||
case "${shell_}" in
|
||||
ash) [ -x '/bin/busybox' ] && shell_present_=${TRUE} ;;
|
||||
*) [ -x "${shell_}" ] && shell_present_=${TRUE} ;;
|
||||
esac
|
||||
if [ ${shell_present_} -eq ${FALSE} ]; then
|
||||
echo 'not installed'
|
||||
return ${FALSE}
|
||||
fi
|
||||
|
||||
version_=''
|
||||
case ${shell_} in
|
||||
# SunOS shells.
|
||||
/sbin/sh) ;;
|
||||
/usr/xpg4/bin/sh) version_=`versions_shell_xpg4 "${shell_}"` ;;
|
||||
|
||||
# Generic shell.
|
||||
*/sh)
|
||||
# This could be one of any number of shells. Try until one fits.
|
||||
version_=''
|
||||
[ -z "${version_}" ] && version_=`versions_shell_bash "${shell_}"`
|
||||
# dash cannot be self determined yet
|
||||
[ -z "${version_}" ] && version_=`versions_shell_ksh "${shell_}"`
|
||||
# pdksh is covered in versions_shell_ksh()
|
||||
[ -z "${version_}" ] && version_=`versions_shell_xpg4 "${shell_}"`
|
||||
[ -z "${version_}" ] && version_=`versions_shell_zsh "${shell_}"`
|
||||
;;
|
||||
|
||||
# Specific shells.
|
||||
ash) version_=`versions_shell_ash "${shell_}"` ;;
|
||||
# bash - Bourne Again SHell (https://www.gnu.org/software/bash/)
|
||||
*/bash) version_=`versions_shell_bash "${shell_}"` ;;
|
||||
*/dash) version_=`versions_shell_dash` ;;
|
||||
# ksh - KornShell (http://www.kornshell.com/)
|
||||
*/ksh) version_=`versions_shell_ksh "${shell_}"` ;;
|
||||
# mksh - MirBSD Korn Shell (http://www.mirbsd.org/mksh.htm)
|
||||
*/mksh) version_=`versions_shell_ksh "${shell_}"` ;;
|
||||
# pdksh - Public Domain Korn Shell (http://web.cs.mun.ca/~michael/pdksh/)
|
||||
*/pdksh) version_=`versions_shell_pdksh "${shell_}"` ;;
|
||||
# zsh (https://www.zsh.org/)
|
||||
*/zsh) version_=`versions_shell_zsh "${shell_}"` ;;
|
||||
|
||||
# Unrecognized shell.
|
||||
*) version_='invalid'
|
||||
esac
|
||||
|
||||
echo "${version_:-unknown}"
|
||||
unset shell_ version_
|
||||
}
|
||||
|
||||
# The ash shell is included in BusyBox.
|
||||
versions_shell_ash() {
|
||||
busybox --help |head -1 |sed 's/BusyBox v\([0-9.]*\) .*/\1/'
|
||||
}
|
||||
|
||||
versions_shell_bash() {
|
||||
$1 --version : 2>&1 |grep 'GNU bash' |sed 's/.*version \([^ ]*\).*/\1/'
|
||||
}
|
||||
|
||||
# Assuming Ubuntu Linux until somebody comes up with a better test. The
|
||||
# following test will return an empty string if dash is not installed.
|
||||
versions_shell_dash() {
|
||||
eval dpkg >/dev/null 2>&1
|
||||
[ $? -eq 127 ] && return # Return if dpkg not found.
|
||||
|
||||
dpkg -l |grep ' dash ' |awk '{print $3}'
|
||||
}
|
||||
|
||||
versions_shell_ksh() {
|
||||
versions_shell_=$1
|
||||
versions_version_=''
|
||||
|
||||
# Try a few different ways to figure out the version.
|
||||
versions_version_=`${versions_shell_} --version : 2>&1`
|
||||
# shellcheck disable=SC2181
|
||||
if [ $? -eq 0 ]; then
|
||||
versions_version_=`echo "${versions_version_}" \
|
||||
|sed 's/.*\([0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\).*/\1/'`
|
||||
else
|
||||
versions_version_=''
|
||||
fi
|
||||
if [ -z "${versions_version_}" ]; then
|
||||
# shellcheck disable=SC2016
|
||||
versions_version_=`${versions_shell_} -c 'echo ${KSH_VERSION}'`
|
||||
fi
|
||||
if [ -z "${versions_version_}" ]; then
|
||||
_versions_have_strings
|
||||
versions_version_=`strings "${versions_shell_}" 2>&1 \
|
||||
|grep Version \
|
||||
|sed 's/^.*Version \(.*\)$/\1/;s/ s+ \$$//;s/ /-/g'`
|
||||
fi
|
||||
if [ -z "${versions_version_}" ]; then
|
||||
versions_version_=`versions_shell_pdksh "${versions_shell_}"`
|
||||
fi
|
||||
|
||||
echo "${versions_version_}"
|
||||
unset versions_shell_ versions_version_
|
||||
}
|
||||
|
||||
# mksh - MirBSD Korn Shell (http://www.mirbsd.org/mksh.htm)
|
||||
# mksh is a successor to pdksh (Public Domain Korn Shell).
|
||||
versions_shell_mksh() {
|
||||
versions_shell_ksh
|
||||
}
|
||||
|
||||
# pdksh - Public Domain Korn Shell
|
||||
# pdksh is an obsolete shell, which was replaced by mksh (among others).
|
||||
versions_shell_pdksh() {
|
||||
_versions_have_strings
|
||||
strings "$1" 2>&1 \
|
||||
|grep 'PD KSH' \
|
||||
|sed -e 's/.*PD KSH \(.*\)/\1/;s/ /-/g'
|
||||
}
|
||||
|
||||
versions_shell_xpg4() {
|
||||
_versions_have_strings
|
||||
strings "$1" 2>&1 \
|
||||
|grep 'Version' \
|
||||
|sed -e 's/^@(#)Version //'
|
||||
}
|
||||
|
||||
versions_shell_zsh() {
|
||||
versions_shell_=$1
|
||||
|
||||
# Try a few different ways to figure out the version.
|
||||
# shellcheck disable=SC2016
|
||||
versions_version_=`echo 'echo ${ZSH_VERSION}' |${versions_shell_}`
|
||||
if [ -z "${versions_version_}" ]; then
|
||||
versions_version_=`${versions_shell_} --version : 2>&1`
|
||||
# shellcheck disable=SC2181
|
||||
if [ $? -eq 0 ]; then
|
||||
versions_version_=`echo "${versions_version_}" |awk '{print $2}'`
|
||||
else
|
||||
versions_version_=''
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "${versions_version_}"
|
||||
unset versions_shell_ versions_version_
|
||||
}
|
||||
|
||||
# Determine if the 'strings' binary is installed.
|
||||
_versions_have_strings() {
|
||||
[ ${__versions_haveStrings} -ne ${ERROR} ] && return
|
||||
if eval strings /dev/null >/dev/null 2>&1; then
|
||||
__versions_haveStrings=${TRUE}
|
||||
return
|
||||
fi
|
||||
|
||||
echo 'WARN: strings not installed. try installing binutils?' >&2
|
||||
__versions_haveStrings=${FALSE}
|
||||
}
|
||||
|
||||
versions_main() {
|
||||
# Treat unset variables as an error.
|
||||
set -u
|
||||
|
||||
os_name=`versions_osName`
|
||||
os_version=`versions_osVersion`
|
||||
echo "os: ${os_name} version: ${os_version}"
|
||||
|
||||
for shell in ${VERSIONS_SHELLS}; do
|
||||
shell_version=`versions_shellVersion "${shell}"`
|
||||
echo "shell: ${shell} version: ${shell_version}"
|
||||
done
|
||||
}
|
||||
|
||||
if [ "${ARGV0}" = 'versions' ]; then
|
||||
versions_main "$@"
|
||||
fi
|
File diff suppressed because it is too large
|
@ -0,0 +1,64 @@
|
|||
#!/bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# shunit2 unit test for running subset(s) of tests based upon command line args.
|
||||
#
|
||||
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# https://github.com/kward/shunit2
|
||||
#
|
||||
# Also shows how non-default tests or an arbitrary subset of tests can be run.
|
||||
#
|
||||
# Disable source following.
|
||||
# shellcheck disable=SC1090,SC1091
|
||||
|
||||
# Load test helpers.
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
CUSTOM_TEST_RAN=''
|
||||
|
||||
# This test does not normally run because its name does not begin with "test".
# It will be run by setting the script arguments to include the name of this test.
|
||||
custom_test() {
|
||||
# Arbitrary assert.
|
||||
assertTrue 0
|
||||
# The true intent is to set this variable, which will be tested below.
|
||||
CUSTOM_TEST_RAN='yup, we ran'
|
||||
}
|
||||
|
||||
# Verify that `custom_test()` ran.
|
||||
testCustomTestRan() {
|
||||
assertNotNull "'custom_test()' did not run" "${CUSTOM_TEST_RAN}"
|
||||
}
|
||||
|
||||
# Fail if this test runs, which it shouldn't if arguments are set correctly.
|
||||
testShouldFail() {
|
||||
fail 'testShouldFail should not be run if argument parsing works'
|
||||
}
|
||||
|
||||
oneTimeSetUp() {
|
||||
th_oneTimeSetUp
|
||||
}
|
||||
|
||||
# If zero/one argument(s) are provided, this test is being run in its
|
||||
# entirety, and therefore we want to set the arguments to the script to
|
||||
# (simulate and) test the processing of command-line specified tests. If we
|
||||
# don't, then the "testShouldFail" test will run (by default) and the overall
|
||||
# test will fail.
|
||||
#
|
||||
# However, if two or more arguments are provided, then assume this test script
|
||||
# is being run by hand to experiment with command-line test specification, and
|
||||
# then don't override the user provided arguments.
|
||||
if [ "$#" -le 1 ]; then
|
||||
# We set the arguments in a POSIX way, inasmuch as we can;
|
||||
# helpful tip:
|
||||
# https://unix.stackexchange.com/questions/258512/how-to-remove-a-positional-parameter-from
|
||||
set -- '--' 'custom_test' 'testCustomTestRan'
|
||||
fi
|
||||
|
||||
# Load and run shunit2.
|
||||
# shellcheck disable=SC2034
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. "${TH_SHUNIT}"
|
|
@ -0,0 +1,403 @@
|
|||
#! /bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# shunit2 unit test for assert functions.
|
||||
#
|
||||
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shunit2
|
||||
#
|
||||
# In this file, all assert calls under test must be wrapped in () so they do not
|
||||
# influence the metrics of the test itself.
|
||||
#
|
||||
# Disable source following.
|
||||
# shellcheck disable=SC1090,SC1091
|
||||
|
||||
# These variables will be overridden by the test helpers.
|
||||
stdoutF="${TMPDIR:-/tmp}/STDOUT"
|
||||
stderrF="${TMPDIR:-/tmp}/STDERR"
|
||||
|
||||
# Load test helpers.
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
commonEqualsSame() {
|
||||
fn=$1
|
||||
|
||||
# These should succeed.
|
||||
|
||||
desc='equal'
|
||||
if (${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='equal_with_message'
|
||||
if (${fn} 'some message' 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='equal_with_spaces'
|
||||
if (${fn} 'abc def' 'abc def' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='equal_null_values'
|
||||
if (${fn} '' '' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
# These should fail.
|
||||
|
||||
desc='not_equal'
|
||||
if (${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
}
|
||||
|
||||
commonNotEqualsSame() {
|
||||
fn=$1
|
||||
|
||||
# These should succeed.
|
||||
|
||||
desc='not_same'
|
||||
if (${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='not_same_with_message'
|
||||
if (${fn} 'some message' 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
# These should fail.
|
||||
|
||||
desc='same'
|
||||
if (${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
|
||||
desc='unequal_null_values'
|
||||
if (${fn} '' '' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertEquals() { commonEqualsSame 'assertEquals'; }
|
||||
testAssertNotEquals() { commonNotEqualsSame 'assertNotEquals'; }
|
||||
testAssertSame() { commonEqualsSame 'assertSame'; }
|
||||
testAssertNotSame() { commonNotEqualsSame 'assertNotSame'; }
|
||||
|
||||
testAssertContains() {
|
||||
# Content is present.
|
||||
while read -r desc container content; do
|
||||
if (assertContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
done <<EOF
|
||||
abc_at_start abcdef abc
|
||||
bcd_in_middle abcdef bcd
|
||||
def_at_end abcdef def
|
||||
EOF
|
||||
|
||||
# Content missing.
|
||||
while read -r desc container content; do
|
||||
if (assertContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done <<EOF
|
||||
xyz_not_present abcdef xyz
|
||||
zab_contains_start abcdef zab
|
||||
efg_contains_end abcdef efg
|
||||
acf_has_parts abcdef acf
|
||||
EOF
|
||||
|
||||
desc="content_starts_with_dash"
|
||||
if (assertContains 'abc -Xabc def' '-Xabc' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc="contains_with_message"
|
||||
if (assertContains 'some message' 'abcdef' 'abc' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertNotContains() {
|
||||
# Content not present.
|
||||
while read -r desc container content; do
|
||||
if (assertNotContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
done <<EOF
|
||||
xyz_not_present abcdef xyz
|
||||
zab_contains_start abcdef zab
|
||||
efg_contains_end abcdef efg
|
||||
acf_has_parts abcdef acf
|
||||
EOF
|
||||
|
||||
# Content present.
|
||||
while read -r desc container content; do
|
||||
if (assertNotContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done <<EOF
|
||||
abc_is_present abcdef abc
|
||||
EOF
|
||||
|
||||
desc='not_contains_with_message'
|
||||
if (assertNotContains 'some message' 'abcdef' 'xyz' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertNull() {
|
||||
while read -r desc value; do
|
||||
if (assertNull "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done <<'EOF'
|
||||
x_alone x
|
||||
x_double_quote_a x"a
|
||||
x_single_quote_a x'a
|
||||
x_dollar_a x$a
|
||||
x_backtick_a x`a
|
||||
EOF
|
||||
|
||||
desc='null_without_message'
|
||||
if (assertNull '' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='null_with_message'
|
||||
if (assertNull 'some message' '' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc='x_is_not_null'
|
||||
if (assertNull 'x' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertNotNull() {
|
||||
while read -r desc value; do
|
||||
if (assertNotNull "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
done <<'EOF'
|
||||
x_alone x
|
||||
x_double_quote_b x"b
|
||||
x_single_quote_b x'b
|
||||
x_dollar_b x$b
|
||||
x_backtick_b x`b
|
||||
EOF
|
||||
|
||||
desc='not_null_with_message'
|
||||
if (assertNotNull 'some message' 'x' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
|
||||
desc="double_ticks_are_null"
|
||||
if (assertNotNull '' >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertTrue() {
|
||||
# True values.
|
||||
while read -r desc value; do
|
||||
if (assertTrue "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
done <<'EOF'
|
||||
zero 0
|
||||
zero_eq_zero [ 0 -eq 0 ]
|
||||
EOF
|
||||
|
||||
# Not true values.
|
||||
while read -r desc value; do
|
||||
if (assertTrue "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done <<EOF
|
||||
one 1
|
||||
zero_eq_1 [ 0 -eq 1 ]
|
||||
null
|
||||
EOF
|
||||
|
||||
desc='true_with_message'
|
||||
if (assertTrue 'some message' 0 >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
}
|
||||
|
||||
testAssertFalse() {
|
||||
# False values.
|
||||
while read -r desc value; do
|
||||
if (assertFalse "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
done <<EOF
|
||||
one 1
|
||||
zero_eq_1 [ 0 -eq 1 ]
|
||||
null
|
||||
EOF
|
||||
|
||||
# Not true values.
|
||||
while read -r desc value; do
|
||||
if (assertFalse "${value}" >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done <<'EOF'
|
||||
zero 0
|
||||
zero_eq_zero [ 0 -eq 0 ]
|
||||
EOF
|
||||
|
||||
desc='false_with_message'
|
||||
if (assertFalse 'some message' 1 >"${stdoutF}" 2>"${stderrF}"); then
|
||||
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
else
|
||||
fail "${desc}: unexpected failure"
|
||||
_showTestOutput
|
||||
fi
|
||||
}
|
||||
|
||||
FUNCTIONS='
|
||||
assertEquals assertNotEquals
|
||||
assertSame assertNotSame
|
||||
assertContains assertNotContains
|
||||
assertNull assertNotNull
|
||||
assertTrue assertFalse
|
||||
'
|
||||
|
||||
testTooFewArguments() {
|
||||
for fn in ${FUNCTIONS}; do
|
||||
# These functions support zero arguments.
|
||||
case "${fn}" in
|
||||
assertNull) continue ;;
|
||||
assertNotNull) continue ;;
|
||||
esac
|
||||
|
||||
desc="${fn}"
|
||||
if (${fn} >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
got=$? want=${SHUNIT_ERROR}
|
||||
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
|
||||
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
testTooManyArguments() {
|
||||
for fn in ${FUNCTIONS}; do
|
||||
desc="${fn}"
|
||||
if (${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
got=$? want=${SHUNIT_ERROR}
|
||||
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
|
||||
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
oneTimeSetUp() {
|
||||
th_oneTimeSetUp
|
||||
}
|
||||
|
||||
# showTestOutput for the most recently run test.
|
||||
_showTestOutput() { th_showOutput "${SHUNIT_FALSE}" "${stdoutF}" "${stderrF}"; }
|
||||
|
||||
# Load and run shunit2.
|
||||
# shellcheck disable=SC2034
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. "${TH_SHUNIT}"
|
|
@ -0,0 +1,137 @@
|
|||
#! /bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# shUnit2 unit test for failure functions. These functions do not test values.
|
||||
#
|
||||
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shunit2
|
||||
#
|
||||
# Disable source following.
|
||||
# shellcheck disable=SC1090,SC1091
|
||||
|
||||
# These variables will be overridden by the test helpers.
|
||||
stdoutF="${TMPDIR:-/tmp}/STDOUT"
|
||||
stderrF="${TMPDIR:-/tmp}/STDERR"
|
||||
|
||||
# Load test helpers.
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
testFail() {
|
||||
# Test without a message.
|
||||
desc='fail_without_message'
|
||||
if ( fail >"${stdoutF}" 2>"${stderrF}" ); then
|
||||
fail "${desc}: expected a failure"
|
||||
th_showOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
|
||||
# Test with a message.
|
||||
desc='fail_with_message'
|
||||
if ( fail 'some message' >"${stdoutF}" 2>"${stderrF}" ); then
|
||||
fail "${desc}: expected a failure"
|
||||
th_showOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
}
|
||||
|
||||
# FN_TESTS holds all the functions to be tested.
|
||||
# shellcheck disable=SC2006
|
||||
FN_TESTS=`
|
||||
# fn num_args pattern
|
||||
cat <<EOF
|
||||
fail 1
|
||||
failNotEquals 3 but was:
|
||||
failFound 2 found:
|
||||
failNotFound 2 not found:
|
||||
failSame 3 not same
|
||||
failNotSame 3 but was:
|
||||
EOF
|
||||
`
|
||||
|
||||
testFailsWithArgs() {
|
||||
echo "${FN_TESTS}" |\
|
||||
while read -r fn num_args pattern; do
|
||||
case "${fn}" in
|
||||
fail) continue ;;
|
||||
esac
|
||||
|
||||
# Test without a message.
|
||||
desc="${fn}_without_message"
|
||||
if ( ${fn} arg1 arg2 >"${stdoutF}" 2>"${stderrF}" ); then
|
||||
fail "${desc}: expected a failure"
|
||||
th_showOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
|
||||
# Test with a message.
|
||||
arg1='' arg2=''
|
||||
case ${num_args} in
|
||||
1) ;;
|
||||
2) arg1='arg1' ;;
|
||||
3) arg1='arg1' arg2='arg2' ;;
|
||||
esac
|
||||
|
||||
desc="${fn}_with_message"
|
||||
if ( ${fn} 'some message' ${arg1} ${arg2} >"${stdoutF}" 2>"${stderrF}" ); then
|
||||
fail "${desc}: expected a failure"
|
||||
th_showOutput
|
||||
else
|
||||
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
|
||||
if ! grep -- "${pattern}" "${stdoutF}" >/dev/null; then
|
||||
fail "${desc}: incorrect message to STDOUT"
|
||||
th_showOutput
|
||||
fi
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
testTooFewArguments() {
|
||||
echo "${FN_TESTS}" \
|
||||
|while read -r fn num_args pattern; do
|
||||
# Skip functions that support a single message argument.
|
||||
if [ "${num_args}" -eq 1 ]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
desc="${fn}"
|
||||
if (${fn} >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
got=$? want=${SHUNIT_ERROR}
|
||||
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
|
||||
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
testTooManyArguments() {
|
||||
echo "${FN_TESTS}" \
|
||||
|while read -r fn num_args pattern; do
|
||||
desc="${fn}"
|
||||
if (${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}"); then
|
||||
fail "${desc}: expected a failure"
|
||||
_showTestOutput
|
||||
else
|
||||
got=$? want=${SHUNIT_ERROR}
|
||||
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
|
||||
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
oneTimeSetUp() {
|
||||
th_oneTimeSetUp
|
||||
}
|
||||
|
||||
# Load and run shUnit2.
|
||||
# shellcheck disable=SC2034
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. "${TH_SHUNIT}"
@@ -0,0 +1,99 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests for general commands.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Disable source following.
# shellcheck disable=SC1090,SC1091

# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"

# Load test helpers.
. ./shunit2_test_helpers

testSkipping() {
  # We shouldn't be skipping to start.
  if isSkipping; then
    th_error 'skipping *should not be* enabled'
    return
  fi

  startSkipping
  was_skipping_started=${SHUNIT_FALSE}
  if isSkipping; then was_skipping_started=${SHUNIT_TRUE}; fi

  endSkipping
  was_skipping_ended=${SHUNIT_FALSE}
  if isSkipping; then was_skipping_ended=${SHUNIT_TRUE}; fi

  assertEquals "skipping wasn't started" "${was_skipping_started}" "${SHUNIT_TRUE}"
  assertNotEquals "skipping wasn't ended" "${was_skipping_ended}" "${SHUNIT_TRUE}"
  return 0
}

testStartSkippingWithMessage() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  sed 's/^#//' >"${unittestF}" <<\EOF
## Start skipping with a message.
#testSkipping() {
#  startSkipping 'SKIP-a-Dee-Doo-Dah'
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
  # Ignoring errors with `|| :` as we only care about `FAILED` in the output.
  ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
  if ! grep '\[skipping\] SKIP-a-Dee-Doo-Dah' "${stderrF}" >/dev/null; then
    fail 'skipping message was not generated'
  fi
  return 0
}

testStartSkippingWithoutMessage() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  sed 's/^#//' >"${unittestF}" <<\EOF
## Start skipping with a message.
#testSkipping() {
#  startSkipping
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
  # Ignoring errors with `|| :` as we only care about `FAILED` in the output.
  ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
  if grep '\[skipping\]' "${stderrF}" >/dev/null; then
    fail 'skipping message was unexpectedly generated'
  fi
  return 0
}

setUp() {
  for f in "${stdoutF}" "${stderrF}"; do
    cp /dev/null "${f}"
  done

  # Reconfigure coloring as some tests override default behavior.
  _shunit_configureColor "${SHUNIT_COLOR_DEFAULT}"

  # shellcheck disable=SC2034,SC2153
  SHUNIT_CMD_TPUT=${__SHUNIT_CMD_TPUT}
}

oneTimeSetUp() {
  SHUNIT_COLOR_DEFAULT="${SHUNIT_COLOR}"
  th_oneTimeSetUp
}

# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. "${TH_SHUNIT}"
@@ -0,0 +1,247 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shunit2 unit test for macros.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Disable source following.
# shellcheck disable=SC1090,SC1091

# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"

# Load test helpers.
. ./shunit2_test_helpers

testAssertEquals() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_EQUALS_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_EQUALS_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testAssertNotEquals() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_NOT_EQUALS_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_EQUALS_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_NOT_EQUALS_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_EQUALS_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testSame() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_SAME_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_SAME_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testNotSame() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_NOT_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_SAME_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_NOT_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_SAME_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testNull() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_NULL_} 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NULL_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_NULL_} '"some msg"' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NULL_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testNotNull() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_NOT_NULL_} '' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_NULL_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_NOT_NULL_} '"some msg"' '""' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_NOT_NULL_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testAssertTrue() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_TRUE_} "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_TRUE_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_TRUE_} '"some msg"' "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_TRUE_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testAssertFalse() {
  isLinenoWorking || startSkipping

  ( ${_ASSERT_FALSE_} "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_FALSE_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_ASSERT_FALSE_} '"some msg"' "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_ASSERT_FALSE_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testFail() {
  isLinenoWorking || startSkipping

  ( ${_FAIL_} >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_FAIL_} '"some msg"' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testFailNotEquals() {
  isLinenoWorking || startSkipping

  ( ${_FAIL_NOT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_NOT_EQUALS_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_FAIL_NOT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_NOT_EQUALS_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testFailSame() {
  isLinenoWorking || startSkipping

  ( ${_FAIL_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_SAME_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_FAIL_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_SAME_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

testFailNotSame() {
  isLinenoWorking || startSkipping

  ( ${_FAIL_NOT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_NOT_SAME_ failed to produce an ASSERT message'
    showTestOutput
  fi

  ( ${_FAIL_NOT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
  if ! wasAssertGenerated; then
    fail '_FAIL_NOT_SAME_ (with a message) failed to produce an ASSERT message'
    showTestOutput
  fi
}

oneTimeSetUp() {
  th_oneTimeSetUp

  if ! isLinenoWorking; then
    # shellcheck disable=SC2016
    th_warn '${LINENO} is not working for this shell. Tests will be skipped.'
  fi
}

# isLinenoWorking returns true if the `$LINENO` shell variable works properly.
isLinenoWorking() {
  # shellcheck disable=SC2016
  ln='eval echo "${LINENO:-}"'
  case ${ln} in
    [0-9]*) return "${SHUNIT_TRUE}" ;;
    -[0-9]*) return "${SHUNIT_FALSE}" ;;  # The dash shell produces negative values.
  esac
  return "${SHUNIT_FALSE}"
}

# showTestOutput for the most recently run test.
showTestOutput() { th_showOutput "${SHUNIT_FALSE}" "${stdoutF}" "${stderrF}"; }

# wasAssertGenerated returns true if an ASSERT was generated to STDOUT.
wasAssertGenerated() { grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null; }

# Disable output coloring as it breaks the tests.
SHUNIT_COLOR='none'; export SHUNIT_COLOR

# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT="$0"
. "${TH_SHUNIT}"
@@ -0,0 +1,292 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests of miscellaneous things
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Allow usage of legacy backticked `...` notation instead of $(...).
# shellcheck disable=SC2006
# Disable source following.
# shellcheck disable=SC1090,SC1091

# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"

# Load test helpers.
. ./shunit2_test_helpers

# Note: the test script is prefixed with '#' chars so that shUnit2 does not
# incorrectly interpret the embedded functions as real functions.
testUnboundVariable() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  sed 's/^#//' >"${unittestF}" <<EOF
## Treat unset variables as an error when performing parameter expansion.
#set -u
#
#boom() { x=\$1; }  # This function goes boom if no parameters are passed!
#test_boom() {
#  assertEquals 1 1
#  boom  # No parameter given
#  assertEquals 0 \$?
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
  if ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ); then
    fail 'expected a non-zero exit value'
  fi
  if ! grep '^ASSERT:unknown failure' "${stdoutF}" >/dev/null; then
    fail 'assert message was not generated'
  fi
  if ! grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null; then
    fail 'test count message was not generated'
  fi
  if ! grep '^FAILED' "${stdoutF}" >/dev/null; then
    fail 'failure message was not generated'
  fi
}

# assertEquals repeats message argument.
# https://github.com/kward/shunit2/issues/7
testIssue7() {
  # Disable coloring so 'ASSERT:' lines can be matched correctly.
  _shunit_configureColor 'none'

  # Ignoring errors with `|| :` as we only care about the message in this test.
  ( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" ) || :
  diff "${stdoutF}" - >/dev/null <<EOF
ASSERT:Some message. expected:<1> but was:<2>
EOF
  rtrn=$?
  assertEquals "${SHUNIT_TRUE}" "${rtrn}"
  [ "${rtrn}" -eq "${SHUNIT_TRUE}" ] || cat "${stderrF}" >&2
}

# Support prefixes on test output.
# https://github.com/kward/shunit2/issues/29
testIssue29() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  sed 's/^#//' >"${unittestF}" <<EOF
## Support test prefixes.
#test_assert() { assertTrue ${SHUNIT_TRUE}; }
#SHUNIT_COLOR='none'
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
  ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
  grep '^--- test_assert' "${stdoutF}" >/dev/null
  rtrn=$?
  assertEquals "${SHUNIT_TRUE}" "${rtrn}"
  [ "${rtrn}" -eq "${SHUNIT_TRUE}" ] || cat "${stdoutF}" >&2
}

# Test that certain external commands sometimes "stubbed" by users are escaped.
testIssue54() {
  for c in mkdir rm cat chmod sed; do
    if grep "^[^#]*${c} " "${TH_SHUNIT}" | grep -qv "command ${c}"; then
      fail "external call to ${c} not protected somewhere"
    fi
  done
  # shellcheck disable=2016
  if grep '^[^#]*[^ ] *\[' "${TH_SHUNIT}" | grep -qv '${__SHUNIT_BUILTIN} \['; then
    fail 'call to [ not protected somewhere'
  fi
  # shellcheck disable=2016
  if grep '^[^#]* *\.' "${TH_SHUNIT}" | grep -qv '${__SHUNIT_BUILTIN} \.'; then
    fail 'call to . not protected somewhere'
  fi
}

# shUnit2 should not exit with 0 when it has syntax errors.
# https://github.com/kward/shunit2/issues/69
testIssue69() {
  unittestF="${SHUNIT_TMPDIR}/unittest"

  # Note: assertNull not tested as zero arguments == null, which is valid.
  for t in Equals NotEquals NotNull Same NotSame True False; do
    assert="assert${t}"
    sed 's/^#//' >"${unittestF}" <<EOF
## Asserts with invalid argument counts should be counted as failures.
#test_assert() { ${assert}; }
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
    # Ignoring errors with `|| :` as we only care about `FAILED` in the output.
    ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
    grep '^FAILED' "${stdoutF}" >/dev/null
    assertTrue "failure message for ${assert} was not generated" $?
  done
}

# Ensure that test fails if setup/teardown functions fail.
testIssue77() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  for func in oneTimeSetUp setUp tearDown oneTimeTearDown; do
    sed 's/^#//' >"${unittestF}" <<EOF
## Environment failure should end test.
#${func}() { return ${SHUNIT_FALSE}; }
#test_true() { assertTrue ${SHUNIT_TRUE}; }
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
    # Ignoring errors with `|| :` as we only care about `FAILED` in the output.
    ( exec "${SHELL:-sh}" "${unittestF}" ) >"${stdoutF}" 2>"${stderrF}" || :
    grep '^FAILED' "${stdoutF}" >/dev/null
    assertTrue "failure of ${func}() did not end test" $?
  done
}

# Ensure a test failure is recorded for code containing syntax errors.
# https://github.com/kward/shunit2/issues/84
testIssue84() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  sed 's/^#//' >"${unittestF}" <<\EOF
## Function with syntax error.
#syntax_error() { ${!#3442} -334 a$@2[1]; }
#test_syntax_error() {
#  syntax_error
#  assertTrue ${SHUNIT_TRUE}
#}
#SHUNIT_COLOR='none'
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
  # Ignoring errors with `|| :` as we only care about `FAILED` in the output.
  ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
  if ! grep '^FAILED' "${stdoutF}" >/dev/null; then
    fail 'failure message was not generated'
  fi
}

# Demonstrate that asserts are no longer executed in subshells.
# https://github.com/kward/shunit2/issues/123
#
# NOTE: this test only works if the `${BASH_SUBSHELL}` variable is present.
testIssue123() {
  if [ -z "${BASH_SUBSHELL:-}" ]; then
    # shellcheck disable=SC2016
    startSkipping 'The ${BASH_SUBSHELL} variable is unavailable in this shell.'
  fi
  # shellcheck disable=SC2016
  assertTrue 'not in subshell' '[[ ${BASH_SUBSHELL} -eq 0 ]]'
}
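Aside: testIssue123 leans on bash's BASH_SUBSHELL counter, which is 0 in the main shell and increments inside `( ... )`; shells without the variable simply skip the test. A tiny illustration of that behavior (assumes bash or another shell that provides the variable):

```sh
#! /bin/sh
# Illustration only: BASH_SUBSHELL is 0 at the top level and becomes 1 inside
# a ( ... ) subshell, which is what testIssue123 uses to prove the assert body
# runs in the main shell rather than a subshell.
if [ -z "${BASH_SUBSHELL:-}" ]; then
  echo 'BASH_SUBSHELL unavailable in this shell; nothing to demonstrate' >&2
  exit 0
fi
echo "top level:      BASH_SUBSHELL=${BASH_SUBSHELL}"      # expect 0
( echo "inside ( ... ): BASH_SUBSHELL=${BASH_SUBSHELL}" )  # expect 1
```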
testPrepForSourcing() {
  assertEquals '/abc' "`_shunit_prepForSourcing '/abc'`"
  assertEquals './abc' "`_shunit_prepForSourcing './abc'`"
  assertEquals './abc' "`_shunit_prepForSourcing 'abc'`"
}

# Test the various ways of declaring functions.
#
# Prefixing (then stripping) with comment symbol so these functions aren't
# treated as real functions by shUnit2.
testExtractTestFunctions() {
  f="${SHUNIT_TMPDIR}/extract_test_functions"
  sed 's/^#//' <<EOF >"${f}"
## Function on a single line.
#testABC() { echo 'ABC'; }
## Multi-line function with '{' on next line.
#test_def()
# {
#  echo 'def'
#}
## Multi-line function with '{' on first line.
#testG3 () {
#  echo 'G3'
#}
## Function with numerical values in name.
#function test4() { echo '4'; }
## Leading space in front of function.
# test5() { echo '5'; }
## Function with '_' chars in name.
#some_test_function() { echo 'some func'; }
## Function that sets variables.
#func_with_test_vars() {
#  testVariable=1234
#}
## Function with keyword but no parenthesis
#function test6 { echo '6'; }
## Function with keyword but no parenthesis, multi-line
#function test7 {
#  echo '7';
#}
## Function with no parenthesis, '{' on next line
#function test8
#{
#  echo '8'
#}
## Function with hyphenated name
#test-9() {
#  echo '9';
#}
## Function without parenthesis or keyword
#test_foobar { echo 'hello world'; }
## Function with multiple function keywords
#function function test_test_test() { echo 'lorem'; }
EOF

  actual=`_shunit_extractTestFunctions "${f}"`
  assertEquals 'testABC test_def testG3 test4 test5 test6 test7 test8 test-9' "${actual}"
}

testColors() {
  while read -r cmd colors desc; do
    SHUNIT_CMD_TPUT=${cmd}
    want=${colors} got=`_shunit_colors`
    assertEquals "${desc}: incorrect number of colors;" \
        "${got}" "${want}"
  done <<'EOF'
missing_tput 16 missing tput command
mock_tput 256 mock tput command
EOF
}

testColorsWitoutTERM() {
  SHUNIT_CMD_TPUT='mock_tput'
  got=`TERM='' _shunit_colors`
  want=16
  assertEquals "${got}" "${want}"
}

mock_tput() {
  if [ -z "${TERM}" ]; then
    # shellcheck disable=SC2016
    echo 'tput: No value for $TERM and no -T specified'
    return 2
  fi
  if [ "$1" = 'colors' ]; then
    echo 256
    return 0
  fi
  return 1
}

setUp() {
  for f in "${stdoutF}" "${stderrF}"; do
    cp /dev/null "${f}"
  done

  # Reconfigure coloring as some tests override default behavior.
  _shunit_configureColor "${SHUNIT_COLOR_DEFAULT}"

  # shellcheck disable=SC2034,SC2153
  SHUNIT_CMD_TPUT=${__SHUNIT_CMD_TPUT}
}

oneTimeSetUp() {
  SHUNIT_COLOR_DEFAULT="${SHUNIT_COLOR}"
  th_oneTimeSetUp
}

# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. "${TH_SHUNIT}"
@@ -0,0 +1,70 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests for `shopt` support.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Disable source following.
# shellcheck disable=SC1090,SC1091

# Load test helpers.
. ./shunit2_test_helpers

# Call shopt from a variable so it can be mocked if it doesn't work.
SHOPT_CMD='shopt'

testNullglob() {
  isShoptWorking || startSkipping

  nullglob=$(${SHOPT_CMD} nullglob |cut -f2)

  # Test without nullglob.
  ${SHOPT_CMD} -u nullglob
  assertEquals 'test without nullglob' 0 0

  # Test with nullglob.
  ${SHOPT_CMD} -s nullglob
  assertEquals 'test with nullglob' 1 1

  # Reset nullglob.
  if [ "${nullglob}" = "on" ]; then
    ${SHOPT_CMD} -s nullglob
  else
    ${SHOPT_CMD} -u nullglob
  fi

  unset nullglob
}

oneTimeSetUp() {
  th_oneTimeSetUp

  if ! isShoptWorking; then
    SHOPT_CMD='mock_shopt'
  fi
}

# isShoptWorking returns true if the `shopt` shell command is available.
# NOTE: `shopt` is not defined as part of the POSIX standard.
isShoptWorking() {
  # shellcheck disable=SC2039,SC3044
  ( shopt >/dev/null 2>&1 );
}

mock_shopt() {
  if [ $# -eq 0 ]; then
    echo "nullglob off"
  fi
  return
}

# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT="$0"
. "${TH_SHUNIT}"
@@ -0,0 +1,36 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit test for standalone operation.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# This unit test is purely to test that calling shunit2 directly, while passing
# the name of a unit test script, works. When run, this script determines if it
# is running as a standalone program, and calls main() if it is.
#
# Disable source following.
# shellcheck disable=SC1090,SC1091

ARGV0=$(basename "$0")

# Load test helpers.
. ./shunit2_test_helpers

testStandalone() {
  assertTrue "${SHUNIT_TRUE}"
}

main() {
  ${TH_SHUNIT} "${ARGV0}"
}

# Run main() if we are running as a standalone script.
if [ "${ARGV0}" = 'shunit2_standalone_test.sh' ]; then
  main "$@"
fi
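Aside: the last few lines use the common basename "$0" idiom so the file works both when sourced by the runner and when executed directly. A generic sketch of the same dual-use layout (hypothetical file name):

```sh
#! /bin/sh
# Sketch of the dual-use idiom above: a script that can be sourced as a
# library or executed directly, dispatching to main() only in the second case
# by comparing basename "$0" against its own (hypothetical) file name.
ARGV0=$(basename "$0")

greet() { echo "hello from ${ARGV0}"; }

main() {
  greet
}

# When sourced, "$0" is the parent script (or the interactive shell), so the
# comparison fails and main() is skipped; when executed, the names match.
if [ "${ARGV0}" = 'dual_use_example.sh' ]; then
  main "$@"
fi
```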
|
|
@ -1,124 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit test suite runner.
|
||||
#
|
||||
# This script runs all the unit tests that can be found, and generates a nice
|
||||
# report of the tests.
|
||||
|
||||
MY_NAME=`basename $0`
|
||||
MY_PATH=`dirname $0`
|
||||
|
||||
PREFIX='shunit2_test_'
|
||||
SHELLS='/bin/sh /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh'
|
||||
TESTS=''
|
||||
for test in ${PREFIX}[a-z]*.sh; do
|
||||
TESTS="${TESTS} ${test}"
|
||||
done
|
||||
|
||||
# load common unit test functions
|
||||
. ../lib/versions
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
usage()
|
||||
{
|
||||
echo "usage: ${MY_NAME} [-e key=val ...] [-s shell(s)] [-t test(s)]"
|
||||
}
|
||||
|
||||
env=''
|
||||
|
||||
# process command line flags
|
||||
while getopts 'e:hs:t:' opt; do
|
||||
case ${opt} in
|
||||
e) # set an environment variable
|
||||
key=`expr "${OPTARG}" : '\([^=]*\)='`
|
||||
val=`expr "${OPTARG}" : '[^=]*=\(.*\)'`
|
||||
if [ -z "${key}" -o -z "${val}" ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
eval "${key}='${val}'"
|
||||
export ${key}
|
||||
env="${env:+${env} }${key}"
|
||||
;;
|
||||
h) usage; exit 0 ;; # output help
|
||||
s) shells=${OPTARG} ;; # list of shells to run
|
||||
t) tests=${OPTARG} ;; # list of tests to run
|
||||
*) usage; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
shift `expr ${OPTIND} - 1`
|
||||
|
||||
# fill shells and/or tests
|
||||
shells=${shells:-${SHELLS}}
|
||||
tests=${tests:-${TESTS}}
|
||||
|
||||
# error checking
|
||||
if [ -z "${tests}" ]; then
|
||||
th_error 'no tests found to run; exiting'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cat <<EOF
|
||||
#------------------------------------------------------------------------------
|
||||
# System data
|
||||
#
|
||||
|
||||
# test run info
|
||||
shells: ${shells}
|
||||
tests: ${tests}
|
||||
EOF
|
||||
for key in ${env}; do
|
||||
eval "echo \"${key}=\$${key}\""
|
||||
done
|
||||
echo
|
||||
|
||||
# output system data
|
||||
echo "# system info"
|
||||
echo "$ date"
|
||||
date
|
||||
echo
|
||||
|
||||
echo "$ uname -mprsv"
|
||||
uname -mprsv
|
||||
|
||||
#
|
||||
# run tests
|
||||
#
|
||||
|
||||
for shell in ${shells}; do
|
||||
echo
|
||||
|
||||
# check for existance of shell
|
||||
if [ ! -x ${shell} ]; then
|
||||
th_warn "unable to run tests with the ${shell} shell"
|
||||
continue
|
||||
fi
|
||||
|
||||
cat <<EOF
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Running the test suite with ${shell}
|
||||
#
|
||||
EOF
|
||||
|
||||
SHUNIT_SHELL=${shell} # pass shell onto tests
|
||||
shell_name=`basename ${shell}`
|
||||
shell_version=`versions_shellVersion "${shell}"`
|
||||
|
||||
echo "shell name: ${shell_name}"
|
||||
echo "shell version: ${shell_version}"
|
||||
|
||||
# execute the tests
|
||||
for suite in ${tests}; do
|
||||
suiteName=`expr "${suite}" : "${PREFIX}\(.*\).sh"`
|
||||
echo
|
||||
echo "--- Executing the '${suiteName}' test suite ---"
|
||||
( exec ${shell} ./${suite} 2>&1; )
|
||||
done
|
||||
done
|
|
@ -1,206 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit test for assert functions
|
||||
|
||||
# load test helpers
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite tests
|
||||
#
|
||||
|
||||
commonEqualsSame()
|
||||
{
|
||||
fn=$1
|
||||
|
||||
( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'equal' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'equal; with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} 'abc def' 'abc def' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'equal with spaces' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'not equal' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} '' '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'null values' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} arg1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
commonNotEqualsSame()
|
||||
{
|
||||
fn=$1
|
||||
|
||||
( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} "${MSG}" 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not same, with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} '' '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} arg1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( ${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testAssertEquals()
|
||||
{
|
||||
commonEqualsSame 'assertEquals'
|
||||
}
|
||||
|
||||
testAssertNotEquals()
|
||||
{
|
||||
commonNotEqualsSame 'assertNotEquals'
|
||||
}
|
||||
|
||||
testAssertSame()
|
||||
{
|
||||
commonEqualsSame 'assertSame'
|
||||
}
|
||||
|
||||
testAssertNotSame()
|
||||
{
|
||||
commonNotEqualsSame 'assertNotSame'
|
||||
}
|
||||
|
||||
testAssertNull()
|
||||
{
|
||||
( assertNull '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'null' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNull "${MSG}" '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'null, with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNull 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'not null' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNull >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testAssertNotNull()
|
||||
{
|
||||
( assertNotNull 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull "${MSG}" 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null, with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull 'x"b' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null, with double-quote' $? \
|
||||
"${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull "x'b" >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null, with single-quote' $? \
|
||||
"${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull 'x$b' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null, with dollar' $? \
|
||||
"${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull 'x`b' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'not null, with backtick' $? \
|
||||
"${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertNotNull '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
# there is no test for too few arguments as $1 might actually be null
|
||||
|
||||
( assertNotNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testAssertTrue()
|
||||
{
|
||||
( assertTrue 0 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'true' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue "${MSG}" 0 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'true, with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue '[ 0 -eq 0 ]' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'true condition' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue 1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'false' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue '[ 0 -eq 1 ]' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'false condition' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertTrue arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testAssertFalse()
|
||||
{
|
||||
( assertFalse 1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'false' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse "${MSG}" 1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'false, with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse '[ 0 -eq 1 ]' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertTrueWithNoOutput 'false condition' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse 0 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'true' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse '[ 0 -eq 0 ]' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'true condition' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'true condition' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( assertFalse arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite functions
|
||||
#
|
||||
|
||||
oneTimeSetUp()
|
||||
{
|
||||
th_oneTimeSetUp
|
||||
|
||||
MSG='This is a test message'
|
||||
}
|
||||
|
||||
# load and run shUnit2
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. ${TH_SHUNIT}
|
|
@ -1,86 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit test for failure functions
|
||||
|
||||
# load common unit-test functions
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# suite tests
|
||||
#
|
||||
|
||||
testFail()
|
||||
{
|
||||
( fail >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'fail' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( fail "${MSG}" >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'fail with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( fail arg1 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testFailNotEquals()
|
||||
{
|
||||
( failNotEquals 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failNotEquals "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'same with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failNotEquals 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'not same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failNotEquals '' '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failNotEquals >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failNotEquals arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
testFailSame()
|
||||
{
|
||||
( failSame 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failSame "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'same with msg' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failSame 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'not same' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failSame '' '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failSame >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
|
||||
|
||||
( failSame arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
|
||||
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
|
||||
}
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# suite functions
|
||||
#
|
||||
|
||||
oneTimeSetUp()
|
||||
{
|
||||
th_oneTimeSetUp
|
||||
|
||||
MSG='This is a test message'
|
||||
}
|
||||
|
||||
# load and run shUnit2
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. ${TH_SHUNIT}
|
|
@ -1,27 +1,37 @@
|
|||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
# shUnit2 unit test common functions
|
||||
#
|
||||
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shunit2
|
||||
#
|
||||
# shUnit2 unit test common functions
|
||||
### ShellCheck (http://www.shellcheck.net/)
|
||||
# expr may be antiquated, but it is the only solution in some cases.
|
||||
# shellcheck disable=SC2003
|
||||
# $() are not fully portable (POSIX != portable).
|
||||
# shellcheck disable=SC2006
|
||||
|
||||
# treat unset variables as an error when performing parameter expansion
|
||||
# Exit immediately if a simple command exits with a non-zero status.
|
||||
set -e
|
||||
|
||||
# Treat unset variables as an error when performing parameter expansion.
|
||||
set -u
|
||||
|
||||
# set shwordsplit for zsh
|
||||
# Set shwordsplit for zsh.
|
||||
[ -n "${ZSH_VERSION:-}" ] && setopt shwordsplit
|
||||
|
||||
#
|
||||
# constants
|
||||
# Constants.
|
||||
#
|
||||
|
||||
# path to shUnit2 library. can be overridden by setting SHUNIT_INC
|
||||
TH_SHUNIT=${SHUNIT_INC:-./shunit2}
|
||||
# Path to shUnit2 library. Can be overridden by setting SHUNIT_INC.
|
||||
TH_SHUNIT=${SHUNIT_INC:-./shunit2}; export TH_SHUNIT
|
||||
|
||||
# configure debugging. set the DEBUG environment variable to any
|
||||
# Configure debugging. Set the DEBUG environment variable to any
|
||||
# non-empty value to enable debug output, or TRACE to enable trace
|
||||
# output.
|
||||
TRACE=${TRACE:+'th_trace '}
|
||||
|
@ -32,49 +42,50 @@ DEBUG=${DEBUG:+'th_debug '}
|
|||
[ -z "${DEBUG}" ] && DEBUG=':'
|
||||
|
||||
#
|
||||
# variables
|
||||
# Variables.
|
||||
#
|
||||
|
||||
th_RANDOM=0
|
||||
|
||||
#
|
||||
# functions
|
||||
# Functions.
|
||||
#
|
||||
|
||||
# message functions
|
||||
th_trace() { echo "${MY_NAME}:TRACE $@" >&2; }
|
||||
th_debug() { echo "${MY_NAME}:DEBUG $@" >&2; }
|
||||
th_info() { echo "${MY_NAME}:INFO $@" >&2; }
|
||||
th_warn() { echo "${MY_NAME}:WARN $@" >&2; }
|
||||
th_error() { echo "${MY_NAME}:ERROR $@" >&2; }
|
||||
th_fatal() { echo "${MY_NAME}:FATAL $@" >&2; }
|
||||
# Logging functions.
|
||||
th_trace() { echo "test:TRACE $*" >&2; }
|
||||
th_debug() { echo "test:DEBUG $*" >&2; }
|
||||
th_info() { echo "test:INFO $*" >&2; }
|
||||
th_warn() { echo "test:WARN $*" >&2; }
|
||||
th_error() { echo "test:ERROR $*" >&2; }
|
||||
th_fatal() { echo "test:FATAL $*" >&2; }
|
||||
|
||||
# output subtest name
|
||||
th_subtest() { echo " $@" >&2; }
|
||||
# Output subtest name.
|
||||
th_subtest() { echo " $*" >&2; }
|
||||
|
||||
th_oneTimeSetUp()
|
||||
{
|
||||
# these files will be cleaned up automatically by shUnit2
|
||||
th_oneTimeSetUp() {
|
||||
# These files will be cleaned up automatically by shUnit2.
|
||||
stdoutF="${SHUNIT_TMPDIR}/stdout"
|
||||
stderrF="${SHUNIT_TMPDIR}/stderr"
|
||||
returnF="${SHUNIT_TMPDIR}/return"
|
||||
expectedF="${SHUNIT_TMPDIR}/expected"
|
||||
export stdoutF stderrF returnF expectedF
|
||||
}
|
||||
|
||||
# generate a random number
|
||||
th_generateRandom()
|
||||
{
|
||||
# Generate a random number.
|
||||
th_generateRandom() {
|
||||
tfgr_random=${th_RANDOM}
|
||||
|
||||
while [ "${tfgr_random}" = "${th_RANDOM}" ]; do
|
||||
# shellcheck disable=SC2039
|
||||
if [ -n "${RANDOM:-}" ]; then
|
||||
# $RANDOM works
|
||||
# shellcheck disable=SC2039
|
||||
tfgr_random=${RANDOM}${RANDOM}${RANDOM}$$
|
||||
elif [ -r '/dev/urandom' ]; then
|
||||
tfgr_random=`od -vAn -N4 -tu4 </dev/urandom |sed 's/^[^0-9]*//'`
|
||||
else
|
||||
tfgr_date=`date '+%H%M%S'`
|
||||
tfgr_random=`expr ${tfgr_date} \* $$`
|
||||
tfgr_random=`expr "${tfgr_date}" \* $$`
|
||||
unset tfgr_date
|
||||
fi
|
||||
[ "${tfgr_random}" = "${th_RANDOM}" ] && sleep 1
|
||||
|
@ -84,21 +95,20 @@ th_generateRandom()
|
|||
unset tfgr_random
|
||||
}
|
||||
|
||||
# this section returns the data section from the specified section of a file. a
|
||||
# datasection is defined by a [header], one or more lines of data, and then a
|
||||
# This section returns the data section from the specified section of a file. A
|
||||
# data section is defined by a [header], one or more lines of data, and then a
|
||||
# blank line.
|
||||
th_getDataSect()
|
||||
{
|
||||
th_getDataSect() {
|
||||
th_sgrep "\\[$1\\]" "$2" |sed '1d'
|
||||
}
|
||||
|
||||
# this function greps a section from a file. a section is defined as a group of
|
||||
# lines preceeded and followed by blank lines.
|
||||
th_sgrep()
|
||||
{
|
||||
# This function greps a section from a file. a section is defined as a group of
|
||||
# lines preceded and followed by blank lines..
|
||||
th_sgrep() {
|
||||
th_pattern_=$1
|
||||
shift
|
||||
|
||||
# shellcheck disable=SC2068
|
||||
sed -e '/./{H;$!d;}' -e "x;/${th_pattern_}/"'!d;' $@ |sed '1d'
|
||||
|
||||
unset th_pattern_
|
||||
|
@ -113,19 +123,19 @@ th_sgrep()
|
|||
# th_rtrn_: integer: the return value of the subtest performed
|
||||
# th_stdout_: string: filename where stdout was redirected to
|
||||
# th_stderr_: string: filename where stderr was redirected to
|
||||
th_assertTrueWithNoOutput()
|
||||
{
|
||||
th_assertTrueWithNoOutput() {
|
||||
th_test_=$1
|
||||
th_rtrn_=$2
|
||||
th_stdout_=$3
|
||||
th_stderr_=$4
|
||||
|
||||
assertTrue "${th_test_}; expected return value of zero" ${th_rtrn_}
|
||||
[ ${th_rtrn_} -ne ${SHUNIT_TRUE} ] && cat "${th_stderr_}"
|
||||
assertFalse "${th_test_}; expected no output to STDOUT" \
|
||||
"[ -s '${th_stdout_}' ]"
|
||||
assertFalse "${th_test_}; expected no output to STDERR" \
|
||||
"[ -s '${th_stderr_}' ]"
|
||||
assertEquals "${th_test_}: expected return value of true" "${SHUNIT_TRUE}" "${th_rtrn_}"
|
||||
assertFalse "${th_test_}: expected no output to STDOUT" "[ -s '${th_stdout_}' ]"
|
||||
assertFalse "${th_test_}: expected no output to STDERR" "[ -s '${th_stderr_}' ]"
|
||||
# shellcheck disable=SC2166
|
||||
if [ -s "${th_stdout_}" -o -s "${th_stderr_}" ]; then
|
||||
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
|
||||
fi
|
||||
|
||||
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
|
||||
}
|
||||
|
@ -145,13 +155,13 @@ th_assertFalseWithOutput()
|
|||
th_stdout_=$3
|
||||
th_stderr_=$4
|
||||
|
||||
assertFalse "${th_test_}; expected non-zero return value" ${th_rtrn_}
|
||||
assertTrue "${th_test_}; expected output to STDOUT" \
|
||||
"[ -s '${th_stdout_}' ]"
|
||||
assertFalse "${th_test_}; expected no output to STDERR" \
|
||||
"[ -s '${th_stderr_}' ]"
|
||||
[ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ] || \
|
||||
_th_showOutput ${SHUNIT_FALSE} "${th_stdout_}" "${th_stderr_}"
|
||||
assertNotEquals "${th_test_}: expected non-true return value" "${SHUNIT_TRUE}" "${th_rtrn_}"
|
||||
assertTrue "${th_test_}: expected output to STDOUT" "[ -s '${th_stdout_}' ]"
|
||||
assertFalse "${th_test_}: expected no output to STDERR" "[ -s '${th_stderr_}' ]"
|
||||
# shellcheck disable=SC2166
|
||||
if ! [ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ]; then
|
||||
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
|
||||
fi
|
||||
|
||||
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
|
||||
}
|
||||
|
@ -164,20 +174,19 @@ th_assertFalseWithOutput()
|
|||
# th_rtrn_: integer: the return value of the subtest performed
|
||||
# th_stdout_: string: filename where stdout was redirected to
|
||||
# th_stderr_: string: filename where stderr was redirected to
|
||||
th_assertFalseWithError()
|
||||
{
|
||||
th_assertFalseWithError() {
|
||||
th_test_=$1
|
||||
th_rtrn_=$2
|
||||
th_stdout_=$3
|
||||
th_stderr_=$4
|
||||
|
||||
assertFalse "${th_test_}; expected non-zero return value" ${th_rtrn_}
|
||||
assertFalse "${th_test_}; expected no output to STDOUT" \
|
||||
"[ -s '${th_stdout_}' ]"
|
||||
assertTrue "${th_test_}; expected output to STDERR" \
|
||||
"[ -s '${th_stderr_}' ]"
|
||||
[ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ] || \
|
||||
_th_showOutput ${SHUNIT_FALSE} "${th_stdout_}" "${th_stderr_}"
|
||||
assertFalse "${th_test_}: expected non-zero return value" "${th_rtrn_}"
|
||||
assertFalse "${th_test_}: expected no output to STDOUT" "[ -s '${th_stdout_}' ]"
|
||||
assertTrue "${th_test_}: expected output to STDERR" "[ -s '${th_stderr_}' ]"
|
||||
# shellcheck disable=SC2166
|
||||
if ! [ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ]; then
|
||||
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
|
||||
fi
|
||||
|
||||
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
|
||||
}
|
||||
|
@ -186,35 +195,38 @@ th_assertFalseWithError()
|
|||
# when a non-zero return value is encountered. To properly catch these values,
|
||||
# they are either written to disk, or recognized as an error the file is empty.
|
||||
th_clearReturn() { cp /dev/null "${returnF}"; }
|
||||
th_queryReturn()
|
||||
{
|
||||
th_queryReturn() {
|
||||
if [ -s "${returnF}" ]; then
|
||||
th_return=`cat "${returnF}"`
|
||||
else
|
||||
th_return=${SHUNIT_ERROR}
|
||||
fi
|
||||
export th_return
|
||||
}
|
||||
|
||||
# Providing external and internal calls to the showOutput helper function.
|
||||
th_showOutput() { _th_showOutput $@; }
|
||||
_th_showOutput()
|
||||
{
|
||||
_th_return_=$1
|
||||
_th_stdout_=$2
|
||||
_th_stderr_=$3
|
||||
th_showOutput() { _th_showOutput "$@"; }
|
||||
_th_showOutput() {
|
||||
if isSkipping; then
|
||||
return
|
||||
fi
|
||||
|
||||
isSkipping
|
||||
if [ $? -eq ${SHUNIT_FALSE} -a ${_th_return_} != ${SHUNIT_TRUE} ]; then
|
||||
_th_return_="${1:-${returnF}}"
|
||||
_th_stdout_="${2:-${stdoutF}}"
|
||||
_th_stderr_="${3:-${stderrF}}"
|
||||
|
||||
if [ "${_th_return_}" != "${SHUNIT_TRUE}" ]; then
|
||||
# shellcheck disable=SC2166
|
||||
if [ -n "${_th_stdout_}" -a -s "${_th_stdout_}" ]; then
|
||||
echo '>>> STDOUT' >&2
|
||||
cat "${_th_stdout_}" >&2
|
||||
echo '<<< STDOUT' >&2
|
||||
fi
|
||||
# shellcheck disable=SC2166
|
||||
if [ -n "${_th_stderr_}" -a -s "${_th_stderr_}" ]; then
|
||||
echo '>>> STDERR' >&2
|
||||
cat "${_th_stderr_}" >&2
|
||||
fi
|
||||
if [ -n "${_th_stdout_}" -o -n "${_th_stderr_}" ]; then
|
||||
echo '<<< end output' >&2
|
||||
echo '<<< STDERR' >&2
|
||||
fi
|
||||
fi
|
||||
|
||||
|
@ -222,7 +234,7 @@ _th_showOutput()
|
|||
}
|
||||
|
||||
#
|
||||
# main
|
||||
# Main.
|
||||
#
|
||||
|
||||
${TRACE} 'trace output enabled'
|
||||
|
|
|
@ -1,246 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit test for macros.
|
||||
|
||||
# load test helpers
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite tests
|
||||
#
|
||||
|
||||
testAssertEquals()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_EQUALS_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_EQUALS_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testAssertNotEquals()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_NOT_EQUALS_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_EQUALS_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_NOT_EQUALS_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_EQUALS_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testSame()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_SAME_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_SAME_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testNotSame()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_NOT_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_SAME_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_NOT_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_SAME_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testNull()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_NULL_} 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NULL_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_NULL_} '"some msg"' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NULL_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testNotNull()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_NOT_NULL_} '' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_NULL_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_NOT_NULL_} '"some msg"' '""' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_NOT_NULL_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stdoutF}" "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testAssertTrue()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_TRUE_} ${SHUNIT_FALSE} >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_TRUE_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
|
||||
( ${_ASSERT_TRUE_} '"some msg"' ${SHUNIT_FALSE} >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_TRUE_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testAssertFalse()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_ASSERT_FALSE_} ${SHUNIT_TRUE} >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_FALSE_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_ASSERT_FALSE_} '"some msg"' ${SHUNIT_TRUE} >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_ASSERT_FALSE_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testFail()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_FAIL_} >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_FAIL_} '"some msg"' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testFailNotEquals()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_FAIL_NOT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_NOT_EQUALS_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_FAIL_NOT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_NOT_EQUALS_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testFailSame()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_FAIL_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_SAME_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_FAIL_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_SAME_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testFailNotSame()
|
||||
{
|
||||
# start skipping if LINENO not available
|
||||
[ -z "${LINENO:-}" ] && startSkipping
|
||||
|
||||
( ${_FAIL_NOT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_NOT_SAME_ failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
|
||||
( ${_FAIL_NOT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
|
||||
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
|
||||
rtrn=$?
|
||||
assertTrue '_FAIL_NOT_SAME_ w/ msg failure' ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite functions
|
||||
#
|
||||
|
||||
oneTimeSetUp()
|
||||
{
|
||||
th_oneTimeSetUp
|
||||
}
|
||||
|
||||
# load and run shUnit2
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. ${TH_SHUNIT}
|
|
@ -1,160 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2008 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit tests of miscellaneous things
|
||||
|
||||
# load test helpers
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite tests
|
||||
#
|
||||
|
||||
# Note: the test script is prefixed with '#' chars so that shUnit2 does not
|
||||
# incorrectly interpret the embedded functions as real functions.
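A minimal sketch of that pattern (file name illustrative, not taken from this test): every embedded line is written with a leading '#', then the '#' is stripped with sed when the inner script is materialized, so shUnit2 never picks up the inner functions while sourcing this file.

sed 's/^#//' >"${SHUNIT_TMPDIR}/inner_test" <<EOF
#testExample() { assertTrue ${SHUNIT_TRUE}; }
#. ${TH_SHUNIT}
EOF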
|
||||
testUnboundVariable()
|
||||
{
|
||||
unittestF="${SHUNIT_TMPDIR}/unittest"
|
||||
sed 's/^#//' >"${unittestF}" <<EOF
|
||||
## treat unset variables as an error when performing parameter expansion
|
||||
#set -u
|
||||
#
|
||||
#boom() { x=\$1; } # this function goes boom if no parameters are passed!
|
||||
#test_boom()
|
||||
#{
|
||||
# assertEquals 1 1
|
||||
# boom # No parameter given
|
||||
# assertEquals 0 \$?
|
||||
#}
|
||||
#. ${TH_SHUNIT}
|
||||
EOF
|
||||
( exec ${SHUNIT_SHELL:-sh} "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
|
||||
assertFalse 'expected a non-zero exit value' $?
|
||||
grep '^ASSERT:Unknown failure' "${stdoutF}" >/dev/null
|
||||
assertTrue 'assert message was not generated' $?
|
||||
grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null
|
||||
assertTrue 'test count message was not generated' $?
|
||||
grep '^FAILED' "${stdoutF}" >/dev/null
|
||||
assertTrue 'failure message was not generated' $?
|
||||
}
|
||||
|
||||
testIssue7()
|
||||
{
|
||||
( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" )
|
||||
diff "${stdoutF}" - >/dev/null <<EOF
|
||||
ASSERT:Some message. expected:<1> but was:<2>
|
||||
EOF
|
||||
rtrn=$?
|
||||
assertEquals ${SHUNIT_TRUE} ${rtrn}
|
||||
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
|
||||
}
|
||||
|
||||
testPrepForSourcing()
|
||||
{
|
||||
assertEquals '/abc' `_shunit_prepForSourcing '/abc'`
|
||||
assertEquals './abc' `_shunit_prepForSourcing './abc'`
|
||||
assertEquals './abc' `_shunit_prepForSourcing 'abc'`
|
||||
}
|
||||
|
||||
testEscapeCharInStr()
|
||||
{
|
||||
actual=`_shunit_escapeCharInStr '\' ''`
|
||||
assertEquals '' "${actual}"
|
||||
assertEquals 'abc\\' `_shunit_escapeCharInStr '\' 'abc\'`
|
||||
assertEquals 'abc\\def' `_shunit_escapeCharInStr '\' 'abc\def'`
|
||||
assertEquals '\\def' `_shunit_escapeCharInStr '\' '\def'`
|
||||
|
||||
actual=`_shunit_escapeCharInStr '"' ''`
|
||||
assertEquals '' "${actual}"
|
||||
assertEquals 'abc\"' `_shunit_escapeCharInStr '"' 'abc"'`
|
||||
assertEquals 'abc\"def' `_shunit_escapeCharInStr '"' 'abc"def'`
|
||||
assertEquals '\"def' `_shunit_escapeCharInStr '"' '"def'`
|
||||
|
||||
actual=`_shunit_escapeCharInStr '$' ''`
|
||||
assertEquals '' "${actual}"
|
||||
assertEquals 'abc\$' `_shunit_escapeCharInStr '$' 'abc$'`
|
||||
assertEquals 'abc\$def' `_shunit_escapeCharInStr '$' 'abc$def'`
|
||||
assertEquals '\$def' `_shunit_escapeCharInStr '$' '$def'`
|
||||
|
||||
# actual=`_shunit_escapeCharInStr "'" ''`
|
||||
# assertEquals '' "${actual}"
|
||||
# assertEquals "abc\\'" `_shunit_escapeCharInStr "'" "abc'"`
|
||||
# assertEquals "abc\\'def" `_shunit_escapeCharInStr "'" "abc'def"`
|
||||
# assertEquals "\\'def" `_shunit_escapeCharInStr "'" "'def"`
|
||||
|
||||
# # must put the backtick in a variable so the shell doesn't misinterpret it
|
||||
# # while inside a backticked sequence (e.g. `echo '`'` would fail).
|
||||
# backtick='`'
|
||||
# actual=`_shunit_escapeCharInStr ${backtick} ''`
|
||||
# assertEquals '' "${actual}"
|
||||
# assertEquals '\`abc' \
|
||||
# `_shunit_escapeCharInStr "${backtick}" ${backtick}'abc'`
|
||||
# assertEquals 'abc\`' \
|
||||
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}`
|
||||
# assertEquals 'abc\`def' \
|
||||
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}'def'`
|
||||
}
|
||||
|
||||
testEscapeCharInStr_specialChars()
|
||||
{
|
||||
# make sure our forward slash doesn't upset sed
|
||||
assertEquals '/' `_shunit_escapeCharInStr '\' '/'`
|
||||
|
||||
# some shells escape these differently
|
||||
#assertEquals '\\a' `_shunit_escapeCharInStr '\' '\a'`
|
||||
#assertEquals '\\b' `_shunit_escapeCharInStr '\' '\b'`
|
||||
}
|
||||
|
||||
# Test the various ways of declaring functions.
|
||||
#
|
||||
# Prefixing (then stripping) with comment symbol so these functions aren't
|
||||
# treated as real functions by shUnit2.
|
||||
testExtractTestFunctions()
|
||||
{
|
||||
f="${SHUNIT_TMPDIR}/extract_test_functions"
|
||||
sed 's/^#//' <<EOF >"${f}"
|
||||
#testABC() { echo 'ABC'; }
|
||||
#test_def() {
|
||||
# echo 'def'
|
||||
#}
|
||||
#testG3 ()
|
||||
#{
|
||||
# echo 'G3'
|
||||
#}
|
||||
#function test4() { echo '4'; }
|
||||
# test5() { echo '5'; }
|
||||
#some_test_function() { echo 'some func'; }
|
||||
#func_with_test_vars() {
|
||||
# testVariable=1234
|
||||
#}
|
||||
EOF
|
||||
|
||||
actual=`_shunit_extractTestFunctions "${f}"`
|
||||
assertEquals 'testABC test_def testG3 test4 test5' "${actual}"
|
||||
}
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite functions
|
||||
#
|
||||
|
||||
setUp()
|
||||
{
|
||||
for f in ${expectedF} ${stdoutF} ${stderrF}; do
|
||||
cp /dev/null ${f}
|
||||
done
|
||||
}
|
||||
|
||||
oneTimeSetUp()
|
||||
{
|
||||
th_oneTimeSetUp
|
||||
}
|
||||
|
||||
# load and run shUnit2
|
||||
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
|
||||
. ${TH_SHUNIT}
|
|
@ -1,41 +0,0 @@
|
|||
#! /bin/sh
|
||||
# $Id$
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Copyright 2010 Kate Ward. All Rights Reserved.
|
||||
# Released under the LGPL (GNU Lesser General Public License)
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
#
|
||||
# shUnit2 unit test for standalone operation.
|
||||
#
|
||||
# This unit test is purely to test that calling shunit2 directly, while passing
|
||||
# the name of a unit test script, works. When run, this script determines if it
|
||||
# is running as a standalone program, and calls main() if it is.
|
||||
|
||||
ARGV0=`basename "$0"`
|
||||
|
||||
# load test helpers
|
||||
. ./shunit2_test_helpers
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# suite tests
|
||||
#
|
||||
|
||||
testStandalone()
|
||||
{
|
||||
assertTrue ${SHUNIT_TRUE}
|
||||
}
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# main
|
||||
#
|
||||
|
||||
main()
|
||||
{
|
||||
${TH_SHUNIT} "${ARGV0}"
|
||||
}
|
||||
|
||||
# are we running as a standalone?
|
||||
if [ "${ARGV0}" = 'shunit2_test_standalone.sh' ]; then
|
||||
if [ $# -gt 0 ]; then main "$@"; else main; fi
|
||||
fi
|
|
@ -0,0 +1,191 @@
|
|||
#! /bin/sh
|
||||
# vim:et:ft=sh:sts=2:sw=2
|
||||
#
|
||||
# Unit test suite runner.
|
||||
#
|
||||
# Copyright 2008-2020 Kate Ward. All Rights Reserved.
|
||||
# Released under the Apache 2.0 license.
|
||||
#
|
||||
# Author: kate.ward@forestent.com (Kate Ward)
|
||||
# https://github.com/kward/shlib
|
||||
#
|
||||
# This script runs all the unit tests that can be found, and generates a nice
|
||||
# report of the tests.
|
||||
#
|
||||
### Sample usage:
|
||||
#
|
||||
# Run all tests for all shells.
|
||||
# $ ./test_runner
|
||||
#
|
||||
# Run all tests for single shell.
|
||||
# $ ./test_runner -s /bin/bash
|
||||
#
|
||||
# Run single test for all shells.
|
||||
# $ ./test_runner -t shunit_asserts_test.sh
|
||||
#
|
||||
# Run single test for single shell.
|
||||
# $ ./test_runner -s /bin/bash -t shunit_asserts_test.sh
|
||||
#
|
||||
### ShellCheck (http://www.shellcheck.net/)
|
||||
# Disable source following.
|
||||
# shellcheck disable=SC1090,SC1091
|
||||
# expr may be antiquated, but it is the only solution in some cases.
|
||||
# shellcheck disable=SC2003
|
||||
# $() are not fully portable (POSIX != portable).
|
||||
# shellcheck disable=SC2006
|
||||
|
||||
# Return if test_runner already loaded.
|
||||
[ -z "${RUNNER_LOADED:-}" ] || return 0
|
||||
RUNNER_LOADED=0
|
||||
|
||||
RUNNER_ARGV0=`basename "$0"`
|
||||
RUNNER_SHELLS='/bin/sh ash /bin/bash /bin/dash /bin/ksh /bin/mksh /bin/zsh'
|
||||
RUNNER_TEST_SUFFIX='_test.sh'
|
||||
true; RUNNER_TRUE=$?
|
||||
false; RUNNER_FALSE=$?
|
||||
|
||||
runner_warn() { echo "runner:WARN $*" >&2; }
|
||||
runner_error() { echo "runner:ERROR $*" >&2; }
|
||||
runner_fatal() { echo "runner:FATAL $*" >&2; exit 1; }
|
||||
|
||||
runner_usage() {
|
||||
echo "usage: ${RUNNER_ARGV0} [-e key=val ...] [-s shell(s)] [-t test(s)]"
|
||||
}
|
||||
|
||||
_runner_tests() { echo ./*${RUNNER_TEST_SUFFIX} |sed 's#\./##g'; }
|
||||
_runner_testName() {
|
||||
# shellcheck disable=SC1117
|
||||
_runner_testName_=`expr "${1:-}" : "\(.*\)${RUNNER_TEST_SUFFIX}"`
|
||||
if [ -n "${_runner_testName_}" ]; then
|
||||
echo "${_runner_testName_}"
|
||||
else
|
||||
echo 'unknown'
|
||||
fi
|
||||
unset _runner_testName_
|
||||
}
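A quick usage sketch of the helper above (file names illustrative):

_runner_testName shunit_asserts_test.sh   # prints: shunit_asserts
_runner_testName notes.txt                # prints: unknown (suffix does not match)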
|
||||
|
||||
main() {
|
||||
# Find and load versions library.
|
||||
for _runner_dir_ in . ${LIB_DIR:-lib}; do
|
||||
if [ -r "${_runner_dir_}/versions" ]; then
|
||||
_runner_lib_dir_="${_runner_dir_}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
[ -n "${_runner_lib_dir_}" ] || runner_fatal 'Unable to find versions library.'
|
||||
. "${_runner_lib_dir_}/versions" || runner_fatal 'Unable to load versions library.'
|
||||
unset _runner_dir_ _runner_lib_dir_
|
||||
|
||||
# Process command line flags.
|
||||
env=''
|
||||
while getopts 'e:hs:t:' opt; do
|
||||
case ${opt} in
|
||||
e) # set an environment variable
|
||||
key=`expr "${OPTARG}" : '\([^=]*\)='`
|
||||
val=`expr "${OPTARG}" : '[^=]*=\(.*\)'`
|
||||
# shellcheck disable=SC2166
|
||||
if [ -z "${key}" -o -z "${val}" ]; then
|
||||
runner_usage
|
||||
exit 1
|
||||
fi
|
||||
eval "${key}='${val}'"
|
||||
eval "export ${key}"
|
||||
env="${env:+${env} }${key}"
|
||||
;;
|
||||
h) runner_usage; exit 0 ;; # help output
|
||||
s) shells=${OPTARG} ;; # list of shells to run
|
||||
t) tests=${OPTARG} ;; # list of tests to run
|
||||
*) runner_usage; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
shift "`expr ${OPTIND} - 1`"
|
||||
|
||||
# Fill shells and/or tests.
|
||||
shells=${shells:-${RUNNER_SHELLS}}
|
||||
[ -z "${tests}" ] && tests=`_runner_tests`
|
||||
|
||||
# Error checking.
|
||||
if [ -z "${tests}" ]; then
|
||||
runner_error 'no tests found to run; exiting'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cat <<EOF
|
||||
#------------------------------------------------------------------------------
|
||||
# System data.
|
||||
#
|
||||
|
||||
$ uname -mprsv
|
||||
`uname -mprsv`
|
||||
|
||||
OS Name: `versions_osName`
|
||||
OS Version: `versions_osVersion`
|
||||
|
||||
### Test run info.
|
||||
shells: ${shells}
|
||||
tests: ${tests}
|
||||
EOF
|
||||
for key in ${env}; do
|
||||
eval "echo \"${key}=\$${key}\""
|
||||
done
|
||||
|
||||
# Run tests.
|
||||
runner_passing_=${RUNNER_TRUE}
|
||||
for shell in ${shells}; do
|
||||
echo
|
||||
|
||||
cat <<EOF
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Running the test suite with ${shell}.
|
||||
#
|
||||
EOF
|
||||
|
||||
# Check for existence of shell.
|
||||
shell_bin=${shell}
|
||||
shell_name=''
|
||||
shell_present=${RUNNER_FALSE}
|
||||
case ${shell} in
|
||||
ash)
|
||||
shell_bin=`command -v busybox`
|
||||
[ $? -eq "${RUNNER_TRUE}" ] && shell_present="${RUNNER_TRUE}"
|
||||
shell_bin="${shell_bin:+${shell_bin} }ash"
|
||||
shell_name=${shell}
|
||||
;;
|
||||
*)
|
||||
[ -x "${shell_bin}" ] && shell_present="${RUNNER_TRUE}"
|
||||
shell_name=`basename "${shell}"`
|
||||
;;
|
||||
esac
|
||||
if [ "${shell_present}" -eq "${RUNNER_FALSE}" ]; then
|
||||
runner_warn "unable to run tests with the ${shell_name} shell"
|
||||
continue
|
||||
fi
|
||||
|
||||
shell_version=`versions_shellVersion "${shell}"`
|
||||
|
||||
echo "shell name: ${shell_name}"
|
||||
echo "shell version: ${shell_version}"
|
||||
|
||||
# Execute the tests.
|
||||
for t in ${tests}; do
|
||||
echo
|
||||
echo "--- Executing the '`_runner_testName "${t}"`' test suite. ---"
|
||||
# ${shell_bin} needs word splitting.
|
||||
# shellcheck disable=SC2086
|
||||
( exec ${shell_bin} "./${t}" 2>&1; )
|
||||
shell_passing=$?
|
||||
if [ "${shell_passing}" -ne "${RUNNER_TRUE}" ]; then
|
||||
runner_warn "${shell_bin} not passing"
|
||||
fi
|
||||
test "${runner_passing_}" -eq ${RUNNER_TRUE} -a ${shell_passing} -eq ${RUNNER_TRUE}
|
||||
runner_passing_=$?
|
||||
done
|
||||
done
|
||||
return ${runner_passing_}
|
||||
}
|
||||
|
||||
# Execute main() if this is run in standalone mode (i.e. not from a unit test).
|
||||
if [ -z "${SHUNIT_VERSION}" ]; then
|
||||
main "$@"
|
||||
fi
|
install.sh: 871 changed lines (file diff suppressed because it is too large)

osync-batch.sh: 229 changed lines
|
@ -1,9 +1,9 @@
|
|||
#!/usr/bin/env bash
|
||||
SUBPROGRAM=osync
|
||||
PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones
|
||||
AUTHOR="(L) 2013-2017 by Orsiris de Jong"
|
||||
AUTHOR="(L) 2013-2020 by Orsiris de Jong"
|
||||
CONTACT="http://www.netpower.fr - ozy@netpower.fr"
|
||||
PROGRAM_BUILD=2016120401
|
||||
PROGRAM_BUILD=2020031502
|
||||
|
||||
## Runs an osync /obackup instance for every conf file found
|
||||
## If an instance fails, run it again if time permits
|
||||
|
@ -26,37 +26,218 @@ else
|
|||
LOG_FILE=./$SUBPROGRAM-batch.log
|
||||
fi
|
||||
|
||||
## Default directory where to store temporary run files
|
||||
if [ -w /tmp ]; then
|
||||
RUN_DIR=/tmp
|
||||
elif [ -w /var/tmp ]; then
|
||||
RUN_DIR=/var/tmp
|
||||
else
|
||||
RUN_DIR=.
|
||||
fi
|
||||
# No need to edit under this line ##############################################################
|
||||
|
||||
function _logger {
|
||||
local value="${1}" # What to log
|
||||
echo -e "$value" >> "$LOG_FILE"
|
||||
#### RemoteLogger SUBSET ####
|
||||
|
||||
# Array to string converter, see http://stackoverflow.com/questions/1527049/bash-join-elements-of-an-array
|
||||
# usage: joinString separatorChar Array
|
||||
function joinString {
|
||||
local IFS="$1"; shift; echo "$*";
|
||||
}
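A minimal usage sketch (array name and contents illustrative):

callers=(Logger RemoteLogger main)
joinString , "${callers[@]}"   # prints: Logger,RemoteLogger,main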
|
||||
|
||||
function Logger {
|
||||
local value="${1}" # What to log
|
||||
local level="${2}" # Log level: DEBUG, NOTICE, WARN, ERROR, CRITICAL
|
||||
# Sub function of Logger
|
||||
function _Logger {
|
||||
local logValue="${1}" # Log to file
|
||||
local stdValue="${2}" # Log to screen
|
||||
local toStdErr="${3:-false}" # Log to stderr instead of stdout
|
||||
|
||||
prefix="$(date) - "
|
||||
if [ "$logValue" != "" ]; then
|
||||
echo -e "$logValue" >> "$LOG_FILE"
|
||||
|
||||
if [ "$level" == "CRITICAL" ]; then
|
||||
_logger "$prefix\e[41m$value\e[0m"
|
||||
elif [ "$level" == "ERROR" ]; then
|
||||
_logger "$prefix\e[91m$value\e[0m"
|
||||
elif [ "$level" == "WARN" ]; then
|
||||
_logger "$prefix\e[93m$value\e[0m"
|
||||
elif [ "$level" == "NOTICE" ]; then
|
||||
_logger "$prefix$value"
|
||||
elif [ "$level" == "DEBUG" ]; then
|
||||
if [ "$DEBUG" == "yes" ]; then
|
||||
_logger "$prefix$value"
|
||||
# Build current log file for alerts if we have a sufficient environment
|
||||
if [ "$_LOGGER_WRITE_PARTIAL_LOGS" == true ] && [ "$RUN_DIR/$PROGRAM" != "/" ]; then
|
||||
echo -e "$logValue" >> "$RUN_DIR/$PROGRAM._Logger.$SCRIPT_PID.$TSTAMP"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$stdValue" != "" ] && [ "$_LOGGER_SILENT" != true ]; then
|
||||
if [ $toStdErr == true ]; then
|
||||
# Force stderr color in subshell
|
||||
(>&2 echo -e "$stdValue")
|
||||
|
||||
else
|
||||
echo -e "$stdValue"
|
||||
fi
|
||||
else
|
||||
_logger "\e[41mLogger function called without proper loglevel.\e[0m"
|
||||
_logger "$prefix$value"
|
||||
fi
|
||||
}
|
||||
|
||||
# Remote logger similar to the Logger below, without log-to-file and alert flags
|
||||
function RemoteLogger {
|
||||
local value="${1}" # Sentence to log (in double quotes)
|
||||
local level="${2}" # Log level
|
||||
local retval="${3:-undef}" # optional return value of command
|
||||
|
||||
local prefix
|
||||
|
||||
if [ "$_LOGGER_PREFIX" == "time" ]; then
|
||||
prefix="RTIME: $SECONDS - "
|
||||
elif [ "$_LOGGER_PREFIX" == "date" ]; then
|
||||
prefix="R $(date) - "
|
||||
else
|
||||
prefix=""
|
||||
fi
|
||||
|
||||
if [ "$level" == "CRITICAL" ]; then
|
||||
_Logger "" "$prefix\e[1;33;41m$value\e[0m" true
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "ERROR" ]; then
|
||||
_Logger "" "$prefix\e[31m$value\e[0m" true
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "WARN" ]; then
|
||||
_Logger "" "$prefix\e[33m$value\e[0m" true
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "NOTICE" ]; then
|
||||
if [ "$_LOGGER_ERR_ONLY" != true ]; then
|
||||
_Logger "" "$prefix$value"
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "VERBOSE" ]; then
|
||||
if [ "$_LOGGER_VERBOSE" == true ]; then
|
||||
_Logger "" "$prefix$value"
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "ALWAYS" ]; then
|
||||
_Logger "" "$prefix$value"
|
||||
return
|
||||
elif [ "$level" == "DEBUG" ]; then
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
_Logger "" "$prefix$value"
|
||||
return
|
||||
fi
|
||||
else
|
||||
_Logger "" "\e[41mLogger function called without proper loglevel [$level].\e[0m" true
|
||||
_Logger "" "Value was: $prefix$value" true
|
||||
fi
|
||||
}
|
||||
#### RemoteLogger SUBSET END ####
|
||||
|
||||
# General log function with log levels:
|
||||
|
||||
# Environment variables
|
||||
# _LOGGER_SILENT: Disables any output to stdout & stderr
|
||||
# _LOGGER_ERR_ONLY: Disables any output to stdout except for ALWAYS loglevel
|
||||
# _LOGGER_VERBOSE: Allows VERBOSE loglevel messages to be sent to stdout
|
||||
|
||||
# Loglevels
|
||||
# Except for VERBOSE, all loglevels are ALWAYS sent to log file
|
||||
|
||||
# CRITICAL, ERROR, WARN sent to stderr, color depending on level, level also logged
|
||||
# NOTICE sent to stdout
|
||||
# VERBOSE sent to stdout if _LOGGER_VERBOSE=true
|
||||
# ALWAYS is sent to stdout unless _LOGGER_SILENT=true
|
||||
# DEBUG & PARANOIA_DEBUG are only sent to stdout if _DEBUG=true
|
||||
function Logger {
|
||||
local value="${1}" # Sentence to log (in double quotes)
|
||||
local level="${2}" # Log level
|
||||
local retval="${3:-undef}" # optional return value of command
|
||||
|
||||
local prefix
|
||||
|
||||
if [ "$_LOGGER_PREFIX" == "time" ]; then
|
||||
prefix="TIME: $SECONDS - "
|
||||
elif [ "$_LOGGER_PREFIX" == "date" ]; then
|
||||
prefix="$(date '+%Y-%m-%d %H:%M:%S') - "
|
||||
else
|
||||
prefix=""
|
||||
fi
|
||||
|
||||
## Obfuscate _REMOTE_TOKEN in logs (for ssh_filter usage only in osync and obackup)
|
||||
value="${value/env _REMOTE_TOKEN=$_REMOTE_TOKEN/env _REMOTE_TOKEN=__o_O__}"
|
||||
value="${value/env _REMOTE_TOKEN=\$_REMOTE_TOKEN/env _REMOTE_TOKEN=__o_O__}"
|
||||
|
||||
if [ "$level" == "CRITICAL" ]; then
|
||||
_Logger "$prefix($level):$value" "$prefix\e[1;33;41m$value\e[0m" true
|
||||
ERROR_ALERT=true
|
||||
# ERROR_ALERT / WARN_ALERT is not set in main when Logger is called from a subprocess. We need to create these flag files for ERROR_ALERT / WARN_ALERT to be picked up by Alert
|
||||
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP"
|
||||
return
|
||||
elif [ "$level" == "ERROR" ]; then
|
||||
_Logger "$prefix($level):$value" "$prefix\e[91m$value\e[0m" true
|
||||
ERROR_ALERT=true
|
||||
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP"
|
||||
return
|
||||
elif [ "$level" == "WARN" ]; then
|
||||
_Logger "$prefix($level):$value" "$prefix\e[33m$value\e[0m" true
|
||||
WARN_ALERT=true
|
||||
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.WARN_ALERT.$SCRIPT_PID.$TSTAMP"
|
||||
return
|
||||
elif [ "$level" == "NOTICE" ]; then
|
||||
if [ "$_LOGGER_ERR_ONLY" != true ]; then
|
||||
_Logger "$prefix$value" "$prefix$value"
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "VERBOSE" ]; then
|
||||
if [ "$_LOGGER_VERBOSE" == true ]; then
|
||||
_Logger "$prefix($level):$value" "$prefix$value"
|
||||
fi
|
||||
return
|
||||
elif [ "$level" == "ALWAYS" ]; then
|
||||
_Logger "$prefix$value" "$prefix$value"
|
||||
return
|
||||
elif [ "$level" == "DEBUG" ]; then
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
_Logger "$prefix$value" "$prefix$value"
|
||||
return
|
||||
fi
|
||||
else
|
||||
_Logger "\e[41mLogger function called without proper loglevel [$level].\e[0m" "\e[41mLogger function called without proper loglevel [$level].\e[0m" true
|
||||
_Logger "Value was: $prefix$value" "Value was: $prefix$value" true
|
||||
fi
|
||||
}
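A minimal usage sketch for the Logger above (messages illustrative); which stream each call reaches follows the loglevel rules documented before the function:

_LOGGER_VERBOSE=true
Logger "Starting batch run" "NOTICE"         # log file + stdout
Logger "Per-file details" "VERBOSE"          # log file + stdout, only because _LOGGER_VERBOSE=true
Logger "osync instance failed" "ERROR" "$?"  # log file + stderr, and writes an ERROR_ALERT flag file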
|
||||
function CleanUp {
|
||||
# Exit controlmaster before the socket gets deleted
|
||||
if [ "$SSH_CONTROLMASTER" == true ] && [ "$SSH_CMD" != "" ]; then
|
||||
$SSH_CMD -O exit
|
||||
fi
|
||||
|
||||
if [ "$_DEBUG" != true ]; then
|
||||
# Removing optional remote $RUN_DIR that goes into local $RUN_DIR
|
||||
if [ -d "$RUN_DIR/$PROGRAM.remote.$SCRIPT_PID.$TSTAMP" ]; then
|
||||
rm -rf "$RUN_DIR/$PROGRAM.remote.$SCRIPT_PID.$TSTAMP"
|
||||
fi
|
||||
# Removing all temporary run files
|
||||
rm -f "$RUN_DIR/$PROGRAM."*".$SCRIPT_PID.$TSTAMP"
|
||||
# Fix for sed -i requiring backup extension for BSD & Mac (see all sed -i statements)
|
||||
rm -f "$RUN_DIR/$PROGRAM."*".$SCRIPT_PID.$TSTAMP.tmp"
|
||||
fi
|
||||
}
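The SSH_CONTROLMASTER handling above relies on standard OpenSSH connection multiplexing; a rough sketch of what an $SSH_CMD built for it can look like (user, host and socket path are illustrative, not taken from this diff):

SSH_CMD="ssh -o ControlMaster=auto -o ControlPersist=yes -o ControlPath=/tmp/osync-ctrl-%r@%h:%p backupuser@remote.example.org"
$SSH_CMD true        # first call opens the master connection and creates the socket
$SSH_CMD -O check    # ask the master whether it is still alive
$SSH_CMD -O exit     # close the master and remove the socket, which is what CleanUp does above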
|
||||
|
||||
function GenericTrapQuit {
|
||||
local exitcode=0
|
||||
|
||||
# Get ERROR / WARN alert flags from subprocesses that call Logger
|
||||
if [ -f "$RUN_DIR/$PROGRAM.WARN_ALERT.$SCRIPT_PID.$TSTAMP" ]; then
|
||||
WARN_ALERT=true
|
||||
exitcode=2
|
||||
fi
|
||||
if [ -f "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP" ]; then
|
||||
ERROR_ALERT=true
|
||||
exitcode=1
|
||||
fi
|
||||
|
||||
CleanUp
|
||||
exit $exitcode
|
||||
}
|
||||
|
||||
|
||||
function CheckEnvironment {
|
||||
## osync / obackup executable full path can be set here if it cannot be found on the system
|
||||
if ! type $SUBPROGRAM.sh > /dev/null 2>&1
|
||||
|
@ -145,6 +326,8 @@ function Usage {
|
|||
exit 128
|
||||
}
|
||||
|
||||
trap GenericTrapQuit TERM EXIT HUP QUIT
|
||||
|
||||
opts=""
|
||||
for i in "$@"
|
||||
do
|
||||
|
|
|
@ -36,14 +36,14 @@ if [ ! -w $(dirname $pidfile) ]; then
|
|||
fi
|
||||
|
||||
start() {
|
||||
if [ ! -f $confdir/*.conf ]; then
|
||||
if ! ls "$confdir/"*.conf > /dev/null 2>&1; then
|
||||
echo "Cannot find any configuration files in $confdir."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
errno=0
|
||||
|
||||
for cfgfile in $confdir/*.conf
|
||||
for cfgfile in "$confdir/"*.conf
|
||||
do
|
||||
if [ -f $progpath/$progexec ]; then
|
||||
$progpath/$progexec $cfgfile --on-changes --errors-only > /dev/null 2>&1 &
|
||||
|
@ -60,7 +60,7 @@ start() {
|
|||
echo "$prog successfully started for configuration file $cfgfile"
|
||||
else
|
||||
echo "Cannot start $prog for configuration file $cfgfile"
|
||||
$errno = 1
|
||||
errno=1
|
||||
fi
|
||||
done
|
||||
|
||||
|
@ -106,7 +106,7 @@ status() {
|
|||
echo "$prog instance $(basename $pfile) is running (pid $(cat $pfile))"
|
||||
else
|
||||
echo "$prog instance $pfile (pid $(cat $pfile)) is dead but pidfile exists."
|
||||
$errno=1
|
||||
errno=1
|
||||
fi
|
||||
done
|
||||
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
#!/sbin/openrc-run
|
||||
#
|
||||
# PROGRAM="osync-srv"
|
||||
# SCRIPT_BUILD=2018051701
|
||||
# Script written by Brian Evans (grknight@gentoo.org) in 2018
|
||||
# How to use:
|
||||
#
|
||||
# 0) Rename this to osync-srv, and place it in /etc/init.d.
|
||||
# 1) Ensure that your config is located in /etc/osync, under some name with a
|
||||
# .conf extension.
|
||||
# 2) Ensure that osync.sh can be found in /usr/local/bin.
|
||||
# 3) Ensure that you have rsync and inotify-tools installed.
|
||||
# 4) Symlink this to a name with an extension equal to the basename of your
|
||||
# config file.
|
||||
# 5) Add to the default runlevel under the symlinked name.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# Suppose the config is located at /etc/osync/documents.conf. You then want to
|
||||
# symlink as follows:
|
||||
#
|
||||
# # ln -s /etc/init.d/osync-srv /etc/init.d/osync-srv.documents
|
||||
#
|
||||
# Then you can start the service as normal:
|
||||
#
|
||||
# # rc-update add osync-srv.documents default
|
||||
|
||||
depend() {
|
||||
use localmount chrony ntp-client
|
||||
}
|
||||
|
||||
description="Two way directory sync daemon"
|
||||
command=/usr/local/bin/osync.sh
|
||||
conffile="${RC_SVCNAME#*.}.conf"
|
||||
cfgfile="/etc/osync/${conffile}"
|
||||
command_args="${cfgfile} --on-changes --errors-only"
|
||||
command_background="yes"
|
||||
pidfile="/var/run/${RC_SVCNAME}"
|
||||
stopsig=TERM
|
||||
|
||||
start_pre() {
|
||||
if [ "${conffile}" = ".conf" ]; then
|
||||
eerror "${RC_SVCNAME} cannot be started directly. You must create"
|
||||
eerror "symbolic links to it for the configuration you want to start"
|
||||
eerror "osync on and add those to the appropriate runlevels."
|
||||
return 1
|
||||
fi
|
||||
|
||||
if ! [ -f "${cfgfile}" ]; then
|
||||
eerror "Cannot find configuration file ${cfgfile}."
|
||||
return 1
|
||||
fi
|
||||
}
|
|
@ -0,0 +1,139 @@
|
|||
#!/usr/bin/env bash
|
||||
#
|
||||
# osync-srv Two way directory sync daemon
|
||||
#
|
||||
# chkconfig: - 90 99
|
||||
# description: monitors a local directory and syncs to a local or remote \
|
||||
# directory on file changes
|
||||
# processname: /usr/local/bin/osync.sh
|
||||
# config: /etc/osync/*.conf
|
||||
# pidfile: /var/run/osync
|
||||
|
||||
### BEGIN INIT INFO
|
||||
# Provides: osync-target-helper-srv
|
||||
# Required-Start: $local_fs $time
|
||||
# Required-Stop: $local_fs $time
|
||||
# Default-Start: 2 3 4 5
|
||||
# Default-Stop: 0 1 6
|
||||
# Short-Description: osync-target-helper daemon
|
||||
# Description: Two way directory sync daemon
|
||||
### END INIT INFO
|
||||
|
||||
prog=osync
|
||||
progexec=osync.sh
|
||||
progpath=/usr/local/bin
|
||||
confdir=/etc/osync
|
||||
pidfile=/var/run/$prog-target-helper
|
||||
SCRIPT_BUILD=2018100101
|
||||
|
||||
if [ ! -f $progpath/$progexec ] && [ ! -f $progexec ]; then
|
||||
echo "Cannot find $prog executable in $progpath nor in local path."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -w $(dirname $pidfile) ]; then
|
||||
pidfile=./$prog
|
||||
fi
|
||||
|
||||
start() {
|
||||
if ! ls "$confdir/"*.conf > /dev/null 2>&1; then
|
||||
echo "Cannot find any configuration files in $confdir."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
errno=0
|
||||
|
||||
for cfgfile in "$confdir/"*.conf
|
||||
do
|
||||
if [ -f $progpath/$progexec ]; then
|
||||
$progpath/$progexec $cfgfile --on-changes-target --errors-only > /dev/null 2>&1 &
|
||||
else
|
||||
echo "Cannot find $prog executable in $progpath"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
pid=$!
|
||||
retval=$?
|
||||
|
||||
if [ $? == 0 ]; then
|
||||
echo $pid > "$pidfile-$(basename $cfgfile)"
|
||||
echo "$prog successfully started for configuration file $cfgfile"
|
||||
else
|
||||
echo "Cannot start $prog for configuration file $cfgfile"
|
||||
errno=1
|
||||
fi
|
||||
done
|
||||
|
||||
exit $errno
|
||||
}
|
||||
|
||||
stop() {
|
||||
if [ ! -f $pidfile-* ]; then
|
||||
echo "No running $prog instances found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
for pfile in $pidfile-*
|
||||
do
|
||||
if ps -p$(cat $pfile) > /dev/null 2>&1
|
||||
then
|
||||
kill -TERM $(cat $pfile)
|
||||
if [ $? == 0 ]; then
|
||||
rm -f $pfile
|
||||
echo "$prog instance $(basename $pfile) stopped."
|
||||
else
|
||||
echo "Cannot stop $prog instance $(basename $pfile)"
|
||||
fi
|
||||
else
|
||||
rm -f $pfile
|
||||
echo "$prog instance $pfile (pid $(cat $pfile)) is dead but pidfile exists."
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
status() {
|
||||
if [ ! -f $pidfile-* ]; then
|
||||
echo "Cannot find any running $prog instance."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
errno=0
|
||||
|
||||
for pfile in $pidfile-*
|
||||
do
|
||||
if ps -p$(cat $pfile) > /dev/null 2>&1
|
||||
then
|
||||
echo "$prog instance $(basename $pfile) is running (pid $(cat $pfile))"
|
||||
else
|
||||
echo "$prog instance $pfile (pid $(cat $pfile)) is dead but pidfile exists."
|
||||
errno=1
|
||||
fi
|
||||
done
|
||||
|
||||
exit $errno
|
||||
}
|
||||
|
||||
case "$1" in
|
||||
start)
|
||||
start
|
||||
;;
|
||||
stop)
|
||||
stop
|
||||
;;
|
||||
restart)
|
||||
stop
|
||||
start
|
||||
;;
|
||||
status)
|
||||
status
|
||||
;;
|
||||
condrestart|try-restart)
|
||||
status || exit 0
|
||||
restart
|
||||
;;
|
||||
*)
|
||||
echo "Usage: $0 {start|stop|restart|status}"
|
||||
;;
|
||||
esac
|
||||
|
||||
exit 0
|
|
@ -0,0 +1,13 @@
|
|||
[Unit]
|
||||
Description=osync - Target helper service
|
||||
After=time-sync.target local-fs.target network-online.target
|
||||
Requires=time-sync.target local-fs.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/osync.sh /etc/osync/%i --on-changes-target --errors-only
|
||||
SuccessExitStatus=0 2
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=A robust two way (bidirectional) file sync script based on rsync with fault tolerance
|
||||
After=time-sync.target local-fs.target network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/osync.sh /etc/osync/%i --on-changes-target --silent
|
||||
SuccessExitStatus=0 2
|
||||
[Install]
|
||||
WantedBy=multi-user.target
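Both unit files above are systemd templates (%i expands to the instance name, i.e. the config file name under /etc/osync). Assuming they are installed under template names such as osync-srv@.service, an instance for /etc/osync/documents.conf would be enabled with (instance name illustrative):

# systemctl enable --now osync-srv@documents.conf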
|
|
@ -1,25 +1,113 @@
|
|||
#!/bin/bash
|
||||
|
||||
git clone git+ssh://aur@aur.archlinux.org/osync.git osync.aur &&
|
||||
cd "osync.aur" &&
|
||||
srcdir="." &&
|
||||
source "PKGBUILD" &&
|
||||
HELPTEXT=\
|
||||
"Usage: $0 [OPTIONS]\n"\
|
||||
"Automatically updates the osync version in the AUR.\n"\
|
||||
"\n"\
|
||||
"-y, --yes Do not prompt before committing\n"\
|
||||
"-n, --name=USERNAME Username to use with git in case no global username is set\n"\
|
||||
"-e, --email=EMAIL Email address to use with git in case no global email is set"
|
||||
|
||||
url=$(echo -n ${source[0]} | sed 's/git+//g' | sed 's/#.*//g') &&
|
||||
branch=$(echo -n ${source[0]} | sed 's/.*#branch=//g') &&
|
||||
git clone -b $branch $url &&
|
||||
function cleanup {
|
||||
echo "Cleanup..."
|
||||
cd ..
|
||||
rm -rf osync.aur
|
||||
}
|
||||
|
||||
# Get pkgver from current osync
|
||||
pkgver=$(grep PROGRAM_VERSION= ./osync/osync.sh)
|
||||
pkgver=${pkgver##*=}
|
||||
echo $pkgver
|
||||
# Check getopt compatibility
|
||||
getopt --test > /dev/null
|
||||
if [[ $? -ne 4 ]]; then
|
||||
echo "You don't seem to have the GNU-enhanced getopt available. That shouldn't happen on a modern system with bash installed."
|
||||
exit 38
|
||||
fi
|
||||
|
||||
sed -i "s/pkgver=.*/pkgver=$(pkgver)/g" "PKGBUILD" &&
|
||||
# Parse command line arguments
|
||||
OPTIONS=hyn:e:
|
||||
LONGOPTIONS=help,yes,name:,email:
|
||||
|
||||
PARSED=$(getopt --options=$OPTIONS --longoptions=$LONGOPTIONS --name "$0" -- "$@")
|
||||
if [[ $? -ne 0 ]]; then
|
||||
exit 22
|
||||
fi
|
||||
eval set -- "$PARSED"
|
||||
|
||||
while true; do
|
||||
case "$1" in
|
||||
-h|--help)
|
||||
echo -e "$HELPTEXT"
|
||||
exit 0
|
||||
;;
|
||||
-y|--yes)
|
||||
yes="y"
|
||||
shift
|
||||
;;
|
||||
-n|--name)
|
||||
name="$2"
|
||||
shift 2
|
||||
;;
|
||||
-e|--email)
|
||||
email="$2"
|
||||
shift 2
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
break
|
||||
;;
|
||||
*)
|
||||
echo "Programming error" > /dev/stderr
|
||||
exit 131
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [[ -z $name ]];then
|
||||
name=$(git config --global user.name)
|
||||
if [[ -z $name ]]; then
|
||||
echo "Please specify a username for the git commit with the -n|--name option or set it globally with 'git config --global user.name USERNAME"
|
||||
exit 22
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z $email ]];then
|
||||
email=$(git config --global user.email)
|
||||
if [[ -z $email ]]; then
|
||||
echo "Please specify an e-mail for the git commit with the -e|--email option or set it globally with 'git config --global user.email EMAIL"
|
||||
exit 22
|
||||
fi
|
||||
fi
|
||||
|
||||
### Main ###
|
||||
|
||||
echo "Cloning AUR package..."
|
||||
if ! git clone -q git+ssh://aur@aur.archlinux.org/osync.git osync.aur || ! cd osync.aur; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git config user.name "$name"
|
||||
git config user.email "$email"
|
||||
|
||||
echo "Cloning most recent stable version of osync..." &&
|
||||
git clone -qb stable https://github.com/deajan/osync.git > /dev/null &&
|
||||
|
||||
echo "Fetching version..." &&
|
||||
cd osync &&
|
||||
pkgversion="$(git describe --tags --long | sed 's/\([^-]*-\)g/r\1/;s/-/./g')" &&
|
||||
cd .. &&
|
||||
|
||||
echo "Updating version..." &&
|
||||
sed -i "s/pkgver=.*/pkgver=${pkgversion}/g" "PKGBUILD" &&
|
||||
../mksrcinfo &&
|
||||
rm -rf "osync" &&
|
||||
git add . &&
|
||||
git commit -m "Updated version" &&
|
||||
git push origin master &&
|
||||
cd .. &&
|
||||
rm -rf "osync.aur" &&
|
||||
echo "Package updated successfully"
|
||||
|
||||
[[ ! -z $yes ]] || (read -p "About to commit changes to AUR. Are you sure? (y/n) " -n 1 -r && echo "" &&
|
||||
[[ $REPLY =~ ^[Yy]$ ]]) &&
|
||||
|
||||
echo "Committing changes to AUR..." &&
|
||||
git add PKGBUILD .SRCINFO &&
|
||||
git commit -qm "Updated version to ${pkgversion}" &&
|
||||
git push -q origin master &&
|
||||
|
||||
cleanup &&
|
||||
echo "Package updated successfully to version ${pkgversion}" || cleanup
|
||||
|
||||
exit 0
|
||||
|
|
|
@ -9,13 +9,13 @@
|
|||
##### Any other command will return a "syntax error"
|
||||
##### For details, see ssh_filter.log
|
||||
|
||||
SCRIPT_BUILD=2017020802
|
||||
# BUILD=2017020802
|
||||
|
||||
## Allow sudo
|
||||
SUDO_EXEC=yes
|
||||
SUDO_EXEC=true
|
||||
|
||||
## Log all valid commands too
|
||||
_DEBUG=no
|
||||
_DEBUG=false
|
||||
|
||||
## Set remote token in authorized_keys
|
||||
if [ "$1" != "" ]; then
|
||||
|
@ -25,12 +25,12 @@ fi
|
|||
LOG_FILE="${HOME}/.ssh/ssh_filter.log"
|
||||
|
||||
function Log {
|
||||
DATE=$(date)
|
||||
DATE="$(date)"
|
||||
echo "$DATE - $1" >> "$LOG_FILE"
|
||||
}
|
||||
|
||||
function Go {
|
||||
if [ "$_DEBUG" == "yes" ]; then
|
||||
if [ "$_DEBUG" == true ]; then
|
||||
Log "Executing [$SSH_ORIGINAL_COMMAND]."
|
||||
fi
|
||||
eval "$SSH_ORIGINAL_COMMAND"
|
||||
|
@ -38,7 +38,7 @@ function Go {
|
|||
|
||||
case "${SSH_ORIGINAL_COMMAND}" in
|
||||
*"env _REMOTE_TOKEN=$_REMOTE_TOKEN"*)
|
||||
if [ "$SUDO_EXEC" != "yes" ] && [[ $SSH_ORIGINAL_COMMAND == *"sudo "* ]]; then
|
||||
if [ "$SUDO_EXEC" != true ] && [[ $SSH_ORIGINAL_COMMAND == *"sudo "* ]]; then
|
||||
Log "Command [$SSH_ORIGINAL_COMMAND] contains sudo which is not allowed."
|
||||
echo "Syntax error unexpected end of file"
|
||||
exit 1
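The token match above assumes the remote account's authorized_keys forces every incoming command through this filter; a sketch of such an entry (path, key material and token are illustrative, the token must equal _REMOTE_TOKEN from the osync configuration):

command="/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9" ssh-rsa AAAA...truncated... backupuser@initiator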
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
###### osync - Rsync based two way sync engine with fault tolerance
|
||||
###### (C) 2013-2017 by Orsiris de Jong (www.netpower.fr)
|
||||
###### osync v1.1x / v1.2x config file rev 2017020801
|
||||
###### (C) 2013-2023 by Orsiris de Jong (www.netpower.fr)
|
||||
|
||||
## ---------- GENERAL OPTIONS
|
||||
[GENERAL]
|
||||
CONFIG_FILE_REVISION=1.3.0
|
||||
|
||||
## Sync job identification
|
||||
INSTANCE_ID="sync_test"
|
||||
|
@ -23,11 +21,14 @@ SSH_RSA_PRIVATE_KEY="/home/backupuser/.ssh/id_rsa"
|
|||
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
|
||||
SSH_PASSWORD_FILE=""
|
||||
|
||||
## use the KRB5 credential cache to access SSH or rsync
|
||||
#KRB5=true
|
||||
|
||||
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
|
||||
_REMOTE_TOKEN=SomeAlphaNumericToken9
|
||||
|
||||
## Create sync directories if they do not exist
|
||||
CREATE_DIRS=no
|
||||
## Create sync directories if they do not exist (true/false)
|
||||
CREATE_DIRS=true
|
||||
|
||||
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
|
||||
LOGFILE=""
|
||||
|
@ -39,7 +40,7 @@ MINIMUM_SPACE=10240
|
|||
BANDWIDTH=0
|
||||
|
||||
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
|
||||
SUDO_EXEC=no
|
||||
SUDO_EXEC=false
|
||||
## Paranoia option. Don't change this unless you read the documentation.
|
||||
RSYNC_EXECUTABLE=rsync
|
||||
## Remote rsync executable path. Leave this empty in most cases
|
||||
|
@ -64,51 +65,71 @@ RSYNC_EXCLUDE_FROM=""
|
|||
## List elements separator char. You may set an alternative separator char for your directories lists above.
|
||||
PATH_SEPARATOR_CHAR=";"
|
||||
|
||||
## ---------- REMOTE SYNC OPTIONS
|
||||
## By default, osync stores its state into the replica_path/.osync_workdir/state
|
||||
## This behavior can be changed for initiator or slave by overriding the following with an absolute path to a statedir, ex /opt/osync_state/initiator
|
||||
## If osync runs locally, initiator and target state dirs **must** be different
|
||||
INITIATOR_CUSTOM_STATE_DIR=""
|
||||
TARGET_CUSTOM_STATE_DIR=""
|
||||
|
||||
## ssh compression should be used unless your remote connection is good enough (LAN)
|
||||
SSH_COMPRESSION=yes
|
||||
[REMOTE_OPTIONS]
|
||||
|
||||
## ssh compression should be used on WAN links, unless your remote connection is good enough (LAN), in which case it would slow things down
|
||||
SSH_COMPRESSION=false
|
||||
|
||||
## Optional ssh options. Example to lower CPU usage on ssh compression, one can specify '-T -c arcfour -o Compression=no -x'
|
||||
## -T = turn off pseudo-tty, -c arcfour = weakest but fastest ssh encryption (destination must accept "Ciphers arcfour" in sshd_config), -x turns off X11 forwarding
|
||||
## arcfour isn't accepted on most newer systems, you may then prefer any AES encryption if processor has aes-ni hardware acceleration
|
||||
## If the system does not provide hardware assisted acceleration, chacha20-poly1305@openssh.com is a good cipher to select
|
||||
## See: https://wiki.csnu.org/index.php/SSH_ciphers_speed_comparison
|
||||
## -o Compression=no is already handled by SSH_COMPRESSION option
|
||||
## Uncomment the following line to use those optimizations, on secured links only
|
||||
#SSH_OPTIONAL_ARGS="-T -c aes128-ctr -x"
|
||||
#SSH_OPTIONAL_ARGS="-T -c chacha20-poly1305@openssh.com -x"
|
||||
|
||||
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
|
||||
SSH_IGNORE_KNOWN_HOSTS=no
|
||||
SSH_IGNORE_KNOWN_HOSTS=false
|
||||
|
||||
## Use a single TCP connection for all SSH calls. Will make remote sync faster, but may not work as well on lossy links.
|
||||
SSH_CONTROLMASTER=false
|
||||
|
||||
## Check for connectivity to remote host before launching remote sync task. Be sure the hosts responds to ping. Failing to ping will stop sync.
|
||||
REMOTE_HOST_PING=no
|
||||
REMOTE_HOST_PING=false
|
||||
|
||||
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
|
||||
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
|
||||
## Be aware some DNS like opendns redirect false hostnames. Also, this adds an extra execution time of a bit less than a minute.
|
||||
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
|
||||
|
||||
## ---------- MISC OPTIONS
|
||||
[MISC_OPTIONS]
|
||||
|
||||
## Optional arguments passed to rsync executable. The following are already managed by the program and should never be passed here
|
||||
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz –skip-compress –checksum –bwlimit –partial –partial-dir –no-whole-file –whole-file –backup –backup-dir –suffix
|
||||
## -r -l -p -t -g -o -D -E -u -i -n --executability -A -X -L -K -H -8 -zz --skip-compress --checksum --bwlimit --partial --partial-dir --no-whole-file --whole-file --backup --backup-dir --suffix
|
||||
## --exclude --exclude-from --include --include-from --list-only --stats
|
||||
## When dealing with different filesystems for sync, or using SMB mountpoints, try adding --modify-window=2 --omit-dir-times as optional arguments.
|
||||
RSYNC_OPTIONAL_ARGS=""
|
||||
|
||||
## Preserve basic linux permissions
|
||||
PRESERVE_PERMISSIONS=yes
|
||||
PRESERVE_OWNER=yes
|
||||
PRESERVE_GROUP=yes
|
||||
PRESERVE_PERMISSIONS=true
|
||||
PRESERVE_OWNER=true
|
||||
PRESERVE_GROUP=true
|
||||
## On MACOS X, does not work and will be ignored
|
||||
PRESERVE_EXECUTABILITY=yes
|
||||
PRESERVE_EXECUTABILITY=true
|
||||
|
||||
## Preserve ACLS. Make sure source and target FS can handle ACL. Disabled on Mac OSX.
|
||||
PRESERVE_ACL=no
|
||||
PRESERVE_ACL=false
|
||||
## Preserve Xattr. Make sure source and target FS can manage identical XATTRS. Disabled on Mac OSX. Apparently, prior to rsync v3.1.2 there are some performance caveats with transferring XATTRS.
|
||||
PRESERVE_XATTR=no
|
||||
PRESERVE_XATTR=false
|
||||
## Transforms symlinks into their referent files/dirs. Be careful: symlinks without a referent will break the sync, just as standard files that cannot be copied would.
|
||||
COPY_SYMLINKS=no
|
||||
COPY_SYMLINKS=false
|
||||
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
|
||||
KEEP_DIRLINKS=no
|
||||
KEEP_DIRLINKS=false
|
||||
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
|
||||
PRESERVE_HARDLINKS=no
|
||||
PRESERVE_HARDLINKS=false
|
||||
## Do a full checksum on all files that have identical sizes to check whether they actually are identical. This can take a long time.
|
||||
CHECKSUM=no
|
||||
CHECKSUM=false
|
||||
|
||||
## Let RSYNC compress file transfers. Do not use this if both initator and target replicas are on local system. Also, do not use this if you already enabled SSH compression.
|
||||
RSYNC_COMPRESS=yes
|
||||
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
|
||||
RSYNC_COMPRESS=true
|
||||
|
||||
## Maximum execution time (in seconds) for sync process. Setting these values to zero disables the max execution times.
|
||||
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
|
||||
|
@ -125,48 +146,57 @@ MIN_WAIT=60
|
|||
## Use 0 to wait indefinitely.
|
||||
MAX_WAIT=7200
|
||||
|
||||
## ---------- BACKUP AND DELETION OPTIONS
|
||||
[BACKUP_DELETE_OPTIONS]
|
||||
|
||||
## Log a list of conflictual files (EXPERIMENTAL)
|
||||
LOG_CONFLICTS=false
|
||||
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
|
||||
ALERT_CONFLICTS=false
|
||||
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
|
||||
CONFLICT_BACKUP=yes
|
||||
CONFLICT_BACKUP=true
|
||||
## Keep multiple backup versions of the same file. Warning, This can be very space consuming.
|
||||
CONFLICT_BACKUP_MULTIPLE=no
|
||||
CONFLICT_BACKUP_MULTIPLE=false
|
||||
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
|
||||
CONFLICT_BACKUP_DAYS=30
|
||||
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winning replica.
|
||||
CONFLICT_PREVALANCE=initiator
|
||||
|
||||
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
|
||||
SOFT_DELETE=yes
|
||||
SOFT_DELETE=true
|
||||
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
|
||||
SOFT_DELETE_DAYS=30
|
||||
|
||||
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
|
||||
SKIP_DELETION=
|
||||
|
||||
## ---------- RESUME OPTIONS
|
||||
## Optional sync type. By default, osync is bidirectional. You may want to use osync as unidirectional sync in some circumstances. Valid values are "initiator2target" or "target2initiator"
|
||||
SYNC_TYPE=
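## For example (illustrative values), a one-way initiator-to-target sync that skips
## deletion propagation on the target would use:
## SKIP_DELETION=target
## SYNC_TYPE=initiator2target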
|
||||
|
||||
[RESUME_OPTIONS]
|
||||
|
||||
## Try to resume an aborted sync task
|
||||
RESUME_SYNC=yes
|
||||
RESUME_SYNC=true
|
||||
## Maximum number of resume tries before initiating a fresh sync.
|
||||
RESUME_TRY=2
|
||||
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
|
||||
FORCE_STRANGER_LOCK_RESUME=no
|
||||
FORCE_STRANGER_LOCK_RESUME=false
|
||||
|
||||
## Keep partial uploads that can be resumed on next run, experimental feature
|
||||
PARTIAL=no
|
||||
PARTIAL=false
|
||||
|
||||
## Use delta copy algortithm (usefull when local paths are network drives), defaults to yes
|
||||
DELTA_COPIES=yes
|
||||
|
||||
## ---------- ALERT OPTIONS
|
||||
## Use delta copy algorithm (useful when local paths are network drives), defaults to true
|
||||
DELTA_COPIES=true
|
||||
|
||||
[ALERT_OPTIONS]
|
||||
## List of alert mails separated by spaces
|
||||
## Most Unix systems (including Win10 bash) have mail support out of the box
|
||||
## Just make sure that the current user has enough privileges to use mail / mutt / sendmail and that the mail system is configured to allow outgoing mails
|
||||
## on pfSense platform, smtp support needs to be configured in System > Advanced > Notifications
|
||||
DESTINATION_MAILS="your@alert.tld"
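## e.g. several recipients separated by spaces (addresses illustrative):
## DESTINATION_MAILS="admin@example.org oncall@example.org"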
|
||||
|
||||
## By default, only sync warnings / errors are sent by mail. This default behavior can be overridden here
|
||||
ALWAYS_SEND_MAILS=false
|
||||
|
||||
## Optional change of mail body encoding (using iconv)
|
||||
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
|
||||
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
|
||||
|
@ -175,7 +205,6 @@ MAIL_BODY_CHARSET=""
|
|||
## Additional mail parameters needed for Android / Busybox / Cygwin / MSYS
|
||||
## Android & Busybox use sendmail (and openssl if encryption is needed)
|
||||
## MSYS & Cygwin Windows mail support relies on mailsend.exe from muquit, http://github.com/muquit/mailsend which needs to be in %PATH% environment variable
|
||||
|
||||
SENDER_MAIL="alert@your.system.tld"
|
||||
SMTP_SERVER=smtp.your.isp.tld
|
||||
SMTP_PORT=25
|
||||
|
@ -184,9 +213,9 @@ SMTP_ENCRYPTION=none
|
|||
SMTP_USER=
|
||||
SMTP_PASSWORD=
|
||||
|
||||
## ---------- EXECUTION HOOKS
|
||||
[EXECUTION_HOOKS]
|
||||
|
||||
## Commands can will be run before and / or after sync process (remote execution will only happen if REMOTE_OPERATION is set).
|
||||
## Commands will be run before and / or after the sync process
|
||||
LOCAL_RUN_BEFORE_CMD=""
|
||||
LOCAL_RUN_AFTER_CMD=""
|
||||
|
||||
|
@ -197,8 +226,8 @@ REMOTE_RUN_AFTER_CMD=""
|
|||
MAX_EXEC_TIME_PER_CMD_BEFORE=0
|
||||
MAX_EXEC_TIME_PER_CMD_AFTER=0
|
||||
|
||||
## Stops osync execution if one of the above commands fail
|
||||
STOP_ON_CMD_ERROR=yes
|
||||
## Stops osync execution if one of the above 'before' commands fails
|
||||
STOP_ON_CMD_ERROR=true
|
||||
|
||||
## Run local and remote after sync commands even on failure
|
||||
RUN_AFTER_CMD_ON_ERROR=no
|
||||
RUN_AFTER_CMD_ON_ERROR=false
|
||||
|
|
|
@ -0,0 +1,102 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
###### osync - Rsync based two way sync engine with fault tolerance
|
||||
###### (C) 2013-2020 by Orsiris de Jong (www.netpower.fr)
|
||||
|
||||
[GENERAL]
|
||||
CONFIG_FILE_REVISION=1.3.0
|
||||
|
||||
## Sync job identification
|
||||
INSTANCE_ID="target_test"
|
||||
|
||||
## Directories to synchronize.
|
||||
## Initiator is the system osync runs on. The initiator directory must be a local path.
|
||||
INITIATOR_SYNC_DIR="/home/git/osync/dir1"
|
||||
#INITIATOR_SYNC_DIR="ssh://backupuser@yourhost.old:22//home/git/osync/dir1"
|
||||
|
||||
## Target is the system osync synchronizes to (can be the same system as the initiator in case of local sync tasks). The target directory can be a local or remote path.
|
||||
TARGET_SYNC_DIR="/home/git/osync/dir2"
|
||||
|
||||
## If the target system is remote, you can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
|
||||
SSH_RSA_PRIVATE_KEY="/home/backupuser/.ssh/id_rsa"
|
||||
|
||||
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
|
||||
SSH_PASSWORD_FILE=""
|
||||
|
||||
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
|
||||
_REMOTE_TOKEN=SomeAlphaNumericToken9
|
||||
|
||||
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
|
||||
LOGFILE=""
|
||||
|
||||
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
|
||||
SUDO_EXEC=false
|
||||
|
||||
## ---------- REMOTE SYNC OPTIONS
|
||||
|
||||
## ssh compression should be used unless your remote connection is good enough (LAN)
|
||||
SSH_COMPRESSION=true
|
||||
|
||||
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
|
||||
SSH_IGNORE_KNOWN_HOSTS=false
|
||||
|
||||
## Check for connectivity to remote host before launching remote sync task. Be sure the hosts responds to ping. Failing to ping will stop sync.
|
||||
REMOTE_HOST_PING=false
|
||||
|
||||
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
|
||||
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
|
||||
## Be aware some DNS like opendns redirect false hostnames. Also, this adds an extra execution time of a bit less than a minute.
|
||||
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
|
||||
|
||||
## Log a message every KEEP_LOGGING seconds just to know the task is still alive
|
||||
KEEP_LOGGING=1801
|
||||
|
||||
## Minimum time (in seconds) in file monitor / daemon mode between modification detection and sync task, in order to let copy operations finish.
|
||||
MIN_WAIT=60
|
||||
|
||||
## Maximum time (in seconds) waiting in file monitor / daemon mode. After this time, sync is run.
|
||||
## Use 0 to wait indefinitely.
|
||||
MAX_WAIT=7200
|
||||
|
||||
[ALERT_OPTIONS]
|
||||
|
||||
## List of alert mails separated by spaces
|
||||
## Most Unix systems (including Win10 bash) have mail support out of the box
|
||||
## Just make sure that the current user has enough privileges to use mail / mutt / sendmail and that the mail system is configured to allow outgoing mails
|
||||
## on pfSense platform, smtp support needs to be configured in System > Advanced > Notifications
|
||||
DESTINATION_MAILS="your@alert.tld"
|
||||
|
||||
## Optional change of mail body encoding (using iconv)
|
||||
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
|
||||
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
|
||||
MAIL_BODY_CHARSET=""
|
||||
|
||||
## Additional mail parameters needed for Android / Busybox / Cygwin / MSYS
|
||||
## Android & Busybox use sendmail (and openssl if encryption is needed)
|
||||
## MSYS & Cygwin Windows mail support relies on mailsend.exe from muquit, http://github.com/muquit/mailsend which needs to be in %PATH% environment variable
|
||||
SENDER_MAIL="alert@your.system.tld"
|
||||
SMTP_SERVER=smtp.your.isp.tld
|
||||
SMTP_PORT=25
|
||||
# encryption can be tls, ssl or none
|
||||
SMTP_ENCRYPTION=none
|
||||
SMTP_USER=
|
||||
SMTP_PASSWORD=
|
||||
|
||||
[EXECUTION_HOOKS]
|
||||
|
||||
## Commands will be run before and / or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
|
||||
LOCAL_RUN_BEFORE_CMD=""
|
||||
LOCAL_RUN_AFTER_CMD=""
|
||||
|
||||
REMOTE_RUN_BEFORE_CMD=""
|
||||
REMOTE_RUN_AFTER_CMD=""
|
||||
|
||||
## Max execution time of commands before they get force killed. Leave 0 if you don't want this to happen. Time is specified in seconds.
|
||||
MAX_EXEC_TIME_PER_CMD_BEFORE=0
|
||||
MAX_EXEC_TIME_PER_CMD_AFTER=0
|
||||
|
||||
## Stops osync execution if one of the above commands fails
|
||||
STOP_ON_CMD_ERROR=true
|
||||
|
||||
## Run local and remote after sync commands even on failure
|
||||
RUN_AFTER_CMD_ON_ERROR=false
|
|
@ -2,12 +2,12 @@
|
|||
|
||||
PROGRAM="osync instance upgrade script"
|
||||
SUBPROGRAM="osync"
|
||||
AUTHOR="(C) 2016-2017 by Orsiris de Jong"
|
||||
AUTHOR="(C) 2016-2020 by Orsiris de Jong"
|
||||
CONTACT="http://www.netpower.fr/osync - ozy@netpower.fr"
|
||||
OLD_PROGRAM_VERSION="v1.0x-v1.1x"
|
||||
NEW_PROGRAM_VERSION="v1.2x"
|
||||
CONFIG_FILE_VERSION=2017020801
|
||||
PROGRAM_BUILD=2016121101
|
||||
OLD_PROGRAM_VERSION="v1.0x-v1.2x"
|
||||
NEW_PROGRAM_VERSION="v1.3x"
|
||||
CONFIG_FILE_REVISION=1.3.0
|
||||
PROGRAM_BUILD=2020012201
|
||||
|
||||
## type -p does not work on platforms other than linux (bash). If it does not work, always assume the output is not a zero exit code
|
||||
if ! type "$BASH" > /dev/null; then
|
||||
|
@ -41,6 +41,7 @@ RSYNC_EXCLUDE_FROM
|
|||
PATH_SEPARATOR_CHAR
|
||||
SSH_COMPRESSION
|
||||
SSH_IGNORE_KNOWN_HOSTS
|
||||
SSH_CONTROLMASTER
|
||||
REMOTE_HOST_PING
|
||||
REMOTE_3RD_PARTY_HOSTS
|
||||
RSYNC_OPTIONAL_ARGS
|
||||
|
@ -60,6 +61,8 @@ HARD_MAX_EXEC_TIME
|
|||
KEEP_LOGGING
|
||||
MIN_WAIT
|
||||
MAX_WAIT
|
||||
LOG_CONFLICTS
|
||||
ALERT_CONFLICTS
|
||||
CONFLICT_BACKUP
|
||||
CONFLICT_BACKUP_MULTIPLE
|
||||
CONFLICT_BACKUP_DAYS
|
||||
|
@ -67,6 +70,7 @@ CONFLICT_PREVALANCE
|
|||
SOFT_DELETE
|
||||
SOFT_DELETE_DAYS
|
||||
SKIP_DELETION
|
||||
SYNC_TYPE
|
||||
RESUME_SYNC
|
||||
RESUME_TRY
|
||||
FORCE_STRANGER_LOCK_RESUME
|
||||
|
@ -97,11 +101,11 @@ sync-test
|
|||
${HOME}/backupuser/.ssh/id_rsa
|
||||
''
|
||||
SomeAlphaNumericToken9
|
||||
no
|
||||
false
|
||||
''
|
||||
10240
|
||||
0
|
||||
no
|
||||
false
|
||||
rsync
|
||||
''
|
||||
include
|
||||
|
@ -110,39 +114,43 @@ include
|
|||
''
|
||||
''
|
||||
\;
|
||||
yes
|
||||
no
|
||||
no
|
||||
true
|
||||
false
|
||||
false
|
||||
false
|
||||
'www.kernel.org www.google.com'
|
||||
''
|
||||
yes
|
||||
yes
|
||||
yes
|
||||
yes
|
||||
no
|
||||
no
|
||||
no
|
||||
no
|
||||
no
|
||||
no
|
||||
yes
|
||||
true
|
||||
true
|
||||
true
|
||||
true
|
||||
false
|
||||
false
|
||||
false
|
||||
false
|
||||
false
|
||||
false
|
||||
true
|
||||
7200
|
||||
10600
|
||||
1801
|
||||
60
|
||||
7200
|
||||
yes
|
||||
no
|
||||
false
|
||||
false
|
||||
true
|
||||
false
|
||||
30
|
||||
initiator
|
||||
yes
|
||||
true
|
||||
30
|
||||
''
|
||||
yes
|
||||
''
|
||||
true
|
||||
2
|
||||
no
|
||||
no
|
||||
yes
|
||||
false
|
||||
false
|
||||
true
|
||||
''
|
||||
''
|
||||
alert@your.system.tld
|
||||
|
@ -157,8 +165,8 @@ none
|
|||
''
|
||||
0
|
||||
0
|
||||
yes
|
||||
no
|
||||
true
|
||||
false
|
||||
)
|
||||
|
||||
function Init {
|
||||
|
@ -171,7 +179,8 @@ function Init {
|
|||
FAILED_DELETE_LIST_FILENAME="-failed-delete-$SYNC_ID"
|
||||
|
||||
if [ "${SLAVE_SYNC_DIR:0:6}" == "ssh://" ]; then
|
||||
REMOTE_OPERATION="yes"
|
||||
# Might also exist from old config file as REMOTE_OPERATION=yes
|
||||
REMOTE_OPERATION=true
|
||||
|
||||
# remove leading 'ssh://'
|
||||
uri=${SLAVE_SYNC_DIR#ssh://*}
|
||||
|
@@ -221,22 +230,6 @@ function Usage {
exit 128
}

function CheckEnvironment {
if [ "$REMOTE_OPERATION" == "yes" ]; then
if ! type -p ssh > /dev/null 2>&1
then
Logger "ssh not present. Cannot start sync." "CRITICAL"
return 1
fi
fi

if ! type -p rsync > /dev/null 2>&1
then
Logger "rsync not present. Sync cannot start." "CRITICAL"
return 1
fi
}

function LoadConfigFile {
local config_file="${1}"

@@ -263,134 +256,134 @@ function _RenameStateFilesLocal {
# Make sure there is no ending slash
state_dir="${state_dir%/}/"

if [ -f "$state_dir""master"$TREE_CURRENT_FILENAME ]; then
mv -f "$state_dir""master"$TREE_CURRENT_FILENAME "$state_dir""initiator"$TREE_CURRENT_FILENAME
if [ -f "${state_dir}master${TREE_CURRENT_FILENAME}" ]; then
mv -f "${state_dir}master${TREE_CURRENT_FILENAME}" "${state_dir}initiator${TREE_CURRENT_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_CURRENT_FILENAME
echo "Error while rewriting ${state_dir}master${TREE_CURRENT_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$TREE_AFTER_FILENAME ]; then
mv -f "$state_dir""master"$TREE_AFTER_FILENAME "$state_dir""initiator"$TREE_AFTER_FILENAME
if [ -f "${state_dir}master${TREE_AFTER_FILENAME}" ]; then
mv -f "${state_dir}master${TREE_AFTER_FILENAME}" "${state_dir}initiator${TREE_AFTER_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_AFTER_FILENAME
echo "Error while rewriting ${state_dir}master${TREE_AFTER_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME "$state_dir""initiator"$DELETED_LIST_FILENAME
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}" "${state_dir}initiator${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME
echo "Error while rewriting ${state_dir}master${DELETED_LIST_FILENAME}"
else
rewrite=true
fi
rewrite=true
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME
echo "Error while rewriting ${state_dir}master${FAILED_DELETE_LIST_FILENAME}"
else
rewrite=true
fi
fi

if [ -f "$state_dir""master"$TREE_CURRENT_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$TREE_CURRENT_FILENAME"-dry" "$state_dir""initiator"$TREE_CURRENT_FILENAME"-dry"
if [ -f "${state_dir}master${TREE_CURRENT_FILENAME}-dry" ]; then
mv -f "${state_dir}master${TREE_CURRENT_FILENAME}-dry" "${state_dir}initiator${TREE_CURRENT_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_CURRENT_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${TREE_CURRENT_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$TREE_AFTER_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$TREE_AFTER_FILENAME"-dry" "$state_dir""initiator"$TREE_AFTER_FILENAME"-dry"
if [ -f "${state_dir}master${TREE_AFTER_FILENAME}-dry" ]; then
mv -f "${state_dir}master${TREE_AFTER_FILENAME}-dry" "${state_dir}initiator${TREE_AFTER_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir""master"$TREE_AFTER_FILENAME"
echo "Error while rewriting ${state_dir}master${TREE_AFTER_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$DELETED_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME"-dry" "$state_dir""initiator"$DELETED_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}-dry" "${state_dir}initiator${DELETED_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${DELETED_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi

if [ -f "$state_dir""slave"$TREE_CURRENT_FILENAME ]; then
mv -f "$state_dir""slave"$TREE_CURRENT_FILENAME "$state_dir""target"$TREE_CURRENT_FILENAME
if [ -f "${state_dir}slave${TREE_CURRENT_FILENAME}" ]; then
mv -f "${state_dir}slave${TREE_CURRENT_FILENAME}" "${state_dir}target${TREE_CURRENT_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_CURRENT_FILENAME
echo "Error while rewriting ${state_dir}slave${TREE_CURRENT_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_AFTER_FILENAME ]; then
mv -f "$state_dir""slave"$TREE_AFTER_FILENAME "$state_dir""target"$TREE_AFTER_FILENAME
if [ -f "${state_dir}slave${TREE_AFTER_FILENAME}" ]; then
mv -f "${state_dir}slave${TREE_AFTER_FILENAME}" "${state_dir}target${TREE_AFTER_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_AFTER_FILENAME
echo "Error while rewriting ${state_dir}slave${TREE_AFTER_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""slave"$DELETED_LIST_FILENAME "$state_dir""target"$DELETED_LIST_FILENAME
if [ -f "${state_dir}slave${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}slave${DELETED_LIST_FILENAME}" "${state_dir}target${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$DELETED_LIST_FILENAME
echo "Error while rewriting ${state_dir}slave${DELETED_LIST_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME "$state_dir""target"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}" "${state_dir}target${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$FAILED_DELETE_LIST_FILENAME
echo "Error while rewriting ${state_dir}slave${FAILED_DELETE_LIST_FILENAME}"
else
rewrite=true
fi
fi

if [ -f "$state_dir""slave"$TREE_CURRENT_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$TREE_CURRENT_FILENAME"-dry" "$state_dir""target"$TREE_CURRENT_FILENAME"-dry"
if [ -f "${state_dir}slave${TREE_CURRENT_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${TREE_CURRENT_FILENAME}-dry" "${state_dir}target${TREE_CURRENT_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_CURRENT_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${TREE_CURRENT_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_AFTER_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$TREE_AFTER_FILENAME"-dry" "$state_dir""target"$TREE_AFTER_FILENAME"-dry"
if [ -f "${state_dir}slave${TREE_AFTER_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${TREE_AFTER_FILENAME}-dry" "${state_dir}target${TREE_AFTER_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_AFTER_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${TREE_AFTER_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$DELETED_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$DELETED_LIST_FILENAME"-dry" "$state_dir""target"$DELETED_LIST_FILENAME"-dry"
if [ -f "${state_dir}slave${DELETED_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${DELETED_LIST_FILENAME}-dry" "${state_dir}target${DELETED_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$DELETED_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${DELETED_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""target"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}target${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$FAILED_DELETE_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry"
else
rewrite=true
fi
@@ -413,24 +406,24 @@ $SSH_CMD state_dir="${1}" DELETED_LIST_FILENAME="$DELETED_LIST_FILENAME" FAILED_
state_dir="${state_dir%/}/"
rewrite=false

if [ -f "$state_dir""master"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME "$state_dir""initiator"$DELETED_LIST_FILENAME
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}" "${state_dir}initiator${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME"-dry"
else
@@ -448,14 +441,14 @@ ENDSSH

function RenameStateFiles {
_RenameStateFilesLocal "$MASTER_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
if [ "$REMOTE_OPERATION" != "yes" ]; then
if [ "$REMOTE_OPERATION" != "yes" ] && [ "$REMOTE_OPERATION" != true ]; then
_RenameStateFilesLocal "$SLAVE_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
else
_RenameStateFilesRemote "$SLAVE_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
fi
}

function RewriteOldConfigFiles {
function CheckAndBackup {
local config_file="${1}"

if ! grep "MASTER_SYNC_DIR=" "$config_file" > /dev/null && ! grep "INITIATOR_SYNC_DIR=" "$config_file" > /dev/null; then
@@ -469,6 +462,10 @@ function RewriteOldConfigFiles {
echo "Cannot backup config file."
exit 1
fi
}

function RewriteOldConfigFiles {
local config_file="${1}"

echo "Rewriting config file $config_file"

@@ -484,7 +481,7 @@ function RewriteOldConfigFiles {
rm -f "$config_file.tmp"
}

function AddMissingConfigOptions {
function AddMissingConfigOptionsAndFixBooleans {
local config_file="${1}"
local counter=0

@@ -492,27 +489,69 @@ function AddMissingConfigOptions {
if ! grep "^${KEYWORDS[$counter]}=" > /dev/null "$config_file"; then
echo "${KEYWORDS[$counter]} not found"
if [ $counter -gt 0 ]; then
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
if [ $? -ne 0 ]; then
echo "Cannot add missing ${KEYWORDS[$counter]}."
exit 1
fi
else
sed -i'.tmp' '/onfig file rev*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
fi
echo "Added missing ${KEYWORDS[$counter]} config option with default option [${VALUES[$counter]}]"
else
# Not the most elegant but the quickest way :)
if grep "^${KEYWORDS[$counter]}=yes$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=true/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${KEYWORDS[$counter]} boolean to true."
exit 1
fi
elif grep "^${KEYWORDS[$counter]}=no$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=false/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${KEYWORDS[$counter]} boolean to false."
exit 1
fi
fi
fi
counter=$((counter+1))
done
}
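
# Standalone illustration of the boolean-rewrite idiom used above, run against a
# scratch file; CREATE_DIRS is just a sample keyword here.
tmpConf=$(mktemp)
echo "CREATE_DIRS=yes" > "$tmpConf"
sed -i'.tmp' 's/^CREATE_DIRS=.*/CREATE_DIRS=true/g' "$tmpConf"
cat "$tmpConf"                   # prints CREATE_DIRS=true
rm -f "$tmpConf" "$tmpConf.tmp"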

function RewriteSections {
local config_file="${1}"

sed -i'.tmp' 's/## ---------- GENERAL OPTIONS/[GENERAL]/g' "$config_file"
sed -i'.tmp' 's/## ---------- REMOTE OPTIONS/[REMOTE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- REMOTE SYNC OPTIONS/[REMOTE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- MISC OPTIONS/[MISC_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- BACKUP AND DELETION OPTIONS/[BACKUP_DELETE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- BACKUP AND TRASH OPTIONS/[BACKUP_DELETE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- RESUME OPTIONS/[RESUME_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- ALERT OPTIONS/[ALERT_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- EXECUTION HOOKS/[EXECUTION_HOOKS]/g' "$config_file"
}

function UpdateConfigHeader {
local config_file="${1}"

# "onfig file rev" to deal with earlier variants of the file where c was lower or uppercase
#sed -i'.tmp' '/onfig file rev/c\###### '$SUBPROGRAM' config file rev '$CONFIG_FILE_VERSION' '$NEW_PROGRAM_VERSION "$config_file"
sed -i'.tmp' 's/.*onfig file rev.*/##### '$SUBPROGRAM' config file rev '$CONFIG_FILE_VERSION' '$NEW_PROGRAM_VERSION'/' "$config_file"
rm -f "$config_file.tmp"
if ! grep "^CONFIG_FILE_REVISION=" > /dev/null "$config_file"; then
if grep "\[GENERAL\]" > /dev/null "$config_file"; then
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
else
sed -i'.tmp' '/.*onfig file rev.*/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
fi
# "onfig file rev" to deal with earlier variants of the file where c was lower or uppercase
sed -i'.tmp' 's/.*onfig file rev.*//' "$config_file"
fi
}
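
# Standalone illustration of the sed 'a\' append idiom used above, on a scratch
# file; the [GENERAL] marker and the 1.3.0 value are sample data.
tmpConf=$(mktemp)
printf '[GENERAL]\nSOME_OPTION=true\n' > "$tmpConf"
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n''CONFIG_FILE_REVISION=1.3.0'$'\n' "$tmpConf"
cat "$tmpConf"                   # [GENERAL], then CONFIG_FILE_REVISION=1.3.0, then SOME_OPTION=true
rm -f "$tmpConf" "$tmpConf.tmp"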

_QUICK_SYNC=0
@@ -522,11 +561,11 @@ do
case $i in
--master=*)
MASTER_SYNC_DIR=${i##*=}
_QUICK_SYNC=$(($_QUICK_SYNC + 1))
_QUICK_SYNC=$((_QUICK_SYNC + 1))
;;
--slave=*)
SLAVE_SYNC_DIR=${i##*=}
_QUICK_SYNC=$(($_QUICK_SYNC + 1))
_QUICK_SYNC=$((_QUICK_SYNC + 1))
;;
--rsakey=*)
SSH_RSA_PRIVATE_KEY=${i##*=}
@@ -548,11 +587,19 @@ elif [ "$1" != "" ] && [ -f "$1" ] && [ -w "$1" ]; then
CONF_FILE="${CONF_FILE%/}"
LoadConfigFile "$CONF_FILE"
Init
CheckAndBackup "$CONF_FILE"
RewriteSections "$CONF_FILE"
RewriteOldConfigFiles "$CONF_FILE"
AddMissingConfigOptions "$CONF_FILE"
AddMissingConfigOptionsAndFixBooleans "$CONF_FILE"
UpdateConfigHeader "$CONF_FILE"
RenameStateFiles "$MASTER_SYNC_DIR"
RenameStateFiles "$SLAVE_SYNC_DIR"
if [ -d "$MASTER_SYNC_DIR" ]; then
RenameStateFiles "$MASTER_SYNC_DIR"
fi
if [ -d "$SLAVE_SYNC_DIR" ]; then
RenameStateFiles "$SLAVE_SYNC_DIR"
fi
rm -f "$CONF_FILE.tmp"
echo "Configuration file upgrade finished."
else
Usage
fi
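
# Hypothetical usage (the script file name and the paths below are placeholders,
# not names taken from the repository): upgrade a config file in place, or
# rename state files for a quick-sync directory pair.
bash ./upgrade-osync-conf.sh /etc/osync/sync.conf
bash ./upgrade-osync-conf.sh --master=/data/initiator --slave=ssh://backupuser@example.com//data/target --rsakey=/home/backupuser/.ssh/id_rsa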