Compare commits

...

560 Commits

Author SHA1 Message Date
Orsiris de Jong f02cbfdc92
Add codespell 2025-03-08 14:14:21 +01:00
Orsiris de Jong 6c6b2e5b12
Spellfix 2025-03-08 14:00:23 +01:00
Orsiris de Jong 7a6cb155cc Rebuilt targets 2025-01-20 14:49:56 +01:00
Orsiris de Jong 266fa0d97f Reapply #267 to common install script 2025-01-20 14:49:11 +01:00
Orsiris de Jong a566549875 Rebuilt targets 2025-01-20 14:17:21 +01:00
Orsiris de Jong bc1878bf7f
Merge pull request #270 from gruoner/kerberos-patch
Kerberos patch
2025-01-20 11:27:15 +01:00
deajan 59d99823e4 Update largefileset URL 2025-01-20 10:31:50 +01:00
gruoner c5ef7a916e
Merge branch 'deajan:master' into kerberos-patch 2025-01-19 14:11:29 +01:00
gruoner f0d9cfcf35 with the KRB5 parameter one can use kerberos credentials to access SSH or RSYNC 2025-01-19 14:03:33 +01:00
Orsiris de Jong 548f3c5730
Fix Spinner must work without env vars 2024-09-21 20:39:26 +02:00
Orsiris de Jong f094fb0481
Merge pull request #267 from new2f7/patch-3
Update uninstaller to remove all services files
2024-09-04 15:32:39 +02:00
Orsiris de Jong b1e08fe947
Fix typos as per #264 2024-09-04 15:25:54 +02:00
Orsiris de Jong 3a9accaea0
Merge pull request #266 from new2f7/patch-2
Fix spacing in usage help
2024-09-04 15:18:37 +02:00
Orsiris de Jong 6a717859b8
Merge pull request #265 from new2f7/patch-1
Add missing '$' in 'systemctl start $SERVICE_NAME@instance.conf'
2024-09-04 15:17:39 +02:00
new2f7 6d13e3976a Update uninstaller to remove all services files 2024-07-03 13:54:35 +02:00
new2f7 0d00fa8ab9
Fix spacing in usage help 2024-06-12 09:13:36 +02:00
new2f7 05e594e101
Add missing '$' in 'systemctl start $SERVICE_NAME@instance.conf' 2024-06-12 09:10:51 +02:00
Orsiris de Jong 639055bf00
Merge pull request #262 from vitorhcl/add-tested-platforms-table
Transform tested platforms list into a table
2024-03-27 01:00:23 +01:00
Vitor Henrique 35afd1af8a Transform methods column in footnotes 2024-03-23 12:36:05 -03:00
Orsiris de Jong 09225de945
Merge pull request #263 from vitorhcl/improve-usage-section
Improve "Usage" section
2024-03-13 16:39:49 +01:00
Vitor Henrique 99d01b2ed0
Improve monitor mode section 2024-03-13 12:33:20 -03:00
Vitor Henrique e34ebfd4cd
Improve wording of "Monitor mode" section 2024-03-13 12:27:27 -03:00
Vitor Henrique 7aeb2e3b69 Improve Monitor mode section 2024-03-13 11:26:58 -03:00
Vitor Henrique 51e79cb56a Fix modes links 2024-03-13 11:16:55 -03:00
Vitor Henrique 2ce2561f99 Improve Usage section 2024-03-13 11:16:49 -03:00
Vitor Henrique 88e1f3f4f6 Move Termux method to version column 2024-03-13 10:23:32 -03:00
Orsiris de Jong b1a0510a0e
Merge pull request #261 from vitorhcl/fix-code-blocks
Fix code blocks in README
2024-03-13 08:27:31 +01:00
Vitor Henrique b3d723ad07 Transform tested platforms list into a table 2024-03-13 00:42:50 -03:00
Vitor Henrique ed2e8438d2 Fix code blocks in README 2024-03-13 00:19:22 -03:00
Orsiris de Jong f2ff318f46
Merge pull request #260 from vitorhcl/improve-readme
Improve README
2024-03-12 21:18:47 +01:00
Vitor Henrique f889eb99e8 Use "monitor mode" instead of "daemon mode" and move note to note box 2024-03-07 18:54:18 -03:00
Vitor Henrique 737008d662 Improve README 2024-03-07 18:40:05 -03:00
Orsiris de Jong 19dbdeb28a Merge branch 'master' of https://github.com/deajan/osync 2023-09-12 11:00:06 +02:00
Orsiris de Jong f39fb6613e
Merge pull request #257 from nzalev/master
Typo fix
2023-07-12 12:58:22 +02:00
Noah Zalev f6fd288cfa
Typo fix 2023-07-04 14:33:04 -04:00
Orsiris de Jong 63cbe3786e Update changelog 2023-06-16 00:20:54 +02:00
Orsiris de Jong 98a4d6bb0c Rebuilt targets 2023-06-14 19:09:33 +02:00
Orsiris de Jong e47c1d7f58 Bump version 2023-06-14 19:09:05 +02:00
Orsiris de Jong cef25b646f
Prepare v1.3 release 2023-06-14 19:05:58 +02:00
Orsiris de Jong 4054f3ca75 Make sure we update remote.conf rsa key path 2023-06-14 14:30:28 +02:00
Orsiris de Jong ea1b3b379e
Fix misc option names 2023-06-14 14:23:16 +02:00
Orsiris de Jong 988009a438 Rebuilt targets 2023-06-14 14:06:40 +02:00
Orsiris de Jong b2f221cb86 Make rsync old arguments fix permanent 2023-06-14 14:06:18 +02:00
Orsiris de Jong c4349bc945 Rebuilt targets 2023-06-14 13:50:04 +02:00
Orsiris de Jong 99f107fcb0 Add general fix for #242 2023-06-14 13:49:36 +02:00
Orsiris de Jong 776c07386d Fix tests 2023-06-14 13:49:18 +02:00
Orsiris de Jong edfc292486 Temporarily disable merge test 2023-06-14 11:25:12 +02:00
Orsiris de Jong 9ade468ed8 Add permission fixes for tests 2023-06-14 11:10:24 +02:00
Orsiris de Jong d9e22b06e5 Add preflight merge 2023-06-11 11:26:02 +02:00
Orsiris de Jong d0af0804a0 Fix systemd detection on windows 2023-06-11 11:24:38 +02:00
Orsiris de Jong 9b9c1105eb Rebuilt targets 2023-06-11 11:19:25 +02:00
Orsiris de Jong 9b862ae49d Fix typo 2023-06-11 11:18:39 +02:00
Orsiris de Jong 57ef919364 Update sync paths. Remove log garbage 2023-06-11 11:04:44 +02:00
Orsiris de Jong 833c16ca43 Move osync test dir to /tmp 2023-06-11 09:43:44 +02:00
Orsiris de Jong 83a0f5c328 Rebuilt targets 2023-06-10 19:45:58 +02:00
Orsiris de Jong 5a7455173c Fix typo 2023-06-10 19:45:33 +02:00
Orsiris de Jong edd35d42aa Enable writing partial logs for alerting 2023-06-10 19:44:37 +02:00
Orsiris de Jong 50f975d2df Add --non-interactive switch 2023-06-10 19:43:16 +02:00
Orsiris de Jong 43e1aaaaa1 Bump ofunctions version to 2.4.4 2023-06-10 19:40:24 +02:00
Orsiris de Jong b15e8e8f88 Preserve env variables for sudo calls 2023-06-10 19:20:43 +02:00
Orsiris de Jong 91950446f0 Coherent yaml workflow 2023-06-10 13:51:03 +02:00
Orsiris de Jong 0ef4af40dc Coherent yaml workflow 2023-06-10 13:49:20 +02:00
Orsiris de Jong 6b6f723094 Fix remote tests not updating conf file properly 2023-06-10 13:03:05 +02:00
Orsiris de Jong d100841bd9 Try to fix github env variables 2023-06-10 12:59:42 +02:00
Orsiris de Jong 52a42bc1d7 Add export for env variables 2023-06-10 12:51:41 +02:00
Orsiris de Jong 30d66003ef Use export for env variables 2023-06-10 12:35:26 +02:00
Orsiris de Jong b95e75aa0b Add SSH_PORT info 2023-06-10 12:31:14 +02:00
Orsiris de Jong 8ef2262421 Fix typo in env vars 2023-06-10 12:04:34 +02:00
root fa7f6d1088 Configure default SSH port for github actions 2023-06-10 11:58:12 +02:00
Orsiris de Jong 5f4e7af910
Re-enable all tests since we got remote tests to work 2023-06-10 11:14:46 +02:00
Orsiris de Jong a9da549e4f
Add openssh-server for remote tests 2023-06-10 11:08:32 +02:00
Orsiris de Jong 4bf065f94e
Make tests run on github runner's WSL and macos versions too 2023-06-10 11:07:51 +02:00
Orsiris de Jong 912906ef8c Revert "foo"
This reverts commit 77cbcd97d9.
2023-06-10 10:56:21 +02:00
Orsiris de Jong 77cbcd97d9
foo 2023-06-04 19:53:37 +02:00
Orsiris de Jong 5beb9220d1
Update run_tests.sh 2023-06-04 19:08:55 +02:00
Orsiris de Jong 53193aceb6
WIP ssh test fixes for github actions 2023-06-04 18:57:07 +02:00
Orsiris de Jong 4d08791d04
Try to diag ssh errors in github actions 2023-06-04 18:51:35 +02:00
Orsiris de Jong 01769189b7
Merge pull request #254 from poessl/fix-macos-directory-FileMove
Fix FileMove (used on deletions) not working for directories on macOS
2023-06-04 16:49:36 +02:00
Orsiris de Jong 9dd69349ad
Require macos tests to run as root 2023-06-04 15:48:53 +02:00
Orsiris de Jong ef0cd58d49
Add sudo to linux tests 2023-06-04 15:17:11 +02:00
Orsiris de Jong d8915512ea
Add rsync package to WSL windows tests 2023-06-04 14:57:13 +02:00
Orsiris de Jong 9fa4c45b6c
Renamce License file 2023-06-04 14:48:53 +02:00
J. Poessl 22e269902b
Fix FileMove (used on deletions) not working for directories on macOS 2023-05-31 13:36:13 +02:00
Orsiris de Jong edaeb9a400
Do not enable both SSH and RSYNC compression #251 2023-01-19 22:01:50 +01:00
Orsiris de Jong f646fffc79 WIP macosx tests 2022-07-07 21:58:58 +02:00
Orsiris de Jong ce4b435eb2
Add fswatch package 2022-07-07 21:51:05 +02:00
Orsiris de Jong e395e8ca65
Add dos2unix for windows 2022-07-07 21:49:34 +02:00
Orsiris de Jong 60ceccfff1 WIP run tests on github actions 2022-07-07 17:08:31 +02:00
Orsiris de Jong cf78875ab5 WIP debug github actions tests 2022-07-07 16:40:26 +02:00
Orsiris de Jong 059f95d4a8 WIP fixing github actions tests 2022-07-07 16:34:46 +02:00
Orsiris de Jong 9edeb39d45
add dos2unix package 2022-07-07 16:25:09 +02:00
Orsiris de Jong 55c4cffcf0 Merge branch 'master' of https://github.com/deajan/osync 2022-07-07 16:22:56 +02:00
Orsiris de Jong 59bd176e48 Update shunit to current master 2022-07-07 16:22:35 +02:00
Orsiris de Jong 7da01328b2
Update windows.yml 2022-07-07 12:19:38 +02:00
Orsiris de Jong 36db883ab0
Fix yaml syntax 2022-07-07 11:42:13 +02:00
Orsiris de Jong 632db7d9a2
Create windows.yml 2022-07-07 11:33:04 +02:00
Orsiris de Jong 999aeed303
Add macos github action file 2022-07-07 11:27:39 +02:00
Orsiris de Jong a83f76c639
Update linux.yml 2022-07-07 11:16:45 +02:00
Orsiris de Jong 663b5be3be
Update linux.yml 2022-07-07 11:14:45 +02:00
Orsiris de Jong 898fcbb8c1
Update linux.yml 2022-07-07 11:14:15 +02:00
Orsiris de Jong ecc4c1d396 Merge branch 'master' of https://github.com/deajan/osync 2022-07-07 11:13:57 +02:00
Orsiris de Jong 89f8533c27 Update env var for github actions 2022-07-07 11:12:25 +02:00
Orsiris de Jong 7168aa9d02
Make sure we can run tests 2022-07-07 11:12:19 +02:00
Orsiris de Jong 9245030a9d
Fix typo 2022-07-07 11:06:26 +02:00
Orsiris de Jong a1f8773953
Add github actions workflow 2022-07-07 11:05:40 +02:00
Orsiris de Jong 35a71ad517 Prepare for v1.3.0 release 2022-07-07 11:00:31 +02:00
Orsiris de Jong e01d8c410b Rebuilt targets 2022-07-07 10:58:39 +02:00
Orsiris de Jong 4e44020619 Update ofunctions from pmocr 2022-07-07 10:58:16 +02:00
Orsiris de Jong f2761d6a42
Merge pull request #244 from zhemant/fix/operator-error
Fix/operator error
2022-07-07 10:54:37 +02:00
Hemant Zope f48ff41072 regerate osync using merge.sh 2022-06-17 12:54:40 +02:00
Hemant Zope 08759b895c fix bash operator error 2022-06-17 12:50:14 +02:00
deajan 508cd4bf50 Merged back remote changes 2021-06-29 09:31:02 +02:00
deajan 87dd5d07ea Rebuilt targets 2021-06-29 09:28:21 +02:00
deajan 134acff3a9 Add ALWAYS_SEND_MAILS option 2021-06-29 09:27:54 +02:00
Orsiris de Jong c091996231
Bump version 2021-05-18 20:43:22 +02:00
Orsiris de Jong a7e1b4505b
Fix typo 2021-05-18 20:42:53 +02:00
Orsiris de Jong 803d25ac8b
Fix typo 2021-05-18 20:42:12 +02:00
Orsiris de Jong a36fd05069
Change CheckConnectivity3rdPartyHosts to allow external IPs 2021-05-18 20:36:42 +02:00
Orsiris de Jong 96c26e2791
Add TimeCheck and Ping functions 2021-05-05 14:24:12 +02:00
Orsiris de Jong f74108ed4a
Merge pull request #229 from deajan/custom_statedir
Custom statedir
2021-04-23 09:44:08 +02:00
deajan 27866f1db5 Rebuilt targets 2021-02-24 17:35:17 +01:00
deajan 904105904e Fix remote call not using RemoteLogger 2021-02-24 17:34:47 +01:00
deajan c2efdb2063 Allow custom statedir 2021-02-24 17:23:05 +01:00
deajan edcfe337c3 Mute error when detecting wget/curl 2020-11-29 13:04:31 +01:00
Orsiris de Jong 56c8fa868a
Update bug_report.md 2020-11-19 12:15:26 +01:00
deajan 3c1eb0989e Rebuilt targets 2020-11-15 22:09:21 +01:00
deajan 2bc8e899d7 Fix ofunctions GenericTrapQuit lacks closing comment 2020-11-15 22:08:57 +01:00
deajan 9a8b028c96 Rename variable 2020-11-15 22:07:17 +01:00
deajan e5922f04a5 Simplify --skip-compress rsync argument 2020-11-15 22:00:12 +01:00
Orsiris de Jong 11e19108f5
Merge pull request #219 from kendarorg/master
Support for Qnap local and remote (no ctime support)
2020-11-15 21:47:54 +01:00
Enrico Da Ros 4076423d98 Changed ofunctions to prev version 2020-11-05 09:31:41 +00:00
Enrico Da Ros 536f8ce6f0 Changed support for -z/-zz on Qnap 2020-11-05 09:25:07 +00:00
Enrico Da Ros 88c74dda70 Changed support for -z/-zz on Qnap 2020-11-05 09:17:34 +00:00
Enrico Da Ros 2ebd22c441 Changed support for -z/-zz on Qnap 2020-11-05 09:14:55 +00:00
Enrico Da Ros ca822566d4 Updated with suggestions 2020-11-05 08:52:55 +00:00
Enrico Da Ros 009a4aabe0 Updated with suggestions 2020-11-05 08:49:40 +00:00
Enrico Da Ros cc21cc6acc ps -a instead of ps -A, % diskspace 2020-11-04 15:44:54 +01:00
Enrico Da Ros 81437b5824 Support for Qnap without ctime 2020-11-04 12:13:18 +01:00
Enrico Da Ros 45304c608c Support for Qnap without ctime 2020-11-04 12:09:53 +01:00
Orsiris de Jong 8451024ae2
Merge pull request #204 from deajan/fast-deletion
Fast deletion
2020-07-28 23:33:56 +02:00
deajan 5ea103b36b Rebuilt targets 2020-07-22 13:35:52 +01:00
deajan f2be25f83d Fix #203 check file presence for get file attribs 2020-07-22 13:35:27 +01:00
deajan 5852d76cd3 Rebuilt targets 2020-06-30 11:48:10 +01:00
deajan 67dc58f3b5 Added deleted file list simplification, fix #202 2020-06-30 11:47:48 +01:00
deajan b589c32ce7 Rebuilt targets 2020-06-30 09:21:13 +01:00
deajan 8e4e745420 Make GetConfFileValue reports DEBUG level when noerror is set 2020-06-29 11:02:33 +01:00
deajan f9694d6478 Rebuilt targets 2020-06-29 09:48:32 +01:00
deajan ea0ca4e4c2 Update changelog 2020-06-29 09:48:05 +01:00
deajan 3c0f0b542d Tidy up conflict log messages 2020-06-29 09:24:06 +01:00
deajan 179bf6281e Rebuilt targets 2020-05-13 09:08:14 +01:00
deajan f5403bd13a Make comment more clear about SSH_COMPRESSION options 2020-05-13 09:06:41 +01:00
deajan 5870003c73 Improved example config file comments 2020-05-12 13:19:38 +01:00
deajan 7ed547786f Rebuilt targets 2020-05-03 09:21:40 +01:00
deajan 747e9c710e Fix #195 by removing orphan WIP 2020-05-03 09:21:14 +01:00
deajan 7a7653957e Update changelog 2020-05-03 09:12:36 +01:00
deajan ad7d022dcf Quote all rsync result files 2020-05-03 09:11:15 +01:00
deajan e6088c88de Rebuilt targets 2020-05-03 09:08:37 +01:00
deajan 347f9a9ae8 Make sure all files are quoted 2020-05-03 08:58:01 +01:00
Orsiris de Jong 7577f220dc
Merge pull request #194 from zhangzhishan/master
Fix space problem for getting lock file.
2020-05-03 09:47:52 +02:00
Zhang Zhishan acf388bc7b Fix space problem for getting lock file. 2020-05-02 18:47:25 +08:00
deajan 5881c71212 Rebuilt target 2020-04-29 11:44:28 +01:00
deajan 60f2005209 More init platform exclusions 2020-04-29 11:44:17 +01:00
Orsiris de Jong 7688317bd1
Merge pull request #184 from dbingham/macos-install
Fix install on MacOS to not return error code 2 after a normal install
2020-04-29 12:41:11 +02:00
deajan e997493fb5 Update changelog 2020-04-29 11:35:29 +01:00
deajan be3fc2e848 Rebuilt targets 2020-04-29 11:32:14 +01:00
deajan 844e08f92f More explicit stop on error explanantion 2020-04-29 11:31:44 +01:00
deajan 38190c6139 Fixed STOP_ON_ERROR_CMD did not work anymore 2020-04-29 11:30:59 +01:00
deajan 45a8a7a589 Added parenthesis to tests 2020-04-29 11:19:49 +01:00
deajan 3a9d00ff4f Merged changes 2020-04-29 11:15:03 +01:00
deajan 7bc80b9574 Removed parenthesis from remote logs 2020-04-29 11:12:56 +01:00
deajan a912a7d7c3 Fixed long standing issue with remote rsync calls containing special characters 2020-04-29 11:12:25 +01:00
deajan 1c7d21c485 Cosmetic change 2020-04-29 11:03:54 +01:00
root 51a4c8bcce Rebuilt targets 2020-03-15 22:51:21 +01:00
root fbce80bc0a Renamed ofunctions TrapQuit to GeneralTrapQuit 2020-03-15 22:50:56 +01:00
root 6edce242b0 Rebuilt targets 2020-03-15 22:31:33 +01:00
root 8788582f63 TrapQuit function should exist as subset 2020-03-15 22:31:07 +01:00
Daniel Bingham cc4ce385ee Fix install on MacOS to not return error code 2 after a normal install 2020-01-28 10:59:32 -08:00
deajan 98c1b97fb0 Fix file deletion on remote MacOS might not work from /tmp 2020-01-23 19:40:25 +01:00
deajan 17e2bf6dce Update changelog 2020-01-23 18:52:37 +01:00
deajan c2fef97c84 Rebuilt targets 2020-01-23 18:47:01 +01:00
deajan eaeb218508 Fixed MINIMUM_SPACE=0 should not check space requirements, Thanks Val 2020-01-23 18:46:32 +01:00
deajan a9cb30ffb4 Added missing SYNC_TYPE 2020-01-22 18:49:20 +01:00
deajan 08ab938477 Rebuilt targets 2020-01-22 18:40:11 +01:00
deajan 5e71ed0854 Fix usage called before definition 2020-01-22 18:39:34 +01:00
deajan f5356ea702 Update changelog 2020-01-10 11:18:27 +01:00
deajan 10adfe6128 Rebuilt targets 2020-01-10 11:17:10 +01:00
deajan 572a055901 Fixed target helper service overwrites osync service, fix #176 2020-01-10 11:16:24 +01:00
deajan 45915611f6 Rebuilt targets 2020-01-10 11:07:55 +01:00
deajan e4970830ac Update copyright year 2020-01-10 11:07:25 +01:00
deajan b6181e15d9 Update copyright year 2020-01-10 11:07:05 +01:00
deajan b8cfce2eb8 Set default LOG_CONFLICTS=false, fixes #181 2020-01-10 11:03:32 +01:00
deajan d8bdf2d721 Changed file mod from 755 to 644 since we deal with systemd 2020-01-10 10:58:57 +01:00
deajan e4d52aec2f Rebuilt targets 2019-12-25 23:11:28 +01:00
deajan a4b03b12e2 Update merge for FileMove 2019-12-25 21:03:56 +01:00
deajan bf86cc5f6f Refactor mv calls to FileMove just because of macos 2019-12-25 19:29:25 +01:00
deajan 39a8a309f5 Remove beta 1.3 statement 2019-12-25 18:58:09 +01:00
deajan 97fb53f6de Again remarklint fixes 2019-12-25 18:52:07 +01:00
deajan 314505eb4a Fix shellcheck issues 2019-12-25 18:39:53 +01:00
deajan fb57fb9d74 Make remarklint happy 2019-12-25 18:14:47 +01:00
deajan 689928eac1 Merge branch 'master' of https://github.com/deajan/osync 2019-12-25 18:13:34 +01:00
deajan 42c7493ef0 Make remarklint happy 2019-12-25 18:13:18 +01:00
Orsiris de Jong 6139fa002b
Another remarklint fix 2019-12-25 18:03:44 +01:00
Orsiris de Jong a48f4a85e6
Make remarklint happy in codacy 2019-12-25 17:59:20 +01:00
deajan cbff578ec5 Update changelog 2019-12-25 17:45:28 +01:00
deajan dc901282dc Rebuilt targets 2019-12-25 15:13:52 +01:00
deajan 08372e74ba Final fix for #175 correcting symlinks too 2019-12-25 15:13:29 +01:00
deajan 7d8978dc88 WIP #174 2019-12-25 13:51:37 +01:00
deajan 925adbd488 Still doing apple BS fixes 2019-12-24 18:25:17 +01:00
deajan e0b2f0435e Another #174 fix 2019-12-24 18:11:34 +01:00
deajan cb7dbbe2af Another bad fix for #175 2019-12-06 10:53:30 +01:00
deajan 3d94544337 Rebuilt targets 2019-12-06 10:45:14 +01:00
deajan 8197ea3ddb Prevent spinner from logging in service mode 2019-12-06 10:44:51 +01:00
deajan 78323cc06a Rebuilt targets 2019-11-02 14:53:28 +01:00
deajan ba830fe7d0 Removed (big) typo 2019-11-02 14:53:02 +01:00
deajan 87dd470344 Rebuilt targets 2019-11-02 14:49:09 +01:00
deajan 2c45d58659 Test before move 2019-11-02 14:48:43 +01:00
deajan 4d4728b90c Rebuilt targets 2019-11-02 14:45:23 +01:00
deajan 22538907b3 Fix typo 2019-11-02 14:44:14 +01:00
deajan f3365ed5ff [WIP] fix for #175 2019-11-02 14:40:58 +01:00
deajan 145814f231 Removed unused code 2019-10-21 13:56:26 +02:00
deajan 21bd1657ec Added upgrade finished message 2019-10-21 13:55:48 +02:00
deajan 6be95a3cff Added CleanUp function 2019-09-09 14:12:29 +02:00
deajan a7526c8a87 Added missing TrapQuit function 2019-09-09 14:10:13 +02:00
deajan 8b757bf083 Bump version to 2.3.0-RC3 2019-09-09 14:08:59 +02:00
deajan be1281ffa3 Merge branch 'master' of https://github.com/deajan/osync 2019-09-09 13:08:11 +02:00
deajan bdc9e42046 Upgrade script checks for old master/slave dirs before rewriting state files 2019-09-09 13:07:58 +02:00
Orsiris de Jong d66ea211d5
Removed double s7z extension from rsync skip compress 2019-09-04 10:32:36 +02:00
deajan da5140d3f0 Typo fixes 2019-08-18 22:05:50 +02:00
deajan d37872c730 Added source for cipher performance 2019-08-14 15:27:56 +02:00
deajan 997a7904ed Add chacha20-poly1305@openssh.com to cipher list 2019-08-14 15:17:19 +02:00
deajan 712c3179ee Revert non WIP KEEP_LOGGING time 2019-08-14 15:11:38 +02:00
deajan 0d28d0c482 Enhance rsync --skip-compress list 2019-08-14 15:11:10 +02:00
deajan f22cff5a49 Enabled Rsync compression in quicksync mode 2019-08-14 15:09:30 +02:00
deajan 7fb8eb7004 Allow SSH_OPTIONAL_ARGS to be set 2019-08-14 13:43:49 +02:00
deajan afdc5334f4 Updated log with truncated mention 2019-08-09 10:31:57 +02:00
deajan c02a7af424 Fixed typo in _REMOTE_TOKEN log removal 2019-08-09 10:24:40 +02:00
deajan 17a97d129b Cleaning up various remote logging 2019-08-09 10:12:23 +02:00
deajan 183ca45b63 Added missing RemoteLogger prefix 2019-08-09 10:07:38 +02:00
deajan 31986d0d19 WIP cleanup 2019-08-09 10:05:27 +02:00
deajan bf3b846026 Better _REMOTE_TOKEN cleanup 2019-08-09 10:04:45 +02:00
deajan 7d137ed0c5 Another flatten array fix 2019-08-08 21:10:13 +02:00
deajan 723501526c Fixed flatten array comparaison 2019-08-08 21:08:38 +02:00
deajan 2f4384d911 Updated changelog 2019-08-08 20:50:43 +02:00
deajan 8f2b22a117 Added more debugging for ExecTasks 2019-08-08 20:48:10 +02:00
deajan 5f6f7482dd Add warning about truncated output 2019-08-08 20:42:08 +02:00
deajan f0ea6df5ea Prevent possible bash buffer overflow, fixes #171 2019-08-08 18:00:20 +02:00
deajan 4df4c594b7 Obfuscate only part of remote token env 2019-08-08 10:37:57 +02:00
deajan 08910b3895 Fixed typo in build number 2019-07-23 10:40:40 +02:00
deajan 21c59cfd27 Updated RSA key file for quickRemote tests 2019-07-23 10:39:40 +02:00
deajan 151cfb8d39 Updated RSA key file 2019-07-23 10:38:52 +02:00
deajan 3b545648ac Enabled all tests again 2019-07-19 16:46:42 +02:00
deajan 0ae3a0c629 Fixed authorized_keys cleaning 2019-07-19 16:45:54 +02:00
deajan 9c7ef090f1 Rebuilt targets 2019-07-19 16:45:00 +02:00
deajan b124f7dfaf Exit SSH controlmaster before cleanup 2019-07-19 16:44:36 +02:00
deajan f87b529195 Remote remote token after usage #158 2019-07-09 11:29:25 +02:00
deajan 3cddb9b97e Added default SSH_CONTROLMASTER option 2019-07-05 23:27:25 +02:00
deajan 80f3b2f0bc Minor fixes in upgrade script 2019-07-05 23:26:50 +02:00
deajan 31b8ce4c52 Added bogus encryption warning 2019-07-05 23:19:19 +02:00
deajan f81291c0eb Fix cornercase where SSH_CONTROLMASTER might be used in local tests 2019-07-05 23:13:21 +02:00
deajan 59ec9e42c1 Added --ssh-controlmaster test for #169 2019-07-05 23:00:03 +02:00
deajan 9cfa13c59a Rebuilt targets 2019-07-05 22:56:03 +02:00
deajan ca78d76bb2 Fix for junk /osync.remote folder not being deleted 2019-07-05 22:55:34 +02:00
deajan b0b91a4db0 Removed elder PoorMansRandomGenerator function 2019-07-05 22:46:02 +02:00
deajan 8d9e95c6cd Rebuilt targets 2019-07-05 22:37:37 +02:00
deajan 154318be0f Added SSH_CONTROLMASTER option 2019-07-05 22:37:12 +02:00
deajan ddb820d2fa Added #169 CONTROLMASTER option 2019-07-05 22:35:35 +02:00
deajan dce4092085 Add --ssh-controlmaster option 2019-07-05 22:34:24 +02:00
deajan 219067be26 Allow multiple concurrent control masters, and allow spaces in path, enhances #169 2019-07-05 22:30:10 +02:00
Orsiris de Jong 8f85d20a68
Merge pull request #169 from bmorgenthaler/ssh-controlmaster
Initial changes to support SSH Multiplexing
2019-07-05 22:28:03 +02:00
Orsiris de Jong 492064f8e2
Comment about _REMOTE_EXECUTION variable 2019-07-05 22:24:48 +02:00
bmorgenthaler 32ee9f8071 Initial changes to support SSH Multiplexing 2019-07-05 11:25:50 -05:00
deajan 33971c359c Merge branch 'master' of https://github.com/deajan/osync 2019-05-28 09:15:48 +02:00
deajan d9d5aaf1e8 Added more quoting 2019-05-28 09:14:43 +02:00
Orsiris de Jong bd25ef4b72
Added codacy badge 2019-05-22 23:10:04 +02:00
deajan 2b17326630 Rebuilt targets 2019-05-22 22:57:04 +02:00
deajan 228a4d1f51 Revert "Refactored rsync patterns"
This reverts commit 679b4bc29e.
2019-05-22 22:56:27 +02:00
deajan 679b4bc29e Refactored rsync patterns 2019-05-22 22:29:07 +02:00
deajan f1b558d646 Updated changelog 2019-05-22 21:30:21 +02:00
deajan c8bb8c214f Fixed _Logger leaving tmp files 2019-05-22 21:26:12 +02:00
deajan c801d6b418 Fixed quoting 2019-05-22 21:02:26 +02:00
deajan 0b539e6153 Removed unused variable 2019-05-22 20:36:18 +02:00
deajan 8256bbef88 Removed unused variable 2019-05-22 20:35:04 +02:00
deajan b3a7572c0e Fixed typo 2019-05-22 20:33:49 +02:00
deajan 9976b3a6e5 Remove unnecessary $ 2019-05-22 20:30:27 +02:00
deajan bcd79e06aa Remove unused variable 2019-05-22 20:28:51 +02:00
deajan 6459a9dd4c Updated booleans in ssh filter 2019-05-22 19:47:20 +02:00
deajan 9b481467f6 Merge branch 'master' of https://github.com/deajan/osync 2019-05-22 19:44:02 +02:00
deajan 72f2e43071 Rebuilt targets 2019-05-22 19:43:48 +02:00
deajan a6e4c4a0ee Updated target helper config file 2019-05-22 19:43:19 +02:00
deajan b4892b9fdc Removed old helper config file 2019-05-22 19:40:37 +02:00
deajan 63f6194305 Less helper config file checks 2019-05-22 19:39:19 +02:00
deajan 3dae261b9b Less required options in helper config file 2019-05-22 19:38:48 +02:00
deajan c6876924c7 Fixed intentation 2019-05-22 19:33:09 +02:00
Orsiris de Jong 0c343662f4
Update copyright year 2019-05-22 19:26:32 +02:00
Orsiris de Jong f4130119ef
Updated beta version since we made another release 2019-05-22 19:26:03 +02:00
deajan 971ea2b358 Rebuilt targets 2019-05-22 19:13:10 +02:00
deajan 2ce8007b9d Bumped version 2019-05-22 19:12:49 +02:00
Orsiris de Jong 39da290577
Merge pull request #167 from deajan/beta2-platform-tests
Beta2 platform tests
2019-05-22 19:10:42 +02:00
Orsiris de Jong cdeae14fd7
v1.3-beta2 announcement 2019-05-22 14:50:00 +02:00
deajan 3cd2f16bc3 Fixed newer Win10 bash version ping 2019-05-21 23:52:26 +02:00
deajan 9435c48f20 Fix newer bash on Windows10 got rid of elder find cmd bug 2019-05-21 22:11:01 +02:00
deajan 4a1001a40f Set default SSH port for test 2019-05-21 21:31:15 +02:00
Orsiris de Jong f0811b3f56
Make codacy happy 2019-05-21 17:07:41 +02:00
deajan f95155c310 Fixed horrible typo 2019-05-21 14:30:26 +02:00
deajan 1b8c07332f Fix for very old config files 2019-05-21 14:09:30 +02:00
deajan d17ece7e2c Fixed bogus copy paste 2019-05-21 13:54:12 +02:00
deajan e1a081a7f1 Fixed backup before upgrading 2019-05-21 12:51:29 +02:00
deajan 590e299821 Fixed sections are rewritten after inserts 2019-05-21 12:43:10 +02:00
deajan f8fbbaef57 Unexpanded ofunctions 2019-05-21 00:38:59 +02:00
deajan 7fffa454e9 Rebuilt targets 2019-05-21 00:08:44 +02:00
deajan 22af510375 Fixed _QUICK_SYNC value too high 2019-05-21 00:08:19 +02:00
deajan 61729d1245 Update changelog 2019-05-20 23:44:59 +02:00
deajan c089ccc63b Rebuilt targets 2019-05-20 23:31:26 +02:00
deajan 3edd80949f Allow commandline override of config file values 2019-05-20 23:31:04 +02:00
deajan 9bd2b7eeaf Minor tweak 2019-05-20 23:19:59 +02:00
deajan 9142f873f4 Rebuilt targets 2019-05-20 22:54:31 +02:00
deajan dc25cba3c8 Fixed bogus SKIP_DELETION logic when SYNC_TYPE set 2019-05-20 22:54:11 +02:00
deajan f781f35ea1 Rebuilt targets 2019-05-20 22:50:41 +02:00
deajan c07897b7cb Disabled conflictDetect test for v1.3 2019-05-20 22:50:01 +02:00
deajan 301e0d69e1 Enfore SKIP_DELETION when SYNC_TYPE set 2019-05-20 22:43:31 +02:00
deajan aca0be5a9b Rebuilt targets 2019-05-20 22:19:27 +02:00
deajan 5638565f6a Refactored locking handling 2019-05-20 22:18:59 +02:00
deajan c10a175c90 Change work comments 2019-05-20 21:58:35 +02:00
deajan 968c732d94 Re-enabled all tests 2019-05-20 21:56:29 +02:00
deajan b290d4e35e Fixed ACL tests 2019-05-20 21:56:01 +02:00
deajan de383c9c0f Removed unused code 2019-05-20 21:55:45 +02:00
deajan 0c1f4403fb Update comment 2019-05-20 21:38:05 +02:00
deajan 555d8e6d23 Unexpanded current dev 2019-05-20 18:20:01 +02:00
deajan 02d0024df9 Rebuilt targets 2019-05-20 18:18:20 +02:00
deajan 3accf5f254 Improved --summary output 2019-05-20 18:18:00 +02:00
deajan bd8fc2f043 Rebuilt targets 2019-05-20 17:53:04 +02:00
deajan 40c44368ec Fixed deletion logging in unidirectional sync 2019-05-20 17:52:43 +02:00
deajan 7331612a11 Rebuilt targets 2019-05-20 17:35:05 +02:00
deajan e12d30d9c5 Minor code tweaks 2019-05-20 17:15:27 +02:00
deajan c18977bcef Added target helper to install script 2019-05-20 14:55:59 +02:00
deajan 3d6ec17f27 Rebuilt targets 2019-05-20 14:30:04 +02:00
deajan a0e5e791cd Update changelog 2019-05-20 14:29:42 +02:00
deajan 44d8635042 Added --sync-type for unidirectional sync, implements #147 2019-05-20 14:28:26 +02:00
deajan 71f857f9c8 Added --sync-type for unidirectional sync 2019-05-20 14:26:59 +02:00
deajan de99f0efdf Removed work output 2019-05-20 14:06:21 +02:00
deajan 0d501209fa Fixed litteral 'synced' state instead of step 2019-05-20 12:13:24 +02:00
deajan efeb665818 Fixed typo generating bogus resume actions 2019-05-20 12:11:30 +02:00
deajan 7e945197c2 WIP SYNC_TYPE 2019-05-20 12:11:04 +02:00
deajan a52068dec0 Fixed possible issues with spaces 2019-05-20 11:50:58 +02:00
deajan 7b57980e57 Codacy improvements 2019-05-20 11:47:39 +02:00
deajan c96bdfe5ce Rebuilt targets 2019-05-20 11:37:55 +02:00
deajan 8018871168 Minor code improvements 2019-05-20 11:37:37 +02:00
deajan 826ebe1c2c Fix potential quick_sync parameter issues 2019-05-20 11:32:55 +02:00
deajan 4b90ac3a27 WIP --sync-type 2019-05-20 11:19:38 +02:00
deajan 0e7f969588 Fixed summary counters should be global 2019-05-20 11:11:12 +02:00
deajan 01c469b019 Fix indentation 2019-05-20 11:04:02 +02:00
deajan aa92531b99 Rebuilt targets 2019-05-19 17:22:07 +02:00
deajan 4542f62fce Fix subshell variables not propagated 2019-05-19 17:21:32 +02:00
deajan 020f674932 Rebuilt targets 2019-05-19 17:12:53 +02:00
deajan 22fe557c54 Typo fixes 2019-05-19 17:12:12 +02:00
deajan 1339e6463a Rebuilt targets 2019-05-19 17:02:20 +02:00
deajan 99a7c932f7 Quick typo fix 2019-05-19 17:01:50 +02:00
deajan 932ecd93fd Rebuilt targets 2019-05-19 17:01:02 +02:00
deajan 201aadd33d Update summary 2019-05-19 17:00:22 +02:00
deajan 5eecb719d3 Rebuilt targets 2019-05-19 16:42:32 +02:00
deajan 76d80b17f6 Deprecat LOG_CONFLICTS as experimental feature 2019-05-19 16:42:08 +02:00
deajan 094d1538bb More --summary statistics 2019-05-19 16:39:19 +02:00
deajan 30411c72c7 Updated changelog 2019-05-19 13:08:14 +02:00
deajan 26b661396a Improved config file upgrade script 2019-05-19 12:56:41 +02:00
deajan e64ec1232e Updated changelog 2019-05-18 13:43:45 +02:00
deajan c2f0b41076 Rebuilt targets 2019-05-18 13:40:29 +02:00
deajan e8470218f6 Bumped build 2019-05-18 13:40:03 +02:00
deajan 3a9c427855 Improve deletion tests to address #165 2019-05-18 13:39:32 +02:00
Orsiris de Jong 63a434cc55
Merge pull request #166 from weinhold/fix-quoting-escaping-issues
fix several quoting/escaping issues
2019-05-18 13:35:23 +02:00
Ingo Weinhold 56c0197b6b fix several quoting/escaping issues
... reproducible when the remote target sync dir contains spaces.
2019-04-05 14:25:15 +02:00
deajan 5b0c40f3f0 Rebuilt targets 2019-03-15 12:10:27 +01:00
deajan 61a9aa6a91 Bumped version 2019-03-15 12:10:06 +01:00
deajan 8a57b568e9 Improved version check 2019-03-15 12:09:40 +01:00
deajan 5e42a088d0 Rebuilt targets 2019-03-15 11:58:45 +01:00
deajan 0f5bed992f Fixed bogus config file revision check on error 2019-03-15 11:54:40 +01:00
deajan 51dded275b Mailer fixes and semantic fixes 2019-03-01 10:41:09 +01:00
deajan 61f6593cbe Ported fixes from backup_tool_script 2019-03-01 09:28:36 +01:00
deajan 8fe0afae56 Fixed typo 2019-02-26 12:10:13 +01:00
deajan 5477ed96fe Rebuilt targets 2019-02-26 12:07:57 +01:00
deajan c15f9ea4a2 Removed bogus SIMPLE log calls 2019-02-26 12:07:35 +01:00
deajan 39feb76d2e Simpler current log 2019-02-14 10:02:37 +01:00
deajan cc2d283f23 Fixed typo introduced with boolean changes 2019-02-08 13:22:03 +01:00
Orsiris de Jong 8e8ed101bb
Merge pull request #163 from deajan/osync2-compat
Osync2 compat
2019-02-08 13:10:46 +01:00
deajan d9b587fd4a Fixed email RFC822 check 2019-02-07 17:04:41 +01:00
deajan 027422b014 Cosmetic changes 2019-01-29 10:04:27 +01:00
deajan e44bdee73e Fixed more boolean replacements 2019-01-22 18:11:18 +01:00
deajan 22bd6181f1 Removed debug log line 2019-01-12 11:46:42 +01:00
deajan e850f8f792 Fixed boolean conversion after init 2019-01-12 10:49:30 +01:00
deajan 2da2109229 Fixed conflict backup multiple unit test 2019-01-11 23:56:48 +01:00
deajan 52c059fc29 Updated conf file upgrade script 2019-01-11 00:43:19 +01:00
deajan f1cf7eb8e2 Updated config file structure, checks and upgrade script 2019-01-11 00:34:12 +01:00
deajan f3bbaceb73 Reenabled local and remote tests 2019-01-02 20:54:57 +01:00
deajan 88491d0fee Disabled conflictList test 2019-01-02 20:19:24 +01:00
deajan 1c9db7136f No conflictlist file will not trigger an error 2018-12-29 18:41:12 +01:00
deajan 71942082f8 Added config file version check 2018-12-21 19:07:56 +01:00
deajan 6e56e2b63e Removed extra tabs 2018-12-21 18:51:34 +01:00
deajan 3931b7b771 Improved log readability from ExecTasks command output 2018-12-21 14:35:11 +01:00
deajan 9e64753569 Removed extra tabs 2018-12-21 12:44:41 +01:00
deajan 98201cf615 Fixed possible deadlock in ExecTasks 2018-12-17 22:43:34 +01:00
deajan 6ac45eea02 Added comment to variable 2018-12-17 20:07:22 +01:00
deajan 17fb0c6bf5 Merge branch 'master' of https://github.com/deajan/osync 2018-12-17 19:37:25 +01:00
deajan cae3bb73f7 Improved command output logging 2018-12-17 19:37:09 +01:00
Orsiris de Jong 5a5e60d651
Added open issue badge 2018-11-16 12:18:27 +01:00
deajan fad217447d Rebuilt targets 2018-11-05 12:14:03 +01:00
deajan 5857d8200b Added patch from Vladimirek 2018-11-05 12:13:07 +01:00
deajan a466e94b9c Some exit logs should be flagged as errors 2018-11-05 12:10:22 +01:00
deajan 08af105dd9 Added busybox-w32 detection 2018-10-26 12:28:31 +02:00
deajan 3314f947b0 Improved SetConfFileValue 2018-10-26 12:25:11 +02:00
deajan 27d6b80fad Minor fixes 2018-10-18 22:33:46 +02:00
deajan 34724352f7 Update texte files 2018-10-18 00:02:23 +02:00
deajan 27f6d2a948 Rebuilt targets 2018-10-17 23:53:13 +02:00
deajan b8f18ac860 Update coding conventions 2018-10-17 23:52:46 +02:00
deajan 01e4ec9ef1 Update tests 2018-10-17 23:33:52 +02:00
deajan fb1fd00f0e Fixed multiple race conditions 2018-10-17 23:33:30 +02:00
deajan 569048c19b Fixed bogus WAIT_FOR_TASK_COMPLETION_ return from ExecTasks 2018-10-15 19:23:48 +02:00
deajan efd8dca58e Fixed another batch of wrong ExecTaks ids 2018-10-14 23:15:38 +02:00
deajan 92dbd6b2fc Fixed partial WAIT_FOR_TASK_COMPLETION_ output 2018-10-14 23:10:34 +02:00
deajan 7e7388a9eb Fixed variable leak 2018-10-14 23:08:03 +02:00
deajan 109f0a0887 Fixed bogus ExecTasks id checks 2018-10-14 23:06:19 +02:00
deajan 54a665fe7e Removed debugging code 2018-10-14 22:51:01 +02:00
deajan 255be72c43 Removed diagnostic lines 2018-10-14 22:41:18 +02:00
deajan 04f11fcab8 Reactivated all tests 2018-10-14 19:14:22 +02:00
deajan 39e5bb92f0 Removed travis debug lines 2018-10-14 19:13:04 +02:00
deajan 0f0ba9b001 Change remote RUN_DIR for unit tests only 2018-10-14 19:12:16 +02:00
deajan 330495b1e8 Prevent variable leak in Logger 2018-10-14 16:35:34 +02:00
deajan 203400000f Minor code cleanup 2018-10-14 16:17:58 +02:00
deajan 85ac20080b ExecTasks debug __CheckArguments should happen before debug log 2018-10-14 16:06:07 +02:00
deajan a28c20d9bd Fixed variable mismatch 2018-10-14 10:51:11 +02:00
deajan c9bc7f4b6c Change all occurences of '-' in ExecTasks id 2018-10-13 23:41:13 +02:00
deajan 17cf11eb0b ExecTasks ids may not include dots 2018-10-13 17:20:18 +02:00
deajan 39053f59e3 More precise ExecTasks ids 2018-10-13 17:05:53 +02:00
deajan 295c74ca58 More logging for ExecTasks 2018-10-13 17:02:02 +02:00
deajan 1896ec793e Removed empty line making missing shebang 2018-10-13 10:43:11 +02:00
deajan 5c6132dd2c Moved rsyncCmd background execution to eval 2018-10-13 10:36:31 +02:00
deajan 438c85b777 Removed unnecessary ExecTasks from _getFileCtimeMtimeRemote function 2018-10-12 19:42:39 +02:00
deajan bbc960d566 No more ping/ping between initiator/target daemons 2018-10-10 20:58:28 +02:00
deajan c963d71ecd Fixing ping/pong between initiator and targets 2018-10-10 20:57:48 +02:00
deajan 1b8f2ea7fc Added daemon mode known issue 2018-10-10 20:57:22 +02:00
deajan 4c999ec229 Remote deletion returns error when no error has happened 2018-10-10 20:47:11 +02:00
deajan a2e3e96827 WIP for target-helper 2018-10-10 20:39:49 +02:00
deajan 4be8429b96 Allow setting MIN_WAIT and MAX_WAIT on the fly 2018-10-10 15:19:32 +02:00
deajan 0e103cd98b Added push file exclusion in RSYNC_FULL_PATTERNS 2018-10-10 15:13:36 +02:00
deajan 83cfabed6f Litte performance improvement by not running code again 2018-10-10 15:02:00 +02:00
deajan dfe09f2fa3 Rebuilt targets 2018-10-10 02:19:59 +02:00
deajan ff26961e82 Fixed BSD daemon mode 2018-10-10 02:19:38 +02:00
deajan ca63bfabe0 Testing for directories in triggerupdate functions 2018-10-10 02:12:57 +02:00
deajan 2aa36b626a Replaced Logger with RemoteLogger in triggerUpdate 2018-10-10 02:09:46 +02:00
deajan f0336c0dec Added target-helper syncdir checks 2018-10-10 01:57:20 +02:00
deajan 3dc8478ba7 Added default MIN_WAIT and MAX_WAIT values 2018-10-10 01:45:39 +02:00
deajan 633e6397d0 Fixed missing equal sign for password-file 2018-10-10 01:40:26 +02:00
deajan babeea9d9a Fixed unlocking 2018-10-10 01:33:31 +02:00
deajan 9cb6cc57af Fixed typos 2018-10-10 01:31:12 +02:00
deajan 7eb4ee68ae Added unlockReplica pid initialisation 2018-10-10 00:05:46 +02:00
deajan 660e856986 Unlocking only when locks exist 2018-10-10 00:04:30 +02:00
deajan ac31de2388 Cleanup also called when bogus config detected 2018-10-09 23:57:53 +02:00
deajan ae64a967f6 Continuing isolating ConflictLog detection issue 2018-10-08 22:40:41 +02:00
deajan 2ca8803b3a No need for paranoia debug in CleanUp for remote exec 2018-10-08 21:53:30 +02:00
deajan ad9d978fc2 Typo fixing 2018-10-08 21:50:35 +02:00
deajan 7570794632 Improved remote execution results 2018-10-08 21:40:57 +02:00
deajan 4651ff0e06 Added remote detection 2018-10-08 21:34:41 +02:00
deajan 0e2189c441 Added RUN_DIR subset 2018-10-08 21:29:47 +02:00
deajan 1ab6042a62 Fixed typo in _getFileCtimeMtimeRemote 2018-10-08 21:17:36 +02:00
deajan a5f5b3a800 Rebuilt targets 2018-10-08 20:12:59 +02:00
deajan b95474a8e6 Updated expr to not use extended regular expressions 2018-10-08 20:11:39 +02:00
deajan e3855c4256 Revert work on _getCtimeMtimeFileRemote error detection 2018-10-07 17:51:15 +02:00
deajan 51f6dcbd71 Trying to identify concurrency issue with ConflictDetection 2018-10-07 17:50:37 +02:00
deajan 3d68a135a4 Conflict detection random bug isolation 2018-10-07 14:43:31 +02:00
deajan 9e17e114f7 Rebuilt targets 2018-10-07 14:42:41 +02:00
deajan 94a71620a1 Made ctime.mtime functions more clear in syntax 2018-10-07 13:23:45 +02:00
deajan bad7767809 Harmonize ExecTasks calls 2018-10-07 12:59:56 +02:00
deajan fa5cf2de57 Another set of conflictLog issue isolation 2018-10-07 12:25:53 +02:00
deajan 9eb29c3454 Rebuilt targets 2018-10-07 12:25:27 +02:00
deajan 4129dfff37 Typo fix 2018-10-07 12:25:07 +02:00
deajan c452bd45e6 Increased TSTAMP randomization 2018-10-07 12:24:35 +02:00
deajan 6b0bab0984 Removed unused variable 2018-10-07 12:23:59 +02:00
deajan 89d63d83b8 Removed unused variable 2018-10-06 22:16:11 +02:00
deajan e0f177f5c2 Added comment about unit file verification 2018-10-06 21:57:11 +02:00
deajan 6bd5862702 Removed unused argument from conflictList 2018-10-06 21:41:58 +02:00
deajan eb8c780d84 Rebuilt targets 2018-10-06 20:03:58 +02:00
deajan 58e5c654d3 Isolate conflict detect bug 2018-10-05 00:51:20 +02:00
deajan e411c81f3d Removed double debug logs 2018-10-05 00:46:11 +02:00
deajan 57647b8960 Isolate ConflictDetection function 2018-10-05 00:39:48 +02:00
deajan 5b395b3c04 Travis debug can fail 2018-10-02 23:37:33 +02:00
deajan 4bd4bbc247 Reverted travis debug lines 2018-10-02 23:26:04 +02:00
deajan 4de784d90f Added default umask 2018-10-02 23:20:26 +02:00
deajan 6c1b7a541b Fixed bogous CreateDir rights 2018-10-02 23:06:33 +02:00
deajan 6e7b99debb IsInteger should be part of ofunctions micro/mini 2018-10-02 23:00:54 +02:00
deajan 996e6251e1 Added missing IsInteger function 2018-10-02 22:43:24 +02:00
deajan a265ca4eef Travis here and travis there 2018-10-02 22:38:57 +02:00
deajan 306d5ff1ad Added permissions on CreateDir 2018-10-02 22:36:30 +02:00
deajan 74271d220c I f*cking hate travis 2018-10-02 21:27:12 +02:00
deajan 8a350333a8 Travis debug... 2018-10-02 19:32:21 +02:00
deajan e9daf2b2bf Removed unused code 2018-10-02 19:30:36 +02:00
deajan d58071c898 Why is travis so bad... 2018-10-02 19:29:15 +02:00
deajan b07ee4a645 Fixed installer directive 2018-10-02 19:26:57 +02:00
deajan 6c65f9098f Merge branch 'master' of https://github.com/deajan/osync 2018-10-02 19:02:40 +02:00
deajan 9537b92bf6 Rebuilt targets 2018-10-02 19:02:31 +02:00
deajan 2c857a1e83 Fixed removing directories on soft deletion 2018-10-02 19:01:44 +02:00
Orsiris de Jong b0cdb17328 Update issue templates 2018-10-02 18:30:04 +02:00
Orsiris de Jong 5c0c494526
Create PULL_REQUEST_TEMPLATE.md 2018-10-02 18:27:01 +02:00
deajan 6f2e1af3d7 Added release policy 2018-10-02 18:24:40 +02:00
deajan 0b7b6808d5 Add shellcheck exclusions 2018-10-02 18:22:36 +02:00
deajan 7eab6df734 Better log message 2018-10-02 18:19:32 +02:00
deajan 32c2ab17e4 Rebuilt targets 2018-10-02 18:14:07 +02:00
deajan bc6ebdb80e Fixed merge paths and removed unused code 2018-10-02 18:13:48 +02:00
deajan 0d6f7e0ca0 Merge cleanup 2018-10-02 18:11:01 +02:00
deajan 49948dd637 Fixed installer 2018-10-02 18:09:21 +02:00
deajan f35d5841c2 Added cleanup routine 2018-10-02 18:09:12 +02:00
deajan 81148db1fa Fixed merge producing rogue logs 2018-10-02 18:08:03 +02:00
deajan 250ac48ed6 Indentation fix 2018-10-02 11:41:46 +02:00
deajan 65d4c3c3ff Random generator improvements 2018-10-02 11:37:46 +02:00
deajan bbfa4c9d3b Rebuilt targets 2018-10-02 10:52:25 +02:00
deajan 25070032c4 Typo fix 2018-10-02 10:51:45 +02:00
deajan 4f76bb4ad2 Simpler merge process 2018-10-02 10:49:28 +02:00
deajan 465a3b9b80 IsNumeric and IsNumericExpand are now busybox compatible 2018-10-02 10:49:16 +02:00
deajan a9434e605b Removed unused dummy function 2018-10-02 10:09:40 +02:00
deajan 06004ac05d Function reorganisation 2018-10-02 10:07:01 +02:00
deajan b2463d2e08 Added includes to batcher 2018-10-02 09:37:25 +02:00
deajan 13286c2720 [WIP] Merge simplifications 2018-10-02 09:35:48 +02:00
deajan a734dadfde Don't dev at night kids... 2018-10-02 09:24:15 +02:00
deajan c694580ae5 Yet another typo fix 2018-10-02 09:22:41 +02:00
deajan 4f3e6a11d2 Fixed typo 2018-10-02 09:17:59 +02:00
deajan f028f3b6d2 Added preprocessing to batch runner 2018-10-02 09:07:19 +02:00
deajan 2e6e17962c Removed space 2018-10-01 21:04:41 +02:00
deajan 52cf42a53c Add TSTAMP and SCRIPT_PID variables 2018-10-01 21:03:00 +02:00
deajan 5bc77dfe76 Add PoorMansRandomGenerator subset 2018-10-01 21:02:47 +02:00
deajan 1869b1e82f Added Trapquit to trigger CleanUp 2018-10-01 20:49:53 +02:00
deajan 3532b50c38 Added CleanUp subset 2018-10-01 20:46:55 +02:00
deajan d0b40257fa Added cleanup function to installer 2018-10-01 20:46:21 +02:00
deajan 59b7562a3e Fixed Logger missing RUN_DIR 2018-10-01 20:43:27 +02:00
deajan c99a1293c7 Debug conflictLog in travis 2018-10-01 20:28:46 +02:00
deajan eb609f7faa Some release details 2018-10-01 20:13:58 +02:00
deajan 7e678b6480 Debug output for travis conflictDetect tests 2018-10-01 20:01:26 +02:00
deajan b4cd0834cd Fixed shellcheck SC2027 in RsyncPatterns 2018-10-01 14:13:05 +02:00
deajan 55534e0669 Fixed shellcheck SC2053 in Vercomp 2018-10-01 14:10:16 +02:00
deajan 2eba91a58f Removed unused variables 2018-10-01 14:09:20 +02:00
deajan 01864e4980 Rebuilt targets 2018-10-01 14:03:09 +02:00
deajan 87cdc8eb98 Removed extra output from PoorMansRandomGenerator 2018-10-01 14:02:44 +02:00
deajan 740d5277cc Rebuilt targets 2018-10-01 11:31:22 +02:00
deajan a230d32042 Improve PoorMansRandomGenerator function 2018-10-01 11:27:01 +02:00
deajan dde31090fe Improved inotifywait call 2018-10-01 10:49:35 +02:00
deajan c07fa8b267 Added target helper service files 2018-10-01 10:31:01 +02:00
deajan 88597d6a7a Initial target-helper mode working 2018-10-01 10:27:01 +02:00
deajan 6bce2e1755 [WIP] Less restrictive preflight checks in target-helper mode 2018-10-01 10:08:22 +02:00
deajan fe808ca909 [WIP] target_helper switch should not be a half bool half char 2018-10-01 10:01:00 +02:00
deajan 90a09c422b [WIP] Use same variable names in target-helper mode 2018-10-01 09:57:27 +02:00
deajan 47bb01b0dc [WIP] Pass initiator ssh uri on target-helper mode 2018-10-01 09:54:24 +02:00
deajan 2b51f06f21 Removed earlier target helper file 2018-10-01 07:28:03 +02:00
deajan f50048180a Rebuilt targets 2018-10-01 07:27:20 +02:00
deajan 5945988e96 Fix wrong log message on MacOS 2018-10-01 07:26:56 +02:00
deajan 07775d7390 [WIP] osync-target-helper mode 2018-09-30 21:24:09 +02:00
deajan c80bec2fe2 osync target-helper service config file 2018-09-30 16:41:59 +02:00
deajan af7542d8f5 [WIP] target helpder service in osync 2018-09-30 16:08:12 +02:00
deajan b44c0c13ad Rebuilt targets 2018-09-30 14:50:51 +02:00
deajan d727bc353c Fixed rsync args built logic 2018-09-30 14:50:00 +02:00
deajan 3af0bc7a11 Rebuilt targets 2018-09-30 14:07:09 +02:00
deajan f1e40d54e4 Fixed typo 2018-09-30 14:06:24 +02:00
deajan 76f9f31e6d Moved RSYNC_ARGS to RSYNC_DEFAULT_ARGS 2018-09-30 13:57:57 +02:00
deajan 50496070f6 Merge branch 'master' of https://github.com/deajan/osync 2018-08-08 11:45:08 +02:00
Orsiris de Jong 7c11240034
Change logger date output to be language agnostic 2018-07-14 15:37:14 +02:00
52 changed files with 10612 additions and 10640 deletions

.github/ISSUE_TEMPLATE/bug_report.md (new file, 33 lines)

@@ -0,0 +1,33 @@
---
name: Bug report
about: Create a report to help us improve
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Set up osync with the following config file / the following parameters (please provide either, anonymized)
2. Run osync with following parameters
3. Result
**Expected behavior**
A clear and concise description of what you expected to happen.
**Deviated behavior**
Describe how the actual result deviates from the expected behavior.
**Logs**
Please send logs of what happens.
Also, you might run osync with the _DEBUG=yes environment variable to get more verbose debug logs.
**Environment (please complete the following information):**
- Full osync version (including build)
- OS: [e.g. iOS]
- Bitness [e.g. x64 or x86]
- Shell (busybox or else)
**Additional context**
Add any other context about the problem here.
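
For reporters unsure how to produce the requested logs, the following is a minimal sketch of a debug run; the replica paths are placeholders, and redirecting console output to a file is just one convenient way to capture it (the --initiator/--target quick-sync switches and --dry are assumed from osync's usual command line).

# Hedged example: capture verbose output for a bug report (paths are placeholders)
_DEBUG=yes ./osync.sh --initiator="/path/to/replica_A" --target="/path/to/replica_B" --dry > /tmp/osync_debug.log 2>&1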

.github/workflows/codespell.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
# Codespell configuration is within .codespellrc
---
name: Codespell
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
permissions:
  contents: read
jobs:
  codespell:
    name: Check for spelling errors
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Annotate locations with typos
        uses: codespell-project/codespell-problem-matcher@v1
      - name: Codespell
        uses: codespell-project/actions-codespell@v2
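
To reproduce this check before pushing, codespell can also be run locally from the repository root; this is only a sketch that assumes a Python environment with pip, and that codespell picks up the repository's .codespellrc configuration on its own (its config-file support is the assumption here).

# Hedged local equivalent of the Codespell workflow step
pip install codespell
codespell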

.github/workflows/linux.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
name: linux-tests
on: [push, pull_request]
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest]
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies
        run: |
          sudo apt-get install inotify-tools acl
      - name: Execute tests and generate coverage report
        run: |
          export RUNNING_ON_GITHUB_ACTIONS=true
          export SSH_PORT=22
          echo "Running on github actions: ${RUNNING_ON_GITHUB_ACTIONS}"
          echo "Running on ssh port ${SSH_PORT}"
          sudo -E bash ./dev/tests/run_tests.sh
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v1
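
The same entry point can be exercised outside CI; the sketch below mirrors the workflow steps on a Debian/Ubuntu host. RUNNING_ON_GITHUB_ACTIONS and SSH_PORT are the variables the workflow exports; whether the suite strictly requires them on a local run is an assumption.

# Hedged local equivalent of the linux-tests workflow (Debian/Ubuntu host assumed)
sudo apt-get install inotify-tools acl
export RUNNING_ON_GITHUB_ACTIONS=false   # the workflow sets this to true on CI runners
export SSH_PORT=22
sudo -E bash ./dev/tests/run_tests.sh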

.github/workflows/macos.yml (new file, 28 lines)

@@ -0,0 +1,28 @@
name: macosx-tests
on: [push, pull_request]
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [macos-latest]
    steps:
      - uses: actions/checkout@v2
      - name: Install Bash 4
        run: |
          /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
          brew update
          brew install bash
          brew install fswatch
          echo "/usr/local/bin" >> $GITHUB_PATH
      - name: Execute tests and generate coverage report
        run: |
          export RUNNING_ON_GITHUB_ACTIONS=true
          export SSH_PORT=22
          sudo -E bash ./dev/tests/run_tests.sh
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v1

.github/workflows/windows.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
name: windows-tests
on: [push, pull_request]
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest]
    steps:
      - uses: actions/checkout@v2
      - uses: Vampire/setup-wsl@v1
        with:
          additional-packages:
            dos2unix
            rsync
            openssh-server
      - name: Execute tests and generate coverage report
        shell: wsl-bash {0}
        run: |
          export RUNNING_ON_GITHUB_ACTIONS=true
          export SSH_PORT=22
          find ./ -type f ! -path "./.git/*" -print0 | xargs -0 -n 1 -P 4 dos2unix
          service ssh start
          ./dev/tests/run_tests.sh
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v1

CHANGELOG.md (130 lines changed; file mode changed from Normal file to Executable file)

@@ -1,15 +1,63 @@
RECENT CHANGES
--------------
## RECENT CHANGES
dd Mmm YYYY: osync v1.3 release (for full changelog since v1.2 branch see all v1.3-beta/RC entries)
### Current master
dd Mmm YYYY: osync v1.3-RC1 release
- Make --log-conflicts non experimental (randomly fails)
- ! new option FORCE_CONFLICT_PREVALANCE which will always use Initiator or Target, regardless of best time
- ! target-helper: destination mails etc on target, also, no cmd after on configs
! New option --sync=bidir|initator2target|target2initiator #147
! new option FORCE_CONFLICT_PREVALANCE which will always use Initiator or Target, regardless of best time
! Vercomp function is now BusyBox compatible
### 16 June 2023: osync v1.3 release (for full changelog since v1.2 branch see all v1.3-beta/RC entries)
08 Aug 2018: osync v1.3-beta1 release
- Fix for new RSYNC protocol
- New options ALWAYS_SEND_MAILS to allow sending logs regardless of execution states
### 29 June 2020: osync v1.3-RC1 release
- New option to use SSH_CONTROLMASTER in order to speed up remote sync tasks and preserve a single ssh channel
- New option SSH_OPTIONAL_ARGS
- Fixed a problem with macos mv not preserving ownership of files from /tmp
- Fixed very long outstanding issue with special characters in remote target handling
- Fixed an issue where STOP_ON_ERROR_CMD did not work anymore
- Fixed a remote file lock problem (thanks to https://github.com/zhangzhishan)
- Fixed various cosmetic issues with code and logs
- Improved upgrade script
- Fixed a possible bash buffer overflow when synchronizing large filesets (tested with 2M files)
- This fix actually truncats every string sent to Logger not being more than 16KB
- Fixed osync leaving temporary log files behind in RUN_DIR (/tmp by default)
- Updated target helper service configuration file
- Improved codacy results
- Added more debugging
- Fixed service logs being junked by spinner
- Fixed MINIMUM_SPACE=0 didn't stop the disk space check anymore (Thanks to Val)
- Fixed conflict file logs to be less verbose when no conflicts happen
### 22 May 2019: osync v1.3-beta3 release
- Config file update script fixes
- Removed old Win10 1607 bash fixes to make Win10 1809 work (breaks Win10 1607 beta bash version...Yeah, nothing I can do about that)
### 20 May 2019: osync v1.3-beta2 release
- More --summary statistics
- Config file syntax now uses booleans instead of yes / no (but still accepts old syntax)
- Added boolean update in upgrade script
- Config file revision check
- Added config file revision in upgrade script
- New option --sync-type=initator2target|target2initiator that allows using osync as rsync wrapper for unidirectional sync
- New osync target helper service
- Fixed multiple race conditions in parallel executions (which also fixes random conflict logs failures)
- Fixed directory softdeletion bug
- Fixed multiple failed deletions will be retried as many times as failures happened
- Fixed remote running on FreeBSD for some commands, thanks to Vladimirek
- Fixed (again) deletion propagation when file contains spaces (thanks to http://github.com/weinhold)
- Deprecated --log-conflicts for 1.3 branch (is now experimental)
- Updated ofunctions
- Has better random number generator
- IsInteger, IsNumeric and IsNumericExpand are now busybox compatible
- Multiple installer fixes
- Multiple batch fixes
### 08 Aug 2018: osync v1.3-beta1 release
- Added an option to log conflictual files
- Presence of conflictual files can trigger a special mail
@@ -40,20 +88,20 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Upgraded shunit2 test framework to v2.1.8pre (git commit 07bb329)
- Multiple smaller fixes and improvements
25 Mar 2017: osync v1.2 release (for full changelog of v1.2 branch see all v1.2-beta/RC entries)
### 25 Mar 2017: osync v1.2 release (for full changelog of v1.2 branch see all v1.2-beta/RC entries)
- Check for initiator directory before launching monitor mode
- Updated RPM spec file (Thanks to https://github.com/liger1978)
- Fixed remote commands can be run on local runs and obviously fail
- Minor fixes in installer logic
10 Feb 2017: osync v1.2-RC3 release
### 10 Feb 2017: osync v1.2-RC3 release
- Uninstaller skips ssh_filter if needed by other program (osync/obackup)
- Logger now automatically obfuscates _REMOTE_TOKEN
- Logger doesn't show failed commands in stdout, only logs them
08 Feb 2017: osync v1.2-RC2 release
### 08 Feb 2017: osync v1.2-RC2 release
- Tests have run on CentOS 5,7 and 7, Debian 8, Linux Mint 18, Fedora 25, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Hugely improved ssh_filter
@@ -64,7 +112,7 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Fixed installer statistics don't report OS
- Minor tweaks and fixes in ofunctions
13 Dec 2016: osync v1.2-RC1 release
### 13 Dec 2016: osync v1.2-RC1 release
- Unit tests have run on CentOS 5,6 and 7, Debian 8, Linux Mint 18, FreeBSD 10.3/pfSense, FreeBSD 11, MacOSX Sierra, Win10 1607 (14393.479) bash, Cygwin x64 and MSYS2 current
- Added optional rsync arguments configuration value
@@ -113,7 +161,7 @@ dd Mmm YYYY: osync v1.3-RC1 release
- More code compliance
- Lots of minor fixes
19 Nov 2016: osync v1.2-beta3 re-release
### 19 Nov 2016: osync v1.2-beta3 re-release
- Fixed blocker bug where local tests tried GetRemoteOS Anyway
- Fixed CentOS 5 compatibility bug for checking disk space introduced in beta3
@@ -121,7 +169,7 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Made unit tests clean authorized_keys file after usage
- Added local unit test where remote OS connection would fail
18 Nov 2016: osync v1.2-beta3 released
### 18 Nov 2016: osync v1.2-beta3 released
- Improved locking / unlocking replicas
- Fixed killing local pid that has lock bug introduced in v1.2 rewrite
@@ -151,14 +199,16 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Simplified logger
- All fixes from v1.1.5
17 Oct 2016: osync v1.2-beta2 released
### 17 Oct 2016: osync v1.2-beta2 released
- osync now propagates symlink deletions and moves symlinks without referrents to deletion dir
- Upgrade script now has the ability to add any missing value
- Improved unit tests
- Added upgrade script test
- Added deletion propagation tests
30 Aug 2016: osync v1.2-beta released
### 30 Aug 2016: osync v1.2-beta released
- Rendered more recent code compatible with bash 3.2+
- Added a PKGBUILD file for ArchLinux thanks to Shadowigor (https://github.com/shaodwigor). Builds available at https://aur.archlinux.org/packages/osync/
- Some more code compliance & more paranoia checks
@@ -185,7 +235,8 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Added KillAllChilds function to accept multiple pids
- Improved logging
17 Nov 2016: osync v1.1.5 released
### 17 Nov 2016: osync v1.1.5 released
- Backported unit tests from v1.2-beta allowing to fix the following
- Allow quicksync mode to specify rsync include / exclude patterns as environment variables
- Added default path separator char in quicksync mode for multiple includes / exclusions
@@ -194,25 +245,30 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Fixed error alerts cannot be triggered from subprocesses
- Fixed remote locked targets are unlocked in any case
10 Nov 2016: osync v1.1.4 released
### 10 Nov 2016: osync v1.1.4 released
- Fixed a corner case with sending alerts with logfile attachments when osync is used by multiple users
02 Sep 2016: osync v1.1.3 released
### 02 Sep 2016: osync v1.1.3 released
- Fixed installer for CYGWIN / MSYS environment
28 Aug 2016: osync v1.1.2 released
### 28 Aug 2016: osync v1.1.2 released
- Renamed sync.conf to sync.conf.example (thanks to https://github.com/hortimech)
- Fixed RunAfterHook may be executed twice
- Fixed soft deletion when SUDO_EXEC is enabled
06 Aug 2016: osync v1.1.1 released
### 06 Aug 2016: osync v1.1.1 released
- Fixed bogus rsync pattern file adding
- Fixed soft deletion always enabled on target
- Fixed problem with attributes file list function
- Fixed deletion propagation code
- Fixed missing deletion / backup diretories message in verbose mode
27 Jul 2016: osync v1.1 released
### 27 Jul 2016: osync v1.1 released
- More msys and cygwin compatibility
- Logging begins now before any remote checks
- Improved process killing and process time control
@@ -248,10 +304,10 @@ dd Mmm YYYY: osync v1.3-RC1 release
- Uploaded coding style manifest
- Added LSB info to init script for Debian based distros
v0-v1.0x - Jun 2013 - Sep 2015
------------------------------
## v0-v1.0x - Jun 2013 - Sep 2015
### 22 Jul. 2015: Osync v1.00a released
22 Jul. 2015: Osync v1.00a released
- Small improvements in osync-batch.sh time management
- Improved various logging on error
- Work in progress: Unit tests (intial tests written by onovy, Thanks again!)
@ -267,7 +323,8 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Removed legacy lockfile code from init script
- Removed hardcoded program name from init script
01 Apr. 2015: Osync v1.00pre
### 01 Apr. 2015: Osync v1.00pre
- Improved and refactored the soft deletion routine by merging conflict backup and soft deletion
- Reworked soft deletion code to handle a case where a top level directory gets deleted even if the files contained in it are not old enough (this obviously shouldn't happen on most FS)
- Added more logging
@ -301,7 +358,8 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Added a routine that reinjects failed deletions for the next run in order to prevent bringing back files whose deletion failed with permission issues
- Added treat dir symlink as dir parameter
27 May 2014: Osync 0.99 RC3
### 27 May 2014: Osync 0.99 RC3
- Additional delete fix for *BSD and MSYS (deleted file list not created right)
- Fixed dry mode to use non dry after run treelists to create delete lists
- Added follow symlink parameter
@ -348,7 +406,8 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Added possibility to quick sync two local directories without any prior configuration
- Added time control on OS detection
02 Nov. 2013: Osync 0.99 RC2
### 02 Nov. 2013: Osync 0.99 RC2
- Minor improvement on operating system detection
- Improved RunLocalCommand execution hook
- Minor improvements on permission checks
@ -373,7 +432,8 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Fixed various typos
- Enforced CheckConnectivityRemoteHost and CheckConnectivity3rdPartyHosts checks (if one of these fails, osync is stopped)
18 Aug. 2013: Osync 0.99 RC1
### 18 Aug. 2013: Osync 0.99 RC1
- Added possibility to change default logfile
- Fixed a possible error upon master replica lock check
- Fixed exclude directories with spaces in names generating errors on master replica tree functions
@ -384,7 +444,8 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Fixed LoadConfigFile function will not warn on wrong config file
- Added --no-maxtime parameter for sync big changes without enforcing execution time checks
03 Aug. 2013: beta 3 milestone
### 03 Aug. 2013: beta 3 milestone
- Softdelete functions now honor the --dry switch
- Simplified sync delete functions
- Enhanced compatibility with different charsets in filenames
@ -392,12 +453,14 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Tree functions now honor supplementary rsync arguments
- Tree functions now honor exclusion lists
01 Aug. 2013: beta 2 milestone
### 01 Aug. 2013: beta 2 milestone
- Fixed an issue with spaces in directory trees
- Fixed an issue with recursive directory trees
- Revamped code a bit to add bash 3.2 compatibility
24 Jul. 2013: beta milestone
### 24 Jul. 2013: beta milestone
- Fixed some bad error handling in CheckMasterSlaveDirs and LockDirectories
- Added support for spaces in sync dirs and exclude lists
- Fixed false exit code if no remote slave lock present
@ -427,5 +490,4 @@ v0-v1.0x - Jun 2013 - Sep 2015
- Added soft-deleted items
- Added backup items in case of conflict
19 Jun. 2013: Project begin as Obackup fork
### 19 Jun. 2013: Project begin as Obackup fork

6
KNOWNISSUES.md Normal file
View File

@ -0,0 +1,6 @@
## KNOWN ISSUES
- Cannot finish sync if one replica contains a directory and the other replica contains a file named the same way (Unix doesn't allow this)
- Daemon mode monitors changes in the whole replica directories, without honoring exclusion lists
- Soft deletion does not honor exclusion lists (i.e. soft deleted files will be cleaned regardless of any exclude pattern because they are in the deleted folder)
- Colors don't work in mac shell

View File

@ -1,6 +0,0 @@
KNOWN ISSUES
------------
- Cannot finish sync if one replica contains a directory and the other replica contains a file named the same way (Unix doesn't allow this)
- Soft deletion does not honor exclusion lists (ie soft deleted files will be cleaned regardless of any exlude pattern because they are in the deleted folder)
- Colors don't work in mac shell

View File

@ -1,4 +1,4 @@
Copyright (c) 2013-2016, Orsiris de Jong. ozy@netpower.fr
Copyright (c) 2013-2023, Orsiris de Jong. ozy@netpower.fr
All rights reserved.
Redistribution and use in source and binary forms, with or without

4
PULL_REQUEST_TEMPLATE.md Normal file
View File

@ -0,0 +1,4 @@
When submitting a pull request, please modify the files in the dev directory rather than those generated on-the-fly.
You may find all the code contained in osync.sh in n_osync.sh and ofunctions.sh.
You may run your modified code by using `merge.sh osync` in order to generate ../osync.sh.

227
README.md
View File

@ -1,18 +1,26 @@
# osync [![Build Status](https://travis-ci.org/deajan/osync.svg?branch=master)](https://travis-ci.org/deajan/osync) [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) [![GitHub Release](https://img.shields.io/github/release/deajan/osync.svg?label=Latest)](https://github.com/deajan/osync/releases/latest)
# osync
[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
[![GitHub Release](https://img.shields.io/github/release/deajan/osync.svg?label=Latest)](https://github.com/deajan/osync/releases/latest)
[![Percentage of issues still open](http://isitmaintained.com/badge/open/deajan/osync.svg)](http://isitmaintained.com/project/deajan/osync "Percentage of issues still open")
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/651acb2fd64642eb91078ba523b7f887)](https://www.codacy.com/app/ozy/osync?utm_source=github.com&utm_medium=referral&utm_content=deajan/osync&utm_campaign=Badge_Grade)
[![linux tests](https://github.com/deajan/osync/actions/workflows/linux.yml/badge.svg)](https://github.com/deajan/osync/actions/workflows/linux.yml/badge.svg)
[![windows tests](https://github.com/deajan/osync/actions/workflows/windows.yml/badge.svg)](https://github.com/deajan/osync/actions/workflows/windows.yml/badge.svg)
[![macos tests](https://github.com/deajan/osync/actions/workflows/macos.yml/badge.svg)](https://github.com/deajan/osync/actions/workflows/macos.yml/badge.svg)
A two way filesync script running on bash Linux, BSD, Android, MacOSX, Cygwin, MSYS2, Win10 bash and virtually any system supporting bash).
File synchronization is bidirectional, and can be run manually, as scheduled task, or triggered on file changes in daemon mode.
A two way filesync script running on bash Linux, BSD, Android, MacOSX, Cygwin, MSYS2, Win10 bash and virtually any system supporting bash.
File synchronization is bidirectional, and can be run manually, as scheduled task, or triggered on file changes in monitor mode.
It is a command line rsync wrapper with a lot of additional features baked in.
This is a quickstart guide, you can find the full documentation on the author's site.
This is a quickstart guide, you can find the full documentation on the [author's site](http://www.netpower.fr/osync).
About
-----
osync provides the following capabilities
## About
osync provides the following capabilities:
- Local-Local and Local-Remote sync
- Fault tolerance with resume scenarios
- File ACL and extended attributes synchronization
- POSIX ACL and extended attributes synchronization
- Full script time control
- Soft deletions and multiple backups handling
- Before/after run command execution
@ -30,23 +38,34 @@ osync uses pidlocks to prevent multiple concurrent sync processes on/to the same
You may launch concurrent sync processes on the same system as long as the replicas to synchronize are different.
Multiple osync tasks may be launched sequentially by the osync-batch tool.
Currently, it has been tested on CentOS 5.x, 6.x, 7.x, Fedora 22-25, Debian 6-8, Linux Mint 14-18, Ubuntu 12.04-12.10, FreeBSD 8.3-11, Mac OS X and pfSense 2.3.x.
Microsoft Windows is supported via MSYS or Cygwin and now via Windows 10 bash.
Android support works via Termux.
Some users also have successfully used osync on Gentoo and created an openRC init scriptt for it.
## Tested platforms
| Operating system | Version |
|------------------|------------------------|
| AlmaLinux | 9 |
| Android\* | Not known |
| CentOS | 5.x, 6.x, 7.x |
| Fedora | 22-25 |
| FreeBSD | 8.3-11 |
| Debian | 6-11 |
| Linux Mint | 14-18 |
| macOS | Not known |
| pfSense | 2.3.x |
| QTS (x86) | 4.5.1 |
| Ubuntu | 12.04-22.04 |
| Windows\*\* | 10 |
\* via Termux.
\*\* via MSYS, Cygwin and WSL.
Some users also have successfully used osync on Gentoo and created an OpenRC init script for it.
## Installation
Installation
------------
osync has been designed not to delete any data, but rather to make backups of conflicting files or soft delete them.
Nevertheless, you should always have a neat backup of your data before trying a new sync tool.
You may get osync on the author's site (stable version) or on GitHub (stable or latest dev snapshot).
Getting osync via author's site on **http://www.netpower.fr/osync**
$ wget http://www.netpower.fr/projects/osync/osync.v1.2.tar.gz
$ tar xvf osync.v1.2.tar.gz
Getting osync via github (remove the -b "stable" if you want latest dev snapshot)
Getting osync via GitHub (remove the -b "stable" if you want latest dev snapshot)
$ git clone -b "stable" https://github.com/deajan/osync
$ cd osync
@ -54,10 +73,10 @@ Getting osync via github (remove the -b "stable" if you want latest dev snapshot
Installer script accepts some parameters for automation. Launch install.sh --help for options.
There is also a RPM file that should fit RHEL/CentOS/Fedora and basically any RPM based distro, see the github release.
Please note that RPM files will install osync to /usr/bin instead of /usr/local/bin in order to enforce good practices.
There is also an RPM file that should fit RHEL/CentOS/Fedora and basically any RPM based distro, see the GitHub release.
Please note that RPM files will install osync to `/usr/bin` instead of `/usr/local/bin` in order to enforce good practices.
osync will install itself to /usr/local/bin and an example configuration file will be installed to /etc/osync
osync will install itself to `/usr/local/bin` and an example configuration file will be installed to `/etc/osync`.
osync needs to run with bash shell. Using any other shell will most probably result in errors.
If bash is not your default shell, you may invoke it using
@ -68,78 +87,90 @@ On *BSD and BusyBox, be sure to have bash installed.
If you can't install osync, you may just copy osync.sh wherever you need it and run it from there.
Archlinux packages are available at https://aur.archlinux.org/packages/osync/ (thanks to Shadowigor, https://github.com/shadowigor)
Arch Linux packages are available at <https://aur.archlinux.org/packages/osync/> (thanks to Shadowigor, <https://github.com/shadowigor>).
## Upgrade from previous configuration files
Since osync v1.1 the config file format has changed in semantics and adds new config options.
Also, master is now called initiator and slave is now called target.
osync v1.2 also added multiple new configuration options.
osync v1.3 also added multiple new configuration options.
You can upgrade all v1.0x-v1.2-dev config files by running the upgrade script
You can upgrade all v1.0x-v1.3-dev config files by running the upgrade script
$ ./upgrade-v1.0x-v1.2x.sh /etc/osync/your-config-file.conf
$ ./upgrade-v1.0x-v1.3x.sh /etc/osync/your-config-file.conf
The script will back up your config file, update its content and try to connect to initiator and target replicas to update the state dir.
Usage
-----
Osync can work with in three flavors: Quick sync mode, configuration file mode, and daemon mode.
While quick sync mode is convenient to do fast syncs between some directories, a configuration file gives much more functionnality.
Please use double quotes as path delimiters. Do not use escaped characters in path names.
## Usage
Osync can work in 3 modes:
1. [:rocket: Quick sync mode](#quick-sync-mode)
2. [:gear: Configuration file mode](#configuration-file-mode)
3. [:mag_right: Monitor mode](#monitor-mode)
> [!NOTE]
> Please use double quotes as path delimiters. Do not use escaped characters in path names.
### <a id="quick-sync-mode"></a>:rocket: Quick sync mode
Quick sync mode is convenient to do fast syncs between some directories. However, the [configuration file mode](#configuration-file-mode) gives much more functionality.
QuickSync example
-----------------
# osync.sh --initiator="/path/to/dir1" --target="/path/to/remote dir2"
# osync.sh --initiator="/path/to/another dir" --target="ssh://user@host.com:22//path/to/dir2" --rsakey=/home/user/.ssh/id_rsa_private_key_example.com
Summary mode
------------
osync may output only file changes and errors with the following
#### Quick sync with minimal options
# osync.sh --initiator="/path/to/dir1" --target="/path/to/dir" --summary --errors-only --no-prefix
This also works in configuration file mode.
QuickSync with minimal options
------------------------------
In order to run osync the quickest way (without transferring file attributes, without soft deletion, without prior space checks and without remote connectivity checks), you may use the following:
# MINIMUM_SPACE=0 PRESERVE_ACL=no PRESERVE_XATTR=no SOFT_DELETE_DAYS=0 CONFLICT_BACKUP_DAYS=0 REMOTE_HOST_PING=no osync.sh --initiator="/path/to/another dir" --target="ssh://user@host.com:22//path/to/dir2" --rsakey=/home/user/.ssh/id_rsa_private_key_example.com
All the settings described here may also be configured in the conf file.
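For reference, these settings correspond to configuration file entries; a minimal sketch (key names taken from the command line above, see the example configuration file for the authoritative list and defaults):
MINIMUM_SPACE=0
PRESERVE_ACL=no
PRESERVE_XATTR=no
SOFT_DELETE_DAYS=0
CONFLICT_BACKUP_DAYS=0
REMOTE_HOST_PING=no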
Running osync with a Configuration file
---------------------------------------
You'll have to customize the sync.conf file according to your needs.
If you intend to sync a remote directory, osync will need a pair of private / public RSA keys to perform remote SSH connections.
Also, running sync as superuser requires to configure /etc/sudoers file.
Please read the documentation about remote sync setups.
Once you've customized a sync.conf file, you may run osync with the following test run:
### Summary mode
osync will output only file changes and errors with the following:
# osync.sh --initiator="/path/to/dir1" --target="/path/to/dir" --summary --errors-only --no-prefix
This also works in configuration file mode.
### <a id="configuration-file-mode"></a>:gear: Configuration file mode
You'll have to customize the `sync.conf` file according to your needs.
If you intend to sync a remote directory, osync will need a pair of private/public RSA keys to perform remote SSH connections. Also, running sync as superuser requires configuring the `/etc/sudoers` file.
> [!TIP]
> Read the [example configuration file](https://github.com/deajan/osync/blob/master/sync.conf.example) for documentation about remote sync setups.
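As a purely illustrative sketch (placeholder values, not a complete file), a remote sync configuration revolves around a few key settings:
INSTANCE_ID="my_sync"
INITIATOR_SYNC_DIR="/path/to/dir1"
TARGET_SYNC_DIR="ssh://user@host.com:22//path/to/dir2"
SSH_RSA_PRIVATE_KEY="/home/user/.ssh/id_rsa_private_key_example.com"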
Once you've customized a `sync.conf` file, you may run osync with the following test run:
# osync.sh /path/to/your.conf --dry
If everything went well, you may run the actual configuration with one of the following:
If everything went well, you may run the actual configuration with:
# osync.sh /path/to/your.conf
To display which files and attrs are actually synchronized and which files are to be soft deleted / are in conflict, use `--verbose` (you may mix it with `--silent` to write verbose output only to the log files):
# osync.sh /path/to/your.conf --verbose
Use `--no-maxtime` to disable execution time checks, which is useful for big initial sync tasks that might take a long time. Next runs should then only propagate changes and take much less time:
# osync.sh /path/to/your.conf --no-maxtime
Verbose option will display which files and attrs are actually synchronized and which files are to be soft deleted / are in conflict.
You may mix "--silent" and "--verbose" parameters to output verbose input only in the log files.
No-Maxtime option will disable execution time checks, which is usefull for big initial sync tasks that might take long time. Next runs should then only propagate changes and take much less time.
Once you're confident about your first runs, you may add osync as a cron task like the following in /etc/crontab which would run osync every 30 minutes:
Once you're confident about your first runs, you may add osync as a cron task like the following in `/etc/crontab` which would run osync every 30 minutes:
*/30 * * * * root /usr/local/bin/osync.sh /etc/osync/my_sync.conf --silent
Please note that this syntax works for RedHat / CentOS. On Debian you might want to remove the username (ie root) in order to make the crontab entry work.
Please note that this syntax works for RedHat/CentOS. On Debian you might want to remove the username (i.e. root) in order to make the crontab entry work.
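For instance, the same Debian entry without the username field could look like this (illustrative):
*/30 * * * * /usr/local/bin/osync.sh /etc/osync/my_sync.conf --silent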
Batch mode
----------
You may want to sequentially run multiple sync sets between the same servers. In that case, osync-batch.sh is a nice tool that will run every osync conf file, and, if a task fails,
### Batch mode
You may want to sequentially run multiple sync sets between the same servers. In that case, `osync-batch.sh` is a nice tool that will run every osync conf file, and, if a task fails,
run it again if there's still some time left.
The following example will run all .conf files found in /etc/osync, and retry 3 times every configuration that fails, if the whole sequential run took less than 2 hours.
To run all `.conf` files found in `/etc/osync`, retrying up to 3 times every configuration that fails as long as the whole sequential run took less than 2 hours, use:
# osync-batch.sh --path=/etc/osync --max-retries=3 --max-exec-time=7200
@ -147,71 +178,75 @@ Having multiple conf files can then be run in a single cron command like
00 00 * * * root /usr/local/bin/osync-batch.sh --path=/etc/osync --silent
Daemon mode
-----------
Additionaly, you may run osync in monitor mode, which means it will perform a sync upon file operations on initiator replica.
This can be a drawback on functionnality versus scheduled mode because this mode only launches a sync task if there are file modifications on the initiator replica, without being able to monitor the target replica.
Target replica changes are only synced when initiator replica changes occur, or when a given amount of time (default 600 seconds) passed without any changes on initiator replica.
File monitor mode can also be launched as a daemon with an init script. Please read the documentation for more info.
Note that monitoring changes requires inotifywait command (inotify-tools package for most Linux distributions).
BSD, MacOS X and Windows are not yet supported for this operation mode, unless you find a inotify-tools package on these OSes.
### <a id="monitor-mode"></a>:mag_right: Monitor mode
> [!NOTE]
> Monitoring changes requires the inotifywait command (`inotify-tools` package for most Linux distributions). BSD, macOS and Windows are not yet supported for this operation mode, unless you find an `inotify-tools` package on these OSes.
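As an example, `inotify-tools` can usually be installed from your distribution's package manager (package names may vary):
# apt-get install inotify-tools
# yum install inotify-tools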
Monitor mode will perform a sync upon file operations on initiator replica. This can be a drawback on functionality versus scheduled mode because this mode only launches a sync task if there are file modifications on the initiator replica, without being able to monitor the target replica. Target replica changes are only synced when initiator replica changes occur, or when a given amount of time (600 seconds by default) passed without any changes on initiator replica.
This mode can also be launched as a daemon with an init script. Please read the documentation for more info.
To use this mode, use `--on-changes`:
# osync.sh /etc/osync/my_sync.conf --on-changes
Osync file monitor mode may be run as system service with the osync-srv script.
You may run the install.sh script which should work in most cases or copy the files by hand (osync.sh to /usr/bin/local, sync.conf to /etc/osync, osync-srv to /etc/init.d for initV, osync-srv@.service to /usr/lib/systemd/system for systemd, osync-srv-openrc to /etc/init.d/osync-srv-openrc for OpenRC).
To run this mode as a system service with the `osync-srv` script, you can run the `install.sh` script (which should work in most cases) or copy the files by hand:
- `osync.sh` to `/usr/local/bin`
- `sync.conf` to `/etc/osync`
- For InitV, `osync-srv` to `/etc/init.d`
- For systemd, `osync-srv@.service` to `/usr/lib/systemd/system`
- For OpenRC, `osync-srv-openrc` to `/etc/init.d/osync-srv-openrc`
InitV specific instructions:
Any configuration file found in /etc/osync will create a osync daemon instance when service is launched on initV with:
For InitV (any configuration file found in `/etc/osync` will create an osync daemon instance when service is launched on initV):
$ service osync-srv start
$ chkconfig osync-srv on
Systemd specific (one service per config file)
For systemd, launch service (one service per config file to launch) with:
Launch service (one service per config file to launch) with:
$ systemctl start osync-srv@configfile.conf
$ systemctl enable osync-srv@configfile.conf
OpenRC specific instructions (user contrib)
For OpenRC (user contrib), launch service (one service per config file to launch) with:
Launch service (one service per config file to launch) with:
$ rc-update add osync-srv.configfile default
Security enhancements
---------------------
## Security enhancements
Remote SSH connection security can be improved by limiting what hostnames may connect, disabling some SSH options and using ssh filter.
Please read full documentation in order to configure ssh filter.
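As a purely illustrative sketch of the general mechanism (the exact ssh_filter.sh invocation and token handling are described in the full documentation), the remote account's authorized_keys entry combines a forced command with standard OpenSSH restrictions:
from="initiator.example.com",command="/usr/local/bin/ssh_filter.sh",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa AAAA... user@initiator
Here the hostname, key and comment are placeholders; the options themselves are standard OpenSSH authorized_keys options.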
Contributions
-------------
## Contributions
All kinds of contributions are welcome.
When submitting a PR, please be sure to modify files in dev directory (dev/n_osync.sh, dev/ofunctions.sh, dev/common_install.sh etc) as most of the main files are generated via merge.sh.
When submitting a PR, please be sure to modify files in the dev directory (`dev/n_osync.sh`, `dev/ofunctions.sh`, `dev/common_install.sh`, etc.) as most of the main files are generated via `merge.sh`.
When testing your contribs, generate files via merge.sh or use bootstrap.sh which generates a temporary version of n_osync.sh with all includes.
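For example (the config path is illustrative), from the dev directory:
$ ./merge.sh osync
$ ./bootstrap.sh --program=osync /etc/osync/test.conf --dry
The first command regenerates ../osync.sh (and the installer) from n_osync.sh and ofunctions.sh; the second runs the dev version directly with includes resolved on the fly.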
Unit tests are run by travis on every PR, but you may also run them manually which adds some tests that travis can't do, via dev/tests/run_tests.sh
SSH port can be changed on the fly via environment variable SSH_PORT, eg: SSH_PORT=2222 dev/tests/run_tests.sh
Unit tests are run by Travis on every PR, but you may also run them manually via `dev/tests/run_tests.sh`, which adds some tests that Travis can't do.
SSH port can be changed on the fly via environment variable SSH_PORT, e.g.:
# SSH_PORT=2222 dev/tests/run_tests.sh
Consider reading CODING_CONVENTIONS.TXT before submitting a patch.
Troubleshooting
---------------
You may find osync's logs in /var/log/osync.[INSTANCE_ID].log (or current directory if /var/log is not writable).
## Troubleshooting
You may find osync's logs in `/var/log/osync.[INSTANCE_ID].log` (or current directory if `/var/log` is not writable).
Additionally, you can use the `--verbose` flag to see what actions are going on.
When opening an issue, please post the corresponding log files. Also, you may run osync with _DEBUG option in order to have more precise logs, eg:
_DEBUG=yes ./osync.sh /path/to/conf
When opening an issue, please post the corresponding log files. Also, you may run osync with the _DEBUG option in order to have more precise logs, e.g.:
# _DEBUG=yes ./osync.sh /path/to/conf
## Uninstalling
Uninstalling
------------
The installer script also has an uninstall mode that will keep configuration files. Use it with
$ ./install.sh --remove
Author
------
Feel free to open an issue on github or mail me for support in my spare time :)
## Author
Feel free to open an issue on GitHub or mail me for support in my spare time :)
Orsiris de Jong | ozy@netpower.fr

View File

@ -1,4 +1,4 @@
Coding style used for my bash projects (v3.0 Dec 2016)
Coding style used for my bash projects (v3.2 Oct 2018)
As bash is clearly an error-prone scripting language, we'll use as much standard coding as possible, including some quick and dirty debug techniques described here.
++++++ Header
@ -162,6 +162,15 @@ if [ $retval -ne 0 ]; then
Logger "Some error message" "ERROR" $retval
fi
++++++ includes
Using merge.sh, the program may have includes like
include #### RemoteLogger SUBSET ####
All possible includes are listed in ofunctions.sh
Mostly, includes are needed to port functions to a remote shell without writing them again.
++++++ Remote execution
Remote commands should always invoke bash (using '"'"' to escape single quotes of 'bash -c "command"'). It is preferable to use an ssh heredoc in order to use plain code.
If local and remote code is identical, wrap remote code in a function so only minor modifications are needed.
Remote code return code is transmitted via exit.
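Ex (minimal sketch of the heredoc pattern; the variable names follow the current setups and are illustrative):
$SSH_CMD env _REMOTE_TOKEN="$_REMOTE_TOKEN" env PROGRAM="'$PROGRAM'" env SCRIPT_PID="'$SCRIPT_PID'" env TSTAMP="'$TSTAMP'" \
env LC_ALL=C $COMMAND_SUDO' bash -s' << 'ENDSSH' >> "$RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID.$TSTAMP" 2>&1
include #### RemoteLogger SUBSET ####
RemoteLogger "Hello from the remote shell" "NOTICE"
exit 0
ENDSSH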
@ -184,6 +193,9 @@ if [ $retval -ne 0 ]; then
Logger "Some error message" "ERROR" $retval
fi
We also need to transmit a couple of environment variables (RUN_DIR; PROGRAM; _LOGGER_VERBOSE... see current setups) in order to make standard code.
Include works here too.
++++++ File variables
All eval'd commands should write their output to a file called "$RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID"
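Ex (sketch, the command itself is illustrative):
cmd='df -P "$replicaPath" 2>&1'
eval "$cmd" > "$RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID"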
@ -197,15 +209,6 @@ Quoting happens outside the function call.
echo "$(myStringFunction $myStringVar)"
++++++ Finding code errors
Use shellcheck.net now and then (ignore SC2086 in our case)
Use a low tech approach to find uneven number of quotes per line
tr -cd "'\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
tr -cd "\"\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
++++++ ofunctions
As obackup and osync share alot of common functions, ofunctions.sh will host all shared code.
@ -258,3 +261,16 @@ When launching the program with 'bash -x', add SLEEP_TIME=1 so wait functions wo
Ex:
SLEEP_TIME=1 bash -x ./program.sh
++++++ Finding code errors
Before every release, shellcheck must be run
Also a grep -Eri "TODO|WIP" osync/* must be run in order to find potential release blockers
Use shellcheck.net now and then (ignore SC2086 in our case)
Use a low tech approach to find an uneven number of quotes per line
tr -cd "'\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'
tr -cd "\"\n" < my_bash_file.sh | awk 'length%2==1 {print NR, $0}'

17
dev/RELEASES Normal file
View File

@ -0,0 +1,17 @@
## Releases require the following
- Documentation must be up to date
- grep -Eri "TODO|WIP" osync/* must be run in order to find potential release blockers, including in unit tests and config files
- Run program and then use declare -p to find any leaked variables that should not exist outside of the program
- packaging files must be updated (RHEL / Arch)
- Before every release, shellcheck must be run
- ./shellcheck.sh -e SC2034 -e SC2068 ofunctions.sh
- ./shellcheck.sh n_osync.sh
- ./shellcheck.sh ../install.sh
- ./shellcheck.sh ../osync-batch.sh
- ./shellcheck.sh ../ssh_filter.sh
- Unexpansion of main and subprograms must be done
- Arch repo must be updated
- Source must be put to download on www.netpower.fr/osync
- Tests must be run against all supported operating systems via run_tests.sh

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
## dev pre-processor bootstrap rev 2018062501
## dev pre-processor bootstrap rev 2019052001
## Yeah !!! A really tech sounding name... In fact it's just include emulation in bash
function Usage {
@ -8,7 +8,7 @@ function Usage {
echo "Creates and executes $0.tmp.sh"
echo "Usage:"
echo ""
echo "$0 --program=osync|osync_target_helper|obackup|pmocr [options to pass to program]"
echo "$0 --program=osync|obackup|pmocr [options to pass to program]"
echo "Can also be run with BASHVERBOSE=yes environment variable in order to prefix program with bash -x"
}
@ -19,16 +19,16 @@ if [ ! -f "./merge.sh" ]; then
fi
bootstrapProgram=""
opts=""
opts=()
outputFileName="$0"
for i in "$@"; do
case $i in
for i in "${@}"; do
case "$i" in
--program=*)
bootstrapProgram="${i##*=}"
;;
*)
opts=$opts" $i"
opts+=("$i")
;;
esac
done
@ -44,7 +44,7 @@ else
__PREPROCESSOR_Constants
if [ ! -f "$__PREPROCESSOR_PROGRAM_EXEC" ]; then
echo "Cannot find file [n_$bootstrapProgram.sh]."
echo "Cannot find file $__PREPROCESSOR_PROGRAM executable [n_$bootstrapProgram.sh]."
exit 1
fi
fi
@ -69,7 +69,7 @@ if type termux-fix-shebang > /dev/null 2>&1; then
fi
if [ "$BASHVERBOSE" == "yes" ]; then
bash -x "$outputFileName.tmp.sh" $opts
bash -x "$outputFileName.tmp.sh" "${opts[@]}"
else
"$outputFileName.tmp.sh" $opts
"$outputFileName.tmp.sh" "${opts[@]}"
fi

View File

@ -1,9 +1,9 @@
#!/usr/bin/env bash
SUBPROGRAM=[prgname]
PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones
AUTHOR="(L) 2013-2017 by Orsiris de Jong"
AUTHOR="(L) 2013-2020 by Orsiris de Jong"
CONTACT="http://www.netpower.fr - ozy@netpower.fr"
PROGRAM_BUILD=2016120401
PROGRAM_BUILD=2020031502
## Runs an osync /obackup instance for every conf file found
## If an instance fails, run it again if time permits
@ -26,36 +26,19 @@ else
LOG_FILE=./$SUBPROGRAM-batch.log
fi
## Default directory where to store temporary run files
if [ -w /tmp ]; then
RUN_DIR=/tmp
elif [ -w /var/tmp ]; then
RUN_DIR=/var/tmp
else
RUN_DIR=.
fi
# No need to edit under this line ##############################################################
function _logger {
local value="${1}" # What to log
echo -e "$value" >> "$LOG_FILE"
}
function Logger {
local value="${1}" # What to log
local level="${2}" # Log level: DEBUG, NOTICE, WARN, ERROR, CRITIAL
prefix="$(date) - "
if [ "$level" == "CRITICAL" ]; then
_logger "$prefix\e[41m$value\e[0m"
elif [ "$level" == "ERROR" ]; then
_logger "$prefix\e[91m$value\e[0m"
elif [ "$level" == "WARN" ]; then
_logger "$prefix\e[93m$value\e[0m"
elif [ "$level" == "NOTICE" ]; then
_logger "$prefix$value"
elif [ "$level" == "DEBUG" ]; then
if [ "$DEBUG" == "yes" ]; then
_logger "$prefix$value"
fi
else
_logger "\e[41mLogger function called without proper loglevel.\e[0m"
_logger "$prefix$value"
fi
}
include #### Logger SUBSET ####
include #### CleanUp SUBSET ####
include #### GenericTrapQuit SUBSET ####
function CheckEnvironment {
## osync / obackup executable full path can be set here if it cannot be found on the system
@ -145,6 +128,8 @@ function Usage {
exit 128
}
trap GenericTrapQuit TERM EXIT HUP QUIT
opts=""
for i in "$@"
do

288
dev/common_install.sh Executable file → Normal file
View File

@ -2,8 +2,6 @@
## Installer script suitable for osync / obackup / pmocr
include #### _OFUNCTIONS_BOOTSTRAP SUBSET ####
PROGRAM=[prgname]
PROGRAM_VERSION=$(grep "PROGRAM_VERSION=" $PROGRAM.sh)
@ -12,12 +10,15 @@ PROGRAM_BINARY=$PROGRAM".sh"
PROGRAM_BATCH=$PROGRAM"-batch.sh"
SSH_FILTER="ssh_filter.sh"
SCRIPT_BUILD=2018070201
SCRIPT_BUILD=2025012001
INSTANCE_ID="installer-$SCRIPT_BUILD"
## osync / obackup / pmocr / zsnap install script
## Tested on RHEL / CentOS 6 & 7, Fedora 23, Debian 7 & 8, Mint 17 and FreeBSD 8, 10 and 11
## Please adapt this to fit your distro needs
include #### OFUNCTIONS MICRO SUBSET ####
# Get current install.sh path from http://stackoverflow.com/a/246128/2635443
SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
@ -26,56 +27,6 @@ _STATS=1
ACTION="install"
FAKEROOT=""
function GetCommandlineArguments {
for i in "$@"; do
case $i in
--prefix=*)
FAKEROOT="${i##*=}"
;;
--silent)
_LOGGER_SILENT=true
;;
--no-stats)
_STATS=0
;;
--remove)
ACTION="uninstall"
;;
--help|-h|-?)
Usage
;;
*)
Logger "Unknown option '$i'" "SIMPLE"
Usage
exit
;;
esac
done
}
GetCommandlineArguments "$@"
CONF_DIR=$FAKEROOT/etc/$PROGRAM
BIN_DIR="$FAKEROOT/usr/local/bin"
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
SERVICE_DIR_OPENRC=$FAKEROOT/etc/init.d
if [ "$PROGRAM" == "osync" ]; then
SERVICE_NAME="osync-srv"
elif [ "$PROGRAM" == "pmocr" ]; then
SERVICE_NAME="pmocr-srv"
fi
SERVICE_FILE_INIT="$SERVICE_NAME"
SERVICE_FILE_SYSTEMD_SYSTEM="$SERVICE_NAME@.service"
SERVICE_FILE_SYSTEMD_USER="$SERVICE_NAME@.service.user"
SERVICE_FILE_OPENRC="$SERVICE_NAME-openrc"
## Generic code
## Default log file
if [ -w "$FAKEROOT/var/log" ]; then
LOG_FILE="$FAKEROOT/var/log/$PROGRAM-install.log"
@ -85,13 +36,15 @@ else
LOG_FILE="./$PROGRAM-install.log"
fi
include #### Logger SUBSET ####
include #### UrlEncode SUBSET ####
include #### GetLocalOS SUBSET ####
include #### GetConfFileValue SUBSET ####
include #### CleanUp SUBSET ####
include #### GenericTrapQuit SUBSET ####
function SetLocalOSSettings {
USER=root
DO_INIT=true
# LOCAL_OS and LOCAL_OS_FULL are global variables set at GetLocalOS
@ -101,10 +54,12 @@ function SetLocalOSSettings {
;;
*"MacOSX"*)
GROUP=admin
DO_INIT=false
;;
*"msys"*|*"Cygwin"*)
*"Cygwin"*|*"Android"*|*"msys"*|*"BusyBox"*)
USER=""
GROUP=""
DO_INIT=false
;;
*)
GROUP=root
@ -112,12 +67,12 @@ function SetLocalOSSettings {
esac
if [ "$LOCAL_OS" == "Android" ] || [ "$LOCAL_OS" == "BusyBox" ]; then
Logger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly." "SIMPLE"
Logger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly." "CRITICAL"
exit 1
fi
if ([ "$USER" != "" ] && [ "$(whoami)" != "$USER" ] && [ "$FAKEROOT" == "" ]); then
Logger "Must be run as $USER." "SIMPLE"
Logger "Must be run as $USER." "CRITICAL"
exit 1
fi
@ -125,35 +80,68 @@ function SetLocalOSSettings {
}
function GetInit {
init="none"
if [ -f /sbin/openrc-run ]; then
init="openrc"
Logger "Detected openrc." "SIMPLE"
Logger "Detected openrc." "NOTICE"
elif [ -f /usr/lib/systemd/systemd ]; then
init="systemd"
Logger "Detected systemd." "NOTICE"
elif [ -f /sbin/init ]; then
if type -p file > /dev/null 2>&1; then
if file /sbin/init | grep systemd > /dev/null; then
init="systemd"
Logger "Detected systemd." "SIMPLE"
Logger "Detected systemd." "NOTICE"
else
init="initV"
Logger "Detected initV." "SIMPLE"
fi
else
Logger "Can't detect initV, systemd or openRC. Service files won't be installed. You can still run $PROGRAM manually or via cron." "SIMPLE"
init="initV"
fi
if [ $init == "initV" ]; then
Logger "Detected initV." "NOTICE"
fi
else
Logger "Can't detect initV, systemd or openRC. Service files won't be installed. You can still run $PROGRAM manually or via cron." "WARN"
init="none"
fi
}
function CreateDir {
local dir="${1}"
local dirMask="${2}"
local dirUser="${3}"
local dirGroup="${4}"
if [ ! -d "$dir" ]; then
(
if [ $(IsInteger $dirMask) -eq 1 ]; then
umask $dirMask
fi
mkdir -p "$dir"
)
if [ $? == 0 ]; then
Logger "Created directory [$dir]." "SIMPLE"
Logger "Created directory [$dir]." "NOTICE"
else
Logger "Cannot create directory [$dir]." "SIMPLE"
Logger "Cannot create directory [$dir]." "CRITICAL"
exit 1
fi
fi
if [ "$dirUser" != "" ]; then
userGroup="$dirUser"
if [ "$dirGroup" != "" ]; then
userGroup="$userGroup"":$dirGroup"
fi
chown "$userGroup" "$dir"
if [ $? != 0 ]; then
Logger "Could not set directory ownership on [$dir] to [$userGroup]." "CRITICAL"
exit 1
else
Logger "Set file ownership on [$dir] to [$userGroup]." "NOTICE"
fi
fi
}
function CopyFile {
@ -167,32 +155,33 @@ function CopyFile {
local overwrite="${8:-false}"
local userGroup=""
local oldFileName
if [ "$destFileName" == "" ]; then
destFileName="$sourceFileName"
fi
if [ -f "$destPath/$destFileName" ] && [ $overwrite == false ]; then
destfileName="$sourceFileName.new"
Logger "Copying [$sourceFileName] to [$destPath/$destFilename]." "SIMPLE"
destFileName="$sourceFileName.new"
Logger "Copying [$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
fi
cp "$sourcePath/$sourceFileName" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Cannot copy [$sourcePath/$sourceFileName] to [$destPath/$destFileName]. Make sure to run install script in the directory containing all other files." "SIMPLE"
Logger "Also make sure you have permissions to write to [$BIN_DIR]." "SIMPLE"
Logger "Cannot copy [$sourcePath/$sourceFileName] to [$destPath/$destFileName]. Make sure to run install script in the directory containing all other files." "CRITICAL"
Logger "Also make sure you have permissions to write to [$BIN_DIR]." "ERROR"
exit 1
else
Logger "Copied [$sourcePath/$sourceFileName] to [$destPath/$destFileName]." "SIMPLE"
if [ "$fileMod" != "" ]; then
Logger "Copied [$sourcePath/$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
if [ "$(IsInteger $fileMod)" -eq 1 ]; then
chmod "$fileMod" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Cannot set file permissions of [$destPath/$destFileName] to [$fileMod]." "SIMPLE"
Logger "Cannot set file permissions of [$destPath/$destFileName] to [$fileMod]." "CRITICAL"
exit 1
else
Logger "Set file permissions to [$fileMod] on [$destPath/$destFileName]." "SIMPLE"
Logger "Set file permissions to [$fileMod] on [$destPath/$destFileName]." "NOTICE"
fi
elif [ "$fileMod" != "" ]; then
Logger "Bogus filemod [$fileMod] for [$destPath] given." "WARN"
fi
if [ "$fileUser" != "" ]; then
@ -204,10 +193,10 @@ function CopyFile {
chown "$userGroup" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Could not set file ownership on [$destPath/$destFileName] to [$userGroup]." "SIMPLE"
Logger "Could not set file ownership on [$destPath/$destFileName] to [$userGroup]." "CRITICAL"
exit 1
else
Logger "Set file ownership on [$destPath/$destFileName] to [$userGroup]." "SIMPLE"
Logger "Set file ownership on [$destPath/$destFileName] to [$userGroup]." "NOTICE"
fi
fi
fi
@ -259,44 +248,60 @@ function CopyServiceFiles {
CreateDir "$SERVICE_DIR_SYSTEMD_USER"
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "" "" "" true
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]." "SIMPLE"
Logger "Can be activated with [systemctl start SERVICE_NAME@instance.conf] where instance.conf is the name of the config file in $CONF_DIR." "SIMPLE"
Logger "Can be enabled on boot with [systemctl enable $SERVICE_NAME@instance.conf]." "SIMPLE"
Logger "In userland, active with [systemctl --user start $SERVICE_NAME@instance.conf]." "SIMPLE"
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
fi
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "" "" "" true
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]." "NOTICE"
Logger "Can be activated with [systemctl start SERVICE_NAME@instance.conf] where instance.conf is the name of the config file in $CONF_DIR." "NOTICE"
Logger "Can be enabled on boot with [systemctl enable $SERVICE_NAME@instance.conf]." "NOTICE"
Logger "In userland, active with [systemctl --user start $SERVICE_NAME@instance.conf]." "NOTICE"
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_INIT" ] && [ -d "$SERVICE_DIR_INIT" ]); then
#CreateDir "$SERVICE_DIR_INIT"
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$SERVICE_FILE_INIT" "$SERVICE_FILE_INIT" "755" "" "" true
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_INIT]." "SIMPLE"
Logger "Can be activated with [service $SERVICE_FILE_INIT start]." "SIMPLE"
Logger "Can be enabled on boot with [chkconfig $SERVICE_FILE_INIT on]." "SIMPLE"
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_INIT" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "755" "" "" true
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_INIT]." "NOTICE"
Logger "Can be activated with [service $SERVICE_FILE_INIT start]." "NOTICE"
Logger "Can be enabled on boot with [chkconfig $SERVICE_FILE_INIT on]." "NOTICE"
elif ([ "$init" == "openrc" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_OPENRC" ] && [ -d "$SERVICE_DIR_OPENRC" ]); then
# Rename service to usual service file
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$SERVICE_FILE_OPENRC" "$SERVICE_FILE_INIT" "755" "" "" true
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_OPENRC]." "SIMPLE"
Logger "Can be activated with [rc-update add $SERVICE_NAME.instance] where instance is a configuration file found in /etc/osync." "SIMPLE"
if [ -f "$SCRPT_PATH/$TARGET_HELPER_SERVICE_FILE_OPENRC" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "755" "" "" true
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_OPENRC]." "NOTICE"
Logger "Can be activated with [rc-update add $SERVICE_NAME.instance] where instance is a configuration file found in /etc/osync." "NOTICE"
else
Logger "Cannot properly find how to deal with init on this system. Skipping service file installation." "SIMPLE"
Logger "Cannot properly find how to deal with init on this system. Skipping service file installation." "NOTICE"
fi
}
function Statistics {
if type wget > /dev/null; then
if type wget > /dev/null 2>&1; then
wget -qO- "$STATS_LINK" > /dev/null 2>&1
if [ $? == 0 ]; then
return 0
fi
fi
if type curl > /dev/null; then
if type curl > /dev/null 2>&1; then
curl "$STATS_LINK" -o /dev/null > /dev/null 2>&1
if [ $? == 0 ]; then
return 0
fi
fi
Logger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please." "SIMPLE"
Logger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please." "WARN"
return 1
}
@ -306,12 +311,12 @@ function RemoveFile {
if [ -f "$file" ]; then
rm -f "$file"
if [ $? != 0 ]; then
Logger "Could not remove file [$file]." "SIMPLE"
Logger "Could not remove file [$file]." "ERROR"
else
Logger "Removed file [$file]." "SIMPLE"
Logger "Removed file [$file]." "NOTICE"
fi
else
Logger "File [$file] not found. Skipping." "SIMPLE"
Logger "File [$file] not found. Skipping." "NOTICE"
fi
}
@ -325,13 +330,25 @@ function RemoveAll {
if [ ! -f "$BIN_DIR/osync.sh" ] && [ ! -f "$BIN_DIR/obackup.sh" ]; then # Check if any other program requiring ssh filter is present before removal
RemoveFile "$BIN_DIR/$SSH_FILTER"
else
Logger "Skipping removal of [$BIN_DIR/$SSH_FILTER] because other programs present that need it." "SIMPLE"
Logger "Skipping removal of [$BIN_DIR/$SSH_FILTER] because other programs present that need it." "NOTICE"
fi
# Try to uninstall every possible service file
if [ $init == "systemd" ]; then
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$SERVICE_FILE_SYSTEMD_SYSTEM"
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_USER"
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM"
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER"
elif [ $init == "initV" ]; then
RemoveFile "$SERVICE_DIR_INIT/$SERVICE_FILE_INIT"
Logger "Skipping configuration files in [$CONF_DIR]. You may remove this directory manually." "SIMPLE"
RemoveFile "$SERVICE_DIR_INIT/$TARGET_HELPER_SERVICE_FILE_INIT"
elif [ $init == "openrc" ]; then
RemoveFile "$SERVICE_DIR_OPENRC/$SERVICE_FILE_OPENRC"
RemoveFile "$SERVICE_DIR_OPENRC/$TARGET_HELPER_SERVICE_FILE_OPENRC"
else
Logger "Can uninstall only from initV, systemd or openRC." "WARN"
fi
Logger "Skipping configuration files in [$CONF_DIR]. You may remove this directory manually." "NOTICE"
}
function Usage {
@ -344,15 +361,88 @@ function Usage {
exit 127
}
############################## Script entry point
function GetCommandlineArguments {
for i in "$@"; do
case $i in
--prefix=*)
FAKEROOT="${i##*=}"
;;
--silent)
_LOGGER_SILENT=true
;;
--no-stats)
_STATS=0
;;
--remove)
ACTION="uninstall"
;;
--help|-h|-?)
Usage
;;
*)
Logger "Unknown option '$i'" "ERROR"
Usage
exit
;;
esac
done
}
GetCommandlineArguments "$@"
CONF_DIR=$FAKEROOT/etc/$PROGRAM
BIN_DIR="$FAKEROOT/usr/local/bin"
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
SERVICE_DIR_OPENRC=$FAKEROOT/etc/init.d
if [ "$PROGRAM" == "osync" ]; then
SERVICE_NAME="osync-srv"
TARGET_HELPER_SERVICE_NAME="osync-target-helper-srv"
TARGET_HELPER_SERVICE_FILE_INIT="$TARGET_HELPER_SERVICE_NAME"
TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM="$TARGET_HELPER_SERVICE_NAME@.service"
TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER="$TARGET_HELPER_SERVICE_NAME@.service.user"
TARGET_HELPER_SERVICE_FILE_OPENRC="$TARGET_HELPER_SERVICE_NAME-openrc"
elif [ "$PROGRAM" == "pmocr" ]; then
SERVICE_NAME="pmocr-srv"
fi
SERVICE_FILE_INIT="$SERVICE_NAME"
SERVICE_FILE_SYSTEMD_SYSTEM="$SERVICE_NAME@.service"
SERVICE_FILE_SYSTEMD_USER="$SERVICE_NAME@.service.user"
SERVICE_FILE_OPENRC="$SERVICE_NAME-openrc"
## Generic code
trap GenericTrapQuit TERM EXIT HUP QUIT
if [ ! -w "$(dirname $LOG_FILE)" ]; then
echo "Cannot write to log [$(dirname $LOG_FILE)]."
else
Logger "Script begin, logging to [$LOG_FILE]." "DEBUG"
fi
# Set default umask
umask 0022
GetLocalOS
SetLocalOSSettings
# On Mac OS this always produces a warning which causes the installer to fail with exit code 2
# Since we know it won't work anyway, and that's fine, just skip this step
if $DO_INIT; then
GetInit
fi
STATS_LINK="http://instcount.netpower.fr?program=$PROGRAM&version=$PROGRAM_VERSION&os=$OS&action=$ACTION"
if [ "$ACTION" == "uninstall" ]; then
RemoveAll
Logger "$PROGRAM uninstalled." "SIMPLE"
Logger "$PROGRAM uninstalled." "NOTICE"
else
CreateDir "$CONF_DIR"
CreateDir "$BIN_DIR"
@ -361,10 +451,10 @@ else
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "pmocr" ]; then
CopyServiceFiles
fi
Logger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM" "SIMPLE"
Logger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM_BINARY" "NOTICE"
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "obackup" ]; then
echo ""
Logger "If connecting remotely, consider setup ssh filter to enhance security." "SIMPLE"
Logger "If connecting remotely, consider setup ssh filter to enhance security." "NOTICE"
echo ""
fi
fi
@ -373,7 +463,7 @@ if [ $_STATS -eq 1 ]; then
if [ $_LOGGER_SILENT == true ]; then
Statistics
else
Logger "In order to make usage statistics, the script would like to connect to $STATS_LINK" "SIMPLE"
Logger "In order to make usage statistics, the script would like to connect to $STATS_LINK" "NOTICE"
read -r -p "No data except those in the URL will be sent. Allow [Y/n] " response
case $response in
[nN])

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,10 +1,13 @@
#!/usr/bin/env bash
## MERGE 2018062501
## MERGE 2020031501
## Merges ofunctions.sh and n_program.sh into program.sh
## Adds installer
PROGRAM=merge
INSTANCE_ID=dev
function Usage {
echo "Merges ofunctions.sh and n_program.sh into debug_program.sh and ../program.sh"
echo "Usage"
@ -12,30 +15,24 @@ function Usage {
}
function __PREPROCESSOR_Merge {
local PROGRAM="$1"
local nPROGRAM="$1"
VERSION=$(grep "PROGRAM_VERSION=" n_$PROGRAM.sh)
if [ -f "$nPROGRAM" ]; then
Logger "$nPROGRAM is not found in local path." "CRITICAL"
exit 1
fi
VERSION=$(grep "PROGRAM_VERSION=" n_$nPROGRAM.sh)
VERSION=${VERSION#*=}
__PREPROCESSOR_Constants
source "ofunctions.sh"
if [ $? != 0 ]; then
echo "Please run $0 in dev directory with ofunctions.sh"
exit 1
fi
__PREPROCESSOR_Unexpand "n_$PROGRAM.sh" "debug_$PROGRAM.sh"
__PREPROCESSOR_Unexpand "n_$nPROGRAM.sh" "debug_$nPROGRAM.sh"
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$PROGRAM.sh"
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$nPROGRAM.sh"
done
__PREPROCESSOR_CleanDebug "$PROGRAM"
rm -f tmp_$PROGRAM.sh
if [ $? != 0 ]; then
Logger "Cannot remove tmp_$PROGRAM.sh" "SIMPLE"
exit 1
fi
__PREPROCESSOR_CleanDebug "debug_$nPROGRAM.sh" "../$nPROGRAM.sh"
}
function __PREPROCESSOR_Constants {
@ -46,7 +43,10 @@ function __PREPROCESSOR_Constants {
__PREPROCESSOR_SUBSETS=(
'#### OFUNCTIONS FULL SUBSET ####'
'#### OFUNCTIONS MINI SUBSET ####'
'#### OFUNCTIONS MICRO SUBSET ####'
'#### PoorMansRandomGenerator SUBSET ####'
'#### _OFUNCTIONS_BOOTSTRAP SUBSET ####'
'#### RUN_DIR SUBSET ####'
'#### DEBUG SUBSET ####'
'#### TrapError SUBSET ####'
'#### RemoteLogger SUBSET ####'
@ -60,6 +60,9 @@ function __PREPROCESSOR_Constants {
'#### GetConfFileValue SUBSET ####'
'#### SetConfFileValue SUBSET ####'
'#### CheckRFC822 SUBSET ####'
'#### CleanUp SUBSET ####'
'#### GenericTrapQuit SUBSET ####'
'#### FileMove SUBSET ####'
)
}
@ -69,7 +72,7 @@ function __PREPROCESSOR_Unexpand {
unexpand "$source" > "$destination"
if [ $? != 0 ]; then
Logger "Cannot unexpand [$source] to [$destination]." "SIMPLE"
Logger "Cannot unexpand [$source] to [$destination]." "CRITICAL"
exit 1
fi
}
@ -82,64 +85,75 @@ function __PREPROCESSOR_MergeSubset {
sed -n "/$subsetBegin/,/$subsetEnd/p" "$subsetFile" > "$subsetFile.$subsetBegin"
if [ $? != 0 ]; then
Logger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]." "SIMPLE"
Logger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]." "CRTICIAL"
exit 1
fi
sed "/include $subsetBegin/r $subsetFile.$subsetBegin" "$mergedFile" | grep -v -E "$subsetBegin\$|$subsetEnd\$" > "$mergedFile.tmp"
if [ $? != 0 ]; then
Logger "Cannot add subset [$subsetBegin] to [$mergedFile]." "SIMPLE"
Logger "Cannot add subset [$subsetBegin] to [$mergedFile]." "CRITICAL"
exit 1
fi
rm -f "$subsetFile.$subsetBegin"
if [ $? != 0 ]; then
Logger "Cannot remove temporary subset [$subsetFile.$subsetBegin]." "SIMPLE"
Logger "Cannot remove temporary subset [$subsetFile.$subsetBegin]." "CRITICAL"
exit 1
fi
rm -f "$mergedFile"
if [ $? != 0 ]; then
Logger "Cannot remove merged original file [$mergedFile]." "SIMPLE"
Logger "Cannot remove merged original file [$mergedFile]." "CRITICAL"
exit 1
fi
mv "$mergedFile.tmp" "$mergedFile"
if [ $? != 0 ]; then
Logger "Cannot move merged tmp file to original [$mergedFile]." "SIMPLE"
Logger "Cannot move merged tmp file to original [$mergedFile]." "CRITICAL"
exit 1
fi
}
function __PREPROCESSOR_CleanDebug {
local PROGRAM="$1"
local source="${1}"
local destination="${2:-$source}"
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' debug_$PROGRAM.sh | grep -v "$PARANOIA_DEBUG_LINE" > ../$PROGRAM.sh
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' "$source" | grep -v "$PARANOIA_DEBUG_LINE" > "$destination.tmp"
if [ $? != 0 ]; then
Logger "Cannot remove PARANOIA_DEBUG code from standard build." "SIMPLE"
exit 1
fi
chmod +x "debug_$PROGRAM.sh"
if [ $? != 0 ]; then
Logger "Cannot chmod debug_$PROGRAM.sh" "SIMPLE"
Logger "Cannot remove PARANOIA_DEBUG code from standard build." "CRITICAL"
exit 1
else
Logger "Prepared ./debug_$PROGRAM.sh" "SIMPLE"
mv -f "$destination.tmp" "$destination"
if [ $? -ne 0 ]; then
Logger "Cannot move [$destination.tmp] to [$destination]." "CRITICAL"
exit 1
fi
chmod +x "../$PROGRAM.sh"
fi
chmod +x "$source"
if [ $? != 0 ]; then
Logger "Cannot chmod $PROGRAM.sh" "SIMPLE"
Logger "Cannot chmod [$source]." "CRITICAL"
exit 1
else
Logger "Prepared ../$PROGRAM.sh" "SIMPLE"
Logger "Prepared [$source]." "NOTICE"
fi
if [ "$source" != "$destination" ]; then
chmod +x "$destination"
if [ $? != 0 ]; then
Logger "Cannot chmod [$destination]." "CRITICAL"
exit 1
else
Logger "Prepared [$destination]." "NOTICE"
fi
fi
}
function __PREPROCESSOR_CopyCommons {
local PROGRAM="$1"
local nPROGRAM="$1"
sed "s/\[prgname\]/$PROGRAM/g" common_install.sh > ../install.sh
sed "s/\[prgname\]/$nPROGRAM/g" common_install.sh > ../install.sh
if [ $? != 0 ]; then
Logger "Cannot assemble install." "SIMPLE"
Logger "Cannot assemble install." "CRITICAL"
exit 1
fi
@ -147,45 +161,34 @@ function __PREPROCESSOR_CopyCommons {
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../install.sh"
done
#sed "s/\[version\]/$VERSION/g" ../tmp_install.sh > ../install.sh
#if [ $? != 0 ]; then
# Logger "Cannot change install version." "SIMPLE"
# exit 1
#fi
__PREPROCESSOR_CleanDebug "../install.sh"
if [ -f "common_batch.sh" ]; then
sed "s/\[prgname\]/$PROGRAM/g" common_batch.sh > ../$PROGRAM-batch.sh
sed "s/\[prgname\]/$nPROGRAM/g" common_batch.sh > ../$nPROGRAM-batch.sh
if [ $? != 0 ]; then
Logger "Cannot assemble batch runner." "SIMPLE"
Logger "Cannot assemble batch runner." "CRITICAL"
exit 1
fi
chmod +x ../$PROGRAM-batch.sh
if [ $? != 0 ]; then
Logger "Cannot chmod $PROGRAM-batch.sh" "SIMPLE"
exit 1
else
Logger "Prepared ../$PROGRAM-batch.sh" "SIMPLE"
fi
fi
chmod +x ../install.sh
if [ $? != 0 ]; then
Logger "Cannot chmod install.sh" "SIMPLE"
exit 1
else
Logger "Prepared ../install.sh" "SIMPLE"
fi
rm -f ../tmp_install.sh
if [ $? != 0 ]; then
Logger "Cannot chmod $PROGRAM.sh" "SIMPLE"
exit 1
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../$nPROGRAM-batch.sh"
done
__PREPROCESSOR_CleanDebug "../$nPROGRAM-batch.sh"
fi
}
# If sourced don't do anything
if [ "$(basename $0)" == "merge.sh" ]; then
if [ "$1" == "osync" ]; then
source "./ofunctions.sh"
if [ $? != 0 ]; then
echo "Please run $0 in dev directory with ofunctions.sh"
exit 1
fi
trap GenericTrapQuit TERM EXIT HUP QUIT
if [ "$1" == "osync" ]; then
__PREPROCESSOR_Merge osync
__PREPROCESSOR_Merge osync_target_helper
__PREPROCESSOR_CopyCommons osync
elif [ "$1" == "obackup" ]; then
__PREPROCESSOR_Merge obackup

File diff suppressed because it is too large

View File

@ -1,455 +0,0 @@
#!/usr/bin/env bash
PROGRAM="osync-target-helper" # Rsync based two way sync engine with fault tolerance
AUTHOR="(C) 2013-2017 by Orsiris de Jong"
CONTACT="http://www.netpower.fr/osync - ozy@netpower.fr"
PROGRAM_VERSION=1.2.2-dev
PROGRAM_BUILD=2017061901
IS_STABLE=no
include #### OFUNCTIONS FULL SUBSET ####
# If using "include" statements, make sure the script does not get executed unless it's loaded by bootstrap
include #### _OFUNCTIONS_BOOTSTRAP SUBSET ####
[ "$_OFUNCTIONS_BOOTSTRAP" != true ] && echo "Please use bootstrap.sh to load this dev version of $(basename $0)" && exit 1
_LOGGER_PREFIX="time"
## Working directory. This directory exists in any replica and contains state files, backups, soft deleted files etc
OSYNC_DIR=".osync_workdir"
function TrapQuit {
local exitcode
# Get ERROR / WARN alert flags from subprocesses that call Logger
if [ -f "$RUN_DIR/$PROGRAM.Logger.warn.$SCRIPT_PID.$TSTAMP" ]; then
WARN_ALERT=true
fi
if [ -f "$RUN_DIR/$PROGRAM.Logger.error.$SCRIPT_PID.$TSTAMP" ]; then
ERROR_ALERT=true
fi
if [ $ERROR_ALERT == true ]; then
Logger "$PROGRAM finished with errors." "ERROR"
if [ "$_DEBUG" != "yes" ]
then
SendAlert
else
Logger "Debug mode, no alert mail will be sent." "NOTICE"
fi
exitcode=1
elif [ $WARN_ALERT == true ]; then
Logger "$PROGRAM finished with warnings." "WARN"
if [ "$_DEBUG" != "yes" ]
then
SendAlert
else
Logger "Debug mode, no alert mail will be sent." "NOTICE"
fi
exitcode=2 # Warning exit code must not force daemon mode to quit
else
Logger "$PROGRAM finished." "ALWAYS"
exitcode=0
fi
CleanUp
KillChilds $SCRIPT_PID > /dev/null 2>&1
exit $exitcode
}
function CheckEnvironment {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
if ! type ssh > /dev/null 2>&1 ; then
Logger "ssh not present. Cannot start sync." "CRITICAL"
exit 1
fi
if [ "$SSH_PASSWORD_FILE" != "" ] && ! type sshpass > /dev/null 2>&1 ; then
Logger "sshpass not present. Cannot use password authentication." "CRITICAL"
exit 1
fi
}
# Only gets checked in config file mode where all values should be present
function CheckCurrentConfig {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
# Check all variables that should contain "yes" or "no"
declare -a yes_no_vars=(SUDO_EXEC SSH_COMPRESSION SSH_IGNORE_KNOWN_HOSTS REMOTE_HOST_PING)
for i in "${yes_no_vars[@]}"; do
test="if [ \"\$$i\" != \"yes\" ] && [ \"\$$i\" != \"no\" ]; then Logger \"Bogus $i value [\$$i] defined in config file. Correct your config file or update it using the update script if using and old version.\" \"CRITICAL\"; exit 1; fi"
eval "$test"
done
# Check all variables that should contain a numerical value >= 0
declare -a num_vars=(MIN_WAIT MAX_WAIT)
for i in "${num_vars[@]}"; do
test="if [ $(IsNumericExpand \"\$$i\") -eq 0 ]; then Logger \"Bogus $i value [\$$i] defined in config file. Correct your config file or update it using the update script if using and old version.\" \"CRITICAL\"; exit 1; fi"
eval "$test"
done
}
# Gets checked in quicksync and config file mode
function CheckCurrentConfigAll {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
local tmp
if [ "$INSTANCE_ID" == "" ]; then
Logger "No INSTANCE_ID defined in config file." "CRITICAL"
exit 1
fi
if [ "$INITIATOR_SYNC_DIR" == "" ]; then
Logger "No INITIATOR_SYNC_DIR set in config file." "CRITICAL"
exit 1
fi
if [ "$TARGET_SYNC_DIR" == "" ]; then
Logger "Not TARGET_SYNC_DIR set in config file." "CRITICAL"
exit 1
fi
if ([ ! -f "$SSH_RSA_PRIVATE_KEY" ] && [ ! -f "$SSH_PASSWORD_FILE" ]); then
Logger "Cannot find rsa private key [$SSH_RSA_PRIVATE_KEY] nor password file [$SSH_PASSWORD_FILE]. No authentication method provided." "CRITICAL"
exit 1
fi
}
function TriggerInitiatorUpdate {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
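# Note: the heredoc below is quoted ('ENDSSH'), so nothing in it expands locally;
# every value the remote snippet needs (PUSH_FILE, INSTANCE_ID, logger settings...) is passed explicitly via env on the ssh command line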
$SSH_CMD env _REMOTE_TOKEN="$_REMOTE_TOKEN" \
env _DEBUG="'$_DEBUG'" env _PARANOIA_DEBUG="'$_PARANOIA_DEBUG'" env _LOGGER_SILENT="'$_LOGGER_SILENT'" env _LOGGER_VERBOSE="'$_LOGGER_VERBOSE'" env _LOGGER_PREFIX="'$_LOGGER_PREFIX'" env _LOGGER_ERR_ONLY="'$_LOGGER_ERR_ONLY'" \
env PROGRAM="'$PROGRAM'" env SCRIPT_PID="'$SCRIPT_PID'" TSTAMP="'$TSTAMP'" env INSTANCE_ID="'$INSTANCE_ID'" \
env PUSH_FILE="'$(EscapeSpaces "${INITIATOR[$__updateTriggerFIle]}")'" \
env LC_ALL=C $COMMAND_SUDO' bash -s' << 'ENDSSH' >> "$RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID.$TSTAMP" 2>&1
include #### DEBUG SUBSET ####
include #### TrapError SUBSET ####
include #### RemoteLogger SUBSET ####
echo "$INSTANCE_ID $(date '+%Y%m%dT%H%M%S.%N')" >> "$PUSH_FILE"
ENDSSH
if [ -s "$RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID.$TSTAMP" ] || [ $? != 0 ]; then
(
_LOGGER_PREFIX="RR"
Logger "$(cat $RUN_DIR/$PROGRAM.${FUNCNAME[0]}.$SCRIPT_PID.$TSTAMP)" "ERROR"
)
return 1
fi
return 0
}
function Init {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
# Set error exit code if a piped command fails
set -o pipefail
set -o errtrace
trap TrapQuit TERM EXIT HUP QUIT
local uri
local hosturiandpath
local hosturi
## Test if the initiator dir is an ssh uri, and if so, break it down into its components
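## Example (hypothetical values): ssh://syncuser@files.example.com:2222/data/replica gives
## REMOTE_USER=syncuser, REMOTE_HOST=files.example.com, REMOTE_PORT=2222 and path part data/replica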
if [ "${INITIATOR_SYNC_DIR:0:6}" == "ssh://" ]; then
REMOTE_OPERATION="yes"
# remove leading 'ssh://'
uri=${INITIATOR_SYNC_DIR#ssh://*}
if [[ "$uri" == *"@"* ]]; then
# remove everything after '@'
REMOTE_USER=${uri%@*}
else
REMOTE_USER=$LOCAL_USER
fi
if [ "$SSH_RSA_PRIVATE_KEY" == "" ]; then
if [ ! -f "$SSH_PASSWORD_FILE" ]; then
# Assume that there might exist a standard rsa key
SSH_RSA_PRIVATE_KEY=~/.ssh/id_rsa
fi
fi
# remove everything before '@'
hosturiandpath=${uri#*@}
# remove everything after first '/'
hosturi=${hosturiandpath%%/*}
if [[ "$hosturi" == *":"* ]]; then
REMOTE_PORT=${hosturi##*:}
else
REMOTE_PORT=22
fi
REMOTE_HOST=${hosturi%%:*}
# remove everything before first '/'
TARGET_SYNC_DIR=${hosturiandpath#*/}
else
Logger "No valid remote initiator URI found in [$INITIATOR_SYNC_DIR]." "CRITICAL"
exit 1
fi
if [ "$INITIATOR_SYNC_DIR" == "" ] || [ "$TARGET_SYNC_DIR" == "" ]; then
Logger "Initiator or target path empty." "CRITICAL"
exit 1
fi
## Make sure there is only one trailing slash on path
INITIATOR_SYNC_DIR="${INITIATOR_SYNC_DIR%/}/"
TARGET_SYNC_DIR="${TARGET_SYNC_DIR%/}/"
# Expand ~ if exists
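# ${VAR/#\~/$HOME} is a bash pattern substitution anchored at the start of the string: it replaces a leading literal ~ with the current $HOME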
INITIATOR_SYNC_DIR="${INITIATOR_SYNC_DIR/#\~/$HOME}"
TARGET_SYNC_DIR="${TARGET_SYNC_DIR/#\~/$HOME}"
SSH_RSA_PRIVATE_KEY="${SSH_RSA_PRIVATE_KEY/#\~/$HOME}"
SSH_PASSWORD_FILE="${SSH_PASSWORD_FILE/#\~/$HOME}"
## Replica format
## Why the f*** does bash not have simple objects ?
# Local variables used for state filenames
local lockFilename="lock"
local stateDir="state"
local backupDir="backup"
local deleteDir="deleted"
local partialDir="_partial"
local lastAction="last-action"
local resumeCount="resume-count"
if [ "$_DRYRUN" == true ]; then
local drySuffix="-dry"
else
local drySuffix=
fi
# The following associative like array definitions are used for bash ver < 4 compat
readonly __type=0
readonly __replicaDir=1
readonly __lockFile=2
readonly __stateDir=3
readonly __backupDir=4
readonly __deleteDir=5
readonly __partialDir=6
readonly __initiatorLastActionFile=7
readonly __targetLastActionFile=8
readonly __resumeCount=9
readonly __treeCurrentFile=10
readonly __treeAfterFile=11
readonly __treeAfterFileNoSuffix=12
readonly __deletedListFile=13
readonly __failedDeletedListFile=14
readonly __successDeletedListFile=15
readonly __timestampCurrentFile=16
readonly __timestampAfterFile=17
readonly __timestampAfterFileNoSuffix=18
readonly __conflictListFile=19
readonly __updateTriggerFile=20
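# These numeric indexes emulate an associative array on bash < 4; fields are read back as e.g. "${INITIATOR[$__replicaDir]}" or "${TARGET[$__updateTriggerFile]}"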
INITIATOR=()
INITIATOR[$__type]='initiator'
INITIATOR[$__replicaDir]="$INITIATOR_SYNC_DIR"
INITIATOR[$__lockFile]="$INITIATOR_SYNC_DIR$OSYNC_DIR/$lockFilename"
INITIATOR[$__stateDir]="$OSYNC_DIR/$stateDir"
INITIATOR[$__backupDir]="$OSYNC_DIR/$backupDir"
INITIATOR[$__deleteDir]="$OSYNC_DIR/$deleteDir"
INITIATOR[$__partialDir]="$OSYNC_DIR/$partialDir"
INITIATOR[$__initiatorLastActionFile]="$INITIATOR_SYNC_DIR$OSYNC_DIR/$stateDir/initiator-$lastAction-$INSTANCE_ID$drySuffix"
INITIATOR[$__targetLastActionFile]="$INITIATOR_SYNC_DIR$OSYNC_DIR/$stateDir/target-$lastAction-$INSTANCE_ID$drySuffix"
INITIATOR[$__resumeCount]="$INITIATOR_SYNC_DIR$OSYNC_DIR/$stateDir/$resumeCount-$INSTANCE_ID$drySuffix"
INITIATOR[$__treeCurrentFile]="-tree-current-$INSTANCE_ID$drySuffix"
INITIATOR[$__treeAfterFile]="-tree-after-$INSTANCE_ID$drySuffix"
INITIATOR[$__treeAfterFileNoSuffix]="-tree-after-$INSTANCE_ID"
INITIATOR[$__deletedListFile]="-deleted-list-$INSTANCE_ID$drySuffix"
INITIATOR[$__failedDeletedListFile]="-failed-delete-$INSTANCE_ID$drySuffix"
INITIATOR[$__successDeletedListFile]="-success-delete-$INSTANCE_ID$drySuffix"
INITIATOR[$__timestampCurrentFile]="-timestamps-current-$INSTANCE_ID$drySuffix"
INITIATOR[$__timestampAfterFile]="-timestamps-after-$INSTANCE_ID$drySuffix"
INITIATOR[$__timestampAfterFileNoSuffix]="-timestamps-after-$INSTANCE_ID"
INITIATOR[$__conflictListFile]="conflicts-$INSTANCE_ID$drySuffix"
INITIATOR[$__updateTriggerFile]="$INITIATOR_SYNC_DIR$OSYNC_DIR/.osync-update.push"
TARGET=()
TARGET[$__type]='target'
TARGET[$__replicaDir]="$TARGET_SYNC_DIR"
TARGET[$__lockFile]="$TARGET_SYNC_DIR$OSYNC_DIR/$lockFilename"
TARGET[$__stateDir]="$OSYNC_DIR/$stateDir"
TARGET[$__backupDir]="$OSYNC_DIR/$backupDir"
TARGET[$__deleteDir]="$OSYNC_DIR/$deleteDir"
TARGET[$__partialDir]="$OSYNC_DIR/$partialDir" # unused
TARGET[$__initiatorLastActionFile]="$TARGET_SYNC_DIR$OSYNC_DIR/$stateDir/initiator-$lastAction-$INSTANCE_ID$drySuffix" # unused
TARGET[$__targetLastActionFile]="$TARGET_SYNC_DIR$OSYNC_DIR/$stateDir/target-$lastAction-$INSTANCE_ID$drySuffix" # unused
TARGET[$__resumeCount]="$TARGET_SYNC_DIR$OSYNC_DIR/$stateDir/$resumeCount-$INSTANCE_ID$drySuffix" # unused
TARGET[$__treeCurrentFile]="-tree-current-$INSTANCE_ID$drySuffix" # unused
TARGET[$__treeAfterFile]="-tree-after-$INSTANCE_ID$drySuffix" # unused
TARGET[$__treeAfterFileNoSuffix]="-tree-after-$INSTANCE_ID" # unused
TARGET[$__deletedListFile]="-deleted-list-$INSTANCE_ID$drySuffix" # unused
TARGET[$__failedDeletedListFile]="-failed-delete-$INSTANCE_ID$drySuffix"
TARGET[$__successDeletedListFile]="-success-delete-$INSTANCE_ID$drySuffix"
TARGET[$__timestampCurrentFile]="-timestamps-current-$INSTANCE_ID$drySuffix"
TARGET[$__timestampAfterFile]="-timestamps-after-$INSTANCE_ID$drySuffix"
TARGET[$__timestampAfterFileNoSuffix]="-timestamps-after-$INSTANCE_ID"
TARGET[$__conflictListFile]="conflicts-$INSTANCE_ID$drySuffix"
TARGET[$__updateTriggerFile]="$TARGET_SYNC_DIR$OSYNC_DIR/.osync-update.push"
}
function Usage {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
if [ "$IS_STABLE" != "yes" ]; then
echo -e "\e[93mThis is an unstable dev build. Please use with caution.\e[0m"
fi
echo "$PROGRAM $PROGRAM_VERSION $PROGRAM_BUILD"
echo "$AUTHOR"
echo "$CONTACT"
echo ""
echo "You must use $PROGRAM with a full blown configuration file."
echo "Usage: $0 /path/to/config/file [OPTIONS]"
echo ""
echo "[OPTIONS]"
echo "--no-prefix Will suppress time / date suffix from output"
echo "--silent Will run osync without any output to stdout, used for cron jobs"
echo "--errors-only Output only errors (can be combined with silent or verbose)"
echo "--verbose Increases output"
echo "--on-changes Will launch a sync task after a short wait period if there is some file activity on initiator replica. You should try daemon mode instead"
echo ""
exit 128
}
function OnChangesHelper {
__CheckArguments 0 $# "$@" #__WITH_PARANOIA_DEBUG
local cmd
local retval
if [ "$LOCAL_OS" == "MacOSX" ]; then
if ! type fswatch > /dev/null 2>&1 ; then
Logger "No inotifywait command found. Cannot monitor changes." "CRITICAL"
exit 1
fi
else
if ! type inotifywait > /dev/null 2>&1 ; then
Logger "No inotifywait command found. Cannot monitor changes." "CRITICAL"
exit 1
fi
fi
if [ ! -d "$TARGET_SYNC_DIR" ]; then
Logger "Target directory [$TARGET_SYNC_DIR] does not exist. Cannot monitor." "CRITICAL"
exit 1
fi
Logger "#### Running $PROGRAM in file monitor mode." "NOTICE"
while true; do
if [ "$LOCAL_OS" == "MacOSX" ]; then
fswatch $RSYNC_PATTERNS $RSYNC_PARTIAL_EXCLUDE --exclude "$OSYNC_DIR" -1 "$TARGET_SYNC_DIR" > /dev/null &
# Mac fswatch doesn't have a timeout switch, so wait $! is replaced with WaitForTaskCompletion (no warning, no spinner) and an increased SLEEP_TIME to avoid CPU hogging. This simulates wait $! with a timeout
WaitForTaskCompletion $! 0 $MAX_WAIT 1 0 true false true
else
inotifywait $RSYNC_PATTERNS $RSYNC_PARTIAL_EXCLUDE --exclude "$OSYNC_DIR" -qq -r -e create -e modify -e delete -e move -e attrib --timeout "$MAX_WAIT" "$TARGET_SYNC_DIR" &
wait $!
fi
retval=$?
if [ $retval -eq 0 ]; then
Logger "#### Changes detected, waiting $MIN_WAIT seconds before triggering update on initiator." "NOTICE"
sleep $MIN_WAIT
# inotifywait --timeout result is 2, WaitForTaskCompletion HardTimeout is 1
elif [ "$LOCAL_OS" == "MacOSX" ]; then
Logger "#### Changes or error detected, waiting $MIN_WAIT seconds before triggering update on initiator." "NOTICE"
elif [ $retval -eq 2 ]; then
Logger "#### $MAX_WAIT timeout reached, running sync." "NOTICE"
elif [ $retval -eq 1 ]; then
Logger "#### inotify error detected, waiting $MIN_WAIT seconds before triggering update on initiator." "ERROR" $retval
sleep $MIN_WAIT
fi
TriggerInitiatorUpdate
done
}
#### SCRIPT ENTRY POINT
DESTINATION_MAILS=""
ERROR_ALERT=false
WARN_ALERT=false
if [ $# -eq 0 ]
then
Usage
fi
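# The first positional argument is the config file path, so the 'first' flag below exempts it from the unknown option check in the case statement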
first=1
for i in "$@"; do
case $i in
--silent)
_LOGGER_SILENT=true
;;
--verbose)
_LOGGER_VERBOSE=true
;;
--help|-h|--version|-v)
Usage
;;
--errors-only)
_LOGGER_ERR_ONLY=true
;;
--no-prefix)
_LOGGER_PREFIX=""
;;
*)
if [ $first == "0" ]; then
Logger "Unknown option '$i'" "CRITICAL"
Usage
fi
;;
esac
first=0
done
# Remove leading space if there is one
opts="${opts# *}"
ConfigFile="${1}"
LoadConfigFile "$ConfigFile"
if [ "$LOGFILE" == "" ]; then
if [ -w /var/log ]; then
LOG_FILE="/var/log/$PROGRAM.$INSTANCE_ID.log"
elif ([ "$HOME" != "" ] && [ -w "$HOME" ]); then
LOG_FILE="$HOME/$PROGRAM.$INSTANCE_ID.log"
else
LOG_FILE="./$PROGRAM.$INSTANCE_ID.log"
fi
else
LOG_FILE="$LOGFILE"
fi
if [ ! -w "$(dirname $LOG_FILE)" ]; then
echo "Cannot write to log [$(dirname $LOG_FILE)]."
else
Logger "Script begin, logging to [$LOG_FILE]." "DEBUG"
fi
if [ "$IS_STABLE" != "yes" ]; then
Logger "This is an unstable dev build [$PROGRAM_BUILD]. Please use with caution." "WARN"
fi
GetLocalOS
InitLocalOSDependingSettings
PreInit
Init
CheckEnvironment
PostInit
CheckCurrentConfig
CheckCurrentConfigAll
DATE=$(date)
Logger "-------------------------------------------------------------" "NOTICE"
Logger "$DRY_WARNING$DATE - $PROGRAM $PROGRAM_VERSION script begin." "ALWAYS"
Logger "-------------------------------------------------------------" "NOTICE"
Logger "Sync task [$INSTANCE_ID] launched as $LOCAL_USER@$LOCAL_HOST (PID $SCRIPT_PID)" "NOTICE"
OnChangesHelper

dev/ofunctions.sh Normal file → Executable file

File diff suppressed because it is too large Load Diff

View File

@ -4,5 +4,7 @@
#SC1091 = not following source
#SC2086 = quoting errors (shellcheck is way too picky about quoting)
#SC2120 = only for debug version
#SC2034 = unused variables (can be ignored in ofunctions.sh)
#SC2068 = bad array usage (can be ignored in ofunctions.sh)
shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $1
shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $@

View File

@ -2,9 +2,10 @@
###### osync - Rsync based two way sync engine with fault tolerance
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### osync v1.1x / v1.2x config file rev 2017060501
## ---------- GENERAL OPTIONS
[GENERAL]
CONFIG_FILE_REVISION=1.3.0
## Sync job identification
INSTANCE_ID="local"
@ -27,7 +28,7 @@ SSH_PASSWORD_FILE=""
_REMOTE_TOKEN=SomeAlphaNumericToken9
## Create sync directories if they do not exist
CREATE_DIRS=no
CREATE_DIRS=false
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
LOGFILE=""
@ -39,7 +40,7 @@ MINIMUM_SPACE=10240
BANDWIDTH=0
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
SUDO_EXEC=no
SUDO_EXEC=false
## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync
## Remote rsync executable path. Leave this empty in most cases
@ -64,23 +65,25 @@ RSYNC_EXCLUDE_FROM=""
## List elements separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";"
## ---------- REMOTE SYNC OPTIONS
[REMOTE_OPTIONS]
## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes
SSH_COMPRESSION=true
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
SSH_IGNORE_KNOWN_HOSTS=no
SSH_IGNORE_KNOWN_HOSTS=false
SSH_CONTROLMASTER=false
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
REMOTE_HOST_PING=no
REMOTE_HOST_PING=false
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
## Be aware that some DNS services like OpenDNS redirect false hostnames. Also, this adds a bit less than a minute of extra execution time.
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## ---------- MISC OPTIONS
[MISC_OPTIONS]
## Optional arguments passed to the rsync executable. The following are already managed by the program and should never be passed here
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz skip-compress checksum bwlimit partial partial-dir no-whole-file whole-file backup backup-dir suffix
@ -88,27 +91,27 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes
PRESERVE_OWNER=yes
PRESERVE_GROUP=yes
PRESERVE_PERMISSIONS=true
PRESERVE_OWNER=true
PRESERVE_GROUP=true
## On MACOS X, does not work and will be ignored
PRESERVE_EXECUTABILITY=yes
PRESERVE_EXECUTABILITY=true
## Preserve ACLS. Make sure source and target FS can manage same ACLs or you'll get loads of errors.
PRESERVE_ACL=yes
PRESERVE_ACL=false
## Preserve Xattr. Make sure source and target FS can manage same Xattrs or you'll get loads of errors.
PRESERVE_XATTR=yes
PRESERVE_XATTR=false
## Transforms symlinks into referent files/dirs
COPY_SYMLINKS=no
COPY_SYMLINKS=false
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
KEEP_DIRLINKS=no
KEEP_DIRLINKS=false
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
PRESERVE_HARDLINKS=no
PRESERVE_HARDLINKS=false
## Do a full checksum on all files that have identical sizes, they are checksummed to see if they actually are identical. This can take a long time.
CHECKSUM=no
CHECKSUM=false
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
RSYNC_COMPRESS=yes
RSYNC_COMPRESS=true
## Maximum execution time (in seconds) for sync process. Setting these values to zero disables max execution times.
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
@ -125,45 +128,45 @@ MIN_WAIT=60
## Use 0 to wait indefinitely.
MAX_WAIT=7200
## ---------- BACKUP AND DELETION OPTIONS
[BACKUP_DELETE_OPTIONS]
## Log a list of conflictual files
LOG_CONFLICTS=yes
LOG_CONFLICTS=true
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
ALERT_CONFLICTS=no
ALERT_CONFLICTS=false
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
CONFLICT_BACKUP=yes
CONFLICT_BACKUP=true
## Keep multiple backup versions of the same file. Warning: this can be very space consuming.
CONFLICT_BACKUP_MULTIPLE=no
CONFLICT_BACKUP_MULTIPLE=false
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
CONFLICT_BACKUP_DAYS=30
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winning replica.
CONFLICT_PREVALANCE=initiator
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
SOFT_DELETE=yes
SOFT_DELETE=true
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
SOFT_DELETE_DAYS=30
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
SKIP_DELETION=
## ---------- RESUME OPTIONS
[RESUME_OPTIONS]
## Try to resume an aborted sync task
RESUME_SYNC=yes
RESUME_SYNC=true
## Maximum number of resume tries before initiating a fresh sync.
RESUME_TRY=2
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
FORCE_STRANGER_LOCK_RESUME=no
FORCE_STRANGER_LOCK_RESUME=false
## Keep partial uploads that can be resumed on next run, experimental feature
PARTIAL=no
PARTIAL=false
## Use delta copy algorithm (useful when local paths are network drives), defaults to yes
DELTA_COPIES=yes
DELTA_COPIES=true
## ---------- ALERT OPTIONS
[ALERT_OPTIONS]
## List of alert mails separated by spaces
## Most Unix systems (including Win10 bash) have mail support out of the box
@ -187,7 +190,7 @@ SMTP_ENCRYPTION=none
SMTP_USER=
SMTP_PASSWORD=
## ---------- EXECUTION HOOKS
[EXECUTION_HOOKS]
## Commands will be run before and/or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
LOCAL_RUN_BEFORE_CMD=""
@ -201,7 +204,7 @@ MAX_EXEC_TIME_PER_CMD_BEFORE=0
MAX_EXEC_TIME_PER_CMD_AFTER=0
## Stops osync execution if one of the above commands fail
STOP_ON_CMD_ERROR=yes
STOP_ON_CMD_ERROR=true
## Run local and remote after sync commands even on failure
RUN_AFTER_CMD_ON_ERROR=no
RUN_AFTER_CMD_ON_ERROR=false

View File

@ -2,9 +2,10 @@
###### osync - Rsync based two way sync engine with fault tolerance
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### osync v1.1x / v1.2x config file rev 2017060601
## ---------- GENERAL OPTIONS
[GENERAL]
CONFIG_FILE_REVISION=1.3.0
## Sync job identification
INSTANCE_ID="remote"
@ -15,10 +16,10 @@ INITIATOR_SYNC_DIR="${HOME}/osync-tests/initiator"
## Target is the system osync synchronizes to (can be the same system as the initiator in case of local sync tasks). The target directory can be a local or remote path.
#TARGET_SYNC_DIR="${HOME}/osync-tests/target"
TARGET_SYNC_DIR="ssh://root@localhost:49999/${HOME}/osync-tests/target"
TARGET_SYNC_DIR="ssh://root@localhost:44999/${HOME}/osync-tests/target"
## If the target system is remote, you can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local"
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_osync_tests"
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
SSH_PASSWORD_FILE=""
@ -27,7 +28,7 @@ SSH_PASSWORD_FILE=""
_REMOTE_TOKEN=SomeAlphaNumericToken9
## Create sync directories if they do not exist
CREATE_DIRS=no
CREATE_DIRS=false
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
LOGFILE=""
@ -39,7 +40,7 @@ MINIMUM_SPACE=10240
BANDWIDTH=0
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
SUDO_EXEC=no
SUDO_EXEC=false
## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync
## Remote rsync executable path. Leave this empty in most cases
@ -64,23 +65,25 @@ RSYNC_EXCLUDE_FROM=""
## List elements separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";"
## ---------- REMOTE SYNC OPTIONS
[REMOTE_OPTIONS]
## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes
SSH_COMPRESSION=true
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
SSH_IGNORE_KNOWN_HOSTS=no
SSH_IGNORE_KNOWN_HOSTS=false
SSH_CONTROLMASTER=false
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
REMOTE_HOST_PING=yes
REMOTE_HOST_PING=true
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
## Be aware that some DNS services like OpenDNS redirect false hostnames. Also, this adds a bit less than a minute of extra execution time.
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## ---------- MISC OPTIONS
[MISC_OPTIONS]
## Optional arguments passed to the rsync executable. The following are already managed by the program and should never be passed here
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz skip-compress checksum bwlimit partial partial-dir no-whole-file whole-file backup backup-dir suffix
@ -88,27 +91,27 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes
PRESERVE_OWNER=yes
PRESERVE_GROUP=yes
PRESERVE_PERMISSIONS=true
PRESERVE_OWNER=true
PRESERVE_GROUP=true
## On MACOS X, does not work and will be ignored
PRESERVE_EXECUTABILITY=yes
PRESERVE_EXECUTABILITY=true
## Preserve ACLS. Make sure source and target FS can manage same ACLs or you'll get loads of errors.
PRESERVE_ACL=yes
PRESERVE_ACL=false
## Preserve Xattr. Make sure source and target FS can manage same Xattrs or you'll get loads of errors.
PRESERVE_XATTR=yes
PRESERVE_XATTR=false
## Transforms symlinks into referent files/dirs
COPY_SYMLINKS=no
COPY_SYMLINKS=false
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
KEEP_DIRLINKS=no
KEEP_DIRLINKS=false
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
PRESERVE_HARDLINKS=no
PRESERVE_HARDLINKS=false
## Do a full checksum on all files that have identical sizes, they are checksummed to see if they actually are identical. This can take a long time.
CHECKSUM=no
CHECKSUM=false
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
RSYNC_COMPRESS=yes
RSYNC_COMPRESS=true
## Maximum execution time (in seconds) for sync process. Setting these values to zero disables max execution times.
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
@ -125,45 +128,45 @@ MIN_WAIT=60
## Use 0 to wait indefinitely.
MAX_WAIT=7200
## ---------- BACKUP AND DELETION OPTIONS
[BACKUP_DELETE_OPTIONS]
## Log a list of conflictual files
LOG_CONFLICTS=yes
LOG_CONFLICTS=true
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
ALERT_CONFLICTS=no
ALERT_CONFLICTS=false
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
CONFLICT_BACKUP=yes
CONFLICT_BACKUP=true
## Keep multiple backup versions of the same file. Warning: this can be very space consuming.
CONFLICT_BACKUP_MULTIPLE=no
CONFLICT_BACKUP_MULTIPLE=false
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
CONFLICT_BACKUP_DAYS=30
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winning replica.
CONFLICT_PREVALANCE=initiator
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
SOFT_DELETE=yes
SOFT_DELETE=true
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
SOFT_DELETE_DAYS=30
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
SKIP_DELETION=
## ---------- RESUME OPTIONS
[RESUME_OPTIONS]
## Try to resume an aborted sync task
RESUME_SYNC=yes
RESUME_SYNC=true
## Maximum number of resume tries before initiating a fresh sync.
RESUME_TRY=2
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
FORCE_STRANGER_LOCK_RESUME=no
FORCE_STRANGER_LOCK_RESUME=false
## Keep partial uploads that can be resumed on next run, experimental feature
PARTIAL=no
PARTIAL=false
## Use delta copy algorithm (useful when local paths are network drives), defaults to yes
DELTA_COPIES=yes
DELTA_COPIES=true
## ---------- ALERT OPTIONS
[ALERT_OPTIONS]
## List of alert mails separated by spaces
## Most Unix systems (including Win10 bash) have mail support out of the box
@ -187,7 +190,7 @@ SMTP_ENCRYPTION=none
SMTP_USER=
SMTP_PASSWORD=
## ---------- EXECUTION HOOKS
[EXECUTION_HOOKS]
## Commands will be run before and/or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
LOCAL_RUN_BEFORE_CMD=""
@ -201,7 +204,7 @@ MAX_EXEC_TIME_PER_CMD_BEFORE=0
MAX_EXEC_TIME_PER_CMD_AFTER=0
## Stops osync execution if one of the above commands fail
STOP_ON_CMD_ERROR=yes
STOP_ON_CMD_ERROR=true
## Run local and remote after sync commands even on failure
RUN_AFTER_CMD_ON_ERROR=no
RUN_AFTER_CMD_ON_ERROR=false

View File

@ -1,12 +1,18 @@
#!/usr/bin/env bash
# osync test suite 2023061401
# Allows the following environment variables
# RUNNING_ON_GITHUB_ACTIONS=[true|false]
# SSH_PORT=22
# SKIP_REMOTE=[true|false]
## On Mac OSX, this needs to be run as root in order to use sudo without password
## From current terminal run sudo -s in order to get a new terminal as root
## On CYGWIN / MSYS, ACL and extended attributes aren't supported
# osync test suite 2018070206
# 4 tests:
# quicklocal
# quickremote (with ssh_filter.sh)
@ -16,6 +22,7 @@
# for each test
# files with spaces, subdirs
# largefileset (...large ?)
# quickremote test with controlmaster enabled
# exclusions
# conflict resolution initiator with backups / multiple backups
# conflict resolution target with backups / multiple backups
@ -36,18 +43,30 @@
# setfacl needs double ':' to be compatible with both linux and BSD
# setfacl -m o::rwx file
# On Windows 10 bash, we need to create host SSH keys first with ssh-keygen -A
# Then start ssh with service ssh start
# TODO, use copies of config file on each test function
if [ "$SKIP_REMOTE" = "" ]; then
SKIP_REMOTE=false
REMOTE_USER=root
fi
homedir=$(eval echo ~${REMOTE_USER})
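# eval forces tilde expansion on the runtime value of REMOTE_USER, so homedir becomes that user's home directory (e.g. /root when REMOTE_USER=root)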
# drupal servers are often unreachable for whatever reason or give 0-byte files
#LARGE_FILESET_URL="http://ftp.drupal.org/files/projects/drupal-8.2.2.tar.gz"
LARGE_FILESET_URL="http://www.netpower.fr/sites/default/files/osync-test-files.tar.gz"
# Fakeroot for install / uninstall and test of executables
FAKEROOT="${HOME}/osync_test_install"
LARGE_FILESET_URL="https://ftp.drupal.org/files/projects/drupal-11.0.10.tar.gz"
OSYNC_DIR="$(pwd)"
OSYNC_DIR=${OSYNC_DIR%%/dev*}
DEV_DIR="$OSYNC_DIR/dev"
TESTS_DIR="$DEV_DIR/tests"
# Fakeroot for install / uninstall and test of executables
FAKEROOT="${homedir}/osync_test_install"
CONF_DIR="$TESTS_DIR/conf"
LOCAL_CONF="local.conf"
REMOTE_CONF="remote.conf"
@ -56,11 +75,11 @@ TMP_OLD_CONF="tmp.old.conf"
OSYNC_EXECUTABLE="$FAKEROOT/usr/local/bin/osync.sh"
OSYNC_DEV_EXECUTABLE="dev/n_osync.sh"
OSYNC_UPGRADE="upgrade-v1.0x-v1.2x.sh"
OSYNC_UPGRADE="upgrade-v1.0x-v1.3x.sh"
TMP_FILE="$DEV_DIR/tmp"
OSYNC_TESTS_DIR="${HOME}/osync-tests"
OSYNC_TESTS_DIR="${homedir}/osync-tests"
INITIATOR_DIR="$OSYNC_TESTS_DIR/initiator"
TARGET_DIR="$OSYNC_TESTS_DIR/target"
OSYNC_WORKDIR=".osync_workdir"
@ -73,30 +92,56 @@ OSYNC_VERSION=1.x.y
OSYNC_MIN_VERSION=x
OSYNC_IS_STABLE=maybe
PRIVKEY_NAME="id_rsa_local_osync_tests"
PUBKEY_NAME="${PRIVKEY_NAME}.pub"
function SetupSSH {
echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${HOME}/.ssh/id_rsa_local"
if ! grep "$(cat ${HOME}/.ssh/id_rsa_local.pub)" "${HOME}/.ssh/authorized_keys"; then
echo "from=\"*\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,command=\"$FAKEROOT/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9\" $(cat ${HOME}/.ssh/id_rsa_local.pub)" >> "${HOME}/.ssh/authorized_keys"
echo "Setting up an ssh key to ${homedir}/.ssh/${PRIVKEY_NAME}"
echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${homedir}/.ssh/${PRIVKEY_NAME}"
SSH_AUTH_LINE="from=\"*\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,command=\"$FAKEROOT/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9\" $(cat ${homedir}/.ssh/${PUBKEY_NAME})"
echo "ls -alh ${homedir}"
ls -alh "${homedir}"
echo "ls -alh ${homedir}/.ssh"
ls -alh "${homedir}/.ssh"
if [ -f "${homedir}/.ssh/authorized_keys" ]; then
if ! grep "$(cat ${homedir}/.ssh/${PUBKEY_NAME})" "${homedir}/.ssh/authorized_keys"; then
echo "Adding auth line in authorized_keys file ${homedir}/.ssh/authorized_keys"
echo "$SSH_AUTH_LINE" >> "${homedir}/.ssh/authorized_keys"
fi
chmod 600 "${HOME}/.ssh/authorized_keys"
else
echo "Creating authorized_keys file ${homedir}/.ssh/authorized_keys"
echo "$SSH_AUTH_LINE" >> "${homedir}/.ssh/authorized_keys"
fi
chmod 600 "${homedir}/.ssh/authorized_keys"
# Add localhost to known hosts so self connect works
if [ -z "$(ssh-keygen -F localhost)" ]; then
ssh-keyscan -H localhost >> "${HOME}/.ssh/known_hosts"
ssh-keyscan -H localhost >> "${homedir}/.ssh/known_hosts"
fi
# Update remote conf files with SSH port
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${HOME}/osync-tests/target#' "$CONF_DIR/$REMOTE_CONF"
# Update remote conf files with SSH port and file id location
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/'${homedir}'/osync-tests/target#' "$CONF_DIR/$REMOTE_CONF"
sed -i.tmp2 's#SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_osync_tests"#SSH_RSA_PRIVATE_KEY="'${homedir}'/.ssh/id_rsa_local_osync_tests"#' "$CONF_DIR/$REMOTE_CONF"
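# These two sed calls rewrite TARGET_SYNC_DIR and SSH_RSA_PRIVATE_KEY in the remote conf so they match the runtime SSH_PORT and the remote user's home directory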
echo "ls -alh ${homedir}/.ssh"
ls -alh "${homedir}/.ssh"
echo "cat ${homedir}/.ssh/authorized_keys"
cat "${homedir}/.ssh/authorized_keys"
echo "###"
echo "END SETUP SSH"
}
function RemoveSSH {
local pubkey
if [ -f "${HOME}/.ssh/id_rsa_local" ]; then
pubkey=$(cat "${HOME}/.ssh/id_rsa_local.pub")
sed -i.bak "s|.*$pubkey.*||g" "${HOME}/.ssh/authorized_keys"
rm -f "${HOME}/.ssh/{id_rsa_local.pub,id_rsa_local}"
echo "Now removing SSH keys"
if [ -f "${homedir}/.ssh/id_rsa_local_osync_tests" ]; then
echo "Restoring SSH authorized_keys file"
sed -i.bak "s|.*$(cat "${homedir}/.ssh/id_rsa_local_osync_tests.pub")||g" "${homedir}/.ssh/authorized_keys"
rm -f "${homedir}/.ssh/{id_rsa_local_osync_tests.pub,id_rsa_local_osync_tests}"
fi
}
@ -145,23 +190,27 @@ function CreateOldFile () {
}
function PrepareLocalDirs () {
# Remote dirs are the same as local dirs, so no problem here
if [ -d "$INITIATOR_DIR" ]; then
rm -rf "$INITIATOR_DIR"
if [ -d "$OSYNC_TESTS_DIR" ]; then
rm -rf "$OSYNC_TESTS_DIR"
fi
mkdir -p "$INITIATOR_DIR"
if [ -d "$TARGET_DIR" ]; then
rm -rf "$TARGET_DIR"
fi
mkdir -p "$TARGET_DIR"
mkdir "$OSYNC_TESTS_DIR"
mkdir "$INITIATOR_DIR"
mkdir "$TARGET_DIR"
}
function oneTimeSetUp () {
START_TIME=$SECONDS
mkdir --parents "$FAKEROOT"
#echo "Running forced merge"
#cd "${DEV_DIR}"
#$SUDO_CMD ./merge.sh osync
echo "Setting security for files"
$SUDO_CMD find ${OSYNC_DIR} -exec chmod 755 {} \+
echo "Show content of osync dir"
ls -alh ${OSYNC_DIR}
echo "Running install.sh from ${OSYNC_DIR}"
$SUDO_CMD ${OSYNC_DIR}/install.sh --no-stats --prefix="${FAKEROOT}"
source "$DEV_DIR/ofunctions.sh"
# Fix default umask because of ACL test that expects 0022 when creating test files
@ -172,29 +221,45 @@ function oneTimeSetUp () {
echo "Detected OS: $LOCAL_OS"
# Set some travis related changes
if [ "$TRAVIS_RUN" == true ]; then
echo "Running with travis settings"
REMOTE_USER="travis"
RHOST_PING="no"
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
echo "Running with GITHUB ACTIONS settings"
#REMOTE_USER="runner"
REMOTE_USER="root" # WIP
homedir=$(eval echo ~${REMOTE_USER})
RHOST_PING=false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" false
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "no"
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" false
else
echo "Running with local settings"
REMOTE_USER="root"
RHOST_PING="yes"
homedir=$(eval echo ~${REMOTE_USER})
RHOST_PING=true
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" "yes"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "REMOTE_HOST_PING" true
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "yes"
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" true
fi
# Fix test directories for Github actions
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" INITIATOR_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" TARGET_SYNC_DIR "\"${homedir}/osync-tests/target\""
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" INITIATOR_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
SetConfFileValue "$CONF_DIR/$OLD_CONF" MASTER_SYNC_DIR "\"${homedir}/osync-tests/initiator\""
SetConfFileValue "$CONF_DIR/$OLD_CONF" SLAVE_SYNC_DIR "\"${homedir}/osync-tests/target\""
# Get default ssh port from env
if [ "$SSH_PORT" == "" ]; then
SSH_PORT=22
echo "Running with SSH_PORT=${SSH_PORT}"
fi
# Setup modes per test
@ -204,8 +269,8 @@ function oneTimeSetUp () {
readonly __confRemote=3
osyncParameters=()
osyncParameters[$__quickLocal]="--initiator=$INITIATOR_DIR --target=$TARGET_DIR --instance-id=quicklocal"
osyncParameters[$__confLocal]="$CONF_DIR/$LOCAL_CONF"
osyncParameters[$__quickLocal]="--initiator=$INITIATOR_DIR --target=$TARGET_DIR --instance-id=quicklocal --non-interactive"
osyncParameters[$__confLocal]="$CONF_DIR/$LOCAL_CONF --non-interactive"
osyncDaemonParameters=()
@ -215,9 +280,9 @@ function oneTimeSetUp () {
osyncDaemonParameters[$__local]="$CONF_DIR/$LOCAL_CONF --on-changes"
# Do not check remote config on msys or cygwin since we don't have a local SSH server
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ]; then
osyncParameters[$__quickRemote]="--initiator=$INITIATOR_DIR --target=ssh://localhost:$SSH_PORT/$TARGET_DIR --rsakey=${HOME}/.ssh/id_rsa_local --instance-id=quickremote --remote-token=SomeAlphaNumericToken9"
osyncParameters[$__confRemote]="$CONF_DIR/$REMOTE_CONF"
if [ "$LOCAL_OS" != "msys" ] && [ "$LOCAL_OS" != "Cygwin" ] && [ $SKIP_REMOTE != true ]; then
osyncParameters[$__quickRemote]="--initiator=$INITIATOR_DIR --target=ssh://localhost:$SSH_PORT/$TARGET_DIR --rsakey=${homedir}/.ssh/id_rsa_local_osync_tests --instance-id=quickremote --remote-token=SomeAlphaNumericToken9 --non-interactive"
osyncParameters[$__confRemote]="$CONF_DIR/$REMOTE_CONF --non-interactive"
osyncDaemonParameters[$__remote]="$CONF_DIR/$REMOTE_CONF --on-changes"
@ -250,14 +315,14 @@ function oneTimeSetUp () {
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "SKIP_DELETION" ""
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "SKIP_DELETION" ""
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "COPY_SYMLINKS" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "COPY_SYMLINKS" "no"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "COPY_SYMLINKS" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "COPY_SYMLINKS" false
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" false
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "SOFT_MAX_EXEC_TIME" "7200"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "HARD_MAX_EXEC_TIME" "10600"
@ -269,7 +334,9 @@ function oneTimeTearDown () {
# Set osync version stable flag back to origin
#SetConfFileValue "$OSYNC_DIR/osync.sh" "IS_STABLE" "$OSYNC_IS_STABLE"
if [ "$SKIP_REMOTE" != true ]; then
RemoveSSH
fi
#TODO: uncomment this when dev is done
#rm -rf "$OSYNC_TESTS_DIR"
@ -281,7 +348,7 @@ function oneTimeTearDown () {
$SUDO_CMD ./install.sh --remove --no-stats --prefix="$FAKEROOT"
assertEquals "Uninstall failed" "0" $?
ELAPSED_TIME=$(($SECONDS - $START_TIME))
ELAPSED_TIME=$((SECONDS-START_TIME))
echo "It took $ELAPSED_TIME seconds to run these tests."
}
@ -290,12 +357,52 @@ function setUp () {
rm -rf "$TARGET_DIR"
}
function test_SSH {
# Make sure we have SSH on the test server
# This has become kind of tricky on github actions servers
echo "Testing SSH"
failure=false
# Testing as "remote user"
echo "ls -alh ${homedir}/.ssh"
ls -alh "${homedir}/.ssh"
echo "Running SSH test as ${REMOTE_USER}"
# SSH_PORT and SSH_USER are set by oneTimeSetup
$SUDO_CMD ssh -i "${homedir}/.ssh/${PRIVKEY_NAME}" -p $SSH_PORT ${REMOTE_USER}@localhost "env _REMOTE_TOKEN=SomeAlphaNumericToken9 echo \"Remotely:\"; whoami; echo \"TEST OK\""
if [ $? -ne 0 ]; then
echo "SSH test failed"
failure=true
fi
# Testing as current user
#echo "ls -alh ${homedir}/.ssh"
#ls -alh "${homedir}/.ssh"
#echo "Running SSH test as $(whoami)"
#$SUDO_CMD ssh -i "${homedir}/.ssh/${PRIVKEY_NAME}" -p $SSH_PORT $(whoami)@localhost "env _REMOTE_TOKEN=SomeAlphaNumericToken9 echo \"Remotely:\"; whoami; echo \"TEST OK\""
#if [ $? -ne 0 ]; then
# echo "SSH test failed"
# failure=true
#fi
if [ $failure == true ]; then
exit 1 # Try to see if we can abort all tests
assertEquals "Test SSH failed" false $failure
fi
}
# This test has to be done every time so that the osync executable is fresh
function test_Merge () {
cd "$DEV_DIR"
./merge.sh osync
assertEquals "Merging code" "0" $?
#WIP use debug code
alias cp=cp
cp "$DEV_DIR/debug_osync.sh" "$OSYNC_DIR/osync.sh"
cd "$OSYNC_DIR"
echo ""
@ -305,12 +412,12 @@ function test_Merge () {
# Set osync version to stable while testing to avoid warning message
# Don't use SetConfFileValue here since for whatever reason Travis does not like creating a sed temporary file in $FAKEROOT
if [ "$TRAVIS_RUN" == true ]; then
$SUDO_CMD sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=yes/' "$OSYNC_EXECUTABLE"
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
$SUDO_CMD sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=true/' "$OSYNC_EXECUTABLE"
else
sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=yes/' "$OSYNC_EXECUTABLE"
sed -i.tmp 's/^IS_STABLE=.*/IS_STABLE=true/' "$OSYNC_EXECUTABLE"
fi
#SetConfFileValue "$OSYNC_EXECUTABLE" "IS_STABLE" "yes"
#SetConfFileValue "$OSYNC_EXECUTABLE" "IS_STABLE" true
assertEquals "Install failed" "0" $?
@ -334,6 +441,15 @@ function test_LargeFileSet () {
done
}
function test_controlMaster () {
cd "$OSYNC_DIR"
PrepareLocalDirs
echo "Running with parameters ${osyncParameters[$__quickRemote]} --ssh-controlmaster"
REMOTE_HOST_PING=$REMOTE_PING $OSYNC_EXECUTABLE ${osyncParameters[$__quickRemote]} --ssh-controlmaster
assertEquals "Running quick remote test with controlmaster enabled." "0" $?
}
function test_Exclusions () {
# Will sync except php files
# RSYNC_EXCLUDE_PATTERN="*.php" is set at runtime for quicksync and in config files for other runs
@ -364,9 +480,9 @@ function test_Exclusions () {
function test_Deletetion () {
local iFile1="$INITIATOR_DIR/i fic"
local iFile2="$INITIATOR_DIR/ifoc"
local iFile2="$INITIATOR_DIR/i foc (something)"
local tFile1="$TARGET_DIR/t fic"
local tFile2="$TARGET_DIR/tfoc"
local tFile2="$TARGET_DIR/t foc [nothing]"
for i in "${osyncParameters[@]}"; do
@ -458,7 +574,7 @@ function test_deletion_failure () {
$SUDO_CMD $IMMUTABLE_OFF_CMD "$TARGET_DIR/$FileA"
$SUDO_CMD $IMMUTABLE_OFF_CMD "$INITIATOR_DIR/$FileB"
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --verbose
assertEquals "Third deletion run with parameters [$i]." "0" $?
[ ! -f "$TARGET_DIR/$FileA" ]
@ -482,7 +598,7 @@ function test_skip_deletion () {
fi
# TRAVIS SPECIFIC - time limitation
if [ "$TRAVIS_RUN" != true ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" != true ]; then
modes=('initiator' 'target' 'initiator,target')
else
modes=('target')
@ -561,7 +677,7 @@ function test_handle_symlinks () {
fi
# Check with and without copySymlinks
copySymlinks="no"
copySymlinks=false
echo "Running with COPY_SYMLINKS=$copySymlinks"
@ -643,12 +759,12 @@ function test_handle_symlinks () {
done
# TRAVIS SPECIFIC - time limitation
if [ "$TRAVIS_RUN" != true ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" != true ]; then
return 0
fi
# Check with and without copySymlinks
copySymlinks="yes"
copySymlinks=true
echo "Running with COPY_SYMLINKS=$copySymlinks"
@ -762,13 +878,13 @@ function test_softdeletion_cleanup () {
touch "$file.new"
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
echo "Skipping changing ctime on file because travis / bsd / macos / Win10 / msys / cygwin does not support debugfs"
else
CreateOldFile "$file.old"
fi
done
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
echo "Skipping changing ctime on dir too"
else
CreateOldFile "$DirA" true
@ -783,7 +899,7 @@ function test_softdeletion_cleanup () {
[ -f "$file.new" ]
assertEquals "New softdeleted / backed up file [$file.new] exists." "0" $?
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
[ ! -f "$file.old" ]
assertEquals "Old softdeleted / backed up file [$file.old] is deleted permanently." "0" $?
else
@ -792,7 +908,7 @@ function test_softdeletion_cleanup () {
fi
done
if [ "$TRAVIS_RUN" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ] || [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "WinNT10" ] || [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
[ ! -d "$DirA" ]
assertEquals "Old softdeleted / backed up directory [$dirA] is deleted permanently." "0" $?
[ ! -d "$DirB" ]
@ -809,7 +925,7 @@ function test_softdeletion_cleanup () {
function test_FileAttributePropagation () {
if [ "$TRAVIS_RUN" == true ]; then
if [ "$RUNNING_ON_GITHUB_ACTIONS" == true ]; then
echo "Skipping FileAttributePropagation tests as travis does not support getfacl / setfacl."
return 0
fi
@ -819,6 +935,11 @@ function test_FileAttributePropagation () {
return 0
fi
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_ACL" true
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_XATTR" true
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_ACL" true
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_XATTR" true
for i in "${osyncParameters[@]}"; do
cd "$OSYNC_DIR"
PrepareLocalDirs
@ -883,6 +1004,11 @@ function test_FileAttributePropagation () {
getfacl "$INITIATOR_DIR/$DirD" | grep "other::-wx" > /dev/null
assertEquals "ACLs matched original value on initiator subdirectory." "0" $?
done
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_ACL" false
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "PRESERVE_XATTR" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_ACL" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "PRESERVE_XATTR" false
}
function test_ConflictBackups () {
@ -926,8 +1052,8 @@ function test_MultipleConflictBackups () {
local additionalParameters
# modify config files
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "yes"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "yes"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" true
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" true
if [ "$OSYNC_MIN_VERSION" != "1" ]; then
additionalParameters="--errors-only --summary --no-prefix"
@ -947,28 +1073,28 @@ function test_MultipleConflictBackups () {
echo "$FileB" > "$TARGET_DIR/$FileB"
# First run
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
assertEquals "First deletion run with parameters [$i]." "0" $?
echo "$FileA+" > "$TARGET_DIR/$FileA"
echo "$FileB+" > "$INITIATOR_DIR/$FileB"
# Second run
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
assertEquals "First deletion run with parameters [$i]." "0" $?
echo "$FileA-" > "$TARGET_DIR/$FileA"
echo "$FileB-" > "$INITIATOR_DIR/$FileB"
# Third run
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
assertEquals "First deletion run with parameters [$i]." "0" $?
echo "$FileA*" > "$TARGET_DIR/$FileA"
echo "$FileB*" > "$INITIATOR_DIR/$FileB"
# Fourth run
CONFLICT_BACKUP_MULTIPLE=yes REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
CONFLICT_BACKUP_MULTIPLE=true REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i $additionalParameters
assertEquals "First deletion run with parameters [$i]." "0" $?
# This test may fail only on 31st December at 23:59 :)
@ -979,8 +1105,8 @@ function test_MultipleConflictBackups () {
assertEquals "3 Backup files are present in [$TARGET_DIR/$OSYNC_BACKUP_DIR/]." "0" $?
done
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" "no"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "CONFLICT_BACKUP_MULTIPLE" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "CONFLICT_BACKUP_MULTIPLE" false
}
function test_Locking () {
@ -1071,8 +1197,8 @@ function test_Locking () {
# Target lock present should be resumed if instance ID is NOT the same as current one but FORCE_STRANGER_UNLOCK=yes
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "yes"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "yes"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" true
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" true
for i in "${osyncParameters[@]}"; do
@ -1086,17 +1212,15 @@ function test_Locking () {
assertEquals "Should be able to resume when target has lock with different instance id but FORCE_STRANGER_UNLOCK=yes." "0" $?
done
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" "no"
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "FORCE_STRANGER_LOCK_RESUME" false
SetConfFileValue "$CONF_DIR/$REMOTE_CONF" "FORCE_STRANGER_LOCK_RESUME" false
}
function test_ConflictDetetion () {
local result
# Tests compatible with v1.4+
# Tests compatible with v1.3+
if [ $OSYNC_MIN_VERSION -lt 3 ]; then
echo "Skipping conflict detection test because osync min version is $OSYNC_MIN_VERSION (must be 3 at least)."
if [ $OSYNC_MIN_VERSION -lt 4 ]; then
echo "Skipping conflict detection test because osync min version is $OSYNC_MIN_VERSION (must be 4 at least)."
return 0
fi
@ -1114,7 +1238,7 @@ function test_ConflictDetetion () {
touch "$TARGET_DIR/$FileA"
# Initializing treeList
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --initialize
REMOTE_HOST_PING=$RHOST_PING _PARANOIA_DEBUG=no $OSYNC_EXECUTABLE $i --initialize
assertEquals "Initialization run with parameters [$i]." "0" $?
# Now modifying files on both sides
@ -1126,19 +1250,21 @@ function test_ConflictDetetion () {
# Now run should return conflicts
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --log-conflicts > "$FAKEROOT/output.log" 2>&1
result=$?
cat "$FAKEROOT/output.log"
assertEquals "Second run that should detect conflicts with parameters [$i]." "0" $result
REMOTE_HOST_PING=$RHOST_PING $OSYNC_EXECUTABLE $i --log-conflicts > "$FAKEROOT/output2.log" 2>&1
assertEquals "Second run that should detect conflicts with parameters [$i]." "0" $?
grep "$INITIATOR_DIR/$FileA << >> $TARGET_DIR/$FileA" "$FAKEROOT/output.log"
cat "$FAKEROOT/output2.log"
#WIP TODO change output.log from output2.log for debug reasons
grep "$INITIATOR_DIR/$FileA << >> $TARGET_DIR/$FileA" "$FAKEROOT/output2.log"
assertEquals "FileA conflict detect with parameters [$i]." "0" $?
grep "$INITIATOR_DIR/$FileB << >> $TARGET_DIR/$FileB" "$FAKEROOT/output.log"
grep "$INITIATOR_DIR/$FileB << >> $TARGET_DIR/$FileB" "$FAKEROOT/output2.log"
assertEquals "FileB conflict detect with parameters [$i]." "0" $?
#TODO: Missing test for conflict prevalence (once we have FORCE_CONFLICT_PREVALANCE)
done
return 0
}
function test_WaitForTaskCompletion () {
@ -1298,7 +1424,6 @@ function test_ParallelExec () {
function test_timedExecution () {
local arguments
local warnExitCode
# Clever usage of indexes and exit codes
# osync exits with 0 when no problem detected
@ -1360,7 +1485,7 @@ function test_UpgradeConfRun () {
assertEquals "Conf file upgrade" "0" $?
# Update remote conf files with SSH port
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${HOME}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${HOME}/osync-tests/target#' "$CONF_DIR/$TMP_OLD_CONF"
sed -i.tmp 's#ssh://.*@localhost:[0-9]*/${homedir}/osync-tests/target#ssh://'$REMOTE_USER'@localhost:'$SSH_PORT'/${homedir}/osync-tests/target#' "$CONF_DIR/$TMP_OLD_CONF"
$OSYNC_EXECUTABLE "$CONF_DIR/$TMP_OLD_CONF"
assertEquals "Upgraded conf file execution test" "0" $?
@ -1390,6 +1515,7 @@ function test_DaemonMode () {
$OSYNC_EXECUTABLE "$CONF_DIR/$LOCAL_CONF" --on-changes &
pid=$!
#TODO: Lower that value when the dispatcher is written
# Trivial value of 2xMIN_WAIT from config files
echo "Sleeping for 120s"
sleep 120

View File

@ -0,0 +1,147 @@
Coding Standards
================
shFlags is more than just a simple 20 line shell script. It is a pretty
significant library of shell code that at first glance is not that easy to
understand. To improve code readability and usability, some guidelines have been
set down to make the code more understandable for anyone who wants to read or
modify it.
Function declaration
--------------------
Declare functions using the following form:
```sh
doSomething() {
echo 'done!'
}
```
One-line functions are allowed if they can fit within the 80 char line limit.
```sh
doSomething() { echo 'done!'; }
```
Function documentation
----------------------
Each function should be preceded by a header that provides the following:
1. A one-sentence summary of what the function does.
1. (optional) A longer description of what the function does, and perhaps some
special information that helps convey its usage better.
1. Args: a one-line summary of each argument of the form:
`name: type: description`
1. Output: a one-line summary of the output provided. Only output to STDOUT
must be documented, unless the output to STDERR is of significance (i.e. not
just an error message). The output should be of the form:
`type: description`
1. Returns: a one-line summary of the value returned. Returns in shell are
always integers, but if the output is a true/false for success (i.e. a
boolean), it should be noted. The output should be of the form:
`type: description`
Here is a sample header:
```
# Return valid getopt options using currently defined list of long options.
#
# This function builds a proper getopt option string for short (and long)
# options, using the current list of long options for reference.
#
# Args:
# _flags_optStr: integer: option string type (__FLAGS_OPTSTR_*)
# Output:
# string: generated option string for getopt
# Returns:
# boolean: success of operation (always returns True)
```
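As a minimal, hypothetical illustration, such a header sits directly above the function it documents (the function and its argument are made up; `FLAGS_TRUE` is the constant listed in the naming table below):
```sh
# Return the length of the given string.
#
# Args:
#   _flags_str_: string: the string to measure
# Output:
#   integer: length of the string
# Returns:
#   boolean: success of operation (always returns True)
_flags_strLen() {
  echo ${#1}
  return ${FLAGS_TRUE}
}
```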
Variable and function names
---------------------------
All shFlags specific constants, variables, and functions will be prefixed
appropriately with 'flags'. This is to distinguish usage in the shFlags code
from users' own scripts so that the shell namespace remains predictable to
users. The exceptions here are the standard `assertEquals`, etc. functions.
All non-built-in constants and variables will be surrounded with squiggle
brackets, e.g. `${flags_someVariable}` to improve code readability.
Due to some shells not supporting local variables in functions, care in the
naming and use of variables, both public and private, is very important.
Accidental overriding of the variables can occur easily if care is not taken as
all variables are technically global variables in some shells.
Type | Sample
---- | ------
global public constant | `FLAGS_TRUE`
global private constant | `__FLAGS_SHELL_FLAGS`
global public variable | `flags_variable`
global private variable | `__flags_variable`
global macro | `_FLAGS_SOME_MACRO_`
public function | `flags_function`
public function, local variable | `flags_variable_`
private function | `_flags_function`
private function, local variable | `_flags_variable_`
Where it makes sense to improve readability, variables can have the first
letter of the second and later words capitalized. For example, the local
variable name for the help string length is `flags_helpStrLen_`.
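A short sketch with hypothetical names, showing the conventions from the table side by side:
```sh
FLAGS_RESERVED='true false'     # global public constant
__FLAGS_DEFAULT_WIDTH=80        # global private constant
flags_lastError=''              # global public variable
__flags_optCount=0              # global private variable

flags_reset() { __flags_optCount=0; }             # public function
_flags_countOpts() { echo ${__flags_optCount}; }  # private function
```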
There are three special-case global public variables. They are used to overcome
the limitations of shell scoping or to prevent forking; a short sketch of their
use follows the list. The three variables are:
- `flags_error`
- `flags_output`
- `flags_return`
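A sketch of the pattern these variables enable (the helper function is hypothetical): the caller reads `${flags_output}` directly rather than capturing stdout with `$(...)`, which would fork a subshell.
```sh
_flags_columns() {
  flags_output=80
  flags_error=''
  flags_return=${FLAGS_TRUE}
  return ${flags_return}
}

# caller:
_flags_columns
echo "terminal width: ${flags_output}"
```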
Local variable cleanup
----------------------
As many shells do not support local variables, no support for cleanup of
variables is present either. As such, all variables local to a function must be
cleaned up with the `unset` built-in command at the end of each function.
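For example (a hypothetical function):
```sh
flags_greeting() {
  flags_name_=${1:-world}
  echo "Hello, ${flags_name_}!"
  unset flags_name_
}
```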
Indentation
-----------
Code block indentation is two (2) spaces, and tabs may not be used.
```sh
if [ -z 'some string' ]; then
  someFunction
fi
```
Lines of code should be no longer than 80 characters unless absolutely
necessary. When lines are wrapped using the backslash character '\', subsequent
lines should be indented with four (4) spaces so as to differentiate from the
standard spacing of two characters, and tabs may not be used.
```sh
for x in some set of very long set of arguments that make for a very long \
    that extends much too long for one line
do
  echo ${x}
done
```
When a conditional expression is written using the built-in `[` command, and
that line must be wrapped, place the control `||` or `&&` operators on the same
line as the expression where possible, with the list to be executed on its own
line.
```sh
[ -n 'some really long expression' -a -n 'some other long expr' ] && \
    echo 'that was actually true!'
```

View File

@ -1,10 +1,15 @@
# shUnit2
shUnit2 is a [xUnit](http://en.wikipedia.org/wiki/XUnit) unit test framework for Bourne based shell scripts, and it is designed to work in a similar manner to [JUnit](http://www.junit.org), [PyUnit](http://pyunit.sourceforge.net), etc.. If you have ever had the desire to write a unit test for a shell script, shUnit2 can do the job.
shUnit2 is a [xUnit](http://en.wikipedia.org/wiki/XUnit) unit test framework for
Bourne based shell scripts, and it is designed to work in a similar manner to
[JUnit](http://www.junit.org), [PyUnit](http://pyunit.sourceforge.net), etc. If
you have ever had the desire to write a unit test for a shell script, shUnit2
can do the job.
[![Travis CI](https://img.shields.io/travis/kward/shunit2.svg)](https://travis-ci.org/kward/shunit2)
[![Travis CI](https://api.travis-ci.com/kward/shunit2.svg)](https://app.travis-ci.com/github/kward/shunit2)
## Table of Contents
* [Introduction](#introduction)
* [Credits / Contributors](#credits-contributors)
* [Feedback](#feedback)
@ -21,47 +26,76 @@ shUnit2 is a [xUnit](http://en.wikipedia.org/wiki/XUnit) unit test framework for
* [Error Handling](#error-handling)
* [Including Line Numbers in Asserts (Macros)](#including-line-numbers-in-asserts-macros)
* [Test Skipping](#test-skipping)
* [Running specific tests from the command line](#cmd-line-args)
* [Appendix](#appendix)
* [Getting help](#getting-help)
* [Zsh](#zsh)
---
## <a name="introduction"></a> Introduction
shUnit2 was originally developed to provide a consistent testing solution for [log4sh][log4sh], a shell based logging framework similar to [log4j](http://logging.apache.org). During the development of that product, a repeated problem of having things work just fine under one shell (`/bin/bash` on Linux to be specific), and then not working under another shell (`/bin/sh` on Solaris) kept coming up. Although several simple tests were run, they were not adequate and did not catch some corner cases. The decision was finally made to write a proper unit test framework after multiple brown-bag releases were made. _Research was done to look for an existing product that met the testing requirements, but no adequate product was found._
Tested Operating Systems (varies over time)
shUnit2 was originally developed to provide a consistent testing solution for
[log4sh][log4sh], a shell based logging framework similar to
[log4j](http://logging.apache.org). During the development of that product, a
repeated problem of having things work just fine under one shell (`/bin/bash` on
Linux to be specific), and then not working under another shell (`/bin/sh` on
Solaris) kept coming up. Although several simple tests were run, they were not
adequate and did not catch some corner cases. The decision was finally made to
write a proper unit test framework after multiple brown-bag releases were made.
_Research was done to look for an existing product that met the testing
requirements, but no adequate product was found._
* Cygwin
* FreeBSD (user supported)
* Linux (Gentoo, Ubuntu)
* Mac OS X
* Solaris 8, 9, 10 (inc. OpenSolaris)
### Tested software
Tested Shells
**Tested Operating Systems** (varies over time)
OS | Support | Verified
----------------------------------- | --------- | --------
Ubuntu Linux (14.04.05 LTS) | Travis CI | continuous
macOS High Sierra (10.13.3) | Travis CI | continuous
FreeBSD | user | unknown
Solaris 8, 9, 10 (inc. OpenSolaris) | user | unknown
Cygwin | user | unknown
**Tested Shells**
* Bourne Shell (__sh__)
* BASH - GNU Bourne Again SHell (__bash__)
* DASH (__dash__)
* Korn Shell (__ksh__)
* pdksh - Public Domain Korn Shell (__pdksh__)
* DASH - Debian Almquist Shell (__dash__)
* Korn Shell - AT&T version of the Korn shell (__ksh__)
* mksh - MirBSD Korn Shell (__mksh__)
* zsh - Zsh (__zsh__) (since 2.1.2) _please see the Zsh shell errata for more information_
See the appropriate Release Notes for this release (`doc/RELEASE_NOTES-X.X.X.txt`) for the list of actual versions tested.
See the appropriate Release Notes for this release
(`doc/RELEASE_NOTES-X.X.X.txt`) for the list of actual versions tested.
### <a name="credits-contributors"></a> Credits / Contributors
A list of contributors to shUnit2 can be found in `doc/contributors.md`. Many thanks go out to all those who have contributed to make this a better tool.
shUnit2 is the original product of many hours of work by Kate Ward, the primary author of the code. For related software, check out https://github.com/kward.
A list of contributors to shUnit2 can be found in `doc/contributors.md`. Many
thanks go out to all those who have contributed to make this a better tool.
shUnit2 is the original product of many hours of work by Kate Ward, the primary
author of the code. For related software, check out https://github.com/kward.
### <a name="feedback"></a> Feedback
Feedback is most certainly welcome for this document. Send your additions, comments and criticisms to the shunit2-users@google.com mailing list.
Feedback is most certainly welcome for this document. Send your questions,
comments, and criticisms via the
[shunit2-users](https://groups.google.com/a/forestent.com/forum/#!forum/shunit2-users/new)
forum (created 2018-12-09), or file an issue via
https://github.com/kward/shunit2/issues.
---
## <a name="quickstart"></a> Quickstart
This section will give a very quick start to running unit tests with shUnit2. More information is located in later sections.
Here is a quick sample script to show how easy it is to write a unit test in shell. _Note: the script as it stands expects that you are running it from the "examples" directory._
This section will give a very quick start to running unit tests with shUnit2.
More information is located in later sections.
Here is a quick sample script to show how easy it is to write a unit test in
shell. _Note: the script as it stands expects that you are running it from the
"examples" directory._
```sh
#! /bin/sh
@ -72,7 +106,7 @@ testEquality() {
}
# Load shUnit2.
. ./shunit2
. ../shunit2
```
Running the unit test should give results similar to the following.
@ -87,14 +121,38 @@ Ran 1 test.
OK
```
W00t! You've just run your first successful unit test. So, what just happened? Quite a bit really, and it all happened simply by sourcing the `shunit2` library. The basic functionality for the script above goes like this:
W00t! You've just run your first successful unit test. So, what just happened?
Quite a bit really, and it all happened simply by sourcing the `shunit2`
library. The basic functionality for the script above goes like this:
* When shUnit2 is sourced, it will walk through any functions defined whose name starts with the string `test`, and add those to an internal list of tests to execute. Once a list of test functions to be run has been determined, shunit2 will go to work.
* Before any tests are executed, shUnit2 again looks for a function, this time one named `oneTimeSetUp()`. If it exists, it will be run. This function is normally used to setup the environment for all tests to be run. Things like creating directories for output or setting environment variables are good to place here. Just so you know, you can also declare a corresponding function named `oneTimeTearDown()` function that does the same thing, but once all the tests have been completed. It is good for removing temporary directories, etc.
* shUnit2 is now ready to run tests. Before doing so though, it again looks for another function that might be declared, one named `setUp()`. If the function exists, it will be run before each test. It is good for resetting the environment so that each test starts with a clean slate. **At this stage, the first test is finally run.** The success of the test is recorded for a report that will be generated later. After the test is run, shUnit2 looks for a final function that might be declared, one named `tearDown()`. If it exists, it will be run after each test. It is a good place for cleaning up after each test, maybe doing things like removing files that were created, or removing directories. This set of steps, `setUp() > test() > tearDown()`, is repeated for all of the available tests.
* Once all the work is done, shUnit2 will generate the nice report you saw above. A summary of all the successes and failures will be given so that you know how well your code is doing.
* When shUnit2 is sourced, it will walk through any functions defined whose name
starts with the string `test`, and add those to an internal list of tests to
execute. Once a list of test functions to be run has been determined, shunit2
will go to work.
* Before any tests are executed, shUnit2 again looks for a function, this time
one named `oneTimeSetUp()`. If it exists, it will be run. This function is
normally used to set up the environment for all tests to be run. Things like
creating directories for output or setting environment variables are good to
place here. Just so you know, you can also declare a corresponding
`oneTimeTearDown()` function that does the same thing, but once all the tests
have been completed. It is good for removing temporary directories, etc.
* shUnit2 is now ready to run tests. Before doing so though, it again looks for
another function that might be declared, one named `setUp()`. If the function
exists, it will be run before each test. It is good for resetting the
environment so that each test starts with a clean slate. **At this stage, the
first test is finally run.** The success of the test is recorded for a report
that will be generated later. After the test is run, shUnit2 looks for a final
function that might be declared, one named `tearDown()`. If it exists, it will
be run after each test. It is a good place for cleaning up after each test,
maybe doing things like removing files that were created, or removing
directories. This set of steps, `setUp() > test() > tearDown()`, is repeated
for all of the available tests.
* Once all the work is done, shUnit2 will generate the nice report you saw
above. A summary of all the successes and failures will be given so that you
know how well your code is doing.
We should now try adding a test that fails. Change your unit test to look like this.
We should now try adding a test that fails. Change your unit test to look like
this.
```sh
#! /bin/sh
@ -110,12 +168,30 @@ testPartyLikeItIs1999() {
}
# Load shUnit2.
. ./shunit2
. ../shunit2
```
So, what did you get? I guess it told you that this isn't 1999. Bummer, eh? Hopefully, you noticed a couple of things that were different about the second test. First, we added an optional message that the user will see if the assert fails. Second, we did comparisons of strings instead of integers as in the first test. It doesn't matter whether you are testing for equality of strings or integers. Both work equally well with shUnit2.
So, what did you get? I guess it told you that this isn't 1999. Bummer, eh?
Hopefully, you noticed a couple of things that were different about the second
test. First, we added an optional message that the user will see if the assert
fails. Second, we did comparisons of strings instead of integers as in the first
test. It doesn't matter whether you are testing for equality of strings or
integers. Both work equally well with shUnit2.
Hopefully, this is enough to get you started with unit testing. If you want a ton more examples, take a look at the tests provided with [log4sh][log4sh] or [shFlags][shflags]. Both provide excellent examples of more advanced usage. shUnit2 was after all written to meet the unit testing need that [log4sh][log4sh] had.
Hopefully, this is enough to get you started with unit testing. If you want a
ton more examples, take a look at the tests provided with [log4sh][log4sh] or
[shFlags][shflags]. Both provide excellent examples of more advanced usage.
shUnit2 was after all written to meet the unit testing need that
[log4sh][log4sh] had.
If you are using a distribution-packaged shUnit2 that is accessible as
`/usr/bin/shunit2` (e.g. on Debian), you can load shUnit2 without specifying
its path. The last two lines in the above can then be replaced by:
```sh
# Load shUnit2.
. shunit2
```
---
@ -123,139 +199,212 @@ Hopefully, this is enough to get you started with unit testing. If you want a to
### <a name="general-info"></a> General Info
Any string values passed should be properly quoted -- they should must be surrounded by single-quote (`'`) or double-quote (`"`) characters -- so that the shell will properly parse them.
Any string values passed should be properly quoted -- they should be
surrounded by single-quote (`'`) or double-quote (`"`) characters -- so that the
shell will properly parse them.
### <a name="asserts"></a> Asserts
`assertEquals [message] expected actual`
assertEquals [message] expected actual
Asserts that _expected_ and _actual_ are equal to one another. The _expected_ and _actual_ values can be either strings or integer values as both will be treated as strings. The _message_ is optional, and must be quoted.
Asserts that _expected_ and _actual_ are equal to one another. The _expected_
and _actual_ values can be either strings or integer values as both will be
treated as strings. The _message_ is optional, and must be quoted.
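As a quick illustrative sketch (the test name and values are invented for the example):
```sh
testArithmetic() {
  result=`expr 2 + 2`
  assertEquals 'unexpected sum' 4 "${result}"
}
```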
`assertNotEquals [message] unexpected actual`
assertNotEquals [message] unexpected actual
Asserts that _unexpected_ and _actual_ are not equal to one another. The _unexpected_ and _actual_ values can be either strings or integer values as both will be treaded as strings. The _message_ is optional, and must be quoted.
Asserts that _unexpected_ and _actual_ are not equal to one another. The
_unexpected_ and _actual_ values can be either strings or integer values as both
will be treated as strings. The _message_ is optional, and must be quoted.
`assertSame [message] expected actual`
assertSame [message] expected actual
This function is functionally equivalent to `assertEquals`.
`assertNotSame [message] unexpected actual`
assertNotSame [message] unexpected actual
This function is functionally equivalent to `assertNotEquals`.
`assertNull [message] value`
assertContains [message] container content
Asserts that _value_ is _null_, or in shell terms, a zero-length string. The _value_ must be a string as an integer value does not translate into a zero-length string. The _message_ is optional, and must be quoted.
Asserts that _container_ contains _content_. The _container_ and _content_
values can be either strings or integer values as both will be treated as
strings. The _message_ is optional, and must be quoted.
`assertNotNull [message] value`
assertNotContains [message] container content
Asserts that _value_ is _not null_, or in shell terms, a non-empty string. The _value_ may be a string or an integer as the later will be parsed as a non-empty string value. The _message_ is optional, and must be quoted.
Asserts that _container_ does not contain _content_. The _container_ and
_content_ values can be either strings or integer values as both will be treated
as strings. The _message_ is optional, and must be quoted.
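A brief illustrative sketch of both container asserts (values invented for the example):
```sh
testGreeting() {
  greeting='hello, world'
  assertContains "${greeting}" 'hello'
  assertNotContains 'greeting must not say goodbye' "${greeting}" 'goodbye'
}
```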
`assertTrue [message] condition`
assertNull [message] value
Asserts that a given shell test _condition_ is _true_. The condition can be as simple as a shell _true_ value (the value `0` -- equivalent to `${SHUNIT_TRUE}`), or a more sophisticated shell conditional expression. The _message_ is optional, and must be quoted.
Asserts that _value_ is _null_, or in shell terms, a zero-length string. The
_value_ must be a string as an integer value does not translate into a zero-
length string. The _message_ is optional, and must be quoted.
A sophisticated shell conditional expression is equivalent to what the __if__ or __while__ shell built-ins would use (more specifically, what the __test__ command would use). Testing for example whether some value is greater than another value can be done this way.
assertNotNull [message] value
`assertTrue "[ 34 -gt 23 ]"`
Asserts that _value_ is _not null_, or in shell terms, a non-empty string. The
_value_ may be a string or an integer as the latter will be parsed as a non-empty
string value. The _message_ is optional, and must be quoted.
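An illustrative sketch of both null asserts (values invented for the example):
```sh
testOptionalValue() {
  value=''
  assertNull 'value should start out empty' "${value}"
  value='filled in'
  assertNotNull 'value should now be set' "${value}"
}
```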
Testing for the ability to read a file can also be done. This particular test will fail.
assertTrue [message] condition
`assertTrue 'test failed' "[ -r /some/non-existant/file' ]"`
Asserts that a given shell test _condition_ is _true_. The condition can be as
simple as a shell _true_ value (the value `0` -- equivalent to
`${SHUNIT_TRUE}`), or a more sophisticated shell conditional expression. The
_message_ is optional, and must be quoted.
As the expressions are standard shell __test__ expressions, it is possible to string multiple expressions together with `-a` and `-o` in the standard fashion. This test will succeed as the entire expression evaluates to _true_.
A sophisticated shell conditional expression is equivalent to what the __if__ or
__while__ shell built-ins would use (more specifically, what the __test__
command would use). Testing for example whether some value is greater than
another value can be done this way.
`assertTrue 'test failed' '[ 1 -eq 1 -a 2 -eq 2 ]'`
assertTrue "[ 34 -gt 23 ]"
_One word of warning: be very careful with your quoting as shell is not the most forgiving of bad quoting, and things will fail in strange ways._
Testing for the ability to read a file can also be done. This particular test
will fail.
`assertFalse [message] condition`
assertTrue 'test failed' "[ -r /some/non-existant/file ]"
Asserts that a given shell test _condition_ is _false_. The condition can be as simple as a shell _false_ value (the value `1` -- equivalent to `${SHUNIT_FALSE}`), or a more sophisticated shell conditional expression. The _message_ is optional, and must be quoted.
As the expressions are standard shell __test__ expressions, it is possible to
string multiple expressions together with `-a` and `-o` in the standard fashion.
This test will succeed as the entire expression evaluates to _true_.
assertTrue 'test failed' '[ 1 -eq 1 -a 2 -eq 2 ]'
<i>One word of warning: be very careful with your quoting as shell is not the
most forgiving of bad quoting, and things will fail in strange ways.</i>
assertFalse [message] condition
Asserts that a given shell test _condition_ is _false_. The condition can be as
simple as a shell _false_ value (the value `1` -- equivalent to
`${SHUNIT_FALSE}`), or a more sophisticated shell conditional expression. The
_message_ is optional, and must be quoted.
_For examples of more sophisticated expressions, see `assertTrue`._
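A minimal illustrative sketch:
```sh
assertFalse 'a non-zero status is false' 1
assertFalse 'two unequal numbers' '[ 1 -eq 2 ]'
```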
### <a name="failures"></a> Failures
Just to clarify, failures __do not__ test the various arguments against one another. Failures simply fail, optionally with a message, and that is all they do. If you need to test arguments against one another, use asserts.
Just to clarify, failures __do not__ test the various arguments against one
another. Failures simply fail, optionally with a message, and that is all they
do. If you need to test arguments against one another, use asserts.
If all failures do is fail, why might one use them? There are times when you may have some very complicated logic that you need to test, and the simple asserts provided are simply not adequate. You can do your own validation of the code, use an `assertTrue ${SHUNIT_TRUE}` if your own tests succeeded, and use a failure to record a failure.
If all failures do is fail, why might one use them? There are times when you may
have some very complicated logic that you need to test, and the simple asserts
provided are simply not adequate. You can do your own validation of the code,
use an `assertTrue ${SHUNIT_TRUE}` if your own tests succeeded, and use a
failure to record a failure.
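For illustration, a hypothetical test that does its own validation and records the result with `fail` (`myComplicatedCheck` is a placeholder for your own logic):
```sh
testComplicatedLogic() {
  if myComplicatedCheck; then
    assertTrue ${SHUNIT_TRUE}
  else
    fail 'myComplicatedCheck did not produce the expected state'
  fi
}
```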
`fail [message]`
fail [message]
Fails the test immediately. The _message_ is optional, and must be quoted.
`failNotEquals [message] unexpected actual`
failNotEquals [message] unexpected actual
Fails the test immediately, reporting that the _unexpected_ and _actual_ values are not equal to one another. The _message_ is optional, and must be quoted.
Fails the test immediately, reporting that the _unexpected_ and _actual_ values
are not equal to one another. The _message_ is optional, and must be quoted.
_Note: no actual comparison of unexpected and actual is done._
`failSame [message] expected actual`
failSame [message] expected actual
Fails the test immediately, reporting that the _expected_ and _actual_ values are the same. The _message_ is optional, and must be quoted.
Fails the test immediately, reporting that the _expected_ and _actual_ values
are the same. The _message_ is optional, and must be quoted.
_Note: no actual comparison of expected and actual is done._
`failNotSame [message] expected actual`
failNotSame [message] expected actual
Fails the test immediately, reporting that the _expected_ and _actual_ values are not the same. The _message_ is optional, and must be quoted.
Fails the test immediately, reporting that the _expected_ and _actual_ values
are not the same. The _message_ is optional, and must be quoted.
_Note: no actual comparison of expected and actual is done._
failFound [message] content
Fails the test immediately, reporting that the _content_ was found. The
_message_ is optional, and must be quoted.
_Note: no actual search of content is done._
failNotFound [message] content
Fails the test immediately, reporting that the _content_ was not found. The
_message_ is optional, and must be quoted.
_Note: no actual search of content is done._
### <a name="setup-teardown"></a> Setup/Teardown
`oneTimeSetUp`
This function can be be optionally overridden by the user in their test suite.
If this function exists, it will be called once before any tests are run. It is useful to prepare a common environment for all tests.
`oneTimeTearDown`
This function can be be optionally overridden by the user in their test suite.
If this function exists, it will be called once after all tests are completed. It is useful to clean up the environment after all tests.
`setUp`
This function can be be optionally overridden by the user in their test suite.
If this function exists, it will be called before each test is run. It is useful to reset the environment before each test.
`tearDown`
This function can be be optionally overridden by the user in their test suite.
If this function exists, it will be called after each test completes. It is useful to clean up the environment after each test.
### <a name="skipping"></a> Skipping
`startSkipping`
This function forces the remaining _assert_ and _fail_ functions to be "skipped", i.e. they will have no effect. Each function skipped will be recorded so that the total of asserts and fails will not be altered.
`endSkipping`
This function returns calls to the _assert_ and _fail_ functions to their default behavior, i.e. they will be called.
`isSkipping`
This function returns the current state of skipping. It can be compared against `${SHUNIT_TRUE}` or `${SHUNIT_FALSE}` if desired.
### <a name="suites"></a> Suites
The default behavior of shUnit2 is that all tests will be found dynamically. If you have a specific set of tests you want to run, or you don't want to use the standard naming scheme of prefixing your tests with `test`, these functions are for you. Most users will never use them though.
`suite`
oneTimeSetUp
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called when `shunit2` is sourced. If it does not exist, shUnit2 will search the parent script for all functions beginning with the word `test`, and they will be added dynamically to the test suite.
If this function exists, it will be called once before any tests are run. It is
useful to prepare a common environment for all tests.
`suite_addTest name`
oneTimeTearDown
This function adds a function named _name_ to the list of tests scheduled for execution as part of this test suite. This function should only be called from within the `suite()` function.
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called once after all tests are completed.
It is useful to clean up the environment after all tests.
setUp
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called before each test is run. It is useful
to reset the environment before each test.
tearDown
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called after each test completes. It is
useful to clean up the environment after each test.
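For illustration, a small fixture sketch using all four hooks (the paths and file names are invented for the example):
```sh
oneTimeSetUp() {
  # Shared by every test in this file.
  outputDir="${TMPDIR:-/tmp}/output.$$"
  mkdir -p "${outputDir}"
}

oneTimeTearDown() {
  rm -rf "${outputDir}"
}

setUp() {
  # Give each test a fresh, empty work file.
  workFile="${outputDir}/work.txt"
  : >"${workFile}"
}

tearDown() {
  rm -f "${workFile}"
}
```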
### <a name="skipping"></a> Skipping
startSkipping
This function forces the remaining _assert_ and _fail_ functions to be
"skipped", i.e. they will have no effect. Each function skipped will be recorded
so that the total of asserts and fails will not be altered.
endSkipping
This function returns calls to the _assert_ and _fail_ functions to their
default behavior, i.e. they will be called.
isSkipping
This function returns the current state of skipping. It can be compared against
`${SHUNIT_TRUE}` or `${SHUNIT_FALSE}` if desired.
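A small illustrative sketch (the Linux-only check is invented for the example):
```sh
testLinuxOnlyBehaviour() {
  if [ "`uname -s`" != 'Linux' ]; then
    # Record the asserts below as skipped on non-Linux systems.
    startSkipping
  fi
  assertTrue 'expected /proc to be present' '[ -d /proc ]'
  endSkipping
}
```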
### <a name="suites"></a> Suites
The default behavior of shUnit2 is that all tests will be found dynamically. If
you have a specific set of tests you want to run, or you don't want to use the
standard naming scheme of prefixing your tests with `test`, these functions are
for you. Most users will never use them though.
suite
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called when `shunit2` is sourced. If it does
not exist, shUnit2 will search the parent script for all functions beginning
with the word `test`, and they will be added dynamically to the test suite.
suite_addTest name
This function adds a function named _name_ to the list of tests scheduled for
execution as part of this test suite. This function should only be called from
within the `suite()` function.
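An illustrative `suite()` sketch that registers a test whose name does not start with `test` (`testSomethingBasic` is assumed to be defined elsewhere in the same script):
```sh
suite() {
  suite_addTest testSomethingBasic
  suite_addTest checkUnconventionalName
}

checkUnconventionalName() {
  assertTrue '[ 1 -eq 1 ]'
}
```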
---
@ -263,7 +412,8 @@ This function adds a function named _name_ to the list of tests scheduled for ex
### <a name="some-constants-you-can-use"></a> Some constants you can use
There are several constants provided by shUnit2 as variables that might be of use to you.
There are several constants provided by shUnit2 as variables that might be of
use to you.
*Predefined*
@ -280,22 +430,32 @@ There are several constants provided by shUnit2 as variables that might be of us
| Constant | Value |
| ----------------- | ----- |
| SHUNIT\_CMD\_EXPR | Override which `expr` command is used. By default `expr` is used, except on BSD systems where `gexpr` is used. |
| SHUNIT\_COLOR | Enable colorized output. Options are 'auto', 'always', or 'never', with 'auto' being the default. |
| SHUNIT\_COLOR | Enable colorized output. Options are 'auto', 'always', or 'none', with 'auto' being the default. |
| SHUNIT\_PARENT | The filename of the shell script containing the tests. This is needed specifically for Zsh support. |
| SHUNIT\_TEST\_PREFIX | Define this variable to add a prefix in front of each test name that is output in the test report. |
### <a name="error-handling"></a> Error handling
The constants values `SHUNIT_TRUE`, `SHUNIT_FALSE`, and `SHUNIT_ERROR` are returned from nearly every function to indicate the success or failure of the function. Additionally the variable `flags_error` is filled with a detailed error message if any function returns with a `SHUNIT_ERROR` value.
The constant values `SHUNIT_TRUE`, `SHUNIT_FALSE`, and `SHUNIT_ERROR` are
returned from nearly every function to indicate the success or failure of the
function. Additionally the variable `flags_error` is filled with a detailed
error message if any function returns with a `SHUNIT_ERROR` value.
### <a name="including-line-numbers-in-asserts-macros"></a> Including Line Numbers in Asserts (Macros)
If you include lots of assert statements in an individual test function, it can become difficult to determine exactly which assert was thrown unless your messages are unique. To help somewhat, line numbers can be included in the assert messages. To enable this, a special shell "macro" must be used rather than the standard assert calls. _Shell doesn't actually have macros; the name is used here as the operation is similar to a standard macro._
If you include lots of assert statements in an individual test function, it can
become difficult to determine exactly which assert was thrown unless your
messages are unique. To help somewhat, line numbers can be included in the
assert messages. To enable this, a special shell "macro" must be used rather
than the standard assert calls. _Shell doesn't actually have macros; the name is
used here as the operation is similar to a standard macro._
For example, to include line numbers for a `assertEquals()` function call, replace the `assertEquals()` with `${_ASSERT_EQUALS_}`.
For example, to include line numbers for an `assertEquals()` function call,
replace the `assertEquals()` with `${_ASSERT_EQUALS_}`.
_**Example** -- Asserts with and without line numbers_
```sh
```shell
#! /bin/sh
# file: examples/lineno_test.sh
@ -309,20 +469,36 @@ testLineNo() {
}
# Load shUnit2.
. ./shunit2
. ../shunit2
```
Notes:
1. Due to how shell parses command-line arguments, all strings used with macros should be quoted twice. Namely, single-quotes must be converted to single-double-quotes, and vice-versa. If the string being passed is absolutely for sure not empty, the extra quoting is not necessary.<br/><br/>Normal `assertEquals` call.<br/>`assertEquals 'some message' 'x' ''`<br/><br/>Macro `_ASSERT_EQUALS_` call. Note the extra quoting around the _message_ and the _null_ value.<br/>`_ASSERT_EQUALS_ '"some message"' 'x' '""'`
1. Due to how shell parses command-line arguments, _**all strings used with
macros should be quoted twice**_. Namely, single-quotes must be converted to single-double-quotes, and vice-versa.<br/>
<br/>
Normal `assertEquals` call.<br/>
`assertEquals 'some message' 'x' ''`<br/>
<br/>
Macro `_ASSERT_EQUALS_` call. Note the extra quoting around the _message_ and
the _null_ value.<br/>
`_ASSERT_EQUALS_ '"some message"' 'x' '""'`
1. Line numbers are not supported in all shells. If a shell does not support them, no errors will be thrown. Supported shells include: __bash__ (>=3.0), __ksh__, __pdksh__, and __zsh__.
1. Line numbers are not supported in all shells. If a shell does not support
them, no errors will be thrown. Supported shells include: __bash__ (>=3.0),
__ksh__, __mksh__, and __zsh__.
### <a name="test-skipping"></a> Test Skipping
There are times where the test code you have written is just not applicable to the system you are running on. This section describes how to skip these tests but maintain the total test count.
There are times where the test code you have written is just not applicable to
the system you are running on. This section describes how to skip these tests
but maintain the total test count.
Probably the easiest example would be shell code that is meant to run under the __bash__ shell, but the unit test is running under the Bourne shell. There are things that just won't work. The following test code demonstrates two sample functions, one that will be run under any shell, and the another that will run only under the __bash__ shell.
Probably the easiest example would be shell code that is meant to run under the
__bash__ shell, but the unit test is running under the Bourne shell. There are
things that just won't work. The following test code demonstrates two sample
functions, one that will be run under any shell, and another that will run
only under the __bash__ shell.
_**Example** -- math include_
```sh
@ -371,10 +547,11 @@ oneTimeSetUp() {
}
# Load and run shUnit2.
. ./shunit2
. ../shunit2
```
Running the above test under the __bash__ shell will result in the following output.
Running the above test under the __bash__ shell will result in the following
output.
```console
$ /bin/bash math_test.sh
@ -385,7 +562,8 @@ Ran 1 test.
OK
```
But, running the test under any other Unix shell will result in the following output.
But, running the test under any other Unix shell will result in the following
output.
```console
$ /bin/ksh math_test.sh
@ -396,9 +574,33 @@ Ran 1 test.
OK (skipped=1)
```
As you can see, the total number of tests has not changed, but the report indicates that some tests were skipped.
As you can see, the total number of tests has not changed, but the report
indicates that some tests were skipped.
Skipping can be controlled with the following functions: `startSkipping()`, `endSkipping()`, and `isSkipping()`. Once skipping is enabled, it will remain enabled until the end of the current test function call, after which skipping is disabled.
Skipping can be controlled with the following functions: `startSkipping()`,
`endSkipping()`, and `isSkipping()`. Once skipping is enabled, it will remain
enabled until the end of the current test function call, after which skipping is
disabled.
### <a name="cmd-line-args"></a> Running specific tests from the command line.
When running a test script, you may override the default set of tests, or the suite-specified set of tests, by providing additional arguments on the command line. Each additional argument after the `--` marker is assumed to be the name of a test function to be run in the order specified, e.g.
```console
test-script.sh -- testOne testTwo otherFunction
```
or
```console
shunit2 test-script.sh testOne testTwo otherFunction
```
In either case, three functions will be run as tests: `testOne`, `testTwo`, and `otherFunction`. Note that the function `otherFunction` would not normally be run by `shunit2` as part of the implicit collection of tests, as its function name does not match the test function name pattern `test*`.
If a specified test function does not exist, `shunit2` will still attempt to run that function and thereby cause a failure which `shunit2` will catch and mark as a failed test. All other tests will run normally.
The specification of tests does not affect how `shunit2` looks for and executes the setup and tear down functions, which will still run as expected.
---
@ -406,25 +608,32 @@ Skipping can be controlled with the following functions: `startSkipping()`, `end
### <a name="getting-help"></a> Getting Help
For help, please send requests to either the shunit2-users@googlegroups.com mailing list (archives available on the web at http://groups.google.com/group/shunit2-users) or directly to Kate Ward <kate dot ward at forestent dot com>.
For help, please send requests to either the shunit2-users@forestent.com mailing
list (archives available on the web at
https://groups.google.com/a/forestent.com/forum/#!forum/shunit2-users) or
directly to Kate Ward <kate dot ward at forestent dot com>.
### <a name="zsh"></a> Zsh
For compatibility with Zsh, there is one requirement that must be met -- the `shwordsplit` option must be set. There are three ways to accomplish this.
For compatibility with Zsh, there is one requirement that must be met -- the
`shwordsplit` option must be set. There are three ways to accomplish this.
1. In the unit-test script, add the following shell code snippet before sourcing the `shunit2` library.
1. In the unit-test script, add the following shell code snippet before sourcing
the `shunit2` library.
```sh
setopt shwordsplit
```
1. When invoking __zsh__ from either the command-line or as a script with `#!`, add the `-y` parameter.
2. When invoking __zsh__ from either the command-line or as a script with `#!`,
add the `-y` parameter.
```sh
#! /bin/zsh -y
```
1. When invoking __zsh__ from the command-line, add `-o shwordsplit --` as parameters before the script name.
3. When invoking __zsh__ from the command-line, add `-o shwordsplit --` as
parameters before the script name.
```console
$ zsh -o shwordsplit -- some_script

View File

@ -0,0 +1,47 @@
#! /bin/sh
#
# Initialize the local git hooks for this repository.
# https://git-scm.com/docs/githooks
topLevel=$(git rev-parse --show-toplevel)
if ! cd "${topLevel}"; then
echo "filed to cd into topLevel directory '${topLevel}'"
exit 1
fi
hooksDir="${topLevel}/.githooks"
if ! hooksPath=$(git config core.hooksPath); then
  hooksPath="${topLevel}/.git/hooks"
fi
src="${hooksDir}/generic"
echo "linking hooks..."
for hook in \
    applypatch-msg \
    pre-applypatch \
    post-applypatch \
    pre-commit \
    pre-merge-commit \
    prepare-commit-msg \
    commit-msg \
    post-commit \
    pre-rebase \
    post-checkout \
    post-merge \
    pre-push \
    pre-receive \
    update \
    post-receive \
    post-update \
    push-to-checkout \
    pre-auto-gc \
    post-rewrite \
    sendemail-validate \
    fsmonitor-watchman \
    p4-pre-submit \
    post-index-change
do
  echo " ${hook}"
  dest="${hooksPath}/${hook}"
  ln -sf "${src}" "${dest}"
done

View File

@ -3,7 +3,7 @@
#
# Versions determines the versions of all installed shells.
#
# Copyright 2008-2018 Kate Ward. All Rights Reserved.
# Copyright 2008-2020 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 License.
#
# Author: kate.ward@forestent.com (Kate Ward)
@ -18,7 +18,7 @@
ARGV0=`basename "$0"`
LSB_RELEASE='/etc/lsb-release'
VERSIONS_SHELLS='ash /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh /bin/sh /usr/xpg4/bin/sh /sbin/sh'
VERSIONS_SHELLS='ash /bin/bash /bin/dash /bin/ksh /bin/mksh /bin/pdksh /bin/zsh /usr/xpg4/bin/sh /bin/sh /sbin/sh'
true; TRUE=$?
false; FALSE=$?
@ -49,6 +49,10 @@ versions_osName() {
10.11|10.11.[0-9]*) os_name_='Mac OS X El Capitan' ;;
10.12|10.12.[0-9]*) os_name_='macOS Sierra' ;;
10.13|10.13.[0-9]*) os_name_='macOS High Sierra' ;;
10.14|10.14.[0-9]*) os_name_='macOS Mojave' ;;
10.15|10.15.[0-9]*) os_name_='macOS Catalina' ;;
11.*) os_name_='macOS Big Sur' ;;
12.*) os_name_='macOS Monterey' ;;
*) os_name_='macOS' ;;
esac
;;
@ -133,10 +137,11 @@ versions_shellVersion() {
version_=''
case ${shell_} in
/sbin/sh) ;; # SunOS
/usr/xpg4/bin/sh)
version_=`versions_shell_xpg4 "${shell_}"`
;; # SunOS
# SunOS shells.
/sbin/sh) ;;
/usr/xpg4/bin/sh) version_=`versions_shell_xpg4 "${shell_}"` ;;
# Generic shell.
*/sh)
# This could be one of any number of shells. Try until one fits.
version_=''
@ -147,16 +152,22 @@ versions_shellVersion() {
[ -z "${version_}" ] && version_=`versions_shell_xpg4 "${shell_}"`
[ -z "${version_}" ] && version_=`versions_shell_zsh "${shell_}"`
;;
# Specific shells.
ash) version_=`versions_shell_ash "${shell_}"` ;;
# bash - Bourne Again SHell (https://www.gnu.org/software/bash/)
*/bash) version_=`versions_shell_bash "${shell_}"` ;;
*/dash)
# Assuming Ubuntu Linux until somebody comes up with a better test. The
# following test will return an empty string if dash is not installed.
version_=`versions_shell_dash`
;;
*/dash) version_=`versions_shell_dash` ;;
# ksh - KornShell (http://www.kornshell.com/)
*/ksh) version_=`versions_shell_ksh "${shell_}"` ;;
# mksh - MirBSD Korn Shell (http://www.mirbsd.org/mksh.htm)
*/mksh) version_=`versions_shell_ksh "${shell_}"` ;;
# pdksh - Public Domain Korn Shell (http://web.cs.mun.ca/~michael/pdksh/)
*/pdksh) version_=`versions_shell_pdksh "${shell_}"` ;;
# zsh (https://www.zsh.org/)
*/zsh) version_=`versions_shell_zsh "${shell_}"` ;;
# Unrecognized shell.
*) version_='invalid'
esac
@ -173,6 +184,8 @@ versions_shell_bash() {
$1 --version : 2>&1 |grep 'GNU bash' |sed 's/.*version \([^ ]*\).*/\1/'
}
# Assuming Ubuntu Linux until somebody comes up with a better test. The
# following test will return an empty string if dash is not installed.
versions_shell_dash() {
eval dpkg >/dev/null 2>&1
[ $? -eq 127 ] && return # Return if dpkg not found.
@ -193,6 +206,10 @@ versions_shell_ksh() {
else
versions_version_=''
fi
if [ -z "${versions_version_}" ]; then
# shellcheck disable=SC2016
versions_version_=`${versions_shell_} -c 'echo ${KSH_VERSION}'`
fi
if [ -z "${versions_version_}" ]; then
_versions_have_strings
versions_version_=`strings "${versions_shell_}" 2>&1 \
@ -207,6 +224,14 @@ versions_shell_ksh() {
unset versions_shell_ versions_version_
}
# mksh - MirBSD Korn Shell (http://www.mirbsd.org/mksh.htm)
# mksh is a successor to pdksh (Public Domain Korn Shell).
versions_shell_mksh() {
versions_shell_ksh
}
# pdksh - Public Domain Korn Shell
# pdksh is an obsolete shell, which was replaced by mksh (among others).
versions_shell_pdksh() {
_versions_have_strings
strings "$1" 2>&1 \

File diff suppressed because it is too large

View File

@ -0,0 +1,64 @@
#!/bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shunit2 unit test for running subset(s) of tests based upon command line args.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# https://github.com/kward/shunit2
#
# Also shows how non-default tests or an arbitrary subset of tests can be run.
#
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Load test helpers.
. ./shunit2_test_helpers
CUSTOM_TEST_RAN=''
# This test does not normally run because its name does not begin with 'test'.
# It will be run by setting the script arguments to include the name of this
# test.
custom_test() {
  # Arbitrary assert.
  assertTrue 0
  # The true intent is to set this variable, which will be tested below.
  CUSTOM_TEST_RAN='yup, we ran'
}
# Verify that `custom_test()` ran.
testCustomTestRan() {
assertNotNull "'custom_test()' did not run" "${CUSTOM_TEST_RAN}"
}
# Fail if this test runs, which it shouldn't if arguments are set correctly.
testShouldFail() {
  fail 'testShouldFail should not be run if argument parsing works'
}
oneTimeSetUp() {
  th_oneTimeSetUp
}
# If zero/one argument(s) are provided, this test is being run in its
# entirety, and therefore we want to set the arguments to the script to
# (simulate and) test the processing of command-line specified tests. If we
# don't, then the "testShouldFail" test will run (by default) and the overall
# test will fail.
#
# However, if two or more arguments are provided, then assume this test script
# is being run by hand to experiment with command-line test specification, and
# then don't override the user provided arguments.
if [ "$#" -le 1 ]; then
  # We set the arguments in a POSIX way, inasmuch as we can;
  # helpful tip:
  # https://unix.stackexchange.com/questions/258512/how-to-remove-a-positional-parameter-from
  set -- '--' 'custom_test' 'testCustomTestRan'
fi
# Load and run shunit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. "${TH_SHUNIT}"

View File

@ -3,12 +3,16 @@
#
# shunit2 unit test for assert functions.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# In this file, all assert calls under test must be wrapped in () so they do not
# influence the metrics of the test itself.
#
# Disable source following.
# shellcheck disable=SC1090,SC1091
@ -22,175 +26,377 @@ stderrF="${TMPDIR:-/tmp}/STDERR"
commonEqualsSame() {
fn=$1
( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'equal' $? "${stdoutF}" "${stderrF}"
# These should succeed.
( ${fn} "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'equal; with msg' $? "${stdoutF}" "${stderrF}"
desc='equal'
if (${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} 'abc def' 'abc def' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'equal with spaces' $? "${stdoutF}" "${stderrF}"
desc='equal_with_message'
if (${fn} 'some message' 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'not equal' $? "${stdoutF}" "${stderrF}"
desc='equal_with_spaces'
if (${fn} 'abc def' 'abc def' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} '' '' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'null values' $? "${stdoutF}" "${stderrF}"
desc='equal_null_values'
if (${fn} '' '' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} arg1 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
# These should fail.
( ${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc='not_equal'
if (${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
}
commonNotEqualsSame() {
fn=$1
( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not same' $? "${stdoutF}" "${stderrF}"
# These should succeed.
( ${fn} "${MSG}" 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not same, with msg' $? "${stdoutF}" "${stderrF}"
desc='not_same'
if (${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
desc='not_same_with_message'
if (${fn} 'some message' 'x' 'y' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( ${fn} '' '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
# These should fail.
( ${fn} arg1 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
desc='same'
if (${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
( ${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc='unequal_null_values'
if (${fn} '' '' >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
}
testAssertEquals() {
commonEqualsSame 'assertEquals'
testAssertEquals() { commonEqualsSame 'assertEquals'; }
testAssertNotEquals() { commonNotEqualsSame 'assertNotEquals'; }
testAssertSame() { commonEqualsSame 'assertSame'; }
testAssertNotSame() { commonNotEqualsSame 'assertNotSame'; }
testAssertContains() {
# Content is present.
while read -r desc container content; do
if (assertContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
done <<EOF
abc_at_start abcdef abc
bcd_in_middle abcdef bcd
def_at_end abcdef def
EOF
# Content missing.
while read -r desc container content; do
if (assertContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: unexpected failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
done <<EOF
xyz_not_present abcdef xyz
zab_contains_start abcdef zab
efg_contains_end abcdef efg
acf_has_parts abcdef acf
EOF
desc="content_starts_with_dash"
if (assertContains 'abc -Xabc def' '-Xabc' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
desc="contains_with_message"
if (assertContains 'some message' 'abcdef' 'abc' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
}
testAssertNotEquals() {
commonNotEqualsSame 'assertNotEquals'
}
testAssertNotContains() {
# Content not present.
while read -r desc container content; do
if (assertNotContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
done <<EOF
xyz_not_present abcdef xyz
zab_contains_start abcdef zab
efg_contains_end abcdef efg
acf_has_parts abcdef acf
EOF
testAssertSame() {
commonEqualsSame 'assertSame'
}
# Content present.
while read -r desc container content; do
if (assertNotContains "${container}" "${content}" >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
done <<EOF
abc_is_present abcdef abc
EOF
testAssertNotSame() {
commonNotEqualsSame 'assertNotSame'
desc='not_contains_with_message'
if (assertNotContains 'some message' 'abcdef' 'xyz' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
}
testAssertNull() {
( assertNull '' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'null' $? "${stdoutF}" "${stderrF}"
while read -r desc value; do
if (assertNull "${value}" >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: unexpected failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
done <<'EOF'
x_alone x
x_double_quote_a x"a
x_single_quote_a x'a
x_dollar_a x$a
x_backtick_a x`a
EOF
( assertNull "${MSG}" '' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'null, with msg' $? "${stdoutF}" "${stderrF}"
desc='null_without_message'
if (assertNull '' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( assertNull 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'not null' $? "${stdoutF}" "${stderrF}"
desc='null_with_message'
if (assertNull 'some message' '' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( assertNull >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
( assertNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc='x_is_not_null'
if (assertNull 'x' >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
}
testAssertNotNull()
{
( assertNotNull 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null' $? "${stdoutF}" "${stderrF}"
testAssertNotNull() {
while read -r desc value; do
if (assertNotNull "${value}" >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
done <<'EOF'
x_alone x
x_double_quote_b x"b
x_single_quote_b x'b
x_dollar_b x$b
x_backtick_b x`b
EOF
( assertNotNull "${MSG}" 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with msg' $? "${stdoutF}" "${stderrF}"
desc='not_null_with_message'
if (assertNotNull 'some message' 'x' >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
( assertNotNull 'x"b' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with double-quote' $? \
"${stdoutF}" "${stderrF}"
( assertNotNull "x'b" >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with single-quote' $? \
"${stdoutF}" "${stderrF}"
# shellcheck disable=SC2016
( assertNotNull 'x$b' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with dollar' $? \
"${stdoutF}" "${stderrF}"
( assertNotNull 'x`b' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with backtick' $? \
"${stdoutF}" "${stderrF}"
( assertNotNull '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}"
# There is no test for too few arguments as $1 might actually be null.
( assertNotNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc="double_ticks_are_null"
if (assertNotNull '' >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
}
testAssertTrue() {
( assertTrue 0 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'true' $? "${stdoutF}" "${stderrF}"
# True values.
while read -r desc value; do
if (assertTrue "${value}" >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
done <<'EOF'
zero 0
zero_eq_zero [ 0 -eq 0 ]
EOF
( assertTrue "${MSG}" 0 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'true, with msg' $? "${stdoutF}" "${stderrF}"
# Not true values.
while read -r desc value; do
if (assertTrue "${value}" >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
done <<EOF
one 1
zero_eq_1 [ 0 -eq 1 ]
null
EOF
( assertTrue '[ 0 -eq 0 ]' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'true condition' $? "${stdoutF}" "${stderrF}"
( assertTrue 1 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'false' $? "${stdoutF}" "${stderrF}"
( assertTrue '[ 0 -eq 1 ]' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'false condition' $? "${stdoutF}" "${stderrF}"
( assertTrue '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}"
( assertTrue >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
( assertTrue arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc='true_with_message'
if (assertTrue 'some message' 0 >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
}
testAssertFalse() {
( assertFalse 1 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'false' $? "${stdoutF}" "${stderrF}"
# False values.
while read -r desc value; do
if (assertFalse "${value}" >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
done <<EOF
one 1
zero_eq_1 [ 0 -eq 1 ]
null
EOF
( assertFalse "${MSG}" 1 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'false, with msg' $? "${stdoutF}" "${stderrF}"
# Not true values.
while read -r desc value; do
if (assertFalse "${value}" >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
done <<'EOF'
zero 0
zero_eq_zero [ 0 -eq 0 ]
EOF
( assertFalse '[ 0 -eq 1 ]' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'false condition' $? "${stdoutF}" "${stderrF}"
desc='false_with_message'
if (assertFalse 'some message' 1 >"${stdoutF}" 2>"${stderrF}"); then
th_assertTrueWithNoOutput "${desc}" $? "${stdoutF}" "${stderrF}"
else
fail "${desc}: unexpected failure"
_showTestOutput
fi
}
( assertFalse 0 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'true' $? "${stdoutF}" "${stderrF}"
FUNCTIONS='
assertEquals assertNotEquals
assertSame assertNotSame
assertContains assertNotContains
assertNull assertNotNull
assertTrue assertFalse
'
( assertFalse '[ 0 -eq 0 ]' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'true condition' $? "${stdoutF}" "${stderrF}"
testTooFewArguments() {
for fn in ${FUNCTIONS}; do
# These functions support zero arguments.
case "${fn}" in
assertNull) continue ;;
assertNotNull) continue ;;
esac
( assertFalse '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'true condition' $? "${stdoutF}" "${stderrF}"
desc="${fn}"
if (${fn} >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
got=$? want=${SHUNIT_ERROR}
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
fi
done
}
( assertFalse >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
( assertFalse arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
testTooManyArguments() {
for fn in ${FUNCTIONS}; do
desc="${fn}"
if (${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
got=$? want=${SHUNIT_ERROR}
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
fi
done
}
oneTimeSetUp() {
th_oneTimeSetUp
MSG='This is a test message'
}
# showTestOutput for the most recently run test.
_showTestOutput() { th_showOutput "${SHUNIT_FALSE}" "${stdoutF}" "${stderrF}"; }
# Load and run shunit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0

View File

@ -1,10 +1,11 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit test for failure functions
# shUnit2 unit test for failure functions. These functions do not test values.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License)
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
@ -20,60 +21,114 @@ stderrF="${TMPDIR:-/tmp}/STDERR"
. ./shunit2_test_helpers
testFail() {
( fail >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'fail' $? "${stdoutF}" "${stderrF}"
# Test without a message.
desc='fail_without_message'
if ( fail >"${stdoutF}" 2>"${stderrF}" ); then
fail "${desc}: expected a failure"
th_showOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
( fail "${MSG}" >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'fail with msg' $? "${stdoutF}" "${stderrF}"
( fail arg1 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'too many arguments' $? "${stdoutF}" "${stderrF}"
# Test with a message.
desc='fail_with_message'
if ( fail 'some message' >"${stdoutF}" 2>"${stderrF}" ); then
fail "${desc}: expected a failure"
th_showOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
}
testFailNotEquals() {
( failNotEquals 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
# FN_TESTS holds all the functions to be tested.
# shellcheck disable=SC2006
FN_TESTS=`
# fn num_args pattern
cat <<EOF
fail 1
failNotEquals 3 but was:
failFound 2 found:
failNotFound 2 not found:
failSame 3 not same
failNotSame 3 but was:
EOF
`
( failNotEquals "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same with msg' $? "${stdoutF}" "${stderrF}"
testFailsWithArgs() {
echo "${FN_TESTS}" |\
while read -r fn num_args pattern; do
case "${fn}" in
fail) continue ;;
esac
( failNotEquals 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'not same' $? "${stdoutF}" "${stderrF}"
# Test without a message.
desc="${fn}_without_message"
if ( ${fn} arg1 arg2 >"${stdoutF}" 2>"${stderrF}" ); then
fail "${desc}: expected a failure"
th_showOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
fi
( failNotEquals '' '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
# Test with a message.
arg1='' arg2=''
case ${num_args} in
1) ;;
2) arg1='arg1' ;;
3) arg1='arg1' arg2='arg2' ;;
esac
( failNotEquals >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
( failNotEquals arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
desc="${fn}_with_message"
if ( ${fn} 'some message' ${arg1} ${arg2} >"${stdoutF}" 2>"${stderrF}" ); then
fail "${desc}: expected a failure"
th_showOutput
else
th_assertFalseWithOutput "${desc}" $? "${stdoutF}" "${stderrF}"
if ! grep -- "${pattern}" "${stdoutF}" >/dev/null; then
fail "${desc}: incorrect message to STDOUT"
th_showOutput
fi
fi
done
}
testFailSame() {
( failSame 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
testTooFewArguments() {
echo "${FN_TESTS}" \
|while read -r fn num_args pattern; do
# Skip functions that support a single message argument.
if [ "${num_args}" -eq 1 ]; then
continue
fi
( failSame "${MSG}" 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same with msg' $? "${stdoutF}" "${stderrF}"
desc="${fn}"
if (${fn} >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
got=$? want=${SHUNIT_ERROR}
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
fi
done
}
( failSame 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'not same' $? "${stdoutF}" "${stderrF}"
( failSame '' '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null values' $? "${stdoutF}" "${stderrF}"
( failSame >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too few arguments' $? "${stdoutF}" "${stderrF}"
( failSame arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
testTooManyArguments() {
echo "${FN_TESTS}" \
|while read -r fn num_args pattern; do
desc="${fn}"
if (${fn} arg1 arg2 arg3 arg4 >"${stdoutF}" 2>"${stderrF}"); then
fail "${desc}: expected a failure"
_showTestOutput
else
got=$? want=${SHUNIT_ERROR}
assertEquals "${desc}: incorrect return code" "${got}" "${want}"
th_assertFalseWithError "${desc}" "${got}" "${stdoutF}" "${stderrF}"
fi
done
}
oneTimeSetUp() {
th_oneTimeSetUp
MSG='This is a test message'
}
# Load and run shUnit2.

View File

@ -0,0 +1,99 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests for general commands.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Disable source following.
# shellcheck disable=SC1090,SC1091
# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"
# Load test helpers.
. ./shunit2_test_helpers
testSkipping() {
# We shouldn't be skipping to start.
if isSkipping; then
th_error 'skipping *should not be* enabled'
return
fi
startSkipping
was_skipping_started=${SHUNIT_FALSE}
if isSkipping; then was_skipping_started=${SHUNIT_TRUE}; fi
endSkipping
was_skipping_ended=${SHUNIT_FALSE}
if isSkipping; then was_skipping_ended=${SHUNIT_TRUE}; fi
assertEquals "skipping wasn't started" "${was_skipping_started}" "${SHUNIT_TRUE}"
assertNotEquals "skipping wasn't ended" "${was_skipping_ended}" "${SHUNIT_TRUE}"
return 0
}
testStartSkippingWithMessage() {
unittestF="${SHUNIT_TMPDIR}/unittest"
sed 's/^#//' >"${unittestF}" <<\EOF
## Start skipping with a message.
#testSkipping() {
# startSkipping 'SKIP-a-Dee-Doo-Dah'
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
# Ignoring errors with `|| :` as we only care about `FAILED` in the output.
( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
if ! grep '\[skipping\] SKIP-a-Dee-Doo-Dah' "${stderrF}" >/dev/null; then
fail 'skipping message was not generated'
fi
return 0
}
testStartSkippingWithoutMessage() {
unittestF="${SHUNIT_TMPDIR}/unittest"
sed 's/^#//' >"${unittestF}" <<\EOF
## Start skipping without a message.
#testSkipping() {
# startSkipping
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
# Ignoring errors with `|| :` as we only care about `FAILED` in the output.
( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
if grep '\[skipping\]' "${stderrF}" >/dev/null; then
fail 'skipping message was unexpectedly generated'
fi
return 0
}
setUp() {
for f in "${stdoutF}" "${stderrF}"; do
cp /dev/null "${f}"
done
# Reconfigure coloring as some tests override default behavior.
_shunit_configureColor "${SHUNIT_COLOR_DEFAULT}"
# shellcheck disable=SC2034,SC2153
SHUNIT_CMD_TPUT=${__SHUNIT_CMD_TPUT}
}
oneTimeSetUp() {
SHUNIT_COLOR_DEFAULT="${SHUNIT_COLOR}"
th_oneTimeSetUp
}
# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. "${TH_SHUNIT}"

View File

@ -3,17 +3,15 @@
#
# shunit2 unit test for macros.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
### ShellCheck http://www.shellcheck.net/
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Presence of LINENO variable is checked.
# shellcheck disable=SC2039
# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
@ -23,215 +21,223 @@ stderrF="${TMPDIR:-/tmp}/STDERR"
. ./shunit2_test_helpers
testAssertEquals() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_EQUALS_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_EQUALS_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_EQUALS_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_EQUALS_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testAssertNotEquals() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_NOT_EQUALS_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_EQUALS_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_EQUALS_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_NOT_EQUALS_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_EQUALS_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_EQUALS_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testSame() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_SAME_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_SAME_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_SAME_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_SAME_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testNotSame() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_NOT_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_SAME_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_SAME_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_NOT_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_SAME_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_SAME_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testNull() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_NULL_} 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NULL_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NULL_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_NULL_} '"some msg"' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NULL_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NULL_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testNotNull()
{
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping
testNotNull() {
isLinenoWorking || startSkipping
( ${_ASSERT_NOT_NULL_} '' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_NULL_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_NULL_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_NOT_NULL_} '"some msg"' '""' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_NOT_NULL_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stdoutF}" "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_NOT_NULL_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testAssertTrue() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_TRUE_} "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_TRUE_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_TRUE_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_TRUE_} '"some msg"' "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_TRUE_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_TRUE_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testAssertFalse() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_ASSERT_FALSE_} "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_FALSE_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_FALSE_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_ASSERT_FALSE_} '"some msg"' "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_ASSERT_FALSE_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_ASSERT_FALSE_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testFail() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_FAIL_} >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_FAIL_} '"some msg"' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testFailNotEquals()
{
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping
testFailNotEquals() {
isLinenoWorking || startSkipping
( ${_FAIL_NOT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_NOT_EQUALS_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_NOT_EQUALS_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_FAIL_NOT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_NOT_EQUALS_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_NOT_EQUALS_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testFailSame() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_FAIL_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_SAME_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_SAME_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_FAIL_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_SAME_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_SAME_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
testFailNotSame() {
# Start skipping if LINENO not available.
[ -z "${LINENO:-}" ] && startSkipping
isLinenoWorking || startSkipping
( ${_FAIL_NOT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_NOT_SAME_ failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_NOT_SAME_ failed to produce an ASSERT message'
showTestOutput
fi
( ${_FAIL_NOT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$?
assertTrue '_FAIL_NOT_SAME_ w/ msg failure' ${rtrn}
[ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
if ! wasAssertGenerated; then
fail '_FAIL_NOT_SAME_ (with a message) failed to produce an ASSERT message'
showTestOutput
fi
}
oneTimeSetUp() {
th_oneTimeSetUp
if ! isLinenoWorking; then
# shellcheck disable=SC2016
th_warn '${LINENO} is not working for this shell. Tests will be skipped.'
fi
}
# isLinenoWorking returns true if the `$LINENO` shell variable works properly.
isLinenoWorking() {
# shellcheck disable=SC2016
ln=`eval echo "${LINENO:-}"`
case ${ln} in
[0-9]*) return "${SHUNIT_TRUE}" ;;
-[0-9]*) return "${SHUNIT_FALSE}" ;; # The dash shell produces negative values.
esac
return "${SHUNIT_FALSE}"
}
# showTestOutput for the most recently run test.
showTestOutput() { th_showOutput "${SHUNIT_FALSE}" "${stdoutF}" "${stderrF}"; }
# wasAssertGenerated returns true if an ASSERT was generated to STDOUT.
wasAssertGenerated() { grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null; }
# Disable output coloring as it breaks the tests.
SHUNIT_COLOR='none'; export SHUNIT_COLOR

View File

@ -3,19 +3,17 @@
#
# shUnit2 unit tests of miscellaneous things
#
# Copyright 2008-2018 Kate Ward. All Rights Reserved.
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
### ShellCheck http://www.shellcheck.net/
# $() are not fully portable (POSIX != portable).
# Allow usage of legacy backticked `...` notation instead of $(...).
# shellcheck disable=SC2006
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Not wanting to escape single quotes.
# shellcheck disable=SC1003
# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
@ -41,14 +39,18 @@ testUnboundVariable() {
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
assertFalse 'expected a non-zero exit value' $?
grep '^ASSERT:Unknown failure' "${stdoutF}" >/dev/null
assertTrue 'assert message was not generated' $?
grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null
assertTrue 'test count message was not generated' $?
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue 'failure message was not generated' $?
if ( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ); then
fail 'expected a non-zero exit value'
fi
if ! grep '^ASSERT:unknown failure' "${stdoutF}" >/dev/null; then
fail 'assert message was not generated'
fi
if ! grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null; then
fail 'test count message was not generated'
fi
if ! grep '^FAILED' "${stdoutF}" >/dev/null; then
fail 'failure message was not generated'
fi
}
# assertEquals repeats message argument.
@ -57,7 +59,8 @@ testIssue7() {
# Disable coloring so 'ASSERT:' lines can be matched correctly.
_shunit_configureColor 'none'
( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" )
# Ignoring errors with `|| :` as we only care about the message in this test.
( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" ) || :
diff "${stdoutF}" - >/dev/null <<EOF
ASSERT:Some message. expected:<1> but was:<2>
EOF
@ -77,19 +80,37 @@ testIssue29() {
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
grep '^--- test_assert' "${stdoutF}" >/dev/null
rtrn=$?
assertEquals "${SHUNIT_TRUE}" "${rtrn}"
[ "${rtrn}" -eq "${SHUNIT_TRUE}" ] || cat "${stdoutF}" >&2
}
# Test that certain external commands sometimes "stubbed" by users are escaped.
testIssue54() {
for c in mkdir rm cat chmod sed; do
if grep "^[^#]*${c} " "${TH_SHUNIT}" | grep -qv "command ${c}"; then
fail "external call to ${c} not protected somewhere"
fi
done
# shellcheck disable=2016
if grep '^[^#]*[^ ] *\[' "${TH_SHUNIT}" | grep -qv '${__SHUNIT_BUILTIN} \['; then
fail 'call to [ not protected somewhere'
fi
# shellcheck disable=2016
if grep '^[^#]* *\.' "${TH_SHUNIT}" | grep -qv '${__SHUNIT_BUILTIN} \.'; then
fail 'call to . not protected somewhere'
fi
}
# shUnit2 should not exit with 0 when it has syntax errors.
# https://github.com/kward/shunit2/issues/69
testIssue69() {
unittestF="${SHUNIT_TMPDIR}/unittest"
for t in Equals NotEquals Null NotNull Same NotSame True False; do
# Note: assertNull not tested as zero arguments == null, which is valid.
for t in Equals NotEquals NotNull Same NotSame True False; do
assert="assert${t}"
sed 's/^#//' >"${unittestF}" <<EOF
## Asserts with invalid argument counts should be counted as failures.
@ -97,7 +118,8 @@ testIssue69() {
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
# Ignoring errors with `|| :` as we only care about `FAILED` in the output.
( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue "failure message for ${assert} was not generated" $?
done
@ -114,7 +136,8 @@ testIssue77() {
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
# Ignoring errors with `|| :` as we only care about `FAILED` in the output.
( exec "${SHELL:-sh}" "${unittestF}" ) >"${stdoutF}" 2>"${stderrF}" || :
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue "failure of ${func}() did not end test" $?
done
@ -135,9 +158,24 @@ testIssue84() {
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue "failure message for ${assert} was not generated" $?
# Ignoring errors with `|| :` as we only care about `FAILED` in the output.
( exec "${SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" ) || :
if ! grep '^FAILED' "${stdoutF}" >/dev/null; then
fail 'failure message was not generated'
fi
}
# Demonstrate that asserts are no longer executed in subshells.
# https://github.com/kward/shunit2/issues/123
#
# NOTE: this test only works if the `${BASH_SUBSHELL}` variable is present.
testIssue123() {
if [ -z "${BASH_SUBSHELL:-}" ]; then
# shellcheck disable=SC2016
startSkipping 'The ${BASH_SUBSHELL} variable is unavailable in this shell.'
fi
# shellcheck disable=SC2016
assertTrue 'not in subshell' '[[ ${BASH_SUBSHELL} -eq 0 ]]'
}
testPrepForSourcing() {
@ -146,55 +184,6 @@ testPrepForSourcing() {
assertEquals './abc' "`_shunit_prepForSourcing 'abc'`"
}
testEscapeCharInStr() {
while read -r desc char str want; do
got=`_shunit_escapeCharInStr "${char}" "${str}"`
assertEquals "${desc}" "${want}" "${got}"
done <<'EOF'
backslash \ '' ''
backslash_pre \ \def \\def
backslash_mid \ abc\def abc\\def
backslash_post \ abc\ abc\\
quote " '' ''
quote_pre " "def \"def
quote_mid " abc"def abc\"def
quote_post " abc" abc\"
string $ '' ''
string_pre $ $def \$def
string_mid $ abc$def abc\$def
string_post $ abc$ abc\$
EOF
# TODO(20170924:kward) fix or remove.
# actual=`_shunit_escapeCharInStr "'" ''`
# assertEquals '' "${actual}"
# assertEquals "abc\\'" `_shunit_escapeCharInStr "'" "abc'"`
# assertEquals "abc\\'def" `_shunit_escapeCharInStr "'" "abc'def"`
# assertEquals "\\'def" `_shunit_escapeCharInStr "'" "'def"`
# # Must put the backtick in a variable so the shell doesn't misinterpret it
# # while inside a backticked sequence (e.g. `echo '`'` would fail).
# backtick='`'
# actual=`_shunit_escapeCharInStr ${backtick} ''`
# assertEquals '' "${actual}"
# assertEquals '\`abc' \
# `_shunit_escapeCharInStr "${backtick}" ${backtick}'abc'`
# assertEquals 'abc\`' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}`
# assertEquals 'abc\`def' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}'def'`
}
testEscapeCharInStr_specialChars() {
# Make sure our forward slash doesn't upset sed.
assertEquals '/' "`_shunit_escapeCharInStr '\' '/'`"
# Some shells escape these differently.
# TODO(20170924:kward) fix or remove.
#assertEquals '\\a' `_shunit_escapeCharInStr '\' '\a'`
#assertEquals '\\b' `_shunit_escapeCharInStr '\' '\b'`
}
# Test the various ways of declaring functions.
#
# Prefixing (then stripping) with a comment symbol so these functions aren't mistaken for real test functions.
@ -223,23 +212,61 @@ testExtractTestFunctions() {
#func_with_test_vars() {
# testVariable=1234
#}
## Function with keyword but no parenthesis
#function test6 { echo '6'; }
## Function with keyword but no parenthesis, multi-line
#function test7 {
# echo '7';
#}
## Function with no parenthesis, '{' on next line
#function test8
#{
# echo '8'
#}
## Function with hyphenated name
#test-9() {
# echo '9';
#}
## Function without parenthesis or keyword
#test_foobar { echo 'hello world'; }
## Function with multiple function keywords
#function function test_test_test() { echo 'lorem'; }
EOF
actual=`_shunit_extractTestFunctions "${f}"`
assertEquals 'testABC test_def testG3 test4 test5' "${actual}"
assertEquals 'testABC test_def testG3 test4 test5 test6 test7 test8 test-9' "${actual}"
}
# Test that certain external commands sometimes "stubbed" by users
# are escaped. See Issue #54.
testProtectedCommands() {
for c in mkdir rm cat chmod; do
grep "^[^#]*${c} " "${TH_SHUNIT}" | grep -qv "command ${c}"
assertFalse "external call to ${c} not protected somewhere" $?
done
grep '^[^#]*[^ ] *\[' "${TH_SHUNIT}" | grep -qv 'command \['
assertFalse "call to [ ... ] not protected somewhere" $?
grep '^[^#]* *\.' "${TH_SHUNIT}" | grep -qv 'command \.'
assertFalse "call to . not protected somewhere" $?
testColors() {
while read -r cmd colors desc; do
SHUNIT_CMD_TPUT=${cmd}
want=${colors} got=`_shunit_colors`
assertEquals "${desc}: incorrect number of colors;" \
"${got}" "${want}"
done <<'EOF'
missing_tput 16 missing tput command
mock_tput 256 mock tput command
EOF
}
testColorsWitoutTERM() {
SHUNIT_CMD_TPUT='mock_tput'
got=`TERM='' _shunit_colors`
want=16
assertEquals "${got}" "${want}"
}
mock_tput() {
if [ -z "${TERM}" ]; then
# shellcheck disable=SC2016
echo 'tput: No value for $TERM and no -T specified'
return 2
fi
if [ "$1" = 'colors' ]; then
echo 256
return 0
fi
return 1
}
setUp() {
@ -249,6 +276,9 @@ setUp() {
# Reconfigure coloring as some tests override default behavior.
_shunit_configureColor "${SHUNIT_COLOR_DEFAULT}"
# shellcheck disable=SC2034,SC2153
SHUNIT_CMD_TPUT=${__SHUNIT_CMD_TPUT}
}
oneTimeSetUp() {

View File

@ -0,0 +1,70 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests for `shopt` support.
#
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Load test helpers.
. ./shunit2_test_helpers
# Call shopt from a variable so it can be mocked if it doesn't work.
SHOPT_CMD='shopt'
testNullglob() {
isShoptWorking || startSkipping
nullglob=$(${SHOPT_CMD} nullglob |cut -f2)
# Test without nullglob.
${SHOPT_CMD} -u nullglob
assertEquals 'test without nullglob' 0 0
# Test with nullglob.
${SHOPT_CMD} -s nullglob
assertEquals 'test with nullglob' 1 1
# Reset nullglob.
if [ "${nullglob}" = "on" ]; then
${SHOPT_CMD} -s nullglob
else
${SHOPT_CMD} -u nullglob
fi
unset nullglob
}
oneTimeSetUp() {
th_oneTimeSetUp
if ! isShoptWorking; then
SHOPT_CMD='mock_shopt'
fi
}
# isShoptWorking returns true if the `shopt` shell command is available.
# NOTE: `shopt` is not defined as part of the POSIX standard.
isShoptWorking() {
# shellcheck disable=SC2039,SC3044
( shopt >/dev/null 2>&1 );
}
mock_shopt() {
if [ $# -eq 0 ]; then
echo "nullglob off"
fi
return
}
# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT="$0"
. "${TH_SHUNIT}"

View File

@ -3,8 +3,9 @@
#
# shUnit2 unit test for standalone operation.
#
# Copyright 2010-2017 Kate Ward. All Rights Reserved.
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
@ -13,13 +14,10 @@
# the name of a unit test script, works. When run, this script determines if it
# is running as a standalone program, and calls main() if it is.
#
### ShellCheck http://www.shellcheck.net/
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Disable source following.
# shellcheck disable=SC1090,SC1091
ARGV0="`basename "$0"`"
ARGV0=$(basename "$0")
# Load test helpers.
. ./shunit2_test_helpers
@ -32,7 +30,7 @@ main() {
${TH_SHUNIT} "${ARGV0}"
}
# Are we running as a standalone?
if [ "${ARGV0}" = 'shunit2_test_standalone.sh' ]; then
if [ $# -gt 0 ]; then main "$@"; else main; fi
# Run main() if are running as a standalone script.
if [ "${ARGV0}" = 'shunit2_standalone_test.sh' ]; then
main "$@"
fi

View File

@ -2,25 +2,27 @@
#
# shUnit2 unit test common functions
#
# Copyright 2008 Kate Ward. All Rights Reserved.
# Copyright 2008-2021 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
### ShellCheck (http://www.shellcheck.net/)
# Commands are purposely escaped so they can be mocked outside shUnit2.
# shellcheck disable=SC1001,SC1012
# expr may be antiquated, but it is the only solution in some cases.
# shellcheck disable=SC2003
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Exit immediately if a simple command exits with a non-zero status.
set -e
# Treat unset variables as an error when performing parameter expansion.
set -u
# Set shwordsplit for zsh.
\[ -n "${ZSH_VERSION:-}" ] && setopt shwordsplit
[ -n "${ZSH_VERSION:-}" ] && setopt shwordsplit
#
# Constants.
@ -33,11 +35,11 @@ TH_SHUNIT=${SHUNIT_INC:-./shunit2}; export TH_SHUNIT
# non-empty value to enable debug output, or TRACE to enable trace
# output.
TRACE=${TRACE:+'th_trace '}
\[ -n "${TRACE}" ] && DEBUG=1
\[ -z "${TRACE}" ] && TRACE=':'
[ -n "${TRACE}" ] && DEBUG=1
[ -z "${TRACE}" ] && TRACE=':'
DEBUG=${DEBUG:+'th_debug '}
\[ -z "${DEBUG}" ] && DEBUG=':'
[ -z "${DEBUG}" ] && DEBUG=':'
#
# Variables.
@ -50,12 +52,12 @@ th_RANDOM=0
#
# Logging functions.
th_trace() { echo "${MY_NAME}:TRACE $*" >&2; }
th_debug() { echo "${MY_NAME}:DEBUG $*" >&2; }
th_info() { echo "${MY_NAME}:INFO $*" >&2; }
th_warn() { echo "${MY_NAME}:WARN $*" >&2; }
th_error() { echo "${MY_NAME}:ERROR $*" >&2; }
th_fatal() { echo "${MY_NAME}:FATAL $*" >&2; }
th_trace() { echo "test:TRACE $*" >&2; }
th_debug() { echo "test:DEBUG $*" >&2; }
th_info() { echo "test:INFO $*" >&2; }
th_warn() { echo "test:WARN $*" >&2; }
th_error() { echo "test:ERROR $*" >&2; }
th_fatal() { echo "test:FATAL $*" >&2; }
# Output subtest name.
th_subtest() { echo " $*" >&2; }
@ -73,20 +75,20 @@ th_oneTimeSetUp() {
th_generateRandom() {
tfgr_random=${th_RANDOM}
while \[ "${tfgr_random}" = "${th_RANDOM}" ]; do
while [ "${tfgr_random}" = "${th_RANDOM}" ]; do
# shellcheck disable=SC2039
if \[ -n "${RANDOM:-}" ]; then
if [ -n "${RANDOM:-}" ]; then
# $RANDOM works
# shellcheck disable=SC2039
tfgr_random=${RANDOM}${RANDOM}${RANDOM}$$
elif \[ -r '/dev/urandom' ]; then
elif [ -r '/dev/urandom' ]; then
tfgr_random=`od -vAn -N4 -tu4 </dev/urandom |sed 's/^[^0-9]*//'`
else
tfgr_date=`date '+%H%M%S'`
tfgr_random=`expr "${tfgr_date}" \* $$`
unset tfgr_date
fi
\[ "${tfgr_random}" = "${th_RANDOM}" ] && sleep 1
[ "${tfgr_random}" = "${th_RANDOM}" ] && sleep 1
done
th_RANDOM=${tfgr_random}
@ -127,12 +129,13 @@ th_assertTrueWithNoOutput() {
th_stdout_=$3
th_stderr_=$4
assertTrue "${th_test_}; expected return value of zero" "${th_rtrn_}"
\[ "${th_rtrn_}" -ne "${SHUNIT_TRUE}" ] && \cat "${th_stderr_}"
assertFalse "${th_test_}; expected no output to STDOUT" \
"[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}; expected no output to STDERR" \
"[ -s '${th_stderr_}' ]"
assertEquals "${th_test_}: expected return value of true" "${SHUNIT_TRUE}" "${th_rtrn_}"
assertFalse "${th_test_}: expected no output to STDOUT" "[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}: expected no output to STDERR" "[ -s '${th_stderr_}' ]"
# shellcheck disable=SC2166
if [ -s "${th_stdout_}" -o -s "${th_stderr_}" ]; then
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
fi
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
}
@ -152,13 +155,13 @@ th_assertFalseWithOutput()
th_stdout_=$3
th_stderr_=$4
assertFalse "${th_test_}; expected non-zero return value" "${th_rtrn_}"
assertTrue "${th_test_}; expected output to STDOUT" \
"[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}; expected no output to STDERR" \
"[ -s '${th_stderr_}' ]"
\[ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ] || \
assertNotEquals "${th_test_}: expected non-true return value" "${SHUNIT_TRUE}" "${th_rtrn_}"
assertTrue "${th_test_}: expected output to STDOUT" "[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}: expected no output to STDERR" "[ -s '${th_stderr_}' ]"
# shellcheck disable=SC2166
if ! [ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ]; then
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
fi
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
}
@ -177,13 +180,13 @@ th_assertFalseWithError() {
th_stdout_=$3
th_stderr_=$4
assertFalse "${th_test_}; expected non-zero return value" "${th_rtrn_}"
assertFalse "${th_test_}; expected no output to STDOUT" \
"[ -s '${th_stdout_}' ]"
assertTrue "${th_test_}; expected output to STDERR" \
"[ -s '${th_stderr_}' ]"
\[ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ] || \
assertFalse "${th_test_}: expected non-zero return value" "${th_rtrn_}"
assertFalse "${th_test_}: expected no output to STDOUT" "[ -s '${th_stdout_}' ]"
assertTrue "${th_test_}: expected output to STDERR" "[ -s '${th_stderr_}' ]"
# shellcheck disable=SC2166
if ! [ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ]; then
_th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
fi
unset th_test_ th_rtrn_ th_stdout_ th_stderr_
}
@ -193,8 +196,8 @@ th_assertFalseWithError() {
# they are either written to disk, or recognized as an error if the file is empty.
th_clearReturn() { cp /dev/null "${returnF}"; }
th_queryReturn() {
if \[ -s "${returnF}" ]; then
th_return=`\cat "${returnF}"`
if [ -s "${returnF}" ]; then
th_return=`cat "${returnF}"`
else
th_return=${SHUNIT_ERROR}
fi
@ -204,22 +207,26 @@ th_queryReturn() {
# Providing external and internal calls to the showOutput helper function.
th_showOutput() { _th_showOutput "$@"; }
_th_showOutput() {
_th_return_=$1
_th_stdout_=$2
_th_stderr_=$3
if isSkipping; then
return
fi
isSkipping
if \[ $? -eq "${SHUNIT_FALSE}" -a "${_th_return_}" != "${SHUNIT_TRUE}" ]; then
if \[ -n "${_th_stdout_}" -a -s "${_th_stdout_}" ]; then
_th_return_="${1:-${returnF}}"
_th_stdout_="${2:-${stdoutF}}"
_th_stderr_="${3:-${stderrF}}"
if [ "${_th_return_}" != "${SHUNIT_TRUE}" ]; then
# shellcheck disable=SC2166
if [ -n "${_th_stdout_}" -a -s "${_th_stdout_}" ]; then
echo '>>> STDOUT' >&2
\cat "${_th_stdout_}" >&2
cat "${_th_stdout_}" >&2
echo '<<< STDOUT' >&2
fi
if \[ -n "${_th_stderr_}" -a -s "${_th_stderr_}" ]; then
# shellcheck disable=SC2166
if [ -n "${_th_stderr_}" -a -s "${_th_stderr_}" ]; then
echo '>>> STDERR' >&2
\cat "${_th_stderr_}" >&2
fi
if \[ -n "${_th_stdout_}" -o -n "${_th_stderr_}" ]; then
echo '<<< end output' >&2
cat "${_th_stderr_}" >&2
echo '<<< STDERR' >&2
fi
fi

View File

@ -3,7 +3,7 @@
#
# Unit test suite runner.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Copyright 2008-2020 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
@ -12,6 +12,20 @@
# This script runs all the unit tests that can be found, and generates a nice
# report of the tests.
#
### Sample usage:
#
# Run all tests for all shells.
# $ ./test_runner
#
# Run all tests for single shell.
# $ ./test_runner -s /bin/bash
#
# Run single test for all shells.
# $ ./test_runner -t shunit_asserts_test.sh
#
# Run single test for single shell.
# $ ./test_runner -s /bin/bash -t shunit_asserts_test.sh
#
### ShellCheck (http://www.shellcheck.net/)
# Disable source following.
# shellcheck disable=SC1090,SC1091
@ -25,8 +39,10 @@
RUNNER_LOADED=0
RUNNER_ARGV0=`basename "$0"`
RUNNER_SHELLS='/bin/sh ash /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh'
RUNNER_SHELLS='/bin/sh ash /bin/bash /bin/dash /bin/ksh /bin/mksh /bin/zsh'
RUNNER_TEST_SUFFIX='_test.sh'
true; RUNNER_TRUE=$?
false; RUNNER_FALSE=$?
runner_warn() { echo "runner:WARN $*" >&2; }
runner_error() { echo "runner:ERROR $*" >&2; }
@ -36,7 +52,7 @@ runner_usage() {
echo "usage: ${RUNNER_ARGV0} [-e key=val ...] [-s shell(s)] [-t test(s)]"
}
_runner_tests() { echo ./*${RUNNER_TEST_SUFFIX} |sed 's#./##g'; }
_runner_tests() { echo ./*${RUNNER_TEST_SUFFIX} |sed 's#\./##g'; }
_runner_testName() {
# shellcheck disable=SC1117
_runner_testName_=`expr "${1:-}" : "\(.*\)${RUNNER_TEST_SUFFIX}"`
@ -114,6 +130,7 @@ for key in ${env}; do
done
# Run tests.
runner_passing_=${RUNNER_TRUE}
for shell in ${shells}; do
echo
@ -127,20 +144,20 @@ EOF
# Check for existence of shell.
shell_bin=${shell}
shell_name=''
shell_present=${FALSE}
shell_present=${RUNNER_FALSE}
case ${shell} in
ash)
shell_bin=`which busybox |grep -v '^no busybox'`
[ $? -eq "${TRUE}" -a -n "${shell_bin}" ] && shell_present="${TRUE}"
shell_bin="${shell_bin} ash"
shell_bin=`command -v busybox`
[ $? -eq "${RUNNER_TRUE}" ] && shell_present="${RUNNER_TRUE}"
shell_bin="${shell_bin:+${shell_bin} }ash"
shell_name=${shell}
;;
*)
[ -x "${shell_bin}" ] && shell_present="${TRUE}"
[ -x "${shell_bin}" ] && shell_present="${RUNNER_TRUE}"
shell_name=`basename "${shell}"`
;;
esac
if [ "${shell_present}" -eq "${FALSE}" ]; then
if [ "${shell_present}" -eq "${RUNNER_FALSE}" ]; then
runner_warn "unable to run tests with the ${shell_name} shell"
continue
fi
@ -157,9 +174,18 @@ EOF
# ${shell_bin} needs word splitting.
# shellcheck disable=SC2086
( exec ${shell_bin} "./${t}" 2>&1; )
shell_passing=$?
if [ "${shell_passing}" -ne "${RUNNER_TRUE}" ]; then
runner_warn "${shell_bin} not passing"
fi
test "${runner_passing_}" -eq ${RUNNER_TRUE} -a ${shell_passing} -eq ${RUNNER_TRUE}
runner_passing_=$?
done
done
return ${runner_passing_}
}
# Execute main() if this is run in standalone mode (i.e. not from a unit test).
[ -z "${SHUNIT_VERSION}" ] && main "$@"
if [ -z "${SHUNIT_VERSION}" ]; then
main "$@"
fi

File diff suppressed because it is too large

View File

@ -1,9 +1,9 @@
#!/usr/bin/env bash
SUBPROGRAM=osync
PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones
AUTHOR="(L) 2013-2017 by Orsiris de Jong"
AUTHOR="(L) 2013-2020 by Orsiris de Jong"
CONTACT="http://www.netpower.fr - ozy@netpower.fr"
PROGRAM_BUILD=2016120401
PROGRAM_BUILD=2020031502
## Runs an osync / obackup instance for every conf file found
## If an instance fails, run it again if time permits
@ -26,36 +26,217 @@ else
LOG_FILE=./$SUBPROGRAM-batch.log
fi
## Default directory where to store temporary run files
if [ -w /tmp ]; then
RUN_DIR=/tmp
elif [ -w /var/tmp ]; then
RUN_DIR=/var/tmp
else
RUN_DIR=.
fi
# No need to edit under this line ##############################################################
function _logger {
local value="${1}" # What to log
echo -e "$value" >> "$LOG_FILE"
#### RemoteLogger SUBSET ####
# Array to string converter, see http://stackoverflow.com/questions/1527049/bash-join-elements-of-an-array
# usage: joinString separatorChar Array
function joinString {
local IFS="$1"; shift; echo "$*";
}
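# A minimal usage sketch of joinString (the array name below is just an example):
#   hosts=(alpha beta gamma)
#   joinString , "${hosts[@]}"    # prints: alpha,beta,gamma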
function Logger {
local value="${1}" # What to log
local level="${2}" # Log level: DEBUG, NOTICE, WARN, ERROR, CRITICAL
# Sub function of Logger
function _Logger {
local logValue="${1}" # Log to file
local stdValue="${2}" # Log to screen
local toStdErr="${3:-false}" # Log to stderr instead of stdout
prefix="$(date) - "
if [ "$logValue" != "" ]; then
echo -e "$logValue" >> "$LOG_FILE"
# Build current log file for alerts if we have a sufficient environment
if [ "$_LOGGER_WRITE_PARTIAL_LOGS" == true ] && [ "$RUN_DIR/$PROGRAM" != "/" ]; then
echo -e "$logValue" >> "$RUN_DIR/$PROGRAM._Logger.$SCRIPT_PID.$TSTAMP"
fi
fi
if [ "$stdValue" != "" ] && [ "$_LOGGER_SILENT" != true ]; then
if [ $toStdErr == true ]; then
# Force stderr color in subshell
(>&2 echo -e "$stdValue")
else
echo -e "$stdValue"
fi
fi
}
# Remote logger, similar to the Logger function below, but without logging to file or setting alert flags
function RemoteLogger {
local value="${1}" # Sentence to log (in double quotes)
local level="${2}" # Log level
local retval="${3:-undef}" # optional return value of command
local prefix
if [ "$_LOGGER_PREFIX" == "time" ]; then
prefix="RTIME: $SECONDS - "
elif [ "$_LOGGER_PREFIX" == "date" ]; then
prefix="R $(date) - "
else
prefix=""
fi
if [ "$level" == "CRITICAL" ]; then
_logger "$prefix\e[41m$value\e[0m"
_Logger "" "$prefix\e[1;33;41m$value\e[0m" true
if [ "$_DEBUG" == true ]; then
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
fi
return
elif [ "$level" == "ERROR" ]; then
_logger "$prefix\e[91m$value\e[0m"
_Logger "" "$prefix\e[31m$value\e[0m" true
if [ "$_DEBUG" == true ]; then
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
fi
return
elif [ "$level" == "WARN" ]; then
_logger "$prefix\e[93m$value\e[0m"
_Logger "" "$prefix\e[33m$value\e[0m" true
if [ "$_DEBUG" == true ]; then
_Logger -e "" "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$" true
fi
return
elif [ "$level" == "NOTICE" ]; then
_logger "$prefix$value"
if [ "$_LOGGER_ERR_ONLY" != true ]; then
_Logger "" "$prefix$value"
fi
return
elif [ "$level" == "VERBOSE" ]; then
if [ "$_LOGGER_VERBOSE" == true ]; then
_Logger "" "$prefix$value"
fi
return
elif [ "$level" == "ALWAYS" ]; then
_Logger "" "$prefix$value"
return
elif [ "$level" == "DEBUG" ]; then
if [ "$DEBUG" == "yes" ]; then
_logger "$prefix$value"
if [ "$_DEBUG" == true ]; then
_Logger "" "$prefix$value"
return
fi
else
_logger "\e[41mLogger function called without proper loglevel.\e[0m"
_logger "$prefix$value"
_Logger "" "\e[41mLogger function called without proper loglevel [$level].\e[0m" true
_Logger "" "Value was: $prefix$value" true
fi
}
#### RemoteLogger SUBSET END ####
# General log function with log levels:
# Environment variables
# _LOGGER_SILENT: Disables any output to stdout & stderr
# _LOGGER_ERR_ONLY: Disables any output to stdout except for ALWAYS loglevel
# _LOGGER_VERBOSE: Allows VERBOSE loglevel messages to be sent to stdout
# Loglevels
# Except for VERBOSE, all loglevels are ALWAYS sent to log file
# CRITICAL, ERROR, WARN sent to stderr, color depending on level, level also logged
# NOTICE sent to stdout
# VERBOSE sent to stdout if _LOGGER_VERBOSE=true
# ALWAYS is sent to stdout unless _LOGGER_SILENT=true
# DEBUG & PARANOIA_DEBUG are only sent to stdout if _DEBUG=true
function Logger {
local value="${1}" # Sentence to log (in double quotes)
local level="${2}" # Log level
local retval="${3:-undef}" # optional return value of command
local prefix
if [ "$_LOGGER_PREFIX" == "time" ]; then
prefix="TIME: $SECONDS - "
elif [ "$_LOGGER_PREFIX" == "date" ]; then
prefix="$(date '+%Y-%m-%d %H:%M:%S') - "
else
prefix=""
fi
## Obfuscate _REMOTE_TOKEN in logs (for ssh_filter usage only in osync and obackup)
value="${value/env _REMOTE_TOKEN=$_REMOTE_TOKEN/env _REMOTE_TOKEN=__o_O__}"
value="${value/env _REMOTE_TOKEN=\$_REMOTE_TOKEN/env _REMOTE_TOKEN=__o_O__}"
if [ "$level" == "CRITICAL" ]; then
_Logger "$prefix($level):$value" "$prefix\e[1;33;41m$value\e[0m" true
ERROR_ALERT=true
# ERROR_ALERT / WARN_ALERT is not set in main when Logger is called from a subprocess. We need to create these flag files for ERROR_ALERT / WARN_ALERT to be picked up by Alert
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP"
return
elif [ "$level" == "ERROR" ]; then
_Logger "$prefix($level):$value" "$prefix\e[91m$value\e[0m" true
ERROR_ALERT=true
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP"
return
elif [ "$level" == "WARN" ]; then
_Logger "$prefix($level):$value" "$prefix\e[33m$value\e[0m" true
WARN_ALERT=true
echo -e "[$retval] in [$(joinString , ${FUNCNAME[@]})] SP=$SCRIPT_PID P=$$\n$prefix($level):$value" >> "$RUN_DIR/$PROGRAM.WARN_ALERT.$SCRIPT_PID.$TSTAMP"
return
elif [ "$level" == "NOTICE" ]; then
if [ "$_LOGGER_ERR_ONLY" != true ]; then
_Logger "$prefix$value" "$prefix$value"
fi
return
elif [ "$level" == "VERBOSE" ]; then
if [ "$_LOGGER_VERBOSE" == true ]; then
_Logger "$prefix($level):$value" "$prefix$value"
fi
return
elif [ "$level" == "ALWAYS" ]; then
_Logger "$prefix$value" "$prefix$value"
return
elif [ "$level" == "DEBUG" ]; then
if [ "$_DEBUG" == true ]; then
_Logger "$prefix$value" "$prefix$value"
return
fi
else
_Logger "\e[41mLogger function called without proper loglevel [$level].\e[0m" "\e[41mLogger function called without proper loglevel [$level].\e[0m" true
_Logger "Value was: $prefix$value" "Value was: $prefix$value" true
fi
}
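# A minimal usage sketch of the Logger levels above (values are illustrative;
# LOG_FILE, RUN_DIR, SCRIPT_PID and TSTAMP must already be set by the script):
#   _LOGGER_PREFIX=date
#   Logger "Sync batch started" "NOTICE"      # written to log file and stdout
#   Logger "Instance failed" "ERROR" "$?"     # written to log file and stderr, creates the ERROR_ALERT flag file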
function CleanUp {
# Exit controlmaster before the socket gets deleted
if [ "$SSH_CONTROLMASTER" == true ] && [ "$SSH_CMD" != "" ]; then
$SSH_CMD -O exit
fi
if [ "$_DEBUG" != true ]; then
# Removing optional remote $RUN_DIR that goes into local $RUN_DIR
if [ -d "$RUN_DIR/$PROGRAM.remote.$SCRIPT_PID.$TSTAMP" ]; then
rm -rf "$RUN_DIR/$PROGRAM.remote.$SCRIPT_PID.$TSTAMP"
fi
# Removing all temporary run files
rm -f "$RUN_DIR/$PROGRAM."*".$SCRIPT_PID.$TSTAMP"
# Fix for sed -i requiring backup extension for BSD & Mac (see all sed -i statements)
rm -f "$RUN_DIR/$PROGRAM."*".$SCRIPT_PID.$TSTAMP.tmp"
fi
}
function GenericTrapQuit {
local exitcode=0
# Get ERROR / WARN alert flags from subprocesses that call Logger
if [ -f "$RUN_DIR/$PROGRAM.WARN_ALERT.$SCRIPT_PID.$TSTAMP" ]; then
WARN_ALERT=true
exitcode=2
fi
if [ -f "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP" ]; then
ERROR_ALERT=true
exitcode=1
fi
CleanUp
exit $exitcode
}
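# How the pieces above fit together: any subprocess that calls Logger with
# level ERROR or WARN drops a flag file named
# "$RUN_DIR/$PROGRAM.ERROR_ALERT.$SCRIPT_PID.$TSTAMP" (or .WARN_ALERT.);
# GenericTrapQuit, registered via trap below, picks those flags up at exit and
# maps them to exit codes 1 (error) and 2 (warning) before CleanUp removes the
# temporary run files.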
function CheckEnvironment {
## osync / obackup executable full path can be set here if it cannot be found on the system
@ -145,6 +326,8 @@ function Usage {
exit 128
}
trap GenericTrapQuit TERM EXIT HUP QUIT
opts=""
for i in "$@"
do

139
osync-target-helper-srv Executable file
View File

@ -0,0 +1,139 @@
#!/usr/bin/env bash
#
# osync-srv Two way directory sync daemon
#
# chkconfig: - 90 99
# description: monitors a local directory and syncs to a local or remote \
# directory on file changes
# processname: /usr/local/bin/osync.sh
# config: /etc/osync/*.conf
# pidfile: /var/run/osync
### BEGIN INIT INFO
# Provides: osync-target-helper-srv
# Required-Start: $local_fs $time
# Required-Stop: $local_fs $time
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: osync-target-helper daemon
# Description: Two way directory sync daemon
### END INIT INFO
prog=osync
progexec=osync.sh
progpath=/usr/local/bin
confdir=/etc/osync
pidfile=/var/run/$prog-target-helper
SCRIPT_BUILD=2018100101
if [ ! -f $progpath/$progexec ] && [ ! -f $progexec ]; then
echo "Cannot find $prog executable in $progpath nor in local path."
exit 1
fi
if [ ! -w $(dirname $pidfile) ]; then
pidfile=./$prog
fi
start() {
if ! ls "$confdir/"*.conf > /dev/null 2>&1; then
echo "Cannot find any configuration files in $confdir."
exit 1
fi
errno=0
for cfgfile in "$confdir/"*.conf
do
if [ -f $progpath/$progexec ]; then
$progpath/$progexec $cfgfile --on-changes-target --errors-only > /dev/null 2>&1 &
else
echo "Cannot find $prog executable in $progpath"
exit 1
fi
pid=$!
retval=$?
if [ $retval == 0 ]; then
echo $pid > "$pidfile-$(basename $cfgfile)"
echo "$prog successfully started for configuration file $cfgfile"
else
echo "Cannot start $prog for configuration file $cfgfile"
errno=1
fi
done
exit $errno
}
stop() {
if [ ! -f $pidfile-* ]; then
echo "No running $prog instances found."
exit 1
fi
for pfile in $pidfile-*
do
if ps -p$(cat $pfile) > /dev/null 2>&1
then
kill -TERM $(cat $pfile)
if [ $? == 0 ]; then
rm -f $pfile
echo "$prog instance $(basename $pfile) stopped."
else
echo "Cannot stop $prog instance $(basename $pfile)"
fi
else
rm -f $pfile
echo "$prog instance $pfile (pid $(cat $pfile)) is dead but pidfile exists."
fi
done
}
status() {
if [ ! -f $pidfile-* ]; then
echo "Cannot find any running $prog instance."
exit 1
fi
errno=0
for pfile in $pidfile-*
do
if ps -p$(cat $pfile) > /dev/null 2>&1
then
echo "$prog instance $(basename $pfile) is running (pid $(cat $pfile))"
else
echo "$prog instance $pfile (pid $(cat $pfile)) is dead but pidfile exists."
errno=1
fi
done
exit $errno
}
case "$1" in
start)
start
;;
stop)
stop
;;
restart)
stop
start
;;
status)
status
;;
condrestart|try-restart)
status || exit 0
restart
;;
*)
echo "Usage: $0 {start|stop|restart|status}"
;;
esac
exit 0

View File

@ -0,0 +1,13 @@
[Unit]
Description=osync - Target helper service
After=time-sync.target local-fs.target network-online.target
Requires=time-sync.target local-fs.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/usr/local/bin/osync.sh /etc/osync/%i --on-changes-target --errors-only
SuccessExitStatus=0 2
[Install]
WantedBy=multi-user.target
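Since ExecStart points at /etc/osync/%i, this is a systemd template unit: the instance name after "@" selects the configuration file. A hedged usage sketch (the unit file name and the configuration file name are assumptions):

    systemctl enable --now osync-target-helper-srv@sync.conf
    systemctl status osync-target-helper-srv@sync.conf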

View File

@ -0,0 +1,11 @@
[Unit]
Description=A robust two way (bidirectional) file sync script based on rsync with fault tolerance
After=time-sync.target local-fs.target network-online.target
Wants=network-online.target
[Service]
Type=simple
ExecStart=/usr/local/bin/osync.sh /etc/osync/%i --on-changes-target --silent
SuccessExitStatus=0 2
[Install]
WantedBy=multi-user.target

2868
osync.sh

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -9,13 +9,13 @@
##### Any other command will return a "syntax error"
##### For details, see ssh_filter.log
SCRIPT_BUILD=2017020802
# BUILD=2017020802
## Allow sudo
SUDO_EXEC=yes
SUDO_EXEC=true
## Log all valid commands too
_DEBUG=no
_DEBUG=false
## Set remote token in authorized_keys
if [ "$1" != "" ]; then
@ -25,12 +25,12 @@ fi
LOG_FILE="${HOME}/.ssh/ssh_filter.log"
function Log {
DATE=$(date)
DATE="$(date)"
echo "$DATE - $1" >> "$LOG_FILE"
}
function Go {
if [ "$_DEBUG" == "yes" ]; then
if [ "$_DEBUG" == true ]; then
Log "Executing [$SSH_ORIGINAL_COMMAND]."
fi
eval "$SSH_ORIGINAL_COMMAND"
@ -38,7 +38,7 @@ function Go {
case "${SSH_ORIGINAL_COMMAND}" in
*"env _REMOTE_TOKEN=$_REMOTE_TOKEN"*)
if [ "$SUDO_EXEC" != "yes" ] && [[ $SSH_ORIGINAL_COMMAND == *"sudo "* ]]; then
if [ "$SUDO_EXEC" != true ] && [[ $SSH_ORIGINAL_COMMAND == *"sudo "* ]]; then
Log "Command [$SSH_ORIGINAL_COMMAND] contains sudo which is not allowed."
echo "Syntax error unexpected end of file"
exit 1
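# For context, a hedged sketch of how this filter is typically wired in as a
# forced command in ~/.ssh/authorized_keys, passing the remote token as $1
# (key type, restriction options and install path are assumptions):
#   command="/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9",no-port-forwarding,no-X11-forwarding ssh-ed25519 AAAA... backupuser@initiator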

View File

@ -1,10 +1,8 @@
#!/usr/bin/env bash
###### osync - Rsync based two way sync engine with fault tolerance
###### (C) 2013-2017 by Orsiris de Jong (www.netpower.fr)
###### osync v1.1x / v1.2x config file rev 2017060501
###### (C) 2013-2023 by Orsiris de Jong (www.netpower.fr)
## ---------- GENERAL OPTIONS
[GENERAL]
CONFIG_FILE_REVISION=1.3.0
## Sync job identification
INSTANCE_ID="sync_test"
@ -23,11 +21,14 @@ SSH_RSA_PRIVATE_KEY="/home/backupuser/.ssh/id_rsa"
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
SSH_PASSWORD_FILE=""
## use the KRB5 credential cache to access SSH or rsync
#KRB5=true
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
_REMOTE_TOKEN=SomeAlphaNumericToken9
## Create sync directories if they do not exist
CREATE_DIRS=no
## Create sync directories if they do not exist (true/false)
CREATE_DIRS=true
## Log file location. Leaving this empty will create a logfile at /var/log/osync_version_SYNC_ID.log (or current directory if /var/log doesn't exist)
LOGFILE=""
@ -39,7 +40,7 @@ MINIMUM_SPACE=10240
BANDWIDTH=0
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
SUDO_EXEC=no
SUDO_EXEC=false
## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync
## Remote rsync executable path. Leave this empty in most cases
@ -64,52 +65,71 @@ RSYNC_EXCLUDE_FROM=""
## List elements separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";"
## ---------- REMOTE SYNC OPTIONS
## By default, osync stores its state into the replica_path/.osync_workdir/state
## This behavior can be changed for initiator or slave by overriding the following with an absolute path to a statedir, ex /opt/osync_state/initiator
## If osync runs locally, initiator and target state dirs **must** be different
INITIATOR_CUSTOM_STATE_DIR=""
TARGET_CUSTOM_STATE_DIR=""
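## Example for a local sync where both replicas live on this host (paths are
## illustrative, following the /opt/osync_state example above):
#INITIATOR_CUSTOM_STATE_DIR="/opt/osync_state/initiator"
#TARGET_CUSTOM_STATE_DIR="/opt/osync_state/target"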
## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes
[REMOTE_OPTIONS]
## ssh compression should be used on WAN links, unless your remote connection is good enough (LAN), in which case it would slow things down
SSH_COMPRESSION=false
## Optional ssh options. Example to lower CPU usage on ssh compression, one can specify '-T -c arcfour -o Compression=no -x'
## -T = turn off pseudo-tty, -c arcfour = weakest but fastest ssh encryption (destination must accept "Ciphers arcfour" in sshd_config), -x turns off X11 forwarding
## arcfour isn't accepted on most newer systems; you may then prefer an AES cipher if the processor has aes-ni hardware acceleration
## If the system does not provide hardware assisted acceleration, chacha20-poly1305@openssh.com is a good cipher to select
## See: https://wiki.csnu.org/index.php/SSH_ciphers_speed_comparison
## -o Compression=no is already handled by SSH_COMPRESSION option
## Uncomment the following line to use those optimizations, on secured links only
#SSH_OPTIONAL_ARGS="-T -c aes128-ctr -x"
#SSH_OPTIONAL_ARGS="-T -c chacha20-poly1305@openssh.com -x"
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
SSH_IGNORE_KNOWN_HOSTS=no
SSH_IGNORE_KNOWN_HOSTS=false
## Use a single TCP connection for all SSH calls. Will make remote sync faster, but may not work as well on lossy links.
SSH_CONTROLMASTER=false
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
REMOTE_HOST_PING=no
REMOTE_HOST_PING=false
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
## Be aware that some DNS services like OpenDNS resolve nonexistent hostnames, which can defeat this check. Also, this adds an extra execution time of a bit less than a minute.
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## ---------- MISC OPTIONS
[MISC_OPTIONS]
## Optional arguments passed to rsync executable. The following are already managed by the program and should never be passed here
## -r -l -p -t -g -o -D -E - u- i- n --executability -A -X -L -K -H -8 -zz skip-compress checksum bwlimit partial partial-dir no-whole-file whole-file backup backup-dir suffix
## -r -l -p -t -g -o -D -E -u -i -n --executability -A -X -L -K -H -8 -zz --skip-compress --checksum --bwlimit --partial --partial-dir --no-whole-file --whole-file --backup --backup-dir --suffix
## --exclude --exclude-from --include --include-from --list-only --stats
## When dealing with different filesystems for sync, or using SMB mountpoints, try adding --modify-window=2 --omit-dir-times as optional arguments.
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes
PRESERVE_OWNER=yes
PRESERVE_GROUP=yes
PRESERVE_PERMISSIONS=true
PRESERVE_OWNER=true
PRESERVE_GROUP=true
## On Mac OS X, this does not work and will be ignored
PRESERVE_EXECUTABILITY=yes
PRESERVE_EXECUTABILITY=true
## Preserve ACLs. Make sure source and target FS can handle ACLs. Disabled on Mac OS X.
PRESERVE_ACL=no
PRESERVE_ACL=false
## Preserve Xattr. Make sure source and target FS can manage identical XATTRS. Disabled on Mac OSX. Apparently, prior to rsync v3.1.2 there are some performance caveats with transferring XATTRS.
PRESERVE_XATTR=no
PRESERVE_XATTR=false
## Transforms symlinks into referent files/dirs. Be careful: symlinks without a referent will break sync, just as standard files that cannot be copied would.
COPY_SYMLINKS=no
COPY_SYMLINKS=false
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
KEEP_DIRLINKS=no
KEEP_DIRLINKS=false
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
PRESERVE_HARDLINKS=no
PRESERVE_HARDLINKS=false
## Do a full checksum on all files that have identical sizes; they are checksummed to see if they actually are identical. This can take a long time.
CHECKSUM=no
CHECKSUM=false
## Let RSYNC compress file transfers. Do not use this if both initiator and target replicas are on the local system. Also, do not use this if you already enabled SSH compression.
RSYNC_COMPRESS=yes
RSYNC_COMPRESS=true
## Maximum execution time (in seconds) for sync process. Setting these values to zero will disable max execution times.
## Soft exec time only generates a warning. Hard exec time will generate a warning and stop sync process.
@ -126,52 +146,57 @@ MIN_WAIT=60
## Use 0 to wait indefinitely.
MAX_WAIT=7200
## ---------- BACKUP AND DELETION OPTIONS
[BACKUP_DELETE_OPTIONS]
## Log a list of conflictual files
LOG_CONFLICTS=yes
## Log a list of conflictual files (EXPERIMENTAL)
LOG_CONFLICTS=false
## Send an email when conflictual files are found (implies LOG_CONFLICTS)
ALERT_CONFLICTS=no
ALERT_CONFLICTS=false
## Enabling this option will keep a backup of a file on the target replica if it gets updated from the source replica. Backups will be made to .osync_workdir/backups
CONFLICT_BACKUP=yes
CONFLICT_BACKUP=true
## Keep multiple backup versions of the same file. Warning: this can be very space consuming.
CONFLICT_BACKUP_MULTIPLE=no
CONFLICT_BACKUP_MULTIPLE=false
## Osync will clean backup files after a given number of days. Setting this to 0 will disable cleaning and keep backups forever. Warning: This can be very space consuming.
CONFLICT_BACKUP_DAYS=30
## If the same file exists on both replicas, the newer version will be synced. However, if both files have the same timestamp but differ, CONFLICT_PREVALANCE sets the winner replica.
CONFLICT_PREVALANCE=initiator
## On deletion propagation to the target replica, a backup of the deleted files can be kept. Deletions will be kept in .osync_workdir/deleted
SOFT_DELETE=yes
SOFT_DELETE=true
## Osync will clean deleted files after a given number of days. Setting this to 0 will disable cleaning and keep deleted files forever. Warning: This can be very space consuming.
SOFT_DELETE_DAYS=30
## Optional deletion skip on replicas. Valid values are "initiator", "target", or "initiator,target"
SKIP_DELETION=
## ---------- RESUME OPTIONS
## Optional sync type. By default, osync is bidirectional. You may want to use osync as a unidirectional sync in some circumstances. Valid values are "initiator2target" or "target2initiator"
SYNC_TYPE=
[RESUME_OPTIONS]
## Try to resume an aborted sync task
RESUME_SYNC=yes
RESUME_SYNC=true
## Maximum number of resume tries before initiating a fresh sync.
RESUME_TRY=2
## When a pidlock exists on slave replica that does not correspond to the initiator's instance-id, force pidlock removal. Be careful with this option if you have multiple initiators.
FORCE_STRANGER_LOCK_RESUME=no
FORCE_STRANGER_LOCK_RESUME=false
## Keep partial uploads that can be resumed on the next run (experimental feature)
PARTIAL=no
PARTIAL=false
## Use delta copy algorithm (useful when local paths are network drives), defaults to yes
DELTA_COPIES=yes
## ---------- ALERT OPTIONS
## Use delta copy algorithm (useful when local paths are network drives), defaults to true
DELTA_COPIES=true
[ALERT_OPTIONS]
## List of alert mails separated by spaces
## Most Unix systems (including Win10 bash) have mail support out of the box
## Just make sure that the current user has enough privileges to use mail / mutt / sendmail and that the mail system is configured to allow outgoing mails
## on pfSense platform, smtp support needs to be configured in System > Advanced > Notifications
DESTINATION_MAILS="your@alert.tld"
## By default, only sync warnings / errors are sent by mail. This default behavior can be overridden here
ALWAYS_SEND_MAILS=false
## Optional change of mail body encoding (using iconv)
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
@ -188,9 +213,9 @@ SMTP_ENCRYPTION=none
SMTP_USER=
SMTP_PASSWORD=
## ---------- EXECUTION HOOKS
[EXECUTION_HOOKS]
## Commands will be run before and/or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
## Commands will be run before and/or after the sync process
LOCAL_RUN_BEFORE_CMD=""
LOCAL_RUN_AFTER_CMD=""
@ -201,8 +226,8 @@ REMOTE_RUN_AFTER_CMD=""
MAX_EXEC_TIME_PER_CMD_BEFORE=0
MAX_EXEC_TIME_PER_CMD_AFTER=0
## Stops osync execution if one of the above commands fails
STOP_ON_CMD_ERROR=yes
## Stops osync execution if one of the above 'before' commands fails
STOP_ON_CMD_ERROR=true
## Run local and remote after sync commands even on failure
RUN_AFTER_CMD_ON_ERROR=no
RUN_AFTER_CMD_ON_ERROR=false
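Once a config file like the one above is in place, it is normally passed to osync.sh as its first argument; a dry run is a cheap way to validate it before letting it touch either replica. A minimal sketch, assuming the usual invocation and flags (check osync.sh --help for the authoritative list):

    # validate the configuration without modifying either replica
    ./osync.sh /etc/osync/sync_test.conf --dry
    # then run the real sync, optionally with more verbose output
    ./osync.sh /etc/osync/sync_test.conf --verbose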

View File

@ -1,19 +1,20 @@
#!/usr/bin/env bash
###### osync - Rsync based two way sync engine with fault tolerance
###### (C) 2013-2017 by Orsiris de Jong (www.netpower.fr)
###### osync-target-helper v1.2.2+ config file rev 2017061901
###### (C) 2013-2020 by Orsiris de Jong (www.netpower.fr)
## ---------- GENERAL OPTIONS
[GENERAL]
CONFIG_FILE_REVISION=1.3.0
## Sync job identification
INSTANCE_ID="sync_test"
INSTANCE_ID="target_test"
## Directories to synchronize.
## Initiator is the system the main osync instance runs on. The initiator directory must be a remote path for the osync target helper to contact.
INITIATOR_SYNC_DIR="ssh://backupuser@yourhost.old:22//home/git/osync/dir1"
## Initiator is the system osync runs on. The initiator directory must be a local path.
INITIATOR_SYNC_DIR="/home/git/osync/dir1"
#INITIATOR_SYNC_DIR="ssh://backupuser@yourhost.old:22//home/git/osync/dir1"
## Target is the system osync synchronizes to. The target directory must be a local path.
## Target is the system osync synchronizes to (can be the same system as the initiator in case of local sync tasks). The target directory can be a local or remote path.
TARGET_SYNC_DIR="/home/git/osync/dir2"
## If the target system is remote, you can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
@ -29,24 +30,27 @@ _REMOTE_TOKEN=SomeAlphaNumericToken9
LOGFILE=""
## If enabled, synchronization on remote system will be processed as superuser. See documentation for /etc/sudoers file configuration.
SUDO_EXEC=no
SUDO_EXEC=false
## ---------- REMOTE SYNC OPTIONS
## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes
SSH_COMPRESSION=true
## Ignore ssh known hosts. DANGER WILL ROBINSON DANGER ! This can lead to security issues. Only enable this if you know what you're doing.
SSH_IGNORE_KNOWN_HOSTS=no
SSH_IGNORE_KNOWN_HOSTS=false
## Check for connectivity to remote host before launching remote sync task. Be sure the host responds to ping. Failing to ping will stop sync.
REMOTE_HOST_PING=no
REMOTE_HOST_PING=false
## Check for internet access by pinging one or more 3rd party hosts before remote sync task. Leave empty if you don't want this check to be performed. Failing to ping will stop sync.
## If you use this function, you should set more than one 3rd party host, and be sure you can ping them.
## Be aware that some DNS services like OpenDNS resolve nonexistent hostnames, which can defeat this check. Also, this adds an extra execution time of a bit less than a minute.
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## Log a message every KEEP_LOGGING seconds just to know the task is still alive
KEEP_LOGGING=1801
## Minimum time (in seconds) in file monitor / daemon mode between modification detection and sync task in order to let copy operations finish.
MIN_WAIT=60
@ -54,7 +58,7 @@ MIN_WAIT=60
## Use 0 to wait indefinitely.
MAX_WAIT=7200
## ---------- ALERT OPTIONS
[ALERT_OPTIONS]
## List of alert mails separated by spaces
## Most Unix systems (including Win10 bash) have mail support out of the box
@ -77,3 +81,22 @@ SMTP_PORT=25
SMTP_ENCRYPTION=none
SMTP_USER=
SMTP_PASSWORD=
[EXECUTION_HOOKS]
## Commands will be run before and/or after the sync process (remote execution will only happen if REMOTE_OPERATION is set).
LOCAL_RUN_BEFORE_CMD=""
LOCAL_RUN_AFTER_CMD=""
REMOTE_RUN_BEFORE_CMD=""
REMOTE_RUN_AFTER_CMD=""
## Max execution time of commands before they get force-killed. Leave 0 if you don't want this to happen. Time is specified in seconds.
MAX_EXEC_TIME_PER_CMD_BEFORE=0
MAX_EXEC_TIME_PER_CMD_AFTER=0
## Stops osync execution if one of the above commands fails
STOP_ON_CMD_ERROR=true
## Run local and remote after sync commands even on failure
RUN_AFTER_CMD_ON_ERROR=false

View File

@ -2,12 +2,12 @@
PROGRAM="osync instance upgrade script"
SUBPROGRAM="osync"
AUTHOR="(C) 2016-2017 by Orsiris de Jong"
AUTHOR="(C) 2016-2020 by Orsiris de Jong"
CONTACT="http://www.netpower.fr/osync - ozy@netpower.fr"
OLD_PROGRAM_VERSION="v1.0x-v1.1x"
NEW_PROGRAM_VERSION="v1.2x"
CONFIG_FILE_VERSION=2017060501
PROGRAM_BUILD=2016121101
OLD_PROGRAM_VERSION="v1.0x-v1.2x"
NEW_PROGRAM_VERSION="v1.3x"
CONFIG_FILE_REVISION=1.3.0
PROGRAM_BUILD=2020012201
## type -p does not work on platforms other than Linux (bash). If it does not work, always assume the output is not a zero exit code
if ! type "$BASH" > /dev/null; then
@ -41,6 +41,7 @@ RSYNC_EXCLUDE_FROM
PATH_SEPARATOR_CHAR
SSH_COMPRESSION
SSH_IGNORE_KNOWN_HOSTS
SSH_CONTROLMASTER
REMOTE_HOST_PING
REMOTE_3RD_PARTY_HOSTS
RSYNC_OPTIONAL_ARGS
@ -69,6 +70,7 @@ CONFLICT_PREVALANCE
SOFT_DELETE
SOFT_DELETE_DAYS
SKIP_DELETION
SYNC_TYPE
RESUME_SYNC
RESUME_TRY
FORCE_STRANGER_LOCK_RESUME
@ -99,11 +101,11 @@ sync-test
${HOME}/backupuser/.ssh/id_rsa
''
SomeAlphaNumericToken9
no
false
''
10240
0
no
false
rsync
''
include
@ -112,41 +114,43 @@ include
''
''
\;
yes
no
no
true
false
false
false
'www.kernel.org www.google.com'
''
yes
yes
yes
yes
no
no
no
no
no
no
yes
true
true
true
true
false
false
false
false
false
false
true
7200
10600
1801
60
7200
yes
no
yes
no
false
false
true
false
30
initiator
yes
true
30
''
yes
''
true
2
no
no
yes
false
false
true
''
''
alert@your.system.tld
@ -161,8 +165,8 @@ none
''
0
0
yes
no
true
false
)
function Init {
@ -175,7 +179,8 @@ function Init {
FAILED_DELETE_LIST_FILENAME="-failed-delete-$SYNC_ID"
if [ "${SLAVE_SYNC_DIR:0:6}" == "ssh://" ]; then
REMOTE_OPERATION="yes"
# Might also exist from old config file as REMOTE_OPERATION=yes
REMOTE_OPERATION=true
# remove leading 'ssh://'
uri=${SLAVE_SYNC_DIR#ssh://*}
@ -225,22 +230,6 @@ function Usage {
exit 128
}
function CheckEnvironment {
if [ "$REMOTE_OPERATION" == "yes" ]; then
if ! type -p ssh > /dev/null 2>&1
then
Logger "ssh not present. Cannot start sync." "CRITICAL"
return 1
fi
fi
if ! type -p rsync > /dev/null 2>&1
then
Logger "rsync not present. Sync cannot start." "CRITICAL"
return 1
fi
}
function LoadConfigFile {
local config_file="${1}"
@ -267,134 +256,134 @@ function _RenameStateFilesLocal {
# Make sure there is no ending slash
state_dir="${state_dir%/}/"
if [ -f "$state_dir""master"$TREE_CURRENT_FILENAME ]; then
mv -f "$state_dir""master"$TREE_CURRENT_FILENAME "$state_dir""initiator"$TREE_CURRENT_FILENAME
if [ -f "${state_dir}master${TREE_CURRENT_FILENAME}" ]; then
mv -f "${state_dir}master${TREE_CURRENT_FILENAME}" "${state_dir}initiator${TREE_CURRENT_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_CURRENT_FILENAME
echo "Error while rewriting ${state_dir}master${TREE_CURRENT_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$TREE_AFTER_FILENAME ]; then
mv -f "$state_dir""master"$TREE_AFTER_FILENAME "$state_dir""initiator"$TREE_AFTER_FILENAME
if [ -f "${state_dir}master${TREE_AFTER_FILENAME}" ]; then
mv -f "${state_dir}master${TREE_AFTER_FILENAME}" "${state_dir}initiator${TREE_AFTER_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_AFTER_FILENAME
echo "Error while rewriting ${state_dir}master${TREE_AFTER_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME "$state_dir""initiator"$DELETED_LIST_FILENAME
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}" "${state_dir}initiator${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME
echo "Error while rewriting ${state_dir}master${DELETED_LIST_FILENAME}"
else
rewrite=true
fi
rewrite=true
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME
echo "Error while rewriting ${state_dir}master${FAILED_DELETE_LIST_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$TREE_CURRENT_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$TREE_CURRENT_FILENAME"-dry" "$state_dir""initiator"$TREE_CURRENT_FILENAME"-dry"
if [ -f "${state_dir}master${TREE_CURRENT_FILENAME}-dry" ]; then
mv -f "${state_dir}master${TREE_CURRENT_FILENAME}-dry" "${state_dir}initiator${TREE_CURRENT_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$TREE_CURRENT_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${TREE_CURRENT_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$TREE_AFTER_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$TREE_AFTER_FILENAME"-dry" "$state_dir""initiator"$TREE_AFTER_FILENAME"-dry"
if [ -f "${state_dir}master${TREE_AFTER_FILENAME}-dry" ]; then
mv -f "${state_dir}master${TREE_AFTER_FILENAME}-dry" "${state_dir}initiator${TREE_AFTER_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir""master"$TREE_AFTER_FILENAME"
echo "Error while rewriting ${state_dir}master${TREE_AFTER_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$DELETED_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME"-dry" "$state_dir""initiator"$DELETED_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}-dry" "${state_dir}initiator${DELETED_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${DELETED_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_CURRENT_FILENAME ]; then
mv -f "$state_dir""slave"$TREE_CURRENT_FILENAME "$state_dir""target"$TREE_CURRENT_FILENAME
if [ -f "${state_dir}slave${TREE_CURRENT_FILENAME}" ]; then
mv -f "${state_dir}slave${TREE_CURRENT_FILENAME}" "${state_dir}target${TREE_CURRENT_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_CURRENT_FILENAME
echo "Error while rewriting ${state_dir}slave${TREE_CURRENT_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_AFTER_FILENAME ]; then
mv -f "$state_dir""slave"$TREE_AFTER_FILENAME "$state_dir""target"$TREE_AFTER_FILENAME
if [ -f "${state_dir}slave${TREE_AFTER_FILENAME}" ]; then
mv -f "${state_dir}slave${TREE_AFTER_FILENAME}" "${state_dir}target${TREE_AFTER_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_AFTER_FILENAME
echo "Error while rewriting ${state_dir}slave${TREE_AFTER_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""slave"$DELETED_LIST_FILENAME "$state_dir""target"$DELETED_LIST_FILENAME
if [ -f "${state_dir}slave${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}slave${DELETED_LIST_FILENAME}" "${state_dir}target${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$DELETED_LIST_FILENAME
echo "Error while rewriting ${state_dir}slave${DELETED_LIST_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME "$state_dir""target"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}" "${state_dir}target${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$FAILED_DELETE_LIST_FILENAME
echo "Error while rewriting ${state_dir}slave${FAILED_DELETE_LIST_FILENAME}"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_CURRENT_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$TREE_CURRENT_FILENAME"-dry" "$state_dir""target"$TREE_CURRENT_FILENAME"-dry"
if [ -f "${state_dir}slave${TREE_CURRENT_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${TREE_CURRENT_FILENAME}-dry" "${state_dir}target${TREE_CURRENT_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_CURRENT_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${TREE_CURRENT_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$TREE_AFTER_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$TREE_AFTER_FILENAME"-dry" "$state_dir""target"$TREE_AFTER_FILENAME"-dry"
if [ -f "${state_dir}slave${TREE_AFTER_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${TREE_AFTER_FILENAME}-dry" "${state_dir}target${TREE_AFTER_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$TREE_AFTER_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${TREE_AFTER_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$DELETED_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$DELETED_LIST_FILENAME"-dry" "$state_dir""target"$DELETED_LIST_FILENAME"-dry"
if [ -f "${state_dir}slave${DELETED_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${DELETED_LIST_FILENAME}-dry" "${state_dir}target${DELETED_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$DELETED_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${DELETED_LIST_FILENAME}-dry"
else
rewrite=true
fi
fi
if [ -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""slave"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""target"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}target${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"slave"$FAILED_DELETE_LIST_FILENAME"-dry"
echo "Error while rewriting ${state_dir}slave${FAILED_DELETE_LIST_FILENAME}-dry"
else
rewrite=true
fi
@ -417,24 +406,24 @@ $SSH_CMD state_dir="${1}" DELETED_LIST_FILENAME="$DELETED_LIST_FILENAME" FAILED_
state_dir="${state_dir%/}/"
rewrite=false
if [ -f "$state_dir""master"$DELETED_LIST_FILENAME ]; then
mv -f "$state_dir""master"$DELETED_LIST_FILENAME "$state_dir""initiator"$DELETED_LIST_FILENAME
if [ -f "${state_dir}master${DELETED_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${DELETED_LIST_FILENAME}" "${state_dir}initiator${DELETED_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$DELETED_LIST_FILENAME
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME
else
rewrite=true
fi
fi
if [ -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" ]; then
mv -f "$state_dir""master"$FAILED_DELETE_LIST_FILENAME"-dry" "$state_dir""initiator"$FAILED_DELETE_LIST_FILENAME"-dry"
if [ -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" ]; then
mv -f "${state_dir}master${FAILED_DELETE_LIST_FILENAME}-dry" "${state_dir}initiator${FAILED_DELETE_LIST_FILENAME}-dry"
if [ $? != 0 ]; then
echo "Error while rewriting "$state_dir"master"$FAILED_DELETE_LIST_FILENAME"-dry"
else
@ -452,14 +441,14 @@ ENDSSH
function RenameStateFiles {
_RenameStateFilesLocal "$MASTER_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
if [ "$REMOTE_OPERATION" != "yes" ]; then
if [ "$REMOTE_OPERATION" != "yes" ] || "$REMOTE_OPERATION" == true ]; then
_RenameStateFilesLocal "$SLAVE_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
else
_RenameStateFilesRemote "$SLAVE_SYNC_DIR/$OSYNC_DIR/$STATE_DIR"
fi
}
function RewriteOldConfigFiles {
function CheckAndBackup {
local config_file="${1}"
if ! grep "MASTER_SYNC_DIR=" "$config_file" > /dev/null && ! grep "INITIATOR_SYNC_DIR=" "$config_file" > /dev/null; then
@ -473,6 +462,10 @@ function RewriteOldConfigFiles {
echo "Cannot backup config file."
exit 1
fi
}
function RewriteOldConfigFiles {
local config_file="${1}"
echo "Rewriting config file $config_file"
@ -488,7 +481,7 @@ function RewriteOldConfigFiles {
rm -f "$config_file.tmp"
}
function AddMissingConfigOptions {
function AddMissingConfigOptionsAndFixBooleans {
local config_file="${1}"
local counter=0
@ -496,27 +489,69 @@ function AddMissingConfigOptions {
if ! grep "^${KEYWORDS[$counter]}=" > /dev/null "$config_file"; then
echo "${KEYWORDS[$counter]} not found"
if [ $counter -gt 0 ]; then
if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
if [ $? -ne 0 ]; then
echo "Cannot add missing ${[KEYWORDS[$counter]}."
exit 1
fi
else
sed -i'.tmp' '/onfig file rev*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
fi
echo "Added missing ${KEYWORDS[$counter]} config option with default option [${VALUES[$counter]}]"
else
# Not the most elegant but the quickest way :)
if grep "^${KEYWORDS[$counter]}=yes$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=true/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${[KEYWORDS[$counter]} boolean to true."
exit 1
fi
elif grep "^${KEYWORDS[$counter]}=no$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=false/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${[KEYWORDS[$counter]} boolean to false."
exit 1
fi
fi
fi
counter=$((counter+1))
done
}
function RewriteSections {
local config_file="${1}"
sed -i'.tmp' 's/## ---------- GENERAL OPTIONS/[GENERAL]/g' "$config_file"
sed -i'.tmp' 's/## ---------- REMOTE OPTIONS/[REMOTE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- REMOTE SYNC OPTIONS/[REMOTE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- MISC OPTIONS/[MISC_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- BACKUP AND DELETION OPTIONS/[BACKUP_DELETE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- BACKUP AND TRASH OPTIONS/[BACKUP_DELETE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- RESUME OPTIONS/[RESUME_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- ALERT OPTIONS/[ALERT_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/## ---------- EXECUTION HOOKS/[EXECUTION_HOOKS]/g' "$config_file"
}
function UpdateConfigHeader {
local config_file="${1}"
if ! grep "^CONFIG_FILE_REVISION=" > /dev/null "$config_file"; then
if grep "\[GENERAL\]" > /dev/null "$config_file"; then
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
else
sed -i'.tmp' '/.*onfig file rev.*/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
fi
# "onfig file rev" to deal with earlier variants of the file where c was lower or uppercase
#sed -i'.tmp' '/onfig file rev/c\###### '$SUBPROGRAM' config file rev '$CONFIG_FILE_VERSION' '$NEW_PROGRAM_VERSION "$config_file"
sed -i'.tmp' 's/.*onfig file rev.*/##### '$SUBPROGRAM' config file rev '$CONFIG_FILE_VERSION' '$NEW_PROGRAM_VERSION'/' "$config_file"
rm -f "$config_file.tmp"
sed -i'.tmp' 's/.*onfig file rev.*//' "$config_file"
fi
}
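Taken together, RewriteSections, AddMissingConfigOptionsAndFixBooleans and UpdateConfigHeader turn an old-style config fragment into the new sectioned, boolean format. A minimal before/after sketch of the intended result (fragment is illustrative):

    # before upgrade
    ## ---------- GENERAL OPTIONS
    CREATE_DIRS=no
    SUDO_EXEC=no

    # after upgrade
    [GENERAL]
    CONFIG_FILE_REVISION=1.3.0
    CREATE_DIRS=false
    SUDO_EXEC=false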
_QUICK_SYNC=0
@ -526,11 +561,11 @@ do
case $i in
--master=*)
MASTER_SYNC_DIR=${i##*=}
_QUICK_SYNC=$(($_QUICK_SYNC + 1))
_QUICK_SYNC=$((_QUICK_SYNC + 1))
;;
--slave=*)
SLAVE_SYNC_DIR=${i##*=}
_QUICK_SYNC=$(($_QUICK_SYNC + 1))
_QUICK_SYNC=$((_QUICK_SYNC + 1))
;;
--rsakey=*)
SSH_RSA_PRIVATE_KEY=${i##*=}
@ -552,11 +587,19 @@ elif [ "$1" != "" ] && [ -f "$1" ] && [ -w "$1" ]; then
CONF_FILE="${CONF_FILE%/}"
LoadConfigFile "$CONF_FILE"
Init
CheckAndBackup "$CONF_FILE"
RewriteSections "$CONF_FILE"
RewriteOldConfigFiles "$CONF_FILE"
AddMissingConfigOptions "$CONF_FILE"
AddMissingConfigOptionsAndFixBooleans "$CONF_FILE"
UpdateConfigHeader "$CONF_FILE"
if [ -d "$MASTER_SYNC_DIR" ]; then
RenameStateFiles "$MASTER_SYNC_DIR"
fi
if [ -d "$SLAVE_SYNC_DIR" ]; then
RenameStateFiles "$SLAVE_SYNC_DIR"
fi
rm -f "$CONF_FILE.tmp"
echo "Configuration file upgrade finished."
else
Usage
fi
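In practice the upgrade script is run once per configuration file; the file must exist and be writable, since it is rewritten in place after CheckAndBackup makes a backup copy. A hedged example, with the script filename and paths purely illustrative:

    # upgrade a config-file based job in place
    bash ./upgrade-osync-conf.sh /etc/osync/sync_test.conf
    # quicksync jobs (no config file) pass their paths directly instead
    bash ./upgrade-osync-conf.sh --master=/data/dir1 --slave=/data/dir2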