kopia lustrzana https://github.com/OpenDroneMap/ODM
Porównaj commity
1139 Commity
Autor | SHA1 | Data |
---|---|---|
Piero Toffanin | ae6726e536 | |
Piero Toffanin | 6da366f806 | |
Piero Toffanin | e4e27c21f2 | |
Piero Toffanin | f9136f7a0d | |
idimitrovski | a2d9eccad5 | |
Piero Toffanin | 424d9e28a0 | |
Andrew Harvey | a0fbd71d41 | |
Piero Toffanin | 6084d1dca0 | |
Piero Toffanin | aef4182cf9 | |
Piero Toffanin | 6c0fe6e79d | |
Piero Toffanin | 17dfc7599a | |
Piero Toffanin | a70e7445ad | |
Piero Toffanin | 981bf88b48 | |
Piero Toffanin | ad63392e1a | |
Piero Toffanin | 77f8ffc8cd | |
Piero Toffanin | 4d7cf32a8c | |
Stephen Mather | 5a439c0ab6 | |
Piero Toffanin | ffcda0dc57 | |
Stephen Mather | 2c6fd1dd9f | |
Sylvain POULAIN | cb3229a3d4 | |
Piero Toffanin | fc9c94880f | |
kielnino | b204a2eb98 | |
Piero Toffanin | d9f77bea54 | |
kielnino | 10947ecddf | |
kielnino | f7c7044823 | |
Piero Toffanin | ae50133886 | |
Piero Toffanin | 9fd3bf3edd | |
Piero Toffanin | fb85b754fb | |
Piero Toffanin | 30f89c068c | |
Piero Toffanin | 260b4ef864 | |
Piero Toffanin | fb5d88366e | |
Piero Toffanin | f793627402 | |
Piero Toffanin | 9183218f1b | |
Piero Toffanin | 1283df206e | |
Piero Toffanin | 76a061b86a | |
Piero Toffanin | 32d933027e | |
Piero Toffanin | a29280157e | |
Piero Toffanin | 704c285b8f | |
Piero Toffanin | 5674e68e9f | |
Piero Toffanin | d419d9f038 | |
Piero Toffanin | b3ae35f5e5 | |
Piero Toffanin | 18d4d31be7 | |
Piero Toffanin | 16ccd277ec | |
Piero Toffanin | 7048868f28 | |
Piero Toffanin | b14ffd919a | |
Piero Toffanin | 4d1d0350a5 | |
Piero Toffanin | 7261c29efc | |
Piero Toffanin | 2ccad6ee9d | |
Piero Toffanin | 6acf9835e5 | |
Piero Toffanin | 5b5df3aaf7 | |
Piero Toffanin | 26cc9fbf93 | |
Piero Toffanin | b08f955963 | |
Piero Toffanin | d028873f63 | |
Piero Toffanin | 2d2b809530 | |
Piero Toffanin | 7e05a5b04e | |
Piero Toffanin | e0ab6ae7ed | |
Piero Toffanin | eceae8d2e4 | |
Piero Toffanin | 55570385c1 | |
Piero Toffanin | eed840c9bb | |
Piero Toffanin | 8376f24f08 | |
Piero Toffanin | 6d70a4f0be | |
Piero Toffanin | 6df5e0b711 | |
Piero Toffanin | 5d9564fda3 | |
Piero Toffanin | eccb203d7a | |
Piero Toffanin | 2df4afaecf | |
Piero Toffanin | e5ed68846e | |
Piero Toffanin | 7cf71628f3 | |
Piero Toffanin | 237bf8fb87 | |
Piero Toffanin | a542e7b78d | |
Piero Toffanin | 52fa5d12e6 | |
Piero Toffanin | e3296f0379 | |
Piero Toffanin | a06f6f19b2 | |
Piero Toffanin | 2d94934595 | |
Piero Toffanin | 08d03905e6 | |
Merten Fermont | f70e55c9eb | |
Merten Fermont | a89803c2eb | |
Piero Toffanin | de7595aeef | |
Piero Toffanin | aa0e9f68df | |
Piero Toffanin | 7ca122dbf6 | |
Piero Toffanin | 0d303aab16 | |
Piero Toffanin | 6dc0c98fa0 | |
Merten Fermont | c679d400c8 | |
Piero Toffanin | 38af615657 | |
Piero Toffanin | fc8dd7c5c5 | |
Piero Toffanin | 6eca279c4b | |
Piero Toffanin | 681ee18925 | |
Piero Toffanin | f9a3c5eb0e | |
Piero Toffanin | a56b52d0df | |
Piero Toffanin | f6be28db2a | |
Piero Toffanin | 5988be1f57 | |
Piero Toffanin | d9600741d1 | |
Piero Toffanin | 57c61d918d | |
Piero Toffanin | 7277eabd0b | |
Piero Toffanin | d78b8ff399 | |
Piero Toffanin | d10bef2631 | |
Piero Toffanin | 2930927207 | |
Piero Toffanin | 83fef16cb1 | |
Piero Toffanin | 2fea4d9f3d | |
Piero Toffanin | 50162147ce | |
Piero Toffanin | 07b641dc09 | |
Piero Toffanin | d2cd5d9336 | |
Piero Toffanin | 340e32af8f | |
Piero Toffanin | 8276751d07 | |
Piero Toffanin | ebba01aad5 | |
Piero Toffanin | f4549846de | |
Piero Toffanin | f5604a05a8 | |
Piero Toffanin | 3fc46a1e04 | |
Piero Toffanin | 4b8cf9af3d | |
Piero Toffanin | e9e18050a2 | |
Piero Toffanin | 9d15982850 | |
mdchia | 820ea4a4e3 | |
Saijin-Naib | e84c77dd56 | |
Stephen Mather | d929d7b8fa | |
Piero Toffanin | b948109e8f | |
Sebastien | c3593c0f69 | |
Sebastien | 5a20a22a1a | |
Adrien-ANTON-LUDWIG | b4aa3a9be0 | |
Adrien-ANTON-LUDWIG | 65c20796be | |
Piero Toffanin | 8bc251aea2 | |
Piero Toffanin | c32a8a5c59 | |
Piero Toffanin | f75a87977e | |
Piero Toffanin | e329c9a77b | |
rexliuser | be1fec2bd7 | |
Adrien-ANTON-LUDWIG | 87f82a1582 | |
Adrien-ANTON-LUDWIG | 9b9ba724c6 | |
Adrien-ANTON-LUDWIG | ee5ff3258f | |
Piero Toffanin | 80fd9dffdc | |
fr-damo | df0ea97321 | |
Piero Toffanin | 967fec0974 | |
fr-damo | e1b5a5ef65 | |
Piero Toffanin | 8121fca607 | |
Piero Toffanin | 80c4ce517c | |
udaf-mcq | afd38f631d | |
Piero Toffanin | eb95137a4c | |
Sebastien | eb4f30651e | |
Piero Toffanin | cefcfde07d | |
Piero Toffanin | b620e4e6cc | |
Liuxuyang | 8a4a309ceb | |
Piero Toffanin | cfa689b5da | |
Piero Toffanin | 0b8c75ca10 | |
Piero Toffanin | 3a4b98a7eb | |
Piero Toffanin | c2ab760dd9 | |
Piero Toffanin | dee9feed17 | |
Piero Toffanin | 542dd6d053 | |
Piero Toffanin | 5deab15e5f | |
Piero Toffanin | 6d37355d6b | |
Piero Toffanin | ba1cc39adb | |
Piero Toffanin | 54b0ac9bb0 | |
Piero Toffanin | 12b8f43912 | |
Piero Toffanin | ad091fd9af | |
Piero Toffanin | a2e63508c2 | |
Piero Toffanin | bebea18697 | |
Piero Toffanin | 58c9fd2231 | |
Piero Toffanin | 567cc3c872 | |
Piero Toffanin | 59019dac66 | |
Piero Toffanin | ef1ea9a067 | |
Piero Toffanin | 9014912c98 | |
Piero Toffanin | ad100525b5 | |
Piero Toffanin | 6ebb8b50d7 | |
Piero Toffanin | 8c300ab4de | |
Piero Toffanin | 609abfd115 | |
Howard Butler | 607ce5ffa6 | |
Piero Toffanin | ce6c745715 | |
Piero Toffanin | 4dd4da20c3 | |
Piero Toffanin | adc0570c53 | |
Piero Toffanin | 552b45bce4 | |
Piero Toffanin | 27abb8bb10 | |
Piero Toffanin | 05e8323174 | |
Piero Toffanin | f172e91b7e | |
Piero Toffanin | ed07b18bad | |
Piero Toffanin | 3535c64347 | |
Piero Toffanin | b076b667a4 | |
Piero Toffanin | 8e735e01d3 | |
Piero Toffanin | 396dde0d2c | |
Piero Toffanin | 4c7c37bbd4 | |
Piero Toffanin | 182bcfa68f | |
Piero Toffanin | 5db0d0111d | |
Piero Toffanin | 80e4b4d649 | |
rexliuser | 4a26aa1c9c | |
Piero Toffanin | a922aaecbc | |
Stephen Mather | 7be148a90a | |
Stephen Mather | 3f1975b353 | |
Piero Toffanin | b8965b50db | |
Piero Toffanin | ffad2b02e8 | |
Piero Toffanin | 1ae7974019 | |
Piero Toffanin | c0d5e21d38 | |
Piero Toffanin | f82b6a1f82 | |
Antonio Eugenio Burriel | ca7abe165a | |
Antonio Eugenio Burriel | 0f595cab80 | |
Piero Toffanin | d340d8601d | |
Stephen Mather | 14048cc049 | |
Piero Toffanin | f7c87172e9 | |
Piero Toffanin | c34f227157 | |
Piero Toffanin | 7aade078ad | |
Piero Toffanin | ac89d2212e | |
Piero Toffanin | cdf876a46b | |
Piero Toffanin | 8c0e1b3173 | |
Piero Toffanin | f27b611c43 | |
Piero Toffanin | e736670094 | |
Piero Toffanin | f8cd626ae8 | |
Piero Toffanin | 21e9df61f7 | |
Piero Toffanin | 2c8780c4d1 | |
Yunpeng Li | 1ea2a990e5 | |
Piero Toffanin | 706221c626 | |
Piero Toffanin | 02570ed632 | |
Piero Toffanin | 7048dd86fd | |
Piero Toffanin | bd0f33f978 | |
Piero Toffanin | 2361fce01d | |
Piero Toffanin | 6c94338a85 | |
Esteban | a2ee77b114 | |
Piero Toffanin | 6c32fc0594 | |
Esteban | a11992ab0f | |
Esteban | 9735c1cff8 | |
Piero Toffanin | 7bf91d1402 | |
Piero Toffanin | 4798aefc6a | |
Esteban | 59df84f1a8 | |
Esteban | 749f90bc37 | |
Piero Toffanin | 91201d5842 | |
Piero Toffanin | 41020ef1a8 | |
Piero Toffanin | 51feb49d09 | |
Piero Toffanin | f60dc33df0 | |
Piero Toffanin | c4874df8cb | |
Luca Di Leo | f89ddfb1bd | |
Luca Di Leo | d013539275 | |
Luca Di Leo | 3942755b10 | |
Luca Di Leo | d2ad5bac49 | |
Piero Toffanin | 4b3306ec9e | |
Piero Toffanin | 1d4827dd32 | |
Piero Toffanin | 02e4851230 | |
Piero Toffanin | 976db04148 | |
Piero Toffanin | 7cbe959da6 | |
Piero Toffanin | 6a7ab131ca | |
Piero Toffanin | b404366725 | |
Piero Toffanin | 4aa83c9956 | |
Piero Toffanin | 61483d9287 | |
Piero Toffanin | ddc1bb26b1 | |
Piero Toffanin | b0040f8f34 | |
Piero Toffanin | 61cff70be6 | |
Piero Toffanin | 24575bb25c | |
Piero Toffanin | ec6af4aa04 | |
Piero Toffanin | a2698f3ec9 | |
Piero Toffanin | 73887c6bcf | |
Piero Toffanin | 7bd81a93a2 | |
lurenzzzz | c85c54f505 | |
Piero Toffanin | 066e5bebb4 | |
HeDo | 6b0f8f62ff | |
HeDo | be142549e3 | |
Piero Toffanin | ffa7871c33 | |
Piero Toffanin | 473d496620 | |
Luca Di Leo | 7ace79cdc4 | |
Luca Di Leo | 31bfa95f19 | |
Luca Di Leo | fa3eb4af96 | |
HeDo | 02b92d322c | |
HeDo | 240ab7b108 | |
HeDo | 93be23b9ba | |
HeDo | c5f67024d1 | |
HeDo | 08b2755c6c | |
Piero Toffanin | 5ac36051a4 | |
Piero Toffanin | 266db75e36 | |
Piero Toffanin | b392c7a09d | |
Piero Toffanin | 22464d85f3 | |
zfb132 | 197981440b | |
HeDo | 34311a2380 | |
HeDo | 7c855688a1 | |
Piero Toffanin | 5259fd7007 | |
Piero Toffanin | 4f660ffd44 | |
Piero Toffanin | a58c50a663 | |
Piero Toffanin | 3d4725c615 | |
Piero Toffanin | 4a0b60bf70 | |
Piero Toffanin | 92cab06a51 | |
Esteban | 8f7755d4f5 | |
Esteban | 952cdf8b4b | |
Piero Toffanin | 74fcfe0e44 | |
Piero Toffanin | 0d6d2e6631 | |
Piero Toffanin | f72e9cc259 | |
Piero Toffanin | cd31933002 | |
Piero Toffanin | 6a72cb011f | |
Piero Toffanin | 759c2dbfba | |
Piero Toffanin | 09bf59ab87 | |
ckato | 91959bf299 | |
Piero Toffanin | 0289ab5062 | |
Piero Toffanin | bc87a1487e | |
Piero Toffanin | 6c5677ebc8 | |
Piero Toffanin | 0a2a06f61f | |
Piero Toffanin | a5764af46b | |
Piero Toffanin | e208372e05 | |
Piero Toffanin | 8cbe1f8c3f | |
Piero Toffanin | 89ea21284a | |
Piero Toffanin | 276bf0b090 | |
Piero Toffanin | de4bd062ec | |
Piero Toffanin | 28e51a4901 | |
Piero Toffanin | 97c9a4f773 | |
Piero Toffanin | ef41bed7d5 | |
Piero Toffanin | 3b11a371e4 | |
Piero Toffanin | 45a67cb244 | |
Piero Toffanin | 711a3f463d | |
Piero Toffanin | 53ec818f6d | |
Piero Toffanin | 643f92a66d | |
Piero Toffanin | dc997d3bbb | |
Piero Toffanin | 23028f3add | |
Piero Toffanin | f6c448745d | |
Piero Toffanin | 4139c951c5 | |
Piero Toffanin | 67787f0059 | |
Piero Toffanin | 64b687a3a6 | |
Piero Toffanin | 9932742920 | |
Piero Toffanin | 7e9a1c3886 | |
Piero Toffanin | 8b0b63d357 | |
Piero Toffanin | 17121d8d80 | |
Piero Toffanin | d1997b6270 | |
Piero Toffanin | 0da6067796 | |
Piero Toffanin | 1106635d09 | |
Piero Toffanin | a46f56731d | |
Piero Toffanin | 0118fcb5a6 | |
Piero Toffanin | f0e716f28b | |
Piero Toffanin | 0ede11b788 | |
Piero Toffanin | 270da0b757 | |
Piero Toffanin | 3296aa7a51 | |
Piero Toffanin | 94d0fedc0d | |
Piero Toffanin | 305ec0730f | |
Luca Di Leo | 60125010f2 | |
Piero Toffanin | 7c4da76280 | |
Piero Toffanin | 3680b54d64 | |
Piero Toffanin | bf824d3583 | |
Piero Toffanin | 280ba2c50b | |
Piero Toffanin | 64544f3c41 | |
Piero Toffanin | 9f17e8451a | |
Piero Toffanin | 2b2875bec6 | |
Piero Toffanin | 15131be6cc | |
Piero Toffanin | 6e50ed8fcd | |
Piero Toffanin | d105f3f499 | |
Piero Toffanin | aa82f76747 | |
Luca Di Leo | 02257a62cd | |
Luca Di Leo | a74337f8fe | |
Luca Di Leo | 2e362c2238 | |
Stephen Vincent Mather | 3f6b8b4936 | |
Stephen Vincent Mather | 999434e161 | |
Stephen Vincent Mather | e997e13e75 | |
Piero Toffanin | 4c77ce47d8 | |
Piero Toffanin | 0c0aa24dd9 | |
Piero Toffanin | 697727aeef | |
Piero Toffanin | 8cb70002f5 | |
Luca Di Leo | 9efea5a966 | |
Piero Toffanin | d55202daad | |
Piero Toffanin | 8410186b66 | |
Piero Toffanin | 9f1fafa38e | |
Piero Toffanin | 3d08c09ac9 | |
Piero Toffanin | de54db315a | |
Julien | 214b6ef9af | |
Piero Toffanin | be0165bbf4 | |
Piero Toffanin | 70155e35e8 | |
HeDo | d4846ad7d0 | |
Luca Di Leo | 62abebb741 | |
Luca Di Leo | 49a7554910 | |
Piero Toffanin | c60763a312 | |
Piero Toffanin | 551a76df02 | |
Luca Di Leo | 1f76738e18 | |
twchambers | 1f1365708c | |
Luca Di Leo | fc0e0e4641 | |
Piero Toffanin | 98ffccaf49 | |
Piero Toffanin | a330a8934d | |
Piero Toffanin | 8e72d30668 | |
Piero Toffanin | 4137f20bd9 | |
Piero Toffanin | e5bd090ba5 | |
Piero Toffanin | f728371074 | |
Piero Toffanin | 9f8eca2b83 | |
Piero Toffanin | 28c48c34e6 | |
Piero Toffanin | 35db4ae89f | |
Piero Toffanin | d74f573dbf | |
Piero Toffanin | 360ab3cc5f | |
Piero Toffanin | 9a77a4e611 | |
Piero Toffanin | 7aec8925d7 | |
Piero Toffanin | ce82d181b9 | |
Saijin-Naib | b8e77f6750 | |
Piero Toffanin | 87880d24fb | |
Piero Toffanin | 07be544325 | |
Piero Toffanin | e91eb5dad9 | |
Saijin-Naib | 020979da4f | |
Piero Toffanin | 9ddb4554c4 | |
Piero Toffanin | d3832e1cf5 | |
Piero Toffanin | d30b498e87 | |
Thor | b60995a512 | |
Shuo Zhong | 0e1ece8412 | |
Piero Toffanin | be6b1ba7f0 | |
Piero Toffanin | 89eede36f1 | |
Piero Toffanin | 95712f4b47 | |
Piero Toffanin | 0810bfe868 | |
Piero Toffanin | c463136cb0 | |
Piero Toffanin | 4cf7f159b7 | |
Piero Toffanin | 8b60e80d6f | |
Piero Toffanin | c51d52b056 | |
Shuo Zhong | 054a4414a1 | |
Piero Toffanin | c80d4e0486 | |
Piero Toffanin | f7f94f4ff1 | |
Piero Toffanin | 5a129b6ef5 | |
Piero Toffanin | e9f0b14bec | |
Piero Toffanin | 92a685c390 | |
Piero Toffanin | f1400eea03 | |
Piero Toffanin | 8e567ab543 | |
Piero Toffanin | be4d3d0cd1 | |
Piero Toffanin | 07d8101780 | |
Piero Toffanin | c1cd2456b1 | |
Piero Toffanin | 5c33cfe625 | |
Piero Toffanin | 1a0db4247e | |
Piero Toffanin | af765a961d | |
Piero Toffanin | c46d6efbc3 | |
Stephen Mather | 917380fb20 | |
Piero Toffanin | 70b6e655a9 | |
Piero Toffanin | 2732773eb1 | |
Piero Toffanin | 22b5f4f236 | |
Piero Toffanin | caa1c0206e | |
Piero Toffanin | 1a876ca540 | |
Piero Toffanin | e67b094a97 | |
Stephen Mather | 9c4d9add0f | |
Stephen Mather | 40973c386a | |
Stephen Mather | 9e9037b232 | |
Piero Toffanin | 13105722ad | |
Piero Toffanin | 5ff55a5ff8 | |
Piero Toffanin | ed8bc94288 | |
Piero Toffanin | 85396167d6 | |
Piero Toffanin | edd7809eb6 | |
Piero Toffanin | 96ecbba8cb | |
Piero Toffanin | c7e93e18dd | |
Piero Toffanin | 70d2b76c78 | |
Piero Toffanin | 4134f72e8f | |
Piero Toffanin | d620507c25 | |
Piero Toffanin | 7be182b6d5 | |
Piero Toffanin | cda8f227b1 | |
Piero Toffanin | ff31f9b0dd | |
Piero Toffanin | 6c012d189e | |
Piero Toffanin | 4c59bb7c5c | |
Piero Toffanin | 181f9a89ce | |
Piero Toffanin | 57a804598e | |
Piero Toffanin | 61d1865bd3 | |
Piero Toffanin | 093929dd91 | |
Piero Toffanin | 75c8068836 | |
Piero Toffanin | b29a4f1a70 | |
Sylvain POULAIN | 3c00ffb3a7 | |
Piero Toffanin | be5c710c1a | |
Piero Toffanin | 0249d6b6c8 | |
Piero Toffanin | 081d4c8948 | |
Piero Toffanin | f6d6210827 | |
Piero Toffanin | c7676ba5f3 | |
Piero Toffanin | cd720006f6 | |
Piero Toffanin | deb53279e3 | |
Piero Toffanin | 44914f867c | |
Piero Toffanin | 3486641224 | |
Piero Toffanin | b645b2077c | |
Piero Toffanin | cbf33f9f7c | |
Piero Toffanin | 9af52a26c9 | |
Piero Toffanin | 481ea86f7b | |
Piero Toffanin | eb5cc0e1dd | |
Piero Toffanin | f0bdea4aed | |
Piero Toffanin | 0faa7a593b | |
Piero Toffanin | 21895ca71a | |
Piero Toffanin | 333eec7644 | |
Piero Toffanin | 643ea4eee6 | |
Piero Toffanin | 03e224691b | |
Piero Toffanin | 278e85db6a | |
Piero Toffanin | 8884c9c2ea | |
Piero Toffanin | 8abd163823 | |
Piero Toffanin | 2b2115c298 | |
Piero Toffanin | 808f9e40a2 | |
Piero Toffanin | db913403bc | |
Piero Toffanin | 9549a7de26 | |
Piero Toffanin | 3fd7b3c086 | |
Piero Toffanin | a6b4958caf | |
Piero Toffanin | 4cb5d75cfc | |
Piero Toffanin | 78dfe26cf3 | |
Piero Toffanin | 434042e644 | |
Piero Toffanin | 3453eda4dd | |
Piero Toffanin | 847c7ebf01 | |
Piero Toffanin | 3637cadbf3 | |
Piero Toffanin | 2a526f1120 | |
Piero Toffanin | 6031131a65 | |
Piero Toffanin | f696c33b11 | |
Piero Toffanin | d8ad954702 | |
Dani Llewellyn | c6508b3f8b | |
Dani Llewellyn | 07fcf73a88 | |
Dani Llewellyn | 5ab34603a6 | |
Piero Toffanin | 775820c957 | |
Piero Toffanin | d85805cc00 | |
Piero Toffanin | 5f80a930d5 | |
Esteban | 4c6173758a | |
Piero Toffanin | 181652e3dd | |
Piero Toffanin | d880e6ecfc | |
Carsten | 750938732e | |
Piero Toffanin | 281fb52269 | |
Piero Toffanin | e0210afda6 | |
Piero Toffanin | 0bd0434225 | |
Piero Toffanin | 30a0ed2a44 | |
Piero Toffanin | 0bbd16d24f | |
Piero Toffanin | 02679f3894 | |
Piero Toffanin | 90a60cb58a | |
Piero Toffanin | a182f1bba3 | |
Umang Kalra | 3200dd27cd | |
Piero Toffanin | 6fee4be2b6 | |
dsandson | 04052540fc | |
Piero Toffanin | dbcdd5a3cb | |
Thor | d7277bea64 | |
Thor | d09025513a | |
Piero Toffanin | 77674fa7af | |
Piero Toffanin | 916e22b25c | |
Piero Toffanin | 393786a7ce | |
Stephen Mather | 61eefa7751 | |
Piero Toffanin | 56f932f717 | |
Piero Toffanin | 7343758d04 | |
Piero Toffanin | 646a9373bc | |
Piero Toffanin | b19a51c847 | |
Piero Toffanin | a2f1579af7 | |
Piero Toffanin | 8b41d5fb7b | |
Piero Toffanin | 5d54d342c9 | |
Piero Toffanin | 4272cd9d76 | |
Piero Toffanin | 2b92f6ccf2 | |
usplm | 14ec9c1f90 | |
usplm | 9c64f8fdcc | |
Piero Toffanin | dae70b32ae | |
Piero Toffanin | 3fa065e0af | |
Piero Toffanin | 51abdd1373 | |
Piero Toffanin | 1e778d8a9f | |
Piero Toffanin | a2f5de779e | |
Piero Toffanin | ac6a0a9fc0 | |
Piero Toffanin | 55a7821058 | |
Piero Toffanin | 9171455012 | |
Piero Toffanin | 12bb725992 | |
Piero Toffanin | 6da2f91f9d | |
Piero Toffanin | 97c35317e8 | |
DarrylB6438 | 41d3e20620 | |
Piero Toffanin | b2c9079624 | |
Piero Toffanin | 5dd0859f47 | |
Piero Toffanin | 1a905e9709 | |
Piero Toffanin | 9cff356f49 | |
Piero Toffanin | 24f9945fba | |
Piero Toffanin | 801b771ba8 | |
Piero Toffanin | 08e1e33919 | |
Piero Toffanin | b584459fc9 | |
Piero Toffanin | d61d0e0cbe | |
Piero Toffanin | dc763e486f | |
Piero Toffanin | f1fc89e517 | |
Piero Toffanin | 4aeae213f2 | |
Piero Toffanin | 461992fb18 | |
luzpaz | 1bec793f9d | |
Piero Toffanin | d21c5baf64 | |
Piero Toffanin | 44d0039de8 | |
Piero Toffanin | b7ae59b429 | |
Piero Toffanin | 6e23de03a0 | |
Piero Toffanin | cc7fb2efa5 | |
Piero Toffanin | c7b6fe52f3 | |
Piero Toffanin | f4da6d9c03 | |
Piero Toffanin | ba4fa0d555 | |
Stephen Mather | 26feef4fdb | |
Piero Toffanin | 1789f09387 | |
Piero Toffanin | 812281a6ae | |
Piero Toffanin | d99043ca6b | |
Piero Toffanin | 5c82b65788 | |
usplm | ea5a4b4053 | |
Piero Toffanin | da73ada89b | |
usplm | c1b9ff4c8c | |
Piero Toffanin | 760238b9cd | |
usplm | c35ab4480b | |
usplm | d7ae810958 | |
usplm | cca10a82ac | |
usplm | 7ec2434072 | |
usplm | d640e0dfd9 | |
usplm | 1a0711671f | |
Piero Toffanin | 12619dfd64 | |
Piero Toffanin | e283125db9 | |
Stephen Vincent Mather | 12dac7cd20 | |
Piero Toffanin | 72310bc9a9 | |
Piero Toffanin | 9f27c21a2a | |
Piero Toffanin | 4a9ca4c283 | |
Saijin-Naib | 9e09da12ff | |
Piero Toffanin | a568d1fb2d | |
Piero Toffanin | 885f568503 | |
Saijin-Naib | 3223262055 | |
Saijin-Naib | b1a523d26a | |
Piero Toffanin | 0f2a4897b3 | |
Piero Toffanin | 77b0f39589 | |
Piero Toffanin | 70d68f59ca | |
Piero Toffanin | e1fac6c395 | |
Piero Toffanin | 89d28a97a7 | |
Shuo Zhong | be50747ff2 | |
Shuo Zhong | 8f5915cb79 | |
Piero Toffanin | 7f7bc332ba | |
Stephen Mather | 559ccbacad | |
Stephen Mather | 1e0ea2d074 | |
Piero Toffanin | 0339e0108a | |
Shuo Zhong | 9fd42959d3 | |
Piero Toffanin | 157a2eb90a | |
Piero Toffanin | 200a553019 | |
Piero Toffanin | 40e0f3d413 | |
Nyall Dawson | 54ecc60424 | |
Piero Toffanin | 8b93e068fe | |
Piero Toffanin | 264104190b | |
Piero Toffanin | 55ad45a3f3 | |
Piero Toffanin | 653faedf1c | |
Piero Toffanin | 44abc0c1f5 | |
Piero Toffanin | 3ea4d068db | |
Stephen Mather | af98edd8bf | |
Piero Toffanin | b1c9152cc8 | |
Piero Toffanin | 0ea71dfb4f | |
Piero Toffanin | 9042a9ad28 | |
Piero Toffanin | 2a4e7735a3 | |
Saijin-Naib | 2105630298 | |
Saijin-Naib | e7bc52f2a0 | |
Piero Toffanin | 7da3e19e6d | |
Piero Toffanin | e80b89a055 | |
Piero Toffanin | f3f0d21b2a | |
Piero Toffanin | 982a490ee0 | |
Piero Toffanin | 4f2b7d6bda | |
Piero Toffanin | a83490a2e7 | |
Piero Toffanin | 41ad541aa2 | |
Piero Toffanin | 71cd79e19d | |
Piero Toffanin | 142b496a7e | |
Piero Toffanin | a4ff33ced6 | |
Piero Toffanin | 493f05406d | |
Piero Toffanin | b9bf1d75a7 | |
Piero Toffanin | 49d0906ce5 | |
Piero Toffanin | 275f719024 | |
Stephen Mather | d97c2fae1e | |
Piero Toffanin | ed425d2465 | |
Piero Toffanin | d62ca21243 | |
Piero Toffanin | bbe3e87010 | |
Piero Toffanin | c21ff9e6c2 | |
Piero Toffanin | 9d4fcda5a4 | |
Piero Toffanin | 732205910b | |
Piero Toffanin | 14f933afae | |
Piero Toffanin | 07a4c3f61c | |
Piero Toffanin | a764b8c64c | |
Piero Toffanin | b2654e15f3 | |
Piero Toffanin | e26380e5f5 | |
Piero Toffanin | 09a4d0cdf6 | |
Piero Toffanin | 47bd1d600d | |
Piero Toffanin | 5cc688bc4d | |
Piero Toffanin | ecb82b9dd5 | |
Piero Toffanin | 01fb2b259e | |
Mihai | 12cf668ba2 | |
Piero Toffanin | beb4a7e3ff | |
Piero Toffanin | 73472c1cad | |
Piero Toffanin | aa53e8b88f | |
Piero Toffanin | ea5a60eb92 | |
Piero Toffanin | bfd09aa506 | |
Saijin-Naib | 0e3a7be8cc | |
Piero Toffanin | f389ab280c | |
Piero Toffanin | beec0603ac | |
Piero Toffanin | 1cc082d3f6 | |
Piero Toffanin | 09c24d7138 | |
Piero Toffanin | 3130aa4f6c | |
Piero Toffanin | 47eb29f31c | |
Piero Toffanin | d893727f1f | |
Piero Toffanin | 3b53dd7cd0 | |
Piero Toffanin | 6f6827091f | |
Piero Toffanin | c6b43094ba | |
Piero Toffanin | 3c548b5985 | |
Piero Toffanin | 330a1f5529 | |
Piero Toffanin | 9945edaddf | |
Piero Toffanin | 6914190e46 | |
Piero Toffanin | 407a2bc072 | |
Piero Toffanin | 43cd11b78d | |
Piero Toffanin | 1facc004e3 | |
Piero Toffanin | 592382c83f | |
Esteban | 6f07f1ad33 | |
Piero Toffanin | 2e87505ce7 | |
Piero Toffanin | 14f03a5c5f | |
Esteban | d9a788dc0b | |
Piero Toffanin | ccb8b6ef7b | |
Saijin-Naib | 733d47977e | |
Piero Toffanin | e009656b01 | |
Stephen Mather | 6025cec776 | |
Stephen Mather | 33f6fe9a75 | |
Stephen Mather | a4d7189414 | |
Stephen Mather | 92299bce23 | |
Piero Toffanin | a06207eeb5 | |
Piero Toffanin | a4ef8a9f2b | |
Piero Toffanin | 922d5fa355 | |
Piero Toffanin | 8e86daae9d | |
Piero Toffanin | 29fa4eb36f | |
Piero Toffanin | 0e062b56ca | |
Piero Toffanin | f89e07ac41 | |
Piero Toffanin | 0d3b169822 | |
Piero Toffanin | b13b6d8fd4 | |
Piero Toffanin | 5cf85a78b1 | |
Piero Toffanin | e0974e18d3 | |
Piero Toffanin | da276c2211 | |
Piero Toffanin | c1da63b4dd | |
Piero Toffanin | e65afba7b6 | |
Piero Toffanin | 07be35270f | |
Piero Toffanin | 60316873d3 | |
Piero Toffanin | 8be32919b4 | |
Piero Toffanin | 5c3a65041a | |
Piero Toffanin | f22a594bc4 | |
Piero Toffanin | 91f7080d52 | |
sbonaime | ae53410acf | |
sbonaime | ad6d493fad | |
sbonaime | f82e39463d | |
Piero Toffanin | 6b2c3859d5 | |
Piero Toffanin | d3ae0adda4 | |
Piero Toffanin | 3f1de1e3cd | |
Piero Toffanin | eb1d263e78 | |
Piero Toffanin | e6b3c74b27 | |
Piero Toffanin | c13dd17a1b | |
Piero Toffanin | b2f34cfd9c | |
Piero Toffanin | 9dca3a1ea9 | |
Piero Toffanin | f947c18b29 | |
Piero Toffanin | 3f2dcb8657 | |
Piero Toffanin | 086716d7d7 | |
Piero Toffanin | aee8997dcb | |
Piero Toffanin | b2354332b9 | |
bonaime | 3620b13016 | |
Piero Toffanin | 95aad66458 | |
Piero Toffanin | ebcf04dff3 | |
Piero Toffanin | 673a25b79c | |
Piero Toffanin | cbb62bfab0 | |
Piero Toffanin | 851030ff49 | |
Piero Toffanin | 9d7c4bf751 | |
Piero Toffanin | d48f40b830 | |
Piero Toffanin | c0d275072a | |
Piero Toffanin | 09cb979aae | |
Piero Toffanin | 609d5ea004 | |
Piero Toffanin | 416c02c491 | |
Piero Toffanin | aed99a0a85 | |
Piero Toffanin | 7506dbf8ea | |
Saijin-Naib | eefd08b073 | |
Piero Toffanin | 2c5ec51588 | |
Piero Toffanin | d9cf169cb5 | |
Piero Toffanin | 6267b8ba96 | |
Piero Toffanin | eb9a4111ab | |
Piero Toffanin | c8033cc26b | |
Piero Toffanin | c4cebbb2cb | |
Piero Toffanin | 68c7cce003 | |
Piero Toffanin | 52e3fc5bbd | |
Piero Toffanin | 4e2e4a6878 | |
luz paz | cb6835d1bc | |
Piero Toffanin | 08fcb03f22 | |
Piero Toffanin | 88326108e9 | |
Piero Toffanin | 2c2ac29d49 | |
Piero Toffanin | 37886f77c2 | |
Piero Toffanin | 3e64c715fe | |
Piero Toffanin | d7b986a1b1 | |
Piero Toffanin | d536e7ebfe | |
Piero Toffanin | 15ed7602c0 | |
Esteban | 36fff9916a | |
Piero Toffanin | e1e3e9f49b | |
Piero Toffanin | ab6986395e | |
Piero Toffanin | e586961ab4 | |
Piero Toffanin | bd69e0c727 | |
Piero Toffanin | 73f0ce404d | |
markFieldman | def3c6bb9b | |
markFieldman | 27e6116977 | |
Piero Toffanin | 3957278c2e | |
Piero Toffanin | 7d15bf8720 | |
Piero Toffanin | 646b2e4f70 | |
Piero Toffanin | 3386b7dc3e | |
Piero Toffanin | 28566c0acc | |
Piero Toffanin | fef659af41 | |
Piero Toffanin | 4b0f64e1bf | |
Esteban | 87325db7bf | |
Esteban | a0ba674dd4 | |
Piero Toffanin | 1d0c838d27 | |
Piero Toffanin | 66f32588c6 | |
Piero Toffanin | 6ed490d2e3 | |
Piero Toffanin | 755b261686 | |
Piero Toffanin | 1a06af4c5a | |
Piero Toffanin | 239f051940 | |
Piero Toffanin | 497b5c60ae | |
Piero Toffanin | 70face0492 | |
Piero Toffanin | b8cf083020 | |
Piero Toffanin | 0ca7d1cefd | |
Piero Toffanin | 4a25bdfb46 | |
Piero Toffanin | 389940c338 | |
Piero Toffanin | 3f8765f5e7 | |
Piero Toffanin | b544ca2464 | |
Piero Toffanin | fe37770c52 | |
Piero Toffanin | b2764ae7f3 | |
Piero Toffanin | 8566ba617c | |
Piero Toffanin | 944cd4a0bf | |
Piero Toffanin | 86b0ddd81c | |
Piero Toffanin | 1a925e14c5 | |
Piero Toffanin | eabb7c942c | |
Piero Toffanin | 6fb7a26791 | |
Piero Toffanin | b492bbfadc | |
Piero Toffanin | d5a472eeec | |
Piero Toffanin | 2f41ed420b | |
Piero Toffanin | db0e600872 | |
Piero Toffanin | 792500bd1b | |
Piero Toffanin | 1dd0b4b457 | |
Piero Toffanin | 86e6296217 | |
Piero Toffanin | 4eba4cf310 | |
Piero Toffanin | 998a7cb016 | |
Piero Toffanin | 7fa98c5668 | |
Piero Toffanin | b69f5ef26b | |
Piero Toffanin | 7fd8fb4c87 | |
Piero Toffanin | d9f208cf31 | |
Piero Toffanin | 202a66c4e4 | |
Piero Toffanin | e1a326f936 | |
Saijin-Naib | f4985026de | |
Piero Toffanin | 11f42102e2 | |
Piero Toffanin | 47846022fc | |
Piero Toffanin | 83952db23e | |
Robin Dassy | 5eb72f0ddd | |
Piero Toffanin | b64ab3008c | |
Piero Toffanin | 969d60ce8a | |
Piero Toffanin | 72d3d21eb7 | |
Piero Toffanin | 32ef814806 | |
Piero Toffanin | ff3a5b7f13 | |
Piero Toffanin | 1e9de89779 | |
Piero Toffanin | 35213a047e | |
Piero Toffanin | 15275d9aa0 | |
Piero Toffanin | 849e9e24a3 | |
Piero Toffanin | 57ae11da39 | |
Piero Toffanin | 29b3d7ec93 | |
Piero Toffanin | f9b04a5190 | |
Piero Toffanin | 9846daec48 | |
Piero Toffanin | 93be23d2d3 | |
Piero Toffanin | 9369a4c2d4 | |
Piero Toffanin | ba607c8357 | |
Piero Toffanin | c10daf3f6f | |
Shuo Zhong | a8a53244dc | |
Piero Toffanin | bd3069ee2b | |
Piero Toffanin | d6c40929d4 | |
Piero Toffanin | 43870b6411 | |
Piero Toffanin | 7f198d90ec | |
Piero Toffanin | 5dc2e224ce | |
Piero Toffanin | 8791b74b73 | |
Piero Toffanin | 4dadb53a49 | |
Piero Toffanin | 2af9ebb4d9 | |
Piero Toffanin | 0c81756cb7 | |
Piero Toffanin | fbdf5fcc21 | |
Piero Toffanin | efab1b0ebd | |
Mike Green | 579a4399e3 | |
Piero Toffanin | 5cb332ae7b | |
Piero Toffanin | ee145fe7a1 | |
Piero Toffanin | 8d80beb58a | |
Piero Toffanin | eb933eabe4 | |
Piero Toffanin | 5259491165 | |
Piero Toffanin | 59ed0c6662 | |
Piero Toffanin | 0589483b9b | |
Piero Toffanin | 1042e50a49 | |
Piero Toffanin | 1ed9087e4f | |
Piero Toffanin | 096ca554a6 | |
Piero Toffanin | 2d74aa9f57 | |
Piero Toffanin | ed8ae1b1a8 | |
Piero Toffanin | 88ab373f12 | |
Piero Toffanin | 97cd395a2a | |
Piero Toffanin | dd9748d192 | |
Piero Toffanin | 868e1b92b0 | |
Piero Toffanin | cd854bcc58 | |
Piero Toffanin | e9d9b3f34a | |
Piero Toffanin | 73ee3192b2 | |
Piero Toffanin | 1915afe380 | |
Piero Toffanin | 263e7bec7a | |
Piero Toffanin | bea173cfa6 | |
Piero Toffanin | 7a07410b0e | |
Piero Toffanin | b7518e54bf | |
Piero Toffanin | 38bdd13cef | |
Piero Toffanin | 4f7bb8018f | |
Piero Toffanin | 106be000ae | |
Piero Toffanin | 539871bac4 | |
Piero Toffanin | 5c36ee0ee9 | |
Piero Toffanin | 7d4e5d2a3f | |
Piero Toffanin | e0b5459dd5 | |
Saijin-Naib | a52bbe6154 | |
Saijin-Naib | 4ae2b7f335 | |
Piero Toffanin | 715fe56bf6 | |
Piero Toffanin | 9a11f37560 | |
Magnus Linderoth | 93991ec4df | |
Magnus Linderoth | dc1ba2966d | |
Piero Toffanin | ea12d56880 | |
Piero Toffanin | 0cfd3c9a9b | |
Piero Toffanin | 48f08d5d0c | |
Piero Toffanin | 2c3eabdc2c | |
Piero Toffanin | c97d8b6b47 | |
Piero Toffanin | 899000bfa6 | |
Piero Toffanin | 0a198f4b3e | |
Piero Toffanin | dddfc45190 | |
Piero Toffanin | fe12823e6a | |
Piero Toffanin | 6826db9d31 | |
Piero Toffanin | 365549f51a | |
Piero Toffanin | 4174907083 | |
Piero Toffanin | 7a00b0a904 | |
Piero Toffanin | a16505861c | |
Piero Toffanin | 11be44d3b0 | |
Piero Toffanin | d2b2f6ba6a | |
Piero Toffanin | 92d868e33e | |
Piero Toffanin | 335802b563 | |
Piero Toffanin | 4e9f53e0a2 | |
Piero Toffanin | 47ffe80556 | |
Stephen Mather | b9f0bb9fba | |
Piero Toffanin | ba90ae781e | |
Piero Toffanin | baae4951a5 | |
Piero Toffanin | f1cdfc655b | |
Piero Toffanin | 7aa6444637 | |
Piero Toffanin | d827ee62d7 | |
Piero Toffanin | 9fe8be2773 | |
Piero Toffanin | bdd83dcee6 | |
Piero Toffanin | 2e5f5ef47c | |
Piero Toffanin | 873ab47866 | |
Piero Toffanin | edf8d883bc | |
Piero Toffanin | 084038543e | |
Piero Toffanin | 74a35035c7 | |
Piero Toffanin | 3c6b978d26 | |
Piero Toffanin | 6bcbacde8e | |
Piero Toffanin | 2b638d2059 | |
Darío Hereñú | 59156d0981 | |
Piero Toffanin | 3eb648e652 | |
Piero Toffanin | 901cd8f693 | |
Piero Toffanin | e7e598650e | |
Piero Toffanin | 70e07e5840 | |
Piero Toffanin | 897919b402 | |
Piero Toffanin | 34962b900e | |
Piero Toffanin | c20ac4ca33 | |
Piero Toffanin | a9933299e2 | |
Piero Toffanin | 035d4cbdd9 | |
Stephen Mather | b4ce3ef2cd | |
Stephen Mather | ffdf4962d8 | |
Piero Toffanin | 64c2d4322b | |
Piero Toffanin | 76767fec19 | |
Piero Toffanin | 420a78a10e | |
Piero Toffanin | 23dcfb2f00 | |
Piero Toffanin | c8b596d056 | |
Piero Toffanin | f63da10342 | |
Piero Toffanin | f89545ace9 | |
Piero Toffanin | 1aa7e8c0b6 | |
Piero Toffanin | 0e59c26d88 | |
Piero Toffanin | 633cb0bb89 | |
Piero Toffanin | 29ab4fd892 | |
Piotr Maślanka | 37396921f9 | |
Piero Toffanin | 429f0c80ed | |
Piero Toffanin | 6ac36fc02e | |
Piero Toffanin | be7cf63493 | |
Piero Toffanin | 5f7c94777f | |
Piero Toffanin | 7a05d6ca27 | |
Piero Toffanin | bad894108d | |
Piero Toffanin | 18c2712456 | |
Piero Toffanin | d40dd396ff | |
Piero Toffanin | ec50ca9c49 | |
Piero Toffanin | 51c57e7e49 | |
Piero Toffanin | 0043e98b04 | |
Piero Toffanin | b571281b7b | |
Piero Toffanin | 1259d51faf | |
Piero Toffanin | 079b80dbe0 | |
Piero Toffanin | e2324e27ba | |
Piero Toffanin | 3effeb88d1 | |
Piero Toffanin | e9d598e020 | |
Piero Toffanin | 398943005a | |
Piero Toffanin | ad94452b20 | |
Piero Toffanin | 9c551ba902 | |
Piero Toffanin | a2df616cc9 | |
Piero Toffanin | 1ec387f9ae | |
Piero Toffanin | c0ab16f5ae | |
Piero Toffanin | 8e40842d56 | |
Piero Toffanin | 86fb8dc84b | |
Piero Toffanin | f325c8154c | |
Piero Toffanin | 4bc1c12839 | |
Piero Toffanin | 50b4fc601d | |
Piero Toffanin | 91854478b4 | |
Piero Toffanin | d77de0ced4 | |
Piero Toffanin | 717b6dcb6e | |
Piero Toffanin | 6acc9d4162 | |
Saijin-Naib | 69bf0be7a7 | |
Piero Toffanin | f0c495eedc | |
Saijin-Naib | 7d55532bde | |
Piero Toffanin | d258b31d05 | |
Piero Toffanin | c69b22e99c | |
Piero Toffanin | c27f78d17f | |
Piero Toffanin | 1dadae2cc3 | |
Piero Toffanin | e46ff4ee78 | |
Piero Toffanin | 5ef0e7c129 | |
Piero Toffanin | e914800fb1 | |
Piero Toffanin | ef6121b70a | |
Piero Toffanin | ecccceffcc | |
Piero Toffanin | 9fac4d0471 | |
Piero Toffanin | 7cd63f87df | |
Piero Toffanin | 6fd2533e81 | |
Piero Toffanin | 822172250f | |
Piero Toffanin | 50b258b9e8 | |
Piero Toffanin | 9cca1e40f2 | |
Piero Toffanin | 5f8d3bedd5 | |
Piero Toffanin | 22373321b4 | |
Piero Toffanin | 5e98c8bbc1 | |
Piero Toffanin | 3cd360d61d | |
Piero Toffanin | dcd9c9f0f3 | |
Piero Toffanin | 132d9c8447 | |
Piero Toffanin | 0d9d65969d | |
Piero Toffanin | 139aa44b0f | |
Piero Toffanin | 5a357677cb | |
Piero Toffanin | 5a5685e705 | |
Piero Toffanin | c72a78aad8 | |
Piero Toffanin | f7dfc98832 | |
Piero Toffanin | 4ba45dcd2f | |
Piero Toffanin | 3a1048eba4 | |
Piero Toffanin | 7bc5d1722d | |
Piero Toffanin | 8040c1763f | |
Piero Toffanin | 520277e898 | |
Piero Toffanin | c9f56c33eb | |
Piero Toffanin | 63a2a160fe | |
Piero Toffanin | c256a87920 | |
Piero Toffanin | 9be3e3ff4d | |
Piero Toffanin | 8cd9671904 | |
charnesp | 78a1fdd613 | |
Piero Toffanin | 59771b4962 | |
Piero Toffanin | 947ecb15d9 | |
Piero Toffanin | 08c0e45a28 | |
Piero Toffanin | 02fe3fe031 | |
Piero Toffanin | 1a34343a73 | |
Piero Toffanin | 64861119cc | |
Stephen Mather | 38434e9184 | |
Stephen Mather | dbcc656b13 | |
Piero Toffanin | f24c108b7e | |
Piero Toffanin | 7a210e2714 | |
Piero Toffanin | 9ca3386ff3 | |
Piero Toffanin | 3f75ad58a2 | |
Piero Toffanin | ae3f6b8785 | |
Piero Toffanin | 1b0f17042e | |
Stephen Mather | 0a49eaeeb5 | |
Piero Toffanin | d5b9342710 | |
Piero Toffanin | 88e38ed1ad | |
Piero Toffanin | 8c7e783816 | |
Saijin-Naib | 7da2cf3d44 | |
Saijin-Naib | a66c9a14b8 | |
Piero Toffanin | fbd61963d5 | |
Piero Toffanin | b420f43638 | |
Piero Toffanin | f075e152f3 | |
Piero Toffanin | bae16634c7 | |
Piero Toffanin | b373964d8e | |
Piero Toffanin | 9a250db821 | |
Piero Toffanin | b010c0f546 | |
Piero Toffanin | fea74f5bd3 | |
Saijin-Naib | 4a82c4171d | |
Piero Toffanin | 72ecbc10f5 | |
Piero Toffanin | 48f1c1ea7d | |
Piero Toffanin | b3e3d04713 | |
Ragocnai | 8b8484b41a | |
Piero Toffanin | 6574ffdeeb | |
Piero Toffanin | 2221e1451a | |
Piero Toffanin | 0ac43a0cd2 | |
Piero Toffanin | 66f5ff7222 | |
Piero Toffanin | 321c92691c | |
Piero Toffanin | 7c0b335f53 | |
Piero Toffanin | 9b04a74554 | |
Piero Toffanin | 42f544f561 | |
Piero Toffanin | 8bf66b35cb | |
Piero Toffanin | 2032d35580 | |
Piero Toffanin | 1a15daa39b | |
Piero Toffanin | 59bc813b92 | |
Piero Toffanin | e54ca14c4b | |
Piero Toffanin | 5f935a9ddc | |
Piero Toffanin | 8d83b0d046 | |
Piero Toffanin | 97711f5d89 | |
Piero Toffanin | fbd9986c68 | |
Piero Toffanin | 0273ccedee | |
Piero Toffanin | 4b138622ec | |
Piero Toffanin | d7ff945560 | |
Piero Toffanin | c42dd3e93d | |
Piero Toffanin | 9192668051 | |
Piero Toffanin | f3c167029c | |
Piero Toffanin | f2f73e97ea | |
Piero Toffanin | 65d441117b | |
Piero Toffanin | 2543107bf0 | |
Piero Toffanin | 6a3f7005f0 | |
Piero Toffanin | e675b53703 | |
Piero Toffanin | 874479231e | |
Piero Toffanin | acf243596c | |
Piero Toffanin | 409c70f03e | |
Piero Toffanin | 56162ec4a8 | |
Piero Toffanin | 41053c0d67 | |
Piero Toffanin | 59cdca704c | |
Piero Toffanin | e36ce32c0c | |
Piero Toffanin | eb193465e6 | |
Piero Toffanin | 0049e87c48 | |
Piero Toffanin | 5e3737a3f5 | |
Piero Toffanin | cf259688fe | |
Piero Toffanin | bb0dfc1243 | |
Piero Toffanin | 84774703d5 | |
Piero Toffanin | 26e333318c | |
Piero Toffanin | 330a09f133 | |
Piero Toffanin | 6c71b0732f | |
Piero Toffanin | f049525b6e | |
Piero Toffanin | 804cdb196b | |
Piero Toffanin | 42979c7e64 | |
Piero Toffanin | 79f9f415de | |
Piero Toffanin | a27f9b564d | |
Piero Toffanin | a722ee69e1 | |
Piero Toffanin | e39afc8b5b | |
Piero Toffanin | 6085acd0ad | |
Piero Toffanin | b3ed31300f | |
Piero Toffanin | d5538a1467 | |
Piero Toffanin | 28e89e7907 | |
Piero Toffanin | 208263c743 | |
Piero Toffanin | c821b34c0c | |
Piero Toffanin | c87c21dc03 | |
Piero Toffanin | 8e2577090a | |
Piero Toffanin | 602089dfed | |
Piero Toffanin | 80a85db3f9 | |
Piero Toffanin | 1f2d93fd0d | |
Piero Toffanin | 3b2a60737c | |
Piero Toffanin | 029ced4fd9 | |
Piero Toffanin | 3906876276 | |
Piero Toffanin | 265c9943ac | |
Piero Toffanin | 5a3f041c40 | |
Piero Toffanin | 4075b9f31c | |
Piero Toffanin | 7cbd014ee9 | |
Piero Toffanin | 4d5ea4f280 | |
Piero Toffanin | dac624ab79 | |
Piero Toffanin | 3345e72e84 | |
Piero Toffanin | e96481a861 | |
Piero Toffanin | ebcbbf3487 | |
Piero Toffanin | 3ca4629f08 | |
Piero Toffanin | 8b6d636abf | |
Piero Toffanin | a8077d0473 | |
Piero Toffanin | 1788b498bb | |
Piero Toffanin | d38a04c854 | |
Piero Toffanin | 697f6bb300 | |
Piero Toffanin | 29a346c6aa | |
Piero Toffanin | 72816ceaf5 | |
Piero Toffanin | 9c5ace63a8 | |
Piero Toffanin | 686e0c1d21 | |
Piero Toffanin | 69a381dd81 | |
Piero Toffanin | b2224e5b68 | |
Piero Toffanin | ad9def8a51 | |
Piero Toffanin | eaa8b8364d | |
Piero Toffanin | dc034dbf01 | |
Piero Toffanin | a04c475d63 | |
Piero Toffanin | 0b6a0e92af | |
Piero Toffanin | 3dabc60ea4 | |
Piero Toffanin | 43d67019da | |
Piero Toffanin | 880adaadf8 | |
Piero Toffanin | 8e49d99939 | |
Piero Toffanin | ae118f7782 | |
Piero Toffanin | 2dcd21719a | |
Piero Toffanin | bf34769ae0 | |
Piero Toffanin | e91a428eb9 | |
Piero Toffanin | 05f9e60496 | |
Piero Toffanin | 0574569a28 | |
Piero Toffanin | a482a57da2 | |
Piero Toffanin | b726465e47 | |
Piero Toffanin | bb9a2061c1 | |
Piero Toffanin | d91f4db580 | |
Piero Toffanin | 3f4a363479 | |
Piero Toffanin | 62f4ff9645 |
|
@ -0,0 +1,33 @@
|
|||
name: Issue Triage
|
||||
on:
|
||||
issues:
|
||||
types:
|
||||
- opened
|
||||
jobs:
|
||||
issue_triage:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: pierotofy/issuewhiz@v1
|
||||
with:
|
||||
ghToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
openAI: ${{ secrets.OPENAI_TOKEN }}
|
||||
filter: |
|
||||
- "#"
|
||||
variables: |
|
||||
- Q: "A question about using a software or seeking guidance on doing something?"
|
||||
- B: "Reporting an issue or a software bug?"
|
||||
- P: "Describes an issue with processing a set of images or a particular dataset?"
|
||||
- D: "Contains a link to a dataset or images?"
|
||||
- E: "Contains a suggestion for an improvement or a feature request?"
|
||||
- SC: "Describes an issue related to compiling or building source code?"
|
||||
logic: |
|
||||
- 'Q and (not B) and (not P) and (not E) and (not SC) and not (title_lowercase ~= ".*bug: .+")': [comment: "Could we move this conversation over to the forum at https://community.opendronemap.org? The forum is the right place to ask questions (we try to keep the GitHub issue tracker for feature requests and bugs only). Thank you!", close: true, stop: true]
|
||||
- "B and (not P) and (not E) and (not SC)": [label: "software fault", stop: true]
|
||||
- "P and D": [label: "possible software fault", stop: true]
|
||||
- "P and (not D) and (not SC) and (not E)": [comment: "Thanks for the report, but it looks like you didn't include a copy of your dataset for us to reproduce this issue? Please make sure to follow our [issue guidelines](https://github.com/OpenDroneMap/ODM/blob/master/docs/issue_template.md) :pray: ", close: true, stop: true]
|
||||
- "E": [label: enhancement, stop: true]
|
||||
- "SC": [label: "possible software fault"]
|
||||
|
||||
signature: "p.s. I'm just an automated script, not a human being."
|
|
@ -1,84 +0,0 @@
|
|||
name: Publish Docker and WSL Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# Use the repository information of the checked-out code to format docker tags
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: crazy-max/ghaction-docker-meta@v1
|
||||
with:
|
||||
images: opendronemap/odm
|
||||
tag-semver: |
|
||||
{{version}}
|
||||
- name: Build and push Docker image
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
file: ./portable.Dockerfile
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: |
|
||||
${{ steps.docker_meta.outputs.tags }}
|
||||
opendronemap/odm:latest
|
||||
- name: Export WSL image
|
||||
id: wsl_export
|
||||
run: |
|
||||
docker pull opendronemap/odm
|
||||
docker export $(docker create opendronemap/odm) --output odm-wsl-rootfs-amd64.tar.gz
|
||||
gzip odm-wsl-rootfs-amd64.tar.gz
|
||||
echo ::set-output name=amd64-rootfs::"odm-wsl-rootfs-amd64.tar.gz"
|
||||
# Convert tag into a GitHub Release if we're building a tag
|
||||
- name: Create Release
|
||||
if: github.event_name == 'tag'
|
||||
id: create_release
|
||||
uses: actions/create-release@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: Release ${{ github.ref }}
|
||||
draft: false
|
||||
prerelease: false
|
||||
# Upload the WSL image to the new Release if we're building a tag
|
||||
- name: Upload amd64 Release Asset
|
||||
if: github.event_name == 'tag'
|
||||
id: upload-amd64-wsl-rootfs
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
||||
asset_path: ./${{ steps.wsl_export.outputs.amd64-rootfs }}
|
||||
asset_name: ${{ steps.wsl_export.outputs.amd64-rootfs }}
|
||||
asset_content_type: application/gzip
|
||||
# Always archive the WSL rootfs
|
||||
- name: Upload amd64 Artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: wsl-rootfs
|
||||
path: ${{ steps.wsl_export.outputs.amd64-rootfs }}
|
||||
- name: Docker image digest and WSL rootfs download URL
|
||||
run: |
|
||||
echo "Docker image digest: ${{ steps.docker_build.outputs.digest }}"
|
||||
echo "WSL AMD64 rootfs URL: ${{ steps.upload-amd64-wsl-rootfs.browser_download_url }}"
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
name: Publish Docker GPU Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: self-hosted
|
||||
timeout-minutes: 2880
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push Docker image
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
file: ./gpu.Dockerfile
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
no-cache: true
|
||||
tags: opendronemap/odm:gpu
|
||||
# Trigger NodeODM build
|
||||
- name: Dispatch NodeODM Build Event
|
||||
id: nodeodm_dispatch
|
||||
run: |
|
||||
curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker-gpu.yaml/dispatches --data '{"ref": "master"}'
|
|
@ -0,0 +1,53 @@
|
|||
name: Publish Docker and WSL Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: self-hosted
|
||||
timeout-minutes: 2880
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
with:
|
||||
config-inline: |
|
||||
[worker.oci]
|
||||
max-parallelism = 1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# Use the repository information of the checked-out code to format docker tags
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: crazy-max/ghaction-docker-meta@v1
|
||||
with:
|
||||
images: opendronemap/odm
|
||||
tag-semver: |
|
||||
{{version}}
|
||||
- name: Build and push Docker image
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
file: ./portable.Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
no-cache: true
|
||||
tags: |
|
||||
${{ steps.docker_meta.outputs.tags }}
|
||||
opendronemap/odm:latest
|
||||
# Trigger NodeODM build
|
||||
- name: Dispatch NodeODM Build Event
|
||||
id: nodeodm_dispatch
|
||||
run: |
|
||||
curl -X POST -u "${{secrets.PAT_USERNAME}}:${{secrets.PAT_TOKEN}}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/OpenDroneMap/NodeODM/actions/workflows/publish-docker.yaml/dispatches --data '{"ref": "master"}'
|
|
@ -1,53 +0,0 @@
|
|||
name: Publish Snap
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v**
|
||||
|
||||
jobs:
|
||||
build-and-release:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
architecture:
|
||||
- amd64
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Build
|
||||
id: build
|
||||
uses: diddlesnaps/snapcraft-multiarch-action@v1
|
||||
with:
|
||||
snapcraft-args: --enable-experimental-package-repositories
|
||||
architecture: ${{ matrix.architecture }}
|
||||
- name: Review
|
||||
uses: diddlesnaps/snapcraft-review-tools-action@v1
|
||||
with:
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
isClassic: 'false'
|
||||
- name: Publish unstable builds to Edge
|
||||
if: github.ref == 'refs/heads/master'
|
||||
uses: snapcore/action-publish@v1
|
||||
with:
|
||||
store_login: ${{ secrets.STORE_LOGIN }}
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
release: edge
|
||||
- name: Publish tagged prerelease builds to Beta
|
||||
# These are identified by having a hyphen in the tag name, e.g.: v1.0.0-beta1
|
||||
if: startsWith(github.ref, 'refs/tags/v') && contains(github.ref, '-')
|
||||
uses: snapcore/action-publish@v1
|
||||
with:
|
||||
store_login: ${{ secrets.STORE_LOGIN }}
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
release: beta
|
||||
- name: Publish tagged stable or release-candidate builds to Candidate
|
||||
# These are identified by NOT having a hyphen in the tag name, OR having "-RC" or "-rc" in the tag name.
|
||||
if: startsWith(github.ref, 'refs/tags/v1') && ( ( ! contains(github.ref, '-') ) || contains(github.ref, '-RC') || contains(github.ref, '-rc') )
|
||||
uses: snapcore/action-publish@v1
|
||||
with:
|
||||
store_login: ${{ secrets.STORE_LOGIN }}
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
release: candidate
|
|
@ -0,0 +1,65 @@
|
|||
name: Publish Windows Setup
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: windows-2019
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.8.1'
|
||||
architecture: 'x64'
|
||||
- uses: Jimver/cuda-toolkit@v0.2.4
|
||||
id: cuda-toolkit
|
||||
with:
|
||||
cuda: '11.4.0'
|
||||
- name: Setup cmake
|
||||
uses: jwlawson/actions-setup-cmake@v1.13
|
||||
with:
|
||||
cmake-version: '3.24.x'
|
||||
- name: Extract code signing cert
|
||||
id: code_sign
|
||||
uses: timheuer/base64-to-file@v1
|
||||
with:
|
||||
fileName: 'comodo.pfx'
|
||||
encodedString: ${{ secrets.CODE_SIGNING_CERT }}
|
||||
- name: Install venv
|
||||
run: |
|
||||
python -m pip install virtualenv
|
||||
- name: Build sources
|
||||
run: |
|
||||
python configure.py build
|
||||
- name: Free up space
|
||||
run: |
|
||||
rmdir SuperBuild\download /s /q
|
||||
rmdir SuperBuild\build /s /q
|
||||
shell: cmd
|
||||
- name: Create setup
|
||||
env:
|
||||
CODE_SIGN_CERT_PATH: ${{ steps.code_sign.outputs.filePath }}
|
||||
run: |
|
||||
python configure.py dist --code-sign-cert-path $env:CODE_SIGN_CERT_PATH
|
||||
- name: Upload Setup File
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: Setup
|
||||
path: dist\*.exe
|
||||
- name: Upload Setup to Release
|
||||
uses: svenstaro/upload-release-action@v2
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
file: dist/*.exe
|
||||
file_glob: true
|
||||
tag: ${{ github.ref }}
|
||||
overwrite: true
|
||||
|
|
@ -9,6 +9,10 @@ jobs:
|
|||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set Swap Space
|
||||
uses: pierotofy/set-swap-space@master
|
||||
with:
|
||||
swap-size-gb: 12
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
|
@ -20,23 +24,64 @@ jobs:
|
|||
platforms: linux/amd64
|
||||
push: false
|
||||
|
||||
snapcraft:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
architecture:
|
||||
- amd64
|
||||
# snapcraft:
|
||||
# runs-on: ubuntu-latest
|
||||
# strategy:
|
||||
# matrix:
|
||||
# architecture:
|
||||
# - amd64
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v2
|
||||
# - name: Set Swap Space
|
||||
# uses: pierotofy/set-swap-space@master
|
||||
# with:
|
||||
# swap-size-gb: 12
|
||||
# - name: Build
|
||||
# id: build
|
||||
# uses: diddlesnaps/snapcraft-multiarch-action@v1
|
||||
# with:
|
||||
# architecture: ${{ matrix.architecture }}
|
||||
# - name: Review
|
||||
# uses: diddlesnaps/snapcraft-review-tools-action@v1
|
||||
# with:
|
||||
# snap: ${{ steps.build.outputs.snap }}
|
||||
# isClassic: 'false'
|
||||
|
||||
windows:
|
||||
runs-on: windows-2019
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Build
|
||||
id: build
|
||||
uses: diddlesnaps/snapcraft-multiarch-action@v1
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
snapcraft-args: --enable-experimental-package-repositories
|
||||
architecture: ${{ matrix.architecture }}
|
||||
- name: Review
|
||||
uses: diddlesnaps/snapcraft-review-tools-action@v1
|
||||
python-version: '3.8.1'
|
||||
architecture: 'x64'
|
||||
- uses: Jimver/cuda-toolkit@v0.2.4
|
||||
id: cuda-toolkit
|
||||
with:
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
isClassic: 'false'
|
||||
cuda: '11.4.0'
|
||||
- name: Setup cmake
|
||||
uses: jwlawson/actions-setup-cmake@v1.13
|
||||
with:
|
||||
cmake-version: '3.24.x'
|
||||
- name: Install venv
|
||||
run: |
|
||||
python -m pip install virtualenv
|
||||
- name: Build sources
|
||||
run: |
|
||||
python configure.py build
|
||||
- name: Free up space
|
||||
run: |
|
||||
rmdir SuperBuild\download /s /q
|
||||
rmdir SuperBuild\build /s /q
|
||||
shell: cmd
|
||||
- name: Create setup
|
||||
run: |
|
||||
python configure.py dist
|
||||
- name: Upload Setup File
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: Setup
|
||||
path: dist\*.exe
|
||||
|
|
|
@ -27,3 +27,11 @@ settings.yaml
|
|||
.setupdevenv
|
||||
__pycache__
|
||||
*.snap
|
||||
storage/
|
||||
|
||||
|
||||
vcpkg/
|
||||
venv/
|
||||
python38/
|
||||
dist/
|
||||
innosetup/
|
|
@ -1,18 +0,0 @@
|
|||
cmake_minimum_required(VERSION 2.8)
|
||||
|
||||
project(OpenDroneMap C CXX)
|
||||
|
||||
# TODO(edgar): add option in order to point to CMAKE_PREFIX_PATH
|
||||
# if we want to build SuperBuild in an external directory.
|
||||
# It is assumed that SuperBuild have been compiled.
|
||||
|
||||
# Set third party libs location
|
||||
set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/SuperBuild/install")
|
||||
|
||||
# move binaries to the same bin directory
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
|
||||
|
||||
option(ODM_BUILD_SLAM "Build SLAM module" OFF)
|
||||
|
||||
# Add ODM sub-modules
|
||||
add_subdirectory(modules)
|
49
Dockerfile
49
Dockerfile
|
@ -1,8 +1,8 @@
|
|||
FROM ubuntu:20.04 AS builder
|
||||
FROM ubuntu:21.04 AS builder
|
||||
|
||||
# Env variables
|
||||
ENV DEBIAN_FRONTEND=noninteractive \
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/src/opensfm" \
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
|
||||
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
|
||||
|
||||
# Prepare directories
|
||||
|
@ -11,46 +11,26 @@ WORKDIR /code
|
|||
# Copy everything
|
||||
COPY . ./
|
||||
|
||||
# Use old-releases for 21.04
|
||||
RUN printf "deb http://old-releases.ubuntu.com/ubuntu/ hirsute main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-backports main restricted universe multiverse" > /etc/apt/sources.list
|
||||
|
||||
# Run the build
|
||||
RUN bash configure.sh install
|
||||
|
||||
# Clean Superbuild
|
||||
RUN rm -rf \
|
||||
/code/SuperBuild/build/opencv \
|
||||
/code/SuperBuild/download \
|
||||
/code/SuperBuild/src/ceres \
|
||||
/code/SuperBuild/src/untwine \
|
||||
/code/SuperBuild/src/gflags \
|
||||
/code/SuperBuild/src/hexer \
|
||||
/code/SuperBuild/src/lastools \
|
||||
/code/SuperBuild/src/laszip \
|
||||
/code/SuperBuild/src/mvstexturing \
|
||||
/code/SuperBuild/src/opencv \
|
||||
/code/SuperBuild/src/opengv \
|
||||
/code/SuperBuild/src/pcl \
|
||||
/code/SuperBuild/src/pdal \
|
||||
/code/SuperBuild/src/openmvs \
|
||||
/code/SuperBuild/build/openmvs \
|
||||
/code/SuperBuild/src/vcg \
|
||||
/code/SuperBuild/src/zstd
|
||||
|
||||
# find in /code and delete...
|
||||
RUN find /code \
|
||||
# ...*static* libraries...
|
||||
-type f -name "*.a" -delete \
|
||||
# ...and intermediate object files
|
||||
-or -type f -name "*.o" -delete
|
||||
RUN bash configure.sh clean
|
||||
|
||||
### END Builder
|
||||
|
||||
### Use a second image for the final asset to reduce the number and
|
||||
# size of the layers.
|
||||
FROM ubuntu:20.04
|
||||
FROM ubuntu:21.04
|
||||
|
||||
# Env variables
|
||||
ENV DEBIAN_FRONTEND=noninteractive \
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/src/opensfm" \
|
||||
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
|
||||
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" \
|
||||
PDAL_DRIVER_PATH="/code/SuperBuild/install/bin"
|
||||
|
||||
WORKDIR /code
|
||||
|
||||
|
@ -60,11 +40,16 @@ COPY --from=builder /code /code
|
|||
# Copy the Python libraries installed via pip from the builder
|
||||
COPY --from=builder /usr/local /usr/local
|
||||
|
||||
# Use old-releases for 21.04
|
||||
RUN printf "deb http://old-releases.ubuntu.com/ubuntu/ hirsute main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates main restricted\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates universe\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-updates multiverse\ndeb http://old-releases.ubuntu.com/ubuntu/ hirsute-backports main restricted universe multiverse" > /etc/apt/sources.list
|
||||
|
||||
# Install shared libraries that we depend on via APT, but *not*
|
||||
# the -dev packages to save space!
|
||||
# Also run a smoke test on ODM and OpenSfM
|
||||
RUN bash configure.sh installruntimedepsonly \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
|
||||
&& bash run.sh --help \
|
||||
&& bash -c "eval $(python3 /code/opendm/context.py) && python3 -c 'from opensfm import io, pymap'"
|
||||
# Entry point
|
||||
ENTRYPOINT ["python3", "/code/run.py"]
|
||||
|
|
176
README.md
176
README.md
|
@ -15,7 +15,7 @@ If you would rather not type commands in a shell and are looking for a friendly
|
|||
|
||||
## Quickstart
|
||||
|
||||
The easiest way to run ODM is via docker. To install docker, see [docs.docker.com](https://docs.docker.com). Once you have docker installed and [working](https://docs.docker.com/get-started/#test-docker-installation), you can run ODM by placing some images (JPEGs or TIFFs) in a folder named “images” (for example `C:\Users\youruser\datasets\project\images` or `/home/youruser/datasets/project/images`) and simply run from a Command Prompt / Terminal:
|
||||
The easiest way to run ODM on is via docker. To install docker, see [docs.docker.com](https://docs.docker.com). Once you have docker installed and [working](https://docs.docker.com/get-started/#test-docker-installation), you can run ODM by placing some images (JPEGs or TIFFs) in a folder named “images” (for example `C:\Users\youruser\datasets\project\images` or `/home/youruser/datasets/project/images`) and simply run from a Command Prompt / Terminal:
|
||||
|
||||
```bash
|
||||
# Windows
|
||||
|
@ -25,7 +25,7 @@ docker run -ti --rm -v c:/Users/youruser/datasets:/datasets opendronemap/odm --p
|
|||
docker run -ti --rm -v /home/youruser/datasets:/datasets opendronemap/odm --project-path /datasets project
|
||||
```
|
||||
|
||||
You can pass [additional parameters](https://docs.opendronemap.org/arguments.html) by appending them to the command:
|
||||
You can pass [additional parameters](https://docs.opendronemap.org/arguments/) by appending them to the command:
|
||||
|
||||
```bash
|
||||
docker run -ti --rm -v /datasets:/datasets opendronemap/odm --project-path /datasets project [--additional --parameters --here]
|
||||
|
@ -73,96 +73,98 @@ See http://docs.opendronemap.org for tutorials and more guides.
|
|||
|
||||
## Forum
|
||||
|
||||
We have a vibrant [community forum](https://community.opendronemap.org/). You can [search it](https://community.opendronemap.org/search?expanded=true) for issues you might be having with ODM and you can post questions there. We encourage users of ODM to partecipate in the forum and to engage with fellow drone mapping users.
|
||||
We have a vibrant [community forum](https://community.opendronemap.org/). You can [search it](https://community.opendronemap.org/search?expanded=true) for issues you might be having with ODM and you can post questions there. We encourage users of ODM to participate in the forum and to engage with fellow drone mapping users.
|
||||
|
||||
## Snap Package
|
||||
## Windows Setup
|
||||
|
||||
ODM is now available as a Snap Package from the Snap Store. To install you may use the Snap Store (available itself as a Snap Package) or the command line:
|
||||
ODM can be installed natively on Windows. Just download the latest setup from the [releases](https://github.com/OpenDroneMap/ODM/releases) page. After opening the ODM Console you can process datasets by typing:
|
||||
|
||||
```bash
|
||||
sudo snap install opendronemap
|
||||
run C:\Users\youruser\datasets\project [--additional --parameters --here]
|
||||
```
|
||||
|
||||
To run, you will need a terminal window into which you can type:
|
||||
## GPU Acceleration
|
||||
|
||||
ODM has support for doing SIFT feature extraction on a GPU, which is about 2x faster than the CPU on a typical consumer laptop. To use this feature, you need to use the `opendronemap/odm:gpu` docker image instead of `opendronemap/odm` and you need to pass the `--gpus all` flag:
|
||||
|
||||
```
|
||||
docker run -ti --rm -v c:/Users/youruser/datasets:/datasets --gpus all opendronemap/odm:gpu --project-path /datasets project
|
||||
```
|
||||
|
||||
When you run ODM, if the GPU is recognized, in the first few lines of output you should see:
|
||||
|
||||
```
|
||||
[INFO] Writing exif overrides
|
||||
[INFO] Maximum photo dimensions: 4000px
|
||||
[INFO] Found GPU device: Intel(R) OpenCL HD Graphics
|
||||
[INFO] Using GPU for extracting SIFT features
|
||||
```
|
||||
|
||||
The SIFT GPU implementation is CUDA-based, so should work with most NVIDIA graphics cards of the GTX 9xx Generation or newer.
|
||||
|
||||
If you have an NVIDIA card, you can test that docker is recognizing the GPU by running:
|
||||
|
||||
```
|
||||
docker run --rm --gpus all nvidia/cuda:10.0-base nvidia-smi
|
||||
```
|
||||
|
||||
If you see an output that looks like this:
|
||||
|
||||
```
|
||||
Fri Jul 24 18:51:55 2020
|
||||
+-----------------------------------------------------------------------------+
|
||||
| NVIDIA-SMI 440.82 Driver Version: 440.82 CUDA Version: 10.2 |
|
||||
|-------------------------------+----------------------+----------------------+
|
||||
| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
|
||||
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
|
||||
```
|
||||
|
||||
You're in good shape!
|
||||
|
||||
See https://github.com/NVIDIA/nvidia-docker and https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#docker for information on docker/NVIDIA setup.
|
||||
|
||||
## Native Install (Ubuntu 21.04)
|
||||
|
||||
You can run ODM natively on Ubuntu 21.04 (although we don't recommend it):
|
||||
|
||||
```bash
|
||||
opendronemap
|
||||
|
||||
# or
|
||||
|
||||
snap run opendronemap
|
||||
|
||||
# or
|
||||
|
||||
/snap/bin/opendronemap
|
||||
git clone https://github.com/OpenDroneMap/ODM
|
||||
cd ODM
|
||||
bash configure.sh install
|
||||
```
|
||||
|
||||
Snap packages will be kept up-to-date automatically, so you don't need to update ODM manually.
|
||||
You can then process datasets with `./run.sh /datasets/odm_data_aukerman`
|
||||
|
||||
## WSL or WSL2 Install
|
||||
## Native Install (MacOS)
|
||||
|
||||
Note: This requires that you have installed WSL already by following [the instructions on Microsoft's Website](https://docs.microsoft.com/en-us/windows/wsl/install-win10).
|
||||
You can run ODM natively on Intel/ARM MacOS.
|
||||
|
||||
You can run ODM via WSL or WSL2 by downloading the `rootfs.tar.gz` file from [the releases page on GitHub](https://github.com/OpenDroneMap/ODM/releases). Once you have the file saved to your `Downloads` folder in Windows, open a PowerShell or CMD window by right-clicking the Flag Menu (bottom left by default) and selecting "Windows PowerShell", or alternatively by using the [Windows Terminal from the Windows Store](https://www.microsoft.com/store/productId/9N0DX20HK701).
|
||||
First install:
|
||||
|
||||
Inside a PowerShell window, or Windows Terminal running PowerShell, type the following:
|
||||
* Xcode 13 (not 14, there's currently a bug)
|
||||
* [Homebrew](https://docs.brew.sh/Installation)
|
||||
|
||||
```powershell
|
||||
# PowerShell
|
||||
wsl.exe --import ODM $env:APPDATA\ODM C:\path\to\your\Downloads\rootfs.tar.gz
|
||||
```
|
||||
|
||||
Alternatively if you're using `CMD.exe` or the `CMD` support in Windows Terminal type:
|
||||
|
||||
```cmd
|
||||
# CMD
|
||||
wsl.exe --import ODM %APPDATA%\ODM C:\path\to\your\Downloads\rootfs.tar.gz
|
||||
```
|
||||
|
||||
In either case, make sure you replace `C:\path\to\your\Downloads\rootfs.tar.gz` with the actual path to your `rootfs.tar.gz` file.
|
||||
|
||||
This will save a new Hard Disk image to your Windows `AppData` folder at `C:\Users\username\AppData\roaming\ODM` (where `username` is your Username in Windows), and will set-up a new WSL "distro" called `ODM`.
|
||||
|
||||
You may start the ODM distro by using the relevant option in the Windows Terminal (from the Windows Store) or by executing `wsl.exe -d ODM` in a PowerShell or CMD window.
|
||||
|
||||
ODM is installed to the distro's `/code` directory. You may execute it with:
|
||||
Then Run:
|
||||
|
||||
```bash
|
||||
/code/run.sh
|
||||
git clone https://github.com/OpenDroneMap/ODM
|
||||
cd ODM
|
||||
bash configure_macos.sh install
|
||||
```
|
||||
|
||||
### Updating ODM in WSL
|
||||
You can then process datasets with `./run.sh /datasets/odm_data_aukerman`
|
||||
|
||||
The easiest way to update the installation of ODM is to download the new `rootfs.tar.gz` file and import it as another distro. You may then unregister the original instance the same way you delete ODM from WSL (see next heading).
|
||||
|
||||
### Deleting an ODM in WSL instance
|
||||
|
||||
```cmd
|
||||
wsl.exe --unregister ODM
|
||||
```
|
||||
|
||||
Finally you'll want to delete the files by using your Windows File Manager (Explorer) to navigate to `%APPDATA%`, find the `ODM` directory, and delete it by dragging it to the recycle bin. To permanently delete it empty the recycle bin.
|
||||
|
||||
If you have installed to a different directory by changing the `--import` command you ran to install you must use that directory name to delete the correct files. This is likely the case if you have multiple ODM installations or are updating an already-installed installation.
|
||||
|
||||
## Native Install (Ubuntu 20.04)
|
||||
|
||||
You can run ODM natively on Ubuntu 20.04 LTS (although we don't recommend it):
|
||||
|
||||
1. Download the source from [here](https://github.com/OpenDroneMap/ODM/archive/master.zip)
|
||||
2. Run `bash configure.sh install`
|
||||
3. Download a sample dataset from [here](https://github.com/OpenDroneMap/odm_data_aukerman/archive/master.zip) (about 550MB) and extract it in `/datasets/aukerman`
|
||||
4. Run `./run.sh --project-path /datasets odm_data_aukerman`
|
||||
This could be improved in the future. [Helps us create a Homebrew formula](https://github.com/OpenDroneMap/ODM/issues/1531).
|
||||
|
||||
### Updating a native installation
|
||||
|
||||
When updating to a newer version of ODM, it is recommended that you run
|
||||
When updating to a newer version of native ODM, it is recommended that you run:
|
||||
|
||||
bash configure.sh reinstall
|
||||
`bash configure.sh reinstall`
|
||||
|
||||
to ensure all the dependent packages and modules get updated.
|
||||
|
||||
### Build From Source
|
||||
### Build Docker Images From Source
|
||||
|
||||
If you want to rebuild your own docker image (if you have changed the source code, for example), from the ODM folder you can type:
|
||||
|
||||
|
@ -187,10 +189,16 @@ Experimental flags need to be enabled in Docker to use the ```--squash``` flag.
|
|||
|
||||
After this, you must restart docker.
|
||||
|
||||
## Video Support
|
||||
|
||||
Starting from version 3.0.4, ODM can automatically extract images from video files (.mp4, .mov, .lrv, .ts). Just place one or more video files into the `images` folder and run the program as usual. Subtitles files (.srt) with GPS information are also supported. Place .srt files in the `images` folder, making sure that the filenames match. For example, `my_video.mp4` ==> `my_video.srt` (case-sensitive).
|
||||
|
||||
## Developers
|
||||
|
||||
Help improve our software! We welcome contributions from everyone, whether to add new features, improve speed, fix existing bugs or add support for more cameras. Check our [code of conduct](https://github.com/OpenDroneMap/documents/blob/master/CONDUCT.md), the [contributing guidelines](https://github.com/OpenDroneMap/documents/blob/master/CONTRIBUTING.md) and [how decisions are made](https://github.com/OpenDroneMap/documents/blob/master/GOVERNANCE.md#how-decisions-are-made).
|
||||
|
||||
|
||||
### Installation and first run
|
||||
For Linux users, the easiest way to modify the software is to make sure docker is installed, clone the repository and then run from a shell:
|
||||
|
||||
```bash
|
||||
|
@ -209,6 +217,18 @@ You can now make changes to the ODM source. When you are ready to test the chang
|
|||
```bash
|
||||
(odmdev) [user:/code] master+* ± ./run.sh --project-path /datasets mydataset
|
||||
```
|
||||
### Stop dev container
|
||||
```bash
|
||||
docker stop odmdev
|
||||
```
|
||||
### To come back to dev environement
|
||||
change your_username to your username
|
||||
```bash
|
||||
docker start odmdev
|
||||
docker exec -ti odmdev bash
|
||||
su your_username
|
||||
```
|
||||
|
||||
|
||||
If you have questions, join the developer's chat at https://community.opendronemap.org/c/developers-chat/21
|
||||
|
||||
|
@ -216,6 +236,30 @@ If you have questions, join the developer's chat at https://community.opendronem
|
|||
2. Submit a pull request with detailed changes and test results
|
||||
3. Have fun!
|
||||
|
||||
### Citation
|
||||
### Troubleshooting
|
||||
The dev environment makes use of `opendronemap/nodeodm` by default. You may want to run
|
||||
`docker pull opendronemap/nodeodm` before running `./start-dev-env.sh` to avoid using an old cached version.
|
||||
|
||||
In order to make a clean build, remove `~/.odm-dev-home` and `ODM/.setupdevenv`.
|
||||
|
||||
## Credits
|
||||
|
||||
ODM makes use of [several libraries](https://github.com/OpenDroneMap/ODM/blob/master/snap/snapcraft.yaml#L36) and other awesome open source projects to perform its tasks. Among them we'd like to highlight:
|
||||
|
||||
- [OpenSfM](https://github.com/mapillary/OpenSfM)
|
||||
- [OpenMVS](https://github.com/cdcseacave/openMVS/)
|
||||
- [PDAL](https://github.com/PDAL/PDAL)
|
||||
- [Entwine](https://entwine.io/)
|
||||
- [MVS Texturing](https://github.com/nmoehrle/mvs-texturing)
|
||||
- [GRASS GIS](https://grass.osgeo.org/)
|
||||
- [GDAL](https://gdal.org/)
|
||||
- [PoissonRecon](https://github.com/mkazhdan/PoissonRecon)
|
||||
|
||||
|
||||
## Citation
|
||||
|
||||
> *OpenDroneMap Authors* ODM - A command line toolkit to generate maps, point clouds, 3D models and DEMs from drone, balloon or kite images. **OpenDroneMap/ODM GitHub Page** 2020; [https://github.com/OpenDroneMap/ODM](https://github.com/OpenDroneMap/ODM)
|
||||
|
||||
## Trademark
|
||||
|
||||
See [Trademark Guidelines](https://github.com/OpenDroneMap/documents/blob/master/TRADEMARK.md)
|
||||
|
|
|
@ -2,22 +2,51 @@ cmake_minimum_required(VERSION 3.1)
|
|||
|
||||
project(ODM-SuperBuild)
|
||||
|
||||
# Setup SuperBuild root location
|
||||
set(SB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
|
||||
if (NOT CMAKE_BUILD_TYPE)
|
||||
message(STATUS "No build type selected, default to Release")
|
||||
set(CMAKE_BUILD_TYPE "Release")
|
||||
endif()
|
||||
|
||||
# Setup SuperBuild root location
|
||||
set(SB_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
if (APPLE)
|
||||
find_program(HOMEBREW_EXECUTABLE brew)
|
||||
mark_as_advanced(FORCE HOMEBREW_EXECUTABLE)
|
||||
if (HOMEBREW_EXECUTABLE)
|
||||
# Detected a Homebrew install, query for its install prefix.
|
||||
execute_process(COMMAND ${HOMEBREW_EXECUTABLE} --prefix
|
||||
OUTPUT_VARIABLE HOMEBREW_INSTALL_PREFIX
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
message(STATUS "Detected Homebrew with install prefix: "
|
||||
"${HOMEBREW_INSTALL_PREFIX}, adding to CMake search paths.")
|
||||
list(APPEND CMAKE_PREFIX_PATH "${HOMEBREW_INSTALL_PREFIX}")
|
||||
endif()
|
||||
|
||||
# Path to additional CMake modules
|
||||
set(CMAKE_MODULE_PATH ${SB_ROOT_DIR}/cmake)
|
||||
# Use homebrew's clang compiler since Apple
|
||||
# does not allow us to link to libomp
|
||||
set(CXX_PATH ${HOMEBREW_INSTALL_PREFIX}/bin/c++-12)
|
||||
set(APPLE_CMAKE_ARGS "")
|
||||
|
||||
message("Checking for ${CXX_PATH}...")
|
||||
if(EXISTS "${CXX_PATH}")
|
||||
message("Found Homebrew's C++ compiler: ${CXX_PATH}")
|
||||
set(CMAKE_CXX_COMPILER ${CXX_PATH})
|
||||
list(APPEND APPLE_CMAKE_ARGS "-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}")
|
||||
endif()
|
||||
|
||||
include(ExternalProject)
|
||||
include(ExternalProject-Setup)
|
||||
|
||||
option(ODM_BUILD_SLAM "Build SLAM module" OFF)
|
||||
set(C_PATH ${HOMEBREW_INSTALL_PREFIX}/bin/gcc-12)
|
||||
message("Checking for ${C_PATH}...")
|
||||
if(EXISTS "${C_PATH}")
|
||||
message("Found Homebrew's C compiler: ${C_PATH}")
|
||||
set(CMAKE_C_COMPILER ${C_PATH})
|
||||
list(APPEND APPLE_CMAKE_ARGS "-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}")
|
||||
endif()
|
||||
|
||||
if (NOT APPLE_CMAKE_ARGS)
|
||||
message(WARNING "Cannot find Homebrew's compiler, compilation might fail...")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
################################
|
||||
# Setup SuperBuild directories #
|
||||
|
@ -35,6 +64,7 @@ message(STATUS "SuperBuild files will be downloaded to: ${SB_DOWNLOAD_DIR}")
|
|||
set(SB_SOURCE_DIR "${SB_ROOT_DIR}/src"
|
||||
CACHE PATH "Location where source tar-balls are (will be).")
|
||||
mark_as_advanced(SB_SOURCE_DIR)
|
||||
set(SB_BUILD_DIR "${SB_ROOT_DIR}/build")
|
||||
|
||||
message(STATUS "SuperBuild source files will be extracted to: ${SB_SOURCE_DIR}")
|
||||
|
||||
|
@ -54,6 +84,47 @@ mark_as_advanced(SB_BINARY_DIR)
|
|||
|
||||
message(STATUS "SuperBuild binary files will be located to: ${SB_BINARY_DIR}")
|
||||
|
||||
if (WIN32)
|
||||
if (NOT DEFINED CMAKE_TOOLCHAIN_FILE)
|
||||
message(FATAL_ERROR "CMAKE_TOOLCHAIN_FILE not set. You need to set it to the path of vcpkg.cmake")
|
||||
endif()
|
||||
get_filename_component(CMAKE_TOOLCHAIN_DIR ${CMAKE_TOOLCHAIN_FILE} DIRECTORY)
|
||||
get_filename_component(VCPKG_ROOT "${CMAKE_TOOLCHAIN_DIR}/../../" ABSOLUTE)
|
||||
set(WIN32_CMAKE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}")
|
||||
set(PYTHON_HOME "${SB_ROOT_DIR}/../venv")
|
||||
set(PYTHON_EXE_PATH "${PYTHON_HOME}/Scripts/python")
|
||||
|
||||
# Use the GDAL version that comes with pip
|
||||
set(GDAL_ROOT "${PYTHON_HOME}/Lib/site-packages/osgeo")
|
||||
set(GDAL_LIBRARY "${GDAL_ROOT}/lib/gdal_i.lib")
|
||||
set(GDAL_INCLUDE_DIR "${GDAL_ROOT}/include/gdal")
|
||||
|
||||
# Also download missing headers :/
|
||||
if (NOT EXISTS "${GDAL_INCLUDE_DIR}/ogrsf_frmts.h")
|
||||
file(DOWNLOAD "https://raw.githubusercontent.com/OSGeo/gdal/release/3.2/gdal/ogr/ogrsf_frmts/ogrsf_frmts.h" "${GDAL_INCLUDE_DIR}/ogrsf_frmts.h")
|
||||
endif()
|
||||
|
||||
message("Copying VCPKG DLLs...")
|
||||
file(GLOB COPY_DLLS "${VCPKG_ROOT}/installed/x64-windows/bin/*.dll")
|
||||
file(COPY ${COPY_DLLS} DESTINATION "${SB_INSTALL_DIR}/bin")
|
||||
|
||||
message("Copying CUDA DLLs...")
|
||||
file(GLOB CUDA_DLLS "$ENV{CUDA_PATH}/bin/cudart64*.dll")
|
||||
file(COPY ${CUDA_DLLS} DESTINATION "${SB_INSTALL_DIR}/bin")
|
||||
|
||||
set(WIN32_GDAL_ARGS -DGDAL_FOUND=TRUE -DGDAL_LIBRARY=${GDAL_LIBRARY} -DGDAL_INCLUDE_DIR=${GDAL_INCLUDE_DIR})
|
||||
elseif(APPLE)
|
||||
set(PYTHON_HOME "${SB_ROOT_DIR}/../venv")
|
||||
set(PYTHON_EXE_PATH "${PYTHON_HOME}/bin/python")
|
||||
else()
|
||||
set(PYTHON_EXE_PATH "/usr/bin/python3")
|
||||
endif()
|
||||
|
||||
# Path to additional CMake modules
|
||||
set(CMAKE_MODULE_PATH ${SB_ROOT_DIR}/cmake)
|
||||
|
||||
include(ExternalProject)
|
||||
include(ExternalProject-Setup)
|
||||
|
||||
#########################################
|
||||
# Download and install third party libs #
|
||||
|
@ -68,19 +139,10 @@ option(ODM_BUILD_OpenCV "Force to build OpenCV library" OFF)
|
|||
SETUP_EXTERNAL_PROJECT(OpenCV ${ODM_OpenCV_Version} ${ODM_BUILD_OpenCV})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Point Cloud Library (PCL)
|
||||
#
|
||||
set(ODM_PCL_Version 1.8.0)
|
||||
option(ODM_BUILD_PCL "Force to build PCL library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(PCL ${ODM_PCL_Version} ${ODM_BUILD_PCL})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------------------------
|
||||
# Google Flags library (GFlags)
|
||||
#
|
||||
set(ODM_GFlags_Version 2.1.2)
|
||||
set(ODM_GFlags_Version 2.2.2)
|
||||
option(ODM_BUILD_GFlags "Force to build GFlags library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(GFlags ${ODM_GFlags_Version} ${ODM_BUILD_GFlags})
|
||||
|
@ -89,7 +151,7 @@ SETUP_EXTERNAL_PROJECT(GFlags ${ODM_GFlags_Version} ${ODM_BUILD_GFlags})
|
|||
# ---------------------------------------------------------------------------------------------
|
||||
# Ceres Solver
|
||||
#
|
||||
set(ODM_Ceres_Version 1.10.0)
|
||||
set(ODM_Ceres_Version 2.0.0)
|
||||
option(ODM_BUILD_Ceres "Force to build Ceres library" OFF)
|
||||
|
||||
SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres})
|
||||
|
@ -106,11 +168,28 @@ SETUP_EXTERNAL_PROJECT(Hexer 1.4 ON)
|
|||
|
||||
set(custom_libs OpenSfM
|
||||
LASzip
|
||||
Zstd
|
||||
PDAL
|
||||
Untwine
|
||||
PDALPython
|
||||
Untwine
|
||||
Entwine
|
||||
MvsTexturing
|
||||
OpenMVS
|
||||
OpenMVS
|
||||
FPCFilter
|
||||
PyPopsift
|
||||
Obj2Tiles
|
||||
OpenPointClass
|
||||
ExifTool
|
||||
RenderDEM
|
||||
)
|
||||
|
||||
externalproject_add(mve
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/mve.git
|
||||
GIT_TAG 290
|
||||
UPDATE_COMMAND ""
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/mve
|
||||
CMAKE_ARGS ${WIN32_CMAKE_ARGS} ${APPLE_CMAKE_ARGS}
|
||||
BUILD_IN_SOURCE 1
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
foreach(lib ${custom_libs})
|
||||
|
@ -120,44 +199,82 @@ endforeach()
|
|||
include(ProcessorCount)
|
||||
ProcessorCount(nproc)
|
||||
|
||||
if (WIN32)
|
||||
set (POISSON_BUILD_CMD ${CMAKE_MAKE_PROGRAM} ${SB_SOURCE_DIR}/PoissonRecon/PoissonRecon.vcxproj /p:configuration=${CMAKE_BUILD_TYPE} /p:Platform=x64 /p:PlatformToolset=${CMAKE_VS_PLATFORM_TOOLSET} /p:WindowsTargetPlatformVersion=${CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION})
|
||||
set (POISSON_BIN_PATH "x64/${CMAKE_BUILD_TYPE}/PoissonRecon.exe")
|
||||
else()
|
||||
set (POISSON_BUILD_CMD make -j${nproc} poissonrecon)
|
||||
set (POISSON_BIN_PATH "Linux/PoissonRecon")
|
||||
if (APPLE)
|
||||
set(POISSON_BUILD_CMD COMPILER=${CMAKE_CXX_COMPILER} make -j${nproc} -f Makefile.macos poissonrecon)
|
||||
endif()
|
||||
endif()
|
||||
externalproject_add(poissonrecon
|
||||
GIT_REPOSITORY https://github.com/mkazhdan/PoissonRecon.git
|
||||
GIT_TAG ce5005ae3094d902d551a65a8b3131e06f45e7cf
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/PoissonRecon.git
|
||||
GIT_TAG 290
|
||||
PREFIX ${SB_BINARY_DIR}/PoissonRecon
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/PoissonRecon
|
||||
UPDATE_COMMAND ""
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND make -j${nproc} poissonrecon
|
||||
INSTALL_COMMAND ""
|
||||
BUILD_COMMAND ${POISSON_BUILD_CMD}
|
||||
INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${SB_SOURCE_DIR}/PoissonRecon/Bin/${POISSON_BIN_PATH} ${SB_INSTALL_DIR}/bin
|
||||
)
|
||||
|
||||
externalproject_add(dem2mesh
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/dem2mesh.git
|
||||
GIT_TAG master
|
||||
GIT_TAG 334
|
||||
PREFIX ${SB_BINARY_DIR}/dem2mesh
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/dem2mesh
|
||||
UPDATE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
INSTALL_COMMAND ""
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_GDAL_ARGS}
|
||||
${APPLE_CMAKE_ARGS}
|
||||
)
|
||||
|
||||
externalproject_add(dem2points
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/dem2points.git
|
||||
GIT_TAG master
|
||||
PREFIX ${SB_BINARY_DIR}/dem2points
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/dem2points
|
||||
UPDATE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
INSTALL_COMMAND ""
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_GDAL_ARGS}
|
||||
${APPLE_CMAKE_ARGS}
|
||||
)
|
||||
|
||||
externalproject_add(odm_orthophoto
|
||||
DEPENDS opencv
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/odm_orthophoto.git
|
||||
GIT_TAG 317
|
||||
PREFIX ${SB_BINARY_DIR}/odm_orthophoto
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/odm_orthophoto
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS} ${WIN32_GDAL_ARGS}
|
||||
)
|
||||
|
||||
externalproject_add(fastrasterfilter
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/FastRasterFilter.git
|
||||
GIT_TAG main
|
||||
PREFIX ${SB_BINARY_DIR}/fastrasterfilter
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/fastrasterfilter
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS} ${WIN32_GDAL_ARGS}
|
||||
)
|
||||
|
||||
externalproject_add(lastools
|
||||
GIT_REPOSITORY https://github.com/LAStools/LAStools.git
|
||||
GIT_TAG 2ef44281645999ec7217facec84a5913bbbbe165
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/LAStools.git
|
||||
GIT_TAG 250
|
||||
PREFIX ${SB_BINARY_DIR}/lastools
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/lastools
|
||||
CONFIGURE_COMMAND ""
|
||||
CMAKE_COMMAND ""
|
||||
CMAKE_GENERATOR ""
|
||||
UPDATE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND make -C LASlib -j${nproc} CXXFLAGS='-std=c++11' && make -C src -j${nproc} CXXFLAGS='-std=c++11' lasmerge
|
||||
INSTALL_COMMAND install -m755 -D -t ${SB_INSTALL_DIR}/bin ${SB_SOURCE_DIR}/lastools/bin/lasmerge
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
)
|
||||
|
||||
externalproject_add(draco
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/draco
|
||||
GIT_SHALLOW ON
|
||||
GIT_TAG 304
|
||||
PREFIX ${SB_BINARY_DIR}/draco
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/draco
|
||||
CMAKE_ARGS -DDRACO_TRANSCODER_SUPPORTED=ON
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
)
|
||||
|
|
|
@ -8,7 +8,7 @@ ExternalProject_Add(${_proj_name}
|
|||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL http://ceres-solver.org/ceres-solver-1.14.0.tar.gz
|
||||
URL http://ceres-solver.org/ceres-solver-2.0.0.tar.gz
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
|
@ -18,7 +18,10 @@ ExternalProject_Add(${_proj_name}
|
|||
-DCMAKE_CXX_FLAGS=-fPIC
|
||||
-DBUILD_EXAMPLES=OFF
|
||||
-DBUILD_TESTING=OFF
|
||||
-DMINIGLOG=ON
|
||||
-DMINIGLOG_MAX_LOG_LEVEL=-100
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
set(_proj_name entwine)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
if (NOT WIN32)
|
||||
set(EXTRA_CMAKE_ARGS -DCMAKE_CXX_FLAGS=-isystem\ ${SB_SOURCE_DIR}/pdal)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS pdal
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/entwine/
|
||||
GIT_TAG 290
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
${EXTRA_CMAKE_ARGS}
|
||||
-DADDITIONAL_LINK_DIRECTORIES_PATHS=${SB_INSTALL_DIR}/lib
|
||||
-DWITH_TESTS=OFF
|
||||
-DWITH_ZSTD=OFF
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,38 @@
|
|||
set(_proj_name exiftool)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
if (WIN32)
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/exiftool.zip
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
UPDATE_COMMAND ""
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${SB_SOURCE_DIR}/${_proj_name}/exiftool.exe ${SB_INSTALL_DIR}/bin
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
else()
|
||||
externalproject_add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/exiftool/exiftool/archive/refs/tags/12.62.zip
|
||||
UPDATE_COMMAND ""
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND perl Makefile.PL PREFIX=${SB_INSTALL_DIR} LIB=${SB_INSTALL_DIR}/bin/lib
|
||||
INSTALL_COMMAND make install && rm -fr ${SB_INSTALL_DIR}/man
|
||||
)
|
||||
endif()
|
|
@ -1,4 +1,4 @@
|
|||
set(_proj_name pangolin)
|
||||
set(_proj_name fpcfilter)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
|
@ -7,16 +7,15 @@ ExternalProject_Add(${_proj_name}
|
|||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/paulinus/Pangolin/archive/b7c66570b336e012bf3124e2a7411d417a1d35f7.zip
|
||||
URL_MD5 9b7938d1045d26b27a637b663e647aef
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/FPCFilter
|
||||
GIT_TAG 331
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCPP11_NO_BOOST=1
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
@ -26,4 +25,3 @@ ExternalProject_Add(${_proj_name}
|
|||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
|
@ -15,7 +15,7 @@ ExternalProject_Add(${_proj_name}
|
|||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
|
|
|
@ -15,6 +15,7 @@ ExternalProject_Add(${_proj_name}
|
|||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_GDAL_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
|
|
@ -2,22 +2,24 @@ set(_proj_name mvstexturing)
|
|||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS
|
||||
DEPENDS mve
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/mvs-texturing
|
||||
GIT_TAG 221
|
||||
GIT_TAG 290
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DRESEARCH=OFF
|
||||
-DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${APPLE_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
|
|
@ -1,78 +0,0 @@
|
|||
set(_proj_name orb_slam2)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS opencv pangolin
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/paulinus/ORB_SLAM2/archive/7c11f186a53a75560cd17352d327b0bc127a82de.zip
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_COMMAND ""
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
||||
# DBoW2
|
||||
set(DBoW2_BINARY_DIR "${SB_BINARY_DIR}/DBoW2")
|
||||
file(MAKE_DIRECTORY "${DBoW2_BINARY_DIR}")
|
||||
|
||||
ExternalProject_Add_Step(${_proj_name} build_DBoW2
|
||||
COMMAND make -j2
|
||||
DEPENDEES configure_DBoW2
|
||||
DEPENDERS configure
|
||||
WORKING_DIRECTORY ${DBoW2_BINARY_DIR}
|
||||
ALWAYS 1
|
||||
)
|
||||
|
||||
ExternalProject_Add_Step(${_proj_name} configure_DBoW2
|
||||
COMMAND ${CMAKE_COMMAND} <SOURCE_DIR>/Thirdparty/DBoW2
|
||||
-DOpenCV_DIR=${SB_INSTALL_DIR}/share/OpenCV
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
DEPENDEES download
|
||||
DEPENDERS build_DBoW2
|
||||
WORKING_DIRECTORY ${DBoW2_BINARY_DIR}
|
||||
ALWAYS 1
|
||||
)
|
||||
|
||||
# g2o
|
||||
set(g2o_BINARY_DIR "${SB_BINARY_DIR}/g2o")
|
||||
file(MAKE_DIRECTORY "${g2o_BINARY_DIR}")
|
||||
|
||||
ExternalProject_Add_Step(${_proj_name} build_g2o
|
||||
COMMAND make -j2
|
||||
DEPENDEES configure_g2o
|
||||
DEPENDERS configure
|
||||
WORKING_DIRECTORY ${g2o_BINARY_DIR}
|
||||
ALWAYS 1
|
||||
)
|
||||
|
||||
ExternalProject_Add_Step(${_proj_name} configure_g2o
|
||||
COMMAND ${CMAKE_COMMAND} <SOURCE_DIR>/Thirdparty/g2o
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
DEPENDEES download
|
||||
DEPENDERS build_g2o
|
||||
WORKING_DIRECTORY ${g2o_BINARY_DIR}
|
||||
ALWAYS 1
|
||||
)
|
||||
|
||||
# Uncompress Vocabulary
|
||||
ExternalProject_Add_Step(${_proj_name} uncompress_vocabulary
|
||||
COMMAND tar -xf ORBvoc.txt.tar.gz
|
||||
DEPENDEES download
|
||||
DEPENDERS configure
|
||||
WORKING_DIRECTORY <SOURCE_DIR>/Vocabulary
|
||||
ALWAYS 1
|
||||
)
|
|
@ -0,0 +1,35 @@
|
|||
set(_proj_name obj2tiles)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
set(OBJ2TILES_VERSION v1.0.12)
|
||||
set(OBJ2TILES_EXT "")
|
||||
|
||||
set(OBJ2TILES_ARCH "Linux64")
|
||||
if (WIN32)
|
||||
set(OBJ2TILES_ARCH "Win64")
|
||||
set(OBJ2TILES_EXT ".exe")
|
||||
elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64")
|
||||
set(OBJ2TILES_ARCH "LinuxArm")
|
||||
elseif(APPLE)
|
||||
set(OBJ2TILES_ARCH "Osx64")
|
||||
endif()
|
||||
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/OpenDroneMap/Obj2Tiles/releases/download/${OBJ2TILES_VERSION}/Obj2Tiles-${OBJ2TILES_ARCH}.zip
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
UPDATE_COMMAND ""
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${SB_SOURCE_DIR}/${_proj_name}/Obj2Tiles${OBJ2TILES_EXT} ${SB_INSTALL_DIR}/bin
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -1,6 +1,35 @@
|
|||
set(_proj_name opencv)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
if (WIN32)
|
||||
set(OCV_CMAKE_EXTRA_ARGS -DPYTHON3_NUMPY_INCLUDE_DIRS=${PYTHON_HOME}/lib/site-packages/numpy/core/include
|
||||
-DPYTHON3_PACKAGES_PATH=${PYTHON_HOME}/lib/site-packages
|
||||
-DPYTHON3_EXECUTABLE=${PYTHON_EXE_PATH}
|
||||
-DWITH_MSMF=OFF
|
||||
-DOPENCV_LIB_INSTALL_PATH=${SB_INSTALL_DIR}/lib
|
||||
-DOPENCV_BIN_INSTALL_PATH=${SB_INSTALL_DIR}/bin)
|
||||
elseif(APPLE)
|
||||
# macOS is unable to automatically detect our Python libs
|
||||
set(OCV_CMAKE_EXTRA_ARGS -DPYTHON3_NUMPY_INCLUDE_DIRS=${PYTHON_HOME}/lib/python3.8/site-packages/numpy/core/include
|
||||
-DPYTHON3_PACKAGES_PATH=${PYTHON_HOME}/lib/python3.8/site-packages
|
||||
-DPYTHON3_EXECUTABLE=${PYTHON_EXE_PATH}
|
||||
-DPYTHON3_LIBRARIES=${HOMEBREW_INSTALL_PREFIX}/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/lib/libpython3.8.dylib
|
||||
-DPYTHON3_INCLUDE_DIR=${HOMEBREW_INSTALL_PREFIX}/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/include/python3.8/
|
||||
-DPYTHON3_INCLUDE_PATH=${HOMEBREW_INSTALL_PREFIX}/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/include/python3.8/
|
||||
-DPYTHON3INTERP_FOUND=ON
|
||||
-DPYTHON3LIBS_FOUND=ON
|
||||
-DPYTHON_DEFAULT_AVAILABLE=ON
|
||||
-DPYTHON_DEFAULT_EXECUTABLE=${PYTHON_EXE_PATH}
|
||||
-DPYTHON3_VERSION_MAJOR=3
|
||||
-DPYTHON3_VERSION_MINOR=8
|
||||
-DOPENCV_CONFIG_INSTALL_PATH=
|
||||
-DOPENCV_PYTHON_INSTALL_PATH=${SB_INSTALL_DIR}/lib/python3.8/dist-packages
|
||||
-DHAVE_opencv_python3=ON
|
||||
-DOPENCV_PYTHON_SKIP_DETECTION=ON
|
||||
-DOPENCV_LIB_INSTALL_PATH=${SB_INSTALL_DIR}/lib
|
||||
-DOPENCV_BIN_INSTALL_PATH=${SB_INSTALL_DIR}/bin)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
|
@ -25,13 +54,14 @@ ExternalProject_Add(${_proj_name}
|
|||
-DBUILD_opencv_objdetect=ON
|
||||
-DBUILD_opencv_photo=ON
|
||||
-DBUILD_opencv_legacy=ON
|
||||
-DBUILD_opencv_python=ON
|
||||
-DWITH_FFMPEG=${ODM_BUILD_SLAM}
|
||||
-DBUILD_opencv_python3=ON
|
||||
-DWITH_FFMPEG=ON
|
||||
-DWITH_CUDA=OFF
|
||||
-DWITH_GTK=${ODM_BUILD_SLAM}
|
||||
-DWITH_GTK=OFF
|
||||
-DWITH_VTK=OFF
|
||||
-DWITH_EIGEN=OFF
|
||||
-DWITH_OPENNI=OFF
|
||||
-DWITH_OPENEXR=OFF
|
||||
-DBUILD_EXAMPLES=OFF
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PERF_TESTS=OFF
|
||||
|
@ -48,8 +78,10 @@ ExternalProject_Add(${_proj_name}
|
|||
-DBUILD_opencv_ts=OFF
|
||||
-DBUILD_opencv_xfeatures2d=ON
|
||||
-DOPENCV_ALLOCATOR_STATS_COUNTER_TYPE=int64_t
|
||||
-DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${OCV_CMAKE_EXTRA_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
@ -58,4 +90,4 @@ ExternalProject_Add(${_proj_name}
|
|||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
)
|
|
@ -2,8 +2,8 @@ set(_proj_name openmvs)
|
|||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
externalproject_add(vcg
|
||||
GIT_REPOSITORY https://github.com/cdcseacave/VCG.git
|
||||
GIT_TAG master
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/VCG.git
|
||||
GIT_TAG 285
|
||||
UPDATE_COMMAND ""
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/vcg
|
||||
CONFIGURE_COMMAND ""
|
||||
|
@ -12,15 +12,48 @@ externalproject_add(vcg
|
|||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
externalproject_add(eigen34
|
||||
GIT_REPOSITORY https://gitlab.com/libeigen/eigen.git
|
||||
GIT_TAG 7176ae16238ded7fb5ed30a7f5215825b3abd134
|
||||
UPDATE_COMMAND ""
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/eigen34
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_IN_SOURCE 1
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
SET(ARM64_CMAKE_ARGS "")
|
||||
|
||||
if(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64" OR ${CMAKE_SYSTEM_PROCESSOR} STREQUAL "arm64")
|
||||
SET(ARM64_CMAKE_ARGS -DOpenMVS_USE_SSE=OFF)
|
||||
endif()
|
||||
|
||||
SET(GPU_CMAKE_ARGS "")
|
||||
if(UNIX)
|
||||
if (EXISTS "/usr/local/cuda/lib64/stubs")
|
||||
SET(GPU_CMAKE_ARGS -DCMAKE_LIBRARY_PATH=/usr/local/cuda/lib64/stubs)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
# On Windows systems without NVIDIA GPUs, OpenMVS will not launch
|
||||
# unless a CUDA DLL is available; we download a dummy DLL
|
||||
# generated with https://github.com/ykhwong/dummy-dll-generator that is
|
||||
# loaded UNLESS the real CUDA DLL is available, since it will
|
||||
# be loaded before our dummy DLL.
|
||||
file(DOWNLOAD "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/nvcuda_dummy.dll" "${SB_INSTALL_DIR}/bin/nvcuda.dll")
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS ceres opencv vcg
|
||||
DEPENDS ceres opencv vcg eigen34
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/openMVS
|
||||
GIT_TAG 230
|
||||
GIT_TAG 320
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
|
@ -28,8 +61,14 @@ ExternalProject_Add(${_proj_name}
|
|||
CMAKE_ARGS
|
||||
-DOpenCV_DIR=${SB_INSTALL_DIR}/lib/cmake/opencv4
|
||||
-DVCG_ROOT=${SB_SOURCE_DIR}/vcg
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
-DEIGEN3_INCLUDE_DIR=${SB_SOURCE_DIR}/eigen34/
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
|
||||
-DOpenMVS_ENABLE_TESTS=OFF
|
||||
-DOpenMVS_MAX_CUDA_COMPATIBILITY=ON
|
||||
${GPU_CMAKE_ARGS}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${ARM64_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
|
|
@ -0,0 +1,33 @@
|
|||
set(_proj_name openpointclass)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS pdal eigen34
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/uav4geo/OpenPointClass
|
||||
GIT_TAG v1.1.3
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DPDAL_DIR=${SB_INSTALL_DIR}/lib/cmake/PDAL
|
||||
-DWITH_GBT=ON
|
||||
-DBUILD_PCTRAIN=OFF
|
||||
-DEIGEN3_INCLUDE_DIR=${SB_SOURCE_DIR}/eigen34/
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -1,5 +1,21 @@
|
|||
set(_proj_name opensfm)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(nproc)
|
||||
|
||||
set(EXTRA_INCLUDE_DIRS "")
|
||||
if(WIN32)
|
||||
set(OpenCV_DIR "${SB_INSTALL_DIR}/x64/vc16/lib")
|
||||
set(BUILD_CMD ${CMAKE_COMMAND} --build "${SB_BUILD_DIR}/opensfm" --config "${CMAKE_BUILD_TYPE}")
|
||||
else()
|
||||
set(BUILD_CMD make "-j${nproc}")
|
||||
if (APPLE)
|
||||
set(OpenCV_DIR "${SB_INSTALL_DIR}")
|
||||
set(EXTRA_INCLUDE_DIRS "${HOMEBREW_INSTALL_PREFIX}/include")
|
||||
else()
|
||||
set(OpenCV_DIR "${SB_INSTALL_DIR}/lib/cmake/opencv4")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS ceres opencv gflags
|
||||
|
@ -9,17 +25,20 @@ ExternalProject_Add(${_proj_name}
|
|||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/OpenSfM/
|
||||
GIT_TAG 230
|
||||
GIT_TAG 330
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND git submodule update --init --recursive
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CONFIGURE_COMMAND cmake <SOURCE_DIR>/${_proj_name}/src
|
||||
SOURCE_DIR ${SB_INSTALL_DIR}/bin/${_proj_name}
|
||||
CONFIGURE_COMMAND ${CMAKE_COMMAND} <SOURCE_DIR>/${_proj_name}/src
|
||||
-DCERES_ROOT_DIR=${SB_INSTALL_DIR}
|
||||
-DOpenCV_DIR=${SB_INSTALL_DIR}/lib/cmake/opencv4
|
||||
-DOpenCV_DIR=${OpenCV_DIR}
|
||||
-DADDITIONAL_INCLUDE_DIRS=${SB_INSTALL_DIR}/include
|
||||
-DYET_ADDITIONAL_INCLUDE_DIRS=${EXTRA_INCLUDE_DIRS}
|
||||
-DOPENSFM_BUILD_TESTS=off
|
||||
-DPYTHON_EXECUTABLE=/usr/bin/python3
|
||||
-DPYTHON_EXECUTABLE=${PYTHON_EXE_PATH}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
BUILD_COMMAND ${BUILD_CMD}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
set(_proj_name pcl)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
|
||||
# PCL 1.8 + Fix for loading large point clouds https://github.com/OpenDroneMap/pcl/commit/924ab1137fbfa3004f222fb0834e3d66881ec057
|
||||
URL https://github.com/OpenDroneMap/pcl/archive/master.zip
|
||||
|
||||
#-- TODO: Use PCL 1.9.1 when we upgrade to a newer version of Ubuntu. Currently
|
||||
#-- it's troublesome to compile due to the older version of Boost shipping with 16.04.
|
||||
#-- URL https://github.com/PointCloudLibrary/pcl/archive/pcl-1.9.1.tar.gz
|
||||
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DBUILD_features=OFF
|
||||
-DBUILD_filters=OFF
|
||||
-DBUILD_geometry=OFF
|
||||
-DBUILD_keypoints=OFF
|
||||
-DBUILD_outofcore=OFF
|
||||
-DBUILD_people=OFF
|
||||
-DBUILD_recognition=OFF
|
||||
-DBUILD_registration=OFF
|
||||
-DBUILD_sample_consensus=OFF
|
||||
-DBUILD_segmentation=OFF
|
||||
-DBUILD_features=OFF
|
||||
-DBUILD_surface_on_nurbs=OFF
|
||||
-DBUILD_tools=OFF
|
||||
-DBUILD_tracking=OFF
|
||||
-DBUILD_visualization=OFF
|
||||
-DWITH_QT=OFF
|
||||
-DBUILD_OPENNI=OFF
|
||||
-DBUILD_OPENNI2=OFF
|
||||
-DWITH_OPENNI=OFF
|
||||
-DWITH_OPENNI2=OFF
|
||||
-DWITH_FZAPI=OFF
|
||||
-DWITH_LIBUSB=OFF
|
||||
-DWITH_PCAP=OFF
|
||||
-DWITH_PXCAPI=OFF
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
-DPCL_VERBOSITY_LEVEL=Error
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -1,14 +1,22 @@
|
|||
set(_proj_name pdal)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
if (WIN32)
|
||||
set(LASZIP_LIB "${SB_INSTALL_DIR}/lib/laszip.lib")
|
||||
elseif(APPLE)
|
||||
set(LASZIP_LIB "${SB_INSTALL_DIR}/lib/liblaszip.dylib")
|
||||
else()
|
||||
set(LASZIP_LIB "${SB_INSTALL_DIR}/lib/liblaszip.so")
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS zstd hexer laszip
|
||||
DEPENDS hexer laszip
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
URL https://github.com/PDAL/PDAL/archive/2.2.0.zip
|
||||
URL https://github.com/OpenDroneMap/PDAL/archive/refs/heads/333.zip
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
|
@ -17,30 +25,32 @@ ExternalProject_Add(${_proj_name}
|
|||
-DBUILD_PGPOINTCLOUD_TESTS=OFF
|
||||
-DBUILD_PLUGIN_PGPOINTCLOUD=OFF
|
||||
-DBUILD_PLUGIN_CPD=OFF
|
||||
-DBUILD_PLUGIN_GREYHOUND=OFF
|
||||
-DBUILD_PLUGIN_HEXBIN=ON
|
||||
-DBUILD_PLUGIN_ICEBRIDGE=OFF
|
||||
-DBUILD_PLUGIN_MRSID=OFF
|
||||
-DBUILD_PLUGIN_NITF=OFF
|
||||
-DBUILD_PLUGIN_OCI=OFF
|
||||
-DBUILD_PLUGIN_P2G=OFF
|
||||
-DBUILD_PLUGIN_SQLITE=OFF
|
||||
-DBUILD_PLUGIN_RIVLIB=OFF
|
||||
-DBUILD_PLUGIN_PYTHON=OFF
|
||||
-DWITH_ZSTD=ON
|
||||
-DENABLE_CTEST=OFF
|
||||
-DWITH_APPS=ON
|
||||
-DWITH_LAZPERF=OFF
|
||||
-DWITH_GEOTIFF=ON
|
||||
-DWITH_LASZIP=ON
|
||||
-DLASZIP_FOUND=TRUE
|
||||
-DLASZIP_LIBRARIES=${SB_INSTALL_DIR}/lib/liblaszip.so
|
||||
-DLASZIP_VERSION=3.1.1
|
||||
-DLASZIP_INCLUDE_DIR=${SB_INSTALL_DIR}/include
|
||||
-DLASZIP_LIBRARY=${SB_INSTALL_DIR}/lib/liblaszip.so
|
||||
-DWITH_TESTS=OFF
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
-DBUILD_PLUGIN_GREYHOUND=OFF
|
||||
-DBUILD_PLUGIN_HEXBIN=ON
|
||||
-DBUILD_PLUGIN_ICEBRIDGE=OFF
|
||||
-DBUILD_PLUGIN_MRSID=OFF
|
||||
-DBUILD_PLUGIN_NITF=OFF
|
||||
-DBUILD_PLUGIN_OCI=OFF
|
||||
-DBUILD_PLUGIN_P2G=OFF
|
||||
-DBUILD_PLUGIN_SQLITE=OFF
|
||||
-DBUILD_PLUGIN_RIVLIB=OFF
|
||||
-DBUILD_PLUGIN_PYTHON=OFF
|
||||
-DWITH_ZSTD=OFF
|
||||
-DENABLE_CTEST=OFF
|
||||
-DWITH_APPS=ON
|
||||
-DWITH_LAZPERF=OFF
|
||||
-DWITH_GEOTIFF=ON
|
||||
-DWITH_LASZIP=ON
|
||||
-DLASZIP_FOUND=TRUE
|
||||
-DLASZIP_LIBRARIES=${LASZIP_LIB}
|
||||
-DLASZIP_VERSION=3.1.1
|
||||
-DLASZIP_INCLUDE_DIR=${SB_INSTALL_DIR}/include
|
||||
-DLASZIP_LIBRARY=${LASZIP_LIB}
|
||||
-DWITH_TESTS=OFF
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${WIN32_GDAL_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
|
@ -50,3 +60,4 @@ ExternalProject_Add(${_proj_name}
|
|||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
set(_proj_name pdal-python)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
if (WIN32)
|
||||
set(PP_EXTRA_ARGS -DPYTHON3_EXECUTABLE=${PYTHON_EXE_PATH}
|
||||
-DPython3_NumPy_INCLUDE_DIRS=${PYTHON_HOME}/lib/site-packages/numpy/core/include)
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS pdal
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/pdal-python
|
||||
GIT_TAG main
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DPDAL_DIR=${SB_INSTALL_DIR}/lib/cmake/PDAL
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}/lib/python3.8/dist-packages
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${PP_EXTRA_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
|
@ -0,0 +1,36 @@
|
|||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/pypopsift")
|
||||
|
||||
# Pypopsift
|
||||
find_package(CUDA 7.0)
|
||||
|
||||
if(CUDA_FOUND)
|
||||
ExternalProject_Add(pypopsift
|
||||
DEPENDS opensfm
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/pypopsift
|
||||
GIT_TAG 288
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/pypopsift
|
||||
CMAKE_ARGS
|
||||
-DOUTPUT_DIR=${SB_INSTALL_DIR}/bin/opensfm/opensfm
|
||||
-DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
${ARM64_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
||||
INSTALL_DIR ${SB_INSTALL_DIR}
|
||||
#--Output logging-------------
|
||||
LOG_DOWNLOAD OFF
|
||||
LOG_CONFIGURE OFF
|
||||
LOG_BUILD OFF
|
||||
)
|
||||
else()
|
||||
message(WARNING "Could not find CUDA >= 7.0")
|
||||
endif()
|
|
@ -1,20 +1,24 @@
|
|||
set(_proj_name zstd)
|
||||
set(_proj_name renderdem)
|
||||
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
|
||||
|
||||
ExternalProject_Add(${_proj_name}
|
||||
DEPENDS pdal
|
||||
PREFIX ${_SB_BINARY_DIR}
|
||||
TMP_DIR ${_SB_BINARY_DIR}/tmp
|
||||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/facebook/zstd
|
||||
GIT_TAG b84274da0f641907dfe472d5da132d872202e9b8
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/RenderDEM
|
||||
GIT_TAG main
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CONFIGURE_COMMAND ${CMAKE_COMMAND} -DZSTD_BUILD_PROGRAMS=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
<SOURCE_DIR>/build/cmake
|
||||
CMAKE_ARGS
|
||||
-DPDAL_DIR=${SB_INSTALL_DIR}/lib/cmake/PDAL
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
${WIN32_CMAKE_ARGS}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
#--Install step---------------
|
|
@ -8,15 +8,15 @@ ExternalProject_Add(${_proj_name}
|
|||
STAMP_DIR ${_SB_BINARY_DIR}/stamp
|
||||
#--Download step--------------
|
||||
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
|
||||
GIT_REPOSITORY https://github.com/pierotofy/untwine/
|
||||
GIT_TAG insttgt
|
||||
GIT_REPOSITORY https://github.com/OpenDroneMap/untwine/
|
||||
GIT_TAG 317
|
||||
#--Update/Patch step----------
|
||||
UPDATE_COMMAND ""
|
||||
#--Configure step-------------
|
||||
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
|
||||
CMAKE_ARGS
|
||||
-DPDAL_DIR=${SB_INSTALL_DIR}/lib/cmake/PDAL
|
||||
-DCMAKE_BUILD_TYPE=Release
|
||||
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
|
||||
#--Build step-----------------
|
||||
BINARY_DIR ${_SB_BINARY_DIR}
|
||||
|
|
2
VERSION
2
VERSION
|
@ -1 +1 @@
|
|||
2.3.1
|
||||
3.5.1
|
||||
|
|
|
@ -0,0 +1,213 @@
|
|||
import sys, platform
|
||||
if sys.platform != 'win32':
|
||||
print("This script is for Windows only! Use configure.sh instead.")
|
||||
exit(1)
|
||||
if sys.version_info.major != 3 or sys.version_info.minor != 8:
|
||||
print("You need to use Python 3.8.x (due to the requirements.txt). You are using %s instead." % platform.python_version())
|
||||
exit(1)
|
||||
|
||||
import argparse
|
||||
import subprocess
|
||||
import os
|
||||
import stat
|
||||
import urllib.request
|
||||
import shutil
|
||||
import zipfile
|
||||
|
||||
from venv import EnvBuilder
|
||||
|
||||
parser = argparse.ArgumentParser(description='ODM Windows Configure Script')
|
||||
parser.add_argument('action',
|
||||
type=str,
|
||||
choices=["build", "clean", "dist", "vcpkg_export"],
|
||||
help='Action: %(choices)s')
|
||||
parser.add_argument('--build-vcpkg',
|
||||
type=bool,
|
||||
help='Build VCPKG environment from scratch instead of downloading prebuilt one.')
|
||||
parser.add_argument('--vcpkg-archive-url',
|
||||
type=str,
|
||||
default='https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/vcpkg-export-250.zip',
|
||||
required=False,
|
||||
help='Path to VCPKG export archive')
|
||||
parser.add_argument('--code-sign-cert-path',
|
||||
type=str,
|
||||
default='',
|
||||
required=False,
|
||||
help='Path to pfx code signing certificate')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
def run(cmd, cwd=os.getcwd()):
|
||||
env = os.environ.copy()
|
||||
print(cmd)
|
||||
p = subprocess.Popen(cmd, shell=True, env=env, cwd=cwd)
|
||||
retcode = p.wait()
|
||||
if retcode != 0:
|
||||
raise Exception("Command returned %s" % retcode)
|
||||
|
||||
# https://izziswift.com/shutil-rmtree-fails-on-windows-with-access-is-denied/
|
||||
def rmtree(top):
|
||||
for root, dirs, files in os.walk(top, topdown=False):
|
||||
for name in files:
|
||||
filename = os.path.join(root, name)
|
||||
os.chmod(filename, stat.S_IWUSR)
|
||||
os.remove(filename)
|
||||
for name in dirs:
|
||||
os.rmdir(os.path.join(root, name))
|
||||
os.rmdir(top)
|
||||
|
||||
def vcpkg_requirements():
|
||||
with open("vcpkg-requirements.txt") as f:
|
||||
pckgs = list(filter(lambda l: len(l) > 0, map(str.strip, f.read().split("\n"))))
|
||||
return pckgs
|
||||
|
||||
def build():
|
||||
# Create python virtual env
|
||||
if not os.path.isdir("venv"):
|
||||
print("Creating virtual env --> venv/")
|
||||
ebuilder = EnvBuilder(with_pip=True)
|
||||
ebuilder.create("venv")
|
||||
|
||||
run("venv\\Scripts\\pip install --ignore-installed -r requirements.txt")
|
||||
|
||||
# Download / build VCPKG environment
|
||||
if not os.path.isdir("vcpkg"):
|
||||
if args.build_vcpkg:
|
||||
print("TODO")
|
||||
# git clone vcpkg repo
|
||||
# bootstrap
|
||||
# install requirements
|
||||
|
||||
else:
|
||||
if not os.path.exists("vcpkg-env.zip"):
|
||||
print("Downloading %s" % args.vcpkg_archive_url)
|
||||
with urllib.request.urlopen(args.vcpkg_archive_url) as response, open( "vcpkg-env.zip", 'wb') as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
if not os.path.exists("vcpkg"):
|
||||
print("Extracting vcpkg-env.zip --> vcpkg/")
|
||||
with zipfile.ZipFile("vcpkg-env.zip") as z:
|
||||
top_dir = z.namelist()[0]
|
||||
z.extractall(".")
|
||||
|
||||
if os.path.exists(top_dir):
|
||||
os.rename(top_dir, "vcpkg")
|
||||
else:
|
||||
print("Warning! Something looks wrong in the VCPKG archive... check the vcpkg/ directory.")
|
||||
safe_remove("vcpkg-env.zip")
|
||||
|
||||
if not os.path.exists(os.path.join("SuperBuild", "build")) or not os.path.exists(os.path.join("SuperBuild", "install")):
|
||||
print("Compiling SuperBuild")
|
||||
|
||||
build_dir = os.path.join("SuperBuild", "build")
|
||||
if not os.path.isdir(build_dir):
|
||||
os.mkdir(build_dir)
|
||||
|
||||
toolchain_file = os.path.join(os.getcwd(), "vcpkg", "scripts", "buildsystems", "vcpkg.cmake")
|
||||
run("cmake .. -DCMAKE_TOOLCHAIN_FILE=\"%s\"" % toolchain_file, cwd=build_dir)
|
||||
run("cmake --build . --config Release", cwd=build_dir)
|
||||
|
||||
def vcpkg_export():
|
||||
if not os.path.exists("vcpkg"):
|
||||
print("vcpkg directory does not exist. Did you build the environment?")
|
||||
exit(1)
|
||||
|
||||
pkgs = vcpkg_requirements()
|
||||
out = "vcpkg-export-%s" % odm_version().replace(".", "")
|
||||
run("vcpkg\\vcpkg export %s --output=%s --zip" % (" ".join(pkgs), out))
|
||||
|
||||
def odm_version():
|
||||
with open("VERSION") as f:
|
||||
return f.read().split("\n")[0].strip()
|
||||
|
||||
def safe_remove(path):
|
||||
if os.path.isdir(path):
|
||||
rmtree(path)
|
||||
elif os.path.isfile(path):
|
||||
os.remove(path)
|
||||
|
||||
def clean():
|
||||
safe_remove("vcpkg-download.zip")
|
||||
safe_remove("vcpkg")
|
||||
safe_remove("venv")
|
||||
safe_remove(os.path.join("SuperBuild", "build"))
|
||||
safe_remove(os.path.join("SuperBuild", "download"))
|
||||
safe_remove(os.path.join("SuperBuild", "src"))
|
||||
safe_remove(os.path.join("SuperBuild", "install"))
|
||||
|
||||
def dist():
|
||||
if not os.path.exists("SuperBuild\\install"):
|
||||
print("You need to run configure.py build before you can run dist")
|
||||
exit(1)
|
||||
|
||||
if not os.path.exists("SuperBuild\\download"):
|
||||
os.mkdir("SuperBuild\\download")
|
||||
|
||||
# Download VC++ runtime
|
||||
vcredist_path = os.path.join("SuperBuild", "download", "vc_redist.x64.zip")
|
||||
if not os.path.isfile(vcredist_path):
|
||||
vcredist_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/VC_redist.x64.zip"
|
||||
print("Downloading %s" % vcredist_url)
|
||||
with urllib.request.urlopen(vcredist_url) as response, open(vcredist_path, 'wb') as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
|
||||
print("Extracting --> vc_redist.x64.exe")
|
||||
with zipfile.ZipFile(vcredist_path) as z:
|
||||
z.extractall(os.path.join("SuperBuild", "download"))
|
||||
|
||||
# Download portable python
|
||||
if not os.path.isdir("python38"):
|
||||
pythonzip_path = os.path.join("SuperBuild", "download", "python38.zip")
|
||||
python_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/python-3.8.1-embed-amd64-less-pth.zip"
|
||||
if not os.path.exists(pythonzip_path):
|
||||
print("Downloading %s" % python_url)
|
||||
with urllib.request.urlopen(python_url) as response, open( pythonzip_path, 'wb') as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
|
||||
os.mkdir("python38")
|
||||
|
||||
print("Extracting --> python38/")
|
||||
with zipfile.ZipFile(pythonzip_path) as z:
|
||||
z.extractall("python38")
|
||||
|
||||
# Download signtool
|
||||
signtool_path = os.path.join("SuperBuild", "download", "signtool.exe")
|
||||
signtool_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/signtool.exe"
|
||||
if not os.path.exists(signtool_path):
|
||||
print("Downloading %s" % signtool_url)
|
||||
with urllib.request.urlopen(signtool_url) as response, open(signtool_path, 'wb') as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
|
||||
# Download innosetup
|
||||
if not os.path.isdir("innosetup"):
|
||||
innosetupzip_path = os.path.join("SuperBuild", "download", "innosetup.zip")
|
||||
innosetup_url = "https://github.com/OpenDroneMap/windows-deps/releases/download/2.5.0/innosetup-portable-win32-6.0.5-3.zip"
|
||||
if not os.path.exists(innosetupzip_path):
|
||||
print("Downloading %s" % innosetup_url)
|
||||
with urllib.request.urlopen(innosetup_url) as response, open(innosetupzip_path, 'wb') as out_file:
|
||||
shutil.copyfileobj(response, out_file)
|
||||
|
||||
os.mkdir("innosetup")
|
||||
|
||||
print("Extracting --> innosetup/")
|
||||
with zipfile.ZipFile(innosetupzip_path) as z:
|
||||
z.extractall("innosetup")
|
||||
|
||||
# Run
|
||||
cs_flags = '/DSKIP_SIGN=1'
|
||||
if args.code_sign_cert_path:
|
||||
cs_flags = '"/Ssigntool=%s sign /f %s /fd SHA1 /t http://timestamp.sectigo.com $f"' % (signtool_path, args.code_sign_cert_path)
|
||||
run("innosetup\\iscc /Qp " + cs_flags + " \"innosetup.iss\"")
|
||||
|
||||
print("Done! Setup created in dist/")
|
||||
|
||||
if args.action == 'build':
|
||||
build()
|
||||
elif args.action == 'vcpkg_export':
|
||||
vcpkg_export()
|
||||
elif args.action == 'dist':
|
||||
dist()
|
||||
elif args.action == 'clean':
|
||||
clean()
|
||||
else:
|
||||
args.print_help()
|
||||
exit(1)
|
66
configure.sh
66
configure.sh
|
@ -3,21 +3,21 @@
|
|||
# Ensure the DEBIAN_FRONTEND environment variable is set for apt-get calls
|
||||
APT_GET="env DEBIAN_FRONTEND=noninteractive $(command -v apt-get)"
|
||||
|
||||
check_version(){
|
||||
check_version(){
|
||||
UBUNTU_VERSION=$(lsb_release -r)
|
||||
case "$UBUNTU_VERSION" in
|
||||
*"20.04"*)
|
||||
*"20.04"*|*"21.04"*)
|
||||
echo "Ubuntu: $UBUNTU_VERSION, good!"
|
||||
;;
|
||||
*"18.04"*|*"16.04"*)
|
||||
echo "ODM 2.1 has upgraded to Ubuntu 20.04, but you're on $UBUNTU_VERSION"
|
||||
echo "ODM 2.1 has upgraded to Ubuntu 21.04, but you're on $UBUNTU_VERSION"
|
||||
echo "* The last version of ODM that supports Ubuntu 16.04 is v1.0.2."
|
||||
echo "* The last version of ODM that supports Ubuntu 18.04 is v2.0.0."
|
||||
echo "We recommend you to upgrade, or better yet, use docker."
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
echo "You are not on Ubuntu 20.04 (detected: $UBUNTU_VERSION)"
|
||||
echo "You are not on Ubuntu 21.04 (detected: $UBUNTU_VERSION)"
|
||||
echo "It might be possible to run ODM on a newer version of Ubuntu, however, you cannot rely on this script."
|
||||
exit 1
|
||||
;;
|
||||
|
@ -54,10 +54,13 @@ ensure_prereqs() {
|
|||
echo "Installing tzdata"
|
||||
sudo $APT_GET install -y -qq tzdata
|
||||
|
||||
echo "Enabling PPA for Ubuntu GIS"
|
||||
sudo $APT_GET install -y -qq --no-install-recommends software-properties-common
|
||||
sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
|
||||
sudo $APT_GET update
|
||||
UBUNTU_VERSION=$(lsb_release -r)
|
||||
if [[ "$UBUNTU_VERSION" == *"20.04"* ]]; then
|
||||
echo "Enabling PPA for Ubuntu GIS"
|
||||
sudo $APT_GET install -y -qq --no-install-recommends software-properties-common
|
||||
sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
|
||||
sudo $APT_GET update
|
||||
fi
|
||||
|
||||
echo "Installing Python PIP"
|
||||
sudo $APT_GET install -y -qq --no-install-recommends \
|
||||
|
@ -77,7 +80,13 @@ installdepsfromsnapcraft() {
|
|||
*) key=build-packages; ;; # shouldn't be needed, but it's here just in case
|
||||
esac
|
||||
|
||||
cat snap/snapcraft.yaml | \
|
||||
UBUNTU_VERSION=$(lsb_release -r)
|
||||
SNAPCRAFT_FILE="snapcraft.yaml"
|
||||
if [[ "$UBUNTU_VERSION" == *"21.04"* ]]; then
|
||||
SNAPCRAFT_FILE="snapcraft21.yaml"
|
||||
fi
|
||||
|
||||
cat snap/$SNAPCRAFT_FILE | \
|
||||
shyaml get-values-0 parts.$section.$key | \
|
||||
xargs -0 sudo $APT_GET install -y -qq --no-install-recommends
|
||||
}
|
||||
|
@ -95,10 +104,9 @@ installruntimedepsonly() {
|
|||
installdepsfromsnapcraft runtime opensfm
|
||||
echo "Installing OpenMVS Dependencies"
|
||||
installdepsfromsnapcraft runtime openmvs
|
||||
|
||||
}
|
||||
|
||||
install() {
|
||||
|
||||
installreqs() {
|
||||
cd /code
|
||||
|
||||
## Set up library paths
|
||||
|
@ -118,8 +126,18 @@ install() {
|
|||
echo "Installing OpenMVS Dependencies"
|
||||
installdepsfromsnapcraft build openmvs
|
||||
|
||||
set -e
|
||||
|
||||
# edt requires numpy to build
|
||||
pip install --ignore-installed numpy==1.23.1
|
||||
pip install --ignore-installed -r requirements.txt
|
||||
#if [ ! -z "$GPU_INSTALL" ]; then
|
||||
#fi
|
||||
set +e
|
||||
}
|
||||
|
||||
install() {
|
||||
installreqs
|
||||
|
||||
if [ ! -z "$PORTABLE_INSTALL" ]; then
|
||||
echo "Replacing g++ and gcc with our scripts for portability..."
|
||||
|
@ -140,11 +158,6 @@ install() {
|
|||
mkdir -p build && cd build
|
||||
cmake .. && make -j$processes
|
||||
|
||||
echo "Compiling build"
|
||||
cd ${RUNPATH}
|
||||
mkdir -p build && cd build
|
||||
cmake .. && make -j$processes
|
||||
|
||||
echo "Configuration Finished"
|
||||
}
|
||||
|
||||
|
@ -165,9 +178,20 @@ reinstall() {
|
|||
uninstall
|
||||
install
|
||||
}
|
||||
|
||||
clean() {
|
||||
rm -rf \
|
||||
${RUNPATH}/SuperBuild/build \
|
||||
${RUNPATH}/SuperBuild/download \
|
||||
${RUNPATH}/SuperBuild/src
|
||||
|
||||
# find in /code and delete static libraries and intermediate object files
|
||||
find ${RUNPATH} -type f -name "*.a" -delete -or -type f -name "*.o" -delete
|
||||
}
|
||||
|
||||
usage() {
|
||||
echo "Usage:"
|
||||
echo "bash configure.sh <install|update|uninstall|help> [nproc]"
|
||||
echo "bash configure.sh <install|update|uninstall|installreqs|help> [nproc]"
|
||||
echo "Subcommands:"
|
||||
echo " install"
|
||||
echo " Installs all dependencies and modules for running OpenDroneMap"
|
||||
|
@ -177,12 +201,16 @@ usage() {
|
|||
echo " Removes SuperBuild and build modules, then re-installs them. Note this does not update OpenDroneMap to the latest version. "
|
||||
echo " uninstall"
|
||||
echo " Removes SuperBuild and build modules. Does not uninstall dependencies"
|
||||
echo " installreqs"
|
||||
echo " Only installs the requirements (does not build SuperBuild)"
|
||||
echo " clean"
|
||||
echo " Cleans the SuperBuild directory by removing temporary files. "
|
||||
echo " help"
|
||||
echo " Displays this message"
|
||||
echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)"
|
||||
}
|
||||
|
||||
if [[ $1 =~ ^(install|installruntimedepsonly|reinstall|uninstall)$ ]]; then
|
||||
if [[ $1 =~ ^(install|installruntimedepsonly|reinstall|uninstall|installreqs|clean)$ ]]; then
|
||||
RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
"$1"
|
||||
else
|
||||
|
|
|
@ -0,0 +1,121 @@
|
|||
#!/bin/bash
|
||||
uname=$(uname)
|
||||
if [[ "$uname" != "Darwin" ]]; then
|
||||
echo "This script is meant for MacOS only."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ $2 =~ ^[0-9]+$ ]] ; then
|
||||
processes=$2
|
||||
else
|
||||
processes=$(sysctl -n hw.ncpu)
|
||||
fi
|
||||
|
||||
ensure_prereqs() {
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
if ! command -v xcodebuild &> /dev/null; then
|
||||
echo "You need to install Xcode first. Go to the App Store and download Xcode"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! command -v brew &> /dev/null; then
|
||||
echo "You need to install Homebrew first. https://brew.sh/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
installreqs() {
|
||||
ensure_prereqs
|
||||
|
||||
brew install cmake gcc@12 python@3.8 tbb@2020 eigen gdal boost cgal libomp
|
||||
brew link tbb@2020
|
||||
|
||||
python3.8 -m pip install virtualenv
|
||||
|
||||
if [ ! -e ${RUNPATH}/venv ]; then
|
||||
python3.8 -m virtualenv venv
|
||||
fi
|
||||
|
||||
source venv/bin/activate
|
||||
pip install --ignore-installed -r requirements.txt
|
||||
}
|
||||
|
||||
install() {
|
||||
installreqs
|
||||
|
||||
echo "Compiling SuperBuild"
|
||||
cd ${RUNPATH}/SuperBuild
|
||||
mkdir -p build && cd build
|
||||
cmake .. && make -j$processes
|
||||
|
||||
cd /tmp
|
||||
pip download GDAL==3.6.2
|
||||
tar -xpzf GDAL-3.6.2.tar.gz
|
||||
cd GDAL-3.6.2
|
||||
if [ -e /opt/homebrew/bin/gdal-config ]; then
|
||||
python setup.py build_ext --gdal-config /opt/homebrew/bin/gdal-config
|
||||
else
|
||||
python setup.py build_ext --gdal-config /usr/local/bin/gdal-config
|
||||
fi
|
||||
python setup.py build
|
||||
python setup.py install
|
||||
rm -fr /tmp/GDAL-3.6.2 /tmp/GDAL-3.6.2.tar.gz
|
||||
|
||||
cd ${RUNPATH}
|
||||
|
||||
echo "Configuration Finished"
|
||||
}
|
||||
|
||||
uninstall() {
|
||||
echo "Removing SuperBuild and build directories"
|
||||
cd ${RUNPATH}/SuperBuild
|
||||
rm -rfv build src download install
|
||||
cd ../
|
||||
rm -rfv build
|
||||
}
|
||||
|
||||
reinstall() {
|
||||
echo "Reinstalling ODM modules"
|
||||
uninstall
|
||||
install
|
||||
}
|
||||
|
||||
clean() {
|
||||
rm -rf \
|
||||
${RUNPATH}/SuperBuild/build \
|
||||
${RUNPATH}/SuperBuild/download \
|
||||
${RUNPATH}/SuperBuild/src
|
||||
|
||||
# find in /code and delete static libraries and intermediate object files
|
||||
find ${RUNPATH} -type f -name "*.a" -delete -or -type f -name "*.o" -delete
|
||||
}
|
||||
|
||||
usage() {
|
||||
echo "Usage:"
|
||||
echo "bash configure.sh <install|update|uninstall|installreqs|help> [nproc]"
|
||||
echo "Subcommands:"
|
||||
echo " install"
|
||||
echo " Installs all dependencies and modules for running OpenDroneMap"
|
||||
echo " reinstall"
|
||||
echo " Removes SuperBuild and build modules, then re-installs them. Note this does not update OpenDroneMap to the latest version. "
|
||||
echo " uninstall"
|
||||
echo " Removes SuperBuild and build modules. Does not uninstall dependencies"
|
||||
echo " installreqs"
|
||||
echo " Only installs the requirements (does not build SuperBuild)"
|
||||
echo " clean"
|
||||
echo " Cleans the SuperBuild directory by removing temporary files. "
|
||||
echo " help"
|
||||
echo " Displays this message"
|
||||
echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)"
|
||||
}
|
||||
|
||||
if [[ $1 =~ ^(install|installruntimedepsonly|reinstall|uninstall|installreqs|clean)$ ]]; then
|
||||
RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
"$1"
|
||||
else
|
||||
echo "Invalid instructions." >&2
|
||||
usage
|
||||
exit 1
|
||||
fi
|
|
@ -0,0 +1,8 @@
|
|||
@echo off
|
||||
|
||||
setlocal
|
||||
call win32env.bat
|
||||
|
||||
start "ODM Console" cmd /k "echo ____________________________ && echo / ____ _____ __ __ \ && echo ^| / __ \ ^| __ \ ^| \/ ^| ^| && echo ^| ^| ^| ^| ^| ^| ^| ^| ^| ^| \ / ^| ^| && echo ^| ^| ^| ^| ^| ^| ^| ^| ^| ^| ^|\/^| ^| ^| && echo ^| ^| ^|__^| ^| ^| ^|__^| ^| ^| ^| ^| ^| ^| && echo ^| \____/ ^|_____/ ^|_^| ^|_^| ^| && echo \____________________________/ && @echo off && FOR /F %%i in (VERSION) do echo version: %%i && @echo on && echo. && run --help
|
||||
|
||||
endlocal
|
|
@ -103,7 +103,7 @@ def writeMetadata(filename, format3d):
|
|||
'--stereo='+format3d,
|
||||
filename,
|
||||
filename+'.injected'])
|
||||
# check metadata injector was succesful
|
||||
# check metadata injector was successful
|
||||
if os.path.exists(filename+'.injected'):
|
||||
os.remove(filename)
|
||||
os.rename(filename+'.injected', filename)
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
# exif_binner.py
|
||||
|
||||
Bins multispectral drone images by spectral band, using EXIF data. Also verifies that each bin is complete (i.e. contains all expected bands) and can log errors to a CSV file. Excludes RGB images by default.
|
||||
|
||||
## Requirements
|
||||
|
||||
- [Pillow](https://pillow.readthedocs.io/en/stable/installation.html) library for reading images and EXIF data.
|
||||
- [tqdm](https://github.com/tqdm/tqdm#installation) for progress bars - can be removed
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
exif_binner.py <args> <path to folder of images to rename> <output folder>
|
||||
```
|
||||
|
||||
Optional arguments:
|
||||
|
||||
- `-b`/`--bands <integer>`: Number of expected bands per capture. Default: `5`
|
||||
- `-s`/`--sequential <True/False>`: Use sequential capture group in filenames rather than original capture ID. Default: `True`
|
||||
- `-z`/`--zero_pad <integer>`: If using sequential capture groups, zero-pad the group number to this many digits. 0 for no padding, -1 for auto padding. Default: `5`
|
||||
- `-w`/`--whitespace_replace <string>`: Replace whitespace characters with this character. Default: `-`
|
||||
- `-l`/`--logfile <filename>`: Write processed image metadata to this CSV file
|
||||
- `-r`/`--replace_filename <string>`: Use this instead of using the original filename in new filenames.
|
||||
- `-f`/`--force`: Do not ask for processing confirmation.
|
||||
- `-g`/`--no_grouping`: Do not apply grouping, only validate and add band name.
|
||||
- Show these on the command line with `-h`/`--help`.
|
|
@ -0,0 +1,210 @@
|
|||
#!/usr/bin/env python3
# Originally developed by Ming Chia at the Australian Plant Phenomics Facility (Australian National University node)

# Bins multispectral drone images by spectral band (read from EXIF/XMP data),
# verifies that each capture group contains the expected number of bands, and
# copies images into "valid"/"invalid" subfolders of the output directory.

# Usage:
# exif_binner.py <args> <path to folder of images to process> <output folder>

# standard libraries
import sys
import os
import shutil
import re
import csv
import math
import argparse

# other imports
import PIL
from PIL import Image, ExifTags
from tqdm import tqdm  # optional: see "swap with this for no tqdm" below


def _str2bool(value):
    """Parse a True/False command line string into a real boolean.

    argparse's ``type=bool`` treats any non-empty string (including "False")
    as True; this helper makes ``-s False`` behave as documented.
    """
    if isinstance(value, bool):
        return value
    if value.lower() in ("true", "t", "yes", "y", "1"):
        return True
    if value.lower() in ("false", "f", "no", "n", "0"):
        return False
    raise argparse.ArgumentTypeError("Expected True/False, got: " + value)


parser = argparse.ArgumentParser()

# required args
parser.add_argument("file_dir", help="input folder of images")
parser.add_argument("output_dir", help="output folder to copy images to")

# args with defaults
parser.add_argument("-b", "--bands", help="number of expected bands per capture", type=int, default=5)
parser.add_argument("-s", "--sequential", help="use sequential capture group in filenames rather than original capture ID", type=_str2bool, default=True)
parser.add_argument("-z", "--zero_pad", help="if using sequential capture groups, zero-pad the group number to this many digits. 0 for no padding, -1 for auto padding", type=int, default=5)
parser.add_argument("-w", "--whitespace_replace", help="replace whitespace characters with this character", type=str, default="-")

# optional args no defaults
parser.add_argument("-l", "--logfile", help="write image metadata used to this CSV file", type=str)
parser.add_argument("-r", "--replace_filename", help="use this instead of using the original filename in new filenames", type=str)
parser.add_argument("-f", "--force", help="don't ask for confirmation", action="store_true")
parser.add_argument("-g", "--no_grouping", help="do not apply grouping, only validate and add band name", action="store_true")
args = parser.parse_args()

file_dir = args.file_dir
output_dir = args.output_dir
replacement_character = args.whitespace_replace
expected_bands = args.bands
logfile = args.logfile

output_valid = os.path.join(output_dir, "valid")
output_invalid = os.path.join(output_dir, "invalid")

file_count = len(os.listdir(file_dir))

# Maximum number of capture groups possible, and the digit count needed to
# zero-pad their labels (hoisted: the original recomputed this repeatedly)
max_groups = math.ceil(float(file_count) / float(expected_bands))
auto_zero_pad = len(str(max_groups))

if args.zero_pad >= 1:
    # int("9" * n) is the largest group number representable in n digits
    if int("9" * args.zero_pad) < max_groups:
        raise ValueError("Zero pad must have more digits than maximum capture groups! Attempted to pad " + str(args.zero_pad) + " digits with "
                         + str(file_count) + " files and " + str(expected_bands) + " bands (up to " + str(max_groups)
                         + " capture groups possible, try at least " + str(auto_zero_pad) + " digits to zero pad)")

if args.force is False:
    print("Input dir: " + str(file_dir) + " (" + str(file_count) + " files)")
    print("Output folder: " + str(output_dir))
    if args.replace_filename:
        print("Replacing all basic filenames with: " + args.replace_filename)
    else:
        print("Replace whitespace in filenames with: " + replacement_character)
    print("Number of expected bands: " + str(expected_bands))
    if logfile:
        print("Save image processing metadata to: " + logfile)
    confirmation = input("Confirm processing [Y/N]: ")
    if confirmation.lower() != "y":
        sys.exit()

no_exif_n = 0

images = []

print("Indexing images ...")

# for filename in os.listdir(file_dir):  # swap with this for no tqdm
for filename in tqdm(os.listdir(file_dir)):
    old_path = os.path.join(file_dir, filename)
    file_name, file_ext = os.path.splitext(filename)
    image_entry = {"name": filename, "valid": True, "band": "-", "ID": "-", "group": 0, "DateTime": "-", "error": "-"}  # dashes to ensure CSV exports properly, can be blank
    try:
        img = Image.open(old_path)
    except PIL.UnidentifiedImageError as img_err:
        # if it tries importing a file it can't read as an image
        no_exif_n += 1
        if logfile:
            image_entry["valid"] = False
            image_entry["error"] = "Not readable as image: " + str(img_err)
            images.append(image_entry)
        continue
    for key, val in img.getexif().items():
        if key in ExifTags.TAGS:
            if ExifTags.TAGS[key] == "XMLPacket":
                # find bandname inside the raw XMP packet
                bandname_start = val.find(b'<Camera:BandName>')
                bandname_end = val.find(b'</Camera:BandName>')
                bandname_coded = val[(bandname_start + 17):bandname_end]  # 17 == len('<Camera:BandName>')
                bandname = bandname_coded.decode("UTF-8")
                image_entry["band"] = str(bandname)
                # find capture ID
                image_entry["ID"] = re.findall('CaptureUUID="([^"]*)"', str(val))[0]
            if ExifTags.TAGS[key] == "DateTime":
                image_entry["DateTime"] = str(val)
    # str.replace returns a new string; the original discarded the result,
    # so band names containing spaces were never sanitized
    image_entry["band"] = image_entry["band"].replace(" ", "-")
    if (file_ext.lower() in [".jpg", ".jpeg"]) and (image_entry["band"] == "-"):  # hack for DJI RGB jpgs
        no_exif_n += 1  # this is just to keep a separate invalid message; remove this branch and the jpgs should be handled by the "no capture ID" case
        image_entry["valid"] = False
        image_entry["error"] = "RGB jpg, not counting for multispec processing"
    elif len(image_entry["band"]) >= 99:  # if it's too long, wrong value (RGB pic has none)
        # no exif present
        no_exif_n += 1
        image_entry["valid"] = False
        image_entry["error"] = "Image band name appears to be too long"
    elif image_entry["ID"] in ("", "-") and expected_bands > 1:
        # "-" is the placeholder meaning "no CaptureUUID found"; the original
        # compared against "" only, so this branch could never trigger
        no_exif_n += 1
        image_entry["valid"] = False
        image_entry["error"] = "No Capture ID found"
    images.append(image_entry)

print(str(no_exif_n) + " files were not multispectral images")
no_matching_bands_n = 0
new_capture_id = 1
capture_ids = {}

# Sort by timestamp (then name) so sequential group numbers follow capture order
images = sorted(images, key=lambda img: (img["DateTime"], img["name"]))

# now identify complete capture groups and assign group numbers
if not args.no_grouping:
    # Count images per capture ID once, instead of rescanning the whole list
    # for every image (the original was O(n^2))
    id_counts = {}
    for entry in images:
        id_counts[entry["ID"]] = id_counts.get(entry["ID"], 0) + 1
    # for this_img in images:  # swap with this for no tqdm
    for this_img in tqdm(images):
        if not this_img["valid"]:  # prefiltered in last loop
            continue
        if id_counts[this_img["ID"]] != expected_bands:
            no_matching_bands_n += 1
            this_img["valid"] = False
            this_img["error"] = "Capture ID has too few/too many bands"
        else:
            if this_img["ID"] not in capture_ids:
                capture_ids[this_img["ID"]] = new_capture_id
                new_capture_id += 1
            this_img["group"] = capture_ids[this_img["ID"]]
    print(str(no_matching_bands_n) + " images had unexpected bands in same capture")

os.makedirs(output_valid, exist_ok=True)
os.makedirs(output_invalid, exist_ok=True)

identifier = ""

# then do the actual copy
# for this_img in images:  # swap with this for no tqdm
for this_img in tqdm(images):
    old_path = os.path.join(file_dir, this_img["name"])
    file_name, file_ext = os.path.splitext(this_img["name"])

    if args.whitespace_replace:
        file_name = replacement_character.join(file_name.split())
    if args.replace_filename and not args.no_grouping:
        file_name = args.replace_filename

    if this_img["valid"]:
        prefix = output_valid
        if args.no_grouping:
            file_name_full = file_name + "-" + this_img["band"] + file_ext
        else:
            # set the group identifier based on args
            if args.sequential:
                if args.zero_pad == 0:
                    identifier = str(this_img["group"])
                elif args.zero_pad == -1:
                    identifier = str(this_img["group"]).zfill(auto_zero_pad)
                else:
                    identifier = str(this_img["group"]).zfill(args.zero_pad)
            else:
                identifier = this_img["ID"]
            file_name_full = identifier + "-" + file_name + "-" + this_img["band"] + file_ext
    else:
        prefix = output_invalid
        file_name_full = file_name + file_ext
    new_path = os.path.join(prefix, file_name_full)
    shutil.copy(old_path, new_path)

# Export per-image metadata as CSV (guarded against an empty image list,
# which crashed the original on images[0])
if logfile and images:
    header = images[0].keys()
    with open(logfile, 'w', newline='') as logfile_handle:
        dict_writer = csv.DictWriter(logfile_handle, header)
        dict_writer.writeheader()
        dict_writer.writerows(images)

print("Done!")
|
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2022 APPF-ANU
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -0,0 +1,9 @@
|
|||
# Semi-automated Masking for 360 images
|
||||
|
||||
For usage with 360 images and Open Drone Map (ODM) to mask out the tripod/user/camera mount. 360 models in ODM can be made from 360 images, but unless you mask out the camera mount there will be repeated artifacts along the camera path. ODM supports image masking but requires a mask for each image. Since the 360 camera is generally on a fixed mount (bike helmet, moving tripod, etc), you can make one mask and then duplicate this for all images, but this is tedious to do by hand.
|
||||
|
||||
This snippet takes the file path of a single image mask and duplicates it for all images in the dataset. After creating the masks, process the original images and the masks together in ODM, and you'll get a clean model with the camera-mount artifacts eliminated.
|
||||
|
||||
Before using this code snippet, open one of your 360 images in an image editor and mask out the helmet or tripod, etc at the bottom of your image. Save this image as a png and then use it as the mask image that will be duplicated for all images in the dataset.
|
||||
|
||||
See https://docs.opendronemap.org/masks/ for more details on mask creation.
|
|
@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env python3
# Duplicates a single mask image for every image in a dataset, so ODM can
# mask out a fixed camera mount (tripod, helmet, ...) in 360 captures.
# See https://docs.opendronemap.org/masks/ for how ODM consumes masks.
import sys
import os

import PIL

from PIL import Image, ExifTags

import shutil

from tqdm import tqdm

import argparse
parser = argparse.ArgumentParser()

# Usage:
# python exif_renamer.py <path to folder of images to rename> <output folder>

parser.add_argument("file_dir", help="input folder of images")
parser.add_argument("output_dir", help="output folder to copy images to")
parser.add_argument("mask_file", help="filename or path to Mask file to be duplicated for all images")
parser.add_argument("-f", "--force", help="don't ask for confirmation", action="store_true")

args = parser.parse_args()

file_dir = args.file_dir
mask_file_path = args.mask_file
output_dir = args.output_dir

file_count = len(os.listdir(file_dir))

if args.force is False:
    # The file count describes the input dir (the original printed it on the
    # "Output folder" line)
    print("Input dir: " + str(file_dir) + " (" + str(file_count) + " files)")
    print("Output folder: " + str(output_dir))
    confirmation = input("Confirm processing [Y/N]: ")
    if confirmation.lower() != "y":
        sys.exit()

os.makedirs(output_dir, exist_ok=True)

# Validate the mask once, outside the copy loop: the mask path never changes,
# so the original's per-iteration Image.open() repeated the exact same check
# for every file in the input dir (and silently produced no output on failure)
try:
    Image.open(mask_file_path)
except PIL.UnidentifiedImageError as img_err:
    sys.stderr.write(str(img_err) + "\n")
    sys.exit(1)

# Uses tqdm() for the progress bar, if not needed swap with
# for filename in os.listdir(file_dir):
for filename in tqdm(os.listdir(file_dir)):
    file_name, file_ext = os.path.splitext(filename)
    # ODM looks for "<image name>_mask.<ext>" next to each image
    new_path = os.path.join(output_dir, file_name + "_mask" + file_ext)
    shutil.copy(mask_file_path, new_path)
print("Done!")
|
|
@ -0,0 +1,29 @@
|
|||
# Merge Preview
|
||||
|
||||
Quickly projects drone images on a map by using georeferencing, camera angles and a global DTM. The images are then merged using ODM's split-merge algorithms.
|
||||
|
||||
Quality is obviously not good, works only for nadir-only images and requires the images to have gimbal/camera angle information (not all drones provide this information).
|
||||
|
||||
Usage:
|
||||
|
||||
```
|
||||
# Install DDB (required for geoprojection)
|
||||
|
||||
curl -fsSL https://get.dronedb.app -o get-ddb.sh
|
||||
sh get-ddb.sh
|
||||
|
||||
# Run
|
||||
|
||||
python3 mergepreview.py -i images/*.JPG --size 25%
|
||||
```
|
||||
|
||||
## Example
|
||||
|
||||
![screen](https://user-images.githubusercontent.com/1951843/134249725-e178489a-e271-4244-abed-e624cd510b88.png)
|
||||
|
||||
|
||||
[Sheffield Park](https://community.opendronemap.org/t/sheffield-park-1/58) images processed with this script.
|
||||
|
||||
## Disclaimer
|
||||
|
||||
This script is highly experimental. We welcome contributions to improve it.
|
|
@ -0,0 +1,126 @@
|
|||
import argparse
import sys
sys.path.append("../../")

import os
from opendm import orthophoto
from opendm.cutline import compute_cutline
import glob
from opendm.system import run
from opendm import log
import shutil

# Quick Merge Preview: roughly projects drone images onto a map using ddb's
# geoprojection, then merges them with ODM's cutline/feathering pipeline.

parser = argparse.ArgumentParser(description='Quick Merge Preview')
parser.add_argument('input',
                    metavar='<paths>',
                    nargs='+',
                    help='Path to input images or image folder')
parser.add_argument('--size', '-s',
                    metavar='<percentage>',
                    type=str,
                    help='Size in percentage terms',
                    default='25%')
parser.add_argument('--force', '-f',
                    action='store_true',
                    default=False,
                    help="Force remove existing directories")

args = parser.parse_args()

try:
    log.ODM_INFO("Checking for DDB...")
    run("ddb --version")
except Exception:
    # Narrower than the original bare `except:`, and abort instead of
    # continuing: every later step shells out to ddb, so the original would
    # log this error and then fail later with confusing messages.
    log.ODM_ERROR("ddb is not installed. Install it first: https://docs.dronedb.app")
    exit(1)

# A single directory argument means "use every image inside it"
if len(args.input) == 1 and os.path.isdir(args.input[0]):
    input_images = []
    for ext in ["JPG", "JPEG", "TIF", "tiff", "tif", "TIFF"]:
        input_images += glob.glob(os.path.join(args.input[0], "*.%s" % ext))
else:
    input_images = args.input

log.ODM_INFO("Processing %s images" % len(input_images))

if len(input_images) == 0:
    log.ODM_ERROR("No images")
    exit(1)

cwd_path = os.path.dirname(input_images[0])
tmp_path = os.path.join(cwd_path, "tmp")
if os.path.isdir(tmp_path):
    if args.force:
        log.ODM_INFO("Removing previous directory %s" % tmp_path)
        shutil.rmtree(tmp_path)
    else:
        log.ODM_ERROR("%s exists. Pass --force to override." % tmp_path)
        exit(1)

os.makedirs(tmp_path)

# Project each image to the ground and compute its footprint bounds
for f in input_images:
    name, _ = os.path.splitext(os.path.basename(f))
    geojson = os.path.join(tmp_path, "%s.geojson" % name)
    gpkg = os.path.join(tmp_path, "%s.gpkg" % name)

    run("ddb geoproj \"%s\" \"%s\" -s \"%s\"" % (tmp_path, f, args.size))

    # Bounds (GPKG)
    run("ddb info --format geojson --geometry polygon \"%s\" > \"%s\"" % (f, geojson))
    run("ogr2ogr \"%s\" \"%s\"" % (gpkg, geojson))

log.ODM_INFO("Computing cutlines")

projected_images = glob.glob(os.path.join(tmp_path, "*.tif"))
all_orthos_and_ortho_cuts = []

for f in projected_images:
    name, _ = os.path.splitext(os.path.basename(f))
    cutline_file = os.path.join(tmp_path, "%s_cutline.gpkg" % name)
    bounds_file_path = os.path.join(tmp_path, "%s.gpkg" % name)

    # Cutline: the seam along which overlapping projections are blended
    compute_cutline(f,
                    bounds_file_path,
                    cutline_file,
                    4,
                    scale=1)

    cut_raster = os.path.join(tmp_path, "%s_cut.tif" % name)
    orthophoto.compute_mask_raster(f, cutline_file,
                                   cut_raster,
                                   blend_distance=20, only_max_coords_feature=True)

    feathered_raster = os.path.join(tmp_path, "%s_feathered.tif" % name)

    orthophoto.feather_raster(f, feathered_raster,
                              blend_distance=20
                              )

    all_orthos_and_ortho_cuts.append([feathered_raster, cut_raster])

log.ODM_INFO("Merging...")

if len(all_orthos_and_ortho_cuts) > 1:
    # TODO: histogram matching via rasterio
    # currently parts have different color tones
    output_file = os.path.join(cwd_path, 'mergepreview.tif')

    if os.path.isfile(output_file):
        os.remove(output_file)

    orthophoto.merge(all_orthos_and_ortho_cuts, output_file, {
        'TILED': 'YES',
        'COMPRESS': 'LZW',
        'PREDICTOR': '2',
        'BIGTIFF': 'IF_SAFER',
        'BLOCKXSIZE': 512,
        'BLOCKYSIZE': 512
    })

    log.ODM_INFO("Wrote %s" % output_file)
    shutil.rmtree(tmp_path)
else:
    log.ODM_ERROR("Error: no orthos found to merge")
    exit(1)
|
|
@ -0,0 +1,112 @@
|
|||
#!/usr/bin/env python3
|
||||
# A script to calculate agricultural indices
|
||||
# NDVI - Normalized Difference Vegetation Index - (NIR−RED)/(NIR + RED)
|
||||
# NDRE - Normalized Difference Red Edge - (NIR−RE)/(NIR + RE)
|
||||
# GNDVI - Green NDVI - (NIR−GREEN)/(NIR + GREEN)
|
||||
# https://support.micasense.com/hc/en-us/articles/226531127-Creating-agricultural-indices-NDVI-NDRE-in-QGIS-
|
||||
# requires python-gdal
|
||||
|
||||
import numpy
|
||||
import argparse
|
||||
import os.path
|
||||
try:
|
||||
from osgeo import gdal
|
||||
from osgeo import osr
|
||||
except ImportError:
|
||||
raise ImportError("You need to install python-gdal : \
|
||||
run `sudo apt-get install libgdal-dev` \
|
||||
# Check Gdal version with \
|
||||
gdal-config --version \
|
||||
#install corresponding gdal version with pip : \
|
||||
pip3 install GDAL==2.4.0")
|
||||
|
||||
|
||||
def parse_args():
    """Parse command line arguments for the agricultural-indices tool.

    Returns the argparse namespace with the input/output GeoTiff handles and
    the 1-based band numbers. Note: the original passed the description as
    the ArgumentParser's first positional argument, which argparse interprets
    as the program name (``prog``); ``description=`` fixes the ``-h`` output.
    """
    argument_parser = argparse.ArgumentParser(description='Create from a multispectral orthophoto \
        a Geotif with NDVI, NDRE and GNDVI agricultural indices')

    argument_parser.add_argument("orthophoto", metavar="<orthophoto.tif>",
                                 type=argparse.FileType('r'),
                                 help="The CIR orthophoto. Must be a GeoTiff.")
    argument_parser.add_argument("-red", type=int,
                                 help="Red band number")
    argument_parser.add_argument("-green", type=int,
                                 help="Green band number")
    argument_parser.add_argument("-blue", type=int,
                                 help="Blue band number")
    argument_parser.add_argument("-re", type=int,
                                 help="RedEdge band number")
    argument_parser.add_argument("-nir", type=int,
                                 help="NIR band number")
    argument_parser.add_argument("out", metavar="<outfile.tif>",
                                 type=argparse.FileType('w'),
                                 help="The output file.")
    argument_parser.add_argument("--overwrite", "-o",
                                 action='store_true',
                                 default=False,
                                 help="Will overwrite output file if it exists. ")
    return argument_parser.parse_args()
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # Suppress/hide warnings when dividing by zero: the index denominators
    # (e.g. NIR + RED) can be zero, producing NaN/inf pixels by design
    numpy.seterr(divide='ignore', invalid='ignore')

    rootdir = os.path.dirname(os.path.abspath(__file__))

    # Parse args
    args = parse_args()

    # Refuse to clobber an existing output unless -o/--overwrite was given
    if not args.overwrite and os.path.isfile(os.path.join(rootdir, args.out.name)):
        print("File exists, rename or use -o to overwrite.")
        exit()

    # import raster
    print("Reading file")
    raster = gdal.Open(args.orthophoto.name)
    orthophoto = raster.ReadAsArray()

    # parse out bands; GDAL band numbers are 1-based, the numpy array 0-based.
    # (The redundant per-expression .astype(float) calls were dropped: the
    # matrices are already float here.)
    print("Reading rasters")
    red_matrix = orthophoto[args.red - 1].astype(float)
    green_matrix = orthophoto[args.green - 1].astype(float)
    re_matrix = orthophoto[args.re - 1].astype(float)
    nir_matrix = orthophoto[args.nir - 1].astype(float)
    # NOTE: the blue band is not used by any computed index, so it is no
    # longer read (the original crashed with a TypeError if -blue was omitted)

    outfile = args.out

    # NDVI - Normalized Difference Vegetation Index - (NIR-RED)/(NIR+RED)
    print("Computing NDVI")
    ndvi = (nir_matrix - red_matrix) / (nir_matrix + red_matrix)

    # NDRE - Normalized Difference Red Edge - (NIR-RE)/(NIR+RE)
    print("Computing NDRE")
    ndre = (nir_matrix - re_matrix) / (nir_matrix + re_matrix)

    # GNDVI - Green NDVI - (NIR-GREEN)/(NIR+GREEN)
    print("Computing GNDVI")
    gndvi = (nir_matrix - green_matrix) / (nir_matrix + green_matrix)

    # (A leftover `__import__("IPython").embed()` debugging breakpoint was
    # removed here: it halted the script with an interactive shell.)

    print("Saving Files")
    # export rasters: one single-band Float32 GeoTiff per index, named
    # "<index>_<outfile>", georeferenced like the input raster
    for name, matrix in zip(['ndvi', 'ndre', 'gndvi'], [ndvi, ndre, gndvi]):
        print(name)
        # Create() returns a gdal Dataset (the original misleadingly named
        # it out_driver)
        out_raster = gdal.GetDriverByName('GTiff')\
            .Create(name + '_' + outfile.name, int(ndvi.shape[1]), int(ndvi.shape[0]), 1, gdal.GDT_Float32)
        outband = out_raster.GetRasterBand(1)
        outband.SetDescription(name.capitalize())
        outband.WriteArray(matrix)
        outcrs = osr.SpatialReference()
        outcrs.ImportFromWkt(raster.GetProjectionRef())
        out_raster.SetProjection(outcrs.ExportToWkt())
        out_raster.SetGeoTransform(raster.GetGeoTransform())
        outband.FlushCache()
|
||||
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
#!/usr/bin/env python3
|
||||
# A script to rename, in place, the band layers of a Sentera AGX710 GeoTIFF orthophoto.
|
||||
# requires python-gdal
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
try:
|
||||
from osgeo import gdal
|
||||
except ImportError:
|
||||
raise ImportError("You need to install python-gdal : \
|
||||
run `sudo apt-get install libgdal-dev` \
|
||||
# Check Gdal version with \
|
||||
gdal-config --version \
|
||||
#install corresponding gdal version with pip : \
|
||||
pip3 install GDAL==2.4.0")
|
||||
|
||||
def parse_args():
    """Parse command line arguments: the single input GeoTiff path.

    Note: the original passed its text as the ArgumentParser's first
    positional argument, which argparse interprets as the program name
    (``prog``); ``description=`` fixes the ``-h`` output.
    """
    argument_parser = argparse.ArgumentParser(
        description="A script that renames in place the band layers of a Sentera AGX710 Geotiff orthophoto.")
    argument_parser.add_argument("orthophoto", metavar="<orthophoto.tif>",
                                 type=argparse.FileType('r'),
                                 help="The input orthophoto. Must be a GeoTiff.")
    return argument_parser.parse_args()
|
||||
|
||||
|
||||
def rename_sentera_agx710_layers(name):
    """Rename in place the band layers of a Sentera AGX710 GeoTiff built with ODM.

    Operates on the module-level ``raster`` GDAL dataset, which the caller
    must have opened in update mode; ``name`` is only used in messages.
    Prompts for confirmation before renaming, and exits the script if the
    bands are already named (i.e. the file does not need renaming).
    """
    if raster.RasterCount != 7:
        # Malformed input data is a ValueError — the original raised
        # ImportError, which signals a failed module import
        raise ValueError(F'File {name} does not have 7 layers as a regular '
                         'Geotif built from Sentera AGX710 images with ODM')

    if 'RedGreenBlue' in raster.GetRasterBand(1).GetDescription() and \
            'RedEdgeGarbageNIR' in raster.GetRasterBand(2).GetDescription():

        # (The messages below had garbled grammar and stray indentation baked
        # into the strings by backslash line continuations; both are fixed.)
        print("Sentera AGX710 Geotiff file has been detected. Layer names are:")
        print("RedGreenBlue for Band 1\nRedEdgeGarbageNIR for Band 2"
              "\nNone for Band 3\nNone for Band 4\nNone for Band 5\nNone for Band 6")
        print("\nAfter renaming bands will be:")
        print("Red for Band 1\nGreen for Band 2\nBlue for Band 3\n"
              "RedEdge for Band 4\nGarbage for Band 5\nNIR for Band 6")

        answer = input(
            "Are you sure you want to rename the layers of the input file ? [yes/no] ")
        if answer == 'yes':
            raster.GetRasterBand(1).SetDescription('Red')
            raster.GetRasterBand(2).SetDescription('Green')
            raster.GetRasterBand(3).SetDescription('Blue')
            raster.GetRasterBand(4).SetDescription('RedEdge')
            raster.GetRasterBand(5).SetDescription('Garbage')
            raster.GetRasterBand(6).SetDescription('NIR')
            # raster.GetRasterBand(7).SetDescription('Alpha')
        else:
            print("No renaming")
    else:
        print(F'No need for band renaming in {name}')
        sys.exit()
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # Read the orthophoto path from the command line
    parsed = parse_args()

    # Open the raster in update mode so band descriptions can be edited in place
    raster = gdal.Open(parsed.orthophoto.name, gdal.GA_Update)

    # Apply the Sentera AGX710 layer renaming (interactive)
    rename_sentera_agx710_layers(parsed.orthophoto.name)

    # Drop the reference to the dataset, which makes gdal flush and save it
    raster = None
|
|
@ -0,0 +1,69 @@
|
|||
# Orthorectification Tool
|
||||
|
||||
![image](https://user-images.githubusercontent.com/1951843/111536715-fc91c380-8740-11eb-844c-5b7960186391.png)
|
||||
|
||||
This tool is capable of orthorectifying individual images (or all images) from an existing ODM reconstruction.
|
||||
|
||||
![image](https://user-images.githubusercontent.com/1951843/111529183-3ad6b500-8738-11eb-9960-b1aa676f863b.png)
|
||||
|
||||
## Usage
|
||||
|
||||
After running a reconstruction using ODM:
|
||||
|
||||
```
|
||||
docker run -ti --rm -v /home/youruser/datasets:/datasets opendronemap/odm --project-path /datasets project
|
||||
```
|
||||
|
||||
You can run the orthorectification module by running:
|
||||
|
||||
```
|
||||
docker run -ti --rm -v /home/youruser/datasets:/datasets --entrypoint /code/contrib/orthorectify/run.sh opendronemap/odm /datasets/project
|
||||
```
|
||||
|
||||
This will start the orthorectification process for all images in the dataset. See additional flags you can pass at the end of the command above:
|
||||
|
||||
```
|
||||
usage: orthorectify.py [-h] [--dem DEM] [--no-alpha NO_ALPHA]
|
||||
[--interpolation {nearest,bilinear}]
|
||||
[--outdir OUTDIR] [--image-list IMAGE_LIST]
|
||||
[--images IMAGES] [--threads THREADS]
|
||||
[--skip-visibility-test SKIP_VISIBILITY_TEST]
|
||||
dataset
|
||||
|
||||
Orthorectification Tool
|
||||
|
||||
positional arguments:
|
||||
dataset Path to ODM dataset
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--dem DEM Absolute path to DEM to use to
|
||||
orthorectify images. Default:
|
||||
odm_dem/dsm.tif
|
||||
--no-alpha NO_ALPHA Don't output an alpha channel
|
||||
--interpolation {nearest,bilinear}
|
||||
Type of interpolation to use to sample
|
||||
pixel values.Default: bilinear
|
||||
--outdir OUTDIR Output directory where to store results.
|
||||
Default: orthorectified
|
||||
--image-list IMAGE_LIST
|
||||
Path to file that contains the list of
|
||||
image filenames to orthorectify. By
|
||||
default all images in a dataset are
|
||||
processed. Default: img_list.txt
|
||||
--images IMAGES Comma-separated list of filenames to
|
||||
rectify. Use as an alternative to --image-
|
||||
list. Default: process all images.
|
||||
--skip-visibility-test SKIP_VISIBILITY_TEST
|
||||
Skip visibility testing (faster but leaves
|
||||
artifacts due to relief displacement)
|
||||
```
|
||||
|
||||
## Roadmap
|
||||
|
||||
Help us improve this module! We could add:
|
||||
|
||||
- [ ] GPU support for faster processing
|
||||
- [ ] Merging of multiple orthorectified images (blending, filtering, seam leveling)
|
||||
- [ ] Faster visibility test
|
||||
- [ ] Different methods for orthorectification (direct)
|
|
@ -0,0 +1,392 @@
|
|||
#!/usr/bin/env python3
# Author: Piero Toffanin
# License: AGPLv3

# Orthorectification tool — CLI parsing and input validation: resolves the
# DEM, output directory and image list relative to the ODM dataset path, and
# aborts early if the DEM is missing.

import os
import sys
# Make the ODM tree importable when running from contrib/orthorectify
sys.path.insert(0, os.path.join("..", "..", os.path.dirname(__file__)))

from math import sqrt
import rasterio
import numpy as np
import numpy.ma as ma
import multiprocessing
import argparse
import functools
from skimage.draw import line
from opensfm import dataset

# Defaults are interpreted relative to the dataset path (see below)
default_dem_path = "odm_dem/dsm.tif"
default_outdir = "orthorectified"
default_image_list = "img_list.txt"

parser = argparse.ArgumentParser(description='Orthorectification Tool')
parser.add_argument('dataset',
                    type=str,
                    help='Path to ODM dataset')
parser.add_argument('--dem',
                    type=str,
                    default=default_dem_path,
                    help='Absolute path to DEM to use to orthorectify images. Default: %(default)s')
# NOTE(review): type=bool makes any non-empty value (including "False")
# truthy — `--no-alpha False` enables the flag. action='store_true' would be
# the usual fix, but it changes the documented CLI syntax; confirm callers
# (e.g. run.sh) before changing.
parser.add_argument('--no-alpha',
                    type=bool,
                    help="Don't output an alpha channel")
parser.add_argument('--interpolation',
                    type=str,
                    choices=('nearest', 'bilinear'),
                    default='bilinear',
                    help="Type of interpolation to use to sample pixel values.Default: %(default)s")
parser.add_argument('--outdir',
                    type=str,
                    default=default_outdir,
                    help="Output directory where to store results. Default: %(default)s")
parser.add_argument('--image-list',
                    type=str,
                    default=default_image_list,
                    help="Path to file that contains the list of image filenames to orthorectify. By default all images in a dataset are processed. Default: %(default)s")
parser.add_argument('--images',
                    type=str,
                    default="",
                    help="Comma-separated list of filenames to rectify. Use as an alternative to --image-list. Default: process all images.")
parser.add_argument('--threads',
                    type=int,
                    default=multiprocessing.cpu_count(),
                    help="Number of CPU processes to use. Default: %(default)s")
# NOTE(review): same type=bool pitfall as --no-alpha above
parser.add_argument('--skip-visibility-test',
                    type=bool,
                    help="Skip visibility testing (faster but leaves artifacts due to relief displacement)")
args = parser.parse_args()

dataset_path = args.dataset
# When the user left a default untouched, resolve it inside the dataset;
# otherwise use the path exactly as given
dem_path = os.path.join(dataset_path, default_dem_path) if args.dem == default_dem_path else args.dem
interpolation = args.interpolation
with_alpha = not args.no_alpha
image_list = os.path.join(dataset_path, default_image_list) if args.image_list == default_image_list else args.image_list

cwd_path = os.path.join(dataset_path, default_outdir) if args.outdir == default_outdir else args.outdir

if not os.path.exists(cwd_path):
    os.makedirs(cwd_path)

target_images = [] # all

# --images (explicit list) takes precedence over --image-list (file)
if args.images:
    target_images = list(map(str.strip, args.images.split(",")))
    print("Processing %s images" % len(target_images))
elif args.image_list:
    with open(image_list) as f:
        # keep non-empty, whitespace-trimmed lines
        target_images = list(filter(lambda filename: filename != '', map(str.strip, f.read().split("\n"))))
    print("Processing %s images" % len(target_images))

if not os.path.exists(dem_path):
    print("Whoops! %s does not exist. Provide a path to a valid DEM" % dem_path)
    exit(1)
|
||||
|
||||
|
||||
def bilinear_interpolate(im, x, y):
|
||||
x = np.asarray(x)
|
||||
y = np.asarray(y)
|
||||
|
||||
x0 = np.floor(x).astype(int)
|
||||
x1 = x0 + 1
|
||||
y0 = np.floor(y).astype(int)
|
||||
y1 = y0 + 1
|
||||
|
||||
x0 = np.clip(x0, 0, im.shape[1]-1)
|
||||
x1 = np.clip(x1, 0, im.shape[1]-1)
|
||||
y0 = np.clip(y0, 0, im.shape[0]-1)
|
||||
y1 = np.clip(y1, 0, im.shape[0]-1)
|
||||
|
||||
Ia = im[ y0, x0 ]
|
||||
Ib = im[ y1, x0 ]
|
||||
Ic = im[ y0, x1 ]
|
||||
Id = im[ y1, x1 ]
|
||||
|
||||
wa = (x1-x) * (y1-y)
|
||||
wb = (x1-x) * (y-y0)
|
||||
wc = (x-x0) * (y1-y)
|
||||
wd = (x-x0) * (y-y0)
|
||||
|
||||
return wa*Ia + wb*Ib + wc*Ic + wd*Id
|
||||
|
||||
# Read DEM
|
||||
print("Reading DEM: %s" % dem_path)
|
||||
with rasterio.open(dem_path) as dem_raster:
|
||||
dem = dem_raster.read()[0]
|
||||
dem_has_nodata = dem_raster.profile.get('nodata') is not None
|
||||
|
||||
if dem_has_nodata:
|
||||
m = ma.array(dem, mask=dem==dem_raster.nodata)
|
||||
dem_min_value = m.min()
|
||||
dem_max_value = m.max()
|
||||
else:
|
||||
dem_min_value = dem.min()
|
||||
dem_max_value = dem.max()
|
||||
|
||||
print("DEM Minimum: %s" % dem_min_value)
|
||||
print("DEM Maximum: %s" % dem_max_value)
|
||||
|
||||
h, w = dem.shape
|
||||
|
||||
crs = dem_raster.profile.get('crs')
|
||||
dem_offset_x, dem_offset_y = (0, 0)
|
||||
|
||||
if crs:
|
||||
print("DEM has a CRS: %s" % str(crs))
|
||||
|
||||
# Read coords.txt
|
||||
coords_file = os.path.join(dataset_path, "odm_georeferencing", "coords.txt")
|
||||
if not os.path.exists(coords_file):
|
||||
print("Whoops! Cannot find %s (we need that!)" % coords_file)
|
||||
exit(1)
|
||||
|
||||
with open(coords_file) as f:
|
||||
l = f.readline() # discard
|
||||
|
||||
# second line is a northing/easting offset
|
||||
l = f.readline().rstrip()
|
||||
dem_offset_x, dem_offset_y = map(float, l.split(" "))
|
||||
|
||||
print("DEM offset: (%s, %s)" % (dem_offset_x, dem_offset_y))
|
||||
|
||||
print("DEM dimensions: %sx%s pixels" % (w, h))
|
||||
|
||||
# Read reconstruction
|
||||
udata = dataset.UndistortedDataSet(dataset.DataSet(os.path.join(dataset_path, "opensfm")), undistorted_data_path=os.path.join(dataset_path, "opensfm", "undistorted"))
|
||||
reconstructions = udata.load_undistorted_reconstruction()
|
||||
if len(reconstructions) == 0:
|
||||
raise Exception("No reconstructions available")
|
||||
|
||||
max_workers = args.threads
|
||||
print("Using %s threads" % max_workers)
|
||||
|
||||
reconstruction = reconstructions[0]
|
||||
for shot in reconstruction.shots.values():
|
||||
if len(target_images) == 0 or shot.id in target_images:
|
||||
|
||||
print("Processing %s..." % shot.id)
|
||||
shot_image = udata.load_undistorted_image(shot.id)
|
||||
|
||||
r = shot.pose.get_rotation_matrix()
|
||||
Xs, Ys, Zs = shot.pose.get_origin()
|
||||
cam_grid_y, cam_grid_x = dem_raster.index(Xs + dem_offset_x, Ys + dem_offset_y)
|
||||
|
||||
a1 = r[0][0]
|
||||
b1 = r[0][1]
|
||||
c1 = r[0][2]
|
||||
a2 = r[1][0]
|
||||
b2 = r[1][1]
|
||||
c2 = r[1][2]
|
||||
a3 = r[2][0]
|
||||
b3 = r[2][1]
|
||||
c3 = r[2][2]
|
||||
|
||||
if not args.skip_visibility_test:
|
||||
distance_map = np.full((h, w), np.nan)
|
||||
|
||||
for j in range(0, h):
|
||||
for i in range(0, w):
|
||||
distance_map[j][i] = sqrt((cam_grid_x - i) ** 2 + (cam_grid_y - j) ** 2)
|
||||
distance_map[distance_map==0] = 1e-7
|
||||
|
||||
print("Camera pose: (%f, %f, %f)" % (Xs, Ys, Zs))
|
||||
|
||||
img_h, img_w, num_bands = shot_image.shape
|
||||
half_img_w = (img_w - 1) / 2.0
|
||||
half_img_h = (img_h - 1) / 2.0
|
||||
print("Image dimensions: %sx%s pixels" % (img_w, img_h))
|
||||
f = shot.camera.focal * max(img_h, img_w)
|
||||
has_nodata = dem_raster.profile.get('nodata') is not None
|
||||
|
||||
def process_pixels(step):
|
||||
imgout = np.full((num_bands, dem_bbox_h, dem_bbox_w), np.nan)
|
||||
|
||||
minx = dem_bbox_w
|
||||
miny = dem_bbox_h
|
||||
maxx = 0
|
||||
maxy = 0
|
||||
|
||||
for j in range(dem_bbox_miny, dem_bbox_maxy + 1):
|
||||
if j % max_workers == step:
|
||||
im_j = j - dem_bbox_miny
|
||||
|
||||
for i in range(dem_bbox_minx, dem_bbox_maxx + 1):
|
||||
im_i = i - dem_bbox_minx
|
||||
|
||||
# World coordinates
|
||||
Za = dem[j][i]
|
||||
|
||||
# Skip nodata
|
||||
if has_nodata and Za == dem_raster.nodata:
|
||||
continue
|
||||
|
||||
Xa, Ya = dem_raster.xy(j, i)
|
||||
|
||||
# Remove offset (our cameras don't have the geographic offset)
|
||||
Xa -= dem_offset_x
|
||||
Ya -= dem_offset_y
|
||||
|
||||
# Colinearity function http://web.pdx.edu/~jduh/courses/geog493f14/Week03.pdf
|
||||
dx = (Xa - Xs)
|
||||
dy = (Ya - Ys)
|
||||
dz = (Za - Zs)
|
||||
|
||||
den = a3 * dx + b3 * dy + c3 * dz
|
||||
x = half_img_w - (f * (a1 * dx + b1 * dy + c1 * dz) / den)
|
||||
y = half_img_h - (f * (a2 * dx + b2 * dy + c2 * dz) / den)
|
||||
|
||||
if x >= 0 and y >= 0 and x <= img_w - 1 and y <= img_h - 1:
|
||||
# Visibility test
|
||||
if not args.skip_visibility_test:
|
||||
check_dem_points = np.column_stack(line(i, j, cam_grid_x, cam_grid_y))
|
||||
check_dem_points = check_dem_points[np.all(np.logical_and(np.array([0, 0]) <= check_dem_points, check_dem_points < [w, h]), axis=1)]
|
||||
|
||||
visible = True
|
||||
for p in check_dem_points:
|
||||
ray_z = Zs + (distance_map[p[1]][p[0]] / distance_map[j][i]) * dz
|
||||
if ray_z > dem_max_value:
|
||||
break
|
||||
|
||||
if dem[p[1]][p[0]] > ray_z:
|
||||
visible = False
|
||||
break
|
||||
if not visible:
|
||||
continue
|
||||
|
||||
if interpolation == 'bilinear':
|
||||
xi = img_w - 1 - x
|
||||
yi = img_h - 1 - y
|
||||
values = bilinear_interpolate(shot_image, xi, yi)
|
||||
else:
|
||||
# nearest
|
||||
xi = img_w - 1 - int(round(x))
|
||||
yi = img_h - 1 - int(round(y))
|
||||
values = shot_image[yi][xi]
|
||||
|
||||
# We don't consider all zero values (pure black)
|
||||
# to be valid sample values. This will sometimes miss
|
||||
# valid sample values.
|
||||
|
||||
if not np.all(values == 0):
|
||||
minx = min(minx, im_i)
|
||||
miny = min(miny, im_j)
|
||||
maxx = max(maxx, im_i)
|
||||
maxy = max(maxy, im_j)
|
||||
|
||||
for b in range(num_bands):
|
||||
imgout[b][im_j][im_i] = values[b]
|
||||
|
||||
# for b in range(num_bands):
|
||||
# minx = min(minx, im_i)
|
||||
# miny = min(miny, im_j)
|
||||
# maxx = max(maxx, im_i)
|
||||
# maxy = max(maxy, im_j)
|
||||
# imgout[b][im_j][im_i] = 255
|
||||
return (imgout, (minx, miny, maxx, maxy))
|
||||
|
||||
# Compute bounding box of image coverage
|
||||
# assuming a flat plane at Z = min Z
|
||||
# (Otherwise we have to scan the entire DEM)
|
||||
# The Xa,Ya equations are just derived from the colinearity equations
|
||||
# solving for Xa and Ya instead of x,y
|
||||
def dem_coordinates(cpx, cpy):
|
||||
"""
|
||||
:param cpx principal point X (image coordinates)
|
||||
:param cpy principal point Y (image coordinates)
|
||||
"""
|
||||
Za = dem_min_value
|
||||
m = (a3*b1*cpy - a1*b3*cpy - (a3*b2 - a2*b3)*cpx - (a2*b1 - a1*b2)*f)
|
||||
Xa = dem_offset_x + (m*Xs + (b3*c1*cpy - b1*c3*cpy - (b3*c2 - b2*c3)*cpx - (b2*c1 - b1*c2)*f)*Za - (b3*c1*cpy - b1*c3*cpy - (b3*c2 - b2*c3)*cpx - (b2*c1 - b1*c2)*f)*Zs)/m
|
||||
Ya = dem_offset_y + (m*Ys - (a3*c1*cpy - a1*c3*cpy - (a3*c2 - a2*c3)*cpx - (a2*c1 - a1*c2)*f)*Za + (a3*c1*cpy - a1*c3*cpy - (a3*c2 - a2*c3)*cpx - (a2*c1 - a1*c2)*f)*Zs)/m
|
||||
|
||||
y, x = dem_raster.index(Xa, Ya)
|
||||
return (x, y)
|
||||
|
||||
dem_ul = dem_coordinates(-(img_w - 1) / 2.0, -(img_h - 1) / 2.0)
|
||||
dem_ur = dem_coordinates((img_w - 1) / 2.0, -(img_h - 1) / 2.0)
|
||||
dem_lr = dem_coordinates((img_w - 1) / 2.0, (img_h - 1) / 2.0)
|
||||
dem_ll = dem_coordinates(-(img_w - 1) / 2.0, (img_h - 1) / 2.0)
|
||||
dem_bbox = [dem_ul, dem_ur, dem_lr, dem_ll]
|
||||
dem_bbox_x = np.array(list(map(lambda xy: xy[0], dem_bbox)))
|
||||
dem_bbox_y = np.array(list(map(lambda xy: xy[1], dem_bbox)))
|
||||
|
||||
dem_bbox_minx = min(w - 1, max(0, dem_bbox_x.min()))
|
||||
dem_bbox_miny = min(h - 1, max(0, dem_bbox_y.min()))
|
||||
dem_bbox_maxx = min(w - 1, max(0, dem_bbox_x.max()))
|
||||
dem_bbox_maxy = min(h - 1, max(0, dem_bbox_y.max()))
|
||||
|
||||
dem_bbox_w = 1 + dem_bbox_maxx - dem_bbox_minx
|
||||
dem_bbox_h = 1 + dem_bbox_maxy - dem_bbox_miny
|
||||
|
||||
print("Iterating over DEM box: [(%s, %s), (%s, %s)] (%sx%s pixels)" % (dem_bbox_minx, dem_bbox_miny, dem_bbox_maxx, dem_bbox_maxy, dem_bbox_w, dem_bbox_h))
|
||||
|
||||
if max_workers > 1:
|
||||
with multiprocessing.Pool(max_workers) as p:
|
||||
results = p.map(process_pixels, range(max_workers))
|
||||
else:
|
||||
results = [process_pixels(0)]
|
||||
|
||||
results = list(filter(lambda r: r[1][0] <= r[1][2] and r[1][1] <= r[1][3], results))
|
||||
|
||||
# Merge image
|
||||
imgout, _ = results[0]
|
||||
|
||||
for j in range(dem_bbox_miny, dem_bbox_maxy + 1):
|
||||
im_j = j - dem_bbox_miny
|
||||
resimg, _ = results[j % max_workers]
|
||||
for b in range(num_bands):
|
||||
imgout[b][im_j] = resimg[b][im_j]
|
||||
|
||||
# Merge bounds
|
||||
minx = dem_bbox_w
|
||||
miny = dem_bbox_h
|
||||
maxx = 0
|
||||
maxy = 0
|
||||
|
||||
for _, bounds in results:
|
||||
minx = min(bounds[0], minx)
|
||||
miny = min(bounds[1], miny)
|
||||
maxx = max(bounds[2], maxx)
|
||||
maxy = max(bounds[3], maxy)
|
||||
|
||||
print("Output bounds: (%s, %s), (%s, %s) pixels" % (minx, miny, maxx, maxy))
|
||||
if minx <= maxx and miny <= maxy:
|
||||
imgout = imgout[:,miny:maxy+1,minx:maxx+1]
|
||||
|
||||
if with_alpha:
|
||||
alpha = np.zeros((imgout.shape[1], imgout.shape[2]), dtype=np.uint8)
|
||||
|
||||
# Set all not-NaN indices to 255
|
||||
alpha[~np.isnan(imgout[0])] = 255
|
||||
|
||||
# Cast
|
||||
imgout = imgout.astype(shot_image.dtype)
|
||||
|
||||
dem_transform = dem_raster.profile['transform']
|
||||
offset_x, offset_y = dem_raster.xy(dem_bbox_miny + miny, dem_bbox_minx + minx, offset='ul')
|
||||
|
||||
profile = {
|
||||
'driver': 'GTiff',
|
||||
'width': imgout.shape[2],
|
||||
'height': imgout.shape[1],
|
||||
'count': num_bands + 1 if with_alpha else num_bands,
|
||||
'dtype': imgout.dtype.name,
|
||||
'transform': rasterio.transform.Affine(dem_transform[0], dem_transform[1], offset_x,
|
||||
dem_transform[3], dem_transform[4], offset_y),
|
||||
'nodata': None,
|
||||
'crs': crs
|
||||
}
|
||||
|
||||
outfile = os.path.join(cwd_path, shot.id)
|
||||
if not outfile.endswith(".tif"):
|
||||
outfile = outfile + ".tif"
|
||||
|
||||
with rasterio.open(outfile, 'w', BIGTIFF="IF_SAFER", **profile) as wout:
|
||||
for b in range(num_bands):
|
||||
wout.write(imgout[b], b + 1)
|
||||
if with_alpha:
|
||||
wout.write(alpha, num_bands + 1)
|
||||
|
||||
print("Wrote %s" % outfile)
|
||||
else:
|
||||
print("Cannot orthorectify image (is the image inside the DEM bounds?)")
|
|
@ -0,0 +1,5 @@
|
|||
#!/bin/bash
|
||||
__dirname=$(cd "$(dirname "$0")"; pwd -P)
|
||||
cd "${__dirname}"
|
||||
|
||||
PYTHONPATH=$PYTHONPATH:/code/SuperBuild/install/bin/opensfm python3 orthorectify.py "$@"
|
|
@ -0,0 +1,44 @@
|
|||
# Point Cloud To DEM
|
||||
|
||||
Convert point clouds (LAS, LAZ, PLY, and any other format compatible with [PDAL](https://pdal.io/stages/readers.html)) to GeoTIFF elevation models.
|
||||
|
||||
![image](https://user-images.githubusercontent.com/1951843/112354653-492a5100-8ca3-11eb-9f21-4dda4cae976f.png)
|
||||
|
||||
This tool includes methods to perform efficient and scalable gapfill interpolation and is the same method used by ODM's processing pipeline. It is offered here as a standalone module for processing individual point clouds.
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
docker run -ti --rm -v /home/youruser/folder_with_point_cloud:/input --entrypoint /code/contrib/pc2dem/pc2dem.py opendronemap/odm /input/point_cloud.las [flags]
|
||||
```
|
||||
|
||||
The result (`dsm.tif` or `dtm.tif`) will be stored in the same folder as the input point cloud. See additional `flags` you can pass at the end of the command above:
|
||||
|
||||
```
|
||||
usage: pc2dem.py [-h] [--type {dsm,dtm}] [--resolution RESOLUTION]
|
||||
[--gapfill-steps GAPFILL_STEPS]
|
||||
point_cloud
|
||||
|
||||
Generate DEMs from point clouds using ODM's algorithm.
|
||||
|
||||
positional arguments:
|
||||
point_cloud Path to point cloud file (.las, .laz,
|
||||
.ply)
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--type {dsm,dtm} Type of DEM. Default: dsm
|
||||
--resolution RESOLUTION
|
||||
Resolution in m/px. Default: 0.05
|
||||
--gapfill-steps GAPFILL_STEPS
|
||||
Number of steps used to fill areas with
|
||||
gaps. Set to 0 to disable gap filling.
|
||||
Starting with a radius equal to the output
|
||||
resolution, N different DEMs are generated
|
||||
with progressively bigger radius using the
|
||||
inverse distance weighted (IDW) algorithm
|
||||
and merged together. Remaining gaps are
|
||||
then merged using nearest neighbor
|
||||
interpolation. Default: 3
|
||||
|
||||
```
|
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python3
|
||||
# Author: Piero Toffanin
|
||||
# License: AGPLv3
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.insert(0, os.path.join("..", "..", os.path.dirname(__file__)))
|
||||
|
||||
import argparse
|
||||
import multiprocessing
|
||||
from opendm.dem import commands
|
||||
|
||||
parser = argparse.ArgumentParser(description='Generate DEMs from point clouds using ODM\'s algorithm.')
|
||||
parser.add_argument('point_cloud',
|
||||
type=str,
|
||||
help='Path to point cloud file (.las, .laz, .ply)')
|
||||
parser.add_argument('--type',
|
||||
type=str,
|
||||
choices=("dsm", "dtm"),
|
||||
default="dsm",
|
||||
help="Type of DEM. Default: %(default)s")
|
||||
parser.add_argument('--resolution',
|
||||
type=float,
|
||||
default=0.05,
|
||||
help='Resolution in m/px. Default: %(default)s')
|
||||
parser.add_argument('--gapfill-steps',
|
||||
default=3,
|
||||
type=int,
|
||||
help='Number of steps used to fill areas with gaps. Set to 0 to disable gap filling. '
|
||||
'Starting with a radius equal to the output resolution, N different DEMs are generated with '
|
||||
'progressively bigger radius using the inverse distance weighted (IDW) algorithm '
|
||||
'and merged together. Remaining gaps are then merged using nearest neighbor interpolation. '
|
||||
'Default: %(default)s')
|
||||
args = parser.parse_args()
|
||||
|
||||
if not os.path.exists(args.point_cloud):
|
||||
print("%s does not exist" % args.point_cloud)
|
||||
exit(1)
|
||||
|
||||
outdir = os.path.dirname(args.point_cloud)
|
||||
|
||||
radius_steps = [args.resolution / 2.0]
|
||||
for _ in range(args.gapfill_steps - 1):
|
||||
radius_steps.append(radius_steps[-1] * 2) # 2 is arbitrary, maybe there's a better value?
|
||||
|
||||
commands.create_dem(args.point_cloud,
|
||||
args.type,
|
||||
output_type='idw' if args.type == 'dtm' else 'max',
|
||||
radiuses=list(map(str, radius_steps)),
|
||||
gapfill=args.gapfill_steps > 0,
|
||||
outdir=outdir,
|
||||
resolution=args.resolution,
|
||||
decimation=1,
|
||||
max_workers=multiprocessing.cpu_count()
|
||||
)
|
|
@ -0,0 +1,16 @@
|
|||
# Resize
|
||||
|
||||
Resize a dataset (and optional GCP file).
|
||||
|
||||
Resizes images, keeps Exif data. The EXIF width and height attributes will be updated accordingly also. ODM GCP files are scaled also.
|
||||
|
||||
Usage:
|
||||
|
||||
```
|
||||
pip install -r requirements.txt
|
||||
python3 resize.py -i images/ -o resized/ 25%
|
||||
python3 resize.py -i images/1.JPG -o resized.JPG 25%
|
||||
python3 resize.py -i gcp_list.txt -o resized_gcp_list.txt
|
||||
```
|
||||
|
||||
Originally forked from https://github.com/pierotofy/exifimageresize
|
|
@ -0,0 +1,2 @@
|
|||
Pillow==8.0.1
|
||||
piexif==1.1.2
|
|
@ -0,0 +1,169 @@
|
|||
import argparse
|
||||
import os
|
||||
import glob
|
||||
import shutil
|
||||
from PIL import Image
|
||||
import piexif
|
||||
import multiprocessing
|
||||
from multiprocessing.pool import ThreadPool
|
||||
import sys
|
||||
sys.path.append("../../")
|
||||
from opendm.gcp import GCPFile
|
||||
|
||||
parser = argparse.ArgumentParser(description='Exif Image Resize')
|
||||
parser.add_argument('--input', '-i',
|
||||
metavar='<path>',
|
||||
required=True,
|
||||
help='Path to input image/GCP or image folder')
|
||||
parser.add_argument('--output', '-o',
|
||||
metavar='<path>',
|
||||
required=True,
|
||||
help='Path to output image/GCP or image folder')
|
||||
parser.add_argument('--force', '-f',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='Overwrite results')
|
||||
parser.add_argument('amount',
|
||||
metavar='<pixel|percentage%>',
|
||||
type=str,
|
||||
help='Pixel of largest side or percentage to resize images by')
|
||||
args = parser.parse_args()
|
||||
|
||||
def die(msg):
|
||||
print(msg)
|
||||
exit(1)
|
||||
|
||||
class nonloc:
|
||||
errors = 0
|
||||
|
||||
def resize_image(image_path, out_path, resize_to, out_path_is_file=False):
|
||||
"""
|
||||
:param image_path: path to the image
|
||||
:param out_path: path to the output directory or file
|
||||
:param resize_to: percentage ("perc%") or pixels
|
||||
"""
|
||||
try:
|
||||
im = Image.open(image_path)
|
||||
path, ext = os.path.splitext(image_path)
|
||||
if out_path_is_file:
|
||||
resized_image_path = out_path
|
||||
else:
|
||||
resized_image_path = os.path.join(out_path, os.path.basename(image_path))
|
||||
|
||||
width, height = im.size
|
||||
max_side = max(width, height)
|
||||
|
||||
if isinstance(resize_to, str) and resize_to.endswith("%"):
|
||||
ratio = float(resize_to[:-1]) / 100.0
|
||||
else:
|
||||
ratio = float(resize_to) / float(max_side)
|
||||
|
||||
resized_width = int(width * ratio)
|
||||
resized_height = int(height * ratio)
|
||||
|
||||
im.thumbnail((resized_width, resized_height), Image.LANCZOS)
|
||||
|
||||
driver = ext[1:].upper()
|
||||
if driver == 'JPG':
|
||||
driver = 'JPEG'
|
||||
|
||||
if 'exif' in im.info:
|
||||
exif_dict = piexif.load(im.info['exif'])
|
||||
exif_dict['Exif'][piexif.ExifIFD.PixelXDimension] = resized_width
|
||||
exif_dict['Exif'][piexif.ExifIFD.PixelYDimension] = resized_height
|
||||
im.save(resized_image_path, driver, exif=piexif.dump(exif_dict), quality=100)
|
||||
else:
|
||||
im.save(resized_image_path, driver, quality=100)
|
||||
|
||||
im.close()
|
||||
|
||||
print("{} ({}x{}) --> {} ({}x{})".format(image_path, width, height, resized_image_path, resized_width, resized_height))
|
||||
except (IOError, ValueError) as e:
|
||||
print("Error: Cannot resize {}: {}.".format(image_path, str(e)))
|
||||
nonloc.errors += 1
|
||||
|
||||
def resize_gcp(gcp_path, out_path, resize_to, out_path_is_file=False):
|
||||
"""
|
||||
:param gcp_path: path to the GCP
|
||||
:param out_path: path to the output directory or file
|
||||
:param resize_to: percentage ("perc%") or pixels
|
||||
"""
|
||||
try:
|
||||
if out_path_is_file:
|
||||
resized_gcp_path = out_path
|
||||
else:
|
||||
resized_gcp_path = os.path.join(out_path, os.path.basename(gcp_path))
|
||||
|
||||
if resize_to.endswith("%"):
|
||||
ratio = float(resize_to[:-1]) / 100.0
|
||||
else:
|
||||
ratio = resize_to
|
||||
|
||||
gcp = GCPFile(gcp_path)
|
||||
if gcp.entries_count() > 0:
|
||||
gcp.make_resized_copy(resized_gcp_path, ratio)
|
||||
else:
|
||||
raise ValueError("No GCP entries")
|
||||
|
||||
print("{} --> {}".format(gcp_path, resized_gcp_path))
|
||||
except (IOError, ValueError) as e:
|
||||
print("Error: Cannot resize {}: {}.".format(gcp_path, str(e)))
|
||||
nonloc.errors += 1
|
||||
|
||||
if not args.amount.endswith("%"):
|
||||
args.amount = float(args.amount)
|
||||
if args.amount <= 0:
|
||||
die("Invalid amount")
|
||||
else:
|
||||
try:
|
||||
if float(args.amount[:-1]) <= 0:
|
||||
die("Invalid amount")
|
||||
except:
|
||||
die("Invalid amount")
|
||||
|
||||
|
||||
files = []
|
||||
gcps = []
|
||||
|
||||
if os.path.isdir(args.input):
|
||||
for ext in ["JPG", "JPEG", "PNG", "TIFF", "TIF"]:
|
||||
files += glob.glob("{}/*.{}".format(args.input, ext))
|
||||
files += glob.glob("{}/*.{}".format(args.input, ext.lower()))
|
||||
gcps = glob.glob("{}/*.txt".format(args.input))
|
||||
elif os.path.exists(args.input):
|
||||
_, ext = os.path.splitext(args.input)
|
||||
if ext.lower() == ".txt":
|
||||
gcps = [args.input]
|
||||
else:
|
||||
files = [args.input]
|
||||
else:
|
||||
die("{} does not exist".format(args.input))
|
||||
|
||||
create_dir = len(files) > 1 or args.output.endswith("/") or len(gcps) > 1
|
||||
|
||||
if create_dir and os.path.isdir(args.output):
|
||||
if not args.force:
|
||||
die("{} exists, pass --force to overwrite results".format(args.output))
|
||||
else:
|
||||
shutil.rmtree(args.output)
|
||||
elif not create_dir and os.path.isfile(args.output):
|
||||
if not args.force:
|
||||
die("{} exists, pass --force to overwrite results".format(args.output))
|
||||
else:
|
||||
os.remove(args.output)
|
||||
|
||||
if create_dir:
|
||||
os.makedirs(args.output)
|
||||
|
||||
pool = ThreadPool(processes=multiprocessing.cpu_count())
|
||||
|
||||
def resize(file):
|
||||
_, ext = os.path.splitext(file)
|
||||
if ext.lower() == ".txt":
|
||||
return resize_gcp(file, args.output, args.amount, not create_dir)
|
||||
else:
|
||||
return resize_image(file, args.output, args.amount, not create_dir)
|
||||
pool.map(resize, files + gcps)
|
||||
|
||||
print("Process completed, {} errors.".format(nonloc.errors))
|
||||
|
|
@ -88,7 +88,7 @@ try:
|
|||
elif typ == 'tgi':
|
||||
indeks = calcTgi(red, green, blue)
|
||||
|
||||
with rasterio.open(outFileName, 'w', **profile) as dst:
|
||||
with rasterio.open(outFileName, 'w', BIGTIFF="IF_SAFER", **profile) as dst:
|
||||
dst.write(indeks.astype(rasterio.float32), 1)
|
||||
except rasterio.errors.RasterioIOError:
|
||||
print bcolors.FAIL + 'Orthophoto file not found or access denied' + bcolors.ENDC
|
||||
|
|
|
@ -9,4 +9,9 @@ do
|
|||
fi
|
||||
done
|
||||
|
||||
/usr/bin/g++_real -march=nehalem "${args[@]}"
|
||||
ARCH=nehalem
|
||||
if [[ $(uname -m) == "aarch64" ]]; then
|
||||
ARCH=armv8-a
|
||||
fi
|
||||
|
||||
/usr/bin/g++_real -march=$ARCH "${args[@]}"
|
||||
|
|
|
@ -9,4 +9,9 @@ do
|
|||
fi
|
||||
done
|
||||
|
||||
/usr/bin/gcc_real -march=nehalem "${args[@]}"
|
||||
ARCH=nehalem
|
||||
if [[ $(uname -m) == "aarch64" ]]; then
|
||||
ARCH=armv8-a
|
||||
fi
|
||||
|
||||
/usr/bin/gcc_real -march=$ARCH "${args[@]}"
|
||||
|
|
|
@ -5,14 +5,10 @@ First of all, thank you for taking the time to report an issue.
|
|||
|
||||
Before you continue, make sure you are in the right place. Please open an issue only to report faults and bugs. For questions and discussion please open a topic on http://community.opendronemap.org/c/opendronemap.
|
||||
|
||||
Please use the format below to report bugs and faults. It will help improve the resolution process.
|
||||
Please use the format below to report bugs and faults.
|
||||
****************************************
|
||||
|
||||
### How did you install OpenDroneMap? (Docker, natively, ...)?
|
||||
|
||||
[Type answer here]
|
||||
|
||||
### What's your browser and operating system? (Copy/paste the output of https://www.whatismybrowser.com/)
|
||||
### How did you install ODM? (Docker, installer, natively, ...)?
|
||||
|
||||
[Type answer here]
|
||||
|
||||
|
@ -24,7 +20,7 @@ Please use the format below to report bugs and faults. It will help improve the
|
|||
|
||||
[Type answer here]
|
||||
|
||||
### How can we reproduce this? (What steps did you do to trigger the problem? What parameters are you using for processing? If possible please include a copy of your dataset uploaded on Google Drive or Dropbox. Be detailed)
|
||||
### How can we reproduce this? What steps did you do to trigger the problem? If this is an issue with processing a dataset, YOU MUST include a copy of your dataset AND task output log, uploaded on Google Drive or Dropbox (otherwise we cannot reproduce this).
|
||||
|
||||
[Type answer here]
|
||||
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
FROM nvidia/cuda:11.2.2-devel-ubuntu20.04 AS builder
|
||||
|
||||
# Env variables
|
||||
ENV DEBIAN_FRONTEND=noninteractive \
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
|
||||
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
|
||||
|
||||
# Prepare directories
|
||||
WORKDIR /code
|
||||
|
||||
# Copy everything
|
||||
COPY . ./
|
||||
|
||||
# Run the build
|
||||
RUN PORTABLE_INSTALL=YES GPU_INSTALL=YES bash configure.sh install
|
||||
|
||||
# Clean Superbuild
|
||||
RUN bash configure.sh clean
|
||||
|
||||
### END Builder
|
||||
|
||||
### Use a second image for the final asset to reduce the number and
|
||||
# size of the layers.
|
||||
FROM nvidia/cuda:11.2.2-runtime-ubuntu20.04
|
||||
#FROM nvidia/cuda:11.2.0-devel-ubuntu20.04
|
||||
|
||||
# Env variables
|
||||
ENV DEBIAN_FRONTEND=noninteractive \
|
||||
PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python3.9/dist-packages:/code/SuperBuild/install/lib/python3.8/dist-packages:/code/SuperBuild/install/bin/opensfm" \
|
||||
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" \
|
||||
PDAL_DRIVER_PATH="/code/SuperBuild/install/bin"
|
||||
|
||||
WORKDIR /code
|
||||
|
||||
# Copy everything we built from the builder
|
||||
COPY --from=builder /code /code
|
||||
|
||||
# Copy the Python libraries installed via pip from the builder
|
||||
COPY --from=builder /usr/local /usr/local
|
||||
|
||||
# Install shared libraries that we depend on via APT, but *not*
|
||||
# the -dev packages to save space!
|
||||
# Also run a smoke test on ODM and OpenSfM
|
||||
RUN bash configure.sh installruntimedepsonly \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
|
||||
&& bash run.sh --help \
|
||||
&& bash -c "eval $(python3 /code/opendm/context.py) && python3 -c 'from opensfm import io, pymap'"
|
||||
# Entry point
|
||||
ENTRYPOINT ["python3", "/code/run.py"]
|
|
@ -0,0 +1,162 @@
|
|||
; Script generated by the Inno Setup Script Wizard.
|
||||
; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
|
||||
|
||||
#define MyAppName "ODM"
|
||||
#define VerFile FileOpen("VERSION")
|
||||
#define MyAppVersion FileRead(VerFile)
|
||||
#expr FileClose(VerFile)
|
||||
#undef VerFile
|
||||
#define MyAppPublisher "OpenDroneMap"
|
||||
#define MyAppURL "https://opendronemap.org"
|
||||
|
||||
[Setup]
|
||||
; NOTE: The value of AppId uniquely identifies this application.
|
||||
; Do not use the same AppId value in installers for other applications.
|
||||
; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
|
||||
AppId={{443998BA-9F8F-4A69-9A96-0D8FBC8C6393}
|
||||
AppName={#MyAppName}
|
||||
AppVersion={#MyAppVersion}
|
||||
AppPublisher={#MyAppPublisher}
|
||||
AppPublisherURL={#MyAppURL}
|
||||
AppSupportURL={#MyAppURL}
|
||||
AppUpdatesURL={#MyAppURL}
|
||||
DefaultDirName=C:\ODM
|
||||
DefaultGroupName={#MyAppName}
|
||||
AllowNoIcons=yes
|
||||
LicenseFile=LICENSE
|
||||
OutputDir=dist
|
||||
OutputBaseFilename=ODM_Setup_{#MyAppVersion}
|
||||
Compression=lzma
|
||||
SolidCompression=yes
|
||||
ArchitecturesAllowed=x64
|
||||
ArchitecturesInstallIn64BitMode=x64
|
||||
#ifndef SKIP_SIGN
|
||||
SignTool=signtool
|
||||
#endif
|
||||
PrivilegesRequired=lowest
|
||||
PrivilegesRequiredOverridesAllowed=commandline
|
||||
UsePreviousAppDir=no
|
||||
;SetupIconFile=setup.ico
|
||||
|
||||
[Languages]
|
||||
Name: "english"; MessagesFile: "compiler:Default.isl"
|
||||
|
||||
[Files]
|
||||
Source: "contrib\*"; DestDir: "{app}\contrib"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "licenses\*"; DestDir: "{app}\licenses"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "opendm\*"; DestDir: "{app}\opendm"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "stages\*"; DestDir: "{app}\stages"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "SuperBuild\install\bin\*"; DestDir: "{app}\SuperBuild\install\bin"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "SuperBuild\install\lib\python3.8\*"; DestDir: "{app}\SuperBuild\install\lib\python3.8"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "venv\*"; DestDir: "{app}\venv"; Excludes: "__pycache__,pyvenv.cfg"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "python38\*"; DestDir: "{app}\venv\Scripts"; Excludes: "__pycache__"; Flags: ignoreversion recursesubdirs createallsubdirs
|
||||
Source: "console.bat"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "VERSION"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "LICENSE"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "run.bat"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "run.py"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "settings.yaml"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "win32env.bat"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "winrun.bat"; DestDir: "{app}"; Flags: ignoreversion
|
||||
Source: "SuperBuild\download\vc_redist.x64.exe"; DestDir: {tmp}; Flags: dontcopy
|
||||
Source: "winpostinstall.bat"; DestDir: "{app}"; Flags: ignoreversion
|
||||
|
||||
[Dirs]
|
||||
Name: "{commonappdata}\ODM"; Permissions: users-modify
|
||||
|
||||
[Icons]
|
||||
Name: {group}\ODM Console; Filename: "{app}\console.bat"; WorkingDir: "{app}"
|
||||
Name: "{userdesktop}\ODM Console"; Filename: "{app}\console.bat"; WorkingDir: "{app}"; Tasks: desktopicon
|
||||
|
||||
[Tasks]
|
||||
Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
|
||||
|
||||
[Run]
|
||||
Filename: "{tmp}\vc_redist.x64.exe"; StatusMsg: "Installing Visual C++ Redistributable Packages for Visual Studio 2019"; Parameters: "/quiet"; Check: VC2019RedistNeedsInstall ; Flags: waituntilterminated
|
||||
Filename: "{app}\winpostinstall.bat"; StatusMsg: "Post Install"; Flags: waituntilterminated runhidden
|
||||
Filename: "{app}\console.bat"; Description: {cm:LaunchProgram,ODM Console}; Flags: nowait postinstall skipifsilent
|
||||
|
||||
[Code]
|
||||
|
||||
function VC2019RedistNeedsInstall: Boolean;
|
||||
var
|
||||
Version: String;
|
||||
begin
|
||||
if RegQueryStringValue(HKEY_LOCAL_MACHINE,
|
||||
'SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64', 'Version', Version) then
|
||||
begin
|
||||
// Is the installed version at least 14.14 ?
|
||||
Log('VC Redist Version check : found ' + Version);
|
||||
Result := (CompareStr(Version, 'v14.14.26429.03')<0);
|
||||
end
|
||||
else
|
||||
begin
|
||||
// Not even an old version installed
|
||||
Result := True;
|
||||
end;
|
||||
if (Result) then
|
||||
begin
|
||||
ExtractTemporaryFile('vc_redist.x64.exe');
|
||||
end;
|
||||
end;
|
||||
|
||||
function GetUninstallString(): String;
|
||||
var
|
||||
sUnInstPath: String;
|
||||
sUnInstallString: String;
|
||||
begin
|
||||
sUnInstPath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\{#emit SetupSetting("AppId")}_is1');
|
||||
sUnInstallString := '';
|
||||
if not RegQueryStringValue(HKLM, sUnInstPath, 'UninstallString', sUnInstallString) then
|
||||
RegQueryStringValue(HKCU, sUnInstPath, 'UninstallString', sUnInstallString);
|
||||
Result := sUnInstallString;
|
||||
end;
|
||||
|
||||
function IsUpgrade(): Boolean;
|
||||
begin
|
||||
Result := (GetUninstallString() <> '');
|
||||
end;
|
||||
|
||||
{ Runs the previous version's uninstaller silently and waits for it to
  finish. }
function UnInstallOldVersion(): Integer;
var
  sUnInstallString: String;
  iResultCode: Integer;
begin
  { Return Values: }
  { 1 - uninstall string is empty }
  { 2 - error executing the UnInstallString }
  { 3 - successfully executed the UnInstallString }

  { default return value }
  Result := 0;

  { get the uninstall string of the old app }
  sUnInstallString := GetUninstallString();
  if sUnInstallString <> '' then begin
    { The registry value may be quoted; Exec needs the bare path. }
    sUnInstallString := RemoveQuotes(sUnInstallString);
    if Exec(sUnInstallString, '/SILENT /NORESTART /SUPPRESSMSGBOXES','', SW_HIDE, ewWaitUntilTerminated, iResultCode) then
      Result := 3
    else
      Result := 2;
  end else
    Result := 1;
end;
|
||||
|
||||
{ Setup event hook: just before files are copied (ssInstall), silently
  remove any previously installed version so the new one starts clean. }
procedure CurStepChanged(CurStep: TSetupStep);
begin
  if (CurStep=ssInstall) then
  begin
    if (IsUpgrade()) then
    begin
      UnInstallOldVersion();
    end;
  end;
end;
|
||||
|
||||
[UninstallDelete]
|
||||
Type: filesandordirs; Name: "{app}\SuperBuild"
|
||||
Type: filesandordirs; Name: "{app}\contrib"
|
||||
Type: filesandordirs; Name: "{app}\licenses"
|
||||
Type: filesandordirs; Name: "{app}\opendm"
|
||||
Type: filesandordirs; Name: "{app}\stages"
|
||||
Type: filesandordirs; Name: "{app}\venv"
|
|
@ -154,7 +154,7 @@ documentation for any purpose and without fee, provided that:
|
|||
all copies of this software and any modification thereof and in
|
||||
supporting documentation;
|
||||
2. Any color-handling application which displays TekHVC color
|
||||
cooordinates identifies these as TekHVC color coordinates in any
|
||||
coordinates identifies these as TekHVC color coordinates in any
|
||||
interface that displays these coordinates and in any associated
|
||||
documentation;
|
||||
3. The term "TekHVC" is always used, and is only used, in association
|
||||
|
|
|
@ -19,7 +19,6 @@ Licensing for portions of OpenDroneMap are as follows:
|
|||
* Flann - BSD2 - http://opensource.org/licenses/bsd-license.php
|
||||
* Eigen - MPL2 - http://www.mozilla.org/MPL/2.0
|
||||
* Qhull - http://www.qhull.org/COPYING.txt
|
||||
* vtk5 - BSD - http://www.vtk.org/VTK/project/license.html
|
||||
* libext - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libext_copyright.txt
|
||||
* libx11 - https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/licenses/libx11_copyright.txt
|
||||
* MVS Texturing - BSD - https://github.com/nmoehrle/mvs-texturing/blob/master/LICENSE.txt
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
# Default to an optimized Release build when the caller did not pick one.
if (NOT CMAKE_BUILD_TYPE)
  message(STATUS "No build type selected, default to Release")
  set(CMAKE_BUILD_TYPE "Release")
endif()

# Add ODM sub-modules
add_subdirectory(odm_georef)
add_subdirectory(odm_orthophoto)
add_subdirectory(odm_cleanmesh)

# SLAM support is optional and only built when explicitly enabled.
if (ODM_BUILD_SLAM)
  add_subdirectory(odm_slam)
endif ()
|
|
@ -1,19 +0,0 @@
|
|||
project(odm_cleanmesh)
cmake_minimum_required(VERSION 2.8)

set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR})

# VTK supplies the PLY I/O, connectivity and decimation filters used by
# this tool.
set (CMAKE_CXX_STANDARD 11)
find_package(VTK REQUIRED)
include(${VTK_USE_FILE})

# Add compiler options.
add_definitions(-Wall -Wextra)

# Add source directory
aux_source_directory("./src" SRC_LIST)

# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})

target_link_libraries(${PROJECT_NAME} ${VTK_LIBRARIES})
|
|
@ -1,106 +0,0 @@
|
|||
/*
|
||||
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list of
|
||||
conditions and the following disclaimer. Redistributions in binary form must reproduce
|
||||
the above copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
Neither the name of the Johns Hopkins University nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
|
||||
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGE.
|
||||
*/
|
||||
|
||||
#ifndef CMD_LINE_PARSER_INCLUDED
#define CMD_LINE_PARSER_INCLUDED

#include <stdarg.h>
#include <cstring>
#include <cstdlib>
#include <string>
#include <vector>

#ifdef WIN32
// POSIX strcasecmp is unavailable on Windows; an _stricmp-based
// replacement is defined in CmdLineParser.inl.
int strcasecmp( const char* c1 , const char* c2 );
#endif // WIN32

// Base class for a command-line option. On its own it represents a boolean
// flag: the option's presence on the command line sets 'set' to true.
class cmdLineReadable
{
public:
	bool set;   // true once this option has been seen on the command line
	char *name; // option name, matched case-insensitively after a leading '-'
	cmdLineReadable( const char *name );
	virtual ~cmdLineReadable( void );
	// Consumes this option's arguments from argv (argc tokens available);
	// returns how many tokens were consumed.
	virtual int read( char** argv , int argc );
	// Writes a printable form of the current value into str.
	virtual void writeValue( char* str ) const;
};

// Per-type helpers, specialized in CmdLineParser.inl for int/float/double/char*.
template< class Type > void cmdLineWriteValue( Type t , char* str );
template< class Type > void cmdLineCleanUp( Type* t );
template< class Type > Type cmdLineInitialize( void );
template< class Type > Type cmdLineCopy( Type t );
template< class Type > Type cmdLineStringToType( const char* str );

// Option carrying a single value of the given type: -name <value>.
template< class Type >
class cmdLineParameter : public cmdLineReadable
{
public:
	Type value;
	cmdLineParameter( const char *name );
	cmdLineParameter( const char *name , Type v );
	~cmdLineParameter( void );
	int read( char** argv , int argc );
	void writeValue( char* str ) const;
	bool expectsArg( void ) const { return true; }
};

// Option carrying a fixed number (Dim) of values: -name <v1> ... <vDim>.
template< class Type , int Dim >
class cmdLineParameterArray : public cmdLineReadable
{
public:
	Type values[Dim];
	cmdLineParameterArray( const char *name, const Type* v=NULL );
	~cmdLineParameterArray( void );
	int read( char** argv , int argc );
	void writeValue( char* str ) const;
	bool expectsArg( void ) const { return true; }
};

// Option carrying a variable number of values: the first token after the
// name is the count, followed by that many values.
template< class Type >
class cmdLineParameters : public cmdLineReadable
{
public:
	int count;
	Type *values;
	cmdLineParameters( const char* name );
	~cmdLineParameters( void );
	int read( char** argv , int argc );
	void writeValue( char* str ) const;
	bool expectsArg( void ) const { return true; }
};

// Parses argv against the NULL-terminated params table.
void cmdLineParse( int argc , char **argv, cmdLineReadable** params );
// Filename helpers. The Get* variants return heap buffers owned by the
// caller; the others return pointers into (or truncate) their argument.
char* FileExtension( char* fileName );
char* LocalFileName( char* fileName );
char* DirectoryName( char* fileName );
char* GetFileExtension( const char* fileName );
char* GetLocalFileName( const char* fileName );
char** ReadWords( const char* fileName , int& cnt );

#include "CmdLineParser.inl"
#endif // CMD_LINE_PARSER_INCLUDED
|
|
@ -1,300 +0,0 @@
|
|||
/* -*- C++ -*-
|
||||
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list of
|
||||
conditions and the following disclaimer. Redistributions in binary form must reproduce
|
||||
the above copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
Neither the name of the Johns Hopkins University nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software without specific
|
||||
prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
|
||||
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
|
||||
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGE.
|
||||
*/
|
||||
|
||||
#include <cassert>
#include <string.h>

// Windows has no POSIX strcasecmp; map it onto the CRT's _stricmp.
#if defined( WIN32 ) || defined( _WIN64 )
inline int strcasecmp( const char* c1 , const char* c2 ){ return _stricmp( c1 , c2 ); }
#endif // WIN32 || _WIN64

// Per-type specializations used by the parameter templates:
//   cmdLineCleanUp      - release/zero a stored value
//   cmdLineInitialize   - default value for a fresh parameter
//   cmdLineWriteValue   - print a value into a caller-supplied buffer
//   cmdLineCopy         - copy a value (strings are duplicated on the heap)
//   cmdLineStringToType - parse a command-line token into a value
template< > void cmdLineCleanUp< int >( int* t ){ *t = 0; }
template< > void cmdLineCleanUp< float >( float* t ){ *t = 0; }
template< > void cmdLineCleanUp< double >( double* t ){ *t = 0; }
// char* values are heap strings from strdup/_strdup: free and NULL them.
template< > void cmdLineCleanUp< char* >( char** t ){ if( *t ) free( *t ) ; *t = NULL; }
template< > int    cmdLineInitialize< int    >( void ){ return 0; }
template< > float  cmdLineInitialize< float  >( void ){ return 0.f; }
template< > double cmdLineInitialize< double >( void ){ return 0.; }
template< > char*  cmdLineInitialize< char*  >( void ){ return NULL; }
template< > void cmdLineWriteValue< int    >( int t    , char* str ){ sprintf( str , "%d" , t ); }
template< > void cmdLineWriteValue< float  >( float t  , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< double >( double t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< char*  >( char* t  , char* str ){ if( t ) sprintf( str , "%s" , t ) ; else str[0]=0; }
template< > int    cmdLineCopy( int t    ){ return t; }
template< > float  cmdLineCopy( float t  ){ return t; }
template< > double cmdLineCopy( double t ){ return t; }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineCopy( char* t ){ return _strdup( t ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineCopy( char* t ){ return strdup( t ); }
#endif // WIN32 || _WIN64
template< > int    cmdLineStringToType( const char* str ){ return atoi( str ); }
template< > float  cmdLineStringToType( const char* str ){ return float( atof( str ) ); }
template< > double cmdLineStringToType( const char* str ){ return double( atof( str ) ); }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineStringToType( const char* str ){ return _strdup( str ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineStringToType( const char* str ){ return strdup( str ); }
#endif // WIN32 || _WIN64
|
||||
|
||||
|
||||
/////////////////////
// cmdLineReadable //
/////////////////////
// The option name is deep-copied so the caller's string need not outlive
// this object.
#if defined( WIN32 ) || defined( _WIN64 )
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = _strdup( name ); }
#else // !WIN32 && !_WIN64
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = strdup( name ); }
#endif // WIN32 || _WIN64

inline cmdLineReadable::~cmdLineReadable( void ){ if( name ) free( name ) ; name = NULL; }
// A bare readable is a boolean flag: mark it seen, consume no tokens.
inline int cmdLineReadable::read( char** , int ){ set = true ; return 0; }
// A flag has no printable value.
inline void cmdLineReadable::writeValue( char* str ) const { str[0] = 0; }
|
||||
|
||||
//////////////////////
// cmdLineParameter //
//////////////////////
template< class Type > cmdLineParameter< Type >::~cmdLineParameter( void ) { cmdLineCleanUp( &value ); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name ) : cmdLineReadable( name ){ value = cmdLineInitialize< Type >(); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name , Type v ) : cmdLineReadable( name ){ value = cmdLineCopy< Type >( v ); }
// Consumes exactly one token as the value; returns 1 on success, 0 when no
// token was available.
template< class Type >
int cmdLineParameter< Type >::read( char** argv , int argc )
{
	if( argc>0 )
	{
		// Release the previous value before overwriting (matters for char*).
		cmdLineCleanUp< Type >( &value ) , value = cmdLineStringToType< Type >( argv[0] );
		set = true;
		return 1;
	}
	else return 0;
}
template< class Type >
void cmdLineParameter< Type >::writeValue( char* str ) const { cmdLineWriteValue< Type >( value , str ); }
|
||||
|
||||
|
||||
///////////////////////////
// cmdLineParameterArray //
///////////////////////////
// Initializes from the optional default array v, otherwise to per-type
// defaults.
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::cmdLineParameterArray( const char *name , const Type* v ) : cmdLineReadable( name )
{
	if( v ) for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineCopy< Type >( v[i] );
	else for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineInitialize< Type >();
}
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::~cmdLineParameterArray( void ){ for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ); }
// Consumes exactly Dim tokens (all or nothing); returns Dim on success,
// 0 when fewer tokens remain.
template< class Type , int Dim >
int cmdLineParameterArray< Type , Dim >::read( char** argv , int argc )
{
	if( argc>=Dim )
	{
		for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ) , values[i] = cmdLineStringToType< Type >( argv[i] );
		set = true;
		return Dim;
	}
	else return 0;
}
// Writes all Dim values back-to-back into str.
template< class Type , int Dim >
void cmdLineParameterArray< Type , Dim >::writeValue( char* str ) const
{
	char* temp=str;
	for( int i=0 ; i<Dim ; i++ )
	{
		cmdLineWriteValue< Type >( values[i] , temp );
		temp = str+strlen( str );
	}
}
|
||||
///////////////////////
// cmdLineParameters //
///////////////////////
template< class Type >
cmdLineParameters< Type >::cmdLineParameters( const char* name ) : cmdLineReadable( name ) , values(NULL) , count(0) { }
template< class Type >
cmdLineParameters< Type >::~cmdLineParameters( void )
{
	if( values ) delete[] values;
	values = NULL;
	count = 0;
}
// Reads a count token followed by that many values; returns the total
// number of tokens consumed.
template< class Type >
int cmdLineParameters< Type >::read( char** argv , int argc )
{
	// Drop any values from a previous occurrence of this option.
	if( values ) delete[] values;
	values = NULL;

	if( argc>0 )
	{
		count = atoi(argv[0]);
		// NOTE(review): a non-positive or too-large count consumes only the
		// count token (returns 1) and leaves 'set' false - looks deliberate
		// but silently ignores the malformed option.
		if( count <= 0 || argc <= count ) return 1;
		values = new Type[count];
		if( !values ) return 0;
		for( int i=0 ; i<count ; i++ ) values[i] = cmdLineStringToType< Type >( argv[i+1] );
		set = true;
		return count+1;
	}
	else return 0;
}
// Writes all stored values back-to-back into str.
template< class Type >
void cmdLineParameters< Type >::writeValue( char* str ) const
{
	char* temp=str;
	for( int i=0 ; i<count ; i++ )
	{
		cmdLineWriteValue< Type >( values[i] , temp );
		temp = str+strlen( str );
	}
}
|
||||
|
||||
|
||||
// Returns a pointer into fileName at the text after the last '.', or the
// whole string when no '.' is present; no allocation.
inline char* FileExtension( char* fileName )
{
	char* temp = fileName;
	for( unsigned int i=0 ; i<strlen(fileName) ; i++ ) if( fileName[i]=='.' ) temp = &fileName[i+1];
	return temp;
}

// Heap-allocated copy of the last '.'-separated component (the extension).
// The caller owns the returned buffer (delete[]); returns NULL when the
// input contains no token at all.
inline char* GetFileExtension( const char* fileName )
{
	char* fileNameCopy;
	char* ext=NULL;
	char* temp;

	fileNameCopy=new char[strlen(fileName)+1];
	assert(fileNameCopy);
	strcpy(fileNameCopy,fileName);
	// Keep only the final token produced by strtok.
	temp=strtok(fileNameCopy,".");
	while(temp!=NULL)
	{
		if(ext!=NULL){delete[] ext;}
		ext=new char[strlen(temp)+1];
		assert(ext);
		strcpy(ext,temp);
		temp=strtok(NULL,".");
	}
	delete[] fileNameCopy;
	return ext;
}
// Heap-allocated copy of the last path component. Only Windows '\\'
// separators are recognized; the caller owns the result (delete[]).
inline char* GetLocalFileName( const char* fileName )
{
	char* fileNameCopy;
	char* name=NULL;
	char* temp;

	fileNameCopy=new char[strlen(fileName)+1];
	assert(fileNameCopy);
	strcpy(fileNameCopy,fileName);
	temp=strtok(fileNameCopy,"\\");
	while(temp!=NULL){
		if(name!=NULL){delete[] name;}
		name=new char[strlen(temp)+1];
		assert(name);
		strcpy(name,temp);
		temp=strtok(NULL,"\\");
	}
	delete[] fileNameCopy;
	return name;
}
// Returns a pointer into fileName after the last '\\'; no allocation.
inline char* LocalFileName( char* fileName )
{
	char* temp = fileName;
	for( int i=0 ; i<(int)strlen(fileName) ; i++ ) if( fileName[i] =='\\' ) temp = &fileName[i+1];
	return temp;
}
// Truncates fileName in place at the last '\\', leaving the directory
// part; a name with no separator becomes the empty string.
inline char* DirectoryName( char* fileName )
{
	for( int i=int( strlen(fileName) )-1 ; i>=0 ; i-- )
		if( fileName[i] =='\\' )
		{
			fileName[i] = 0;
			return fileName;
		}
	fileName[0] = 0;
	return fileName;
}
|
||||
|
||||
// Walks argv matching each '-name' token case-insensitively against the
// NULL-terminated params table and letting the matching option consume its
// arguments. Unknown options and bare tokens produce warnings on stderr.
inline void cmdLineParse( int argc , char **argv , cmdLineReadable** params )
{
	while( argc>0 )
	{
		if( argv[0][0]=='-' )
		{
			cmdLineReadable* readable=NULL;
			for( int i=0 ; params[i]!=NULL && readable==NULL ; i++ ) if( !strcasecmp( params[i]->name , argv[0]+1 ) ) readable = params[i];
			if( readable )
			{
				// The option consumed j tokens that followed its name.
				int j = readable->read( argv+1 , argc-1 );
				argv += j , argc -= j;
			}
			else
			{
				fprintf( stderr , "[WARNING] Invalid option: %s\n" , argv[0] );
				// List the valid option names as a hint.
				for( int i=0 ; params[i]!=NULL ; i++ ) printf( "\t-%s\n" , params[i]->name );
			}
		}
		else fprintf( stderr , "[WARNING] Parameter name should be of the form -<name>: %s\n" , argv[0] );
		// Advance past the option name itself.
		++argv , --argc;
	}
}
|
||||
|
||||
// Reads all whitespace-separated words from the given file.
// On success returns a heap-allocated array of cnt heap-allocated C strings;
// the caller owns every names[i] and the array itself (delete[] each, then
// delete[] names). Returns NULL on any failure. Words longer than 499
// characters are split at that boundary instead of overflowing the buffer.
inline char** ReadWords(const char* fileName,int& cnt)
{
	char** names;
	char temp[500];
	FILE* fp;

	// First pass: count the words so the pointer array is sized exactly.
	fp=fopen(fileName,"r");
	if(!fp){return NULL;}
	cnt=0;
	// %499s bounds the conversion so a long token cannot overflow temp
	// (the original unbounded %s was a buffer overflow).
	while(fscanf(fp," %499s ",temp)==1){cnt++;}
	fclose(fp);

	names=new char*[cnt];
	if(!names){return NULL;}

	// Second pass: copy each word into its own buffer.
	fp=fopen(fileName,"r");
	if(!fp){
		delete[] names;
		cnt=0;
		return NULL;
	}
	cnt=0;
	while(fscanf(fp," %499s ",temp)==1){
		names[cnt]=new char[strlen(temp)+1];
		// Bug fix: the original tested !names here (the array pointer,
		// always non-NULL at this point) instead of the string just
		// allocated for this slot.
		if(!names[cnt]){
			for(int j=0;j<cnt;j++){delete[] names[j];}
			delete[] names;
			cnt=0;
			fclose(fp);
			return NULL;
		}
		strcpy(names[cnt],temp);
		cnt++;
	}
	fclose(fp);
	return names;
}
|
|
@ -1,33 +0,0 @@
|
|||
#include <cstdio>
|
||||
#include <cstdarg>
|
||||
#include "CmdLineParser.h"
|
||||
|
||||
// Minimal printf-style logger used by the odm_* tools: optionally appends
// each message to outputFile and/or echoes it to stdout when verbose.
struct Logger{
	bool verbose;           // when true, messages are echoed to stdout
	const char* outputFile; // when non-NULL, messages are appended to this file

	Logger(){
		this->verbose = false;
		this->outputFile = NULL;
	}

	// printf-compatible call operator, e.g. logWriter("x=%d\n", x).
	void operator() ( const char* format , ... )
	{
		if( outputFile )
		{
			// NOTE(review): fopen's result is not checked; an unwritable
			// path would hand a NULL FILE* to vfprintf - confirm intent.
			FILE* fp = fopen( outputFile , "a" );
			va_list args;
			va_start( args , format );
			vfprintf( fp , format , args );
			fclose( fp );
			va_end( args );
		}
		if( verbose )
		{
			va_list args;
			va_start( args , format );
			vprintf( format , args );
			va_end( args );
		}
	}
};
|
|
@ -1,114 +0,0 @@
|
|||
#include <iostream>
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <vtkPolyDataConnectivityFilter.h>
|
||||
#include <vtkSmartPointer.h>
|
||||
#include <vtkPLYReader.h>
|
||||
#include <vtkPLYWriter.h>
|
||||
#include <vtkAlgorithmOutput.h>
|
||||
#include <vtkQuadricDecimation.h>
|
||||
#include "CmdLineParser.h"
|
||||
#include "Logger.h"
|
||||
|
||||
// Process-wide logger; configured in main() from the -verbose flag.
Logger logWriter;

// Command-line options for odm_cleanmesh.
cmdLineParameter< char* >
	InputFile( "inputFile" ) ,
	OutputFile( "outputFile" );
cmdLineParameter< int >
	DecimateMesh( "decimateMesh" );
cmdLineReadable
	RemoveIslands( "removeIslands" ) ,
	Verbose( "verbose" );

// NULL-terminated option table consumed by cmdLineParse.
cmdLineReadable* params[] = {
	&InputFile , &OutputFile , &DecimateMesh, &RemoveIslands, &Verbose ,
	NULL
};
|
||||
|
||||
// Prints usage to stdout and terminates the process with a failure status.
void help(char *ex){
	std::cout << "Usage: " << ex << std::endl
		<< "\t -" << InputFile.name << " <input polygon mesh>" << std::endl
		<< "\t -" << OutputFile.name << " <output polygon mesh>" << std::endl
		<< "\t [-" << DecimateMesh.name << " <target number of vertices>]" << std::endl
		<< "\t [-" << RemoveIslands.name << "]" << std::endl

		<< "\t [-" << Verbose.name << "]" << std::endl;
	exit(EXIT_FAILURE);
}
|
||||
|
||||
|
||||
// Logs every option that was set on the command line, including its value
// when it carries one.
void logArgs(cmdLineReadable* params[], Logger& logWriter){
	logWriter("Running with parameters:\n");
	char str[1024];
	for( int i=0 ; params[i] ; i++ ){
		if( params[i]->set ){
			params[i]->writeValue( str );
			// Flags produce an empty value string; log just the name then.
			if( strlen( str ) ) logWriter( "\t--%s %s\n" , params[i]->name , str );
			else logWriter( "\t--%s\n" , params[i]->name );
		}
	}
}
|
||||
|
||||
|
||||
// Entry point: reads a PLY mesh, optionally keeps only the largest
// connected component (-removeIslands) and/or decimates it toward a target
// vertex count (-decimateMesh), then writes the result as binary PLY.
int main(int argc, char **argv) {
	cmdLineParse( argc-1 , &argv[1] , params );
	// Input and output are mandatory, and at least one operation must be
	// requested; otherwise print usage and exit.
	if( !InputFile.set || !OutputFile.set ) help(argv[0]);
	if( !RemoveIslands.set && !DecimateMesh.set ) help (argv[0]);


	logWriter.verbose = Verbose.set;
	// logWriter.outputFile = "odm_cleanmesh_log.txt";
	logArgs(params, logWriter);

	vtkSmartPointer<vtkPLYReader> reader =
	  vtkSmartPointer<vtkPLYReader>::New();
	reader->SetFileName ( InputFile.value );
	reader->Update();

	// nextOutput always points at the tail of the filter pipeline.
	vtkPolyData *nextOutput = reader->GetOutput();

	vtkSmartPointer<vtkPolyDataConnectivityFilter> connectivityFilter =
	  vtkSmartPointer<vtkPolyDataConnectivityFilter>::New();
	connectivityFilter->SetExtractionModeToLargestRegion();

	vtkSmartPointer<vtkQuadricDecimation> decimationFilter =
	  vtkSmartPointer<vtkQuadricDecimation>::New();

	if (RemoveIslands.set){
		logWriter("Removing islands\n");
		connectivityFilter->SetInputData(nextOutput);
		connectivityFilter->Update();
		nextOutput = connectivityFilter->GetOutput();
	}

	if (DecimateMesh.set){
		logWriter("Decimating mesh\n");

		int vertexCount = nextOutput->GetNumberOfPoints();
		logWriter("Current vertex count: %d\n", vertexCount);
		logWriter("Wanted vertex count: %d\n", DecimateMesh.value);

		// Only decimate when the mesh has more vertices than requested.
		if (vertexCount > DecimateMesh.value){
			// TargetReduction is the fraction of the mesh to remove.
			double targetReduction = 1.0 - static_cast<double>(DecimateMesh.value) / static_cast<double>(vertexCount);
			logWriter("Target reduction set to %f\n", targetReduction);
			decimationFilter->SetTargetReduction(targetReduction);
			decimationFilter->SetInputData(nextOutput);
			decimationFilter->Update();
			nextOutput = decimationFilter->GetOutput();
		}else{
			logWriter("Skipping decimation\n");
		}
	}

	logWriter("Saving cleaned mesh to file... \n");

	vtkSmartPointer<vtkPLYWriter> plyWriter =
	  vtkSmartPointer<vtkPLYWriter>::New();
	plyWriter->SetFileName(OutputFile.value);
	plyWriter->SetFileTypeToBinary();
	plyWriter->SetInputData(nextOutput);
	plyWriter->Write();

	logWriter("OK\n");
}
|
|
@ -1,43 +0,0 @@
|
|||
project(odm_georef)
cmake_minimum_required(VERSION 2.8)

# Set pcl dir to the input specified with option -DPCL_DIR="path"
set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE "PCL_DIR" "Path to the pcl installation directory")
set(OPENCV_DIR "OPENCV_DIR-NOTFOUND" CACHE "OPENCV_DIR" "Path to the opencv installation directory")
set(PROJ4_INCLUDE_DIR "/usr/include/" CACHE "PROJ4_INCLUDE_DIR" "Path to the proj4 include directory")
find_library(PROJ4_LIBRARY "libproj.so" PATHS "/usr/lib" "/usr/lib/x86_64-linux-gnu")
#set(PROJ4_LIBRARY "/usr/lib/x86_64-linux-gnu/libproj.so" CACHE "PROJ4_LIBRARY" "Path to the proj4 library directory")

# Add compiler options.
add_definitions(-Wall -Wextra -Wconversion -pedantic -std=c++11)

# Find pcl at the location specified by PCL_DIR
find_package(VTK 6.0 REQUIRED)
find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8")

# Find OpenCV at the default location
find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED)

# Only link with required opencv modules.
set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui)

# Add the PCL and Eigen include dirs.
# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.
include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR})
include_directories(${EIGEN_ROOT})

# PDAL and jsoncpp
find_package(PDAL REQUIRED CONFIG)
include_directories(${PDAL_INCLUDE_DIRS})
include_directories("${PROJECT_SOURCE_DIR}/../../SuperBuild/src/pdal/vendor/jsoncpp/dist")
link_directories(${PDAL_LIBRARY_DIRS})
add_definitions(${PDAL_DEFINITIONS})

# Add source directory
aux_source_directory("./src" SRC_LIST)

# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})

# Link
target_link_libraries(${PROJECT_NAME} ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${PROJ4_LIBRARY} ${OpenCV_LIBS} jsoncpp ${PDAL_LIBRARIES})
|
@ -1,149 +0,0 @@
|
|||
// This
|
||||
#include "FindTransform.hpp"
|
||||
|
||||
Vec3::Vec3(double x, double y, double z) :x_(x), y_(y), z_(z)
{

}
Vec3::Vec3(const Vec3 &o) : x_(o.x_), y_(o.y_), z_(o.z_)
{

}

// Cross product.
Vec3 Vec3::cross(Vec3 o) const
{
	Vec3 res;
	res.x_ = y_*o.z_ - z_*o.y_;
	res.y_ = z_*o.x_ - x_*o.z_;
	res.z_ = x_*o.y_ - y_*o.x_;
	return res;
}

// Dot (scalar) product.
double Vec3::dot(Vec3 o) const
{
	return x_*o.x_ + y_*o.y_ + z_*o.z_;
}

// Euclidean length.
double Vec3::length() const
{
	return sqrt(x_*x_ + y_*y_ + z_*z_);
}

// Unit-length copy of this vector.
// NOTE(review): divides by length() with no zero check; a zero vector
// yields inf/nan components.
Vec3 Vec3::norm() const
{
	Vec3 res;
	double l = length();
	res.x_ = x_ / l;
	res.y_ = y_ / l;
	res.z_ = z_ / l;
	return res;
}

// Uniform scaling by d.
Vec3 Vec3::operator*(double d) const
{
	return Vec3(x_*d, y_*d, z_*d);
}

// Component-wise addition.
Vec3 Vec3::operator+(Vec3 o) const
{
	return Vec3(x_ + o.x_, y_ + o.y_,z_ + o.z_);
}

// Component-wise subtraction.
Vec3 Vec3::operator-(Vec3 o) const
{
	return Vec3(x_ - o.x_, y_ - o.y_,z_ - o.z_);
}
|
||||
|
||||
// The matrix is stored redundantly as rows (r1_..r3_) and columns
// (c1_..c3_); both constructors derive the columns from the rows.
OnMat3::OnMat3(Vec3 r1, Vec3 r2, Vec3 r3) : r1_(r1), r2_(r2), r3_(r3)
{
	c1_.x_ = r1_.x_; c2_.x_ = r1_.y_; c3_.x_ = r1_.z_;
	c1_.y_ = r2_.x_; c2_.y_ = r2_.y_; c3_.y_ = r2_.z_;
	c1_.z_ = r3_.x_; c2_.z_ = r3_.y_; c3_.z_ = r3_.z_;
}
OnMat3::OnMat3(const OnMat3 &o) : r1_(o.r1_), r2_(o.r2_), r3_(o.r3_)
{
	c1_.x_ = r1_.x_; c2_.x_ = r1_.y_; c3_.x_ = r1_.z_;
	c1_.y_ = r2_.x_; c2_.y_ = r2_.y_; c3_.y_ = r2_.z_;
	c1_.z_ = r3_.x_; c2_.z_ = r3_.y_; c3_.z_ = r3_.z_;
}

// Determinant via the rule of Sarrus.
double OnMat3::det() const
{
	return r1_.x_*r2_.y_*r3_.z_ + r1_.y_*r2_.z_*r3_.x_ + r1_.z_*r2_.x_*r3_.y_ - r1_.z_*r2_.y_*r3_.x_ - r1_.y_*r2_.x_*r3_.z_ - r1_.x_*r2_.z_*r3_.y_;
}

// Transpose; equals the inverse for an orthonormal matrix.
OnMat3 OnMat3::transpose() const
{
	return OnMat3(Vec3(r1_.x_, r2_.x_, r3_.x_), Vec3(r1_.y_, r2_.y_, r3_.y_), Vec3(r1_.z_, r2_.z_, r3_.z_));
}

// Matrix product: rows of this dotted with columns of o.
OnMat3 OnMat3::operator*(OnMat3 o) const
{
	return OnMat3( Vec3(r1_.dot(o.c1_), r1_.dot(o.c2_), r1_.dot(o.c3_)),
				   Vec3(r2_.dot(o.c1_), r2_.dot(o.c2_), r2_.dot(o.c3_)),
				   Vec3(r3_.dot(o.c1_), r3_.dot(o.c2_), r3_.dot(o.c3_)));
}

// Matrix-vector product.
Vec3 OnMat3::operator*(Vec3 o)
{
	return Vec3(r1_.dot(o), r2_.dot(o), r3_.dot(o));
}
|
||||
|
||||
// Default-constructs to the 4x4 identity transform.
Mat4::Mat4()
{
	r1c1_ = 1.0; r1c2_ = 0.0; r1c3_ = 0.0; r1c4_ = 0.0;
	r2c1_ = 0.0; r2c2_ = 1.0; r2c3_ = 0.0; r2c4_ = 0.0;
	r3c1_ = 0.0; r3c2_ = 0.0; r3c3_ = 1.0; r3c4_ = 0.0;
	r4c1_ = 0.0; r4c2_ = 0.0; r4c3_ = 0.0; r4c4_ = 1.0;
}

// Builds a similarity transform: uniformly scaled rotation in the upper
// left 3x3 block, translation in the last column.
Mat4::Mat4(OnMat3 rotation, Vec3 translation, double scaling)
{
	r1c1_ = scaling * rotation.r1_.x_; r1c2_ = scaling * rotation.r1_.y_; r1c3_ = scaling * rotation.r1_.z_; r1c4_ = translation.x_;
	r2c1_ = scaling * rotation.r2_.x_; r2c2_ = scaling * rotation.r2_.y_; r2c3_ = scaling * rotation.r2_.z_; r2c4_ = translation.y_;
	r3c1_ = scaling * rotation.r3_.x_; r3c2_ = scaling * rotation.r3_.y_; r3c3_ = scaling * rotation.r3_.z_; r3c4_ = translation.z_;
	r4c1_ = 0.0; r4c2_ = 0.0; r4c3_ = 0.0; r4c4_ = 1.0;
}

// Applies the transform to a point (implicit homogeneous w = 1).
Vec3 Mat4::operator*(Vec3 o)
{
	return Vec3(
		r1c1_ * o.x_ + r1c2_* o.y_ + r1c3_* o.z_ + r1c4_,
		r2c1_ * o.x_ + r2c2_* o.y_ + r2c3_* o.z_ + r2c4_,
		r3c1_ * o.x_ + r3c2_* o.y_ + r3c3_* o.z_ + r3c4_
	);
}
|
||||
|
||||
// Computes the similarity transform (rotation, translation, uniform scale)
// that maps triangle (fromA, fromB, fromC) onto (toA, toB, toC) and stores
// it in transform_.
void FindTransform::findTransform(Vec3 fromA, Vec3 fromB, Vec3 fromC, Vec3 toA, Vec3 toB, Vec3 toC)
{
	Vec3 a1 = toA;
	Vec3 b1 = toB;
	Vec3 c1 = toC;
	Vec3 a2 = fromA;
	Vec3 b2 = fromB;
	Vec3 c2 = fromC;

	// Orthonormal frame attached to the target triangle.
	Vec3 y1 = (a1 - c1).cross(b1 - c1).norm();
	Vec3 z1 = (a1 - c1).norm();
	Vec3 x1 = y1.cross(z1);

	// Orthonormal frame attached to the source triangle.
	Vec3 y2 = (a2 - c2).cross(b2 - c2).norm();
	Vec3 z2 = (a2 - c2).norm();
	Vec3 x2 = y2.cross(z2);
	OnMat3 mat1 = OnMat3(x1, y1, z1).transpose();
	OnMat3 mat2 = OnMat3(x2, y2, z2).transpose();

	// Rotation taking the source frame into the target frame.
	OnMat3 rotation = mat1 * mat2.transpose();

	// Uniform scale from the corresponding edge lengths.
	double scale = (a1 - c1).length() / (a2 - c2).length();

	// Translation chosen so the rotated+scaled source origin c2 lands on
	// c1. (The original first assigned c1 - c2 here - a dead store that was
	// immediately overwritten; it has been removed.)
	Vec3 translation = rotation * c2 * (-scale) + c1;
	Mat4 transformation(rotation, translation, scale);
	transform_ = transformation;
}
|
||||
|
||||
// Residual of the fitted transform for one correspondence: the distance
// between the transformed source point and its intended target.
double FindTransform::error(Vec3 fromA, Vec3 toA)
{
	return (transform_*fromA - toA).length();
}
|
|
@ -1,165 +0,0 @@
|
|||
// C++
|
||||
#include <math.h>
|
||||
#include <string>
|
||||
#include <iomanip>
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
|
||||
/*!
|
||||
* \brief Handles basic 3d vector math.
|
||||
**/
|
||||
/*!
 * \brief Handles basic 3d vector math.
 **/
struct Vec3
{
	Vec3(double x = 0.0, double y = 0.0, double z = 0.0);
	Vec3(const Vec3 &o);

	double x_,y_,z_; /**< The x, y and z values of the vector. **/

	/*!
	 * \brief cross The cross product between two vectors.
	 **/
	Vec3 cross(Vec3 o) const;

	/*!
	 * \brief dot The scalar product between two vectors.
	 **/
	double dot(Vec3 o) const;

	/*!
	 * \brief length The length of the vector.
	 **/
	double length() const;

	/*!
	 * \brief norm Returns a normalized version of this vector.
	 **/
	Vec3 norm() const;

	/*!
	 * \brief Scales this vector.
	 **/
	Vec3 operator*(double d) const;

	/*!
	 * \brief Addition between two vectors.
	 **/
	Vec3 operator+(Vec3 o) const;

	/*!
	 * \brief Subtraction between two vectors.
	 **/
	Vec3 operator-(Vec3 o) const;

	friend std::ostream & operator<<(std::ostream &os, Vec3 v)
	{
		// NOTE(review): precision is 8 for x_ but 4 for y_ and z_ -
		// presumably unintentional, kept as-is since output is behavior.
		return os << "[" << std::setprecision(8) << v.x_ << ", " << std::setprecision(4) << v.y_ << ", " << v.z_ << "]";
	}
};
|
||||
|
||||
/*!
|
||||
* \brief Describes a 3d orthonormal matrix.
|
||||
**/
|
||||
class OnMat3
|
||||
{
|
||||
public:
|
||||
OnMat3(Vec3 r1, Vec3 r2, Vec3 r3);
|
||||
OnMat3(const OnMat3 &o);
|
||||
|
||||
Vec3 r1_; /**< The first row of the matrix. **/
|
||||
Vec3 r2_; /**< The second row of the matrix. **/
|
||||
Vec3 r3_; /**< The third row of the matrix. **/
|
||||
Vec3 c1_; /**< The first column of the matrix. **/
|
||||
Vec3 c2_; /**< The second column of the matrix. **/
|
||||
Vec3 c3_; /**< The third column of the matrix. **/
|
||||
|
||||
/*!
|
||||
* \brief The determinant of the matrix.
|
||||
**/
|
||||
double det() const;
|
||||
|
||||
/*!
|
||||
* \brief The transpose of the OnMat3 (equal to inverse).
|
||||
**/
|
||||
OnMat3 transpose() const;
|
||||
|
||||
/*!
|
||||
* \brief Matrix multiplication between two ON matrices.
|
||||
**/
|
||||
OnMat3 operator*(OnMat3 o) const;
|
||||
|
||||
/*!
|
||||
* \brief Right side multiplication with a 3d vector.
|
||||
**/
|
||||
Vec3 operator*(Vec3 o);
|
||||
|
||||
friend std::ostream & operator<<(std::ostream &os, OnMat3 m)
|
||||
{
|
||||
return os << "[" << std::endl << m.r1_ << std::endl << m.r2_ << std::endl << m.r3_ << std::endl << "]" << std::endl;
|
||||
}
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief Describes an affine transformation.
|
||||
**/
|
||||
class Mat4
|
||||
{
|
||||
public:
|
||||
Mat4();
|
||||
Mat4(OnMat3 rotation, Vec3 translation, double scaling);
|
||||
|
||||
/*!
|
||||
* \brief Right side multiplication with a 3d vector.
|
||||
**/
|
||||
Vec3 operator*(Vec3 o);
|
||||
|
||||
double r1c1_; /**< Matrix element 0 0 **/
|
||||
double r1c2_; /**< Matrix element 0 1 **/
|
||||
double r1c3_; /**< Matrix element 0 2 **/
|
||||
double r1c4_; /**< Matrix element 0 3 **/
|
||||
double r2c1_; /**< Matrix element 1 0 **/
|
||||
double r2c2_; /**< Matrix element 1 1 **/
|
||||
double r2c3_; /**< Matrix element 1 2 **/
|
||||
double r2c4_; /**< Matrix element 1 3 **/
|
||||
double r3c1_; /**< Matrix element 2 0 **/
|
||||
double r3c2_; /**< Matrix element 2 1 **/
|
||||
double r3c3_; /**< Matrix element 2 2 **/
|
||||
double r3c4_; /**< Matrix element 2 3 **/
|
||||
double r4c1_; /**< Matrix element 3 0 **/
|
||||
double r4c2_; /**< Matrix element 3 1 **/
|
||||
double r4c3_; /**< Matrix element 3 2 **/
|
||||
double r4c4_; /**< Matrix element 3 3 **/
|
||||
|
||||
friend std::ostream & operator<<(std::ostream &os, Mat4 m)
|
||||
{
|
||||
std::stringstream ss;
|
||||
ss.precision(8);
|
||||
ss.setf(std::ios::fixed, std::ios::floatfield);
|
||||
|
||||
ss << "[ " << m.r1c1_ << ",\t" << m.r1c2_ << ",\t" << m.r1c3_ << ",\t" << m.r1c4_ << " ]" << std::endl <<
|
||||
"[ " << m.r2c1_ << ",\t" << m.r2c2_ << ",\t" << m.r2c3_ << ",\t" << m.r2c4_ << " ]" << std::endl <<
|
||||
"[ " << m.r3c1_ << ",\t" << m.r3c2_ << ",\t" << m.r3c3_ << ",\t" << m.r3c4_ << " ]" << std::endl <<
|
||||
"[ " << m.r4c1_ << ",\t" << m.r4c2_ << ",\t" << m.r4c3_ << ",\t" << m.r4c4_ << " ]";
|
||||
|
||||
return os << ss.str();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
class FindTransform
|
||||
{
|
||||
public:
|
||||
/*!
|
||||
* \brief findTransform Generates an affine transform from the three 'from' vector to the three 'to' vectors.
|
||||
* The transform is such that transform * fromA = toA,
|
||||
* transform * fromB = toB,
|
||||
* transform * fromC = toC,
|
||||
**/
|
||||
void findTransform(Vec3 fromA, Vec3 fromB, Vec3 fromC, Vec3 toA, Vec3 toB, Vec3 toC);
|
||||
|
||||
/*!
|
||||
* \brief error Returns the distance beteween the 'from' and 'to' vectors, after the transform has been applied.
|
||||
**/
|
||||
double error(Vec3 fromA, Vec3 toA);
|
||||
|
||||
Mat4 transform_; /**< The affine transform. **/
|
||||
};
|
Plik diff jest za duży
Load Diff
|
@ -1,322 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
// C++
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <fstream>
|
||||
|
||||
// PCL
|
||||
#include <pcl/common/eigen.h>
|
||||
#include <pcl/common/common.h>
|
||||
#include <pcl/io/ply_io.h>
|
||||
// Modified PCL
|
||||
#include "modifiedPclFunctions.hpp"
|
||||
|
||||
// Logger
|
||||
#include "Logger.hpp"
|
||||
|
||||
// Transformation
|
||||
#include "FindTransform.hpp"
|
||||
|
||||
// PDAL matrix transform filter
|
||||
#include "MatrixTransformFilter.hpp"
|
||||
|
||||
/*!
|
||||
* \brief The GeorefSystem struct is used to store information about a georeference system.
|
||||
*/
|
||||
struct GeorefSystem
|
||||
{
|
||||
std::string system_; /**< The name of the system. **/
|
||||
double eastingOffset_; /**< The easting offset for the georeference system. **/
|
||||
double northingOffset_; /**< The northing offset for the georeference system. **/
|
||||
|
||||
friend std::ostream& operator<<(std::ostream &os, const GeorefSystem &geo);
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief The GeorefGCP struct used to store information about a GCP.
|
||||
*/
|
||||
struct GeorefGCP
|
||||
{
|
||||
double x_; /**< The X coordinate of the GCP **/
|
||||
double y_; /**< The Y coordinate of the GCP **/
|
||||
double z_; /**< The Z coordinate of the GCP **/
|
||||
|
||||
bool use_; /**< Bool to check if the GCP is corresponding in the local model **/
|
||||
|
||||
double localX_; /**< The corresponding X coordinate in the model **/
|
||||
double localY_; /**< The corresponding Y coordinate in the model **/
|
||||
double localZ_; /**< The corresponding Z coordinate in the model **/
|
||||
|
||||
size_t cameraIndex_; /**< The index to the corresponding camera for the image. **/
|
||||
|
||||
double pixelX_; /**< The pixels x-position for the GCP in the corresponding image **/
|
||||
double pixelY_; /**< The pixels y-position for the GCP in the corresponding image **/
|
||||
|
||||
std::string image_; /**< The corresponding image for the GCP **/
|
||||
std::string idgcp_; /**< The corresponding identification for the GCP **/
|
||||
|
||||
GeorefGCP();
|
||||
~GeorefGCP();
|
||||
|
||||
void extractGCP(std::istringstream &gcpStream);
|
||||
|
||||
/*!
|
||||
* \brief getPos Get the local position of the GCP.
|
||||
*/
|
||||
Vec3 getPos();
|
||||
|
||||
/*!
|
||||
* \brief getReferencedPos Get the georeferenced position of the GCP.
|
||||
*/
|
||||
Vec3 getReferencedPos();
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief The GeorefCamera struct is used to store information about a camera.
|
||||
*/
|
||||
struct GeorefCamera
|
||||
{
|
||||
GeorefCamera();
|
||||
GeorefCamera(const GeorefCamera &other);
|
||||
~GeorefCamera();
|
||||
|
||||
/*!
|
||||
* \brief extractCamera Extracts a camera's intrinsic and extrinsic parameters from a stream.
|
||||
*/
|
||||
void extractCamera(std::ifstream &bundleStream);
|
||||
|
||||
/*!
|
||||
* \brief extractCameraGeoref Extracts a camera's world position from a stream.
|
||||
*/
|
||||
void extractCameraGeoref(std::istringstream &coordStream);
|
||||
|
||||
/*!
|
||||
* \brief getPos Get the local position of the camera.
|
||||
*/
|
||||
Vec3 getPos();
|
||||
|
||||
/*!
|
||||
* \brief getReferencedPos Get the georeferenced position of the camera.
|
||||
*/
|
||||
Vec3 getReferencedPos();
|
||||
|
||||
/*!
|
||||
* \brief isValid Whether this camera is valid based on its parameters.
|
||||
*/
|
||||
bool isValid();
|
||||
|
||||
double focalLength_; /**< The focal length of the camera. */
|
||||
double k1_; /**< The k1 lens distortion parameter. **/
|
||||
double k2_; /**< The k2 lens distortion parameter. **/
|
||||
|
||||
double easting_; /**< The easting of the camera. **/
|
||||
double northing_; /**< The northing of the camera. **/
|
||||
double altitude_; /**< The altitude of the camera. **/
|
||||
|
||||
Eigen::Affine3f* transform_; /**< The rotation of the camera. **/
|
||||
Eigen::Vector3f* position_; /**< The position of the camera. **/
|
||||
Eigen::Affine3f* pose_; /**< The pose of the camera. **/
|
||||
|
||||
friend std::ostream& operator<<(std::ostream &os, const GeorefCamera &cam);
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief The GeorefBestTriplet struct is used to store the best triplet found.
|
||||
*/
|
||||
struct GeorefBestTriplet
|
||||
{
|
||||
size_t t_; /**< First ordinate of the best triplet found. **/
|
||||
size_t s_; /**< Second ordinate of the best triplet found. **/
|
||||
size_t p_; /**< Third ordinate of the best triplet found. **/
|
||||
double err_; /**< Error of this triplet. **/
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief The Georef class is used to transform a mesh into a georeferenced system.
|
||||
* The class reads camera positions from a bundle file.
|
||||
* The class reads the georefenced camera positions from a coords file.
|
||||
* The class reads a textured mesh from an OBJ-file.
|
||||
* The class writes the georeferenced textured mesh to an OBJ-file.
|
||||
* The class uses file read and write from pcl.
|
||||
*/
|
||||
class Georef
|
||||
{
|
||||
public:
|
||||
Georef();
|
||||
~Georef();
|
||||
|
||||
int run(int argc, char* argv[]);
|
||||
|
||||
private:
|
||||
|
||||
/*!
|
||||
* \brief parseArguments Parses command line arguments.
|
||||
* \param argc Application argument count.
|
||||
* \param argv Argument values.
|
||||
*/
|
||||
void parseArguments(int argc, char* argv[]);
|
||||
|
||||
/*!
|
||||
* \brief printHelp Prints help, explaining usage. Can be shown by calling the program with argument: "-help".
|
||||
*/
|
||||
void printHelp();
|
||||
|
||||
/*!
|
||||
* \brief setDefaultOutput Setup the output file name given the input file name.
|
||||
*/
|
||||
void setDefaultOutput();
|
||||
|
||||
/*!
|
||||
* \brief setDefaultPointCloudOutput Setup the output file name given the input file name.
|
||||
*/
|
||||
void setDefaultPointCloudOutput();
|
||||
|
||||
/*!
|
||||
* \brief createGeoreferencedModel Makes the input file georeferenced and saves it to the output file.
|
||||
*/
|
||||
void createGeoreferencedModel();
|
||||
|
||||
/*!
|
||||
* \brief readCameras Reads the camera information from the bundle file.
|
||||
*/
|
||||
void readCameras();
|
||||
|
||||
/*!
|
||||
* \brief readGCP Reads the ground control points from the gcp file.
|
||||
*/
|
||||
void readGCPs();
|
||||
|
||||
/*!
|
||||
* \brief calculateGCPOffset Calculates an offset weighted from the ground control points read in the readGCP function.
|
||||
*/
|
||||
void calculateGCPOffset();
|
||||
|
||||
/*!
|
||||
* \brief barycentricCoordinates Returns the world position of a point inside a 2d triangle by using the triangle vertex positions.
|
||||
*/
|
||||
pcl::PointXYZ barycentricCoordinates(pcl::PointXY point, pcl::PointXYZ vert0, pcl::PointXYZ vert1, pcl::PointXYZ vert2, pcl::PointXY p0, pcl::PointXY p1, pcl::PointXY p2);
|
||||
|
||||
/*!
|
||||
* \brief performGeoreferencingWithGCP Performs the georeferencing of the model with the ground control points.
|
||||
*/
|
||||
void performGeoreferencingWithGCP();
|
||||
|
||||
/*!
|
||||
* \brief createGeoreferencedModelFromGCPData Makes the input file georeferenced and saves it to the output file.
|
||||
*/
|
||||
void createGeoreferencedModelFromGCPData();
|
||||
|
||||
/*!
|
||||
* \brief createGeoreferencedModelFromExifData Makes the input file georeferenced and saves it to the output file.
|
||||
*/
|
||||
void createGeoreferencedModelFromExifData();
|
||||
|
||||
/*!
|
||||
* \brief chooseBestGCPTriplet Chooses the best triplet of GCPs to use when making the model georeferenced.
|
||||
*/
|
||||
void chooseBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2);
|
||||
|
||||
/*!
|
||||
* \brief findBestGCPTriplet Partitioned version of chooseBestGCPTriplet.
|
||||
*/
|
||||
void findBestGCPTriplet(size_t &gcp0, size_t &gcp1, size_t &gcp2, size_t offset, size_t stride, double &minTotError);
|
||||
|
||||
/*!
|
||||
* \brief chooseBestCameraTriplet Chooses the best triplet of cameras to use when making the model georeferenced.
|
||||
*/
|
||||
void chooseBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2);
|
||||
|
||||
/*!
|
||||
* \brief findBestCameraTriplet Partitioned version of chooseBestCameraTriplet.
|
||||
*/
|
||||
void findBestCameraTriplet(size_t &cam0, size_t &cam1, size_t &cam2, size_t offset, size_t stride, double &minTotError);
|
||||
|
||||
/*!
|
||||
* \brief printGeorefSystem Prints a file containing information about the georeference system, next to the ouptut file.
|
||||
**/
|
||||
void printGeorefSystem();
|
||||
|
||||
/*!
|
||||
* \brief printFinalTransform Prints a file containing the final transform, next to the output file.
|
||||
**/
|
||||
template <typename Scalar>
|
||||
void printFinalTransform(const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform);
|
||||
|
||||
|
||||
/*!
|
||||
* \brief Loads a model from an .obj file (replacement for the pcl obj loader).
|
||||
*
|
||||
* \param inputFile Path to the .obj file.
|
||||
* \param mesh The model.
|
||||
* \return True if model was loaded successfully.
|
||||
*/
|
||||
bool loadObjFile(std::string inputFile, pcl::TextureMesh &mesh);
|
||||
|
||||
/*!
|
||||
* \brief Function is compied straight from the function in the pcl::io module.
|
||||
*/
|
||||
bool readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud,
|
||||
Eigen::Vector4f &origin, Eigen::Quaternionf &orientation,
|
||||
int &file_version, int &data_type, unsigned int &data_idx,
|
||||
const int offset);
|
||||
|
||||
|
||||
Logger log_; /**< Logging object. */
|
||||
std::string logFile_; /**< The path to the output log file. */
|
||||
|
||||
std::string finalTransformFile_; /**< The path to the file for the final transform. */
|
||||
|
||||
std::string bundleFilename_; /**< The path to the cameras bundle file. **/
|
||||
std::string inputCoordFilename_; /**< The path to the cameras exif gps positions file. **/
|
||||
std::string outputCoordFilename_; /**< The path to the cameras georeferenced gps positions file. **/
|
||||
std::string gcpFilename_; /**< The path to the GCP file **/
|
||||
std::string transformFilename_; /**< The path to the input transform file **/
|
||||
std::string imagesListPath_; /**< Path to the image list. **/
|
||||
std::string imagesLocation_; /**< The folder containing the images in the image list. **/
|
||||
std::string inputObjFilename_; /**< The path to the input mesh obj file. **/
|
||||
std::string outputObjFilename_; /**< The path to the output mesh obj file. **/
|
||||
std::string inputPointCloudFilename_; /**< The path to the input point cloud file. **/
|
||||
std::string outputPointCloudFilename_; /**< The path to the output point cloud file. **/
|
||||
std::string georefFilename_; /**< The path to the output offset file. **/
|
||||
std::string outputPointCloudSrs_; /**< The spatial reference system of the point cloud file to be written. Can be an EPSG string (e.g. “EPSG:26910”) or a WKT string. **/
|
||||
|
||||
bool georeferencePointCloud_;
|
||||
bool exportCoordinateFile_;
|
||||
bool exportGeorefSystem_;
|
||||
bool useGCP_; /**< Check if GCP-file is present and use this to georeference the model. **/
|
||||
bool useTransform_;
|
||||
// double bundleResizedTo_; /**< The size used in the previous steps to calculate the camera focal_length. */
|
||||
|
||||
std::vector<GeorefCamera> cameras_; /**< A vector of all cameras. **/
|
||||
std::vector<GeorefGCP> gcps_; /**< A vector of all GCPs. **/
|
||||
std::vector<std::string> imageList_; /**< A vector containing the names of the corresponding cameras. **/
|
||||
|
||||
GeorefSystem georefSystem_; /**< Contains the georeference system. **/
|
||||
|
||||
bool multiMaterial_; /**< True if the mesh has multiple materials. **/
|
||||
|
||||
std::vector<pcl::MTLReader> companions_; /**< Materials (used by loadOBJFile). **/
|
||||
void performFinalTransform(Mat4 &transMat, pcl::TextureMesh &mesh, pcl::PointCloud<pcl::PointXYZ>::Ptr &meshCloud, bool addUTM);
|
||||
|
||||
template <typename Scalar>
|
||||
void transformPointCloud(const char *inputFile, const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform, const char *outputFile);
|
||||
|
||||
void createGeoreferencedModelFromSFM();
|
||||
};
|
||||
|
||||
/*!
|
||||
* \brief The Georef class
|
||||
*/
|
||||
class GeorefException : public std::exception
|
||||
{
|
||||
|
||||
public:
|
||||
GeorefException() : message("Error in Georef") {}
|
||||
GeorefException(std::string msgInit) : message("Error in Georef:\n" + msgInit) {}
|
||||
~GeorefException() throw() {}
|
||||
virtual const char* what() const throw() {return message.c_str(); }
|
||||
|
||||
private:
|
||||
std::string message; /**< The error message **/
|
||||
};
|
|
@ -1,31 +0,0 @@
|
|||
#include "Logger.hpp"
|
||||
|
||||
|
||||
Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
Logger::~Logger()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
void Logger::print(std::string filePath)
|
||||
{
|
||||
std::ofstream file(filePath.c_str(), std::ios::binary);
|
||||
file << logStream_.str();
|
||||
file.close();
|
||||
}
|
||||
|
||||
bool Logger::isPrintingInCout() const
|
||||
{
|
||||
return isPrintingInCout_;
|
||||
}
|
||||
|
||||
void Logger::setIsPrintingInCout(bool isPrintingInCout)
|
||||
{
|
||||
isPrintingInCout_ = isPrintingInCout;
|
||||
}
|
||||
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
// STL
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
|
||||
/*!
|
||||
* \brief The Logger class is used to store program messages in a log file.
|
||||
* \details By using the << operator while printInCout is set, the class writes both to
|
||||
* cout and to file, if the flag is not set, output is written to file only.
|
||||
*/
|
||||
class Logger
|
||||
{
|
||||
public:
|
||||
/*!
|
||||
* \brief Logger Contains functionality for printing and displaying log information.
|
||||
* \param printInCout Flag toggling if operator << also writes to cout.
|
||||
*/
|
||||
Logger(bool isPrintingInCout = true);
|
||||
|
||||
/*!
|
||||
* \brief Destructor.
|
||||
*/
|
||||
~Logger();
|
||||
|
||||
/*!
|
||||
* \brief print Prints the contents of the log to file.
|
||||
* \param filePath Path specifying where to write the log.
|
||||
*/
|
||||
void print(std::string filePath);
|
||||
|
||||
/*!
|
||||
* \brief isPrintingInCout Check if console printing flag is set.
|
||||
* \return Console printing flag.
|
||||
*/
|
||||
bool isPrintingInCout() const;
|
||||
|
||||
/*!
|
||||
* \brief setIsPrintingInCout Set console printing flag.
|
||||
* \param isPrintingInCout Value, if true, messages added to the log are also printed in cout.
|
||||
*/
|
||||
void setIsPrintingInCout(bool isPrintingInCout);
|
||||
|
||||
/*!
|
||||
* Operator for printing messages to log and in the standard output stream if desired.
|
||||
*/
|
||||
template<class T>
|
||||
friend Logger& operator<< (Logger &log, T t)
|
||||
{
|
||||
// If console printing is enabled.
|
||||
if (log.isPrintingInCout_)
|
||||
{
|
||||
std::cout << t;
|
||||
std::cout.flush();
|
||||
}
|
||||
// Write to log.
|
||||
log.logStream_ << t;
|
||||
|
||||
return log;
|
||||
}
|
||||
|
||||
private:
|
||||
bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */
|
||||
|
||||
std::stringstream logStream_; /*!< Stream for storing the log. */
|
||||
};
|
|
@ -1,2 +0,0 @@
|
|||
#include "MatrixTransformFilter.hpp"
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
#include <pcl/common/eigen.h>
|
||||
#include <pdal/PointTable.hpp>
|
||||
#include <pdal/PointView.hpp>
|
||||
#include <pdal/io/PlyReader.hpp>
|
||||
#include <pdal/io/LasWriter.hpp>
|
||||
#include <pdal/Options.hpp>
|
||||
#include <pdal/Filter.hpp>
|
||||
|
||||
namespace pdal{
|
||||
template <typename Scalar>
|
||||
class MatrixTransformFilter : public Filter{
|
||||
Eigen::Transform<Scalar, 3, Eigen::Affine> transform;
|
||||
|
||||
public:
|
||||
MatrixTransformFilter(const Eigen::Transform<Scalar, 3, Eigen::Affine> &transform)
|
||||
: transform(transform){};
|
||||
|
||||
std::string getName() const { return "MatrixTransformFilter"; }
|
||||
|
||||
virtual void filter(PointView &view)
|
||||
{
|
||||
for (PointId id = 0; id < view.size(); ++id)
|
||||
{
|
||||
Scalar x = view.getFieldAs<Scalar>(Dimension::Id::X, id);
|
||||
Scalar y = view.getFieldAs<Scalar>(Dimension::Id::Y, id);
|
||||
Scalar z = view.getFieldAs<Scalar>(Dimension::Id::Z, id);
|
||||
|
||||
view.setField(pdal::Dimension::Id::X, id, transform (0, 0) * x + transform (0, 1) * y + transform (0, 2) * z + transform (0, 3));
|
||||
view.setField(pdal::Dimension::Id::Y, id, transform (1, 0) * x + transform (1, 1) * y + transform (1, 2) * z + transform (1, 3));
|
||||
view.setField(pdal::Dimension::Id::Z, id, transform (2, 0) * x + transform (2, 1) * y + transform (2, 2) * z + transform (2, 3));
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: implement streaming mode
|
||||
};
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
#include "Georef.hpp"
|
||||
|
||||
int main(int argc, char* argv[])
|
||||
{
|
||||
Georef ref;
|
||||
return ref.run(argc, argv);
|
||||
}
|
||||
|
|
@ -1,341 +0,0 @@
|
|||
/*
|
||||
* Software License Agreement (BSD License)
|
||||
*
|
||||
* Point Cloud Library (PCL) - www.pointclouds.org
|
||||
* Copyright (c) 2012-, Open Perception, Inc.
|
||||
*
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of the copyright holder(s) nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
|
||||
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
|
||||
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#include "modifiedPclFunctions.hpp"
|
||||
|
||||
int saveOBJFile(const std::string &file_name, const pcl::TextureMesh &tex_mesh, unsigned precision)
|
||||
{
|
||||
if (tex_mesh.cloud.data.empty ())
|
||||
{
|
||||
PCL_ERROR ("[pcl::io::saveOBJFile] Input point cloud has no data!\n");
|
||||
return (-1);
|
||||
}
|
||||
|
||||
std::ostringstream fs;
|
||||
fs.precision (precision);
|
||||
|
||||
// Define material file
|
||||
std::string mtl_file_name = file_name.substr (0, file_name.find_last_of (".")) + ".mtl";
|
||||
// Strip path for "mtllib" command
|
||||
std::string mtl_file_name_nopath = mtl_file_name;
|
||||
//std::cout << mtl_file_name_nopath << std::endl;
|
||||
mtl_file_name_nopath.erase (0, mtl_file_name.find_last_of ('/') + 1);
|
||||
|
||||
/* Write 3D information */
|
||||
// number of points
|
||||
int nr_points = tex_mesh.cloud.width * tex_mesh.cloud.height;
|
||||
int point_size = tex_mesh.cloud.data.size () / nr_points;
|
||||
|
||||
// mesh size
|
||||
int nr_meshes = tex_mesh.tex_polygons.size ();
|
||||
// number of faces for header
|
||||
int nr_faces = 0;
|
||||
for (int m = 0; m < nr_meshes; ++m)
|
||||
nr_faces += tex_mesh.tex_polygons[m].size ();
|
||||
|
||||
// Write the header information
|
||||
fs << "####" << std::endl;
|
||||
fs << "# OBJ dataFile simple version. File name: " << file_name << std::endl;
|
||||
fs << "# Vertices: " << nr_points << std::endl;
|
||||
fs << "# Faces: " <<nr_faces << std::endl;
|
||||
fs << "# Material information:" << std::endl;
|
||||
fs << "mtllib " << mtl_file_name_nopath << std::endl;
|
||||
fs << "####" << std::endl;
|
||||
|
||||
// Write vertex coordinates
|
||||
fs << "# Vertices" << std::endl;
|
||||
for (int i = 0; i < nr_points; ++i)
|
||||
{
|
||||
int xyz = 0;
|
||||
// "v" just be written one
|
||||
bool v_written = false;
|
||||
for (size_t d = 0; d < tex_mesh.cloud.fields.size (); ++d)
|
||||
{
|
||||
int count = tex_mesh.cloud.fields[d].count;
|
||||
if (count == 0)
|
||||
count = 1; // we simply cannot tolerate 0 counts (coming from older converter code)
|
||||
int c = 0;
|
||||
// adding vertex
|
||||
if ((tex_mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) /*sensor_msgs::PointField::FLOAT32)*/ && (
|
||||
tex_mesh.cloud.fields[d].name == "x" ||
|
||||
tex_mesh.cloud.fields[d].name == "y" ||
|
||||
tex_mesh.cloud.fields[d].name == "z"))
|
||||
{
|
||||
if (!v_written)
|
||||
{
|
||||
// write vertices beginning with v
|
||||
fs << "v ";
|
||||
v_written = true;
|
||||
}
|
||||
float value;
|
||||
memcpy (&value, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[d].offset + c * sizeof (float)], sizeof (float));
|
||||
fs << value;
|
||||
if (++xyz == 3)
|
||||
break;
|
||||
fs << " ";
|
||||
}
|
||||
}
|
||||
if (xyz != 3)
|
||||
{
|
||||
PCL_ERROR ("[pcl::io::saveOBJFile] Input point cloud has no XYZ data!\n");
|
||||
return (-2);
|
||||
}
|
||||
fs << std::endl;
|
||||
}
|
||||
fs << "# "<< nr_points <<" vertices" << std::endl;
|
||||
|
||||
// // Write vertex normals
|
||||
// for (int i = 0; i < nr_points; ++i)
|
||||
// {
|
||||
// int xyz = 0;
|
||||
// // "vn" just be written one
|
||||
// bool v_written = false;
|
||||
// for (size_t d = 0; d < tex_mesh.cloud.fields.size (); ++d)
|
||||
// {
|
||||
// int count = tex_mesh.cloud.fields[d].count;
|
||||
// if (count == 0)
|
||||
// count = 1; // we simply cannot tolerate 0 counts (coming from older converter code)
|
||||
// int c = 0;
|
||||
// // adding vertex
|
||||
// if ((tex_mesh.cloud.fields[d].datatype == pcl::PCLPointField::FLOAT32) && (
|
||||
// tex_mesh.cloud.fields[d].name == "normal_x" ||
|
||||
// tex_mesh.cloud.fields[d].name == "normal_y" ||
|
||||
// tex_mesh.cloud.fields[d].name == "normal_z"))
|
||||
// {
|
||||
// if (!v_written)
|
||||
// {
|
||||
// // write vertices beginning with vn
|
||||
// fs << "vn ";
|
||||
// v_written = true;
|
||||
// }
|
||||
// float value;
|
||||
// memcpy (&value, &tex_mesh.cloud.data[i * point_size + tex_mesh.cloud.fields[d].offset + c * sizeof (float)], sizeof (float));
|
||||
// fs << value;
|
||||
// if (++xyz == 3)
|
||||
// break;
|
||||
// fs << " ";
|
||||
// }
|
||||
// }
|
||||
// if (xyz != 3)
|
||||
// {
|
||||
// //PCL_ERROR ("[pcl::io::saveOBJFile] Input point cloud has no normals!\n");
|
||||
// //return (-2);
|
||||
// }
|
||||
// fs << std::endl;
|
||||
// }
|
||||
// Write vertex texture with "vt" (adding latter)
|
||||
|
||||
for (int m = 0; m < nr_meshes; ++m)
|
||||
{
|
||||
if(tex_mesh.tex_coordinates.size() == 0)
|
||||
continue;
|
||||
|
||||
//PCL_INFO ("%d vertex textures in submesh %d\n", tex_mesh.tex_coordinates[m].size (), m);
|
||||
fs << "# " << tex_mesh.tex_coordinates[m].size() << " vertex textures in submesh " << m << std::endl;
|
||||
for (size_t i = 0; i < tex_mesh.tex_coordinates[m].size (); ++i)
|
||||
{
|
||||
fs << "vt ";
|
||||
fs << tex_mesh.tex_coordinates[m][i][0] << " " << tex_mesh.tex_coordinates[m][i][1] << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
int f_idx = 0;
|
||||
|
||||
// int idx_vt =0;
|
||||
//PCL_INFO ("Writting faces...\n");
|
||||
for (int m = 0; m < nr_meshes; ++m)
|
||||
{
|
||||
if (m > 0)
|
||||
f_idx += tex_mesh.tex_polygons[m-1].size ();
|
||||
|
||||
if(tex_mesh.tex_materials.size() !=0)
|
||||
{
|
||||
fs << "# The material will be used for mesh " << m << std::endl;
|
||||
//TODO pbl here with multi texture and unseen faces
|
||||
fs << "usemtl " << tex_mesh.tex_materials[m].tex_name << std::endl;
|
||||
fs << "# Faces" << std::endl;
|
||||
}
|
||||
for (size_t i = 0; i < tex_mesh.tex_polygons[m].size(); ++i)
|
||||
{
|
||||
// Write faces with "f"
|
||||
fs << "f";
|
||||
size_t j = 0;
|
||||
// There's one UV per vertex per face, i.e., the same vertex can have
|
||||
// different UV depending on the face.
|
||||
for (j = 0; j < tex_mesh.tex_polygons[m][i].vertices.size (); ++j)
|
||||
{
|
||||
unsigned int idx = tex_mesh.tex_polygons[m][i].vertices[j] + 1;
|
||||
fs << " " << idx
|
||||
<< "/" << 3*(i+f_idx) +j+1;
|
||||
//<< "/" << idx; // vertex index in obj file format starting with 1
|
||||
}
|
||||
fs << std::endl;
|
||||
}
|
||||
//PCL_INFO ("%d faces in mesh %d \n", tex_mesh.tex_polygons[m].size () , m);
|
||||
fs << "# "<< tex_mesh.tex_polygons[m].size() << " faces in mesh " << m << std::endl;
|
||||
}
|
||||
fs << "# End of File";
|
||||
|
||||
// Close obj file
|
||||
//PCL_INFO ("Closing obj file\n");
|
||||
std::ofstream ofs(file_name.c_str ());
|
||||
ofs << fs.str() << std::endl;
|
||||
ofs.close ();
|
||||
|
||||
/* Write material defination for OBJ file*/
|
||||
// Open file
|
||||
//PCL_INFO ("Writing material files\n");
|
||||
//dont do it if no material to write
|
||||
if(tex_mesh.tex_materials.size() ==0)
|
||||
return (0);
|
||||
|
||||
// Empty string stream
|
||||
fs.str("");
|
||||
|
||||
//std::cout << "MTL file is located at_ " << mtl_file_name << std::endl;
|
||||
// default
|
||||
fs << "#" << std::endl;
|
||||
fs << "# Wavefront material file" << std::endl;
|
||||
fs << "#" << std::endl;
|
||||
for(int m = 0; m < nr_meshes; ++m)
|
||||
{
|
||||
fs << "newmtl " << tex_mesh.tex_materials[m].tex_name << std::endl;
|
||||
fs << "Ka "<< tex_mesh.tex_materials[m].tex_Ka.r << " " << tex_mesh.tex_materials[m].tex_Ka.g << " " << tex_mesh.tex_materials[m].tex_Ka.b << std::endl; // defines the ambient color of the material to be (r,g,b).
|
||||
fs << "Kd "<< tex_mesh.tex_materials[m].tex_Kd.r << " " << tex_mesh.tex_materials[m].tex_Kd.g << " " << tex_mesh.tex_materials[m].tex_Kd.b << std::endl; // defines the diffuse color of the material to be (r,g,b).
|
||||
fs << "Ks "<< tex_mesh.tex_materials[m].tex_Ks.r << " " << tex_mesh.tex_materials[m].tex_Ks.g << " " << tex_mesh.tex_materials[m].tex_Ks.b << std::endl; // defines the specular color of the material to be (r,g,b). This color shows up in highlights.
|
||||
fs << "d " << tex_mesh.tex_materials[m].tex_d << std::endl; // defines the transparency of the material to be alpha.
|
||||
fs << "Ns "<< tex_mesh.tex_materials[m].tex_Ns << std::endl; // defines the shininess of the material to be s.
|
||||
fs << "illum "<< tex_mesh.tex_materials[m].tex_illum << std::endl; // denotes the illumination model used by the material.
|
||||
// illum = 1 indicates a flat material with no specular highlights, so the value of Ks is not used.
|
||||
// illum = 2 denotes the presence of specular highlights, and so a specification for Ks is required.
|
||||
fs << "map_Kd " << tex_mesh.tex_materials[m].tex_file << std::endl;
|
||||
fs << "###" << std::endl;
|
||||
}
|
||||
|
||||
|
||||
std::ofstream omfs(mtl_file_name.c_str ());
|
||||
omfs << fs.str() << std::endl;
|
||||
omfs.close ();
|
||||
|
||||
return (0);
|
||||
}
|
||||
|
||||
bool getPixelCoordinates(const pcl::PointXYZ &pt, const pcl::TextureMapping<pcl::PointXYZ>::Camera &cam, pcl::PointXY &UV_coordinates)
{
    // Points at or behind the camera plane can never project onto the image.
    if (pt.z > 0)
    {
        const double sizeX = cam.width;
        const double sizeY = cam.height;

        // Principal point: use the calibrated center when available,
        // otherwise fall back to the image midpoint.
        const double cx = (cam.center_w > 0) ? cam.center_w : sizeX / 2.0;
        const double cy = (cam.center_h > 0) ? cam.center_h : sizeY / 2.0;

        // Per-axis focal lengths, falling back to the single focal length.
        const double focal_x = (cam.focal_length_w > 0) ? cam.focal_length_w : cam.focal_length;
        const double focal_y = (cam.focal_length_h > 0) ? cam.focal_length_h : cam.focal_length;

        // Pinhole projection onto the camera's image plane.
        UV_coordinates.x = static_cast<float> (focal_x * (pt.x / pt.z) + cx); // horizontal
        UV_coordinates.y = static_cast<float> (focal_y * (pt.y / pt.z) + cy); // vertical

        // Accept only points that land at least one pixel inside the border.
        if (UV_coordinates.x >= 1.0 && UV_coordinates.x <= (sizeX - 1.0) &&
            UV_coordinates.y >= 1.0 && UV_coordinates.y <= (sizeY - 1.0))
        {
            return (true); // point was visible by the camera
        }
    }

    // Point is NOT visible by the camera; flag the coordinates as invalid.
    UV_coordinates.x = -1.0f;
    UV_coordinates.y = -1.0f;
    return (false);
}
|
||||
|
||||
// A face is projected only when all three of its vertices are visible by
// the camera. Evaluation stops at the first vertex that is not visible;
// projections computed up to that point have already been written out.
bool isFaceProjected (const pcl::TextureMapping<pcl::PointXYZ>::Camera &camera, const pcl::PointXYZ &p1, const pcl::PointXYZ &p2, const pcl::PointXYZ &p3, pcl::PointXY &proj1, pcl::PointXY &proj2, pcl::PointXY &proj3)
{
    if (!getPixelCoordinates(p1, camera, proj1))
        return false;
    if (!getPixelCoordinates(p2, camera, proj2))
        return false;
    return getPixelCoordinates(p3, camera, proj3);
}
|
||||
|
||||
// Computes the triangle centroid and the radius of a circle centered there
// that encloses all three vertices (the largest centroid-to-vertex distance).
void getTriangleCircumscribedCircleCentroid( const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circumcenter, double &radius)
{
    // Center the circle on the triangle centroid.
    circumcenter.x = static_cast<float> (p1.x + p2.x + p3.x ) / 3;
    circumcenter.y = static_cast<float> (p1.y + p2.y + p3.y ) / 3;

    // Squared distance from the center to a vertex (float math, as before,
    // widened to double on return).
    auto sqDist = [&circumcenter](const pcl::PointXY &p) -> double {
        const float dx = circumcenter.x - p.x;
        const float dy = circumcenter.y - p.y;
        return dx * dx + dy * dy;
    };

    const double r1 = sqDist(p1);
    const double r2 = sqDist(p2);
    const double r3 = sqDist(p3);

    // The enclosing radius is the distance to the farthest vertex.
    radius = std::sqrt( std::max( r1, std::max( r2, r3) ));
}
|
||||
|
||||
// Barycentric point-in-triangle test: pt is inside the triangle (p1, p2, p3)
// when both barycentric weights are non-negative and their sum is below one.
bool checkPointInsideTriangle(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, const pcl::PointXY &pt)
{
    // Edge vectors from vertex A (p1) and the vector to the query point.
    const double v0x = p3.x - p1.x, v0y = p3.y - p1.y; // v0 = C - A
    const double v1x = p2.x - p1.x, v1y = p2.y - p1.y; // v1 = B - A
    const double v2x = pt.x - p1.x, v2y = pt.y - p1.y; // v2 = P - A

    // Dot products needed by the barycentric solve.
    const double dot00 = v0x * v0x + v0y * v0y;
    const double dot01 = v0x * v1x + v0y * v1y;
    const double dot02 = v0x * v2x + v0y * v2y;
    const double dot11 = v1x * v1x + v1y * v1y;
    const double dot12 = v1x * v2x + v1y * v2y;

    // Barycentric coordinates (u along v0, v along v1).
    const double invDenom = 1.0 / (dot00 * dot11 - dot01 * dot01);
    const double u = (dot11 * dot02 - dot01 * dot12) * invDenom;
    const double v = (dot00 * dot12 - dot01 * dot02) * invDenom;

    return ((u >= 0) && (v >= 0) && (u + v < 1));
}
|
|
@ -1,20 +0,0 @@
|
|||
#pragma once

// STL
#include <iostream>
#include <fstream>

// PCL
#include <pcl/point_types.h>
#include <pcl/surface/texture_mapping.h>
#include <pcl/io/obj_io.h>

// Saves a textured mesh (geometry, texture coordinates and materials) as a
// Wavefront OBJ file plus its companion MTL material file.
// Returns 0 on success.
int saveOBJFile(const std::string &file_name, const pcl::TextureMesh &tex_mesh, unsigned precision);

// Projects a 3D point onto the image plane of the given camera.
// Returns true and fills UV_coordinates when the point is visible;
// otherwise UV_coordinates is set to (-1, -1) and false is returned.
bool getPixelCoordinates(const pcl::PointXYZ &pt, const pcl::TextureMapping<pcl::PointXYZ>::Camera &cam, pcl::PointXY &UV_coordinates);

// Returns true when all three triangle vertices project inside the camera
// image; the 2D projections are written to proj1..proj3.
bool isFaceProjected (const pcl::TextureMapping<pcl::PointXYZ>::Camera &camera, const pcl::PointXYZ &p1, const pcl::PointXYZ &p2, const pcl::PointXYZ &p3, pcl::PointXY &proj1, pcl::PointXY &proj2, pcl::PointXY &proj3);

// Computes the triangle centroid and the radius of a circle centered there
// that encloses all three vertices.
void getTriangleCircumscribedCircleCentroid(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, pcl::PointXY &circumcenter, double &radius);

// Barycentric 2D point-in-triangle test.
bool checkPointInsideTriangle(const pcl::PointXY &p1, const pcl::PointXY &p2, const pcl::PointXY &p3, const pcl::PointXY &pt);
|
|
@ -1,39 +0,0 @@
|
|||
# cmake_minimum_required must come before project() so the version/policy
# settings are already in effect when the project is configured.
cmake_minimum_required(VERSION 2.8)
project(odm_orthophoto)

# Paths to the PCL and OpenCV installations, overridable with
# -DPCL_DIR="path" / -DOPENCV_DIR="path". (Cache type fixed to PATH —
# the previous "PCL_DIR"/"OPENCV_DIR" strings were not valid cache types.)
set(PCL_DIR "PCL_DIR-NOTFOUND" CACHE PATH "Path to the pcl installation directory")
set(OPENCV_DIR "OPENCV_DIR-NOTFOUND" CACHE PATH "Path to the OPENCV installation directory")

# Add compiler options.
add_definitions(-Wall -Wextra)

# Find pcl at the location specified by PCL_DIR
find_package(VTK 6.0 REQUIRED)
find_package(PCL 1.8 HINTS "${PCL_DIR}/share/pcl-1.8" REQUIRED)
find_package(GDAL REQUIRED)
include_directories(${GDAL_INCLUDE_DIR})

# Find OpenCV at the default location
find_package(OpenCV HINTS "${OPENCV_DIR}" REQUIRED)

# Only link with required opencv modules.
set(OpenCV_LIBS opencv_core opencv_imgproc opencv_highgui)

# Add the PCL, Eigen and OpenCV include dirs.
# Necessary since the PCL_INCLUDE_DIR variable set by find_package is broken.
include_directories(${PCL_ROOT}/include/pcl-${PCL_VERSION_MAJOR}.${PCL_VERSION_MINOR})
include_directories(${EIGEN_ROOT})
include_directories(${OpenCV_INCLUDE_DIRS})

#library_directories(${OpenCV_LIBRARY_DIRS})

# Add source directory
aux_source_directory("./src" SRC_LIST)

# Add executable
add_executable(${PROJECT_NAME} ${SRC_LIST})
set_target_properties(${PROJECT_NAME} PROPERTIES
    CXX_STANDARD 11
)
target_link_libraries(odm_orthophoto ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_SURFACE_LIBRARIES} ${OpenCV_LIBS} ${GDAL_LIBRARY})
|
|
@ -1,29 +0,0 @@
|
|||
#include "Logger.hpp"
|
||||
|
||||
|
||||
// Constructs a logger; when isPrintingInCout is true, streamed messages
// are echoed to std::cout in addition to the in-memory log buffer.
Logger::Logger(bool isPrintingInCout) : isPrintingInCout_(isPrintingInCout)
{

}
|
||||
|
||||
// Destructor. The buffered log content is discarded unless print()
// was called beforehand.
Logger::~Logger()
{

}
|
||||
|
||||
// Writes the accumulated log buffer to the given file path.
// The file is opened in binary mode so the buffer is written verbatim.
void Logger::print(std::string filePath)
{
    std::ofstream file(filePath.c_str(), std::ios::binary);
    file << logStream_.str();
    file.close();
}
|
||||
|
||||
// Returns whether streamed messages are also echoed to std::cout.
bool Logger::isPrintingInCout() const
{
    return isPrintingInCout_;
}
|
||||
|
||||
// Enables or disables echoing of streamed messages to std::cout.
void Logger::setIsPrintingInCout(bool isPrintingInCout)
{
    isPrintingInCout_ = isPrintingInCout;
}
|
|
@ -1,68 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
// STL
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
|
||||
/*!
|
||||
* \brief The Logger class is used to store program messages in a log file.
|
||||
* \details By using the << operator while printInCout is set, the class writes both to
|
||||
* cout and to file, if the flag is not set, output is written to file only.
|
||||
*/
|
||||
/*!
 * \brief The Logger class is used to store program messages in a log file.
 * \details By using the << operator while console printing is enabled, the
 * class writes both to cout and to file; if the flag is not set, output is
 * written to file only.
 */
class Logger
{
public:
    /*!
     * \brief Logger Contains functionality for printing and displaying log information.
     * \param isPrintingInCout Flag toggling if operator << also writes to cout.
     */
    Logger(bool isPrintingInCout = true);

    /*!
     * \brief Destructor.
     */
    ~Logger();

    /*!
     * \brief print Prints the contents of the log to file.
     * \param filePath Path specifying where to write the log.
     */
    void print(std::string filePath);

    /*!
     * \brief isPrintingInCout Check if console printing flag is set.
     * \return Console printing flag.
     */
    bool isPrintingInCout() const;

    /*!
     * \brief setIsPrintingInCout Set console printing flag.
     * \param isPrintingInCout Value, if true, messages added to the log are also printed in cout.
     */
    void setIsPrintingInCout(bool isPrintingInCout);

    /*!
     * Operator for printing messages to log and in the standard output stream if desired.
     */
    template<class T>
    friend Logger& operator<< (Logger &log, T t)
    {
        // If console printing is enabled.
        if (log.isPrintingInCout_)
        {
            std::cout << t;
            std::cout.flush();
        }
        // Write to log.
        log.logStream_ << t;

        return log;
    }

private:
    bool isPrintingInCout_; /*!< If flag is set, log is printed in cout and written to the log. */

    std::stringstream logStream_; /*!< Stream for storing the log. */
};
|
Plik diff jest za duży
Load Diff
|
@ -1,206 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
// C++
|
||||
#include <limits.h>
|
||||
#include <istream>
|
||||
#include <ostream>
|
||||
|
||||
// PCL
|
||||
#include <pcl/io/obj_io.h>
|
||||
#include <pcl/common/transforms.h>
|
||||
|
||||
// OpenCV
|
||||
#include <opencv2/imgproc/imgproc.hpp>
|
||||
#include <opencv2/highgui/highgui.hpp>
|
||||
|
||||
// PCL
|
||||
#include <pcl/common/eigen.h>
|
||||
#include <pcl/common/common.h>
|
||||
|
||||
// OpenCV
|
||||
#include <opencv2/core/core.hpp>
|
||||
|
||||
// GDAL
|
||||
#include "gdal_priv.h"
|
||||
#include "cpl_conv.h" // for CPLMalloc()
|
||||
|
||||
// Logger
|
||||
#include "Logger.hpp"
|
||||
|
||||
/*!
 * \brief Axis-aligned 2D bounding box describing the orthophoto area.
 * Default-constructs to an empty (all-zero) box.
 */
struct Bounds
{
    float xMin;
    float xMax;
    float yMin;
    float yMax;

    Bounds() : xMin(0), xMax(0), yMin(0), yMax(0) {}
    Bounds(float xMin, float xMax, float yMin, float yMax) :
        xMin(xMin), xMax(xMax), yMin(yMin), yMax(yMax) {}
    // The hand-written member-wise copy was identical to what the compiler
    // generates; defaulting it documents that and keeps the type trivial.
    Bounds(const Bounds &b) = default;
};
|
||||
|
||||
/*!
|
||||
* \brief The OdmOrthoPhoto class is used to create an orthographic photo over a given area.
|
||||
* The class reads an oriented textured mesh from an OBJ-file.
|
||||
* The class uses file read from pcl.
|
||||
* The class uses image read and write from opencv.
|
||||
*/
|
||||
/*!
 * \brief The OdmOrthoPhoto class is used to create an orthographic photo over a given area.
 * The class reads an oriented textured mesh from an OBJ-file.
 * The class uses file read from pcl.
 * The class uses image read and write from opencv.
 */
class OdmOrthoPhoto
{
public:
    OdmOrthoPhoto();
    ~OdmOrthoPhoto();

    /*!
     * \brief run Runs the ortho photo functionality using the provided input arguments.
     * For a list of accepted arguments, please see the main page documentation or
     * call the program with parameter "-help".
     * \param argc Application argument count.
     * \param argv Argument values.
     * \return 0 if successful.
     */
    int run(int argc, char* argv[]);

private:
    // Output raster dimensions.
    int width, height;

    // Parses command line arguments into the member fields below.
    void parseArguments(int argc, char* argv[]);
    // Prints usage information.
    void printHelp();

    // Main routine producing the ortho photo.
    void createOrthoPhoto();

    /*!
     * \brief Compute the boundary points so that the entire model fits inside the photo.
     *
     * \param mesh The model which decides the boundary.
     */
    Bounds computeBoundsForModel(const pcl::TextureMesh &mesh);

    /*!
     * \brief Creates a transformation which aligns the area for the orthophoto.
     */
    Eigen::Transform<float, 3, Eigen::Affine> getROITransform(float xMin, float yMin) const;

    // Output-band management; T is the per-sample data type.
    template <typename T>
    void initBands(int count);

    template <typename T>
    void initAlphaBand();

    template <typename T>
    void finalizeAlphaBand();

    // Writes the accumulated bands to a (Geo)TIFF with the given sample type.
    void saveTIFF(const std::string &filename, GDALDataType dataType);

    /*!
     * \brief Renders a triangle into the ortho photo.
     *
     * Pixel center defined as middle of pixel for triangle rasterisation, and in lower left corner for texture look-up.
     *
     * \param texture The texture of the polygon.
     * \param polygon The polygon as three indices relative to meshCloud.
     * \param meshCloud Contains all vertices.
     * \param uvs Contains the texture coordinates for the active material.
     * \param faceIndex The index of the face.
     */
    template <typename T>
    void drawTexturedTriangle(const cv::Mat &texture, const pcl::Vertices &polygon, const pcl::PointCloud<pcl::PointXYZ>::Ptr &meshCloud, const std::vector<Eigen::Vector2f> &uvs, size_t faceIndex);

    /*!
     * \brief Sets the color of a pixel in the photo.
     *
     * \param row The row index of the pixel.
     * \param col The column index of the pixel.
     * \param u The u texture-coordinate, multiplied with the number of columns in the texture.
     * \param v The v texture-coordinate, multiplied with the number of rows in the texture.
     * \param texture The texture from which to get the color.
     */
    template <typename T>
    void renderPixel(int row, int col, float u, float v, const cv::Mat &texture);

    /*!
     * \brief Calculates the barycentric coordinates of a point in a triangle.
     *
     * \param v1 The first triangle vertex.
     * \param v2 The second triangle vertex.
     * \param v3 The third triangle vertex.
     * \param x The x coordinate of the point.
     * \param y The y coordinate of the point.
     * \param l1 The first vertex weight.
     * \param l2 The second vertex weight.
     * \param l3 The third vertex weight.
     */
    void getBarycentricCoordinates(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3, float x, float y, float &l1, float &l2, float &l3) const;

    /*!
     * \brief Check if a given polygon is a sliver polygon.
     *
     * \param v1 The first vertex of the polygon.
     * \param v2 The second vertex of the polygon.
     * \param v3 The third vertex of the polygon.
     */
    bool isSliverPolygon(pcl::PointXYZ v1, pcl::PointXYZ v2, pcl::PointXYZ v3) const;

    /*!
     * \brief Check if the model is suitable for ortho photo generation.
     *
     * \param mesh The model.
     * \return True if the model is ok for generating ortho photo.
     */
    bool isModelOk(const pcl::TextureMesh &mesh);

    /*!
     * \brief Loads a model from an .obj file (replacement for the pcl obj loader).
     *
     * \param inputFile Path to the .obj file.
     * \param mesh The model.
     * \return True if model was loaded successfully.
     */
    bool loadObjFile(std::string inputFile, pcl::TextureMesh &mesh, std::vector<pcl::MTLReader> &companions);

    /*!
     * \brief Function is copied straight from the function in the pcl::io module.
     */
    bool readHeader (const std::string &file_name, pcl::PCLPointCloud2 &cloud,
                     Eigen::Vector4f &origin, Eigen::Quaternionf &orientation,
                     int &file_version, int &data_type, unsigned int &data_idx,
                     const int offset,
                     std::vector<pcl::MTLReader> &companions);

    Logger log_;                            /**< Logging object. */

    std::vector<std::string> inputFiles;    /**< Paths to the input model file(s). */
    std::string outputFile_;                /**< Path to the destination file. */
    std::string outputCornerFile_;          /**< Path to the output corner file. */
    std::string logFile_;                   /**< Path to the log file. */
    std::string bandsOrder;                 /**< Requested ordering of the output bands. */

    float resolution_;                      /**< The number of pixels per meter in the ortho photo. */

    std::vector<void *> bands;              /**< Per-band pixel data. */
    std::vector<GDALColorInterp> colorInterps; /**< GDAL color interpretation per band. */
    void *alphaBand;                        /**< Keep alpha band separate. */
    int currentBandIndex;                   /**< Current band index. */

    cv::Mat depth_; /**< The depth of the ortho photo as an OpenCV matrix, CV_32F. */
};
|
||||
|
||||
/*!
|
||||
* \brief The OdmOrthoPhoto class
|
||||
*/
|
||||
/*!
 * \brief Exception type thrown on errors in OdmOrthoPhoto.
 */
class OdmOrthoPhotoException : public std::exception
{

public:
    OdmOrthoPhotoException() : message("Error in OdmOrthoPhoto") {}
    OdmOrthoPhotoException(std::string msgInit) : message("Error in OdmOrthoPhoto:\n" + msgInit) {}
    ~OdmOrthoPhotoException() throw() {}
    virtual const char* what() const throw() {return message.c_str(); }

private:
    std::string message;    /**< The error message **/
};
|
|
@ -1,8 +0,0 @@
|
|||
// Ortho photo generator entry point.
#include "OdmOrthoPhoto.hpp"

// Delegates straight to OdmOrthoPhoto::run and propagates its exit code.
int main(int argc, char* argv[])
{
    OdmOrthoPhoto generator;
    return generator.run(argc, argv);
}
|
|
@ -0,0 +1,56 @@
|
|||
import os
|
||||
from opendm.net import download
|
||||
from opendm import log
|
||||
import zipfile
|
||||
import time
|
||||
import sys
|
||||
|
||||
def get_model(namespace, url, version, name="model.onnx"):
    """Return the local path to an AI model, downloading it on first use.

    Models are cached under storage/models/<namespace>/<version>/<name>
    (relative to the package, or under %PROGRAMDATA%/ODM on Windows).
    Returns None when the download fails or the expected file is missing
    after extraction.
    """
    version = version.replace(".", "_")

    # Resolve the platform-specific cache root.
    if sys.platform == 'win32':
        root = os.path.join(os.getenv('PROGRAMDATA'), "ODM")
    else:
        root = os.path.join(os.path.dirname(__file__), "..")
    versioned_dir = os.path.join(os.path.abspath(root), "storage", "models",
                                 namespace, version)

    if not os.path.isdir(versioned_dir):
        os.makedirs(versioned_dir, exist_ok=True)

    model_file = os.path.join(versioned_dir, name)
    if os.path.isfile(model_file):
        # Cache hit: nothing to download.
        return model_file

    log.ODM_INFO("Downloading AI model from %s ..." % url)

    last_update = 0

    def callback(progress):
        nonlocal last_update
        # Throttle progress logging to once every 2 seconds,
        # but always report completion.
        if time.time() - last_update >= 2 or int(progress) == 100:
            log.ODM_INFO("Downloading: %s%%" % int(progress))
            last_update = time.time()

    try:
        downloaded_file = download(url, versioned_dir, progress_callback=callback)
    except Exception as e:
        log.ODM_WARNING("Cannot download %s: %s" % (url, str(e)))
        return None

    # Zip archives are unpacked in place and the archive removed.
    if os.path.basename(downloaded_file).lower().endswith(".zip"):
        log.ODM_INFO("Extracting %s ..." % downloaded_file)
        with zipfile.ZipFile(downloaded_file, 'r') as z:
            z.extractall(versioned_dir)
        os.remove(downloaded_file)

    if not os.path.isfile(model_file):
        log.ODM_WARNING("Cannot find %s (is the URL to the AI model correct?)" % model_file)
        return None
    return model_file
|
|
@ -0,0 +1,147 @@
|
|||
import os
|
||||
import shutil
|
||||
import json
|
||||
import codem
|
||||
import dataclasses
|
||||
import pdal
|
||||
import numpy as np
|
||||
import rasterio
|
||||
from rasterio.crs import CRS
|
||||
from opendm.utils import double_quote
|
||||
from opendm import log
|
||||
from opendm import io
|
||||
from opendm import system
|
||||
from opendm.concurrency import get_max_memory
|
||||
|
||||
def get_point_cloud_crs(file):
    """Return the horizontal CRS of a point cloud file as a string.

    Uses PDAL's quickinfo (no full read) and normalizes the SRS via
    rasterio's CRS class.
    """
    metadata = pdal.Pipeline(json.dumps([ file ])).quickinfo

    # quickinfo keys are stage names; pick the reader's entry.
    reader_metadata = [val for key, val in metadata.items() if "readers" in key]
    srs = reader_metadata[0]["srs"]["horizontal"]
    return str(CRS.from_string(srs))
|
||||
|
||||
def get_raster_crs(file):
    """Return the CRS of a raster file as a string."""
    with rasterio.open(file, 'r') as src:
        return str(src.crs)
|
||||
|
||||
def reproject_point_cloud(file, out_srs):
    """Reproject a point cloud to out_srs with PDAL.

    Returns the path of the temporary reprojected file (caller is
    responsible for cleanup).
    """
    out_file = io.related_file_path(file, postfix="_reprojected_tmp")
    stages = [
        file,
        {
            "type": "filters.reprojection",
            "out_srs": out_srs
        },
        out_file,
    ]
    pdal.Pipeline(json.dumps(stages)).execute()
    return out_file
|
||||
|
||||
def reproject_raster(file, out_srs):
    """Reproject a raster to out_srs with gdalwarp.

    Returns the path of the temporary reprojected file (caller is
    responsible for cleanup).
    """
    out_file = io.related_file_path(file, postfix="_reprojected_tmp")
    system.run('gdalwarp '
               '-t_srs {out_srs} '
               '{input} '
               '{output} '
               '--config GDAL_CACHEMAX {max_memory}% '.format(
                   out_srs=out_srs,
                   input=double_quote(file),
                   output=double_quote(out_file),
                   max_memory=get_max_memory()))
    return out_file
|
||||
|
||||
def compute_alignment_matrix(input_laz, align_file, stats_dir):
    """Compute a 4x4 matrix that registers input_laz onto a reference dataset.

    The reference (align_file) may be a raster (.tif) or a point cloud
    (.las/.laz); it is reprojected to the input's CRS first when the two
    CRS strings differ. Registration is done with CODEM in two passes
    (coarse, then fine/ICP) and its statistics are written into stats_dir,
    which is recreated from scratch.

    :param input_laz: path to the point cloud to be aligned
    :param align_file: path to the reference raster or point cloud
    :param stats_dir: directory that receives CODEM output and registration.json
    :return: 4x4 numpy transformation matrix, or None for unsupported files
    """
    # Start with a clean stats directory.
    if os.path.exists(stats_dir):
        shutil.rmtree(stats_dir)
    os.mkdir(stats_dir)

    # Check if we need to reproject align file
    input_crs = get_point_cloud_crs(input_laz)
    log.ODM_INFO("Input CRS: %s" % input_crs)

    _, ext = os.path.splitext(align_file)
    repr_func = None

    # Pick the CRS reader and reprojection function matching the file type.
    if ext.lower() in [".tif"]:
        align_crs = get_raster_crs(align_file)
        repr_func = reproject_raster
    elif ext.lower() in [".las", ".laz"]:
        align_crs = get_point_cloud_crs(align_file)
        repr_func = reproject_point_cloud
    else:
        log.ODM_WARNING("Unsupported alignment file: %s" % align_file)
        return

    # Temporary files created during reprojection, removed in `finally`.
    to_delete = []

    try:
        log.ODM_INFO("Align CRS: %s" % align_crs)
        if input_crs != align_crs:
            # Reprojection needed
            log.ODM_INFO("Reprojecting %s to %s" % (align_file, input_crs))
            align_file = repr_func(align_file, input_crs)
            to_delete.append(align_file)

        # CODEM pipeline: preprocess both datasets, then run a coarse
        # registration followed by a fine (ICP) registration.
        conf = dataclasses.asdict(codem.CodemRunConfig(align_file, input_laz, OUTPUT_DIR=stats_dir))
        fnd_obj, aoi_obj = codem.preprocess(conf)
        fnd_obj.prep()
        aoi_obj.prep()
        log.ODM_INFO("Aligning reconstruction to %s" % align_file)
        log.ODM_INFO("Coarse registration...")
        dsm_reg = codem.coarse_registration(fnd_obj, aoi_obj, conf)
        log.ODM_INFO("Fine registration...")
        icp_reg = codem.fine_registration(fnd_obj, aoi_obj, dsm_reg, conf)

        app_reg = codem.registration.ApplyRegistration(
            fnd_obj,
            aoi_obj,
            icp_reg.registration_parameters,
            icp_reg.residual_vectors,
            icp_reg.residual_origins,
            conf,
            None,
        )

        reg = app_reg.get_registration_transformation()

        # Write JSON to stats folder
        with open(os.path.join(stats_dir, "registration.json"), 'w') as f:
            # The 'matrix' entries are removed before serializing the
            # remaining registration parameters.
            del dsm_reg.registration_parameters['matrix']
            del icp_reg.registration_parameters['matrix']

            f.write(json.dumps({
                'coarse': dsm_reg.registration_parameters,
                'fine': icp_reg.registration_parameters,
            }, indent=4))

        # reg['matrix'] is a whitespace-separated string of 16 values.
        matrix = np.fromstring(reg['matrix'], dtype=float, sep=' ').reshape((4, 4))
        return matrix
    finally:
        # Remove any temporary reprojected files.
        for f in to_delete:
            if os.path.isfile(f):
                os.unlink(f)
|
||||
|
||||
def transform_point_cloud(input_laz, a_matrix, output_laz):
    """Apply a 4x4 transformation matrix to a point cloud using PDAL."""
    matrix_str = " ".join(list(map(str, a_matrix.flatten())))
    stages = [
        input_laz,
        {
            'type': 'filters.transformation',
            'matrix': matrix_str,
        },
        output_laz,
    ]
    pdal.Pipeline(json.dumps(stages)).execute()
|
||||
|
||||
def transform_obj(input_obj, a_matrix, geo_offset, output_obj):
    """Apply a 4x4 transformation matrix to the vertices of an OBJ file.

    Each "v x y z" line is lifted to homogeneous coordinates, shifted by
    the XY geo offset into georeferenced space, multiplied by a_matrix,
    then shifted back. All other lines are copied through unchanged.

    :param input_obj: path to the source .obj file
    :param a_matrix: 4x4 numpy transformation matrix
    :param geo_offset: (x, y) georeferencing offset of the model
    :param output_obj: path to the destination .obj file
    """
    g_off = np.array([geo_offset[0], geo_offset[1], 0, 0])

    with open(input_obj, 'r') as fin:
        with open(output_obj, 'w') as fout:
            for line in fin:
                if line.startswith("v "):
                    # Parse "x y z" into a homogeneous [x, y, z, 1] vector.
                    # (np.array over split() replaces the deprecated
                    # text-mode np.fromstring.)
                    v = np.array((line.strip()[2:] + " 1").split(), dtype=float)
                    vt = (a_matrix.dot(v + g_off) - g_off)[:3]
                    fout.write("v " + " ".join(map(str, list(vt))) + '\n')
                else:
                    fout.write(line)
|
|
@ -0,0 +1,76 @@
|
|||
import json
import os
import re

from opendm import log
from shlex import _find_unsafe
|
||||
|
||||
def double_quote(s):
    """Return a shell-escaped, double-quoted version of the string *s*.

    Strings containing only safe characters are returned unchanged;
    empty input becomes '""'.
    """
    if not s:
        return '""'
    # Same ASCII character class CPython's shlex uses internally, inlined
    # here instead of relying on the private shlex._find_unsafe helper.
    if re.search(r'[^\w@%+=:,./-]', s, re.ASCII) is None:
        return s

    # use double quotes, and prefix double quotes with a \
    # the string $"b is then quoted as "$\"b"
    return '"' + s.replace('"', '\\\"') + '"'
|
||||
|
||||
def args_to_dict(args):
    """Serialize an argparse namespace into a sorted, sanitized dict.

    Keys ending in "_is_set" are bookkeeping flags and are omitted.
    A non-None sm_cluster value (an access token) is masked as True so
    it never leaks into saved options.
    """
    source = vars(args)
    result = {}
    for key in sorted(source.keys()):
        # Skip _is_set keys
        if key.endswith("_is_set"):
            continue

        value = source[key]
        # Don't leak token
        result[key] = True if (key == 'sm_cluster' and value is not None) else value

    return result
|
||||
|
||||
def save_opts(opts_json, args):
    """Persist the current run options to opts_json as JSON.

    Failures are logged as warnings rather than raised, so option
    bookkeeping never aborts a run.
    """
    try:
        with open(opts_json, "w", encoding='utf-8') as out:
            out.write(json.dumps(args_to_dict(args)))
    except Exception as err:
        log.ODM_WARNING("Cannot save options to %s: %s" % (opts_json, str(err)))
|
||||
|
||||
def compare_args(opts_json, args, rerun_stages):
    """Compare the current args against those saved from a previous run.

    Returns a dict mapping each changed option (that is tied to a rerun
    stage) to its previous value. Returns an empty dict when there is no
    saved options file or it cannot be read.
    """
    if not os.path.isfile(opts_json):
        return {}

    try:
        diff = {}

        with open(opts_json, "r", encoding="utf-8") as f:
            prev_args = json.loads(f.read())
            cur_args = args_to_dict(args)

            for opt in cur_args:
                cur_value = cur_args[opt]
                prev_value = prev_args.get(opt, None)
                stage = rerun_stages.get(opt, None)

                # Only options associated with a pipeline stage matter.
                if stage is not None and cur_value != prev_value:
                    diff[opt] = prev_value

        return diff
    except Exception:
        # A corrupt/unreadable options file is treated as "no diff".
        # (Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; those now propagate.)
        return {}
|
||||
|
||||
def find_rerun_stage(opts_json, args, rerun_stages, processopts):
    """Determine from which pipeline stage processing must be re-run.

    Returns (stages_to_run, changed_options); (None, {}) when nothing
    changed or the user explicitly pinned a rerun stage.
    """
    # Respect an explicit user choice of rerun stage.
    explicit = ('rerun_is_set' in args or
                'rerun_from_is_set' in args or
                'rerun_all_is_set' in args)

    if not explicit:
        args_diff = compare_args(opts_json, args, rerun_stages)
        if args_diff:
            # Split workflows always restart from the dataset stage.
            if 'split_is_set' in args:
                return processopts[processopts.index('dataset'):], args_diff

            try:
                stage_idxs = [processopts.index(rerun_stages[opt]) for opt in args_diff.keys() if rerun_stages[opt] is not None]
                return processopts[min(stage_idxs):], args_diff
            except ValueError as e:
                print(str(e))

    return None, {}
|
|
@ -0,0 +1,90 @@
|
|||
|
||||
import time
|
||||
import numpy as np
|
||||
import cv2
|
||||
import os
|
||||
import onnxruntime as ort
|
||||
from opendm import log
|
||||
from threading import Lock
|
||||
|
||||
mutex = Lock()
|
||||
|
||||
# Implementation based on https://github.com/danielgatis/rembg by Daniel Gatis
|
||||
|
||||
# Use GPU if it is available, otherwise CPU
|
||||
provider = "CUDAExecutionProvider" if "CUDAExecutionProvider" in ort.get_available_providers() else "CPUExecutionProvider"
|
||||
|
||||
class BgFilter():
    """Computes background masks for images with an ONNX segmentation model."""

    def __init__(self, model):
        # Path to the .onnx model file.
        self.model = model

        log.ODM_INFO(' ?> Using provider %s' % provider)
        self.load_model()


    def load_model(self):
        """Create the ONNX Runtime inference session for the model."""
        log.ODM_INFO(' -> Loading the model')

        self.session = ort.InferenceSession(self.model, providers=[provider])

    def normalize(self, img, mean, std, size):
        """Resize and mean/std-normalize an image into the model input dict.

        Returns {input_name: float32 array of shape (1, 3, H, W)} suitable
        for session.run().
        """
        im = cv2.resize(img, size, interpolation=cv2.INTER_AREA)
        im_ary = np.array(im)
        # Scale to [0, 1] relative to the image's own maximum value.
        im_ary = im_ary / np.max(im_ary)

        # Per-channel mean/std normalization.
        tmpImg = np.zeros((im_ary.shape[0], im_ary.shape[1], 3))
        tmpImg[:, :, 0] = (im_ary[:, :, 0] - mean[0]) / std[0]
        tmpImg[:, :, 1] = (im_ary[:, :, 1] - mean[1]) / std[1]
        tmpImg[:, :, 2] = (im_ary[:, :, 2] - mean[2]) / std[2]

        # HWC -> CHW layout for the network.
        tmpImg = tmpImg.transpose((2, 0, 1))

        return {
            self.session.get_inputs()[0]
            .name: np.expand_dims(tmpImg, 0)
            .astype(np.float32)
        }

    def get_mask(self, img):
        """Run inference and return a binary (0/255) uint8 mask sized like img."""
        height, width, c = img.shape

        # Serialize session.run() calls with the module-level lock.
        with mutex:
            ort_outs = self.session.run(
                None,
                self.normalize(
                    img, (0.485, 0.456, 0.406), (0.229, 0.224, 0.225), (320, 320)  # <-- image size
                ),
            )

        pred = ort_outs[0][:, 0, :, :]

        # Min-max normalize the prediction to [0, 1].
        ma = np.max(pred)
        mi = np.min(pred)

        pred = (pred - mi) / (ma - mi)
        pred = np.squeeze(pred)

        # Scale to 8-bit, resize back to the original dimensions and
        # threshold to a hard binary mask.
        pred *= 255
        pred = pred.astype("uint8")
        output = cv2.resize(pred, (width, height), interpolation=cv2.INTER_LANCZOS4)
        output[output > 127] = 255
        output[output <= 127] = 0

        return output

    def run_img(self, img_path, dest):
        """Compute a mask for img_path and write <name>_mask.png into dest.

        Returns the mask file path, or None when the image cannot be read.
        """
        img = cv2.imread(img_path, cv2.IMREAD_COLOR)
        if img is None:
            return None

        # OpenCV loads BGR; the normalization constants are for RGB.
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        mask = self.get_mask(img)

        img_name = os.path.basename(img_path)
        fpath = os.path.join(dest, img_name)

        fname, _ = os.path.splitext(fpath)
        mask_name = fname + '_mask.png'
        cv2.imwrite(mask_name, mask)

        return mask_name
|
|
@ -0,0 +1,121 @@
|
|||
import fiona
|
||||
import fiona.crs
|
||||
import os
|
||||
import io
|
||||
import json
|
||||
from opendm import system
|
||||
from pyproj import CRS
|
||||
from opendm.location import transformer
|
||||
from opendm.utils import double_quote
|
||||
from osgeo import ogr
|
||||
from opendm.shots import get_origin
|
||||
|
||||
def compute_boundary_from_shots(reconstruction_json, buffer=0, reconstruction_offset=(0, 0)):
    """Estimate a boundary polygon from the camera shot positions.

    Builds the convex hull of all shot origins that have a usable GPS DOP,
    buffered outward by *buffer* units, and returns its exterior ring
    coordinates (via load_boundary). Returns None when fewer than 3
    usable shots exist.

    :param reconstruction_json: path to OpenSfM's reconstruction.json
    :param buffer: distance by which to expand the hull
    :param reconstruction_offset: (x, y) offset added to shot origins
    :raises IOError: when reconstruction_json does not exist
    """
    if not os.path.isfile(reconstruction_json):
        raise IOError(reconstruction_json + " does not exist.")

    with open(reconstruction_json) as f:
        data = json.load(f)
    reconstruction = data[0]

    mp = ogr.Geometry(ogr.wkbMultiPoint)

    for shot_image in reconstruction['shots']:
        shot = reconstruction['shots'][shot_image]
        # Skip shots without a usable GPS fix (dop defaults to 999999).
        if shot.get('gps_dop', 999999) < 999999:
            # (Removed an unused lookup of the shot's camera model here.)
            p = ogr.Geometry(ogr.wkbPoint)
            origin = get_origin(shot)

            p.AddPoint_2D(origin[0] + reconstruction_offset[0], origin[1] + reconstruction_offset[1])
            mp.AddGeometry(p)

    # A convex hull needs at least 3 points.
    if mp.GetGeometryCount() < 3:
        return None

    convexhull = mp.ConvexHull()
    boundary = convexhull.Buffer(buffer)

    return load_boundary(boundary.ExportToJson())
|
||||
|
||||
def load_boundary(boundary_json, reproject_to_proj4=None):
    """Parse a GeoJSON polygon boundary and return its exterior ring coords.

    Accepts either a GeoJSON string or an already-parsed object. When
    reproject_to_proj4 is given, the coordinates are reprojected to that
    CRS. Raises IOError when the input is not a single polygon feature.
    """
    if not isinstance(boundary_json, str):
        boundary_json = json.dumps(boundary_json)

    with fiona.open(io.BytesIO(boundary_json.encode('utf-8')), 'r') as src:
        if len(src) != 1:
            raise IOError("Boundary must have a single polygon (found: %s)" % len(src))

        geom = src[0]['geometry']

        if geom['type'] != 'Polygon':
            raise IOError("Boundary must have a polygon feature (found: %s)" % geom['type'])

        rings = geom['coordinates']
        if not rings:
            raise IOError("Boundary geometry has no rings")

        # Only the exterior ring is used; interior rings (holes) are ignored.
        coords = rings[0]
        if not coords:
            raise IOError("Boundary geometry has no coordinates")

        dimensions = len(coords[0])

        if reproject_to_proj4 is not None:
            t = transformer(CRS.from_proj4(fiona.crs.to_string(src.crs)),
                            CRS.from_proj4(reproject_to_proj4))
            coords = [t.TransformPoint(*c)[:dimensions] for c in coords]

        return coords
|
||||
|
||||
def boundary_offset(boundary, reconstruction_offset):
|
||||
if boundary is None or reconstruction_offset is None:
|
||||
return boundary
|
||||
|
||||
res = []
|
||||
dims = len(boundary[0])
|
||||
for c in boundary:
|
||||
if dims == 2:
|
||||
res.append((c[0] - reconstruction_offset[0], c[1] - reconstruction_offset[1]))
|
||||
else:
|
||||
res.append((c[0] - reconstruction_offset[0], c[1] - reconstruction_offset[1], c[2]))
|
||||
|
||||
return res
|
||||
|
||||
def as_polygon(boundary):
|
||||
if boundary is None:
|
||||
return None
|
||||
|
||||
return "POLYGON((" + ", ".join([" ".join(map(str, c)) for c in boundary]) + "))"
|
||||
|
||||
def as_geojson(boundary):
|
||||
return '{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":{"type":"Polygon","coordinates":[%s]}}]}' % str(list(map(list, boundary)))
|
||||
|
||||
def export_to_bounds_files(boundary, proj4, bounds_json_file, bounds_gpkg_file):
|
||||
with open(bounds_json_file, "w") as f:
|
||||
f.write(json.dumps({
|
||||
"type": "FeatureCollection",
|
||||
"name": "bounds",
|
||||
"features": [{
|
||||
"type": "Feature",
|
||||
"properties": {},
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [boundary]
|
||||
}
|
||||
}]
|
||||
}))
|
||||
|
||||
if os.path.isfile(bounds_gpkg_file):
|
||||
os.remove(bounds_gpkg_file)
|
||||
|
||||
kwargs = {
|
||||
'proj4': proj4,
|
||||
'input': double_quote(bounds_json_file),
|
||||
'output': double_quote(bounds_gpkg_file)
|
||||
}
|
||||
|
||||
system.run('ogr2ogr -overwrite -f GPKG -a_srs "{proj4}" {output} {input}'.format(**kwargs))
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
import os
|
||||
import shutil
|
||||
from opendm import system
|
||||
from opendm.concurrency import get_max_memory
|
||||
from opendm import io
|
||||
from opendm import log
|
||||
|
||||
def convert_to_cogeo(src_path, blocksize=256, max_workers=1, compression="DEFLATE"):
|
||||
"""
|
||||
Guarantee that the .tif passed as an argument is a Cloud Optimized GeoTIFF (cogeo)
|
||||
The file is destructively converted into a cogeo.
|
||||
If the file cannot be converted, the function does not change the file
|
||||
:param src_path: path to GeoTIFF
|
||||
:return: True on success
|
||||
"""
|
||||
|
||||
if not os.path.isfile(src_path):
|
||||
logger.warning("Cannot convert to cogeo: %s (file does not exist)" % src_path)
|
||||
return False
|
||||
|
||||
log.ODM_INFO("Optimizing %s as Cloud Optimized GeoTIFF" % src_path)
|
||||
|
||||
|
||||
tmpfile = io.related_file_path(src_path, postfix='_cogeo')
|
||||
swapfile = io.related_file_path(src_path, postfix='_cogeo_swap')
|
||||
|
||||
kwargs = {
|
||||
'threads': max_workers if max_workers else 'ALL_CPUS',
|
||||
'blocksize': blocksize,
|
||||
'max_memory': get_max_memory(),
|
||||
'src_path': src_path,
|
||||
'tmpfile': tmpfile,
|
||||
'compress': compression,
|
||||
'predictor': '2' if compression in ['LZW', 'DEFLATE'] else '1',
|
||||
}
|
||||
|
||||
try:
|
||||
system.run("gdal_translate "
|
||||
"-of COG "
|
||||
"-co NUM_THREADS={threads} "
|
||||
"-co BLOCKSIZE={blocksize} "
|
||||
"-co COMPRESS={compress} "
|
||||
"-co PREDICTOR={predictor} "
|
||||
"-co BIGTIFF=IF_SAFER "
|
||||
"-co RESAMPLING=NEAREST "
|
||||
"--config GDAL_CACHEMAX {max_memory}% "
|
||||
"--config GDAL_NUM_THREADS {threads} "
|
||||
"\"{src_path}\" \"{tmpfile}\" ".format(**kwargs))
|
||||
except Exception as e:
|
||||
log.ODM_WARNING("Cannot create Cloud Optimized GeoTIFF: %s" % str(e))
|
||||
|
||||
if os.path.isfile(tmpfile):
|
||||
shutil.move(src_path, swapfile) # Move to swap location
|
||||
|
||||
try:
|
||||
shutil.move(tmpfile, src_path)
|
||||
except IOError as e:
|
||||
log.ODM_WARNING("Cannot move %s to %s: %s" % (tmpfile, src_path, str(e)))
|
||||
shutil.move(swapfile, src_path) # Attempt to restore
|
||||
|
||||
if os.path.isfile(swapfile):
|
||||
os.remove(swapfile)
|
||||
|
||||
return True
|
||||
else:
|
||||
return False
|
|
@ -1,5 +1,6 @@
|
|||
from psutil import virtual_memory
|
||||
from vmem import virtual_memory
|
||||
import os
|
||||
import sys
|
||||
try:
|
||||
import Queue as queue
|
||||
except:
|
||||
|
@ -24,6 +25,9 @@ def get_max_memory_mb(minimum = 100, use_at_most = 0.5):
|
|||
"""
|
||||
return max(minimum, (virtual_memory().available / 1024 / 1024) * use_at_most)
|
||||
|
||||
def get_total_memory():
|
||||
return virtual_memory().total
|
||||
|
||||
def parallel_map(func, items, max_workers=1, single_thread_fallback=True):
|
||||
"""
|
||||
Our own implementation for parallel processing
|
||||
|
@ -65,7 +69,7 @@ def parallel_map(func, items, max_workers=1, single_thread_fallback=True):
|
|||
|
||||
i = 1
|
||||
for t in items:
|
||||
pq.put((i, t.copy()))
|
||||
pq.put((i, t))
|
||||
i += 1
|
||||
|
||||
def stop_workers():
|
||||
|
|
629
opendm/config.py
629
opendm/config.py
|
@ -11,19 +11,106 @@ import sys
|
|||
# parse arguments
|
||||
processopts = ['dataset', 'split', 'merge', 'opensfm', 'openmvs', 'odm_filterpoints',
|
||||
'odm_meshing', 'mvs_texturing', 'odm_georeferencing',
|
||||
'odm_dem', 'odm_orthophoto', 'odm_report']
|
||||
'odm_dem', 'odm_orthophoto', 'odm_report', 'odm_postprocess']
|
||||
|
||||
rerun_stages = {
|
||||
'3d_tiles': 'odm_postprocess',
|
||||
'align': 'odm_georeferencing',
|
||||
'auto_boundary': 'odm_filterpoints',
|
||||
'auto_boundary_distance': 'odm_filterpoints',
|
||||
'bg_removal': 'dataset',
|
||||
'boundary': 'odm_filterpoints',
|
||||
'build_overviews': 'odm_orthophoto',
|
||||
'camera_lens': 'dataset',
|
||||
'cameras': 'dataset',
|
||||
'cog': 'odm_dem',
|
||||
'copy_to': 'odm_postprocess',
|
||||
'crop': 'odm_georeferencing',
|
||||
'dem_decimation': 'odm_dem',
|
||||
'dem_euclidean_map': 'odm_dem',
|
||||
'dem_gapfill_steps': 'odm_dem',
|
||||
'dem_resolution': 'odm_dem',
|
||||
'dsm': 'odm_dem',
|
||||
'dtm': 'odm_dem',
|
||||
'end_with': None,
|
||||
'fast_orthophoto': 'odm_filterpoints',
|
||||
'feature_quality': 'opensfm',
|
||||
'feature_type': 'opensfm',
|
||||
'force_gps': 'opensfm',
|
||||
'gcp': 'dataset',
|
||||
'geo': 'dataset',
|
||||
'gltf': 'mvs_texturing',
|
||||
'gps_accuracy': 'dataset',
|
||||
'help': None,
|
||||
'ignore_gsd': 'opensfm',
|
||||
'matcher_neighbors': 'opensfm',
|
||||
'matcher_order': 'opensfm',
|
||||
'matcher_type': 'opensfm',
|
||||
'max_concurrency': None,
|
||||
'merge': 'Merge',
|
||||
'mesh_octree_depth': 'odm_meshing',
|
||||
'mesh_size': 'odm_meshing',
|
||||
'min_num_features': 'opensfm',
|
||||
'name': None,
|
||||
'no_gpu': None,
|
||||
'optimize_disk_space': None,
|
||||
'orthophoto_compression': 'odm_orthophoto',
|
||||
'orthophoto_cutline': 'odm_orthophoto',
|
||||
'orthophoto_kmz': 'odm_orthophoto',
|
||||
'orthophoto_no_tiled': 'odm_orthophoto',
|
||||
'orthophoto_png': 'odm_orthophoto',
|
||||
'orthophoto_resolution': 'odm_orthophoto',
|
||||
'pc_classify': 'odm_georeferencing',
|
||||
'pc_copc': 'odm_georeferencing',
|
||||
'pc_csv': 'odm_georeferencing',
|
||||
'pc_ept': 'odm_georeferencing',
|
||||
'pc_filter': 'openmvs',
|
||||
'pc_las': 'odm_georeferencing',
|
||||
'pc_quality': 'opensfm',
|
||||
'pc_rectify': 'odm_georeferencing',
|
||||
'pc_sample': 'odm_filterpoints',
|
||||
'pc_skip_geometric': 'openmvs',
|
||||
'primary_band': 'dataset',
|
||||
'project_path': None,
|
||||
'radiometric_calibration': 'opensfm',
|
||||
'rerun': None,
|
||||
'rerun_all': None,
|
||||
'rerun_from': None,
|
||||
'rolling_shutter': 'opensfm',
|
||||
'rolling_shutter_readout': 'opensfm',
|
||||
'sfm_algorithm': 'opensfm',
|
||||
'sfm_no_partial': 'opensfm',
|
||||
'skip_3dmodel': 'odm_meshing',
|
||||
'skip_band_alignment': 'opensfm',
|
||||
'skip_orthophoto': 'odm_orthophoto',
|
||||
'skip_report': 'odm_report',
|
||||
'sky_removal': 'dataset',
|
||||
'sm_cluster': 'split',
|
||||
'sm_no_align': 'split',
|
||||
'smrf_scalar': 'odm_dem',
|
||||
'smrf_slope': 'odm_dem',
|
||||
'smrf_threshold': 'odm_dem',
|
||||
'smrf_window': 'odm_dem',
|
||||
'split': 'split',
|
||||
'split_image_groups': 'split',
|
||||
'split_overlap': 'split',
|
||||
'texturing_keep_unseen_faces': 'mvs_texturing',
|
||||
'texturing_single_material': 'mvs_texturing',
|
||||
'texturing_skip_global_seam_leveling': 'mvs_texturing',
|
||||
'tiles': 'odm_dem',
|
||||
'use_3dmesh': 'mvs_texturing',
|
||||
'use_exif': 'dataset',
|
||||
'use_fixed_camera_params': 'opensfm',
|
||||
'use_hybrid_bundle_adjustment': 'opensfm',
|
||||
'version': None,
|
||||
'video_limit': 'dataset',
|
||||
'video_resolution': 'dataset',
|
||||
}
|
||||
|
||||
with open(os.path.join(context.root_path, 'VERSION')) as version_file:
|
||||
__version__ = version_file.read().strip()
|
||||
|
||||
|
||||
def alphanumeric_string(string):
|
||||
import re
|
||||
if re.match('^[a-zA-Z0-9_-]+$', string) is None:
|
||||
msg = '{0} is not a valid name. Must use alphanumeric characters.'.format(string)
|
||||
raise argparse.ArgumentTypeError(msg)
|
||||
return string
|
||||
|
||||
def path_or_json_string(string):
|
||||
try:
|
||||
return io.path_or_json_string_to_dict(string)
|
||||
|
@ -68,39 +155,34 @@ def config(argv=None, parser=None):
|
|||
if args is not None and argv is None:
|
||||
return args
|
||||
|
||||
if sys.platform == 'win32':
|
||||
usage_bin = 'run'
|
||||
else:
|
||||
usage_bin = 'run.sh'
|
||||
|
||||
if parser is None:
|
||||
parser = SettingsParser(description='ODM',
|
||||
usage='%(prog)s [options] <project name>',
|
||||
parser = SettingsParser(description='ODM is a command line toolkit to generate maps, point clouds, 3D models and DEMs from drone, balloon or kite images.',
|
||||
usage='%s [options] <dataset name>' % usage_bin,
|
||||
yaml_file=open(context.settings_path))
|
||||
|
||||
parser.add_argument('--project-path',
|
||||
metavar='<path>',
|
||||
action=StoreValue,
|
||||
help='Path to the project folder')
|
||||
|
||||
help='Path to the project folder. Your project folder should contain subfolders for each dataset. Each dataset should have an "images" folder.')
|
||||
parser.add_argument('name',
|
||||
metavar='<project name>',
|
||||
metavar='<dataset name>',
|
||||
action=StoreValue,
|
||||
type=alphanumeric_string,
|
||||
type=str,
|
||||
default='code',
|
||||
nargs='?',
|
||||
help='Name of Project (i.e subdirectory of projects folder)')
|
||||
|
||||
parser.add_argument('--resize-to',
|
||||
metavar='<integer>',
|
||||
action=StoreValue,
|
||||
default=2048,
|
||||
type=int,
|
||||
help='Legacy option (use --feature-quality instead). Resizes images by the largest side for feature extraction purposes only. '
|
||||
'Set to -1 to disable. This does not affect the final orthophoto '
|
||||
'resolution quality and will not resize the original images. Default: %(default)s')
|
||||
help='Name of dataset (i.e subfolder name within project folder). Default: %(default)s')
|
||||
|
||||
parser.add_argument('--end-with', '-e',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='odm_report',
|
||||
default='odm_postprocess',
|
||||
choices=processopts,
|
||||
help=('Can be one of:' + ' | '.join(processopts)))
|
||||
help='End processing at this stage. Can be one of: %(choices)s. Default: %(default)s')
|
||||
|
||||
rerun = parser.add_mutually_exclusive_group()
|
||||
|
||||
|
@ -108,42 +190,35 @@ def config(argv=None, parser=None):
|
|||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
choices=processopts,
|
||||
help=('Can be one of:' + ' | '.join(processopts)))
|
||||
help=('Rerun this stage only and stop. Can be one of: %(choices)s. Default: %(default)s'))
|
||||
|
||||
rerun.add_argument('--rerun-all',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='force rerun of all tasks')
|
||||
help='Permanently delete all previous results and rerun the processing pipeline.')
|
||||
|
||||
rerun.add_argument('--rerun-from',
|
||||
action=RerunFrom,
|
||||
metavar='<string>',
|
||||
choices=processopts,
|
||||
help=('Can be one of:' + ' | '.join(processopts)))
|
||||
|
||||
# parser.add_argument('--video',
|
||||
# metavar='<string>',
|
||||
# help='Path to the video file to process')
|
||||
|
||||
# parser.add_argument('--slam-config',
|
||||
# metavar='<string>',
|
||||
# help='Path to config file for orb-slam')
|
||||
help=('Rerun processing from this stage. Can be one of: %(choices)s. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--min-num-features',
|
||||
metavar='<integer>',
|
||||
action=StoreValue,
|
||||
default=8000,
|
||||
default=10000,
|
||||
type=int,
|
||||
help=('Minimum number of features to extract per image. '
|
||||
'More features leads to better results but slower '
|
||||
'execution. Default: %(default)s'))
|
||||
'More features can be useful for finding more matches between images, '
|
||||
'potentially allowing the reconstruction of areas with little overlap or insufficient features. '
|
||||
'More features also slow down processing. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--feature-type',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='sift',
|
||||
choices=['sift', 'hahog'],
|
||||
default='dspsift',
|
||||
choices=['akaze', 'dspsift', 'hahog', 'orb', 'sift'],
|
||||
help=('Choose the algorithm for extracting keypoints and computing descriptors. '
|
||||
'Can be one of: %(choices)s. Default: '
|
||||
'%(default)s'))
|
||||
|
@ -161,39 +236,30 @@ def config(argv=None, parser=None):
|
|||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='flann',
|
||||
choices=['flann', 'bow'],
|
||||
help=('Matcher algorithm, Fast Library for Approximate Nearest Neighbors or Bag of Words. FLANN is slower, but more stable. BOW is faster, but can sometimes miss valid matches. '
|
||||
choices=['bow', 'bruteforce', 'flann'],
|
||||
help=('Matcher algorithm, Fast Library for Approximate Nearest Neighbors or Bag of Words. FLANN is slower, but more stable. BOW is faster, but can sometimes miss valid matches. BRUTEFORCE is very slow but robust.'
|
||||
'Can be one of: %(choices)s. Default: '
|
||||
'%(default)s'))
|
||||
|
||||
parser.add_argument('--matcher-neighbors',
|
||||
metavar='<integer>',
|
||||
action=StoreValue,
|
||||
default=8,
|
||||
type=int,
|
||||
help='Number of nearest images to pre-match based on GPS '
|
||||
'exif data. Set to 0 to skip pre-matching. '
|
||||
'Neighbors works together with Distance parameter, '
|
||||
'set both to 0 to not use pre-matching. OpenSFM '
|
||||
'uses both parameters at the same time, Bundler '
|
||||
'uses only one which has value, prefering the '
|
||||
'Neighbors parameter. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--matcher-distance',
|
||||
metavar='<integer>',
|
||||
metavar='<positive integer>',
|
||||
action=StoreValue,
|
||||
default=0,
|
||||
type=int,
|
||||
help='Distance threshold in meters to find pre-matching '
|
||||
'images based on GPS exif data. Set both '
|
||||
'matcher-neighbors and this to 0 to skip '
|
||||
'pre-matching. Default: %(default)s')
|
||||
help='Perform image matching with the nearest images based on GPS exif data. Set to 0 to match by triangulation. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--matcher-order',
|
||||
metavar='<positive integer>',
|
||||
action=StoreValue,
|
||||
default=0,
|
||||
type=int,
|
||||
help='Perform image matching with the nearest N images based on image filename order. Can speed up processing of sequential images, such as those extracted from video. It is applied only on non-georeferenced datasets. Set to 0 to disable. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--use-fixed-camera-params',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Turn off camera parameter optimization during bundler')
|
||||
help='Turn off camera parameter optimization during bundle adjustment. This can be sometimes useful for improving results that exhibit doming/bowling or when images are taken with a rolling shutter camera. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--cameras',
|
||||
default='',
|
||||
|
@ -205,16 +271,15 @@ def config(argv=None, parser=None):
|
|||
'Can be specified either as path to a cameras.json file or as a '
|
||||
'JSON string representing the contents of a '
|
||||
'cameras.json file. Default: %(default)s')
|
||||
|
||||
|
||||
parser.add_argument('--camera-lens',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='auto',
|
||||
choices=['auto', 'perspective', 'brown', 'fisheye', 'spherical'],
|
||||
choices=['auto', 'perspective', 'brown', 'fisheye', 'fisheye_opencv', 'spherical', 'equirectangular', 'dual'],
|
||||
help=('Set a camera projection type. Manually setting a value '
|
||||
'can help improve geometric undistortion. By default the application '
|
||||
'tries to determine a lens type from the images metadata. Can be '
|
||||
'set to one of: %(choices)s. Default: '
|
||||
'tries to determine a lens type from the images metadata. Can be one of: %(choices)s. Default: '
|
||||
'%(default)s'))
|
||||
|
||||
parser.add_argument('--radiometric-calibration',
|
||||
|
@ -223,11 +288,11 @@ def config(argv=None, parser=None):
|
|||
default='none',
|
||||
choices=['none', 'camera', 'camera+sun'],
|
||||
help=('Set the radiometric calibration to perform on images. '
|
||||
'When processing multispectral images you should set this option '
|
||||
'to obtain reflectance values (otherwise you will get digital number values). '
|
||||
'[camera] applies black level, vignetting, row gradient gain/exposure compensation (if appropriate EXIF tags are found). '
|
||||
'When processing multispectral and thermal images you should set this option '
|
||||
'to obtain reflectance/temperature values (otherwise you will get digital number values). '
|
||||
'[camera] applies black level, vignetting, row gradient gain/exposure compensation (if appropriate EXIF tags are found) and computes absolute temperature values. '
|
||||
'[camera+sun] is experimental, applies all the corrections of [camera], plus compensates for spectral radiance registered via a downwelling light sensor (DLS) taking in consideration the angle of the sun. '
|
||||
'Can be set to one of: %(choices)s. Default: '
|
||||
'Can be one of: %(choices)s. Default: '
|
||||
'%(default)s'))
|
||||
|
||||
parser.add_argument('--max-concurrency',
|
||||
|
@ -239,78 +304,80 @@ def config(argv=None, parser=None):
|
|||
'processes. Peak memory requirement is ~1GB per '
|
||||
'thread and 2 megapixel image resolution. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--depthmap-resolution',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=640,
|
||||
help=('Legacy option (use --pc-quality instead). Controls the density of the point cloud by setting the resolution of the depthmap images. Higher values take longer to compute '
|
||||
'but produce denser point clouds. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--opensfm-depthmap-min-consistent-views',
|
||||
metavar='<integer: 2 <= x <= 9>',
|
||||
action=StoreValue,
|
||||
type=int,
|
||||
default=3,
|
||||
help=('Minimum number of views that should reconstruct a point for it to be valid. Use lower values '
|
||||
'if your images have less overlap. Lower values result in denser point clouds '
|
||||
'but with more noise. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--opensfm-depthmap-method',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='PATCH_MATCH',
|
||||
choices=['PATCH_MATCH', 'BRUTE_FORCE', 'PATCH_MATCH_SAMPLE'],
|
||||
help=('Raw depthmap computation algorithm. '
|
||||
'PATCH_MATCH and PATCH_MATCH_SAMPLE are faster, but might miss some valid points. '
|
||||
'BRUTE_FORCE takes longer but produces denser reconstructions. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--opensfm-depthmap-min-patch-sd',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=1,
|
||||
help=('When using PATCH_MATCH or PATCH_MATCH_SAMPLE, controls the standard deviation threshold to include patches. '
|
||||
'Patches with lower standard deviation are ignored. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--use-hybrid-bundle-adjustment',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Run local bundle adjustment for every image added to the reconstruction and a global '
|
||||
'adjustment every 100 images. Speeds up reconstruction for very large datasets.')
|
||||
'adjustment every 100 images. Speeds up reconstruction for very large datasets. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--sfm-algorithm',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='incremental',
|
||||
choices=['incremental', 'triangulation', 'planar'],
|
||||
help=('Choose the structure from motion algorithm. For aerial datasets, if camera GPS positions and angles are available, triangulation can generate better results. For planar scenes captured at fixed altitude with nadir-only images, planar can be much faster. '
|
||||
'Can be one of: %(choices)s. Default: '
|
||||
'%(default)s'))
|
||||
|
||||
parser.add_argument('--sfm-no-partial',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Do not attempt to merge partial reconstructions. This can happen when images do not have sufficient overlap or are isolated. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--sky-removal',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Automatically compute image masks using AI to remove the sky. Experimental. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--bg-removal',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Automatically compute image masks using AI to remove the background. Experimental. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--use-3dmesh',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Use a full 3D mesh to compute the orthophoto instead of a 2.5D mesh. This option is a bit faster and provides similar results in planar areas.')
|
||||
help='Use a full 3D mesh to compute the orthophoto instead of a 2.5D mesh. This option is a bit faster and provides similar results in planar areas. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--skip-3dmodel',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skip generation of a full 3D model. This can save time if you only need 2D results such as orthophotos and DEMs.')
|
||||
|
||||
parser.add_argument('--use-opensfm-dense',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Use opensfm to compute dense point cloud alternatively')
|
||||
|
||||
help='Skip generation of a full 3D model. This can save time if you only need 2D results such as orthophotos and DEMs. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--skip-report',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skip generation of PDF report. This can save time if you don\'t need a report. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--skip-orthophoto',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skip generation of the orthophoto. This can save time if you only need 3D results or DEMs. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--ignore-gsd',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Ignore Ground Sampling Distance (GSD). GSD '
|
||||
'caps the maximum resolution of image outputs and '
|
||||
'resizes images when necessary, resulting in faster processing and '
|
||||
'lower memory usage. Since GSD is an estimate, sometimes ignoring it can result in slightly better image output quality.')
|
||||
|
||||
help='Ignore Ground Sampling Distance (GSD).'
|
||||
'A memory and processor hungry change relative to the default behavior if set to true. '
|
||||
'Ordinarily, GSD estimates are used to cap the maximum resolution of image outputs and resizes images when necessary, resulting in faster processing and lower memory usage. '
|
||||
'Since GSD is an estimate, sometimes ignoring it can result in slightly better image output quality. '
|
||||
'Never set --ignore-gsd to true unless you are positive you need it, and even then: do not use it. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--no-gpu',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Do not use GPU acceleration, even if it\'s available. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--mesh-size',
|
||||
metavar='<positive integer>',
|
||||
action=StoreValue,
|
||||
|
@ -320,41 +387,21 @@ def config(argv=None, parser=None):
|
|||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--mesh-octree-depth',
|
||||
metavar='<positive integer>',
|
||||
metavar='<integer: 1 <= x <= 14>',
|
||||
action=StoreValue,
|
||||
default=10,
|
||||
default=11,
|
||||
type=int,
|
||||
help=('Oct-tree depth used in the mesh reconstruction, '
|
||||
help=('Octree depth used in the mesh reconstruction, '
|
||||
'increase to get more vertices, recommended '
|
||||
'values are 8-12. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--mesh-samples',
|
||||
metavar='<float >= 1.0>',
|
||||
action=StoreValue,
|
||||
default=1.0,
|
||||
type=float,
|
||||
help=('Number of points per octree node, recommended '
|
||||
'and default value: %(default)s'))
|
||||
|
||||
parser.add_argument('--mesh-point-weight',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
default=4,
|
||||
type=float,
|
||||
help=('This floating point value specifies the importance'
|
||||
' that interpolation of the point samples is given in the '
|
||||
'formulation of the screened Poisson equation. The results '
|
||||
'of the original (unscreened) Poisson Reconstruction can '
|
||||
'be obtained by setting this value to 0.'
|
||||
'Default= %(default)s'))
|
||||
|
||||
parser.add_argument('--fast-orthophoto',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skips dense reconstruction and 3D model generation. '
|
||||
'It generates an orthophoto directly from the sparse reconstruction. '
|
||||
'If you just need an orthophoto and do not need a full 3D model, turn on this option.')
|
||||
'If you just need an orthophoto and do not need a full 3D model, turn on this option. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--crop',
|
||||
metavar='<positive float>',
|
||||
|
@ -362,10 +409,36 @@ def config(argv=None, parser=None):
|
|||
default=3,
|
||||
type=float,
|
||||
help=('Automatically crop image outputs by creating a smooth buffer '
|
||||
'around the dataset boundaries, shrinked by N meters. '
|
||||
'around the dataset boundaries, shrunk by N meters. '
|
||||
'Use 0 to disable cropping. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--boundary',
|
||||
default='',
|
||||
metavar='<json>',
|
||||
action=StoreValue,
|
||||
type=path_or_json_string,
|
||||
help='GeoJSON polygon limiting the area of the reconstruction. '
|
||||
'Can be specified either as path to a GeoJSON file or as a '
|
||||
'JSON string representing the contents of a '
|
||||
'GeoJSON file. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--auto-boundary',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Automatically set a boundary using camera shot locations to limit the area of the reconstruction. '
|
||||
'This can help remove far away background artifacts (sky, background landscapes, etc.). See also --boundary. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--auto-boundary-distance',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=0,
|
||||
help='Specify the distance between camera shot locations and the outer edge of the boundary when computing the boundary with --auto-boundary. Set to 0 to automatically choose a value. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-quality',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
|
@ -379,7 +452,7 @@ def config(argv=None, parser=None):
|
|||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Classify the point cloud outputs using a Simple Morphological Filter. '
|
||||
help='Classify the point cloud outputs. '
|
||||
'You can control the behavior of this option by tweaking the --dem-* parameters. '
|
||||
'Default: '
|
||||
'%(default)s')
|
||||
|
@ -388,37 +461,48 @@ def config(argv=None, parser=None):
|
|||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Export the georeferenced point cloud in CSV format. Default: %(default)s')
|
||||
help='Export the georeferenced point cloud in CSV format. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-las',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Export the georeferenced point cloud in LAS format. Default: %(default)s')
|
||||
help='Export the georeferenced point cloud in LAS format. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-ept',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Export the georeferenced point cloud in Entwine Point Tile (EPT) format. Default: %(default)s')
|
||||
help='Export the georeferenced point cloud in Entwine Point Tile (EPT) format. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-copc',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Save the georeferenced point cloud in Cloud Optimized Point Cloud (COPC) format. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-filter',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=2.5,
|
||||
help='Filters the point cloud by removing points that deviate more than N standard deviations from the local mean. Set to 0 to disable filtering.'
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
default=5,
|
||||
help='Filters the point cloud by removing points that deviate more than N standard deviations from the local mean. Set to 0 to disable filtering. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-sample',
|
||||
metavar='<positive float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=0,
|
||||
help='Filters the point cloud by keeping only a single point around a radius N (in meters). This can be useful to limit the output resolution of the point cloud. Set to 0 to disable sampling.'
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
help='Filters the point cloud by keeping only a single point around a radius N (in meters). This can be useful to limit the output resolution of the point cloud and remove duplicate points. Set to 0 to disable sampling. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--pc-skip-geometric',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Geometric estimates improve the accuracy of the point cloud by computing geometrically consistent depthmaps but may not be usable in larger datasets. This flag disables geometric estimates. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--smrf-scalar',
|
||||
metavar='<positive float>',
|
||||
|
@ -426,8 +510,7 @@ def config(argv=None, parser=None):
|
|||
type=float,
|
||||
default=1.25,
|
||||
help='Simple Morphological Filter elevation scalar parameter. '
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--smrf-slope',
|
||||
metavar='<positive float>',
|
||||
|
@ -435,8 +518,7 @@ def config(argv=None, parser=None):
|
|||
type=float,
|
||||
default=0.15,
|
||||
help='Simple Morphological Filter slope parameter (rise over run). '
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--smrf-threshold',
|
||||
metavar='<positive float>',
|
||||
|
@ -444,8 +526,7 @@ def config(argv=None, parser=None):
|
|||
type=float,
|
||||
default=0.5,
|
||||
help='Simple Morphological Filter elevation threshold parameter (meters). '
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--smrf-window',
|
||||
metavar='<positive float>',
|
||||
|
@ -453,92 +534,87 @@ def config(argv=None, parser=None):
|
|||
type=float,
|
||||
default=18.0,
|
||||
help='Simple Morphological Filter window radius parameter (meters). '
|
||||
'\nDefault: '
|
||||
'%(default)s')
|
||||
|
||||
parser.add_argument('--texturing-data-term',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='gmi',
|
||||
choices=['gmi', 'area'],
|
||||
help=('Data term: [area, gmi]. Default: '
|
||||
'%(default)s'))
|
||||
|
||||
parser.add_argument('--texturing-outlier-removal-type',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
default='gauss_clamping',
|
||||
choices=['none', 'gauss_clamping', 'gauss_damping'],
|
||||
help=('Type of photometric outlier removal method: '
|
||||
'[none, gauss_damping, gauss_clamping]. Default: '
|
||||
'%(default)s'))
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--texturing-skip-global-seam-leveling',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help=('Skip global seam leveling. Useful for IR data.'
|
||||
'Default: %(default)s'))
|
||||
help=('Skip normalization of colors across all images. Useful when processing radiometric data. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--texturing-skip-local-seam-leveling',
|
||||
parser.add_argument('--texturing-keep-unseen-faces',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skip local seam blending. Default: %(default)s')
|
||||
help=('Keep faces in the mesh that are not seen in any camera. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--texturing-tone-mapping',
|
||||
metavar='<string>',
|
||||
action=StoreValue,
|
||||
choices=['none', 'gamma'],
|
||||
default='none',
|
||||
help='Turn on gamma tone mapping or none for no tone '
|
||||
'mapping. Choices are \'gamma\' or \'none\'. '
|
||||
'Default: %(default)s ')
|
||||
parser.add_argument('--texturing-single-material',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help=('Generate OBJs that have a single material and a single texture file instead of multiple ones. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--gltf',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help=('Generate single file Binary glTF (GLB) textured models. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--gcp',
|
||||
metavar='<path string>',
|
||||
action=StoreValue,
|
||||
default=None,
|
||||
help=('Path to the file containing the ground control '
|
||||
'points used for georeferencing. Default: '
|
||||
'%(default)s. The file needs to '
|
||||
'points used for georeferencing. '
|
||||
'The file needs to '
|
||||
'use the following format: \n'
|
||||
'EPSG:<code> or <+proj definition>\n'
|
||||
'geo_x geo_y geo_z im_x im_y image_name [gcp_name] [extra1] [extra2]'))
|
||||
'geo_x geo_y geo_z im_x im_y image_name [gcp_name] [extra1] [extra2]\n'
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--geo',
|
||||
metavar='<path string>',
|
||||
action=StoreValue,
|
||||
default=None,
|
||||
help=('Path to the image geolocation file containing the camera center coordinates used for georeferencing. '
|
||||
'Note that omega/phi/kappa are currently not supported (you can set them to 0). '
|
||||
'Default: '
|
||||
'%(default)s. The file needs to '
|
||||
'If you don\'t have values for yaw/pitch/roll you can set them to 0. '
|
||||
'The file needs to '
|
||||
'use the following format: \n'
|
||||
'EPSG:<code> or <+proj definition>\n'
|
||||
'image_name geo_x geo_y geo_z [omega (degrees)] [phi (degrees)] [kappa (degrees)] [horz accuracy (meters)] [vert accuracy (meters)]'
|
||||
''))
|
||||
'image_name geo_x geo_y geo_z [yaw (degrees)] [pitch (degrees)] [roll (degrees)] [horz accuracy (meters)] [vert accuracy (meters)]\n'
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--align',
|
||||
metavar='<path string>',
|
||||
action=StoreValue,
|
||||
default=None,
|
||||
help=('Path to a GeoTIFF DEM or a LAS/LAZ point cloud '
|
||||
'that the reconstruction outputs should be automatically aligned to. Experimental. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--use-exif',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help=('Use this tag if you have a gcp_list.txt but '
|
||||
'want to use the exif geotags instead'))
|
||||
help=('Use this tag if you have a GCP File but '
|
||||
'want to use the EXIF information for georeferencing instead. Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--dtm',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Use this tag to build a DTM (Digital Terrain Model, ground only) using a simple '
|
||||
'morphological filter. Check the --dem* and --smrf* parameters for finer tuning.')
|
||||
'morphological filter. Check the --dem* and --smrf* parameters for finer tuning. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--dsm',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Use this tag to build a DSM (Digital Surface Model, ground + objects) using a progressive '
|
||||
'morphological filter. Check the --dem* parameters for finer tuning.')
|
||||
'morphological filter. Check the --dem* parameters for finer tuning. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--dem-gapfill-steps',
|
||||
metavar='<positive integer>',
|
||||
|
@ -549,15 +625,15 @@ def config(argv=None, parser=None):
|
|||
'Starting with a radius equal to the output resolution, N different DEMs are generated with '
|
||||
'progressively bigger radius using the inverse distance weighted (IDW) algorithm '
|
||||
'and merged together. Remaining gaps are then merged using nearest neighbor interpolation. '
|
||||
'\nDefault=%(default)s')
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--dem-resolution',
|
||||
metavar='<float>',
|
||||
action=StoreValue,
|
||||
type=float,
|
||||
default=5,
|
||||
help='DSM/DTM resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also.'
|
||||
'\nDefault: %(default)s')
|
||||
help='DSM/DTM resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate.'
|
||||
' Default: %(default)s')
|
||||
|
||||
parser.add_argument('--dem-decimation',
|
||||
metavar='<positive integer>',
|
||||
|
@ -565,8 +641,7 @@ def config(argv=None, parser=None):
|
|||
default=1,
|
||||
type=int,
|
||||
help='Decimate the points before generating the DEM. 1 is no decimation (full quality). '
|
||||
'100 decimates ~99%% of the points. Useful for speeding up '
|
||||
'generation.\nDefault=%(default)s')
|
||||
'100 decimates ~99%% of the points. Useful for speeding up generation of DEM results in very large datasets. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--dem-euclidean-map',
|
||||
action=StoreTrue,
|
||||
|
@ -584,22 +659,29 @@ def config(argv=None, parser=None):
|
|||
action=StoreValue,
|
||||
default=5,
|
||||
type=float,
|
||||
help=('Orthophoto resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate. To remove the cap, check --ignore-gsd also.\n'
|
||||
help=('Orthophoto resolution in cm / pixel. Note that this value is capped by a ground sampling distance (GSD) estimate.'
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--orthophoto-no-tiled',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Set this parameter if you want a stripped geoTIFF.\n'
|
||||
help='Set this parameter if you want a striped GeoTIFF. '
|
||||
'Default: %(default)s')
|
||||
|
||||
|
||||
parser.add_argument('--orthophoto-png',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Set this parameter if you want to generate a PNG rendering of the orthophoto.\n'
|
||||
help='Set this parameter if you want to generate a PNG rendering of the orthophoto. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--orthophoto-kmz',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Set this parameter if you want to generate a Google Earth (KMZ) rendering of the orthophoto. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--orthophoto-compression',
|
||||
metavar='<string>',
|
||||
|
@ -607,7 +689,7 @@ def config(argv=None, parser=None):
|
|||
type=str,
|
||||
choices=['JPEG', 'LZW', 'PACKBITS', 'DEFLATE', 'LZMA', 'NONE'],
|
||||
default='DEFLATE',
|
||||
help='Set the compression to use for orthophotos. Options: %(choices)s.\nDefault: %(default)s')
|
||||
help='Set the compression to use for orthophotos. Can be one of: %(choices)s. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--orthophoto-cutline',
|
||||
action=StoreTrue,
|
||||
|
@ -627,38 +709,66 @@ def config(argv=None, parser=None):
|
|||
'suitable for viewers like Leaflet or OpenLayers. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--3d-tiles',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Generate OGC 3D Tiles outputs. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--rolling-shutter',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Turn on rolling shutter correction. If the camera '
|
||||
'has a rolling shutter and the images were taken in motion, you can turn on this option '
|
||||
'to improve the accuracy of the results. See also --rolling-shutter-readout. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--rolling-shutter-readout',
|
||||
type=float,
|
||||
action=StoreValue,
|
||||
metavar='<positive integer>',
|
||||
default=0,
|
||||
help='Override the rolling shutter readout time for your camera sensor (in milliseconds), instead of using the rolling shutter readout database. '
|
||||
'Note that not all cameras are present in the database. Set to 0 to use the database value. '
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--build-overviews',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Build orthophoto overviews using gdaladdo.')
|
||||
help='Build orthophoto overviews for faster display in programs such as QGIS. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--verbose', '-v',
|
||||
parser.add_argument('--cog',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Print additional messages to the console\n'
|
||||
'Default: %(default)s')
|
||||
help='Create Cloud-Optimized GeoTIFFs instead of normal GeoTIFFs. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--time',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Generates a benchmark file with runtime info\n'
|
||||
'Default: %(default)s')
|
||||
|
||||
parser.add_argument('--debug',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Print debug messages\n'
|
||||
'Default: %(default)s')
|
||||
parser.add_argument('--copy-to',
|
||||
metavar='<path>',
|
||||
action=StoreValue,
|
||||
help='Copy output results to this folder after processing.')
|
||||
|
||||
parser.add_argument('--version',
|
||||
action='version',
|
||||
version='ODM {0}'.format(__version__),
|
||||
help='Displays version number and exits. ')
|
||||
|
||||
parser.add_argument('--video-limit',
|
||||
type=int,
|
||||
action=StoreValue,
|
||||
default=500,
|
||||
metavar='<positive integer>',
|
||||
help='Maximum number of frames to extract from video files for processing. Set to 0 for no limit. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--video-resolution',
|
||||
type=int,
|
||||
action=StoreValue,
|
||||
default=4000,
|
||||
metavar='<positive integer>',
|
||||
help='The maximum output resolution of extracted video frames in pixels. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--split',
|
||||
type=int,
|
||||
action=StoreValue,
|
||||
|
@ -668,7 +778,7 @@ def config(argv=None, parser=None):
|
|||
'splitting a large dataset into smaller '
|
||||
'submodels, images are grouped into clusters. '
|
||||
'This value regulates the number of images that '
|
||||
'each cluster should have on average.')
|
||||
'each cluster should have on average. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--split-overlap',
|
||||
type=float,
|
||||
|
@ -679,13 +789,22 @@ def config(argv=None, parser=None):
|
|||
'After grouping images into clusters, images '
|
||||
'that are closer than this radius to a cluster '
|
||||
'are added to the cluster. This is done to ensure '
|
||||
'that neighboring submodels overlap.')
|
||||
'that neighboring submodels overlap. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--split-multitracks',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Split multi-track reconstructions.')
|
||||
parser.add_argument('--split-image-groups',
|
||||
metavar='<path string>',
|
||||
action=StoreValue,
|
||||
default=None,
|
||||
help=('Path to the image groups file that controls how images should be split into groups. '
|
||||
'The file needs to use the following format: \n'
|
||||
'image_name group_name\n'
|
||||
'Default: %(default)s'))
|
||||
|
||||
parser.add_argument('--sm-no-align',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help='Skip alignment of submodels in split-merge. Useful if GPS is good enough on very large datasets. Default: %(default)s')
|
||||
|
||||
parser.add_argument('--sm-cluster',
|
||||
metavar='<string>',
|
||||
|
@ -719,7 +838,7 @@ def config(argv=None, parser=None):
|
|||
type=float,
|
||||
action=StoreValue,
|
||||
metavar='<positive float>',
|
||||
default=10,
|
||||
default=3,
|
||||
help='Set a value in meters for the GPS Dilution of Precision (DOP) '
|
||||
'information for all images. If your images are tagged '
|
||||
'with high precision GPS information (RTK), this value will be automatically '
|
||||
|
@ -752,7 +871,23 @@ def config(argv=None, parser=None):
|
|||
'It\'s recommended to choose a band which has sharp details and is in focus. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
parser.add_argument('--skip-band-alignment',
|
||||
action=StoreTrue,
|
||||
nargs=0,
|
||||
default=False,
|
||||
help=('When processing multispectral datasets, ODM will automatically align the images for each band. '
|
||||
'If the images have been postprocessed and are already aligned, use this option. '
|
||||
'Default: %(default)s'))
|
||||
|
||||
args, unknown = parser.parse_known_args(argv)
|
||||
DEPRECATED = ["--verbose", "--debug", "--time", "--resize-to", "--depthmap-resolution", "--pc-geometric", "--texturing-data-term", "--texturing-outlier-removal-type", "--texturing-tone-mapping", "--texturing-skip-local-seam-leveling"]
|
||||
unknown_e = [p for p in unknown if p not in DEPRECATED]
|
||||
if len(unknown_e) > 0:
|
||||
raise parser.error("unrecognized arguments: %s" % " ".join(unknown_e))
|
||||
|
||||
for p in unknown:
|
||||
if p in DEPRECATED:
|
||||
log.ODM_WARNING("%s is no longer a valid argument and will be ignored!" % p)
|
||||
|
||||
# check that the project path setting has been set properly
|
||||
if not args.project_path:
|
||||
|
@ -789,8 +924,4 @@ def config(argv=None, parser=None):
|
|||
log.ODM_ERROR("Cluster node seems to be offline: %s" % str(e))
|
||||
sys.exit(1)
|
||||
|
||||
# if args.radiometric_calibration != "none" and not args.texturing_skip_global_seam_leveling:
|
||||
# log.ODM_WARNING("radiometric-calibration is turned on, automatically setting --texturing-skip-global-seam-leveling")
|
||||
# args.texturing_skip_global_seam_leveling = True
|
||||
|
||||
return args
|
||||
|
|
|
@ -8,47 +8,40 @@ root_path, _ = os.path.split(current_path)
|
|||
|
||||
superbuild_path = os.path.join(root_path, 'SuperBuild')
|
||||
superbuild_bin_path = os.path.join(superbuild_path, 'install', 'bin')
|
||||
tests_path = os.path.join(root_path, 'tests')
|
||||
tests_data_path = os.path.join(root_path, 'tests/test_data')
|
||||
|
||||
# add opencv,opensfm to python path
|
||||
python_packages_paths = [os.path.join(superbuild_path, p) for p in [
|
||||
'install/lib/python3.9/dist-packages',
|
||||
'install/lib/python3.8/dist-packages',
|
||||
'install/lib/python3/dist-packages',
|
||||
'src/opensfm'
|
||||
'install/bin/opensfm',
|
||||
]]
|
||||
for p in python_packages_paths:
|
||||
sys.path.append(p)
|
||||
|
||||
|
||||
# define opensfm path
|
||||
opensfm_path = os.path.join(superbuild_path, "src/opensfm")
|
||||
opensfm_path = os.path.join(superbuild_bin_path, "opensfm")
|
||||
|
||||
# define orb_slam2 path
|
||||
orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2")
|
||||
|
||||
poisson_recon_path = os.path.join(superbuild_path, 'src', 'PoissonRecon', 'Bin', 'Linux', 'PoissonRecon')
|
||||
dem2mesh_path = os.path.join(superbuild_path, 'src', 'dem2mesh', 'dem2mesh')
|
||||
dem2points_path = os.path.join(superbuild_path, 'src', 'dem2points', 'dem2points')
|
||||
poisson_recon_path = os.path.join(superbuild_bin_path, 'PoissonRecon')
|
||||
dem2mesh_path = os.path.join(superbuild_bin_path, 'dem2mesh')
|
||||
dem2points_path = os.path.join(superbuild_bin_path, 'dem2points')
|
||||
|
||||
# define mvstex path
|
||||
mvstex_path = os.path.join(superbuild_path, "install/bin/texrecon")
|
||||
mvstex_path = os.path.join(superbuild_bin_path, "texrecon")
|
||||
|
||||
# openmvs paths
|
||||
omvs_densify_path = os.path.join(superbuild_path, "install/bin/OpenMVS/DensifyPointCloud")
|
||||
omvs_densify_path = os.path.join(superbuild_bin_path, "OpenMVS", "DensifyPointCloud")
|
||||
omvs_reconstructmesh_path = os.path.join(superbuild_bin_path, "OpenMVS", "ReconstructMesh")
|
||||
|
||||
# define txt2las path
|
||||
txt2las_path = os.path.join(superbuild_path, 'src/las-tools/bin')
|
||||
pdal_path = os.path.join(superbuild_path, 'build/pdal/bin')
|
||||
|
||||
# define odm modules path
|
||||
odm_modules_path = os.path.join(root_path, "build/bin")
|
||||
odm_modules_src_path = os.path.join(root_path, "modules")
|
||||
fpcfilter_path = os.path.join(superbuild_bin_path, "FPCFilter")
|
||||
|
||||
odm_orthophoto_path = os.path.join(superbuild_bin_path, "odm_orthophoto")
|
||||
settings_path = os.path.join(root_path, 'settings.yaml')
|
||||
|
||||
# Define supported image extensions
|
||||
supported_extensions = {'.jpg','.jpeg','.png', '.tif', '.tiff', '.bmp'}
|
||||
supported_video_extensions = {'.mp4', '.mov', '.lrv', '.ts'}
|
||||
|
||||
# Define the number of cores
|
||||
num_cores = multiprocessing.cpu_count()
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
from opendm import context
|
||||
from opendm.system import run
|
||||
from opendm import log
|
||||
from opendm.point_cloud import export_summary_json
|
||||
from osgeo import ogr
|
||||
import json, os
|
||||
from opendm.concurrency import get_max_memory
|
||||
from opendm.utils import double_quote
|
||||
|
||||
class Cropper:
|
||||
def __init__(self, storage_dir, files_prefix = "crop"):
|
||||
|
@ -36,13 +38,13 @@ class Cropper:
|
|||
# ext = .tif
|
||||
|
||||
original_geotiff = os.path.join(path, "{}.original{}".format(basename, ext))
|
||||
os.rename(geotiff_path, original_geotiff)
|
||||
os.replace(geotiff_path, original_geotiff)
|
||||
|
||||
try:
|
||||
kwargs = {
|
||||
'gpkg_path': gpkg_path,
|
||||
'geotiffInput': original_geotiff,
|
||||
'geotiffOutput': geotiff_path,
|
||||
'gpkg_path': double_quote(gpkg_path),
|
||||
'geotiffInput': double_quote(original_geotiff),
|
||||
'geotiffOutput': double_quote(geotiff_path),
|
||||
'options': ' '.join(map(lambda k: '-co {}={}'.format(k, gdal_options[k]), gdal_options)),
|
||||
'warpOptions': ' '.join(warp_options),
|
||||
'max_memory': get_max_memory()
|
||||
|
@ -63,7 +65,7 @@ class Cropper:
|
|||
log.ODM_WARNING('Something went wrong while cropping: {}'.format(e))
|
||||
|
||||
# Revert rename
|
||||
os.rename(original_geotiff, geotiff_path)
|
||||
os.replace(original_geotiff, geotiff_path)
|
||||
|
||||
return geotiff_path
|
||||
|
||||
|
@ -94,7 +96,7 @@ class Cropper:
|
|||
convexhull = geomcol.ConvexHull()
|
||||
|
||||
# If buffer distance is specified
|
||||
# Create two buffers, one shrinked by
|
||||
# Create two buffers, one shrunk by
|
||||
# N + 3 and then that buffer expanded by 3
|
||||
# so that we get smooth corners. \m/
|
||||
BUFFER_SMOOTH_DISTANCE = 3
|
||||
|
@ -147,7 +149,7 @@ class Cropper:
|
|||
|
||||
boundary_file_path = self.path('boundary.json')
|
||||
|
||||
run('pdal info --boundary --filters.hexbin.edge_size=1 --filters.hexbin.threshold=0 {0} > {1}'.format(decimated_pointcloud_path, boundary_file_path))
|
||||
run('pdal info --boundary --filters.hexbin.edge_size=1 --filters.hexbin.threshold=0 "{0}" > "{1}"'.format(decimated_pointcloud_path, boundary_file_path))
|
||||
|
||||
pc_geojson_boundary_feature = None
|
||||
|
||||
|
@ -158,8 +160,8 @@ class Cropper:
|
|||
if pc_geojson_boundary_feature is None: raise RuntimeError("Could not determine point cloud boundaries")
|
||||
|
||||
# Write bounds to GeoJSON
|
||||
bounds_geojson_path = self.path('bounds.geojson')
|
||||
with open(bounds_geojson_path, "w") as f:
|
||||
tmp_bounds_geojson_path = self.path('tmp-bounds.geojson')
|
||||
with open(tmp_bounds_geojson_path, "w") as f:
|
||||
f.write(json.dumps({
|
||||
"type": "FeatureCollection",
|
||||
"features": [{
|
||||
|
@ -171,7 +173,7 @@ class Cropper:
|
|||
# Create a convex hull around the boundary
|
||||
# as to encompass the entire area (no holes)
|
||||
driver = ogr.GetDriverByName('GeoJSON')
|
||||
ds = driver.Open(bounds_geojson_path, 0) # ready-only
|
||||
ds = driver.Open(tmp_bounds_geojson_path, 0) # ready-only
|
||||
layer = ds.GetLayer()
|
||||
|
||||
# Collect all Geometry
|
||||
|
@ -183,7 +185,7 @@ class Cropper:
|
|||
convexhull = geomcol.ConvexHull()
|
||||
|
||||
# If buffer distance is specified
|
||||
# Create two buffers, one shrinked by
|
||||
# Create two buffers, one shrunk by
|
||||
# N + 3 and then that buffer expanded by 3
|
||||
# so that we get smooth corners. \m/
|
||||
BUFFER_SMOOTH_DISTANCE = 3
|
||||
|
@ -201,7 +203,7 @@ class Cropper:
|
|||
# Save to a new file
|
||||
bounds_geojson_path = self.path('bounds.geojson')
|
||||
if os.path.exists(bounds_geojson_path):
|
||||
driver.DeleteDataSource(bounds_geojson_path)
|
||||
os.remove(bounds_geojson_path)
|
||||
|
||||
out_ds = driver.CreateDataSource(bounds_geojson_path)
|
||||
layer = out_ds.CreateLayer("convexhull", geom_type=ogr.wkbPolygon)
|
||||
|
@ -218,6 +220,10 @@ class Cropper:
|
|||
# Remove decimated point cloud
|
||||
if os.path.exists(decimated_pointcloud_path):
|
||||
os.remove(decimated_pointcloud_path)
|
||||
|
||||
# Remove tmp bounds
|
||||
if os.path.exists(tmp_bounds_geojson_path):
|
||||
os.remove(tmp_bounds_geojson_path)
|
||||
|
||||
return bounds_geojson_path
|
||||
|
||||
|
@ -236,7 +242,7 @@ class Cropper:
|
|||
bounds_geojson_path = self.create_bounds_geojson(pointcloud_path, buffer_distance, decimation_step)
|
||||
|
||||
summary_file_path = os.path.join(self.storage_dir, '{}.summary.json'.format(self.files_prefix))
|
||||
run('pdal info --summary {0} > {1}'.format(pointcloud_path, summary_file_path))
|
||||
export_summary_json(pointcloud_path, summary_file_path)
|
||||
|
||||
pc_proj4 = None
|
||||
with open(summary_file_path, 'r') as f:
|
||||
|
@ -247,10 +253,13 @@ class Cropper:
|
|||
|
||||
bounds_gpkg_path = os.path.join(self.storage_dir, '{}.bounds.gpkg'.format(self.files_prefix))
|
||||
|
||||
if os.path.isfile(bounds_gpkg_path):
|
||||
os.remove(bounds_gpkg_path)
|
||||
|
||||
# Convert bounds to GPKG
|
||||
kwargs = {
|
||||
'input': bounds_geojson_path,
|
||||
'output': bounds_gpkg_path,
|
||||
'input': double_quote(bounds_geojson_path),
|
||||
'output': double_quote(bounds_gpkg_path),
|
||||
'proj4': pc_proj4
|
||||
}
|
||||
|
||||
|
|
|
@ -1,68 +1,188 @@
|
|||
import os
|
||||
import shutil
|
||||
import rasterio
|
||||
import fiona
|
||||
import numpy as np
|
||||
import math
|
||||
import sys
|
||||
from opendm import log
|
||||
from opendm import io
|
||||
from opendm import concurrency
|
||||
from opendm import get_image_size
|
||||
from opendm import system
|
||||
import math
|
||||
|
||||
def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, tmpdir=None, scale=1):
|
||||
from skimage.feature import canny
|
||||
from skimage.draw import line
|
||||
from skimage.graph import route_through_array
|
||||
import shapely
|
||||
from shapely.geometry import LineString, mapping, shape
|
||||
from shapely.ops import polygonize, unary_union
|
||||
|
||||
if sys.platform == 'win32':
|
||||
# Temporary fix for: ValueError: GEOSGeom_createLinearRing_r returned a NULL pointer
|
||||
# https://github.com/Toblerity/Shapely/issues/1005
|
||||
shapely.speedups.disable()
|
||||
|
||||
def write_raster(data, file):
|
||||
profile = {
|
||||
'driver': 'GTiff',
|
||||
'width': data.shape[1],
|
||||
'height': data.shape[0],
|
||||
'count': 1,
|
||||
'dtype': 'float32',
|
||||
'transform': None,
|
||||
'nodata': None,
|
||||
'crs': None
|
||||
}
|
||||
|
||||
with rasterio.open(file, 'w', BIGTIFF="IF_SAFER", **profile) as wout:
|
||||
wout.write(data, 1)
|
||||
|
||||
def compute_cutline(orthophoto_file, crop_area_file, destination, max_concurrency=1, scale=1):
|
||||
if io.file_exists(orthophoto_file) and io.file_exists(crop_area_file):
|
||||
from opendm.grass_engine import grass
|
||||
log.ODM_INFO("Computing cutline")
|
||||
|
||||
if tmpdir and not io.dir_exists(tmpdir):
|
||||
system.mkdir_p(tmpdir)
|
||||
|
||||
scale = max(0.0001, min(1, scale))
|
||||
scaled_orthophoto = None
|
||||
|
||||
if scale < 1:
|
||||
log.ODM_INFO("Scaling orthophoto to %s%% to compute cutline" % (scale * 100))
|
||||
|
||||
scaled_orthophoto = os.path.join(tmpdir, os.path.basename(io.related_file_path(orthophoto_file, postfix=".scaled")))
|
||||
scaled_orthophoto = io.related_file_path(orthophoto_file, postfix=".scaled")
|
||||
# Scale orthophoto before computing cutline
|
||||
system.run("gdal_translate -outsize {}% 0 "
|
||||
"-co NUM_THREADS={} "
|
||||
"--config GDAL_CACHEMAX {}% "
|
||||
"{} {}".format(
|
||||
'"{}" "{}"'.format(
|
||||
scale * 100,
|
||||
max_concurrency,
|
||||
concurrency.get_max_memory(),
|
||||
orthophoto_file,
|
||||
scaled_orthophoto
|
||||
))
|
||||
|
||||
orthophoto_file = scaled_orthophoto
|
||||
|
||||
try:
|
||||
ortho_width,ortho_height = get_image_size.get_image_size(orthophoto_file, fallback_on_error=False)
|
||||
log.ODM_INFO("Orthophoto dimensions are %sx%s" % (ortho_width, ortho_height))
|
||||
number_lines = int(max(8, math.ceil(min(ortho_width, ortho_height) / 256.0)))
|
||||
except:
|
||||
log.ODM_INFO("Cannot compute orthophoto dimensions, setting arbitrary number of lines.")
|
||||
number_lines = 32
|
||||
|
||||
log.ODM_INFO("Number of lines: %s" % number_lines)
|
||||
# open raster
|
||||
f = rasterio.open(orthophoto_file)
|
||||
rast = f.read(1) # First band only
|
||||
height, width = rast.shape
|
||||
number_lines = int(max(8, math.ceil(min(width, height) / 256.0)))
|
||||
line_hor_offset = int(width / number_lines)
|
||||
line_ver_offset = int(height / number_lines)
|
||||
|
||||
gctx = grass.create_context({'auto_cleanup' : False, 'tmpdir': tmpdir})
|
||||
gctx.add_param('orthophoto_file', orthophoto_file)
|
||||
gctx.add_param('crop_area_file', crop_area_file)
|
||||
gctx.add_param('number_lines', number_lines)
|
||||
gctx.add_param('max_concurrency', max_concurrency)
|
||||
gctx.add_param('memory', int(concurrency.get_max_memory_mb(300)))
|
||||
gctx.set_location(orthophoto_file)
|
||||
if line_hor_offset <= 2 or line_ver_offset <= 2:
|
||||
log.ODM_WARNING("Cannot compute cutline, orthophoto is too small (%sx%spx)" % (width, height))
|
||||
return
|
||||
|
||||
cutline_file = gctx.execute(os.path.join("opendm", "grass", "compute_cutline.grass"))
|
||||
if cutline_file != 'error':
|
||||
if io.file_exists(cutline_file):
|
||||
shutil.move(cutline_file, destination)
|
||||
log.ODM_INFO("Generated cutline file: %s --> %s" % (cutline_file, destination))
|
||||
gctx.cleanup()
|
||||
return destination
|
||||
crop_f = fiona.open(crop_area_file, 'r')
|
||||
if len(crop_f) == 0:
|
||||
log.ODM_WARNING("Crop area is empty, cannot compute cutline")
|
||||
return
|
||||
|
||||
crop_poly = shape(crop_f[1]['geometry'])
|
||||
crop_f.close()
|
||||
|
||||
linestrings = []
|
||||
|
||||
# Compute canny edges on first band
|
||||
edges = canny(rast)
|
||||
|
||||
def compute_linestrings(direction):
|
||||
log.ODM_INFO("Computing %s cutlines" % direction)
|
||||
# Initialize cost map
|
||||
cost_map = np.full((height, width), 1, dtype=np.float32)
|
||||
|
||||
# Write edges to cost map
|
||||
cost_map[edges==True] = 0 # Low cost
|
||||
|
||||
# Write "barrier, floor is lava" costs
|
||||
if direction == 'vertical':
|
||||
lines = [((i, 0), (i, height - 1)) for i in range(line_hor_offset, width - line_hor_offset, line_hor_offset)]
|
||||
points = []
|
||||
pad_x = int(line_hor_offset / 2.0)
|
||||
for i in range(0, len(lines)):
|
||||
a,b = lines[i]
|
||||
points.append(((a[0] - pad_x , a[1]), (b[0] - pad_x, b[1])))
|
||||
a,b = lines[-1]
|
||||
points.append(((a[0] + pad_x , a[1]), (b[0] + pad_x, b[1])))
|
||||
else:
|
||||
log.ODM_WARNING("Unexpected script result: %s. No cutline file has been generated." % cutline_file)
|
||||
else:
|
||||
log.ODM_WARNING("Could not generate orthophoto cutline. An error occured when running GRASS. No orthophoto will be generated.")
|
||||
lines = [((0, j), (width - 1, j)) for j in range(line_ver_offset, height - line_ver_offset, line_ver_offset)]
|
||||
points = []
|
||||
pad_y = int(line_ver_offset / 2.0)
|
||||
for i in range(0, len(lines)):
|
||||
a,b = lines[i]
|
||||
points.append(((a[0] , a[1] - pad_y), (b[0], b[1] - pad_y)))
|
||||
a,b = lines[-1]
|
||||
points.append(((a[0] , a[1] + pad_y), (b[0], b[1] + pad_y)))
|
||||
|
||||
for a, b in lines:
|
||||
rr,cc = line(*a, *b)
|
||||
cost_map[cc, rr] = 9999 # Lava
|
||||
|
||||
# Calculate route
|
||||
for a, b in points:
|
||||
line_coords, cost = route_through_array(cost_map, (a[1], a[0]), (b[1], b[0]), fully_connected=True, geometric=True)
|
||||
|
||||
# Convert to geographic
|
||||
geo_line_coords = [f.xy(*c) for c in line_coords]
|
||||
|
||||
# Simplify
|
||||
ls = LineString(geo_line_coords)
|
||||
linestrings.append(ls.simplify(0.05, preserve_topology=False))
|
||||
|
||||
compute_linestrings('vertical')
|
||||
compute_linestrings('horizontal')
|
||||
|
||||
|
||||
# Generate polygons and keep only those inside the crop area
|
||||
log.ODM_INFO("Generating polygons... this could take a bit.")
|
||||
polygons = []
|
||||
for p in polygonize(unary_union(linestrings)):
|
||||
if crop_poly.contains(p):
|
||||
polygons.append(p)
|
||||
|
||||
# This should never happen
|
||||
if len(polygons) == 0:
|
||||
log.ODM_WARNING("No polygons, cannot compute cutline")
|
||||
return
|
||||
|
||||
log.ODM_INFO("Merging polygons")
|
||||
cutline_polygons = unary_union(polygons)
|
||||
if not hasattr(cutline_polygons, '__getitem__'):
|
||||
cutline_polygons = [cutline_polygons]
|
||||
|
||||
largest_cutline = cutline_polygons[0]
|
||||
max_area = largest_cutline.area
|
||||
for p in cutline_polygons:
|
||||
if p.area > max_area:
|
||||
max_area = p.area
|
||||
largest_cutline = p
|
||||
|
||||
log.ODM_INFO("Largest cutline found: %s m^2" % max_area)
|
||||
|
||||
meta = {
|
||||
'crs': {'init': str(f.crs).lower() },
|
||||
'driver': 'GPKG',
|
||||
'schema': {
|
||||
'properties': {},
|
||||
'geometry': 'Polygon'
|
||||
}
|
||||
}
|
||||
|
||||
# Remove previous
|
||||
if os.path.exists(destination):
|
||||
os.remove(destination)
|
||||
|
||||
with fiona.open(destination, 'w', **meta) as sink:
|
||||
sink.write({
|
||||
'geometry': mapping(largest_cutline),
|
||||
'properties': {}
|
||||
})
|
||||
f.close()
|
||||
log.ODM_INFO("Wrote %s" % destination)
|
||||
|
||||
# Cleanup
|
||||
if scaled_orthophoto is not None and os.path.exists(scaled_orthophoto):
|
||||
os.remove(scaled_orthophoto)
|
||||
else:
|
||||
log.ODM_WARNING("We've been asked to compute cutline, but either %s or %s is missing. Skipping..." % (orthophoto_file, crop_area_file))
|
||||
|
|
|
@ -5,193 +5,120 @@ import numpy
|
|||
import math
|
||||
import time
|
||||
import shutil
|
||||
import glob
|
||||
import re
|
||||
from joblib import delayed, Parallel
|
||||
from opendm.system import run
|
||||
from opendm import point_cloud
|
||||
from opendm import io
|
||||
from opendm import system
|
||||
from opendm.concurrency import get_max_memory, parallel_map
|
||||
from scipy import ndimage
|
||||
from opendm.concurrency import get_max_memory, parallel_map, get_total_memory
|
||||
from datetime import datetime
|
||||
from opendm.vendor.gdal_fillnodata import main as gdal_fillnodata
|
||||
from opendm import log
|
||||
try:
|
||||
import Queue as queue
|
||||
except:
|
||||
import queue
|
||||
import threading
|
||||
|
||||
from .ground_rectification.rectify import run_rectification
|
||||
from . import pdal
|
||||
|
||||
def classify(lasFile, scalar, slope, threshold, window, verbose=False):
|
||||
try:
|
||||
# GDAL >= 3.3
|
||||
from osgeo_utils.gdal_proximity import main as gdal_proximity
|
||||
except ModuleNotFoundError:
|
||||
# GDAL <= 3.2
|
||||
try:
|
||||
from osgeo.utils.gdal_proximity import main as gdal_proximity
|
||||
except:
|
||||
pass
|
||||
|
||||
def classify(lasFile, scalar, slope, threshold, window):
|
||||
start = datetime.now()
|
||||
|
||||
try:
|
||||
pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window, verbose)
|
||||
pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window)
|
||||
except:
|
||||
log.ODM_WARNING("Error creating classified file %s" % lasFile)
|
||||
|
||||
log.ODM_INFO('Created %s in %s' % (os.path.relpath(lasFile), datetime.now() - start))
|
||||
log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
|
||||
return lasFile
|
||||
|
||||
def rectify(lasFile, debug=False, reclassify_threshold=5, min_area=750, min_points=500):
|
||||
def rectify(lasFile, reclassify_threshold=5, min_area=750, min_points=500):
|
||||
start = datetime.now()
|
||||
|
||||
try:
|
||||
# Currently, no Python 2 lib that supports reading and writing LAZ, so we will do it manually until ODM is migrated to Python 3
|
||||
# When migration is done, we can move to pylas and avoid using PDAL for convertion
|
||||
tempLasFile = os.path.join(os.path.dirname(lasFile), 'tmp.las')
|
||||
|
||||
# Convert LAZ to LAS
|
||||
cmd = [
|
||||
'pdal',
|
||||
'translate',
|
||||
'-i %s' % lasFile,
|
||||
'-o %s' % tempLasFile
|
||||
]
|
||||
system.run(' '.join(cmd))
|
||||
|
||||
log.ODM_INFO("Rectifying {} using with [reclassify threshold: {}, min area: {}, min points: {}]".format(lasFile, reclassify_threshold, min_area, min_points))
|
||||
run_rectification(
|
||||
input=tempLasFile, output=tempLasFile, debug=debug, \
|
||||
input=lasFile, output=lasFile, \
|
||||
reclassify_plan='median', reclassify_threshold=reclassify_threshold, \
|
||||
extend_plan='surrounding', extend_grid_distance=5, \
|
||||
min_area=min_area, min_points=min_points)
|
||||
|
||||
# Convert LAS to LAZ
|
||||
cmd = [
|
||||
'pdal',
|
||||
'translate',
|
||||
'-i %s' % tempLasFile,
|
||||
'-o %s' % lasFile
|
||||
]
|
||||
system.run(' '.join(cmd))
|
||||
os.remove(tempLasFile)
|
||||
|
||||
log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
|
||||
except Exception as e:
|
||||
raise Exception("Error rectifying ground in file %s: %s" % (lasFile, str(e)))
|
||||
log.ODM_WARNING("Error rectifying ground in file %s: %s" % (lasFile, str(e)))
|
||||
|
||||
log.ODM_INFO('Created %s in %s' % (os.path.relpath(lasFile), datetime.now() - start))
|
||||
return lasFile
|
||||
|
||||
error = None
|
||||
|
||||
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,
|
||||
outdir='', resolution=0.1, max_workers=1, max_tile_size=4096,
|
||||
verbose=False, decimation=None, keep_unfilled_copy=False,
|
||||
apply_smoothing=True):
|
||||
decimation=None, with_euclidean_map=False,
|
||||
apply_smoothing=True, max_tiles=None):
|
||||
""" Create DEM from multiple radii, and optionally gapfill """
|
||||
|
||||
global error
|
||||
error = None
|
||||
|
||||
start = datetime.now()
|
||||
|
||||
if not os.path.exists(outdir):
|
||||
log.ODM_INFO("Creating %s" % outdir)
|
||||
os.mkdir(outdir)
|
||||
|
||||
extent = point_cloud.get_extent(input_point_cloud)
|
||||
log.ODM_INFO("Point cloud bounds are [minx: %s, maxx: %s] [miny: %s, maxy: %s]" % (extent['minx'], extent['maxx'], extent['miny'], extent['maxy']))
|
||||
ext_width = extent['maxx'] - extent['minx']
|
||||
ext_height = extent['maxy'] - extent['miny']
|
||||
|
||||
w, h = (int(math.ceil(ext_width / float(resolution))),
|
||||
int(math.ceil(ext_height / float(resolution))))
|
||||
|
||||
# Set a floor, no matter the resolution parameter
|
||||
# (sometimes a wrongly estimated scale of the model can cause the resolution
|
||||
# to be set unrealistically low, causing errors)
|
||||
RES_FLOOR = 64
|
||||
if w < RES_FLOOR and h < RES_FLOOR:
|
||||
prev_w, prev_h = w, h
|
||||
|
||||
if w >= h:
|
||||
w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
|
||||
else:
|
||||
w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)
|
||||
|
||||
floor_ratio = prev_w / float(w)
|
||||
resolution *= floor_ratio
|
||||
radiuses = [str(float(r) * floor_ratio) for r in radiuses]
|
||||
|
||||
log.ODM_WARNING("Really low resolution DEM requested %s will set floor at %s pixels. Resolution changed to %s. The scale of this reconstruction might be off." % ((prev_w, prev_h), RES_FLOOR, resolution))
|
||||
|
||||
final_dem_pixels = w * h
|
||||
|
||||
num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
|
||||
num_tiles = num_splits * num_splits
|
||||
log.ODM_INFO("DEM resolution is %s, max tile size is %s, will split DEM generation into %s tiles" % ((h, w), max_tile_size, num_tiles))
|
||||
|
||||
tile_bounds_width = ext_width / float(num_splits)
|
||||
tile_bounds_height = ext_height / float(num_splits)
|
||||
|
||||
tiles = []
|
||||
|
||||
for r in radiuses:
|
||||
minx = extent['minx']
|
||||
|
||||
for x in range(num_splits):
|
||||
miny = extent['miny']
|
||||
if x == num_splits - 1:
|
||||
maxx = extent['maxx']
|
||||
else:
|
||||
maxx = minx + tile_bounds_width
|
||||
|
||||
for y in range(num_splits):
|
||||
if y == num_splits - 1:
|
||||
maxy = extent['maxy']
|
||||
else:
|
||||
maxy = miny + tile_bounds_height
|
||||
|
||||
filename = os.path.join(os.path.abspath(outdir), '%s_r%s_x%s_y%s.tif' % (dem_type, r, x, y))
|
||||
|
||||
tiles.append({
|
||||
'radius': r,
|
||||
'bounds': {
|
||||
'minx': minx,
|
||||
'maxx': maxx,
|
||||
'miny': miny,
|
||||
'maxy': maxy
|
||||
},
|
||||
'filename': filename
|
||||
})
|
||||
|
||||
miny = maxy
|
||||
minx = maxx
|
||||
|
||||
# Sort tiles by increasing radius
|
||||
tiles.sort(key=lambda t: float(t['radius']), reverse=True)
|
||||
|
||||
def process_tile(q):
|
||||
log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))
|
||||
|
||||
d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])
|
||||
|
||||
if dem_type == 'dtm':
|
||||
d = pdal.json_add_classification_filter(d, 2)
|
||||
|
||||
if decimation is not None:
|
||||
d = pdal.json_add_decimation_filter(d, decimation)
|
||||
|
||||
pdal.json_add_readers(d, [input_point_cloud])
|
||||
pdal.run_pipeline(d, verbose=verbose)
|
||||
|
||||
parallel_map(process_tile, tiles, max_workers)
|
||||
kwargs = {
|
||||
'input': input_point_cloud,
|
||||
'outdir': outdir,
|
||||
'outputType': output_type,
|
||||
'radiuses': ",".join(map(str, radiuses)),
|
||||
'resolution': resolution,
|
||||
'maxTiles': 0 if max_tiles is None else max_tiles,
|
||||
'decimation': 1 if decimation is None else decimation,
|
||||
'classification': 2 if dem_type == 'dtm' else -1,
|
||||
'tileSize': max_tile_size
|
||||
}
|
||||
system.run('renderdem "{input}" '
|
||||
'--outdir "{outdir}" '
|
||||
'--output-type {outputType} '
|
||||
'--radiuses {radiuses} '
|
||||
'--resolution {resolution} '
|
||||
'--max-tiles {maxTiles} '
|
||||
'--decimation {decimation} '
|
||||
'--classification {classification} '
|
||||
'--tile-size {tileSize} '
|
||||
'--force '.format(**kwargs), env_vars={'OMP_NUM_THREADS': max_workers})
|
||||
|
||||
output_file = "%s.tif" % dem_type
|
||||
output_path = os.path.abspath(os.path.join(outdir, output_file))
|
||||
|
||||
# Verify tile results
|
||||
for t in tiles:
|
||||
if not os.path.exists(t['filename']):
|
||||
raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
|
||||
|
||||
# Fetch tiles
|
||||
tiles = []
|
||||
for p in glob.glob(os.path.join(os.path.abspath(outdir), "*.tif")):
|
||||
filename = os.path.basename(p)
|
||||
m = re.match("^r([\d\.]+)_x\d+_y\d+\.tif", filename)
|
||||
if m is not None:
|
||||
tiles.append({'filename': p, 'radius': float(m.group(1))})
|
||||
|
||||
if len(tiles) == 0:
|
||||
raise system.ExitException("No DEM tiles were generated, something went wrong")
|
||||
|
||||
log.ODM_INFO("Generated %s tiles" % len(tiles))
|
||||
|
||||
# Sort tiles by decreasing radius
|
||||
tiles.sort(key=lambda t: float(t['radius']), reverse=True)
|
||||
|
||||
# Create virtual raster
|
||||
tiles_vrt_path = os.path.abspath(os.path.join(outdir, "tiles.vrt"))
|
||||
run('gdalbuildvrt "%s" "%s"' % (tiles_vrt_path, '" "'.join(map(lambda t: t['filename'], tiles))))
|
||||
tiles_file_list = os.path.abspath(os.path.join(outdir, "tiles_list.txt"))
|
||||
with open(tiles_file_list, 'w') as f:
|
||||
for t in tiles:
|
||||
f.write(t['filename'] + '\n')
|
||||
|
||||
run('gdalbuildvrt -input_file_list "%s" "%s" ' % (tiles_file_list, tiles_vrt_path))
|
||||
|
||||
merged_vrt_path = os.path.abspath(os.path.join(outdir, "merged.vrt"))
|
||||
geotiff_tmp_path = os.path.abspath(os.path.join(outdir, 'tiles.tmp.tif'))
|
||||
geotiff_small_path = os.path.abspath(os.path.join(outdir, 'tiles.small.tif'))
|
||||
geotiff_small_filled_path = os.path.abspath(os.path.join(outdir, 'tiles.small_filled.tif'))
|
||||
geotiff_path = os.path.abspath(os.path.join(outdir, 'tiles.tif'))
|
||||
|
@ -203,7 +130,6 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
|
|||
'tiles_vrt': tiles_vrt_path,
|
||||
'merged_vrt': merged_vrt_path,
|
||||
'geotiff': geotiff_path,
|
||||
'geotiff_tmp': geotiff_tmp_path,
|
||||
'geotiff_small': geotiff_small_path,
|
||||
'geotiff_small_filled': geotiff_small_filled_path
|
||||
}
|
||||
|
@ -212,38 +138,34 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
|
|||
# Sometimes, for some reason gdal_fillnodata.py
|
||||
# behaves strangely when reading data directly from a .VRT
|
||||
# so we need to convert to GeoTIFF first.
|
||||
# Scale to 10% size
|
||||
run('gdal_translate '
|
||||
'-co NUM_THREADS={threads} '
|
||||
'-co BIGTIFF=IF_SAFER '
|
||||
'-co COMPRESS=DEFLATE '
|
||||
'--config GDAL_CACHEMAX {max_memory}% '
|
||||
'{tiles_vrt} {geotiff_tmp}'.format(**kwargs))
|
||||
|
||||
# Scale to 10% size
|
||||
run('gdal_translate '
|
||||
'-co NUM_THREADS={threads} '
|
||||
'-co BIGTIFF=IF_SAFER '
|
||||
'--config GDAL_CACHEMAX {max_memory}% '
|
||||
'-outsize 10% 0 '
|
||||
'{geotiff_tmp} {geotiff_small}'.format(**kwargs))
|
||||
'-outsize 10% 0 '
|
||||
'"{tiles_vrt}" "{geotiff_small}"'.format(**kwargs))
|
||||
|
||||
# Fill scaled
|
||||
run('gdal_fillnodata.py '
|
||||
'-co NUM_THREADS={threads} '
|
||||
'-co BIGTIFF=IF_SAFER '
|
||||
'--config GDAL_CACHEMAX {max_memory}% '
|
||||
'-b 1 '
|
||||
'-of GTiff '
|
||||
'{geotiff_small} {geotiff_small_filled}'.format(**kwargs))
|
||||
|
||||
gdal_fillnodata(['.',
|
||||
'-co', 'NUM_THREADS=%s' % kwargs['threads'],
|
||||
'-co', 'BIGTIFF=IF_SAFER',
|
||||
'-co', 'COMPRESS=DEFLATE',
|
||||
'--config', 'GDAL_CACHE_MAX', str(kwargs['max_memory']) + '%',
|
||||
'-b', '1',
|
||||
'-of', 'GTiff',
|
||||
kwargs['geotiff_small'], kwargs['geotiff_small_filled']])
|
||||
|
||||
# Merge filled scaled DEM with unfilled DEM using bilinear interpolation
|
||||
run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path))
|
||||
run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, tiles_vrt_path))
|
||||
run('gdal_translate '
|
||||
'-co NUM_THREADS={threads} '
|
||||
'-co TILED=YES '
|
||||
'-co BIGTIFF=IF_SAFER '
|
||||
'-co COMPRESS=DEFLATE '
|
||||
'--config GDAL_CACHEMAX {max_memory}% '
|
||||
'{merged_vrt} {geotiff}'.format(**kwargs))
|
||||
'"{merged_vrt}" "{geotiff}"'.format(**kwargs))
|
||||
else:
|
||||
run('gdal_translate '
|
||||
'-co NUM_THREADS={threads} '
|
||||
|
@ -251,25 +173,25 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
|
|||
'-co BIGTIFF=IF_SAFER '
|
||||
'-co COMPRESS=DEFLATE '
|
||||
'--config GDAL_CACHEMAX {max_memory}% '
|
||||
'{tiles_vrt} {geotiff}'.format(**kwargs))
|
||||
'"{tiles_vrt}" "{geotiff}"'.format(**kwargs))
|
||||
|
||||
if apply_smoothing:
|
||||
median_smoothing(geotiff_path, output_path)
|
||||
median_smoothing(geotiff_path, output_path, num_workers=max_workers)
|
||||
os.remove(geotiff_path)
|
||||
else:
|
||||
os.rename(geotiff_path, output_path)
|
||||
os.replace(geotiff_path, output_path)
|
||||
|
||||
if os.path.exists(geotiff_tmp_path):
|
||||
if not keep_unfilled_copy:
|
||||
os.remove(geotiff_tmp_path)
|
||||
else:
|
||||
os.rename(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))
|
||||
if os.path.exists(tiles_vrt_path):
|
||||
if with_euclidean_map:
|
||||
emap_path = io.related_file_path(output_path, postfix=".euclideand")
|
||||
compute_euclidean_map(tiles_vrt_path, emap_path, overwrite=True)
|
||||
|
||||
for cleanup_file in [tiles_vrt_path, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
|
||||
for cleanup_file in [tiles_vrt_path, tiles_file_list, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
|
||||
if os.path.exists(cleanup_file): os.remove(cleanup_file)
|
||||
|
||||
for t in tiles:
|
||||
if os.path.exists(t['filename']): os.remove(t['filename'])
|
||||
|
||||
|
||||
log.ODM_INFO('Completed %s in %s' % (output_file, datetime.now() - start))
|
||||
|
||||
|
||||
|
@ -282,55 +204,63 @@ def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
|
|||
with rasterio.open(geotiff_path) as f:
|
||||
nodata = f.nodatavals[0]
|
||||
|
||||
if not os.path.exists(output_path) or overwrite:
|
||||
log.ODM_INFO("Computing euclidean distance: %s" % output_path)
|
||||
run('gdal_proximity.py "%s" "%s" -values %s' % (geotiff_path, output_path, nodata))
|
||||
if not os.path.isfile(output_path) or overwrite:
|
||||
if os.path.isfile(output_path):
|
||||
os.remove(output_path)
|
||||
|
||||
if os.path.exists(output_path):
|
||||
return output_path
|
||||
log.ODM_INFO("Computing euclidean distance: %s" % output_path)
|
||||
|
||||
if gdal_proximity is not None:
|
||||
try:
|
||||
gdal_proximity(['gdal_proximity.py',
|
||||
geotiff_path, output_path, '-values', str(nodata),
|
||||
'-co', 'TILED=YES',
|
||||
'-co', 'BIGTIFF=IF_SAFER',
|
||||
'-co', 'COMPRESS=DEFLATE',
|
||||
])
|
||||
except Exception as e:
|
||||
log.ODM_WARNING("Cannot compute euclidean distance: %s" % str(e))
|
||||
|
||||
if os.path.exists(output_path):
|
||||
return output_path
|
||||
else:
|
||||
log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
|
||||
else:
|
||||
log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
|
||||
log.ODM_WARNING("Cannot compute euclidean map, gdal_proximity is missing")
|
||||
|
||||
else:
|
||||
log.ODM_INFO("Found a euclidean distance map: %s" % output_path)
|
||||
return output_path
|
||||
|
||||
|
||||
def median_smoothing(geotiff_path, output_path, smoothing_iterations=1):
|
||||
def median_smoothing(geotiff_path, output_path, window_size=512, num_workers=1, radius=4):
|
||||
""" Apply median smoothing """
|
||||
start = datetime.now()
|
||||
|
||||
if not os.path.exists(geotiff_path):
|
||||
raise Exception('File %s does not exist!' % geotiff_path)
|
||||
|
||||
log.ODM_INFO('Starting smoothing...')
|
||||
kwargs = {
|
||||
'input': geotiff_path,
|
||||
'output': output_path,
|
||||
'window': window_size,
|
||||
'radius': radius,
|
||||
}
|
||||
system.run('fastrasterfilter "{input}" '
|
||||
'--output "{output}" '
|
||||
'--window-size {window} '
|
||||
'--radius {radius} '
|
||||
'--co TILED=YES '
|
||||
'--co BIGTIFF=IF_SAFER '
|
||||
'--co COMPRESS=DEFLATE '.format(**kwargs), env_vars={'OMP_NUM_THREADS': num_workers})
|
||||
|
||||
with rasterio.open(geotiff_path) as img:
|
||||
nodata = img.nodatavals[0]
|
||||
dtype = img.dtypes[0]
|
||||
arr = img.read()[0]
|
||||
log.ODM_INFO('Completed smoothing to create %s in %s' % (output_path, datetime.now() - start))
|
||||
return output_path
|
||||
|
||||
# Median filter (careful, changing the value 5 might require tweaking)
|
||||
# the lines below. There's another numpy function that takes care of
|
||||
# these edge cases, but it's slower.
|
||||
for i in range(smoothing_iterations):
|
||||
log.ODM_INFO("Smoothing iteration %s" % str(i + 1))
|
||||
arr = ndimage.median_filter(arr, size=5, output=dtype)
|
||||
|
||||
# Fill corner points with nearest value
|
||||
if arr.shape >= (4, 4):
|
||||
arr[0][:2] = arr[1][0] = arr[1][1]
|
||||
arr[0][-2:] = arr[1][-1] = arr[2][-1]
|
||||
arr[-1][:2] = arr[-2][0] = arr[-2][1]
|
||||
arr[-1][-2:] = arr[-2][-1] = arr[-2][-2]
|
||||
|
||||
# Median filter leaves a bunch of zeros in nodata areas
|
||||
locs = numpy.where(arr == 0.0)
|
||||
arr[locs] = nodata
|
||||
|
||||
# write output
|
||||
with rasterio.open(output_path, 'w', **img.profile) as imgout:
|
||||
imgout.write(arr, 1)
|
||||
def get_dem_radius_steps(stats_file, steps, resolution, multiplier = 1.0):
|
||||
radius_steps = [point_cloud.get_spacing(stats_file, resolution) * multiplier]
|
||||
for _ in range(steps - 1):
|
||||
radius_steps.append(radius_steps[-1] * math.sqrt(2))
|
||||
|
||||
log.ODM_INFO('Completed smoothing to create %s in %s' % (os.path.relpath(output_path), datetime.now() - start))
|
||||
|
||||
return output_path
|
||||
return radius_steps
|
|
@ -35,7 +35,7 @@ class DistanceDimension(Dimension):
|
|||
return 'distance_to_ground'
|
||||
|
||||
def get_las_type(self):
|
||||
return 10
|
||||
return 'float64'
|
||||
|
||||
def __calculate_angle(self, model):
|
||||
"Calculate the angle between the estimated plane and the XY plane"
|
||||
|
|
|
@ -20,4 +20,4 @@ class ExtendedDimension(Dimension):
|
|||
return 'extended'
|
||||
|
||||
def get_las_type(self):
|
||||
return 3
|
||||
return 'uint16'
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Ładowanie…
Reference in New Issue