Merge branch 'dev' into orb_slam2

Former-commit-id: ec147b2c7e
pull/1161/head
Pau Gargallo 2016-05-18 13:09:36 +02:00
commit 2ed473c168
102 changed files with 2262 additions and 1368 deletions

View file

@@ -1 +1,6 @@
Dockerfile
tests/test_data
SuperBuild/build
SuperBuild/download
SuperBuild/install
SuperBuild/src

4
.gitignore vendored
View file

@@ -7,6 +7,10 @@ share/
src/
download/
SuperBuild/build/
SuperBuild/install/
build/
cmvs.tar.gz
parallel.tar.bz2
LAStools.zip

View file

@@ -4,23 +4,51 @@ MAINTAINER Danilo Bargen <mail@dbrgn.ch>
# Env variables
ENV DEBIAN_FRONTEND noninteractive
# Install git
RUN apt-get update \
&& apt-get install -y git
# Install dependencies
RUN apt-get install -y --install-recommends \
build-essential cmake g++ gcc gfortran perl git autoconf \
curl wget \
unzip \
imagemagick jhead proj-bin libproj-dev \
libjpeg-dev libboost-all-dev libgsl0-dev libx11-dev libxext-dev liblapack-dev \
libeigen3-dev libflann-dev libvtk5-dev libqhull-dev libusb-1.0-0-dev \
libjson-perl \
libzip-dev \
libswitch-perl \
libcv-dev libcvaux-dev libopencv-dev \
libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev \
RUN apt-get update \
&& sudo apt-get remove libdc1394-22-dev \
&& apt-get install -y --install-recommends \
build-essential \
cmake \
git \
python-pip \
libgdal-dev \
libgeotiff-dev \
pkg-config \
libgtk2.0-dev \
libavcodec-dev \
libavformat-dev \
libswscale-dev \
python-dev \
python-numpy \
libtbb2 \
libtbb-dev \
libjpeg-dev \
libpng-dev \
libtiff-dev \
libjasper-dev \
libflann-dev \
libproj-dev \
libxext-dev \
liblapack-dev \
libeigen3-dev \
libvtk5-dev \
python-networkx \
libgoogle-glog-dev \
libsuitesparse-dev \
libboost-filesystem-dev \
libboost-iostreams-dev \
libboost-regex-dev \
libboost-python-dev \
libboost-date-time-dev \
libboost-thread-dev \
python-empy \
python-nose \
python-pyside \
python-pyexiv2 \
python-scipy \
jhead \
liblas-bin \
&& apt-get autoremove \
&& apt-get clean
@@ -38,11 +66,15 @@ ADD . /code/
RUN git submodule init && git submodule update
# Build OpenDroneMap
RUN ./install.sh && \
RUN bash ./configure.sh && \
mkdir build && cd build && cmake .. && make && cd .. && \
chown -R odm:odm /code
USER odm
ENV PYTHONPATH=${PYTHONPATH}:/code/SuperBuild/install/lib/python2.7/dist-packages:/code/SuperBuild/src/opensfm \
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/code/SuperBuild/install/lib
# Entry point
VOLUME ["/images"]
WORKDIR /images
ENTRYPOINT ["/code/run.pl"]
# WORKDIR /images
ENTRYPOINT ["python", "/code/run.py", "--project-path", "/images"]

674
LICENSE 100644
View file

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
{one line to give the program's name and a brief idea of what it does.}
Copyright (C) {year} {name of author}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
{project} Copyright (C) {year} {fullname}
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

197
README.md
View file

@@ -1,9 +1,15 @@
# OpenDroneMap
![](https://opendronemap.github.io/OpenDroneMap/img/odm_image.png)
What is it?
===========
OpenDroneMap is a toolchain for processing raw civilian UAS imagery to other useful products. What kind of products?
OpenDroneMap is an open source toolkit for processing aerial drone imagery. Typical drones use simple point-and-shoot cameras, so the images from drones, while from a different perspective, are similar to any pictures taken from point-and-shoot cameras, i.e. non-metric imagery. OpenDroneMap turns those simple images into three-dimensional geographic data that can be used in combination with other geographic datasets.
![](https://opendronemap.github.io/OpenDroneMap/img/tol_ptcloud.png)
In a word, OpenDroneMap is a toolchain for processing raw civilian UAS imagery to other useful products. What kind of products?
1. Point Clouds
2. Digital Surface Models
@@ -26,52 +32,116 @@ Developers
Help improve our software!
1. Join our [Gitter](https://gitter.im/OpenDroneMap)
2. Try to keep commits clean and simple
3. Submit a pull request with detailed changes and test results
[![Join the chat at https://gitter.im/OpenDroneMap/OpenDroneMap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/OpenDroneMap/OpenDroneMap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
1. Try to keep commits clean and simple
2. Submit a pull request with detailed changes and test results
Steps to get OpenDroneMap running:
==================================
(Requires Ubuntu 12.04 or later, see https://github.com/OpenDroneMap/odm_vagrant for running on Windows in a VM)
(Requires Ubuntu 14.04 or later, see https://github.com/OpenDroneMap/odm_vagrant for running on Windows in a VM)
Run install.sh to build.
Support for Ubuntu 12.04 is currently BROKEN with the addition of OpenSfM and Ceres-Solver. We are working hard to get it working again in the future.
./install.sh
#### Building OpenDroneMap using git
From a directory full of your images, run
cd path/to/odm/dir
git clone https://github.com/OpenDroneMap/OpenDroneMap.git .
export PYTHONPATH=$PYTHONPATH:`pwd`/SuperBuild/install/lib/python2.7/dist-packages:`pwd`/SuperBuild/src/opensfm
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:`pwd`/SuperBuild/install/lib
bash configure.sh
mkdir build && cd build && cmake .. && make && cd ..
./run.pl
For Ubuntu 15.10 users, this will help you get running:
An overview of installing and running OpenDroneMap on Ubuntu can be found here: https://www.youtube.com/watch?v=e2qp3o8caPs
sudo apt-get install python-xmltodict
sudo ln -s /usr/lib/x86_64-linux-gnu/libproj.so.9 /usr/lib/libproj.so
#### Running OpenDroneMap
Here are some other videos:
First you need a set of images, which may or may not be georeferenced. There are two ways OpenDroneMap can understand geographic coordinates: the images can be geotagged in their EXIF data (the default), or you can create a GCP file, [a process detailed here](https://github.com/OpenDroneMap/OpenDroneMap/wiki/2.-Running-OpenDroneMap#running-odm-with-ground-control).
Create a project folder and place your images in an "images" directory:
|-- /path/to/project/
|-- images/
|-- img-1234.jpg
|-- ...
Example data can be cloned from https://github.com/OpenDroneMap/odm_data
Then run:
python run.py --project-path /path/to/project
There are many options for tuning your project. See the [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki/3.-Run-Time-Parameters) or run `python run.py -h`.
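Putting this together, a minimal end-to-end sketch might look like the following (the paths are placeholders; only `--project-path` is assumed from the documentation above):

```
# Create the layout OpenDroneMap expects: a project folder containing an "images" directory
mkdir -p /path/to/project/images
cp /path/to/your/photos/*.jpg /path/to/project/images/

# Process the project from within the OpenDroneMap checkout
python run.py --project-path /path/to/project
```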
When the process finishes, the results will be organized as follows
|-- images/
|-- img-1234.jpg
|-- ...
|-- images_resize/
|-- img-1234.jpg
|-- ...
|-- opensfm/
|-- not much useful in here
|-- pmvs/
|-- recon0/
|-- models/
|-- option-0000.ply # Dense point cloud
|-- odm_meshing/
|-- odm_mesh.ply # A 3D mesh
|-- odm_meshing_log.txt # Output of the meshing task. May point out errors.
|-- odm_texturing/
|-- odm_textured_model.obj # Textured mesh
|-- odm_textured_model_geo.obj # Georeferenced textured mesh
|-- texture_N.jpg # Associated textured images used by the model
|-- odm_georeferencing/
|-- odm_georeferenced_model.ply # A georeferenced dense point cloud
|-- odm_georeferenced_model.ply.laz # LAZ format point cloud
|-- odm_georeferenced_model.csv # XYZ format point cloud
|-- odm_georeferencing_log.txt # Georeferencing log
|-- odm_georeferencing_utm_log.txt # Log for the extract_utm portion
|-- odm_orthophoto/
|-- odm_orthophoto.png # Orthophoto image (no coordinates)
|-- odm_orthophoto.tif # Orthophoto GeoTiff
|-- odm_orthophoto_log.txt # Log file
|-- gdal_translate_log.txt # Log for georeferencing the png file
##### Viewing your results
Any file ending in .obj or .ply can be opened and viewed in [MeshLab](http://meshlab.sourceforge.net/) or similar software. That includes `pmvs/recon0/models/option-0000.ply`, `odm_meshing/odm_mesh.ply`, `odm_texturing/odm_textured_model[_geo].obj`, or `odm_georeferencing/odm_georeferenced_model.ply`. Below is an example textured mesh:
![](https://opendronemap.github.io/OpenDroneMap/img/tol_text.png)
You can also view the orthophoto GeoTIFF in QGIS or other mapping software:
![](https://raw.githubusercontent.com/OpenDroneMap/OpenDroneMap/dev/img/bellus_map.png)
#### Using Docker
You can build and run OpenDroneMap in a Docker container:
export IMAGES=/absolute/path/to/your/project
docker build -t opendronemap:latest .
docker run -v $IMAGES:/images opendronemap:latest
Replace /absolute/path/to/your/project with an absolute path to the directory containing your project (where the images are).
To pass in custom parameters to the `run.py` script, simply pass them as arguments to the `docker run` command.
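For example, a sketch of forwarding an extra option (here `--resize-to`, taken from the run-time parameters listed elsewhere in this README; treat the exact flag name as an assumption):

```
# Arguments after the image name are appended to the ENTRYPOINT,
# i.e. to "python /code/run.py --project-path /images"
export IMAGES=/absolute/path/to/your/project
docker run -v $IMAGES:/images opendronemap:latest --resize-to 1800
```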
---
Here are some other videos, which may be outdated:
- https://www.youtube.com/watch?v=7ZTufQkODLs (2015-01-30)
- https://www.youtube.com/watch?v=m0i4GQdfl8A (2015-03-15)
Now that texturing is in the code base, you can access the full textured meshes using MeshLab. Open MeshLab, choose `File:Import Mesh` and choose your textured mesh from a location similar to the following: `reconstruction-with-image-size-1200-results\odm_texturing\odm_textured_model.obj`
For Ubuntu 15.10 users, this will help you get running:
```
sudo apt-get install python-xmltodict
sudo ln -s /usr/lib/x86_64-linux-gnu/libproj.so.9 /usr/lib/libproj.so
```
---
Alternatively, you can also run OpenDroneMap in a Docker container:
export IMAGES=/absolute/path/to/your/images
docker build -t opendronemap:latest .
docker run -v $IMAGES:/images opendronemap:latest
To pass in custom parameters to the `run.pl` script, simply pass it as arguments to the `docker run` command.
---
Example data can be found at https://github.com/OpenDroneMap/odm_data
---
Long term, the aim is for the toolchain to also be able to optionally push to a variety of online data repositories, pushing high-resolution aerials to [OpenAerialMap](https://openaerialmap.org/), point clouds to [OpenTopography](http://opentopography.org/), and pushing digital elevation models to an emerging global repository (yet to be named...). That leaves only digital surface model meshes and UV textured meshes with no global repository home.
@@ -84,68 +154,3 @@ Documentation:
For documentation, please take a look at our [wiki](https://github.com/OpenDroneMap/OpenDroneMap/wiki).
Troubleshooting:
================
Make sure you have enough RAM and CPU. Only lowercase file extensions are supported at the moment.
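If your camera writes uppercase extensions, a small Bash sketch like the one below (assuming JPEG input; adjust the path to your images directory) renames them to lowercase:

```
# Rename *.JPG to *.jpg so the toolchain picks the files up
cd /path/to/project/images
for f in *.JPG; do [ -e "$f" ] && mv "$f" "${f%.JPG}.jpg"; done
```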
If you run ODM with your own camera, it is possible you will see something like this:
```
- configuration:
--cmvs-maxImages: 500
--end-with: pmvs
--match-size: 200
--matcher-ratio: 0.6
--matcher-threshold: 2
--pmvs-csize: 2
--pmvs-level: 1
--pmvs-minImageNum: 3
--pmvs-threshold: 0.7
--pmvs-wsize: 7
--resize-to: 1200
--start-with: resize
- source files - Fri Sep 19 13:47:42 UTC 2014
no CCD width or focal length found for DSC05391.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05392.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05393.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05394.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05395.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05396.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05397.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05398.JPG - camera: "SONY DSC-HX5V"
no CCD width or focal length found for DSC05399.JPG - camera: "SONY DSC-HX5V"
found no usable images - quitting
Died at ../../OpenDroneMap/./run.pl line 364.
```
This means that your camera is not in the database: https://github.com/OpenDroneMap/OpenDroneMap/blob/gh-pages/ccd_defs.json
This problem is easily remedied. We need to know the CCD size of the camera. We'll get it for our Sony Cyber-shot DSC-HX5 from dpreview: http://www.dpreview.com/products/sony/compacts/sony_dschx5/specifications
So, we'll add the following line to our ccd_defs.json:
"SONY DSC-HX5V": 6.104,
To check that ccd_defs.json compiles, run `ccd_defs_check.pl`.
If it prints the message 'CCD_DEFS compiles OK', then you can commit your changes.
So that others can use it, submit a pull request to add it to the database for everyone else.
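As an additional quick sanity check (a sketch only, separate from the project's `ccd_defs_check.pl`), you can confirm the edited file is still valid JSON:

```
# Fails loudly if the edited ccd_defs.json is no longer parseable JSON
python -c "import json; json.load(open('ccd_defs.json')); print('CCD_DEFS compiles OK')"
```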
---
Maintainers can run the ccd_defs.json compilation test automatically by creating a
symbolic link in .git/hooks to hooks/pre-commit
cd .git/hooks
ln -s ../../hooks/pre-commit
If ccd_defs.json does not compile, then the pre-commit hook will abort the commit.

View file

@@ -94,7 +94,6 @@ SETUP_EXTERNAL_PROJECT(Ceres ${ODM_Ceres_Version} ${ODM_BUILD_Ceres})
# Clustering Views for Multi-view Stereo (CMVS)
# Catkin
# Ecto
# LAStools
#
set(custom_libs OpenGV
@@ -102,6 +101,7 @@ set(custom_libs OpenGV
CMVS
Catkin
Ecto
PDAL
LAStools
Pangolin
ORB_SLAM2)

View file

@@ -14,6 +14,7 @@ ExternalProject_Add(${_proj_name}
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DCATKIN_ENABLE_TESTING=OFF
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
#--Build step-----------------
BINARY_DIR ${_SB_BINARY_DIR}

View file

@@ -8,13 +8,16 @@ ExternalProject_Add(${_proj_name}
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
URL https://github.com/plasmodic/ecto/archive/master.zip
URL_MD5 8cd70c525bcda99d9eba5ac2865e42dd
URL https://github.com/plasmodic/ecto/archive/c6178ed0102a66cebf503a4213c27b0f60cfca69.zip
URL_MD5 A5C4757B656D536D3E3CC1DC240EC158
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DBUILD_DOC=OFF
-DBUILD_SAMPLES=OFF
-DCATKIN_ENABLE_TESTING=OFF
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
#--Build step-----------------
BINARY_DIR ${_SB_BINARY_DIR}
@@ -24,4 +27,4 @@ ExternalProject_Add(${_proj_name}
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)
)

View file

@@ -1,26 +0,0 @@
set(_proj_name las-tools)
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
ExternalProject_Add(${_proj_name}
PREFIX ${_SB_BINARY_DIR}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
URL http://lastools.org/download/LAStools.zip
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CONFIGURE_COMMAND ""
WORKING_DIRECTORY "${SB_SOURCE_DIR}/${_proj_name}"
#--Build step-----------------
BUILD_IN_SOURCE 1
#--Install step---------------
INSTALL_DIR ${SB_INSTALL_DIR}
INSTALL_COMMAND ""
#--Output logging-------------
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)

View file

@@ -8,7 +8,6 @@ ExternalProject_Add(${_proj_name}
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
URL https://github.com/paulinus/opengv/archive/python-wrapper.zip
URL_MD5 6afd5dfbec8f11b556e794d009bbbcc2
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------

View file

@@ -0,0 +1,46 @@
set(_proj_name pdal)
set(_SB_BINARY_DIR "${SB_BINARY_DIR}/${_proj_name}")
ExternalProject_Add(${_proj_name}
PREFIX ${_SB_BINARY_DIR}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
#--Download step--------------
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}
URL https://github.com/PDAL/PDAL/archive/d242c6704aafe85fd49fda11adae63d07ce11b76.zip
URL_MD5 14a7319e1f8483808eb93732cfa6511a
#--Update/Patch step----------
UPDATE_COMMAND ""
#--Configure step-------------
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DBUILD_PGPOINTCLOUD_TESTS=OFF
-DBUILD_PLUGIN_PCL=ON
-DBUILD_PLUGIN_PGPOINTCLOUD=ON
-DBUILD_PLUGIN_CPD=OFF
-DBUILD_PLUGIN_GREYHOUND=OFF
-DBUILD_PLUGIN_HEXBIN=OFF
-DBUILD_PLUGIN_ICEBRIDGE=OFF
-DBUILD_PLUGIN_MRSID=OFF
-DBUILD_PLUGIN_NITF=OFF
-DBUILD_PLUGIN_OCI=OFF
-DBUILD_PLUGIN_P2G=OFF
-DBUILD_PLUGIN_SQLITE=OFF
-DBUILD_PLUGIN_RIVLIB=OFF
-DBUILD_PLUGIN_PYTHON=OFF
-DENABLE_CTEST=OFF
-DWITH_APPS=ON
-DWITH_LAZPERF=OFF
-DWITH_GEOTIFF=ON
-DWITH_LASZIP=ON
-DWITH_TESTS=OFF
-DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR}
#--Build step-----------------
BINARY_DIR ${_SB_BINARY_DIR}
#--Install step---------------
INSTALL_DIR ${SB_INSTALL_DIR}
#--Output logging-------------
LOG_DOWNLOAD OFF
LOG_CONFIGURE OFF
LOG_BUILD OFF
)

View file

@@ -11,9 +11,9 @@ fi
echo -e "\e[1;34mUpdating the system\e[0;39m"
sudo apt-get update
END_CMD1=$?
sudo apt-get upgrade -y
END_CMD2=$?
if [ $END_CMD1 -ne 0 -o $END_CMD2 -ne 0 ]
# sudo apt-get upgrade -y
# END_CMD2=$?
if [ $END_CMD1 -ne 0 ]
then
echo -e "\e[1;31mERROR: \e[39mWhen Updating the system\e[0m"
exit 1
@@ -25,7 +25,10 @@ sudo apt-get install build-essential \
cmake \
git \
python-pip \
pkg-config -y
libgdal-dev \
gdal-bin \
libgeotiff-dev \
pkg-config -y -qq
if [ $? -ne 0 ]
then
echo -e "\e[1;31mERROR: \e[39mWhen Installing Required Requisites\e[0m"
@@ -51,7 +54,7 @@ sudo apt-get install libgtk2.0-dev \
libxext-dev \
liblapack-dev \
libeigen3-dev \
libvtk5-dev -y
libvtk5-dev -y -qq
if [ $? -ne 0 ]
then
echo -e "\e[1;31mERROR: \e[39mError when Installing Dependencies Requisites\e[0m"
@@ -67,12 +70,12 @@ echo -e "\e[1;34mInstalling OpenSfM Dependencies\e[0;39m"
sudo apt-get install python-networkx \
libgoogle-glog-dev \
libsuitesparse-dev \
libboost-filesystem1.55-dev \
libboost-iostreams1.55-dev \
libboost-regex1.55-dev \
libboost-python1.55-dev \
#libboost1.55-all-dev \
libboost-python-dev -y
libboost-filesystem-dev \
libboost-iostreams-dev \
libboost-regex-dev \
libboost-python-dev \
libboost-date-time-dev \
libboost-thread-dev -y -qq
sudo pip install -U PyYAML \
exifread \
@@ -89,7 +92,7 @@ echo -e "\e[1;34mInstalling Ecto Dependencies\e[0;39m"
sudo pip install -U catkin-pkg
sudo apt-get install python-empy \
python-nose \
python-pyside -y
python-pyside -y -qq
if [ $? -ne 0 ]
then
echo -e "\e[1;31mERROR: \e[39mError when Installing Ecto Dependencies\e[0m"
@@ -101,7 +104,7 @@ echo -e "\e[1;34mInstalling OpenDroneMap Dependencies\e[0;39m"
sudo apt-get install python-pyexiv2 \
python-scipy \
jhead \
liblas-bin -y
liblas-bin -y -qq
if [ $? -ne 0 ]
then
echo -e "\e[1;31mERROR: \e[39mError when Installing OpenDroneMap Dependencies\e[0m"
@@ -115,11 +118,11 @@ NUM_CORES=`grep -c processor /proc/cpuinfo`
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:`pwd`/SuperBuild/install/lib
## Add SuperBuild path to the python path
export PYTHONPATH=$PYTHONPATH:`pwd`/SuperBuild/install/lib/python2.7/dist-packages
export PYTHONPATH=$PYTHONPATH:`pwd`/SuperBuild/install/lib/python2.7/dist-packages:`pwd`/SuperBuild/src/opensfm
## Compile SuperBuild
cd SuperBuild
mkdir -p build && cd build
cmake .. && make -j ${NUM_CORES}
cmake .. && make -j${NUM_CORES}
echo -e "\e[1;34mScript finished\e[0;39m"

View file

@@ -1,319 +0,0 @@
{
"Apple iPhone 5s": 8.46,
"Asahi Optical Co.,Ltd. PENTAX Optio330RS": 7.176,
"Canon Canon DIGITAL IXUS 400": 7.176,
"Canon Canon DIGITAL IXUS 40": 5.76,
"Canon Canon DIGITAL IXUS 430": 6.18,
"Canon Canon DIGITAL IXUS 500": 7.176,
"Canon Canon DIGITAL IXUS 50": 5.76,
"Canon Canon DIGITAL IXUS 55": 5.76,
"Canon Canon DIGITAL IXUS 60": 5.76,
"Canon Canon DIGITAL IXUS 65": 5.76,
"Canon Canon DIGITAL IXUS 700": 7.176,
"Canon Canon DIGITAL IXUS 750": 7.176,
"Canon Canon DIGITAL IXUS 800 IS": 5.76,
"Canon Canon DIGITAL IXUS II": 5.27,
"Canon Canon EOS 10D": 22.7,
"Canon Canon EOS-1D Mark II": 28.7,
"Canon Canon EOS-1Ds Mark II": 35.95,
"Canon Canon EOS 20D": 22.5,
"Canon Canon EOS 20D": 22.5,
"Canon Canon EOS 300D DIGITAL": 22.66,
"Canon Canon EOS 30D": 22.5,
"Canon Canon EOS 350D DIGITAL": 22.2,
"Canon Canon EOS 400D DIGITAL": 22.2,
"Canon Canon EOS 40D": 22.2,
"Canon Canon EOS 5D": 35.8,
"Canon Canon EOS 5D Mark II": 36.0,
"Canon Canon EOS DIGITAL REBEL": 22.66,
"Canon Canon EOS DIGITAL REBEL XT": 22.2,
"Canon Canon EOS DIGITAL REBEL XTi": 22.2,
"Canon Canon EOS Kiss Digital": 22.66,
"Canon Canon IXY DIGITAL 600": 7.176,
"Canon Canon PowerShot A10": 5.23,
"Canon Canon PowerShot A20": 7.176,
"Canon Canon PowerShot A400": 4.54,
"Canon Canon PowerShot A40": 5.27,
"Canon Canon PowerShot A510": 5.76,
"Canon Canon PowerShot A520": 5.76,
"Canon Canon PowerShot A530": 5.76,
"Canon Canon PowerShot A60": 5.27,
"Canon Canon PowerShot A620": 7.176,
"Canon Canon PowerShot A630": 7.176,
"Canon Canon PowerShot A640": 7.176,
"Canon Canon PowerShot A700": 5.76,
"Canon Canon PowerShot A70": 5.27,
"Canon Canon PowerShot A710 IS": 5.76,
"Canon Canon PowerShot A75": 5.27,
"Canon Canon PowerShot A80": 7.176,
"Canon Canon PowerShot A85": 5.27,
"Canon Canon PowerShot A95": 7.176,
"Canon Canon PowerShot G12": 7.44,
"Canon Canon PowerShot G1": 7.176,
"Canon Canon PowerShot G2": 7.176,
"Canon Canon PowerShot G3": 7.176,
"Canon Canon PowerShot G5": 7.176,
"Canon Canon PowerShot G6": 7.176,
"Canon Canon PowerShot G7": 7.176,
"Canon Canon PowerShot G9": 7.6,
"Canon Canon PowerShot Pro1": 8.8,
"Canon Canon PowerShot S100": 7.44,
"Canon Canon PowerShot S110": 5.27,
"Canon Canon PowerShot S1 IS": 5.27,
"Canon Canon PowerShot S200": 5.27,
"Canon Canon PowerShot S2 IS": 5.76,
"Canon Canon PowerShot S30": 7.176,
"Canon Canon PowerShot S3 IS": 5.76,
"Canon Canon PowerShot S400": 7.176,
"Canon Canon PowerShot S40": 7.176,
"Canon Canon PowerShot S410": 7.176,
"Canon Canon PowerShot S45": 7.176,
"Canon Canon PowerShot S500": 7.176,
"Canon Canon PowerShot S50": 7.176,
"Canon Canon PowerShot S60": 7.176,
"Canon Canon PowerShot S70": 7.176,
"Canon Canon PowerShot S80": 7.176,
"Canon Canon PowerShot SD1000": 5.75,
"Canon Canon PowerShot SD100": 5.27,
"Canon Canon PowerShot SD10": 5.75,
"Canon Canon PowerShot SD110": 5.27,
"Canon Canon PowerShot SD200": 5.76,
"Canon Canon PowerShot SD300": 5.76,
"Canon Canon PowerShot SD400": 5.76,
"Canon Canon PowerShot SD450": 5.76,
"Canon Canon PowerShot SD500": 7.176,
"Canon Canon PowerShot SD550": 7.176,
"Canon Canon PowerShot SD600": 5.76,
"Canon Canon PowerShot SD630": 5.76,
"Canon Canon PowerShot SD700 IS": 5.76,
"Canon Canon PowerShot SD750": 5.75,
"Canon Canon PowerShot SD800 IS": 5.76,
"Canon Canon PowerShot SX260 HS": 6.2,
"Canon EOS 300D DIGITAL": 22.66,
"Canon EOS DIGITAL REBEL": 22.66,
"Canon PowerShot A510": 5.76,
"Canon PowerShot S30": 7.176,
"CASIO COMPUTER CO.,LTD. EX-S500": 5.76,
"CASIO COMPUTER CO.,LTD. EX-Z1000": 7.716,
"CASIO COMPUTER CO.,LTD EX-Z30": 5.76,
"CASIO COMPUTER CO.,LTD. EX-Z600": 5.76,
"CASIO COMPUTER CO.,LTD. EX-Z60": 7.176,
"CASIO COMPUTER CO.,LTD EX-Z750": 7.176,
"CASIO COMPUTER CO.,LTD. EX-Z850": 7.176,
"DJI FC300S": 6.16,
"DJI FC300X": 6.2,
"DJI FC350": 6.17,
"DJI PHANTOM VISION FC200": 6.17,
"EASTMAN KODAK COMPANY KODAK CX7330 ZOOM DIGITAL CAMERA": 5.27,
"EASTMAN KODAK COMPANY KODAK CX7530 ZOOM DIGITAL CAMERA": 5.76,
"EASTMAN KODAK COMPANY KODAK DX3900 ZOOM DIGITAL CAMERA": 7.176,
"EASTMAN KODAK COMPANY KODAK DX4900 ZOOM DIGITAL CAMERA": 7.176,
"EASTMAN KODAK COMPANY KODAK DX6340 ZOOM DIGITAL CAMERA": 5.27,
"EASTMAN KODAK COMPANY KODAK DX6490 ZOOM DIGITAL CAMERA": 5.76,
"EASTMAN KODAK COMPANY KODAK DX7630 ZOOM DIGITAL CAMERA": 7.176,
"EASTMAN KODAK COMPANY KODAK Z650 ZOOM DIGITAL CAMERA": 5.76,
"EASTMAN KODAK COMPANY KODAK Z700 ZOOM DIGITAL CAMERA": 5.76,
"EASTMAN KODAK COMPANY KODAK Z740 ZOOM DIGITAL CAMERA": 5.76,
"FUJIFILM FinePix2600Zoom": 5.27,
"FUJIFILM FinePix40i": 7.6,
"FUJIFILM FinePix A310": 5.27,
"FUJIFILM FinePix A330": 5.27,
"FUJIFILM FinePix A600": 7.6,
"FUJIFILM FinePix E500": 5.76,
"FUJIFILM FinePix E510": 5.76,
"FUJIFILM FinePix E550": 7.6,
"FUJIFILM FinePix E900": 7.78,
"FUJIFILM FinePix F10": 7.6,
"FUJIFILM FinePix F30": 7.6,
"FUJIFILM FinePix F450": 5.76,
"FUJIFILM FinePix F601 ZOOM": 7.6,
"FUJIFILM FinePix S3Pro": 23,
"FUJIFILM FinePix S5000": 5.27,
"FUJIFILM FinePix S5200": 5.76,
"FUJIFILM FinePix S5500": 5.27,
"FUJIFILM FinePix S6500fd": 7.6,
"FUJIFILM FinePix S7000": 7.6,
"FUJIFILM FinePix Z2": 5.76,
"Garmin VIRB": 6.17,
"GoPro HD2 U": 5.8,
"Gopro HD3": 5.76,
"GoPro HERO4 Black": 6.17,
"GoPro HERO4 Silver":6.17,
"Hewlett-Packard hp 635 Digital Camera": 4.54,
"Hewlett-Packard hp PhotoSmart 43x series": 5.27,
"Hewlett-Packard HP PhotoSmart 618 (V1.1)": 5.27,
"Hewlett-Packard HP PhotoSmart C945 (V01.61)": 7.176,
"Hewlett-Packard HP PhotoSmart R707 (V01.00)": 7.176,
"KONICA MILOLTA DYNAX 5D": 23.5,
"Konica Minolta Camera, Inc. DiMAGE A2": 8.8,
"KONICA MINOLTA CAMERA, Inc. DiMAGE G400": 5.76,
"Konica Minolta Camera, Inc. DiMAGE Z2": 5.76,
"KONICA MINOLTA DiMAGE A200": 8.8,
"KONICA MINOLTA DiMAGE X1": 7.176,
"KONICA MINOLTA DYNAX 5D": 23.5,
"Minolta Co., Ltd. DiMAGE F100": 7.176,
"Minolta Co., Ltd. DiMAGE Xi": 5.27,
"Minolta Co., Ltd. DiMAGE Xt": 5.27,
"Minolta Co., Ltd. DiMAGE Z1": 5.27,
"Mobius Action Cam": 2.1,
"NIKON COOLPIX L3": 5.76,
"NIKON COOLPIX P2": 7.176,
"NIKON COOLPIX P7700": 7.44,
"NIKON COOLPIX S4": 5.76,
"NIKON COOLPIX S7c": 5.76,
"NIKON COOLPIX S8000": 6.17,
"NIKON CORPORATION NIKON 1 J4": 13.2,
"NIKON CORPORATION NIKON D100": 23.7,
"NIKON CORPORATION NIKON D1": 23.7,
"NIKON CORPORATION NIKON D1H": 23.7,
"NIKON CORPORATION NIKON D200": 23.6,
"NIKON CORPORATION NIKON D2H": 23.3,
"NIKON CORPORATION NIKON D2X": 23.7,
"NIKON CORPORATION NIKON D40": 23.7,
"NIKON CORPORATION NIKON D50": 23.7,
"NIKON CORPORATION NIKON D60": 23.6,
"NIKON CORPORATION NIKON D700": 36,
"NIKON CORPORATION NIKON D70": 23.7,
"NIKON CORPORATION NIKON D70s": 23.7,
"NIKON CORPORATION NIKON D80": 23.6,
"NIKON CORPORATION NIKON D5300": 23.5,
"NIKON D5100": 23.6,
"NIKON E2500": 5.27,
"NIKON E3100": 5.27,
"NIKON E3200": 5.27,
"NIKON E3700": 5.27,
"NIKON E4200": 7.176,
"NIKON E4300": 7.18,
"NIKON E4500": 7.176,
"NIKON E4600": 5.76,
"NIKON E5000": 8.8,
"NIKON E5200": 7.176,
"NIKON E5400": 7.176,
"NIKON E5600": 5.76,
"NIKON E5700": 8.8,
"NIKON E5900": 7.176,
"NIKON E7600": 7.176,
"NIKON E775": 5.27,
"NIKON E7900": 7.176,
"NIKON E8800": 8.8,
"NIKON E990": 7.176,
"NIKON E995": 7.176,
"NIKON S1": 5.76,
"Nokia N80": 5.27,
"Nokia N93": 4.536,
"Nokia N95": 5.7,
"OLYMPUS CORPORATION C-5000Z": 7.176,
"OLYMPUS CORPORATION C5060WZ": 7.176,
"OLYMPUS CORPORATION C750UZ": 5.27,
"OLYMPUS CORPORATION C765UZ": 5.76,
"OLYMPUS CORPORATION C8080WZ": 8.8,
"OLYMPUS CORPORATION X250,D560Z,C350Z": 5.76,
"OLYMPUS CORPORATION X-3,C-60Z": 7.176,
"OLYMPUS CORPORATION X400,D580Z,C460Z": 5.27,
"OLYMPUS IMAGING CORP. E-500": 17.3,
"OLYMPUS IMAGING CORP. E-510": 17.3,
"OLYMPUS IMAGING CORP. FE115,X715": 5.76,
"OLYMPUS IMAGING CORP. SP310": 7.176,
"OLYMPUS IMAGING CORP. SP510UZ": 5.75,
"OLYMPUS IMAGING CORP. SP550UZ": 5.76,
"OLYMPUS IMAGING CORP. uD600,S600": 5.75,
"OLYMPUS_IMAGING_CORP. X450,D535Z,C370Z": 5.27,
"OLYMPUS IMAGING CORP. X550,D545Z,C480Z": 5.76,
"Olympus E-M10": 17.3,
"Olympus E-PL5": 17.3,
"Olympus E-PL6": 17.3,
"OLYMPUS OPTICAL CO.,LTD C2040Z": 6.4,
"OLYMPUS OPTICAL CO.,LTD C211Z": 5.27,
"OLYMPUS OPTICAL CO.,LTD C2Z,D520Z,C220Z": 4.54,
"OLYMPUS OPTICAL CO.,LTD C3000Z": 7.176,
"OLYMPUS OPTICAL CO.,LTD C300Z,D550Z": 5.4,
"OLYMPUS OPTICAL CO.,LTD C4100Z,C4000Z": 7.176,
"OLYMPUS OPTICAL CO.,LTD C750UZ": 5.27,
"OLYMPUS OPTICAL CO.,LTD X-2,C-50Z": 7.176,
"OLYMPUS SP550UZ": 5.76,
"OLYMPUS X100,D540Z,C310Z": 5.27,
"Panasonic DMC-FX01": 5.76,
"Panasonic DMC-FX07": 5.75,
"Panasonic DMC-FX9": 5.76,
"Panasonic DMC-FZ20": 5.76,
"Panasonic DMC-FZ2": 4.54,
"Panasonic DMC-FZ30": 7.176,
"Panasonic DMC-FZ50": 7.176,
"Panasonic DMC-FZ5": 5.76,
"Panasonic DMC-FZ7": 5.76,
"Panasonic DMC-LC1": 8.8,
"Panasonic DMC-LC33": 5.76,
"Panasonic DMC-LX1": 8.5,
"Panasonic DMC-LZ2": 5.76,
"Panasonic DMC-TS4": 6.08,
"Panasonic DMC-TZ1": 5.75,
"Panasonic DMC-TZ3": 5.68,
"Panasonic DMC-TZ5": 6.12,
"PENTAX Corporation PENTAX *ist DL": 23.5,
"PENTAX Corporation PENTAX *ist DS2": 23.5,
"PENTAX Corporation PENTAX *ist DS": 23.5,
"PENTAX Corporation PENTAX K100D": 23.5,
"PENTAX Corporation PENTAX Optio 450": 7.176,
"PENTAX Corporation PENTAX Optio 550": 7.176,
"PENTAX Corporation PENTAX Optio E10": 5.76,
"PENTAX Corporation PENTAX Optio S40": 5.76,
"PENTAX Corporation PENTAX Optio S4": 5.76,
"PENTAX Corporation PENTAX Optio S50": 5.76,
"PENTAX Corporation PENTAX Optio S5i": 5.76,
"PENTAX Corporation PENTAX Optio S5z": 5.76,
"PENTAX Corporation PENTAX Optio SV": 5.76,
"PENTAX Corporation PENTAX Optio WP": 5.75,
"PHANTOM VISION FC200": 6.17,
"RICOH CaplioG3 modelM": 5.27,
"RICOH Caplio GX": 7.176,
"RICOH Caplio R30": 5.75,
"Samsung Digimax 301": 5.27,
"Samsung Techwin <Digimax i5, Samsung #1>": 5.76,
"SAMSUNG TECHWIN Pro 815": 8.8,
"SONY A7": 35.9,
"SONY A7R": 35.9,
"SONY DSC-F828": 8.8,
"SONY DSC-H1": 6.104,
"SONY DSC-H2": 5.744,
"SONY DSC-H3": 5.744,
"SONY DSC-H5": 5.744,
"SONY DSC-H7": 5.744,
"SONY DSC-H9": 5.744,
"SONY DSC-HX5V": 6.104,
"SONY DSC-HX50V": 6.17,
"SONY DSC-N12": 7.176,
"SONY DSC-P100": 7.176,
"SONY DSC-P10": 7.176,
"SONY DSC-P12": 7.176,
"SONY DSC-P150": 7.176,
"SONY DSC-P200": 7.176,
"SONY DSC-P52": 5.27,
"SONY DSC-P72": 5.27,
"SONY DSC-P73": 5.27,
"SONY DSC-P8": 5.27,
"SONY DSC-R1": 21.5,
"SONY DSC-S40": 5.27,
"SONY DSC-S600": 5.76,
"SONY DSC-T9": 7.18,
"SONY DSC-V1": 7.176,
"SONY DSC-W1": 7.176,
"SONY DSC-W30": 5.76,
"SONY DSC-W50": 5.75,
"SONY DSC-W5": 7.176,
"SONY DSC-W7": 7.176,
"SONY DSC-W80": 5.75,
"SONY ILCE-7S": 35.8,
"SONY ILCE-5100": 23.5,
"SONY NEX-5T": 23.4,
"SONY NEX-5R": 23.4,
"SONY NEX-7": 23.5,
"SONY SLT-A55": 23.5,
"SONY SLT-A57": 23.5,
"SONY SLT-A65": 23.5,
"SONY SLT-A77": 23.5,
"SONY SLT-A77 II": 23.5,
"SONY SLT-A77V": 23.5,
"SONY SLT-A99": 35.8,
"Vexcel UltraCam": 28.00,
"Mantis i23": 45.00
}
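The table above maps EXIF make/model strings to sensor (CCD) widths in millimetres. A minimal sketch of how such a table can be used to turn a focal length in millimetres into a focal length in pixels, assuming the usual pinhole relation focal_px = focal_mm * image_width_px / ccd_width_mm (the file name and camera key below are illustrative only; whether the pipeline scales by the image width or the longest side is not shown in this diff):

import json

def focal_length_in_pixels(make_model, focal_mm, image_width_px,
                           ccd_defs_path='ccd_defs.json'):
    # Load the {"MAKE MODEL": ccd_width_mm, ...} table shown above.
    with open(ccd_defs_path) as f:
        ccd_widths = json.load(f)
    ccd_width_mm = ccd_widths.get(make_model)
    if ccd_width_mm is None:
        return None  # unknown camera; a caller could fall back to --force-ccd
    # Scale by how many image pixels span one millimetre of sensor.
    return focal_mm * image_width_px / ccd_width_mm

# e.g. focal_length_in_pixels("Canon Canon PowerShot S100", 5.2, 4000) -> ~2795.7
# because 5.2 * 4000 / 7.44 = 2795.69...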

View file

@ -0,0 +1 @@
305acb70d8d2c350a8374fbb5028d914facf3fa4

View file

@ -0,0 +1 @@
db8d210f5994e4e1782de7fd7d51241aa5e82d3e

View file

@ -0,0 +1 @@
113f8182f61db99c24d194ddfba0f6b0a07fe272

View file

@ -0,0 +1 @@
2d1bc1543ef974c0c6a41db46c8c813c6a3b5692

View file

@ -0,0 +1 @@
fd7c406148027f41dc619fd833b9f55f9973a202

View file

@ -0,0 +1 @@
911fe91791a8506d3c641e51451a2f0a9121690f

View file

@ -0,0 +1 @@
715b5748537e5b44e0631d83da797e764531df8c

View file

@ -28,7 +28,7 @@ GeorefGCP::~GeorefGCP()
void GeorefGCP::extractGCP(std::istringstream &gcpStream)
{
gcpStream >> x_ >> y_ >> z_ >> pixelY_ >> pixelX_ >> image_;
gcpStream >> x_ >> y_ >> z_ >> pixelX_ >> pixelY_ >> image_;
}
Vec3 GeorefGCP::getPos()
@ -865,7 +865,7 @@ void Georef::performGeoreferencingWithGCP()
if (nrGCPUsable < 3)
{
throw GeorefException("Less than 3 GCPs have correspondences in the generated model.");
throw GeorefException("Fewer than 3 GCPs have correspondences in the generated model.");
}
size_t gcp0; size_t gcp1; size_t gcp2;

View file

@ -1,228 +1,255 @@
import argparse
import context
# parse arguments
processopts = ['resize', 'opensfm', 'slam', 'cmvs', 'pmvs',
'odm_meshing', 'odm_texturing', 'odm_georeferencing',
'odm_orthophoto']
class RerunFrom(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, processopts[processopts.index(values):])
parser = argparse.ArgumentParser(description='OpenDroneMap')
parser.add_argument('--project-path',
metavar='<string>',
help='Path to the project to process')
def config():
parser.add_argument('--project-path',
metavar='<string>',
help='Path to the project to process')
parser.add_argument('--resize-to', # currently doesn't support 'orig'
metavar='<integer>',
default=2400,
type=int,
help='resizes images by the largest side')
parser.add_argument('--resize-to', # currently doesn't support 'orig'
metavar='<integer>',
default=2400,
type=int,
help='resizes images by the largest side')
parser.add_argument('--start-with', '-s',
metavar='<string>',
default='resize',
choices=processopts,
help=('Can be one of: ' + ' | '.join(processopts)))
parser.add_argument('--start-with', '-s',
metavar='<string>',
default='resize',
choices=processopts,
help=('Can be one of: ' + ' | '.join(processopts)))
parser.add_argument('--end-with', '-e',
metavar='<string>',
default='odm_orthophoto',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
parser.add_argument('--end-with', '-e',
metavar='<string>',
default='odm_orthophoto',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
parser.add_argument('--rerun', '-r',
metavar='<string>',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
rerun = parser.add_mutually_exclusive_group()
parser.add_argument('--video',
metavar='<string>',
help='Path to the video file to process')
rerun.add_argument('--rerun', '-r',
metavar='<string>',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
parser.add_argument('--slam-config',
metavar='<string>',
help='Path to config file for orb-slam')
rerun.add_argument('--rerun-all',
action='store_true',
default=False,
help='force rerun of all tasks')
parser.add_argument('--force-focal',
metavar='<positive float>',
type=float,
help=('Override the focal length information for the '
'images'))
rerun.add_argument('--rerun-from',
action=RerunFrom,
metavar='<string>',
choices=processopts,
help=('Can be one of:' + ' | '.join(processopts)))
parser.add_argument('--force-ccd',
metavar='<positive float>',
type=float,
help='Override the ccd width information for the images')
parser.add_argument('--video',
metavar='<string>',
help='Path to the video file to process')
parser.add_argument('--min-num-features',
metavar='<integer>',
default=4000,
type=int,
help=('Minimum number of features to extract per image. '
'More features leads to better results but slower '
'execution. Default: %(default)s'))
parser.add_argument('--slam-config',
metavar='<string>',
help='Path to config file for orb-slam')
parser.add_argument('--matcher-threshold',
metavar='<percent>',
default=2.0,
type=float,
help=('Ignore matched keypoints if the two images share '
'less than <float> percent of keypoints. Default:'
' %(default)s'))
parser.add_argument('--force-focal',
metavar='<positive float>',
type=float,
help=('Override the focal length information for the '
'images'))
parser.add_argument('--matcher-ratio',
metavar='<float>',
default=0.6,
type=float,
help=('Ratio of the distance to the next best matched '
'keypoint. Default: %(default)s'))
parser.add_argument('--force-ccd',
metavar='<positive float>',
type=float,
help='Override the ccd width information for the images')
parser.add_argument('--matcher-neighbors',
type=int,
metavar='<integer>',
default=8,
help='Number of nearest images to pre-match based on GPS '
'exif data. Set to 0 to skip pre-matching. '
'Neighbors works together with Distance parameter, '
'set both to 0 to not use pre-matching. OpenSFM '
'uses both parameters at the same time, Bundler '
'uses only one which has a value, preferring the '
'Neighbors parameter. Default: %(default)s')
parser.add_argument('--min-num-features',
metavar='<integer>',
default=4000,
type=int,
help=('Minimum number of features to extract per image. '
'More features leads to better results but slower '
'execution. Default: %(default)s'))
parser.add_argument('--matcher-distance',
metavar='<integer>',
default=0,
type=int,
help='Distance threshold in meters to find pre-matching '
'images based on GPS exif data. Set to 0 to skip '
'pre-matching. Default: %(default)s')
parser.add_argument('--matcher-threshold',
metavar='<percent>',
default=2.0,
type=float,
help=('Ignore matched keypoints if the two images share '
'less than <float> percent of keypoints. Default:'
' %(default)s'))
parser.add_argument('--cmvs-maxImages',
metavar='<integer>',
default=500,
type=int,
help='The maximum number of images per cluster. '
parser.add_argument('--matcher-ratio',
metavar='<float>',
default=0.6,
type=float,
help=('Ratio of the distance to the next best matched '
'keypoint. Default: %(default)s'))
parser.add_argument('--matcher-neighbors',
type=int,
metavar='<integer>',
default=8,
help='Number of nearest images to pre-match based on GPS '
'exif data. Set to 0 to skip pre-matching. '
'Neighbors works together with Distance parameter, '
'set both to 0 to not use pre-matching. OpenSFM '
'uses both parameters at the same time, Bundler '
'uses only one which has a value, preferring the '
'Neighbors parameter. Default: %(default)s')
parser.add_argument('--matcher-distance',
metavar='<integer>',
default=0,
type=int,
help='Distance threshold in meters to find pre-matching '
'images based on GPS exif data. Set to 0 to skip '
'pre-matching. Default: %(default)s')
parser.add_argument('--cmvs-maxImages',
metavar='<integer>',
default=500,
type=int,
help='The maximum number of images per cluster. '
'Default: %(default)s')
parser.add_argument('--pmvs-level',
metavar='<positive integer>',
default=1,
type=int,
help=('The level in the image pyramid that is used '
'for the computation. see '
'http://www.di.ens.fr/pmvs/documentation.html for '
'more pmvs documentation. Default: %(default)s'))
parser.add_argument('--pmvs-level',
metavar='<positive integer>',
default=1,
type=int,
help=('The level in the image pyramid that is used '
'for the computation. see '
'http://www.di.ens.fr/pmvs/documentation.html for '
'more pmvs documentation. Default: %(default)s'))
parser.add_argument('--pmvs-csize',
metavar='< positive integer>',
default=2,
type=int,
help='Cell size controls the density of reconstructions. '
parser.add_argument('--pmvs-csize',
metavar='< positive integer>',
default=2,
type=int,
help='Cell size controls the density of reconstructions. '
'Default: %(default)s')
parser.add_argument('--pmvs-threshold',
metavar='<float: -1.0 <= x <= 1.0>',
default=0.7,
type=float,
help=('A patch reconstruction is accepted as a success '
'and kept if its associated photometric consistency '
'measure is above this threshold. Default: %(default)s'))
parser.add_argument('--pmvs-threshold',
metavar='<float: -1.0 <= x <= 1.0>',
default=0.7,
type=float,
help=('A patch reconstruction is accepted as a success '
'and kept if its associated photometric consistency '
'measure is above this threshold. Default: %(default)s'))
parser.add_argument('--pmvs-wsize',
metavar='<positive integer>',
default=7,
type=int,
help='pmvs samples wsize x wsize pixel colors from '
'each image to compute photometric consistency '
'score. For example, when wsize=7, 7x7=49 pixel '
'colors are sampled in each image. Increasing the '
'value leads to more stable reconstructions, but '
'the program becomes slower. Default: %(default)s')
parser.add_argument('--pmvs-wsize',
metavar='<positive integer>',
default=7,
type=int,
help='pmvs samples wsize x wsize pixel colors from '
'each image to compute photometric consistency '
'score. For example, when wsize=7, 7x7=49 pixel '
'colors are sampled in each image. Increasing the '
'value leads to more stable reconstructions, but '
'the program becomes slower. Default: %(default)s')
parser.add_argument('--pmvs-minImageNum',
metavar='<positive integer>',
default=3,
type=int,
help=('Each 3D point must be visible in at least '
'minImageNum images for being reconstructed. 3 is '
'suggested in general. Default: %(default)s'))
parser.add_argument('--pmvs-minImageNum',
metavar='<positive integer>',
default=3,
type=int,
help=('Each 3D point must be visible in at least '
'minImageNum images for being reconstructed. 3 is '
'suggested in general. Default: %(default)s'))
parser.add_argument('--pmvs-num-cores',
metavar='<positive integer>',
default=1,
type=int,
help=('The maximum number of cores to use in dense '
'reconstruction. Default: %(default)s'))
parser.add_argument('--pmvs-num-cores',
metavar='<positive integer>',
default=context.num_cores,
type=int,
help=('The maximum number of cores to use in dense '
'reconstruction. Default: %(default)s'))
parser.add_argument('--odm_meshing-maxVertexCount',
metavar='<positive integer>',
default=100000,
type=int,
help=('The maximum vertex count of the output mesh '
'Default: %(default)s'))
parser.add_argument('--odm_meshing-maxVertexCount',
metavar='<positive integer>',
default=100000,
type=int,
help=('The maximum vertex count of the output mesh '
'Default: %(default)s'))
parser.add_argument('--odm_meshing-octreeDepth',
metavar='<positive integer>',
default=9,
type=int,
help=('Oct-tree depth used in the mesh reconstruction, '
'increase to get more vertices, recommended '
'values are 8-12. Default: %(default)s'))
parser.add_argument('--odm_meshing-octreeDepth',
metavar='<positive integer>',
default=9,
type=int,
help=('Oct-tree depth used in the mesh reconstruction, '
'increase to get more vertices, recommended '
'values are 8-12. Default: %(default)s'))
parser.add_argument('--odm_meshing-samplesPerNode',
metavar='<float >= 1.0>',
default=1.0,
type=float,
help=('Number of points per octree node, recommended '
'and default value: %(default)s'))
parser.add_argument('--odm_meshing-samplesPerNode',
metavar='<float >= 1.0>',
default=1.0,
type=float,
help=('Number of points per octree node, recommended '
'and default value: %(default)s'))
parser.add_argument('--odm_meshing-solverDivide',
metavar='<positive integer>',
default=9,
type=int,
help=('Oct-tree depth at which the Laplacian equation '
'is solved in the surface reconstruction step. '
'Increasing this value increases computation '
'times slightly but helps reduce memory usage. '
'Default: %(default)s'))
parser.add_argument('--odm_meshing-solverDivide',
metavar='<positive integer>',
default=9,
type=int,
help=('Oct-tree depth at which the Laplacian equation '
'is solved in the surface reconstruction step. '
'Increasing this value increases computation '
'times slightly but helps reduce memory usage. '
'Default: %(default)s'))
parser.add_argument('--odm_texturing-textureResolution',
metavar='<positive integer>',
default=4096,
type=int,
help=('The resolution of the output textures. Must be '
'greater than textureWithSize. Default: %(default)s'))
parser.add_argument('--odm_texturing-textureResolution',
metavar='<positive integer>',
default=4096,
type=int,
help=('The resolution of the output textures. Must be '
'greater than textureWithSize. Default: %(default)s'))
parser.add_argument('--odm_texturing-textureWithSize',
metavar='<positive integer>',
default=3600,
type=int,
help=('The resolution to rescale the images performing '
'the texturing. Default: %(default)s'))
parser.add_argument('--odm_texturing-textureWithSize',
metavar='<positive integer>',
default=3600,
type=int,
help=('The resolution to rescale the images performing '
'the texturing. Default: %(default)s'))
parser.add_argument('--odm_georeferencing-gcpFile',
metavar='<path string>',
default='gcp_list.txt',
help=('path to the file containing the ground control '
'points used for georeferencing. Default: '
'%(default)s. The file needs to '
'be on the following line format: \neasting '
'northing height pixelrow pixelcol imagename'))
parser.add_argument('--odm_georeferencing-gcpFile',
metavar='<path string>',
default='gcp_list.txt',
help=('path to the file containing the ground control '
'points used for georeferencing. Default: '
'%(default)s. The file needs to '
'be on the following line format: \neasting '
'northing height pixelrow pixelcol imagename'))
parser.add_argument('--odm_georeferencing-useGcp',
action = 'store_true',
default = False,
help = 'Enabling GCPs from the file above. The GCP file '
parser.add_argument('--odm_georeferencing-useGcp',
action='store_true',
default=False,
help='Enabling GCPs from the file above. The GCP file '
'is not used by default.')
parser.add_argument('--odm_orthophoto-resolution',
metavar='<float > 0.0>',
default=20.0,
type=float,
help=('Orthophoto ground resolution in pixels/meter. '
'Default: %(default)s'))
parser.add_argument('--odm_orthophoto-resolution',
metavar='<float > 0.0>',
default=20.0,
type=float,
help=('Orthophoto ground resolution in pixels/meter. '
'Default: %(default)s'))
parser.add_argument('--zip-results',
action='store_true',
default=False,
help='compress the results using gunzip')
parser.add_argument('--zip-results',
action='store_true',
default=False,
help='compress the results using gunzip')
args = vars(parser.parse_args())
parser.add_argument('--time',
action='store_true',
default=False,
help='Generates a benchmark file with runtime info\n'
'Default: %(default)s')
return parser.parse_args()
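The new --rerun-from option relies on the RerunFrom action defined near the top of this file: instead of storing just the named step, it stores the slice of processopts from that step to the end, so every later stage is re-run as well. A short standalone sketch of that behaviour, reusing the same processopts list:

import argparse

processopts = ['resize', 'opensfm', 'slam', 'cmvs', 'pmvs',
               'odm_meshing', 'odm_texturing', 'odm_georeferencing',
               'odm_orthophoto']

class RerunFrom(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        # Store the chosen step and everything after it.
        setattr(namespace, self.dest, processopts[processopts.index(values):])

p = argparse.ArgumentParser()
p.add_argument('--rerun-from', action=RerunFrom, choices=processopts)
args = p.parse_args(['--rerun-from', 'pmvs'])
print(args.rerun_from)
# ['pmvs', 'odm_meshing', 'odm_texturing', 'odm_georeferencing', 'odm_orthophoto']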

View file

@ -7,7 +7,8 @@ scripts_path = os.path.abspath(os.path.dirname(__file__))
root_path, _ = os.path.split(scripts_path)
superbuild_path = os.path.join(root_path, 'SuperBuild')
ccd_widths_path = os.path.join(root_path, 'data/ccd_defs.json')
tests_path = os.path.join(root_path, 'tests')
tests_data_path = os.path.join(root_path, 'tests/test_data')
# add opencv to python path
pyopencv_path = os.path.join(superbuild_path, 'install/lib/python2.7/dist-packages')
@ -15,6 +16,7 @@ sys.path.append(pyopencv_path)
# define opensfm path
opensfm_path = os.path.join(superbuild_path, "src/opensfm")
ccd_widths_path = os.path.join(opensfm_path, 'opensfm/data/sensor_data.json')
# define orb_slam2 path
orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2")
@ -26,6 +28,7 @@ pmvs2_path = os.path.join(superbuild_path, "install/bin/pmvs2")
# define txt2las path
txt2las_path = os.path.join(superbuild_path, 'src/las-tools/bin')
pdal_path = os.path.join(superbuild_path, 'build/pdal/bin')
# define odm modules path
odm_modules_path = os.path.join(root_path, "build/bin")

View file

@ -1,25 +1,32 @@
import os
def get_files_list(path_dir):
return os.listdir(path_dir)
def absolute_path_file(path_file):
return os.path.abspath(path_file)
def extract_file_from_path_file(path_file):
path, file = os.path.split(path_file)
return file
def extract_path_from_file(file):
path_file = os.path.abspath(os.path.dirname(file))
path, file = os.path.split(path_file)
return path
def join_paths(path1, path2):
return os.path.join(path1, path2)
def file_exists(path_file):
return os.path.isfile(path_file)
return os.path.isfile(path_file)
def dir_exists(dirname):
return os.path.isdir(dirname)
return os.path.isdir(dirname)

View file

@ -1,3 +1,5 @@
import logging
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
@ -5,14 +7,33 @@ WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
# TODO add file handling
logging.addLevelName(logging.INFO, '%s[%s]' % (OKBLUE, logging.getLevelName(logging.INFO)))
logging.addLevelName(logging.WARNING, '%s[%s]' % (WARNING, logging.getLevelName(logging.WARNING)))
logging.addLevelName(logging.ERROR, '%s[%s]' % (FAIL, logging.getLevelName(logging.ERROR)))
logging.addLevelName(logging.DEBUG, '%s[%s]' % (OKGREEN, logging.getLevelName(logging.DEBUG)))
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)-14s %(message)s' + ENDC)
def ODM_INFO(str):
print OKBLUE + '[INFO] ' + str + ENDC
logging.info(str)
def ODM_WARNING(str):
print WARNING + '[WARNING] ' + str + ENDC
logging.warning(str)
def ODM_ERROR(str):
print FAIL + '[ERROR] ' + str + ENDC
logging.error(str)
def ODM_EXCEPTION(str):
logging.exception(str)
def ODM_DEBUG(str):
print OKGREEN + '[DEBUG] ' + str + ENDC
logging.debug(str)
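With this change the ODM_* helpers delegate to the standard logging module instead of bare print statements, and the ANSI colour codes are folded into the level names. Roughly, calls like the following end up on stdout with a coloured, left-padded level tag (colour escapes omitted in these comments):

from opendm import log

log.ODM_INFO('Running ODM CMVS Cell')    # [INFO]      Running ODM CMVS Cell
log.ODM_WARNING('No coordinates file')   # [WARNING]   No coordinates file
log.ODM_DEBUG('Loaded IMG_0001.jpg')     # [DEBUG]     Loaded IMG_0001.jpg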

View file

@ -4,14 +4,17 @@ import json
import datetime
import sys
import subprocess
import string
from opendm import context
from opendm import log
def get_ccd_widths():
"""Return the CCD Width of the camera listed in the JSON defs file."""
with open(context.ccd_widths_path) as jsonFile:
return json.load(jsonFile)
with open(context.ccd_widths_path) as f:
sensor_data = json.loads(f.read())
return dict(zip(map(string.lower, sensor_data.keys()), sensor_data.values()))
def run(cmd):
"""Run a system command"""
@ -19,14 +22,31 @@ def run(cmd):
returnCode = os.system(cmd)
if (returnCode != 0):
# TODO(edgar): add as log.ODM_ERROR
sys.exit("\nquitting cause: \n\t" + cmd + "\nreturned with code " +
log.ODM_ERROR("quitting cause: \n\t" + cmd + "\nreturned with code " +
str(returnCode) + ".\n")
sys.exit('An error occurred. Check stdout above or the logs.')
def now():
"""Return the current time"""
return datetime.datetime.now().strftime('%a %b %d %H:%M:%S %Z %Y')
def now_raw():
return datetime.datetime.now()
def benchmark(start, benchmarking_file, process):
"""
runs a benchmark with a start datetime object
:return: the running time (delta)
"""
# Write to benchmark file
delta = (datetime.datetime.now() - start).total_seconds()
with open(benchmarking_file, 'a') as b:
b.write('%s runtime: %s seconds\n' % (process, delta))
def run_and_return(cmdSrc, cmdDest=None):
"""Run a system command and return the output"""
process = subprocess.Popen(cmdSrc, stdout=subprocess.PIPE, shell=True)
@ -35,8 +55,8 @@ def run_and_return(cmdSrc, cmdDest=None):
def mkdir_p(path):
'''Make a directory including parent directories.
'''
"""Make a directory including parent directories.
"""
try:
os.makedirs(path)
except os.error as exc:
@ -50,26 +70,3 @@ def calculate_EPSG(utmZone, south):
return 32700 + utmZone
else:
return 32600 + utmZone
def parse_coordinate_system():
"""Write attributes to jobOptions from coord file"""
if os.path.isfile(jobOptions['jobDir'] +
'/odm_georeferencing/coordFile.txt'):
with open(jobOptions['jobDir'] + '/odm_georeferencing/coordFile.txt') as f:
for lineNumber, line in enumerate(f):
if lineNumber == 0:
tokens = line.split(' ')
if len(tokens) == 3:
utmZoneString = tokens[2][0:len(tokens[2])-2].strip()
utmSouthBool = (tokens[2][len(tokens[2])-2].strip() == 'S')
jobOptions['csString'] = '+datum=WGS84 +proj=utm +zone=' \
+ utmZoneString + (' +south' if utmSouthBool else '')
jobOptions['epsg'] = calculate_EPSG(int(utmZoneString), utmSouthBool)
elif lineNumber == 1:
tokens = line.split(' ')
if len(tokens) == 2:
jobOptions['utmEastOffset'] = int(tokens[0].strip())
jobOptions['utmNorthOffset'] = int(tokens[1].strip())
else:
break
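The new now_raw()/benchmark() pair is what the cells further down use to time each stage when --time is passed, and calculate_EPSG() turns a UTM zone into an EPSG code. A small sketch of the intended usage (the 'CMVS' label mirrors the call in the CMVS cell; benchmark.txt is the file ODM_Tree.benchmarking points at; the runtime value is made up):

from opendm import system

start = system.now_raw()                  # datetime captured before the stage runs
# ... run the stage ...
system.benchmark(start, 'benchmark.txt', 'CMVS')
# appends a line like "CMVS runtime: 12.3 seconds" to benchmark.txt

# calculate_EPSG maps a UTM zone to the corresponding EPSG code:
system.calculate_EPSG(17, south=False)    # -> 32617
system.calculate_EPSG(17, south=True)     # -> 32717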

View file

@ -8,143 +8,144 @@ from scripts.resize import resize
from scripts.opensfm import opensfm
# Define pipeline tasks
tasks_dict = { '1': 'resize',
'2': 'opensfm',
'3': 'cmvs',
'4': 'pmvs',
'5': 'odm_meshing',
'6': 'odm_texturing',
'7': 'odm_georeferencing',
'8': 'odm_orthophoto',
'9': 'zip_results' }
tasks_dict = {'1': 'resize',
'2': 'opensfm',
'3': 'cmvs',
'4': 'pmvs',
'5': 'odm_meshing',
'6': 'odm_texturing',
'7': 'odm_georeferencing',
'8': 'odm_orthophoto',
'9': 'zip_results'}
class ODMTaskManager(object):
"""docstring for ODMTaskManager"""
def __init__(self, odm_app):
self.odm_app = odm_app
self.initial_task_id = 0
self.current_task_id = 0
self.final_task_id = len(tasks_dict)
self.tasks = self.init_tasks(tasks_dict, self.odm_app)
"""docstring for ODMTaskManager"""
def init_tasks(self, _tasks_dict, _odm_app):
# dict to store tasks objects
tasks = {}
# loop over tasks dict
for key, in _tasks_dict:
# instantiate and append ODMTask
task_name = _tasks_dict[key]
tasks[key] = ODMTask(key, task_name)
def __init__(self, odm_app):
self.odm_app = odm_app
self.initial_task_id = 0
self.current_task_id = 0
self.final_task_id = len(tasks_dict)
self.tasks = self.init_tasks(tasks_dict, self.odm_app)
# setup tasks
if task_name == 'resize':
# setup this task
command = resize
inputs = { 'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos }
def init_tasks(self, _tasks_dict, _odm_app):
# dict to store tasks objects
tasks = {}
# loop over tasks dict
for key, in _tasks_dict:
# instantiate and append ODMTask
task_name = _tasks_dict[key]
tasks[key] = ODMTask(key, task_name)
elif task_name == 'opensfm':
# setup this task
command = opensfm
inputs = { 'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos }
# setup tasks
if task_name == 'resize':
# setup this task
command = resize
inputs = {'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos}
elif task_name == 'cmvs':
# setup this task
command = None
inputs = {}
elif task_name == 'opensfm':
# setup this task
command = opensfm
inputs = {'project_path': _odm_app.project_path,
'args': _odm_app.args,
'photos': _odm_app.photos}
elif task_name == 'pmvs':
# setup this task
command = None
inputs = {}
elif task_name == 'cmvs':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_meshing':
# setup this task
command = None
inputs = {}
elif task_name == 'pmvs':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_texturing':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_meshing':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_georeferencing':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_texturing':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_orthophoto':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_georeferencing':
# setup this task
command = None
inputs = {}
elif task_name == 'zip_results':
# setup this task
command = None
inputs = {}
elif task_name == 'odm_orthophoto':
# setup this task
command = None
inputs = {}
else:
log.ODM_ERROR('task_name %s is not valid' % task_name)
elif task_name == 'zip_results':
# setup this task
command = None
inputs = {}
# setup task configuration
task = tasks[key]
task.command = command
task.inputs = inputs
else:
log.ODM_ERROR('task_name %s is not valid' % task_name)
return tasks
# setup task configuration
task = tasks[key]
task.command = command
task.inputs = inputs
def run_tasks(self):
return tasks
#curr_task = self.tasks['resize']
def run_tasks(self):
#self.tasks['resize']
# curr_task = self.tasks['resize']
for id in range(self.initial_task_id, self.final_task_id + 1):
# catch task with current id
task = self.tasks[str(id)]
# update task tracking
log.ODM_INFO('Running task %s: %s' % (task.id, task.name))
self.current_task_id = task.id
# run task
task.state = task.run()
if task.state == 2:
log.ODM_INFO('Succeeded task %s: %s - %s' % (task.id, task.name, system.now()))
else:
log.ODM_ERROR('Aborted task %s: %s' % (task.id, task.name))
# self.tasks['resize']
for id in range(self.initial_task_id, self.final_task_id + 1):
# catch task with current id
task = self.tasks[str(id)]
# update task tracking
log.ODM_INFO('Running task %s: %s' % (task.id, task.name))
self.current_task_id = task.id
# run task
task.state = task.run()
if task.state == 2:
log.ODM_INFO('Succeeded task %s: %s - %s' % (task.id, task.name, system.now()))
else:
log.ODM_ERROR('Aborted task %s: %s' % (task.id, task.name))
class ODMTask(object):
"""docstring for ODMTask"""
def __init__(self, id, name):
# task definition
self.id = id
self.name = name
# task i/o
self.command = None
self.inputs = {}
# Current task state (0:waiting, 1:running, 2:succeeded, 3:failed)
# By default we set a task in waiting state
self.state = 0
"""docstring for ODMTask"""
# Launch task
def run(self):
# while doing something
self.state = 1
return self.launch_command()
def __init__(self, id, name):
# task definition
self.id = id
self.name = name
# task i/o
self.command = None
self.inputs = {}
# Current task state (0:waiting, 1:running, 2:succeeded, 3:failed)
# By default we set a task in waiting state
self.state = 0
def launch_command(self):
if self.command is None:
log.ODM_ERROR('Call method for task %s not defined' % self.name)
return 3 # failed
# run command
try:
succeed = self.command(**self.inputs)
return 2 if succeed else 3 # 2:succeed, 3:failed
except Exception, e:
log.ODM_ERROR(str(e))
return 3 # failed
# Launch task
def run(self):
# while doing something
self.state = 1
return self.launch_command()
def launch_command(self):
if self.command is None:
log.ODM_ERROR('Call method for task %s not defined' % self.name)
return 3 # failed
# run command
try:
succeed = self.command(**self.inputs)
return 2 if succeed else 3 # 2:succeed, 3:failed
except Exception, e:
log.ODM_ERROR(str(e))
return 3 # failed

View file

@ -1,18 +1,19 @@
import os
import cv2
import pyexiv2
import subprocess
import re
from fractions import Fraction
from opensfm.exif import sensor_string
import log
import io
import system
import context
class ODM_Photo:
""" ODMPhoto - a class for ODMPhotos
"""
def __init__(self, path_file, force_focal, force_ccd):
# general purpose
self.path_file = path_file
@ -26,14 +27,15 @@ class ODM_Photo:
# other attributes
self.camera_make = None
self.camera_model = None
self.make_model = None
# parse values from metadata
self.parse_pyexiv2_values(self.path_file, force_focal, force_ccd)
# compute focal lenght into pixels
# compute focal length into pixels
self.update_focal()
# print log message
log.ODM_DEBUG('Loaded %s | dimensions: %s x %s | focal: %s | ccd: %s' % \
(self.filename, self.width, self.height, self.focal_length, self.ccd_width))
log.ODM_DEBUG('Loaded %s | camera: %s | dimensions: %s x %s | focal: %s | ccd: %s' %
(self.filename, self.make_model, self.width, self.height, self.focal_length, self.ccd_width))
def update_focal(self):
# compute focal length in pixels
@ -59,36 +61,46 @@ class ODM_Photo:
try:
val = metadata[key].value
# parse tag names
if key == 'Exif.Image.Make': self.camera_make = val
elif key == 'Exif.Image.Model': self.camera_model = val
elif key == 'Exif.Photo.FocalLength': self.focal_length = float(val)
except Exception, e:
if key == 'Exif.Image.Make':
self.camera_make = val
elif key == 'Exif.Image.Model':
self.camera_model = val
elif key == 'Exif.Photo.FocalLength':
self.focal_length = float(val)
except (pyexiv2.ExifValueError, ValueError) as e:
pass
self.make_model = sensor_string(self.camera_make, self.camera_model)
# needed to do that since sometimes metadata contains wrong data
img = cv2.imread(_path_file)
self.width = img.shape[1]
self.height = img.shape[0]
# force focal and ccd_width with user parameter
if _force_focal: self.focal_length = _force_focal
if _force_ccd: self.ccd_width = _force_ccd
if _force_focal:
self.focal_length = _force_focal
if _force_ccd:
self.ccd_width = _force_ccd
# find ccd_width from file if needed
if self.ccd_width is None and self.camera_model is not None:
# load ccd_widths from file
ccd_widths = system.get_ccd_widths()
# search ccd by camera model
key = [x for x in ccd_widths.keys() if self.camera_model in x]
key = [x for x in ccd_widths.keys() if self.make_model in x]
# convert to float if found
if key: self.ccd_width = float(ccd_widths[key[0]])
# else:
# log.ODM_ERROR('Could not find ccd_width in file')
if key:
self.ccd_width = float(ccd_widths[key[0]])
else:
log.ODM_WARNING('Could not find ccd_width in file. Use --force-ccd or edit the sensor_data.json '
'file to manually input ccd width')
# TODO: finish this class
class ODM_Reconstruction(object):
"""docstring for ODMReconstruction"""
def __init__(self, arg):
super(ODMReconstruction, self).__init__()
self.arg = arg
@ -96,6 +108,7 @@ class ODM_Reconstruction(object):
class ODM_GCPoint(object):
"""docstring for ODMPoint"""
def __init__(self, x, y, z):
self.x = x
self.y = y
@ -104,6 +117,7 @@ class ODM_GCPoint(object):
class ODM_GeoRef(object):
"""docstring for ODMUtmZone"""
def __init__(self):
self.datum = 'WGS84'
self.epsg = None
@ -123,40 +137,86 @@ class ODM_GeoRef(object):
log.ODM_ERROR('Unknown pole format %s' % _pole)
return
def coord_to_fractions(self, coord, refs):
deg_dec = abs(float(coord))
deg = int(deg_dec)
minute_dec = (deg_dec-deg)*60
minute = int(minute_dec)
def convert_to_las(self, _file):
sec_dec = (minute_dec-minute)*60
sec_dec = round(sec_dec,3)
sec_denominator = 1000
sec_numerator = int(sec_dec*sec_denominator)
if float(coord) >= 0:
latRef = refs[0]
else:
latRef = refs[1]
output = str(deg) + '/1 ' + str(minute) + '/1 ' + str(sec_numerator) + '/' + str(sec_denominator)
return output, latRef
def convert_to_las(self, _file, pdalXML):
if not self.epsg:
log.ODM_ERROR('Empty EPSG: Could not convert to LAS')
return
kwargs = { 'bin': context.txt2las_path,
'f_in': _file,
'f_out': _file + '.laz',
'east': self.utm_east_offset,
'north': self.utm_north_offset,
'epsg': self.epsg }
kwargs = {'bin': context.pdal_path,
'f_in': _file,
'f_out': _file + '.las',
'east': self.utm_east_offset,
'north': self.utm_north_offset,
'epsg': self.epsg,
'xml': pdalXML}
# call txt2las
system.run('{bin}/txt2las -i {f_in} -o {f_out} -skip 30 -parse xyzRGBssss ' \
'-set_scale 0.01 0.01 0.01 -set_offset {east} {north} 0 ' \
'-translate_xyz 0 -epsg {epsg}'.format(**kwargs))
# system.run('{bin}/txt2las -i {f_in} -o {f_out} -skip 30 -parse xyzRGBssss ' \
# '-set_scale 0.01 0.01 0.01 -set_offset {east} {north} 0 ' \
# '-translate_xyz 0 -epsg {epsg}'.format(**kwargs))
#
# create pipeline file transform.xml to enable transformation
pipelineXml = '<?xml version=\"1.0\" encoding=\"utf-8\"?>'
pipelineXml += '<Pipeline version=\"1.0\">'
pipelineXml += ' <Writer type=\"writers.las\">'
pipelineXml += ' <Option name=\"filename\">'
pipelineXml += ' transformed.las'
pipelineXml += ' </Option>'
pipelineXml += ' <Filter type=\"filters.transformation\">'
pipelineXml += ' <Option name=\"matrix\">'
pipelineXml += ' 1 0 0 {east}'.format(**kwargs)
pipelineXml += ' 0 1 0 {north}'.format(**kwargs)
pipelineXml += ' 0 0 1 0'
pipelineXml += ' 0 0 0 1'
pipelineXml += ' </Option>'
pipelineXml += ' <Reader type=\"readers.ply\">'
pipelineXml += ' <Option name=\"filename\">'
pipelineXml += ' untransformed.ply'
pipelineXml += ' </Option>'
pipelineXml += ' </Reader>'
pipelineXml += ' </Filter>'
pipelineXml += ' </Writer>'
pipelineXml += '</Pipeline>'
with open(pdalXML, 'w') as f:
f.write(pipelineXml)
# call pdal
system.run('{bin}/pdal pipeline -i {xml} --readers.ply.filename={f_in} '
'--writers.las.filename={f_out}'.format(**kwargs))
def utm_to_latlon(self, _file, _photo, idx):
gcp = self.gcps[idx]
kwargs = { 'datum': self.datum,
'zone': self.utm_zone,
'file': _file,
'x': gcp.x + self.utm_east_offset,
'y': gcp.y + self.utm_north_offset,
'z': gcp.z }
kwargs = {'epsg': self.epsg,
'file': _file,
'x': gcp.x + self.utm_east_offset,
'y': gcp.y + self.utm_north_offset,
'z': gcp.z}
latlon = system.run_and_return('echo {x} {y} | cs2cs +proj=utm ' \
'+datum={datum} +ellps={datum} +zone={zone} +units=m +to ' \
'+proj=latlon +ellps={datum}'.format(**kwargs)).split()
latlon = system.run_and_return('echo {x} {y} {z} '.format(**kwargs),
'gdaltransform -s_srs \"EPSG:{epsg}\" '
'-t_srs \"EPSG:4326\"'.format(**kwargs)).split()
# Example: 83d18'16.285"W
# Example: 41d2'11.789"N
@ -169,104 +229,110 @@ class ODM_GeoRef(object):
alt_str = ''
else:
log.ODM_ERROR('Something went wrong %s' % latlon)
tokens = re.split("[d '\"]+", lon_str)
if len(tokens) >= 4:
lon_deg, lon_min, lon_sec = tokens[:3]
lon_sec_frac = Fraction(lon_sec)
lon_sec_numerator = str(lon_sec_frac._numerator)
lon_sec_denominator = str(lon_sec_frac._denominator)
lon_ref = tokens[3]
tokens = re.split("[d '\"]+", lat_str)
if len(tokens) >= 4:
lat_deg, lat_min, lat_sec = tokens[:3]
lat_sec_frac = Fraction(lat_sec)
lat_sec_numerator = str(lat_sec_frac._numerator)
lat_sec_denominator = str(lat_sec_frac._denominator)
lat_ref = tokens[3]
alt_numerator = arc_denominator = 0 # BUG: arc_denominator is never used
if alt_str:
alt_frac = Fraction(alt_str)
alt_numerator = alt_frac._numerator
alt_denominator = alt_frac._denominator
lat_frac = self.coord_to_fractions(latlon[1], ['N', 'S'])
lon_frac = self.coord_to_fractions(latlon[0], ['E', 'W'])
# read image metadata
metadata = pyexiv2.ImageMetadata(_photo.path_file)
metadata.read()
## set values
# set values
# GPS latitude
key = 'Exif.GPSInfo.GPSLatitude'
value = [Fraction(int(lat_deg), 1), Fraction(int(lat_min), 1), \
Fraction(int(lat_sec_numerator), int(lat_sec_denominator))]
metadata[key] = pyexiv2.ExifTag(key, value)
value = lat_frac[0].split(' ')
log.ODM_DEBUG('lat_frac: %s %s %s' % (value[0], value[1], value[2]))
metadata[key] = pyexiv2.ExifTag(key,
[Fraction(value[0]),
Fraction(value[1]),
Fraction(value[2])])
key = 'Exif.GPSInfo.GPSLatitudeRef'
value = '%s' % lat_ref
value = lat_frac[1]
metadata[key] = pyexiv2.ExifTag(key, value)
# GPS longitude
key = 'Exif.GPSInfo.GPSLongitude'
value = [Fraction(int(lon_deg), 1), Fraction(int(lon_min), 1), \
Fraction(int(lon_sec_numerator), int(lon_sec_denominator))]
metadata[key] = pyexiv2.ExifTag(key, value)
value = lon_frac[0].split(' ')
metadata[key] = pyexiv2.ExifTag(key,
[Fraction(value[0]),
Fraction(value[1]),
Fraction(value[2])])
key = 'Exif.GPSInfo.GPSLongitudeRef'
value = '%s' % lon_ref
value = lon_frac[1]
metadata[key] = pyexiv2.ExifTag(key, value)
# GPS altitude
altitude = abs(int(float(latlon[2])*100))
key = 'Exif.GPSInfo.GPSAltitude'
value = Fraction(int(gcp.z), 1)
value = Fraction(altitude, 1)
metadata[key] = pyexiv2.ExifTag(key, value)
if latlon[2] >= 0:
altref = '0'
else:
altref = '1'
key = 'Exif.GPSInfo.GPSAltitudeRef'
metadata[key] = pyexiv2.ExifTag(key, '0')
metadata[key] = pyexiv2.ExifTag(key, altref)
## write values
# write values
metadata.write()
def parse_coordinate_system(self, _file):
"""Write attributes to jobOptions from coord file"""
# check for coordinate file existence
if not io.file_exists(_file):
log.ODM_ERROR('Could not find file %s' % _coords_file)
log.ODM_ERROR('Could not find file %s' % _file)
return
with open(_file) as f:
# extract reference system and utm zone from first line.
# We will assume the following format:
# 'WGS84 UTM 17N'
line = f.readline().split(' ')
self.datum = line[0]
self.utm_pole = line[2][len(line)-1]
self.utm_zone = int(line[2][:len(line)-1])
# extract east and west offsets from second line.
# We will assume the following format:
# '440143 4588391'
line = f.readline().split(' ')
self.utm_east_offset = int(line[0])
self.utm_north_offset = int(line[1])
line = f.readline()
log.ODM_DEBUG('Line: %s' % line)
ref = line.split(' ')
# match_wgs_utm = re.search('WGS84 UTM (\d{1,2})(N|S)', line, re.I)
if ref[0] == 'WGS84' and ref[1] == 'UTM': # match_wgs_utm:
self.datum = ref[0]
self.utm_pole = ref[2][len(ref) - 1]
self.utm_zone = int(ref[2][:len(ref) - 1])
# extract east and west offsets from second line.
# We will assume the following format:
# '440143 4588391'
# update EPSG
self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole)
# If the first line looks like "EPSG:n" or "epsg:n"
elif ref[0].split(':')[0].lower() == 'epsg':
self.epsg = line.split(':')[1]
else:
log.ODM_ERROR('Could not parse coordinates. Bad CRS supplied: %s' % line)
return
offsets = f.readline().split(' ')
self.utm_east_offset = int(offsets[0])
self.utm_north_offset = int(offsets[1])
# parse coordinates
lines = f.readlines()
for l in lines:
x, y, z = l.split(' ')[:3]
xyz = l.split(' ')
if len(xyz) == 3:
x, y, z = xyz[:3]
elif len(xyz) == 2:
x, y = xyz[:2]
z = 0
self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))
# update EPSG
self.epsg = self.calculate_EPSG(self.utm_zone, self.utm_pole)
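parse_coordinate_system now accepts either the original 'WGS84 UTM 17N' style header or an 'EPSG:<code>' header on the first line, followed by the UTM east/north offsets and then the points. An illustrative coordFile.txt it would parse (the offsets are the example already quoted in the comments above; the point values are made up):

#   WGS84 UTM 17N      <- datum, projection and zone (alternatively e.g. "EPSG:32617")
#   440143 4588391     <- UTM east / north offsets
#   12.3 45.6 7.8      <- one "x y z" point per remaining line (z defaults to 0 if omitted)
#   23.4 56.7 8.9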
class ODM_Tree(object):
def __init__(self, root_path):
### root path to the project
# root path to the project
self.root_path = io.absolute_path_file(root_path)
### modules paths
# modules paths
# here are defined where all modules should be located in
# order to keep track all files al directories during the
@ -279,9 +345,13 @@ class ODM_Tree(object):
self.odm_texturing = io.join_paths(self.root_path, 'odm_texturing')
self.odm_georeferencing = io.join_paths(self.root_path, 'odm_georeferencing')
self.odm_orthophoto = io.join_paths(self.root_path, 'odm_orthophoto')
self.odm_pdal = io.join_paths(self.root_path, 'pdal')
# important files paths
# benchmarking
self.benchmarking = io.join_paths(self.root_path, 'benchmark.txt')
### important files paths
# opensfm
self.opensfm_bundle = io.join_paths(self.opensfm, 'bundle_r000.out')
self.opensfm_bundle_list = io.join_paths(self.opensfm, 'list_r000.out')
@ -294,11 +364,11 @@ class ODM_Tree(object):
self.pmvs_visdat = io.join_paths(self.pmvs_rec_path, 'vis.dat')
self.pmvs_options = io.join_paths(self.pmvs_rec_path, 'pmvs_options.txt')
self.pmvs_model = io.join_paths(self.pmvs_rec_path, 'models/option-0000.ply')
# odm_meshing
self.odm_mesh = io.join_paths(self.odm_meshing, 'odm_mesh.ply')
self.odm_meshing_log = io.join_paths(self.odm_meshing, 'odm_meshing_log.txt')
# odm_texturing
self.odm_texturing_undistorted_image_path = io.join_paths(
self.odm_texturing, 'undistorted')
@ -306,14 +376,6 @@ class ODM_Tree(object):
self.odm_texturing, 'odm_textured_model.obj')
self.odm_textured_model_mtl = io.join_paths(
self.odm_texturing, 'odm_textured_model.mtl')
self.odm_textured_model_txt_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.txt')
self.odm_textured_model_ply_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.ply')
self.odm_textured_model_obj_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.obj')
self.odm_textured_model_mtl_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.mtl')
self.odm_texuring_log = io.join_paths(
self.odm_texturing, 'odm_texturing_log.txt')
@ -328,14 +390,22 @@ class ODM_Tree(object):
self.odm_georeferencing, 'odm_georeferencing_utm_log.txt')
self.odm_georeferencing_log = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_log.txt')
self.odm_georeferencing_model_txt_geo = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_model_geo.txt')
self.odm_georeferencing_model_ply_geo = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.ply')
self.odm_georeferencing_model_obj_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.obj') # these files will be kept in odm_texturing/
self.odm_georeferencing_model_mtl_geo = io.join_paths(
self.odm_texturing, 'odm_textured_model_geo.mtl') # these files will be kept in odm_texturing/
self.odm_georeferencing_xyz_file = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.csv')
self.odm_georeferencing_pdal = io.join_paths(
self.odm_georeferencing, 'pipeline.xml')
# odm_orthophoto
self.odm_orthophoto_file = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.png')
self.odm_orthophoto_tif = io.join_paths(self.odm_orthophoto, 'odm_orthophoto.tif')
self.odm_orthophoto_corners = io.join_paths(self.odm_orthophoto, 'odm_orthphoto_corners.txt')
self.odm_orthophoto_log = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_log.txt')
self.odm_orthophoto_tif_log = io.join_paths(self.odm_orthophoto, 'gdal_translate_log.txt')

38
run.py
View file

@ -9,28 +9,32 @@ import ecto
from scripts.odm_app import ODMApp
def usage():
log.ODM_ERROR('USAGE: %s --project-path [project_path]' % sys.argv[0])
log.ODM_ERROR('OpenDroneMap app finished - %s' % system.now())
sys.exit(0)
log.ODM_ERROR('USAGE: %s --project-path [project_path]' % sys.argv[0])
log.ODM_ERROR('OpenDroneMap app finished - %s' % system.now())
sys.exit(0)
if __name__ == '__main__':
log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now())
log.ODM_INFO('Initializing OpenDroneMap app - %s' % system.now())
# Force to provide the images path
if config.args.get('project_path') is None:
usage()
args = config.config()
# create an instance of my App BlackBox
# internally configure all tasks
app = ODMApp(args=config.args)
# Force to provide the images path
if args.project_path is None:
usage()
# create a plasm that only contains the BlackBox
plasm = ecto.Plasm()
plasm.insert(app)
# create an instance of my App BlackBox
# internally configure all tasks
app = ODMApp(args=args)
# execute the plasm
plasm.execute(niter=1)
# create a plasm that only contains the BlackBox
plasm = ecto.Plasm()
plasm.insert(app)
log.ODM_INFO('OpenDroneMap app finished - %s' % system.now())
# execute the plasm
plasm.execute(niter=1)
log.ODM_INFO('OpenDroneMap app finished - %s' % system.now())

7
run.sh 100755
View file

@ -0,0 +1,7 @@
#!/bin/bash
RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib
python $RUNPATH/run.py "$@"

View file

@ -5,6 +5,7 @@ from opendm import log
from opendm import system
from opendm import context
class ODMCmvsCell(ecto.Cell):
def declare_params(self, params):
@ -20,19 +21,25 @@ class ODMCmvsCell(ecto.Cell):
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running OMD CMVS Cell')
log.ODM_INFO('Running ODM CMVS Cell')
# get inputs
args = self.inputs.args
tree = self.inputs.tree
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'cmvs'
rerun_cell = (args.rerun is not None and
args.rerun == 'cmvs') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'cmvs' in args.rerun_from)
if not io.file_exists(tree.pmvs_bundle) or rerun_cell:
log.ODM_DEBUG('Writting CMVS vis in: %s' % tree.pmvs_bundle)
log.ODM_DEBUG('Writing CMVS vis in: %s' % tree.pmvs_bundle)
# copy bundle file to pmvs dir
from shutil import copyfile
@ -50,7 +57,10 @@ class ODMCmvsCell(ecto.Cell):
system.run('{bin} {prefix}/ {max_images} {cores}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid CMVS file in: %s' %
(tree.pmvs_bundle))
tree.pmvs_bundle)
log.ODM_INFO('Running OMD CMVS Cell - Finished')
return ecto.OK if args['end_with'] != 'cmvs' else ecto.QUIT
if args.time:
system.benchmark(start_time, tree.benchmarking, 'CMVS')
log.ODM_INFO('Running ODM CMVS Cell - Finished')
return ecto.OK if args.end_with != 'cmvs' else ecto.QUIT
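The rerun check above now honours all three of the new flags. For the CMVS cell it evaluates as follows:

#   --rerun cmvs          -> rerun_cell is True
#   --rerun-all           -> rerun_cell is True (for every cell)
#   --rerun-from opensfm  -> rerun_cell is True  ('cmvs' is in the stored slice)
#   --rerun-from pmvs     -> rerun_cell is False ('cmvs' comes before 'pmvs' in processopts)
#   no rerun flag         -> rerun_cell is False, and the cell is skipped whenever
#                            tree.pmvs_bundle already exists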

View file

@ -6,13 +6,14 @@ from opendm import io
from opendm import types
from opendm import log
class ODMLoadDatasetCell(ecto.Cell):
def declare_params(self, params):
params.declare("force_focal", 'Override the focal length information for the '
'images', None)
'images', None)
params.declare("force_ccd", 'Override the ccd widht information for the '
'images', None)
'images', None)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -65,4 +66,4 @@ class ODMLoadDatasetCell(ecto.Cell):
outputs.photos = photos
log.ODM_INFO('Running ODM Load Dataset Cell - Finished')
return ecto.OK
return ecto.OK

View file

@ -1,8 +1,11 @@
import ecto
import os
from opendm import context
from opendm import types
from opendm import config
from opendm import io
from opendm import system
from dataset import ODMLoadDatasetCell
from resize import ODMResizeCell
@ -15,11 +18,14 @@ from odm_texturing import ODMTexturingCell
from odm_georeferencing import ODMGeoreferencingCell
from odm_orthophoto import ODMOrthoPhotoCell
class ODMApp(ecto.BlackBox):
''' ODMApp - a class for ODM Activities
'''
"""ODMApp - a class for ODM Activities
"""
def __init__(self, *args, **kwargs):
ecto.BlackBox.__init__(self, *args, **kwargs)
self.tree = None
@staticmethod
def declare_direct_params(p):
@ -31,107 +37,114 @@ class ODMApp(ecto.BlackBox):
Implement the virtual function from the base class
Only cells from which something is forwarded have to be declared
"""
cells = { 'args': ecto.Constant(value=p.args),
'dataset': ODMLoadDatasetCell(force_focal=p.args['force_focal'],
force_ccd=p.args['force_ccd']),
'resize': ODMResizeCell(resize_to=p.args['resize_to']),
'opensfm': ODMOpenSfMCell(use_exif_size=False,
feature_process_size=p.args['resize_to'],
feature_min_frames=p.args['min_num_features'],
processes=context.num_cores,
matching_gps_neighbors=p.args['matcher_neighbors'],
matching_gps_distance=p.args['matcher_distance']),
'slam': ODMSlamCell(),
'cmvs': ODMCmvsCell(max_images=p.args['cmvs_maxImages']),
'pmvs': ODMPmvsCell(level=p.args['pmvs_level'],
csize=p.args['pmvs_csize'],
thresh=p.args['pmvs_threshold'],
wsize=p.args['pmvs_wsize'],
min_imgs=p.args['pmvs_minImageNum'],
cores=p.args['pmvs_num_cores']),
'meshing': ODMeshingCell(max_vertex=p.args['odm_meshing_maxVertexCount'],
oct_tree=p.args['odm_meshing_octreeDepth'],
samples=p.args['odm_meshing_samplesPerNode'],
solver=p.args['odm_meshing_solverDivide']),
'texturing': ODMTexturingCell(resize=p.args['resize_to'],
resolution=p.args['odm_texturing_textureResolution'],
size=p.args['odm_texturing_textureWithSize']),
'georeferencing': ODMGeoreferencingCell(img_size=p.args['resize_to'],
gcp_file=p.args['odm_georeferencing_gcpFile'],
use_gcp=p.args['odm_georeferencing_useGcp']),
'orthophoto': ODMOrthoPhotoCell(resolution=p.args['odm_orthophoto_resolution'])
cells = {'args': ecto.Constant(value=p.args),
'dataset': ODMLoadDatasetCell(force_focal=p.args.force_focal,
force_ccd=p.args.force_ccd),
'resize': ODMResizeCell(resize_to=p.args.resize_to),
'opensfm': ODMOpenSfMCell(use_exif_size=False,
feature_process_size=p.args.resize_to,
feature_min_frames=p.args.min_num_features,
processes=context.num_cores,
matching_gps_neighbors=p.args.matcher_neighbors,
matching_gps_distance=p.args.matcher_distance),
'slam': ODMSlamCell(),
'cmvs': ODMCmvsCell(max_images=p.args.cmvs_maxImages),
'pmvs': ODMPmvsCell(level=p.args.pmvs_level,
csize=p.args.pmvs_csize,
thresh=p.args.pmvs_threshold,
wsize=p.args.pmvs_wsize,
min_imgs=p.args.pmvs_minImageNum,
cores=p.args.pmvs_num_cores),
'meshing': ODMeshingCell(max_vertex=p.args.odm_meshing_maxVertexCount,
oct_tree=p.args.odm_meshing_octreeDepth,
samples=p.args.odm_meshing_samplesPerNode,
solver=p.args.odm_meshing_solverDivide),
'texturing': ODMTexturingCell(resize=p.args.resize_to,
resolution=p.args.odm_texturing_textureResolution,
size=p.args.odm_texturing_textureWithSize),
'georeferencing': ODMGeoreferencingCell(img_size=p.args.resize_to,
gcp_file=p.args.odm_georeferencing_gcpFile,
use_gcp=p.args.odm_georeferencing_useGcp),
'orthophoto': ODMOrthoPhotoCell(resolution=p.args.odm_orthophoto_resolution)
}
}
return cells
return cells
def configure(self, p, _i, _o):
tree = types.ODM_Tree(p.args['project_path'])
tree = types.ODM_Tree(p.args.project_path)
self.tree = ecto.Constant(value=tree)
# TODO(dakota) put this somewhere better maybe
if p.args.time and io.file_exists(tree.benchmarking):
# Delete the previously made file
os.remove(tree.benchmarking)
with open(tree.benchmarking, 'a') as b:
b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))
def connections(self, _p):
run_slam = _p.args.get('video') is not None
# define initial task
initial_task = _p.args['start_with']
initial_task_id = config.processopts.index(initial_task)
# TODO: What is this?
# initial_task = _p.args['start_with']
# initial_task_id = config.processopts.index(initial_task)
## define the connections like you would for the plasm
# define the connections like you would for the plasm
connections = []
if run_slam:
# run slam cell
connections += [ self.tree[:] >> self.slam['tree'],
self.args[:] >> self.slam['args'] ]
connections += [self.tree[:] >> self.slam['tree'],
self.args[:] >> self.slam['args']]
# run cmvs
connections += [ self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.slam['reconstruction'] >> self.cmvs['reconstruction'] ]
connections += [self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.slam['reconstruction'] >> self.cmvs['reconstruction']]
else:
# load the dataset
connections = [ self.tree[:] >> self.dataset['tree'] ]
connections = [self.tree[:] >> self.dataset['tree']]
# run resize cell
connections += [ self.tree[:] >> self.resize['tree'],
self.args[:] >> self.resize['args'],
self.dataset['photos'] >> self.resize['photos'] ]
connections += [self.tree[:] >> self.resize['tree'],
self.args[:] >> self.resize['args'],
self.dataset['photos'] >> self.resize['photos']]
# run opensfm with images from load dataset
connections += [ self.tree[:] >> self.opensfm['tree'],
self.args[:] >> self.opensfm['args'],
self.resize['photos'] >> self.opensfm['photos'] ]
connections += [self.tree[:] >> self.opensfm['tree'],
self.args[:] >> self.opensfm['args'],
self.resize['photos'] >> self.opensfm['photos']]
# run cmvs
connections += [ self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.opensfm['reconstruction'] >> self.cmvs['reconstruction'] ]
connections += [self.tree[:] >> self.cmvs['tree'],
self.args[:] >> self.cmvs['args'],
self.opensfm['reconstruction'] >> self.cmvs['reconstruction']]
# run pmvs
connections += [ self.tree[:] >> self.pmvs['tree'],
self.args[:] >> self.pmvs['args'],
self.cmvs['reconstruction'] >> self.pmvs['reconstruction'] ]
connections += [self.tree[:] >> self.pmvs['tree'],
self.args[:] >> self.pmvs['args'],
self.cmvs['reconstruction'] >> self.pmvs['reconstruction']]
# create odm mesh
connections += [ self.tree[:] >> self.meshing['tree'],
self.args[:] >> self.meshing['args'],
self.pmvs['reconstruction'] >> self.meshing['reconstruction'] ]
connections += [self.tree[:] >> self.meshing['tree'],
self.args[:] >> self.meshing['args'],
self.pmvs['reconstruction'] >> self.meshing['reconstruction']]
# create odm texture
connections += [ self.tree[:] >> self.texturing['tree'],
self.args[:] >> self.texturing['args'],
self.meshing['reconstruction'] >> self.texturing['reconstruction'] ]
connections += [self.tree[:] >> self.texturing['tree'],
self.args[:] >> self.texturing['args'],
self.meshing['reconstruction'] >> self.texturing['reconstruction']]
if not run_slam:
# create odm georeference
connections += [ self.tree[:] >> self.georeferencing['tree'],
self.args[:] >> self.georeferencing['args'],
self.dataset['photos'] >> self.georeferencing['photos'],
self.texturing['reconstruction'] >> self.georeferencing['reconstruction'] ]
connections += [self.tree[:] >> self.georeferencing['tree'],
self.args[:] >> self.georeferencing['args'],
self.dataset['photos'] >> self.georeferencing['photos'],
self.texturing['reconstruction'] >> self.georeferencing['reconstruction']]
## create odm orthophoto
connections += [ self.tree[:] >> self.orthophoto['tree'],
self.args[:] >> self.orthophoto['args'],
self.georeferencing['reconstruction'] >> self.orthophoto['reconstruction'] ]
# create odm orthophoto
connections += [self.tree[:] >> self.orthophoto['tree'],
self.args[:] >> self.orthophoto['args'],
self.georeferencing['reconstruction'] >> self.orthophoto['reconstruction']]
return connections
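
The connections list above is what wires the cells into an ecto graph. As a rough sketch of how such a graph is typically driven (the import path and argument handling here are assumptions, not shown in this diff; ecto.Plasm, insert and execute are standard ecto calls):

import ecto
from run import ODMApp           # hypothetical import; the app class is defined alongside these methods

app = ODMApp(args=args)           # 'args' would be the parsed opendm.config arguments
plasm = ecto.Plasm()
plasm.insert(app)                 # the graph defined by connections() above
plasm.execute(niter=1)            # run every cell exactly once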


@ -1,4 +1,5 @@
import ecto
import csv
from opendm import io
from opendm import log
@ -6,6 +7,7 @@ from opendm import types
from opendm import system
from opendm import context
class ODMGeoreferencingCell(ecto.Cell):
def declare_params(self, params):
params.declare("gcp_file", 'path to the file containing the ground control '
@ -23,12 +25,15 @@ class ODMGeoreferencingCell(ecto.Cell):
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running OMD Georeferencing Cell')
log.ODM_INFO('Running ODM Georeferencing Cell')
# get inputs
args = self.inputs.args
tree = self.inputs.tree
gcpfile = io.join_paths(tree.root_path, self.params.gcp_file)
# define paths and create working directories
system.mkdir_p(tree.odm_georeferencing)
@ -38,229 +43,107 @@ class ODMGeoreferencingCell(ecto.Cell):
if not self.params.use_gcp and \
not io.file_exists(tree.odm_georeferencing_coords):
log.ODM_WARNING('Warning: No coordinates file. ' \
'Generating coordinates file in: %s' % tree.odm_georeferencing_coords)
log.ODM_WARNING('Warning: No coordinates file. '
'Generating coordinates file in: %s'
% tree.odm_georeferencing_coords)
try:
# odm_georeference definitions
kwargs = {
'bin': context.odm_modules_path,
'imgs': tree.dataset_resize,
'imgs': tree.dataset_raw,
'imgs_list': tree.opensfm_bundle_list,
'coords': tree.odm_georeferencing_coords,
'log': tree.odm_georeferencing_utm_log
}
# run UTM extraction binary
system.run('{bin}/odm_extract_utm -imagesPath {imgs}/ ' \
'-imageListFile {imgs_list} -outputCoordFile {coords} ' \
'-logFile {log}'.format(**kwargs))
system.run('{bin}/odm_extract_utm -imagesPath {imgs}/ '
'-imageListFile {imgs_list} -outputCoordFile {coords} '
'-logFile {log}'.format(**kwargs))
except Exception, e:
log.ODM_ERROR('Could not generate GCP file from images metadata.' \
'Consider rerunning with argument --odm_georeferencing-useGcp' \
' and provide a proper GCP file')
log.ODM_ERROR('Could not generate GCP file from images metadata.'
'Consider rerunning with argument --odm_georeferencing-useGcp'
' and provide a proper GCP file')
log.ODM_ERROR(e)
return ecto.QUIT
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'odm_georeferencing'
rerun_cell = (args.rerun is not None and
args.rerun == 'odm_georeferencing') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'odm_georeferencing' in args.rerun_from)
if not io.file_exists(tree.odm_textured_model_obj_geo) or \
not io.file_exists(tree.odm_textured_model_ply_geo) or rerun_cell:
if not io.file_exists(tree.odm_georeferencing_model_obj_geo) or \
not io.file_exists(tree.odm_georeferencing_model_ply_geo) or rerun_cell:
# odm_georeference definitions
kwargs = {
'bin': context.odm_modules_path,
'bundle': tree.opensfm_bundle,
'imgs': tree.dataset_resize,
'imgs': tree.dataset_raw,
'imgs_list': tree.opensfm_bundle_list,
'model': tree.odm_textured_model_obj,
'pc': tree.pmvs_model,
'log': tree.odm_georeferencing_log,
'coords': tree.odm_georeferencing_coords,
'pc_geo': tree.odm_textured_model_ply_geo,
'geo_sys': tree.odm_textured_model_txt_geo,
'model_geo': tree.odm_textured_model_obj_geo,
'pc_geo': tree.odm_georeferencing_model_ply_geo,
'geo_sys': tree.odm_georeferencing_model_txt_geo,
'model_geo': tree.odm_georeferencing_model_obj_geo,
'size': self.params.img_size,
'gcp': io.join_paths(tree.root_path, self.params.gcp_file),
'gcp': gcpfile,
}
if self.params.use_gcp and \
io.file_exists(tree.odm_georeferencing_coords):
io.file_exists(gcpfile):
system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} ' \
'-bundleResizedTo {size} -inputFile {model} -outputFile {model_geo} ' \
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} ' \
'-logFile {log} -georefFileOutputPath {geo_sys} -gcpFile {gcp} ' \
'-outputCoordFile {coords}'.format(**kwargs))
system.run('{bin}/odm_georef -bundleFile {bundle} -imagesPath {imgs} -imagesListPath {imgs_list} '
'-bundleResizedTo {size} -inputFile {model} -outputFile {model_geo} '
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} '
'-logFile {log} -georefFileOutputPath {geo_sys} -gcpFile {gcp} '
'-outputCoordFile {coords}'.format(**kwargs))
else:
system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} ' \
'-inputFile {model} -outputFile {model_geo} ' \
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} ' \
'-logFile {log} -georefFileOutputPath {geo_sys}'.format(**kwargs))
system.run('{bin}/odm_georef -bundleFile {bundle} -inputCoordFile {coords} '
'-inputFile {model} -outputFile {model_geo} '
'-inputPointCloudFile {pc} -outputPointCloudFile {pc_geo} '
'-logFile {log} -georefFileOutputPath {geo_sys}'.format(**kwargs))
# update images metadata
geo_ref = types.ODM_GeoRef()
geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords)
for idx, photo in enumerate(self.inputs.photos):
geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx)
# convert ply model to LAS reference system
geo_ref.convert_to_las(tree.odm_georeferencing_model_ply_geo,
tree.odm_georeferencing_pdal)
# XYZ point cloud output
log.ODM_INFO("Creating geo-referenced CSV file (XYZ format, can be used with GRASS to create DEM)")
with open(tree.odm_georeferencing_xyz_file, "wb") as csvfile:
csvfile_writer = csv.writer(csvfile, delimiter=",")
reachedpoints = False
with open(tree.odm_georeferencing_model_ply_geo) as f:
for lineNumber, line in enumerate(f):
if reachedpoints:
tokens = line.split(" ")
csv_line = [float(tokens[0])+geo_ref.utm_east_offset,
float(tokens[1])+geo_ref.utm_north_offset,
tokens[2]]
csvfile_writer.writerow(csv_line)
if line.startswith("end_header"):
reachedpoints = True
csvfile.close()
else:
log.ODM_WARNING('Found a valid georeferenced model in: %s' \
% tree.odm_textured_model_ply_geo)
log.ODM_WARNING('Found a valid georeferenced model in: %s'
% tree.odm_georeferencing_model_ply_geo)
if args.time:
system.benchmark(start_time, tree.benchmarking, 'Georeferencing')
# update images metadata
geo_ref = types.ODM_GeoRef()
geo_ref.parse_coordinate_system(tree.odm_georeferencing_coords)
for idx, photo in enumerate(self.inputs.photos):
geo_ref.utm_to_latlon(tree.odm_georeferencing_latlon, photo, idx)
# convert ply model to LAS reference system
geo_ref.convert_to_las(tree.odm_textured_model_ply_geo)
log.ODM_INFO('Running OMD Georeferencing Cell - Finished')
return ecto.OK if args['end_with'] != 'odm_georeferencing' else ecto.QUIT
def odm_georeferencing():
"""Run odm_georeferencing"""
print "\n - running georeferencing - " + now()
os.chdir(jobOptions["jobDir"])
try:
os.mkdir(jobOptions["jobDir"] + "/odm_georeferencing")
except:
pass
if not args.odm_georeferencing_useGcp:
run("\"" + BIN_PATH + "/odm_extract_utm\" -imagesPath " + jobOptions["srcDir"] + "/ -imageListFile " \
+ jobOptions["jobDir"] + "/pmvs/list.rd.txt -outputCoordFile " + jobOptions["jobDir"] \
+ "/odm_georeferencing/coordFile.txt")
run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
+ "/pmvs/bundle.rd.out -inputCoordFile " + jobOptions["jobDir"] \
+ "/odm_georeferencing/coordFile.txt -inputFile " + jobOptions["jobDir"] \
+ "-results/odm_texturing/odm_textured_model.obj -outputFile " + jobOptions["jobDir"] \
+ "-results/odm_texturing/odm_textured_model_geo.obj -inputPointCloudFile " \
+ jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
+ "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
+ "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
+ "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
elif os.path.isfile(jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile):
run("\"" + BIN_PATH + "/odm_georef\" -bundleFile " + jobOptions["jobDir"] \
+ "/pmvs/bundle.rd.out -gcpFile " + jobOptions["srcDir"] + "/" + args.odm_georeferencing_gcpFile \
+ " -imagesPath " + jobOptions["srcDir"] + "/ -imagesListPath " + jobOptions["jobDir"] \
+ "/pmvs/list.rd.txt -bundleResizedTo " + str(jobOptions["resizeTo"]) + " -inputFile " \
+ jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model.obj -outputFile " \
+ jobOptions["jobDir"] + "-results/odm_texturing/odm_textured_model_geo.obj -outputCoordFile " \
+ jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt -inputPointCloudFile " \
+ jobOptions["jobDir"] + "-results/option-0000.ply -outputPointCloudFile " + jobOptions["jobDir"] \
+ "-results/option-0000_georef.ply -logFile " + jobOptions["jobDir"] \
+ "/odm_georeferencing/odm_georeferencing_log.txt -georefFileOutputPath " + jobOptions["jobDir"] \
+ "-results/odm_texturing/odm_textured_model_geo_georef_system.txt")
else:
print "Warning: No GCP file. Consider rerunning with argument --odm_georeferencing-useGcp false --start-with odm_georeferencing"
print "Skipping orthophoto"
args.end_with = "odm_georeferencing"
if "csString" not in jobOptions:
parse_coordinate_system()
if "csString" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
images = []
with open(jobOptions["jobDir"] + "/pmvs/list.rd.txt") as f:
images = f.readlines()
if len(images) > 0:
with open(jobOptions["jobDir"] + "/odm_georeferencing/coordFile.txt") as f:
for lineNumber, line in enumerate(f):
if lineNumber >= 2 and lineNumber - 2 < len(images):
tokens = line.split(' ')
if len(tokens) >= 3:
x = float(tokens[0])
y = float(tokens[1])
z = float(tokens[2])
filename = images[lineNumber - 2]
run("echo " + str(x + jobOptions["utmEastOffset"]) + " " \
+ str(y + jobOptions["utmNorthOffset"]) + " " + str(z) \
+ " | cs2cs " + jobOptions["csString"] + " +to +datum=WGS84 +proj=latlong > " \
+ jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt")
with open(jobOptions["jobDir"] + "/odm_georeferencing/latlong.txt") as latlongFile:
latlongLine = latlongFile.readline()
tokens = latlongLine.split()
if len(tokens) >= 2:
exifGpsInfoWritten = False
lonString = tokens[0] # Example: 83d18'16.285"W
latString = tokens[1] # Example: 41d2'11.789"N
altString = ""
if len(tokens) > 2:
altString = tokens[2] # Example: 0.998
tokens = re.split("[d '\"]+", lonString)
if len(tokens) >= 4:
lonDeg = tokens[0]
lonMin = tokens[1]
lonSec = tokens[2]
lonSecFrac = fractions.Fraction(lonSec)
lonSecNumerator = str(lonSecFrac._numerator)
lonSecDenominator = str(lonSecFrac._denominator)
lonRef = tokens[3]
tokens = re.split("[d '\"]+", latString)
if len(tokens) >= 4:
latDeg = tokens[0]
latMin = tokens[1]
latSec = tokens[2]
latSecFrac = fractions.Fraction(latSec)
latSecNumerator = str(latSecFrac._numerator)
latSecDenominator = str(latSecFrac._denominator)
latRef = tokens[3]
exivCmd = "exiv2 -q"
exivCmd += " -M\"set Exif.GPSInfo.GPSLatitude " + latDeg + "/1 " \
+ latMin + "/1 " + latSecNumerator + "/" + latSecDenominator + "\""
exivCmd += " -M\"set Exif.GPSInfo.GPSLatitudeRef " + latRef + "\""
exivCmd += " -M\"set Exif.GPSInfo.GPSLongitude " + lonDeg + "/1 " \
+ lonMin + "/1 " + lonSecNumerator + "/" + lonSecDenominator + "\""
exivCmd += " -M\"set Exif.GPSInfo.GPSLongitudeRef " + lonRef + "\""
altNumerator = arcDenominator = 0 # BUG: arcDenominator is never used
if altString:
altFrac = fractions.Fraction(altString)
altNumerator = str(altFrac._numerator)
altDenominator = str(altFrac._denominator)
exivCmd += " -M\"set Exif.GPSInfo.GPSAltitude " + altNumerator + "/" + altDenominator + "\""
exivCmd += " -M\"set Exif.GPSInfo.GPSAltitudeRef 0\""
exivCmd += " " + filename
run(exivCmd)
exifGpsInfoWritten = True
if not exifGpsInfoWritten:
print(" Warning: Failed setting EXIF GPS info for " \
+ filename + " based on " + latlongLine)
if "epsg" in jobOptions and "utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions:
lasCmd = "\"" + BIN_PATH + "/txt2las\" -i " + jobOptions["jobDir"] + \
"-results/option-0000_georef.ply -o " + jobOptions["jobDir"] \
+ "-results/pointcloud_georef.laz -skip 30 -parse xyzRGBssss -set_scale 0.01 0.01 0.01 -set_offset " \
+ str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) + " 0 -translate_xyz " \
+ str(jobOptions["utmEastOffset"]) + " " + str(jobOptions["utmNorthOffset"]) \
+ " 0 -epsg " + str(jobOptions["epsg"])
print(" Creating geo-referenced LAS file (expecting warning)...")
run(lasCmd)
if args['--end-with'] != "odm_georeferencing":
odm_orthophoto()
log.ODM_INFO('Running ODM Georeferencing Cell - Finished')
return ecto.OK if args.end_with != 'odm_georeferencing' else ecto.QUIT
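
The legacy odm_georeferencing() block above converts the UTM coordinates (plus the stored east/north offsets) back to lat/long by piping them through cs2cs before writing EXIF GPS tags. A minimal standalone sketch of that conversion step, assuming the PROJ cs2cs binary is on the PATH (the helper below is illustrative, not part of OpenDroneMap):

import subprocess

def utm_to_latlon(cs_string, x, y, z):
    # cs_string is a proj4 string such as '+proj=utm +zone=17 +datum=WGS84'
    cmd = 'echo %s %s %s | cs2cs %s +to +datum=WGS84 +proj=latlong' % (x, y, z, cs_string)
    out = subprocess.check_output(cmd, shell=True)
    # cs2cs prints e.g.: 83d18'16.285"W  41d2'11.789"N 400.000
    lon, lat = out.split()[:2]
    return lon, lat

# e.g. utm_to_latlon('+proj=utm +zone=17 +datum=WGS84', 274914.738, 4603349.014, 400)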


@ -5,20 +5,20 @@ from opendm import io
from opendm import system
from opendm import context
class ODMeshingCell(ecto.Cell):
def declare_params(self, params):
params.declare("max_vertex", 'The maximum vertex count of the output '
'mesh', 100000)
params.declare("oct_tree", 'Oct-tree depth used in the mesh reconstruction, '
'increase to get more vertices, recommended '
'values are 8-12', 9)
params.declare("samples", 'Number of points per octree node, recommended '
'value: 1.0', 1)
params.declare("solver", 'Oct-tree depth at which the Laplacian equation '
'is solved in the surface reconstruction step. '
'Increasing this value increases computation '
'times slightly but helps reduce memory usage.', 9)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -27,8 +27,11 @@ class ODMeshingCell(ecto.Cell):
outputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])
def process(self, inputs, outputs):
log.ODM_INFO('Running OMD Meshing Cell')
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM Meshing Cell')
# get inputs
args = self.inputs.args
@ -38,11 +41,14 @@ class ODMeshingCell(ecto.Cell):
system.mkdir_p(tree.odm_meshing)
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'odm_meshing'
rerun_cell = (args.rerun is not None and
args.rerun == 'odm_meshing') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'odm_meshing' in args.rerun_from)
if not io.file_exists(tree.odm_mesh) or rerun_cell:
log.ODM_DEBUG('Writting ODM Mesh file in: %s' % tree.odm_mesh)
log.ODM_DEBUG('Writing ODM Mesh file in: %s' % tree.odm_mesh)
kwargs = {
'bin': context.odm_modules_path,
@ -52,17 +58,20 @@ class ODMeshingCell(ecto.Cell):
'max_vertex': self.params.max_vertex,
'oct_tree': self.params.oct_tree,
'samples': self.params.samples,
'solver':self.params.solver
'solver': self.params.solver
}
# run meshing binary
system.run('{bin}/odm_meshing -inputFile {infile} ' \
'-outputFile {outfile} -logFile {log} ' \
'-maxVertexCount {max_vertex} -octreeDepth {oct_tree} ' \
'-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs))
system.run('{bin}/odm_meshing -inputFile {infile} '
'-outputFile {outfile} -logFile {log} '
'-maxVertexCount {max_vertex} -octreeDepth {oct_tree} '
'-samplesPerNode {samples} -solverDivide {solver}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
(tree.odm_mesh))
log.ODM_INFO('Running OMD Meshing Cell - Finished')
return ecto.OK if args['end_with'] != 'odm_meshing' else ecto.QUIT
log.ODM_WARNING('Found a valid ODM Mesh file in: %s' %
tree.odm_mesh)
if args.time:
system.benchmark(start_time, tree.benchmarking, 'Meshing')
log.ODM_INFO('Running ODM Meshing Cell - Finished')
return ecto.OK if args.end_with != 'odm_meshing' else ecto.QUIT
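
Filled in with the defaults declared above (max_vertex 100000, oct_tree 9, samples 1, solver 9) and placeholder file names, the formatted meshing command comes out roughly as:

odm_meshing -inputFile <dense_point_cloud.ply> -outputFile <odm_mesh.ply> -logFile <odm_meshing_log.txt> -maxVertexCount 100000 -octreeDepth 9 -samplesPerNode 1 -solverDivide 9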


@ -4,6 +4,8 @@ from opendm import io
from opendm import log
from opendm import system
from opendm import context
from opendm import types
class ODMOrthoPhotoCell(ecto.Cell):
def declare_params(self, params):
@ -16,6 +18,9 @@ class ODMOrthoPhotoCell(ecto.Cell):
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running OMD OrthoPhoto Cell')
# get inputs
@ -26,78 +31,75 @@ class ODMOrthoPhotoCell(ecto.Cell):
system.mkdir_p(tree.odm_orthophoto)
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'odm_orthophoto'
rerun_cell = (args.rerun is not None and
args.rerun == 'odm_orthophoto') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'odm_orthophoto' in args.rerun_from)
if not io.file_exists(tree.odm_orthophoto_file) or rerun_cell:
# odm_georeference definitions
# odm_orthophoto definitions
kwargs = {
'bin': context.odm_modules_path,
'model_geo': tree.odm_textured_model_obj_geo,
'model_geo': tree.odm_georeferencing_model_obj_geo,
'log': tree.odm_orthophoto_log,
'ortho': tree.odm_orthophoto_file,
'corners': tree.odm_orthophoto_corners,
'res': self.params.resolution
}
# run odm_georeference
system.run('{bin}/odm_orthophoto -inputFile {model_geo} ' \
'-logFile {log} -outputFile {ortho} -resolution {res} ' \
'-outputCornerFile {corners}'.format(**kwargs))
# run odm_orthophoto
system.run('{bin}/odm_orthophoto -inputFile {model_geo} '
'-logFile {log} -outputFile {ortho} -resolution {res} '
'-outputCornerFile {corners}'.format(**kwargs))
# Create georeferenced GeoTiff
geotiffcreated = False
georef = types.ODM_GeoRef()
# creates the coord refs # TODO I don't want to have to do this twice- after odm_georef
georef.parse_coordinate_system(tree.odm_georeferencing_coords)
if georef.epsg and georef.utm_east_offset and georef.utm_north_offset:
ulx = uly = lrx = lry = 0.0
with open(tree.odm_orthophoto_corners) as f:
for lineNumber, line in enumerate(f):
if lineNumber == 0:
tokens = line.split(' ')
if len(tokens) == 4:
ulx = float(tokens[0]) + \
float(georef.utm_east_offset)
lry = float(tokens[1]) + \
float(georef.utm_north_offset)
lrx = float(tokens[2]) + \
float(georef.utm_east_offset)
uly = float(tokens[3]) + \
float(georef.utm_north_offset)
log.ODM_INFO('Creating GeoTIFF')
kwargs = {
'ulx': ulx,
'uly': uly,
'lrx': lrx,
'lry': lry,
'epsg': georef.epsg,
'png': tree.odm_orthophoto_file,
'tiff': tree.odm_orthophoto_tif,
'log': tree.odm_orthophoto_tif_log
}
system.run('gdal_translate -a_ullr {ulx} {uly} {lrx} {lry} '
'-a_srs \"EPSG:{epsg}\" {png} {tiff} > {log}'.format(**kwargs))
geotiffcreated = True
if not geotiffcreated:
log.ODM_WARNING('No geo-referenced orthophoto created due '
'to missing geo-referencing or corner coordinates.')
else:
log.ODM_WARNING('Found a valid orthophoto in: %s' % tree.odm_orthophoto_file)
log.ODM_INFO('Running OMD OrthoPhoto Cell - Finished')
return ecto.OK if args['end_with'] != 'odm_orthophoto' else ecto.QUIT
if args.time:
system.benchmark(start_time, tree.benchmarking, 'Orthophoto')
def odm_orthophoto():
"""Run odm_orthophoto"""
print "\n - running orthophoto generation - " + system.now()
os.chdir(jobOptions["jobDir"])
try:
os.mkdir(jobOptions["jobDir"] + "/odm_orthophoto")
except:
pass
run("\"" + BIN_PATH + "/odm_orthophoto\" -inputFile " + jobOptions["jobDir"] + \
"-results/odm_texturing/odm_textured_model_geo.obj -logFile " + jobOptions["jobDir"] \
+ "/odm_orthophoto/odm_orthophoto_log.txt -outputFile " + jobOptions["jobDir"] \
+ "-results/odm_orthphoto.png -resolution 20.0 -outputCornerFile " + jobOptions["jobDir"] \
+ "/odm_orthphoto_corners.txt")
if "csString" not in jobOptions:
parse_coordinate_system()
geoTiffCreated = False
if ("csString" in jobOptions and
"utmEastOffset" in jobOptions and "utmNorthOffset" in jobOptions):
ulx = uly = lrx = lry = 0.0
with open(jobOptions["jobDir"] +
"/odm_orthphoto_corners.txt") as f:
for lineNumber, line in enumerate(f):
if lineNumber == 0:
tokens = line.split(' ')
if len(tokens) == 4:
ulx = float(tokens[0]) + \
float(jobOptions["utmEastOffset"])
lry = float(tokens[1]) + \
float(jobOptions["utmNorthOffset"])
lrx = float(tokens[2]) + \
float(jobOptions["utmEastOffset"])
uly = float(tokens[3]) + \
float(jobOptions["utmNorthOffset"])
print(" Creating GeoTIFF...")
sys.stdout.write(" ")
run("gdal_translate -a_ullr " + str(ulx) + " " + str(uly) + " " +
str(lrx) + " " + str(lry) + " -a_srs \"" + jobOptions["csString"] +
"\" " + jobOptions["jobDir"] + "-results/odm_orthphoto.png " +
jobOptions["jobDir"] + "-results/odm_orthphoto.tif")
geoTiffCreated = True
if not geoTiffCreated:
print " Warning: No geo-referenced orthophoto created due to missing geo-referencing or corner coordinates."
log.ODM_INFO('Running ODM OrthoPhoto Cell - Finished')
return ecto.OK if args.end_with != 'odm_orthophoto' else ecto.QUIT
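
To make the corner handling above concrete, here is a worked example with made-up numbers showing how a corners line plus the UTM offsets becomes the -a_ullr bounding box (the EPSG code and file names are placeholders):

# corners file line holds "ulx lry lrx uly" relative to the local UTM offsets
line = "100.0 200.0 300.0 400.0"
east, north = 555000.0, 4182000.0                      # utm_east_offset / utm_north_offset
t = [float(v) for v in line.split()]
ulx, lry, lrx, uly = t[0] + east, t[1] + north, t[2] + east, t[3] + north
cmd = ('gdal_translate -a_ullr %s %s %s %s -a_srs "EPSG:32617" '
       'odm_orthophoto.png odm_orthophoto.tif' % (ulx, uly, lrx, lry))
# -> gdal_translate -a_ullr 555100.0 4182400.0 555300.0 4182200.0 ...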


@ -7,14 +7,14 @@ from opendm import io
from opendm import system
from opendm import context
class ODMTexturingCell(ecto.Cell):
def declare_params(self, params):
params.declare("resize", 'resizes images by the largest side', 2400)
params.declare("resolution", 'The resolution of the output textures. Must be '
'greater than textureWithSize.', 4096)
params.declare("size", 'The resolution to rescale the images performing '
'the texturing.', 3600)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -23,8 +23,11 @@ class ODMTexturingCell(ecto.Cell):
outputs.declare("reconstruction", "Clusters output. list of ODMReconstructions", [])
def process(self, inputs, outputs):
log.ODM_INFO('Running OMD Texturing Cell')
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM Texturing Cell')
# get inputs
args = self.inputs.args
@ -34,8 +37,11 @@ class ODMTexturingCell(ecto.Cell):
system.mkdir_p(tree.odm_texturing)
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'odm_texturing'
rerun_cell = (args.rerun is not None and
args.rerun == 'odm_texturing') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'odm_texturing' in args.rerun_from)
# Undistort radial distortion
if not os.path.isdir(tree.odm_texturing_undistorted_image_path) or rerun_cell:
@ -60,8 +66,8 @@ class ODMTexturingCell(ecto.Cell):
(tree.opensfm_reconstruction))
if not io.file_exists(tree.odm_textured_model_obj) or rerun_cell:
log.ODM_DEBUG('Writting ODM Textured file in: %s' \
% tree.odm_textured_model_obj)
log.ODM_DEBUG('Writing ODM Textured file in: %s'
% tree.odm_textured_model_obj)
# odm_texturing definitions
kwargs = {
@ -78,14 +84,17 @@ class ODMTexturingCell(ecto.Cell):
}
# run texturing binary
system.run('{bin}/odm_texturing -bundleFile {bundle} ' \
'-imagesPath {imgs_path} -imagesListPath {imgs_list} ' \
'-inputModelPath {model} -outputFolder {out_dir}/ ' \
'-textureResolution {resolution} -bundleResizedTo {resize} ' \
'-textureWithSize {size} -logFile {log}'.format(**kwargs))
system.run('{bin}/odm_texturing -bundleFile {bundle} '
'-imagesPath {imgs_path} -imagesListPath {imgs_list} '
'-inputModelPath {model} -outputFolder {out_dir}/ '
'-textureResolution {resolution} -bundleResizedTo {resize} '
'-textureWithSize {size} -logFile {log}'.format(**kwargs))
else:
log.ODM_WARNING('Found a valid ODM Texture file in: %s' \
% tree.odm_textured_model_obj)
log.ODM_INFO('Running OMD Texturing Cell - Finished')
return ecto.OK if args['end_with'] != 'odm_texturing' else ecto.QUIT
log.ODM_WARNING('Found a valid ODM Texture file in: %s'
% tree.odm_textured_model_obj)
if args.time:
system.benchmark(start_time, tree.benchmarking, 'Texturing')
log.ODM_INFO('Running ODM Texturing Cell - Finished')
return ecto.OK if args.end_with != 'odm_texturing' else ecto.QUIT


@ -5,6 +5,7 @@ from opendm import io
from opendm import system
from opendm import context
class ODMOpenSfMCell(ecto.Cell):
def declare_params(self, params):
params.declare("use_exif_size", "The application arguments.", False)
@ -22,7 +23,10 @@ class ODMOpenSfMCell(ecto.Cell):
def process(self, inputs, outputs):
log.ODM_INFO('Running OMD OpenSfm Cell')
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM OpenSfM Cell')
# get inputs
tree = self.inputs.tree
@ -30,7 +34,7 @@ class ODMOpenSfMCell(ecto.Cell):
photos = self.inputs.photos
if not photos:
log.ODM_ERROR('Not enough photos in photos array to start OpenSfm')
log.ODM_ERROR('Not enough photos in photos array to start OpenSfM')
return ecto.QUIT
# create working directories
@ -38,11 +42,13 @@ class ODMOpenSfMCell(ecto.Cell):
system.mkdir_p(tree.pmvs)
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'opensfm'
rerun_cell = (args.rerun is not None and
args.rerun == 'opensfm') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'opensfm' in args.rerun_from)
### check if reconstruction was done before
# check if reconstruction was done before
if not io.file_exists(tree.opensfm_reconstruction) or rerun_cell:
# create file list
@ -60,7 +66,7 @@ class ODMOpenSfMCell(ecto.Cell):
"matching_gps_neighbors: %s" % self.params.matching_gps_neighbors
]
if args['matcher_distance']>0:
if args.matcher_distance > 0:
config.append("matching_gps_distance: %s" % self.params.matching_gps_distance)
# write config file
@ -69,32 +75,33 @@ class ODMOpenSfMCell(ecto.Cell):
fout.write("\n".join(config))
# run OpenSfM reconstruction
system.run('PYTHONPATH=%s %s/bin/run_all %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
else:
log.ODM_WARNING('Found a valid OpenSfm file in: %s' %
(tree.opensfm_reconstruction))
log.ODM_WARNING('Found a valid OpenSfM file in: %s' %
tree.opensfm_reconstruction)
### check if reconstruction was exported to bundler before
# check if reconstruction was exported to bundler before
if not io.file_exists(tree.opensfm_bundle_list) or rerun_cell:
# convert back to bundler's format
system.run('PYTHONPATH=%s %s/bin/export_bundler %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
else:
log.ODM_WARNING('Found a valid Bundler file in: %s' %
(tree.opensfm_reconstruction))
log.ODM_WARNING('Found a valid Bundler file in: %s' %
tree.opensfm_reconstruction)
### check if reconstruction was exported to pmvs before
# check if reconstruction was exported to pmvs before
if not io.file_exists(tree.pmvs_visdat) or rerun_cell:
# run PMVS converter
system.run('PYTHONPATH=%s %s/bin/export_pmvs %s --output %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm, tree.pmvs))
else:
log.ODM_WARNING('Found a valid CMVS file in: %s' % tree.pmvs_visdat)
log.ODM_INFO('Running OMD OpenSfm Cell - Finished')
return ecto.OK if args['end_with'] != 'opensfm' else ecto.QUIT
if args.time:
system.benchmark(start_time, tree.benchmarking, 'OpenSfM')
log.ODM_INFO('Running ODM OpenSfM Cell - Finished')
return ecto.OK if args.end_with != 'opensfm' else ecto.QUIT


@ -5,26 +5,26 @@ from opendm import log
from opendm import system
from opendm import context
class ODMPmvsCell(ecto.Cell):
def declare_params(self, params):
params.declare("level", 'The level in the image pyramid that is used '
'for the computation', 1)
params.declare("csize", 'Cell size controls the density of reconstructions', 2)
params.declare("thresh", 'A patch reconstruction is accepted as a success '
'and kept, if its associated photometric consistency '
'measure is above this threshold.', 0.7)
params.declare("wsize", 'pmvs samples wsize x wsize pixel colors from '
'each image to compute photometric consistency '
'score. For example, when wsize=7, 7x7=49 pixel '
'colors are sampled in each image. Increasing the '
'value leads to more stable reconstructions, but '
'the program becomes slower.', 7)
params.declare("min_imgs", 'Each 3D point must be visible in at least '
'minImageNum images for being reconstructed. 3 is '
'suggested in general.', 3)
params.declare("cores", 'The maximum number of cores to use in dense '
' reconstruction.', context.num_cores)
def declare_io(self, params, inputs, outputs):
inputs.declare("tree", "Struct with paths", [])
@ -33,7 +33,10 @@ class ODMPmvsCell(ecto.Cell):
outputs.declare("reconstruction", "list of ODMReconstructions", [])
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running OMD PMVS Cell')
# get inputs
@ -41,8 +44,11 @@ class ODMPmvsCell(ecto.Cell):
tree = self.inputs.tree
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'pmvs'
rerun_cell = (args.rerun is not None and
args.rerun == 'pmvs') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'pmvs' in args.rerun_from)
if not io.file_exists(tree.pmvs_model) or rerun_cell:
log.ODM_DEBUG('Creating dense pointcloud in: %s' % tree.pmvs_model)
@ -59,15 +65,18 @@ class ODMPmvsCell(ecto.Cell):
}
# generate pmvs2 options
system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} ' \
system.run('{bin} {prefix}/ {level} {csize} {thresh} {wsize} '
'{min_imgs} {cores}'.format(**kwargs))
# run pmvs2
system.run('%s %s/ option-0000' % \
(context.pmvs2_path, tree.pmvs_rec_path))
system.run('%s %s/ option-0000' %
(context.pmvs2_path, tree.pmvs_rec_path))
else:
log.ODM_WARNING('Found a valid PMVS file in %s' % tree.pmvs_model)
log.ODM_INFO('Running OMD PMVS Cell - Finished')
return ecto.OK if args['end_with'] != 'pmvs' else ecto.QUIT
if args.time:
system.benchmark(start_time, tree.benchmarking, 'PMVS')
log.ODM_INFO('Running ODM PMVS Cell - Finished')
return ecto.OK if args.end_with != 'pmvs' else ecto.QUIT


@ -7,7 +7,8 @@ from opendm import system
from opendm import io
from opendm import types
class ODMResizeCell(ecto.Cell):
def declare_params(self, params):
params.declare("resize_to", "resizes images by the largest side", 2400)
@ -19,6 +20,9 @@ class ODMResizeCell(ecto.Cell):
def process(self, inputs, outputs):
# Benchmarking
start_time = system.now_raw()
log.ODM_INFO('Running ODM Resize Cell')
# get inputs
@ -29,15 +33,22 @@ class ODMResizeCell(ecto.Cell):
if not photos:
log.ODM_ERROR('Not enough photos in photos to resize')
return ecto.QUIT
if self.params.resize_to <= 0:
log.ODM_ERROR('Resize parameter must be greater than 0')
return ecto.QUIT
# create working directory
system.mkdir_p(tree.dataset_resize)
log.ODM_DEBUG('Resizing dataset to: %s' % tree.dataset_resize)
# check if we rerun cell or not
rerun_cell = args['rerun'] is not None \
and args['rerun'] == 'resize'
rerun_cell = (args.rerun is not None and
args.rerun == 'resize') or \
(args.rerun_all) or \
(args.rerun_from is not None and
'resize' in args.rerun_from)
# loop over photos
for photo in photos:
@ -53,6 +64,8 @@ class ODMResizeCell(ecto.Cell):
img = cv2.imread(path_file)
# compute new size
max_side = max(img.shape[0], img.shape[1])
if max_side <= self.params.resize_to:
log.ODM_WARNING('Resize Parameter is greater than the largest side of the image')
ratio = float(self.params.resize_to) / float(max_side)
img_r = cv2.resize(img, None, fx=ratio, fy=ratio)
# write image with opencv
@ -65,8 +78,8 @@ class ODMResizeCell(ecto.Cell):
# copy metadata
old_meta.copy(new_meta)
# update metadata size
new_meta['Exif.Photo.PixelXDimension'].value = img_r.shape[0]
new_meta['Exif.Photo.PixelYDimension'].value = img_r.shape[1]
new_meta['Exif.Photo.PixelXDimension'] = img_r.shape[0]
new_meta['Exif.Photo.PixelYDimension'] = img_r.shape[1]
new_meta.write()
# update photos array with new values
photo.path_file = new_path_file
@ -75,17 +88,21 @@ class ODMResizeCell(ecto.Cell):
photo.update_focal()
# log message
log.ODM_DEBUG('Resized %s | dimensions: %s' % \
(photo.filename, img_r.shape))
log.ODM_DEBUG('Resized %s | dimensions: %s' %
(photo.filename, img_r.shape))
else:
# log message
log.ODM_WARNING('Already resized %s | dimensions: %s x %s' % \
(photo.filename, photo.width, photo.height))
log.ODM_WARNING('Already resized %s | dimensions: %s x %s' %
(photo.filename, photo.width, photo.height))
log.ODM_INFO('Resized %s images' % len(photos))
# append photos to cell output
self.outputs.photos = photos
if args.time:
system.benchmark(start_time, tree.benchmarking, 'Resizing')
log.ODM_INFO('Running ODM Resize Cell - Finished')
return ecto.OK if args['end_with'] != 'resize' else ecto.QUIT
return ecto.OK if args.end_with != 'resize' else ecto.QUIT
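
As a quick worked example of the ratio computed above (illustrative numbers only): a 4000x3000 image with the default resize_to of 2400 gives ratio 2400/4000 = 0.6, so the image is rescaled to 2400x1800 before its EXIF pixel dimensions are rewritten.

resize_to = 2400
h, w = 3000, 4000                               # img.shape[0], img.shape[1]
ratio = float(resize_to) / max(h, w)            # 0.6
new_w, new_h = int(w * ratio), int(h * ratio)   # 2400 x 1800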

Binary file not shown.

After

Width:  |  Height:  |  Size: 120 KiB


@ -0,0 +1,20 @@
WGS84 UTM 32N
274914.738,4603349.014,400,2121.02804,-405.91779,DJI_0068.JPG
274914.738,4603349.014,400,2113.45101,-52.37843,DJI_0104.JPG
274914.738,4603349.014,400,2251.05140,-2510.08324,1JI_0076.JPG
274915.887,4603307.715,400,2189.08075,-1266.93925,DJI_0068.JPG
274915.887,4603307.715,400,1249.52906,-136.41574,DJI_0104.JPG
274915.887,4603307.715,400,561.05432,-1057.69568,DJI_0083.JPG
274915.887,4603307.715,400,2239.44947,-1593.32652,1JI_0076.JPG
274985.284,4603319.756,400,3619.20999,-978.85879,DJI_0068.JPG
274985.284,4603319.756,400,1506.89252,-1535.16355,DJI_0104.JPG
274985.284,4603319.756,400,783.53534,-1793.26811,1JI_0076.JPG
274920.710,4603258.802,400,2342.84463,-2285.96671,DJI_0068.JPG
274920.710,4603258.802,400,305.37748,-303.36595,DJI_0104.JPG
274920.710,4603258.802,400,1531.52745,-896.39311,DJI_0083.JPG
274920.710,4603258.802,400,3671.08645,-2198.67114,DJI_0063.JPG
274920.710,4603258.802,400,1007.75409,-1286.05432,DJI_0074.JPG
274879.571,4603204.279,400,2693.25350,-1682.97313,DJI_0083.JPG
274879.571,4603204.279,400,2513.14252,-1413.82886,DJI_0063.JPG
274879.571,4603204.279,400,2150.56951,-2093.38493,DJI_0074.JPG


@ -0,0 +1,66 @@
Do not expect more accurate results with these coordinates!
These GCPs are reverse geotagged from a GeoTIFF created by OpenDroneMap,
with WGS84 coordinates in the EXIF tags.
There is also no height value for the points
(set to 400 m for testing purposes...).
-----------
CRS:
----
WGS 84 / UTM zone 17N
EPSG: 32617
-----------
GCP point values:
----
GCP_PointNr,X_Coordinate,Y_Coordinate,Z_Coordinate
1,274914.738,4603349.014,400
2,274915.887,4603307.715,400
3,274985.284,4603319.756,400
4,274920.710,4603258.802,400
5,274879.571,4603204.279,400
-----------
-----------
Better human readable gcp_list.txt
----
WGS84 UTM 32N
## x1 y1 z1 pixelx1 pixely1 imagename1
## 0/0 of Pixel Coordinate = Upper left hand corner
# GCP 1
274914.738,4603349.014,400,2121.02804,-405.91779,DJI_0068.JPG
274914.738,4603349.014,400,2113.45101,-52.37843,DJI_0104.JPG
274914.738,4603349.014,400,2251.05140,-2510.08324,1JI_0076.JPG
# GCP 2
274915.887,4603307.715,400,2189.08075,-1266.93925,DJI_0068.JPG
274915.887,4603307.715,400,1249.52906,-136.41574,DJI_0104.JPG
274915.887,4603307.715,400,561.05432,-1057.69568,DJI_0083.JPG
274915.887,4603307.715,400,2239.44947,-1593.32652,1JI_0076.JPG
# GCP 3
274985.284,4603319.756,400,3619.20999,-978.85879,DJI_0068.JPG
274985.284,4603319.756,400,1506.89252,-1535.16355,DJI_0104.JPG
274985.284,4603319.756,400,783.53534,-1793.26811,1JI_0076.JPG
# GCP 4
274920.710,4603258.802,400,2342.84463,-2285.96671,DJI_0068.JPG
274920.710,4603258.802,400,305.37748,-303.36595,DJI_0104.JPG
274920.710,4603258.802,400,1531.52745,-896.39311,DJI_0083.JPG
274920.710,4603258.802,400,3671.08645,-2198.67114,DJI_0063.JPG
274920.710,4603258.802,400,1007.75409,-1286.05432,DJI_0074.JPG
# GCP 5
274879.571,4603204.279,400,2693.25350,-1682.97313,DJI_0083.JPG
274879.571,4603204.279,400,2513.14252,-1413.82886,DJI_0063.JPG
274879.571,4603204.279,400,2150.56951,-2093.38493,DJI_0074.JPG
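
A minimal parsing sketch for the comma-separated gcp_list.txt format described above (the helper name and return shape are just for illustration, not part of OpenDroneMap):

def parse_gcp_list(path):
    entries = []
    with open(path) as f:
        crs = f.readline().strip()                 # e.g. "WGS84 UTM 32N"
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):   # skip blanks and "# GCP n" comment lines
                continue
            x, y, z, px, py, image = line.split(',')
            entries.append((float(x), float(y), float(z),
                            float(px), float(py), image))
    return crs, entries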


@ -0,0 +1 @@
c46a1e3a765b6974b8b5995d472eed6c620eeb09


@ -0,0 +1 @@
433a51fcac348bc741b98f2cb9c098b03b6ab26c


@ -0,0 +1 @@
35f630bafe453d675c773f1d8304184a4c21e468


@ -0,0 +1 @@
5af5613cfaa98bfde835e10353cfb85c1660b5dc


@ -0,0 +1 @@
93aae65839132a59f768335b8e73949ba1e2f359


@ -0,0 +1 @@
f410a8ef95e061299711be032f383820a8ad1e15


@ -0,0 +1 @@
188f3a590c8a91558b30a5b6a224bdfba9d82dd5


@ -0,0 +1 @@
4f1a66fedadd856e1dc3c1a0cde1814ea8a72894


@ -0,0 +1 @@
409e9b1d887a9e9cba52ca8ecacc33e05c0c9314


@ -0,0 +1 @@
16065e90bb3ea4008e4a27d0f49ec66f259fa913


@ -0,0 +1 @@
1917e099a3b2adcabebd7041d37d6b7977bf30c5


@ -0,0 +1 @@
b8706d4e9b67915b9b0975adaee56e9630606045


@ -0,0 +1 @@
dce4dda3f9a2aa68ff86e411b995c7662826b170


@ -0,0 +1 @@
7067afed67b2cf72fb498055a095169778211f69


@ -0,0 +1 @@
4aea8e7c5542c1f79fcd95abb0b1076b31e42f31


@ -0,0 +1 @@
8246e21532c104b967234af499dd8bbf55c5bf2e


@ -0,0 +1 @@
2f85bde952fd7dbd9d735eee6a8505b458b9844c


@ -0,0 +1 @@
db4f33b3262068a951aeaed6aa17d8a40b40cf23


@ -0,0 +1 @@
2efd6b3d9032816897311f49a9238eae9d04c635


@ -0,0 +1 @@
6de1552905c42662b59285042312806b05cc7e42


@ -0,0 +1 @@
1bf5aecc149a06dd92c962a8da39aaaa43f7d03e


@ -0,0 +1 @@
29effdc9362a0662909affd7be8369c7ae5d6c54


@ -0,0 +1 @@
952c2bf088e08a84326e22b204dbc4d170e49bd7


@ -0,0 +1 @@
757beccea99b76190c46db0a40cdf46f08427ce7


@ -0,0 +1 @@
a238ee569ed3dd6e7be97c44a794a8830661b082


@ -0,0 +1 @@
a5c4a9a482b8fb2a9c6619e6b8092e7748fe696f


@ -0,0 +1 @@
9dc06158dcaacb40103bce64114c4942523c8846


@ -0,0 +1 @@
c88a2f12dddf24224c8389bb40099a5dbb272c76


@ -0,0 +1 @@
5e7f8618b8ec7dd675dc53d96ef95e50cc424529


@ -0,0 +1 @@
1f5562dd1af3a472a19ceb53bee1b44719d43992


@ -0,0 +1 @@
e82b039272a860c403dc0a0d7738b111be06ab7f


@ -0,0 +1 @@
eaed567e0e494c38b00a25e743d2e418fd28e0d5


@ -0,0 +1 @@
27f5ffca875a9bd1c924a5b5553b50ba26d5855a


@ -0,0 +1 @@
a60b91fb98bbe1b77f94c4c32de88a5d84c55abf


@ -0,0 +1 @@
4e52fd0fe248e67110ccc4af0de7ec40cc8eef2b


@ -0,0 +1 @@
2c561456c52dd0404e8354c3f3d34265062f5e14


@ -0,0 +1 @@
301103cda49808fbd3f5f770ae8dbf2c0de4a9f6


@ -0,0 +1 @@
22fb70047ae075f9a34bcd0e03351da6290beeab


@ -0,0 +1 @@
e232f2ffb01ea6b78feeb6c445ea479c75d66622


@ -0,0 +1 @@
0a8b2f270baef4ef0b151ebe7a3b627ea98dbbd4


@ -0,0 +1 @@
05906528782ca67fe4bc330f895c1a48f88d59fe


@ -0,0 +1 @@
c007bc6aff96b971c2c584ff6050ee9090a7a7bb


@ -0,0 +1 @@
5321686860c05bf37bfdbd4368e064f13c412ae6


@ -0,0 +1 @@
d72b03f209763d1aed63daa33f54636ec0e42a26


@ -0,0 +1 @@
abc9f9e31c8cb5a41fb5a01edf85a4eb36ccdb6c


@ -0,0 +1 @@
1266e82d092f03a8cbd53def7ad79e30983c095f


@ -0,0 +1 @@
37242dbfb1a1f495dd1a0c3ee5a52e9c68e459f9


@ -0,0 +1 @@
93fa8cc5c29c5d1aabbfdbffc92785284d457455


@ -0,0 +1 @@
91d866ec9b9204891dc33c02b243d35da4163bd8


@ -0,0 +1 @@
49be66934b063f0284799eb1c53a551cd9540104


@ -0,0 +1 @@
911346e6a894beda70db0aec8762533098038293


@ -0,0 +1 @@
a70e736bfbacc3c7bcfafcd0cfc60460f559bc4b


@ -0,0 +1 @@
b6f182f047b12bb0f07a7b50657a95b4a7d3128c


@ -0,0 +1 @@
06a3532652e03b0164705ce81d6f83610115266a


@ -0,0 +1 @@
adc7d7549ed6fb1a5627dbcfe0b8f98aa4ff682d


@ -0,0 +1 @@
76d8c1e5e431671f309025fb1b9e137c3cab56ce


@ -0,0 +1 @@
91edf5595835d16721754e873eb9ac195681a5d8

Some files were not shown because too many files have changed in this diff.