PostGIS, Python 3.5, raster datatype inclusion in task, db tests

pull/43/head
Piero Toffanin 2016-11-07 17:25:33 -05:00
parent de92deaa6c
commit 1979f8382a
13 changed files with 198 additions and 151 deletions

.gitignore (vendored)
View file

@@ -91,3 +91,4 @@ ENV/
 node_modules/
 webpack-stats.json
 pip-selfcheck.json
+.idea/

View file

@@ -1,4 +1,4 @@
-FROM python:2.7
+FROM python:3.5
 MAINTAINER Piero Toffanin <pt@masseranolabs.com>
 ENV PYTHONUNBUFFERED 1
@@ -18,7 +18,7 @@ RUN git submodule init
 RUN git submodule update

 # Install Node.js + npm requirements for testing node-OpenDroneMap and React
-RUN curl --silent --location https://deb.nodesource.com/setup_6.x | bash -
+RUN curl --silent --location https://deb.nodesource.com/setup_7.x | bash -
 RUN apt-get install -y nodejs

 WORKDIR /webodm/nodeodm/external/node-OpenDroneMap

View file

@@ -2,7 +2,7 @@
 [![Build Status](https://travis-ci.org/OpenDroneMap/WebODM.svg?branch=master)](https://travis-ci.org/OpenDroneMap/WebODM)

-An open source solution for drone image processing. The long term vision includes a web interface, API and Mission Planner.
+A free, user-friendly application and API for drone image processing.

 ![Alt text](/screenshots/ui-mockup.png?raw=true "WebODM")
@@ -35,7 +35,8 @@ Linux users can connect to 127.0.0.1.
 If you want to run WebODM natively, you will need to install:
 * PostgreSQL (>= 9.5)
-* Python 2.7
+* PostGIS 2.3
+* Python 3.5

 Then these steps should be sufficient to get you up and running:
@@ -48,7 +49,7 @@ Create a `WebODM\webodm\local_settings.py` file containing:
 ```
 DATABASES = {
     'default': {
-        'ENGINE': 'django.db.backends.postgresql',
+        'ENGINE': 'django.contrib.gis.db.backends.postgis',
         'NAME': 'webodm_dev',
         'USER': 'postgres',
         'PASSWORD': 'postgres',
@@ -68,15 +69,26 @@ webpack
 chmod +x start.sh && ./start.sh
 ```

+If you are getting a `rt_raster_gdal_warp: Could not create GDAL transformation object for output dataset creation`, make sure that your PostGIS installation has PROJ support:
+```
+SELECT PostGIS_Full_Version();
+```
+
+You may also need to set the environment variable PROJSO to the .so or .dll projection library your PostGIS is using. This just needs to have the name of the file. So for example on Windows, you would in Control Panel -> System -> Environment Variables add a system variable called PROJSO and set it to libproj.dll (if you are using proj 4.6.1). You'll have to restart your PostgreSQL service/daemon after this change. [http://postgis.net/docs/manual-2.0/RT_ST_Transform.html](http://postgis.net/docs/manual-2.0/RT_ST_Transform.html)
+
 ## Roadmap
 - [X] User Registration / Authentication
 - [X] UI mockup
-- [ ] Task Processing
+- [X] Task Processing
 - [ ] Model display (using Cesium/Leaflet) for both 2D and 3D outputs.
 - [X] Cluster management and setup.
 - [ ] Mission Planner
 - [X] API
 - [ ] Documentation
+- [ ] Android Mobile App
+- [ ] iOS Mobile App
+- [ ] Processing Nodes Volunteer Network
 - [X] Unit Testing

 ## Terminology
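For reference, the `PostGIS_Full_Version()` check above can also be run through Django once the `postgis` engine is configured; a minimal sketch using the default database connection (the helper name is illustrative, not part of WebODM):

```python
from django.db import connection

def postgis_full_version():
    """Return PostGIS_Full_Version(); the string normally reports the PROJ build PostGIS was compiled against."""
    with connection.cursor() as cursor:
        cursor.execute("SELECT PostGIS_Full_Version();")
        return cursor.fetchone()[0]
```

If the returned string does not mention PROJ, raster reprojection (the `rt_raster_gdal_warp` step) will fail.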

View file

@@ -14,10 +14,10 @@ class TaskIDsSerializer(serializers.BaseSerializer):
 class TaskSerializer(serializers.ModelSerializer):
     project = serializers.PrimaryKeyRelatedField(queryset=models.Project.objects.all())
     processing_node = serializers.PrimaryKeyRelatedField(queryset=ProcessingNode.objects.all())
-    images_count = serializers.IntegerField(
-        source='imageupload_set.count',
-        read_only=True
-    )
+    images_count = serializers.SerializerMethodField()
+
+    def get_images_count(self, obj):
+        return obj.imageupload_set.count()

     class Meta:
         model = models.Task
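`SerializerMethodField` is a read-only field that Django REST Framework fills by calling `get_<field_name>(self, obj)` on the serializer at serialization time, which replaces the `source='imageupload_set.count'` lookup used before. A small self-contained sketch of the pattern (the serialized object and its `imageupload_set` are stand-ins, not the project's real model):

```python
from rest_framework import serializers

class CountingSerializer(serializers.Serializer):
    # Read-only: DRF calls get_images_count(obj) when rendering the field
    images_count = serializers.SerializerMethodField()

    def get_images_count(self, obj):
        # obj is whatever instance is being serialized; here we assume it
        # exposes an imageupload_set-style related manager
        return obj.imageupload_set.count()

# Hypothetical usage:
# CountingSerializer(task).data  ->  {'images_count': <number of uploaded images>}
```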

View file

@@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
 from django.apps import AppConfig

 class MainConfig(AppConfig):

Binary file not shown.

View file

@@ -1,8 +1,6 @@
-from __future__ import unicode_literals
-
 import time, os
-import traceback
+from django.contrib.gis.gdal import GDALRaster
 from django.db import models
 from django.db.models import signals
 from django.contrib.gis.db import models as gismodels
@@ -154,143 +152,144 @@ class Task(models.Model):
         ready to be processed execute some logic. This could be communication
         with a processing node or executing a pending action.
         """
-        try:
-            if self.processing_node:
-                # Need to process some images (UUID not yet set)?
-                if not self.uuid:
-                    logger.info("Processing... {}".format(self))
-
-                    images = [image.path() for image in self.imageupload_set.all()]
-
-                    try:
-                        # This takes a while
-                        uuid = self.processing_node.process_new_task(images, self.name, self.options)
-
-                        # Refresh task object before committing change
-                        self.refresh_from_db()
-                        self.uuid = uuid
-                        self.save()
-
-                        # TODO: log process has started processing
-                    except ProcessingException as e:
-                        self.set_failure(e.message)
-
-            if self.pending_action is not None:
-                try:
-                    if self.pending_action == self.PendingActions.CANCEL:
-                        # Do we need to cancel the task on the processing node?
-                        logger.info("Canceling task {}".format(self))
-                        if self.processing_node and self.uuid:
-                            self.processing_node.cancel_task(self.uuid)
-                            self.pending_action = None
-                            self.save()
-                        else:
-                            raise ProcessingException("Cannot cancel a task that has no processing node or UUID")
-
-                    elif self.pending_action == self.PendingActions.RESTART:
-                        logger.info("Restarting task {}".format(self))
-                        if self.processing_node and self.uuid:
-
-                            # Check if the UUID is still valid, as processing nodes purge
-                            # results after a set amount of time, the UUID might have eliminated.
-                            try:
-                                info = self.processing_node.get_task_info(self.uuid)
-                                uuid_still_exists = info['uuid'] == self.uuid
-                            except ProcessingException:
-                                uuid_still_exists = False
-
-                            if uuid_still_exists:
-                                # Good to go
-                                self.processing_node.restart_task(self.uuid)
-                            else:
-                                # Task has been purged (or processing node is offline)
-                                # TODO: what if processing node went offline?
-                                # Process this as a new task
-                                # Removing its UUID will cause the scheduler
-                                # to process this the next tick
-                                self.uuid = None
-
-                            self.console_output = ""
-                            self.processing_time = -1
-                            self.status = None
-                            self.last_error = None
-                            self.pending_action = None
-                            self.save()
-                        else:
-                            raise ProcessingException("Cannot restart a task that has no processing node or UUID")
-
-                    elif self.pending_action == self.PendingActions.REMOVE:
-                        logger.info("Removing task {}".format(self))
-                        if self.processing_node and self.uuid:
-                            # Attempt to delete the resources on the processing node
-                            # We don't care if this fails, as resources on processing nodes
-                            # Are expected to be purged on their own after a set amount of time anyway
-                            try:
-                                self.processing_node.remove_task(self.uuid)
-                            except ProcessingException:
-                                pass
-
-                        # What's more important is that we delete our task properly here
-                        self.delete()
-
-                        # Stop right here!
-                        return
-
-                except ProcessingException as e:
-                    self.last_error = e.message
-                    self.save()
-
-            if self.processing_node:
-                # Need to update status (first time, queued or running?)
-                if self.uuid and self.status in [None, status_codes.QUEUED, status_codes.RUNNING]:
-                    # Update task info from processing node
-                    try:
-                        info = self.processing_node.get_task_info(self.uuid)
-
-                        self.processing_time = info["processingTime"]
-                        self.status = info["status"]["code"]
-
-                        current_lines_count = len(self.console_output.split("\n")) - 1
-                        self.console_output += self.processing_node.get_task_console_output(self.uuid, current_lines_count)
-
-                        if "errorMessage" in info["status"]:
-                            self.last_error = info["status"]["errorMessage"]
-
-                        # Has the task just been canceled, failed, or completed?
-                        if self.status in [status_codes.FAILED, status_codes.COMPLETED, status_codes.CANCELED]:
-                            logger.info("Processing status: {} for {}".format(self.status, self))
-
-                            if self.status == status_codes.COMPLETED:
-                                try:
-                                    orthophoto_stream = self.processing_node.download_task_asset(self.uuid, "orthophoto.tif")
-                                    orthophoto_filename = "orthophoto_{}.tif".format(int(time.time()))
-                                    orthophoto_path = os.path.join(settings.MEDIA_ROOT,
-                                                                   assets_directory_path(self.id, self.project.id, orthophoto_filename))
-
-                                    # Save to disk
-                                    with open(orthophoto_path, 'wb') as fd:
-                                        for chunk in orthophoto_stream.iter_content(4096):
-                                            fd.write(chunk)
-
-                                    # Create raster layer
-                                    self.orthophoto = raster_models.RasterLayer.objects.create(rasterfile=orthophoto_path)
-                                    self.save()
-                                except ProcessingException as e:
-                                    self.set_failure(e.message)
-                            else:
-                                # FAILED, CANCELED
-                                self.save()
-                        else:
-                            # Still waiting...
-                            self.save()
-                    except ProcessingException as e:
-                        self.set_failure(e.message)
-        except Exception as e:
-            logger.error("Uncaught error: {} {}".format(e.message, traceback.format_exc()))
+        if self.processing_node:
+            # Need to process some images (UUID not yet set)?
+            if not self.uuid:
+                logger.info("Processing... {}".format(self))
+
+                images = [image.path() for image in self.imageupload_set.all()]
+
+                try:
+                    # This takes a while
+                    uuid = self.processing_node.process_new_task(images, self.name, self.options)
+
+                    # Refresh task object before committing change
+                    self.refresh_from_db()
+                    self.uuid = uuid
+                    self.save()
+
+                    # TODO: log process has started processing
+                except ProcessingException as e:
+                    self.set_failure(str(e))
+
+        if self.pending_action is not None:
+            try:
+                if self.pending_action == self.PendingActions.CANCEL:
+                    # Do we need to cancel the task on the processing node?
+                    logger.info("Canceling task {}".format(self))
+                    if self.processing_node and self.uuid:
+                        self.processing_node.cancel_task(self.uuid)
+                        self.pending_action = None
+                        self.save()
+                    else:
+                        raise ProcessingException("Cannot cancel a task that has no processing node or UUID")
+
+                elif self.pending_action == self.PendingActions.RESTART:
+                    logger.info("Restarting task {}".format(self))
+                    if self.processing_node and self.uuid:
+
+                        # Check if the UUID is still valid, as processing nodes purge
+                        # results after a set amount of time, the UUID might have eliminated.
+                        try:
+                            info = self.processing_node.get_task_info(self.uuid)
+                            uuid_still_exists = info['uuid'] == self.uuid
+                        except ProcessingException:
+                            uuid_still_exists = False
+
+                        if uuid_still_exists:
+                            # Good to go
+                            self.processing_node.restart_task(self.uuid)
+                        else:
+                            # Task has been purged (or processing node is offline)
+                            # TODO: what if processing node went offline?
+                            # Process this as a new task
+                            # Removing its UUID will cause the scheduler
+                            # to process this the next tick
+                            self.uuid = None
+
+                        self.console_output = ""
+                        self.processing_time = -1
+                        self.status = None
+                        self.last_error = None
+                        self.pending_action = None
+                        self.save()
+                    else:
+                        raise ProcessingException("Cannot restart a task that has no processing node or UUID")
+
+                elif self.pending_action == self.PendingActions.REMOVE:
+                    logger.info("Removing task {}".format(self))
+                    if self.processing_node and self.uuid:
+                        # Attempt to delete the resources on the processing node
+                        # We don't care if this fails, as resources on processing nodes
+                        # Are expected to be purged on their own after a set amount of time anyway
+                        try:
+                            self.processing_node.remove_task(self.uuid)
+                        except ProcessingException:
+                            pass

+                    # What's more important is that we delete our task properly here
+                    self.delete()
+
+                    # Stop right here!
+                    return
+
+            except ProcessingException as e:
+                self.last_error = str(e)
+                self.save()
+
+        if self.processing_node:
+            # Need to update status (first time, queued or running?)
+            if self.uuid and self.status in [None, status_codes.QUEUED, status_codes.RUNNING]:
+                # Update task info from processing node
+                try:
+                    info = self.processing_node.get_task_info(self.uuid)
+
+                    self.processing_time = info["processingTime"]
+                    self.status = info["status"]["code"]
+
+                    current_lines_count = len(self.console_output.split("\n")) - 1
+                    self.console_output += self.processing_node.get_task_console_output(self.uuid, current_lines_count)
+
+                    if "errorMessage" in info["status"]:
+                        self.last_error = info["status"]["errorMessage"]
+
+                    # Has the task just been canceled, failed, or completed?
+                    if self.status in [status_codes.FAILED, status_codes.COMPLETED, status_codes.CANCELED]:
+                        logger.info("Processing status: {} for {}".format(self.status, self))
+
+                        if self.status == status_codes.COMPLETED:
+                            try:
+                                orthophoto_stream = self.processing_node.download_task_asset(self.uuid, "orthophoto.tif")
+                                orthophoto_path = os.path.join(settings.MEDIA_ROOT,
+                                                               assets_directory_path(self.id, self.project.id, "orthophoto.tif"))
+
+                                # Save to disk original photo
+                                with open(orthophoto_path, 'wb') as fd:
+                                    for chunk in orthophoto_stream.iter_content(4096):
+                                        fd.write(chunk)
+
+                                # Add to database another copy
+                                self.orthophoto = GDALRaster(orthophoto_path, write=True)
+
+                                # TODO: Create tiles
+
+                                self.save()
+                            except ProcessingException as e:
+                                self.set_failure(str(e))
+                        else:
+                            # FAILED, CANCELED
+                            self.save()
+                    else:
+                        # Still waiting...
+                        self.save()
+                except ProcessingException as e:
+                    self.set_failure(str(e))

     def set_failure(self, error_message):
         logger.error("{} ERROR: {}".format(self, error_message))
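The completed orthophoto is now written to disk and then assigned to the model's raster field as a `django.contrib.gis.gdal.GDALRaster`. A minimal sketch of what such an object exposes (the file path below is made up for illustration):

```python
from django.contrib.gis.gdal import GDALRaster

# Hypothetical path to a downloaded GeoTIFF
raster = GDALRaster("/tmp/orthophoto.tif", write=True)

raster.srid        # spatial reference ID read from the file
raster.width       # pixel width of the dataset
raster.height      # pixel height of the dataset
len(raster.bands)  # number of bands in the GeoTIFF
```

When the model is saved, the raster ends up in the PostGIS raster column and may be warped in the process, which is what the new `test_db` test below checks (SRID 4326 and a width different from the original file).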

View file

@@ -1,4 +1,4 @@
-import logging
+import logging, traceback
 from apscheduler.schedulers.background import BackgroundScheduler
 from apscheduler.schedulers import SchedulerAlreadyRunningError, SchedulerNotRunningError
 from threading import Thread, Lock
@@ -74,12 +74,15 @@ def process_pending_tasks():
     tasks_mutex.release()

     def process(task):
-        task.process()
-
-        # Might have been deleted
-        if task.pk is not None:
-            task.processing_lock = False
-            task.save()
+        try:
+            task.process()
+
+            # Might have been deleted
+            if task.pk is not None:
+                task.processing_lock = False
+                task.save()
+        except Exception as e:
+            logger.error("Uncaught error: {} {}".format(e, traceback.format_exc()))

     if tasks.count() > 0:
         pool = ThreadPool(tasks.count())
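Each pending task's `process()` now runs inside its own try/except within the thread pool, so an unexpected error in one task is logged without breaking the others. A rough standalone sketch of that pattern (this uses Python's thread-based pool from `multiprocessing.dummy`; WebODM's actual `ThreadPool` import and task objects may differ):

```python
import logging, traceback
from multiprocessing.dummy import Pool as ThreadPool  # thread pool with the multiprocessing.Pool API

logger = logging.getLogger(__name__)

def process(task):
    try:
        task.process()  # may raise anything
    except Exception as e:
        # Log and keep going; other tasks in the pool are unaffected
        logger.error("Uncaught error: {} {}".format(e, traceback.format_exc()))

def process_all(tasks):
    if tasks:
        pool = ThreadPool(len(tasks))
        pool.map(process, tasks)
        pool.close()
        pool.join()
```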

View file

@@ -127,7 +127,8 @@ class TaskListItem extends React.Component {
     const doAction = () => {
       this.setState({actionButtonsDisabled: true});

-      $.post(`/api/projects/${this.state.task.project}/tasks/${this.state.task.id}/${action}/`,
+      let url = `/api/projects/${this.state.task.project}/tasks/${this.state.task.id}/${action}/`;
+      $.post(url,
           {
             uuid: this.state.task.uuid
           }

View file

@@ -0,0 +1,20 @@
+from django.contrib.gis.gdal import GDALRaster
+from .classes import BootTestCase
+from app.models import Task, Project
+import os
+
+class TestApi(BootTestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_db(self):
+        # Make sure we can use PostGIS raster type
+        task = Task.objects.create(project=Project.objects.get(pk=1),
+                                   orthophoto=GDALRaster(os.path.join("app", "fixtures", "orthophoto.tif"), write=True))
+
+        task.refresh_from_db()
+        self.assertTrue(task.orthophoto.srid == 4326)
+        self.assertTrue(task.orthophoto.width == 252) # not original size, warp happened

View file

@@ -1,4 +1,15 @@
 FROM postgres:9.5
 MAINTAINER Piero Toffanin <pt@masseranolabs.com>

+ENV POSTGIS_MAJOR 2.3
+ENV POSTGIS_VERSION 2.3.0+dfsg-2.pgdg80+1
+
+RUN apt-get update \
+      && apt-get install -y --no-install-recommends \
+           postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR=$POSTGIS_VERSION \
+           postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR-scripts=$POSTGIS_VERSION \
+           postgis=$POSTGIS_VERSION \
+      && rm -rf /var/lib/apt/lists/*
+
 EXPOSE 5432
 COPY init.sql /docker-entrypoint-initdb.d/init-db.sql

View file

@@ -1,2 +1,3 @@
 ALTER USER postgres PASSWORD 'postgres';
 CREATE DATABASE webodm_dev;
+ALTER DATABASE webodm_dev SET postgis.gdal_enabled_drivers TO 'GTiff';

View file

@@ -1,3 +1,4 @@
+import requests
 from django.test import TestCase
 from django.utils import six
 import subprocess, time
@@ -6,7 +7,7 @@ from .models import ProcessingNode
 from .api_client import ApiClient
 from requests.exceptions import ConnectionError
 from .exceptions import ProcessingException
-import status_codes
+from . import status_codes

 current_dir = path.dirname(path.realpath(__file__))
@@ -76,7 +77,7 @@ class TestClientApi(TestCase):
         online_node = ProcessingNode.objects.get(pk=1)

         # Can call info(), options()
-        self.assertTrue(type(api.info()['version']) in [str, unicode])
+        self.assertTrue(type(api.info()['version']) == str)
         self.assertTrue(len(api.options()) > 0)

         # Can call new_task()
@@ -90,8 +91,8 @@
         # Can call task_info()
         task_info = api.task_info(uuid)
-        self.assertTrue(isinstance(task_info['dateCreated'], (int, long)))
-        self.assertTrue(isinstance(task_info['uuid'], (str, unicode)))
+        self.assertTrue(isinstance(task_info['dateCreated'], int))
+        self.assertTrue(isinstance(task_info['uuid'], str))

         # Can download assets?
         # Here we are waiting for the task to be completed
@@ -101,7 +102,7 @@
                 task_info = api.task_info(uuid)
                 if task_info['status']['code'] == status_codes.COMPLETED:
                     asset = api.task_download(uuid, "all.zip")
-                    self.assertTrue(isinstance(asset, (str, unicode))) # Binary content, really
+                    self.assertTrue(isinstance(asset, requests.Response)) # Binary content, really
                     break
             except ProcessingException:
                 pass
@@ -114,7 +115,7 @@
         # task_output
         self.assertTrue(isinstance(api.task_output(uuid, 0), list))
-        self.assertTrue(isinstance(online_node.get_task_console_output(uuid, 0), (str, unicode)))
+        self.assertTrue(isinstance(online_node.get_task_console_output(uuid, 0), str))
         self.assertRaises(ProcessingException, online_node.get_task_console_output, "wrong-uuid", 0)
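These assertions change because Python 3 has no separate `unicode` or `long` types: `str` is the text type, `bytes` holds binary data, and `int` has arbitrary precision. A quick illustration:

```python
# Python 3 type model (contrast with Python 2's str/unicode and int/long splits)
isinstance("version 1.0", str)    # True: all text is str
isinstance(b"\x89PNG", bytes)     # True: binary data is bytes, not str
isinstance(2 ** 80, int)          # True: int covers values that were long in Python 2
```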