OpenDroneMap-WebODM/app/tests/test_api_task.py

import io
import os
import time
import logging
from datetime import timedelta
import json
import requests
from PIL import Image
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient
import worker
from django.utils import timezone
from app import pending_actions
from app.api.formulas import algos
from app.models import Project, Task, ImageUpload
from app.models.task import task_directory_path, full_task_directory_path, TaskInterruptedException
from app.plugins.signals import task_completed, task_removed, task_removing
from app.tests.classes import BootTransactionTestCase
from nodeodm import status_codes
from nodeodm.models import ProcessingNode, OFFLINE_MINUTES
from app.testwatch import testWatch
from .utils import start_processing_node, clear_test_media_root, catch_signal
# We need to test the task API in a TransactionTestCase because
# task processing happens on a separate thread, and normal TestCases
# do not commit changes to the DB, so spawning a new thread will show no
# data in it.
from webodm import settings
logger = logging.getLogger('app.logger')
DELAY = 2 # time to sleep for during process launch, background processing, etc.
class TestApiTask(BootTransactionTestCase):
def setUp(self):
super().setUp()
clear_test_media_root()
def test_task(self):
client = APIClient()
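# start_processing_node() (from .utils) brings up a local test NodeODM instance
# that stays available for the duration of this block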
with start_processing_node():
user = User.objects.get(username="testuser")
self.assertFalse(user.is_superuser)
other_user = User.objects.get(username="testuser2")
project = Project.objects.create(
owner=user,
name="test project"
)
other_project = Project.objects.create(
owner=other_user,
name="another test project"
)
other_task = Task.objects.create(project=other_project)
# Start processing node
# Create processing node
pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
# Verify that it's working
self.assertTrue(pnode.api_version is not None)
# task creation via file upload
image1 = open("app/fixtures/tiny_drone_image.jpg", 'rb')
image2 = open("app/fixtures/tiny_drone_image_2.jpg", 'rb')
img1 = Image.open("app/fixtures/tiny_drone_image.jpg")
# Not authenticated?
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2]
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_403_FORBIDDEN)
image1.seek(0)
image2.seek(0)
client.login(username="testuser", password="test1234")
# Cannot create a task for a project that does not exist
res = client.post("/api/projects/0/tasks/", {
'images': [image1, image2]
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
image1.seek(0)
image2.seek(0)
# Cannot create a task for a project we have no access to
res = client.post("/api/projects/{}/tasks/".format(other_project.id), {
'images': [image1, image2]
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
image1.seek(0)
image2.seek(0)
# Cannot create a task without images
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': []
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_400_BAD_REQUEST)
# Cannot create a task with just 1 image
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': image1
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_400_BAD_REQUEST)
image1.seek(0)
# Normal case with images[], name and processing node parameter
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2],
'name': 'test_task',
'processing_node': pnode.id
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
multiple_param_task = Task.objects.latest('created_at')
self.assertTrue(multiple_param_task.name == 'test_task')
self.assertTrue(multiple_param_task.processing_node.id == pnode.id)
self.assertEqual(multiple_param_task.import_url, "")
image1.seek(0)
image2.seek(0)
# Uploaded images should be the same size as originals
with Image.open(multiple_param_task.task_path("tiny_drone_image.jpg")) as im:
self.assertTrue(im.size == img1.size)
# Normal case with images[], GCP, name and processing node parameter and resize_to option
testWatch.clear()
gcp = open("app/fixtures/gcp.txt", 'r')
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2, gcp],
'name': 'test_task',
'processing_node': pnode.id,
'resize_to': img1.size[0] / 2.0
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
resized_task = Task.objects.latest('created_at')
image1.seek(0)
image2.seek(0)
gcp.seek(0)
# Uploaded images should have been resized
with Image.open(resized_task.task_path("tiny_drone_image.jpg")) as im:
self.assertTrue(im.size[0] == img1.size[0] / 2.0)
# GCP should have been scaled
with open(resized_task.task_path("gcp.txt")) as f:
lines = list(map(lambda l: l.strip(), f.readlines()))
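# Each GCP line is: geo X, geo Y, geo Z, pixel X, pixel Y, image name (plus optional extras)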
[x, y, z, px, py, imagename, *extras] = lines[1].split(' ')
self.assertTrue(imagename == "tiny_drone_image.JPG") # case insensitive
self.assertTrue(float(px) == 2.0) # scaled by half
self.assertTrue(float(py) == 3.0) # scaled by half
self.assertTrue(float(x) == 576529.22) # Didn't change
[x, y, z, px, py, imagename, *extras] = lines[5].split(' ')
self.assertTrue(imagename == "missing_image.jpg")
self.assertTrue(float(px) == 8.0) # Didn't change
self.assertTrue(float(py) == 8.0) # Didn't change
# Resize progress is 100%
resized_task.refresh_from_db()
self.assertEqual(resized_task.resize_progress, 1.0)
# Upload progress is 100%
self.assertEqual(resized_task.upload_progress, 1.0)
# Upload progress callback has been called
self.assertTrue(testWatch.get_calls_count("Task.process.callback") > 0)
# This is not a partial task
self.assertFalse(resized_task.partial)
# Case with malformed GCP file option
with open("app/fixtures/gcp_malformed.txt", 'r') as malformed_gcp:
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2, malformed_gcp],
'name': 'test_task',
'processing_node': pnode.id,
'resize_to': img1.size[0] / 2.0
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
malformed_gcp_task = Task.objects.latest('created_at')
# We just pass it along; it will error out during processing,
# but the upload itself shouldn't fail.
with open(malformed_gcp_task.task_path("gcp_malformed.txt")) as f:
lines = list(map(lambda l: l.strip(), f.readlines()))
self.assertTrue(lines[1] == "<O_O>")
image1.seek(0)
image2.seek(0)
# Cannot create a task with images[], name, but invalid processing node parameter
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2],
'name': 'test_task',
'processing_node': 9999
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_400_BAD_REQUEST)
image1.seek(0)
image2.seek(0)
# Normal case with images[] parameter
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2],
'auto_processing_node': 'false'
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
image1.seek(0)
image2.seek(0)
# Should have returned the id of the newly created task
task = Task.objects.latest('created_at')
self.assertTrue('id' in res.data)
self.assertTrue(str(task.id) == res.data['id'])
# Progress is at 0%
self.assertEqual(task.running_progress, 0.0)
# Two images should have been uploaded
self.assertTrue(ImageUpload.objects.filter(task=task).count() == 2)
# can_rerun_from should be an empty list
self.assertTrue(len(res.data['can_rerun_from']) == 0)
# processing_node_name should be null
self.assertTrue(res.data['processing_node_name'] is None)
# No processing node is set
self.assertTrue(task.processing_node is None)
# tiles.json, bounds, metadata should not be accessible at this point
tile_types = ['orthophoto', 'dsm', 'dtm']
endpoints = ['tiles.json', 'bounds', 'metadata']
for ep in endpoints:
for tile_type in tile_types:
res = client.get("/api/projects/{}/tasks/{}/{}/{}".format(project.id, task.id, tile_type, ep))
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
# Neither should an individual tile
# Z/X/Y coords are chosen based on node-odm test dataset for orthophoto_tiles/
res = client.get("/api/projects/{}/tasks/{}/orthophoto/tiles/16/16020/42443.png".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot access a tiles.json we have no access to
res = client.get("/api/projects/{}/tasks/{}/orthophoto/tiles.json".format(other_project.id, other_task.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot access an individual tile we have no access to
res = client.get("/api/projects/{}/tasks/{}/orthophoto/tiles/16/16020/42443.png".format(other_project.id, other_task.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot download assets (they don't exist yet)
for asset in list(task.ASSETS_MAP.keys()):
res = client.get("/api/projects/{}/tasks/{}/download/{}".format(project.id, task.id, asset))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot access raw assets (they don't exist yet)
res = client.get("/api/projects/{}/tasks/{}/assets/odm_orthophoto/odm_orthophoto.tif".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# Cannot assign processing node to a task we have no access to
res = client.patch("/api/projects/{}/tasks/{}/".format(other_project.id, other_task.id), {
'processing_node': pnode.id
})
self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)
# No UUID at this point
self.assertTrue(len(task.uuid) == 0)
# Assign processing node to task via API
res = client.patch("/api/projects/{}/tasks/{}/".format(project.id, task.id), {
'processing_node': pnode.id
})
self.assertTrue(res.status_code == status.HTTP_200_OK)
# On update worker.tasks.process_pending_tasks should have been called in the background
# (during tests this is sync)
# Processing should have started and a UUID is assigned
# Calling process pending tasks should finish the process
# and invoke the plugins completed signal
task.refresh_from_db()
self.assertTrue(task.status in [status_codes.RUNNING, status_codes.COMPLETED]) # Sometimes this finishes before we get here
self.assertTrue(len(task.uuid) > 0)
with catch_signal(task_completed) as handler:
retry_count = 0
while task.status != status_codes.COMPLETED:
worker.tasks.process_pending_tasks()
time.sleep(DELAY)
task.refresh_from_db()
retry_count += 1
if retry_count > 10:
break
self.assertEqual(task.status, status_codes.COMPLETED)
# Progress is 100%
self.assertTrue(task.running_progress == 1.0)
handler.assert_any_call(
sender=Task,
task_id=task.id,
signal=task_completed,
)
# Processing node should have a "rerun_from" option
pnode_rerun_from_opts = list(filter(lambda d: 'name' in d and d['name'] == 'rerun-from', pnode.available_options))[0]
self.assertTrue(len(pnode_rerun_from_opts['domain']) > 0)
# The can_rerun_from field of a task should now be populated
# with the same values as the "rerun_from" domain values of
# the processing node
res = client.get("/api/projects/{}/tasks/{}/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
self.assertTrue(pnode_rerun_from_opts['domain'] == res.data['can_rerun_from'])
# processing_node_name should be the name of the pnode
self.assertEqual(res.data['processing_node_name'], str(pnode))
# Can download assets
for asset in list(task.ASSETS_MAP.keys()):
res = client.get("/api/projects/{}/tasks/{}/download/{}".format(project.id, task.id, asset))
self.assertEqual(res.status_code, status.HTTP_200_OK)
# We can stream downloads
res = client.get("/api/projects/{}/tasks/{}/download/{}?_force_stream=1".format(project.id, task.id, list(task.ASSETS_MAP.keys())[0]))
self.assertTrue(res.status_code == status.HTTP_200_OK)
self.assertTrue(res.has_header('_stream'))
# A textured mesh archive file should exist
self.assertTrue(os.path.exists(task.assets_path(task.ASSETS_MAP["textured_model.zip"]["deferred_path"])))
# Tiles archives should have been created
self.assertTrue(os.path.exists(task.assets_path(task.ASSETS_MAP["dsm_tiles.zip"]["deferred_path"])))
self.assertTrue(os.path.exists(task.assets_path(task.ASSETS_MAP["dtm_tiles.zip"]["deferred_path"])))
self.assertTrue(os.path.exists(task.assets_path(task.ASSETS_MAP["orthophoto_tiles.zip"]["deferred_path"])))
# Can download raw assets
res = client.get("/api/projects/{}/tasks/{}/assets/odm_orthophoto/odm_orthophoto.tif".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# Can access tiles.json, bounds and metadata
for ep in endpoints:
for tile_type in tile_types:
res = client.get("/api/projects/{}/tasks/{}/{}/{}".format(project.id, task.id, tile_type, ep))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# Bounds are what we expect them to be
# (4 coords in lat/lon)
res = client.get("/api/projects/{}/tasks/{}/orthophoto/tiles.json".format(project.id, task.id))
tiles = json.loads(res.content.decode("utf-8"))
self.assertTrue(len(tiles['bounds']) == 4)
self.assertTrue(round(tiles['bounds'][0], 7) == -91.9945132)
res = client.get("/api/projects/{}/tasks/{}/orthophoto/bounds".format(project.id, task.id))
bounds = json.loads(res.content.decode("utf-8"))
self.assertTrue(len(bounds['bounds']) == 4)
self.assertTrue(round(bounds['bounds'][0], 7) == -91.9945132)
# Metadata checks for orthophoto
res = client.get("/api/projects/{}/tasks/{}/orthophoto/metadata".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
metadata = json.loads(res.content.decode("utf-8"))
fields = ['bounds', 'minzoom', 'maxzoom', 'statistics', 'algorithms', 'color_maps', 'tiles', 'scheme', 'name']
for f in fields:
self.assertTrue(f in metadata)
# Colormaps and algorithms should be empty lists
self.assertEqual(metadata['algorithms'], [])
self.assertEqual(metadata['color_maps'], [])
# Address key is removed
self.assertFalse('address' in metadata)
# Scheme is xyz
self.assertEqual(metadata['scheme'], 'xyz')
# Tiles URL has no extra params
self.assertTrue(metadata['tiles'][0].endswith('.png'))
# Histogram stats are available (3 bands for orthophoto)
self.assertTrue(len(metadata['statistics']) == 3)
for b in ['1', '2', '3']:
self.assertEqual(len(metadata['statistics'][b]['histogram']), 2)
self.assertEqual(len(metadata['statistics'][b]['histogram'][0]), 255)
self.assertTrue('max' in metadata['statistics'][b])
self.assertTrue('min' in metadata['statistics'][b])
# Metadata with invalid formula
res = client.get("/api/projects/{}/tasks/{}/orthophoto/metadata?formula=INVALID".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
# Metadata with a valid formula but invalid bands
res = client.get("/api/projects/{}/tasks/{}/orthophoto/metadata?formula=NDVI&bands=ABC".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
# Metadata with valid formula and bands
res = client.get("/api/projects/{}/tasks/{}/orthophoto/metadata?formula=NDVI&bands=RGN".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
metadata = json.loads(res.content.decode("utf-8"))
# Colormaps and algorithms are populated
self.assertTrue(len(metadata['algorithms']) > 0)
self.assertTrue(len(metadata['color_maps']) > 0)
# Algorithms have valid keys
for k in ['id', 'filters', 'expr', 'help']:
for a in metadata['algorithms']:
self.assertTrue(k in a)
self.assertTrue(len(a['filters']) > 0)
# Colormap is for algorithms
self.assertEqual(len([x for x in metadata['color_maps'] if x['key'] == 'rdylgn']), 1)
self.assertEqual(len([x for x in metadata['color_maps'] if x['key'] == 'jet_r']), 0)
# Formula parameters are copied to tile URL
self.assertTrue(metadata['tiles'][0].endswith('?formula=NDVI&bands=RGN'))
# Histogram stats are available (1 band)
self.assertTrue(len(metadata['statistics']) == 1)
# Metadata with a valid formula that specifies a scale range
res = client.get("/api/projects/{}/tasks/{}/orthophoto/metadata?formula=VARI".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
metadata = json.loads(res.content.decode("utf-8"))
self.assertTrue(len(metadata['statistics']) == 1)
# Min/max values have been replaced
self.assertEqual(metadata['statistics']['1']['min'], algos['VARI']['range'][0])
self.assertEqual(metadata['statistics']['1']['max'], algos['VARI']['range'][1])
# Metadata for DSM/DTM
for tile_type in ['dsm', 'dtm']:
res = client.get("/api/projects/{}/tasks/{}/{}/metadata".format(project.id, task.id, tile_type))
self.assertEqual(res.status_code, status.HTTP_200_OK)
metadata = json.loads(res.content.decode("utf-8"))
# Colormaps are populated
self.assertTrue(len(metadata['color_maps']) > 0)
# Colormaps are for elevation
self.assertEqual(len([x for x in metadata['color_maps'] if x['key'] == 'rdylgn']), 0)
self.assertEqual(len([x for x in metadata['color_maps'] if x['key'] == 'jet_r']), 1)
# Algorithms are empty
self.assertEqual(len(metadata['algorithms']), 0)
# Min/max values are what we expect them to be
self.assertEqual(len(metadata['statistics']), 1)
self.assertEqual(round(metadata['statistics']['1']['min'], 2), 156.92)
self.assertEqual(round(metadata['statistics']['1']['max'], 2), 164.88)
# Can access individual tiles
for tile_type in tile_types:
res = client.get("/api/projects/{}/tasks/{}/{}/tiles/17/32042/46185.png".format(project.id, task.id, tile_type))
self.assertEqual(res.status_code, status.HTTP_200_OK)
with Image.open(io.BytesIO(res.content)) as i:
self.assertEqual(i.width, 256)
self.assertEqual(i.height, 256)
# Can access retina tiles
for tile_type in tile_types:
res = client.get("/api/projects/{}/tasks/{}/{}/tiles/17/32042/46185@2x.png".format(project.id, task.id, tile_type))
self.assertEqual(res.status_code, status.HTTP_200_OK)
with Image.open(io.BytesIO(res.content)) as i:
self.assertEqual(i.width, 512)
self.assertEqual(i.height, 512)
# TODO: Test hillshade
# Another user does not have access to the resources
other_client = APIClient()
other_client.login(username="testuser2", password="test1234")
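# Helper: tiles.json, an individual tile and the task detail endpoint
# should all return expectedStatus for other_client's current credentials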
def accessResources(expectedStatus):
for tile_type in tile_types:
res = other_client.get("/api/projects/{}/tasks/{}/{}/tiles.json".format(project.id, task.id, tile_type))
self.assertEqual(res.status_code, expectedStatus)
res = other_client.get("/api/projects/{}/tasks/{}/{}/tiles/17/32042/46185.png".format(project.id, task.id, tile_type))
self.assertEqual(res.status_code, expectedStatus)
res = other_client.get("/api/projects/{}/tasks/{}/".format(project.id, task.id))
self.assertEqual(res.status_code, expectedStatus)
accessResources(status.HTTP_404_NOT_FOUND)
# Original owner enables sharing
res = client.patch("/api/projects/{}/tasks/{}/".format(project.id, task.id), {
'public': True
})
self.assertTrue(res.status_code == status.HTTP_200_OK)
# Now the other user can access the resources
accessResources(status.HTTP_200_OK)
# But they cannot modify the task
res = other_client.patch("/api/projects/{}/tasks/{}/".format(project.id, task.id), {
'name': "Changed! Uh oh"
})
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
# User logs out
other_client.logout()
# They can still access the resources anonymously
accessResources(status.HTTP_200_OK)
# Restart a task
testWatch.clear()
res = client.post("/api/projects/{}/tasks/{}/restart/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# process_task is called in the background
task.refresh_from_db()
self.assertTrue(task.status in [status_codes.RUNNING, status_codes.COMPLETED])
# Should return without issues
task.check_if_canceled()
# Cancel a task
res = client.post("/api/projects/{}/tasks/{}/cancel/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# task is processed right away
# Should have been canceled
task.refresh_from_db()
self.assertTrue(task.status == status_codes.CANCELED)
self.assertTrue(task.pending_action is None)
# Manually set pending action
task.pending_action = pending_actions.CANCEL
task.save()
# Should raise TaskInterruptedException
self.assertRaises(TaskInterruptedException, task.check_if_canceled)
# Restore
task.pending_action = None
task.save()
# Remove a task and verify that it calls the proper plugins signals
with catch_signal(task_removing) as h1:
with catch_signal(task_removed) as h2:
res = client.post("/api/projects/{}/tasks/{}/remove/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
h1.assert_called_once_with(sender=Task, task_id=task.id, signal=task_removing)
h2.assert_called_once_with(sender=Task, task_id=task.id, signal=task_removed)
# task is processed right away
# Has been removed along with assets
self.assertFalse(Task.objects.filter(pk=task.id).exists())
self.assertFalse(ImageUpload.objects.filter(task=task).exists())
task_assets_path = os.path.join(settings.MEDIA_ROOT, task_directory_path(task.id, task.project.id))
self.assertFalse(os.path.exists(task_assets_path))
# Create a task
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2],
'name': 'test_task_offline',
'processing_node': pnode.id,
'auto_processing_node': 'false'
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
task = Task.objects.get(pk=res.data['id'])
image1.seek(0)
image2.seek(0)
# Processing should fail and set an error
task.refresh_from_db()
self.assertTrue(task.last_error is not None)
self.assertTrue(task.status == status_codes.FAILED)
# Now bring it back online
with start_processing_node():
# Restart
res = client.post("/api/projects/{}/tasks/{}/restart/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
task.refresh_from_db()
# After processing, the task should have restarted, and have no UUID or status
self.assertTrue(task.status is None)
self.assertTrue(len(task.uuid) == 0)
# Another step and it should have acquired a UUID
worker.tasks.process_pending_tasks()
task.refresh_from_db()
self.assertTrue(task.status in [status_codes.RUNNING, status_codes.COMPLETED])
self.assertTrue(len(task.uuid) > 0)
# Another step and it should be completed
time.sleep(DELAY)
worker.tasks.process_pending_tasks()
task.refresh_from_db()
self.assertTrue(task.status == status_codes.COMPLETED)
# Test rerun-from clearing mechanism:
# 1. Set some task options, including rerun-from
task.options = [{'name': 'mesh-size', 'value':1000},
{'name': 'rerun-from', 'value': 'odm_meshing'}]
task.save()
# 2. Remove the task directly from node-odm (simulate a task purge)
self.assertTrue(task.processing_node.remove_task(task.uuid))
# 3. Restart the task
res = client.post("/api/projects/{}/tasks/{}/restart/".format(project.id, task.id))
self.assertTrue(res.status_code == status.HTTP_200_OK)
# 4. Check that the rerun_from parameter has been cleared
# but the other parameters are still set
task.refresh_from_db()
self.assertTrue(len(task.uuid) == 0)
self.assertTrue(len(list(filter(lambda d: d['name'] == 'rerun-from', task.options))) == 0)
self.assertTrue(len(list(filter(lambda d: d['name'] == 'mesh-size', task.options))) == 1)
# Test connection, timeout errors
def connTimeout(*args, **kwargs):
raise requests.exceptions.ConnectTimeout("Simulated timeout")
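# Intercept the node's task_output call so that the next status poll raises
# ConnectTimeout, simulating a processing node that stops responding mid-task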
testWatch.intercept("nodeodm.api_client.task_output", connTimeout)
worker.tasks.process_pending_tasks()
# Timeout errors should be handled by retrying again at a later time
# and not fail
task.refresh_from_db()
self.assertTrue(task.last_error is None)
# Reassigning the task to another project should move its assets
self.assertTrue(os.path.exists(full_task_directory_path(task.id, project.id)))
self.assertTrue(len(task.imageupload_set.all()) == 2)
for image in task.imageupload_set.all():
self.assertTrue('project/{}/'.format(project.id) in image.image.path)
task.project = other_project
task.save()
task.refresh_from_db()
self.assertFalse(os.path.exists(full_task_directory_path(task.id, project.id)))
self.assertTrue(os.path.exists(full_task_directory_path(task.id, other_project.id)))
for image in task.imageupload_set.all():
self.assertTrue('project/{}/'.format(other_project.id) in image.image.path)
# Restart node-odm so that it does not generate orthophotos
testWatch.clear()
with start_processing_node("--test_skip_orthophotos"):
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2],
'name': 'test_task_no_orthophoto',
'processing_node': pnode.id,
'auto_processing_node': 'false'
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
worker.tasks.process_pending_tasks()
time.sleep(DELAY)
worker.tasks.process_pending_tasks()
task = Task.objects.get(pk=res.data['id'])
self.assertTrue(task.status == status_codes.COMPLETED)
# Orthophoto files/directories should be missing
self.assertFalse(os.path.exists(task.assets_path("odm_orthophoto", "odm_orthophoto.tif")))
self.assertFalse(os.path.exists(task.assets_path("orthophoto_tiles")))
# orthophoto_extent should be none
self.assertTrue(task.orthophoto_extent is None)
# but other extents should be populated
self.assertTrue(task.dsm_extent is not None)
self.assertTrue(task.dtm_extent is not None)
self.assertTrue(os.path.exists(task.assets_path("dsm_tiles")))
self.assertTrue(os.path.exists(task.assets_path("dtm_tiles")))
# Can access only tiles of available assets
res = client.get("/api/projects/{}/tasks/{}/dsm/tiles.json".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
res = client.get("/api/projects/{}/tasks/{}/dtm/tiles.json".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
res = client.get("/api/projects/{}/tasks/{}/orthophoto/tiles.json".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
# Available assets should be missing orthophoto.tif type
# but others such as textured_model.zip should be available
res = client.get("/api/projects/{}/tasks/{}/".format(project.id, task.id))
self.assertFalse('orthophoto.tif' in res.data['available_assets'])
self.assertFalse('orthophoto_tiles.zip' in res.data['available_assets'])
self.assertTrue('textured_model.zip' in res.data['available_assets'])
image1.close()
image2.close()
gcp.close()
def test_task_auto_processing_node(self):
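# With auto_processing_node enabled (the default), the worker should assign whichever
# node is online, and react to nodes going offline by clearing the assignment or failing the task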
project = Project.objects.get(name="User Test Project")
task = Task.objects.create(project=project, name="Test")
pnode = ProcessingNode.objects.create(hostname="invalid-host", port=11223)
another_pnode = ProcessingNode.objects.create(hostname="invalid-host-2", port=11223)
# By default
self.assertTrue(task.auto_processing_node)
self.assertTrue(task.processing_node is None)
# Simulate an error
task.last_error = "Test error"
task.save()
worker.tasks.process_pending_tasks()
# A processing node should not have been assigned
task.refresh_from_db()
self.assertTrue(task.processing_node is None)
# Remove error
task.last_error = None
task.save()
worker.tasks.process_pending_tasks()
# A processing node should not have been assigned because no processing nodes are online
task.refresh_from_db()
self.assertTrue(task.processing_node is None)
# Bring a processing node online
pnode.last_refreshed = timezone.now()
pnode.save()
self.assertTrue(pnode.is_online())
# A processing node has been assigned
worker.tasks.process_pending_tasks()
task.refresh_from_db()
self.assertTrue(task.processing_node.id == pnode.id)
# Task should have failed (no images provided, invalid host...)
self.assertTrue(task.last_error is not None)
# Bring another processing node online, and bring the old one offline
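# (a node counts as offline once last_refreshed is older than OFFLINE_MINUTES,
# so backdating the timestamp simulates it going offline)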
pnode.last_refreshed = timezone.now() - timedelta(minutes=OFFLINE_MINUTES)
pnode.save()
another_pnode.last_refreshed = timezone.now()
another_pnode.save()
# Remove error, set status to queued
task.last_error = None
task.status = status_codes.QUEUED
task.save()
worker.tasks.process_pending_tasks()
# Processing node is now cleared and a new one will be assigned on the next tick
task.refresh_from_db()
self.assertTrue(task.processing_node is None)
self.assertTrue(task.status is None)
worker.tasks.process_pending_tasks()
task.refresh_from_db()
self.assertTrue(task.processing_node.id == another_pnode.id)
# Set task to running, bring node offline
task.last_error = None
task.status = status_codes.RUNNING
task.save()
another_pnode.last_refreshed = timezone.now() - timedelta(minutes=OFFLINE_MINUTES)
another_pnode.save()
worker.tasks.process_pending_tasks()
task.refresh_from_db()
# Processing node is still there, but task should have failed
self.assertTrue(task.status == status_codes.FAILED)
self.assertTrue("Processing node went offline." in task.last_error)
def test_task_manual_processing_node(self):
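# With auto_processing_node=False the worker must never auto-assign a node, even if one is online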
user = User.objects.get(username="testuser")
project = Project.objects.create(name="User Test Project", owner=user)
task = Task.objects.create(project=project, name="Test", auto_processing_node=False)
# Bring a processing node online
pnode = ProcessingNode.objects.create(hostname="invalid-host", port=11223)
pnode.last_refreshed = timezone.now()
pnode.save()
self.assertTrue(pnode.is_online())
worker.tasks.process_pending_tasks()
# A processing node should not have been assigned because we asked
# not to via auto_processing_node = false
task.refresh_from_db()
self.assertTrue(task.processing_node is None)
def test_task_chunked_uploads(self):
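# Chunked upload workflow: create a partial task, upload images in separate
# requests, then commit the task to start processing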
with start_processing_node():
client = APIClient()
user = User.objects.get(username="testuser")
self.assertFalse(user.is_superuser)
project = Project.objects.create(
owner=user,
name="test project"
)
pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
# task creation via chunked upload
image1 = open("app/fixtures/tiny_drone_image.jpg", 'rb')
image2 = open("app/fixtures/tiny_drone_image_2.jpg", 'rb')
# Cannot create partial task without credentials
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'auto_processing_node': 'true',
'partial': 'true'
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_403_FORBIDDEN)
client.login(username="testuser", password="test1234")
# Can after login
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'auto_processing_node': 'true',
'partial': 'true'
}, format="multipart")
self.assertTrue(res.status_code == status.HTTP_201_CREATED)
task = Task.objects.get(pk=res.data['id'])
# It's partial
self.assertTrue(task.partial)
# It should not get processed
worker.tasks.process_pending_tasks()
time.sleep(DELAY)
self.assertEqual(task.upload_progress, 0.0)
# Uploading to a nonexistent task leads to a 404
wrong_task_id = '11111111-1111-1111-1111-111111111111'
res = client.post("/api/projects/{}/tasks/{}/upload/".format(project.id, wrong_task_id), {
'images': [image1],
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
image1.seek(0)
# Upload works with one image
res = client.post("/api/projects/{}/tasks/{}/upload/".format(project.id, task.id), {
'images': [image1],
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data['success'], True)
image1.seek(0)
# Cannot commit with a single image
res = client.post("/api/projects/{}/tasks/{}/commit/".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
# And second image
res = client.post("/api/projects/{}/tasks/{}/upload/".format(project.id, task.id), {
'images': [image2],
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data['success'], True)
image2.seek(0)
# Task hasn't started
self.assertEqual(task.upload_progress, 0.0)
# Can commit with two images
res = client.post("/api/projects/{}/tasks/{}/commit/".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data['id'], str(task.id))
task.refresh_from_db()
# No longer partial
self.assertFalse(task.partial)
# Image count has been updated
self.assertEqual(task.images_count, 2)
# Make sure processing begins
worker.tasks.process_pending_tasks()
time.sleep(DELAY)
task.refresh_from_db()
self.assertEqual(task.upload_progress, 1.0)
image1.close()
image2.close()