2016-11-09 21:13:43 +00:00
|
|
|
import os
|
2018-02-16 22:05:09 +00:00
|
|
|
from wsgiref.util import FileWrapper
|
2016-11-09 21:13:43 +00:00
|
|
|
|
2019-01-15 22:51:32 +00:00
|
|
|
import mimetypes
|
2019-02-20 21:42:20 +00:00
|
|
|
|
2019-03-04 16:42:51 +00:00
|
|
|
from shutil import copyfileobj
|
2017-11-30 23:49:58 +00:00
|
|
|
from django.core.exceptions import ObjectDoesNotExist, SuspiciousFileOperation, ValidationError
|
2019-03-04 16:42:51 +00:00
|
|
|
from django.core.files.uploadedfile import InMemoryUploadedFile
|
2017-02-15 19:20:41 +00:00
|
|
|
from django.db import transaction
|
2019-01-15 22:51:32 +00:00
|
|
|
from django.http import FileResponse
|
2016-11-09 21:13:43 +00:00
|
|
|
from django.http import HttpResponse
|
2016-10-13 20:28:32 +00:00
|
|
|
from rest_framework import status, serializers, viewsets, filters, exceptions, permissions, parsers
|
2018-02-16 22:05:09 +00:00
|
|
|
from rest_framework.decorators import detail_route
|
2019-02-19 18:44:40 +00:00
|
|
|
from rest_framework.permissions import AllowAny
|
2016-10-13 16:21:12 +00:00
|
|
|
from rest_framework.response import Response
|
2016-11-09 21:13:43 +00:00
|
|
|
from rest_framework.views import APIView
|
2017-03-14 19:01:18 +00:00
|
|
|
|
2018-02-15 21:23:29 +00:00
|
|
|
from app import models, pending_actions
|
2019-02-20 21:42:20 +00:00
|
|
|
from nodeodm import status_codes
|
2016-10-12 22:18:37 +00:00
|
|
|
from nodeodm.models import ProcessingNode
|
2018-02-15 21:23:29 +00:00
|
|
|
from worker import tasks as worker_tasks
|
2020-04-02 20:58:58 +00:00
|
|
|
from .common import get_and_check_project
|
|
|
|
from app.security import path_traversal_check
|
2020-12-18 21:54:00 +00:00
|
|
|
from django.utils.translation import gettext_lazy as _
|
2016-10-12 22:18:37 +00:00
|
|
|
|
2016-11-01 19:11:36 +00:00
|
|
|
|
2019-02-20 21:42:20 +00:00
|
|
|
def flatten_files(request_files):
    """
    Flatten a MultiValueDict of uploaded files into a single list.

    :param request_files: MultiValueDict (e.g. request.FILES) mapping field
        names to one or more uploaded files
    :return: flat list of all uploaded files, in field order
    """
    flat = []
    for key in request_files:
        flat.extend(request_files.getlist(key))
    return flat
|
|
|
|
|
2016-10-12 22:18:37 +00:00
|
|
|
class TaskIDsSerializer(serializers.BaseSerializer):
    """
    Read-only serializer that collapses a task down to just its ID.
    """

    def to_representation(self, obj):
        # Expose only the primary key
        return obj.id
|
2016-11-10 17:26:04 +00:00
|
|
|
|
2016-10-12 22:18:37 +00:00
|
|
|
class TaskSerializer(serializers.ModelSerializer):
    """
    Full serializer for Task models, including processing node metadata.
    """
    project = serializers.PrimaryKeyRelatedField(queryset=models.Project.objects.all())
    processing_node = serializers.PrimaryKeyRelatedField(queryset=ProcessingNode.objects.all())
    processing_node_name = serializers.SerializerMethodField()
    can_rerun_from = serializers.SerializerMethodField()

    def get_processing_node_name(self, obj):
        """Display name of the task's processing node, or None when unassigned."""
        if obj.processing_node is None:
            return None
        return str(obj.processing_node)

    def get_can_rerun_from(self, obj):
        """
        When a task has been associated with a processing node
        and if the processing node supports the "rerun-from" parameter
        this method returns the valid values for "rerun-from" for that particular
        processing node.

        TODO: this could be improved by returning an empty array if a task was created
        and purged by the processing node (which would require knowing how long a task is being kept
        see https://github.com/OpenDroneMap/NodeODM/issues/32
        :return: array of valid rerun-from parameters
        """
        if obj.processing_node is None:
            return []

        # Find the node's "rerun-from" option and return its domain of valid values
        for option in obj.processing_node.available_options:
            if option.get('name') == 'rerun-from':
                return option.get('domain', [])

        return []

    class Meta:
        model = models.Task
        exclude = ('console_output', 'orthophoto_extent', 'dsm_extent', 'dtm_extent', )
        read_only_fields = ('processing_time', 'status', 'last_error', 'created_at', 'pending_action', 'available_assets', )
|
2016-11-09 21:13:43 +00:00
|
|
|
|
2016-10-13 16:21:12 +00:00
|
|
|
class TaskViewSet(viewsets.ViewSet):
    """
    Task get/add/delete/update
    A task represents a set of images and other input to be sent to a processing node.
    Once a processing node completes processing, results are stored in the task.
    """
    # Defer large columns; they are only needed by dedicated endpoints
    queryset = models.Task.objects.all().defer('orthophoto_extent', 'dsm_extent', 'dtm_extent', 'console_output', )

    parser_classes = (parsers.MultiPartParser, parsers.JSONParser, parsers.FormParser, )
    ordering_fields = '__all__'

    def get_permissions(self):
        """
        Instantiates and returns the list of permissions that this view requires.
        We don't use object level permissions on tasks, relying on
        project's object permissions instead (but standard model permissions still apply)
        and with the exception of 'retrieve' (task GET) for public tasks access
        """
        if self.action == 'retrieve':
            permission_classes = [permissions.AllowAny]
        else:
            permission_classes = [permissions.DjangoModelPermissions, ]

        return [permission() for permission in permission_classes]

    def set_pending_action(self, pending_action, request, pk=None, project_pk=None, perms=('change_project', )):
        """
        Set a pending action on a task and immediately schedule it for processing.

        :param pending_action: one of the pending_actions constants (CANCEL/RESTART/REMOVE/...)
        :param perms: project permissions required to perform the action
        :raises exceptions.NotFound: when the task (or project access) is missing
        """
        get_and_check_project(request, project_pk, perms)
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        task.pending_action = pending_action
        task.partial = False  # Otherwise this will not be processed
        # Clear any previous error so the worker doesn't skip the task
        task.last_error = None
        task.save()

        # Process task right away
        worker_tasks.process_task.delay(task.id)

        return Response({'success': True})

    @detail_route(methods=['post'])
    def cancel(self, *args, **kwargs):
        """Request cancellation of a running task."""
        return self.set_pending_action(pending_actions.CANCEL, *args, **kwargs)

    @detail_route(methods=['post'])
    def restart(self, *args, **kwargs):
        """Request a restart of a task."""
        return self.set_pending_action(pending_actions.RESTART, *args, **kwargs)

    @detail_route(methods=['post'])
    def remove(self, *args, **kwargs):
        """Request removal of a task (requires delete permission on the project)."""
        return self.set_pending_action(pending_actions.REMOVE, *args, perms=('delete_project', ), **kwargs)

    @detail_route(methods=['get'])
    def output(self, request, pk=None, project_pk=None):
        """
        Retrieve the console output for this task.
        An optional "line" query param can be passed to retrieve
        only the output starting from a certain line number.
        """
        get_and_check_project(request, project_pk)
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        # Negative line numbers are clamped to 0
        # NOTE(review): a non-numeric "line" value raises ValueError (HTTP 500) — confirm intended
        line_num = max(0, int(request.query_params.get('line', 0)))
        output = task.console_output or ""
        return Response('\n'.join(output.rstrip().split('\n')[line_num:]))

    def list(self, request, project_pk=None):
        """List all tasks belonging to a project, honoring ordering query params."""
        get_and_check_project(request, project_pk)
        tasks = self.queryset.filter(project=project_pk)
        tasks = filters.OrderingFilter().filter_queryset(self.request, tasks, self)
        serializer = TaskSerializer(tasks, many=True)
        return Response(serializer.data)

    def retrieve(self, request, pk=None, project_pk=None):
        """Retrieve a single task; public tasks bypass the project permission check."""
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        if not task.public:
            get_and_check_project(request, task.project.id)

        serializer = TaskSerializer(task)
        return Response(serializer.data)

    @detail_route(methods=['post'])
    def commit(self, request, pk=None, project_pk=None):
        """
        Commit a task after all images have been uploaded
        """
        get_and_check_project(request, project_pk, ('change_project', ))
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        # Mark the upload phase as finished and record the actual image count
        task.partial = False
        task.images_count = models.ImageUpload.objects.filter(task=task).count()

        if task.images_count < 2:
            raise exceptions.ValidationError(detail=_("You need to upload at least 2 images before commit"))

        task.save()
        # Kick off processing now that all images are in place
        worker_tasks.process_task.delay(task.id)

        serializer = TaskSerializer(task)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @detail_route(methods=['post'])
    def upload(self, request, pk=None, project_pk=None):
        """
        Add images to a task
        """
        get_and_check_project(request, project_pk, ('change_project', ))
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        files = flatten_files(request.FILES)

        if len(files) == 0:
            raise exceptions.ValidationError(detail=_("No files uploaded"))

        # All-or-nothing: either every image record is created, or none
        with transaction.atomic():
            for image in files:
                models.ImageUpload.objects.create(task=task, image=image)

        return Response({'success': True}, status=status.HTTP_200_OK)

    def create(self, request, project_pk=None):
        """
        Create a new task. With "partial" set, a placeholder is created and
        images are uploaded later (via upload/commit); otherwise images must
        be included in this request and processing starts immediately.
        """
        project = get_and_check_project(request, project_pk, ('change_project', ))

        # If this is a partial task, we're going to upload images later
        # for now we just create a placeholder task.
        if request.data.get('partial'):
            task = models.Task.objects.create(project=project,
                                              pending_action=pending_actions.RESIZE if 'resize_to' in request.data else None)
            serializer = TaskSerializer(task, data=request.data, partial=True)
            serializer.is_valid(raise_exception=True)
            serializer.save()
        else:
            files = flatten_files(request.FILES)

            if len(files) <= 1:
                raise exceptions.ValidationError(detail=_("Cannot create task, you need at least 2 images"))

            with transaction.atomic():
                task = models.Task.objects.create(project=project,
                                                  pending_action=pending_actions.RESIZE if 'resize_to' in request.data else None)

                for image in files:
                    models.ImageUpload.objects.create(task=task, image=image)
                task.images_count = len(files)

                # Update other parameters such as processing node, task name, etc.
                serializer = TaskSerializer(task, data=request.data, partial=True)
                serializer.is_valid(raise_exception=True)
                serializer.save()

            # Non-partial tasks are processed right away
            worker_tasks.process_task.delay(task.id)

        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def update(self, request, pk=None, project_pk=None, partial=False):
        """Update a task's fields; reassigning its project requires change permission there too."""
        get_and_check_project(request, project_pk, ('change_project', ))
        try:
            task = self.queryset.get(pk=pk, project=project_pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        # Check that a user has access to reassign a project
        if 'project' in request.data:
            try:
                get_and_check_project(request, request.data['project'], ('change_project', ))
            except exceptions.NotFound:
                # Don't leak existence of inaccessible projects as a 404
                raise exceptions.PermissionDenied()

        serializer = TaskSerializer(task, data=request.data, partial=partial)
        serializer.is_valid(raise_exception=True)
        serializer.save()

        # Process task right away
        worker_tasks.process_task.delay(task.id)

        return Response(serializer.data)

    def partial_update(self, request, *args, **kwargs):
        """PATCH handler; delegates to update() with partial=True."""
        kwargs['partial'] = True
        return self.update(request, *args, **kwargs)
|
|
|
|
|
|
|
|
|
2016-11-11 17:55:56 +00:00
|
|
|
class TaskNestedView(APIView):
    """
    Base view for endpoints nested under a task, honoring public-task access.
    """
    queryset = models.Task.objects.all().defer('orthophoto_extent', 'dtm_extent', 'dsm_extent', 'console_output', )
    permission_classes = (AllowAny, )

    def get_and_check_task(self, request, pk, annotate=None):
        """
        Retrieve a task by primary key and verify the caller may access it.

        :param request: HTTP request used for the permission check
        :param pk: task primary key
        :param annotate: optional dict of queryset annotations (kwargs for .annotate())
        :return: the Task instance
        :raises exceptions.NotFound: when the task does not exist or access is denied
        """
        # Use a None sentinel instead of a mutable default argument
        if annotate is None:
            annotate = {}

        try:
            task = self.queryset.annotate(**annotate).get(pk=pk)
        except (ObjectDoesNotExist, ValidationError):
            raise exceptions.NotFound()

        # Check for permissions, unless the task is public
        if not task.public:
            get_and_check_project(request, task.project.id)

        return task
|
|
|
|
|
|
|
|
|
2019-01-15 22:51:32 +00:00
|
|
|
def download_file_response(request, filePath, content_disposition):
    """
    Build an HTTP response that serves a file from disk.

    :param request: HTTP request; the '_force_stream' GET param forces streaming (for testing)
    :param filePath: path of the file to serve
    :param content_disposition: 'attachment' or 'inline'
    :return: FileResponse (streamed) or HttpResponse with the file contents
    """
    filename = os.path.basename(filePath)
    filesize = os.stat(filePath).st_size
    # The response machinery takes ownership of the handle (FileResponse closes it;
    # the wrapped HttpResponse handle is released when the wrapper is consumed)
    file = open(filePath, "rb")

    # Guess the MIME type once; fall back to a generic binary type
    content_type = mimetypes.guess_type(filename)[0] or "application/zip"

    # More than 100mb, normal http response, otherwise stream
    # Django docs say to avoid streaming when possible
    stream = filesize > 1e8 or request.GET.get('_force_stream', False)
    if stream:
        response = FileResponse(file)
    else:
        response = HttpResponse(FileWrapper(file),
                                content_type=content_type)

    response['Content-Type'] = content_type
    response['Content-Disposition'] = "{}; filename={}".format(content_disposition, filename)
    response['Content-Length'] = filesize

    # For testing
    if stream:
        response['_stream'] = 'yes'

    return response
|
|
|
|
|
|
|
|
|
2017-01-18 19:49:53 +00:00
|
|
|
"""
|
|
|
|
Task downloads are simply aliases to download the task's assets
|
|
|
|
(but require a shorter path and look nicer to the API user)
|
|
|
|
"""
|
|
|
|
class TaskDownloads(TaskNestedView):
    def get(self, request, pk=None, project_pk=None, asset=""):
        """
        Downloads a task asset (if available)
        """
        task = self.get_and_check_task(request, pk)

        # Resolve the asset name to a filesystem path; unknown assets become 404s
        try:
            asset_fs_path = task.get_asset_download_path(asset)
        except FileNotFoundError:
            asset_fs_path = None

        if asset_fs_path is None or not os.path.exists(asset_fs_path):
            raise exceptions.NotFound(_("Asset does not exist"))

        return download_file_response(request, asset_fs_path, 'attachment')
|
2017-01-18 19:49:53 +00:00
|
|
|
|
|
|
|
"""
|
|
|
|
Raw access to the task's asset folder resources
|
|
|
|
Useful when accessing a textured 3d model, or the Potree point cloud data
|
|
|
|
"""
|
|
|
|
class TaskAssets(TaskNestedView):
    def get(self, request, pk=None, project_pk=None, unsafe_asset_path=""):
        """
        Downloads a task asset (if available)
        """
        task = self.get_and_check_task(request, pk)

        # Check for directory traversal attacks; report them as a missing asset
        try:
            safe_path = path_traversal_check(task.assets_path(unsafe_asset_path),
                                             task.assets_path(""))
        except SuspiciousFileOperation:
            safe_path = None

        # Only existing regular files are served; directories are not
        if safe_path is None or not os.path.exists(safe_path) or os.path.isdir(safe_path):
            raise exceptions.NotFound(_("Asset does not exist"))

        return download_file_response(request, safe_path, 'inline')
|
2019-02-20 21:42:20 +00:00
|
|
|
|
|
|
|
"""
|
|
|
|
Task assets import
|
|
|
|
"""
|
|
|
|
class TaskAssetsImport(APIView):
    """
    Create a task by importing pre-processed assets, either from an uploaded
    zip file or from a URL the worker will download.
    """
    permission_classes = (permissions.AllowAny,)
    parser_classes = (parsers.MultiPartParser, parsers.JSONParser, parsers.FormParser,)

    def post(self, request, project_pk=None):
        """
        Import task assets into a project.

        Exactly one source must be provided: a single uploaded file, or a 'url'
        field in the request data. An optional 'name' field names the new task.

        :raises exceptions.ValidationError: when the source constraints are violated
        """
        project = get_and_check_project(request, project_pk, ('change_project',))

        files = flatten_files(request.FILES)
        import_url = request.data.get('url', None)
        task_name = request.data.get('name', _('Imported Task'))

        # Exactly one uploaded file is required when no URL is given
        if not import_url and len(files) != 1:
            raise exceptions.ValidationError(detail=_("Cannot create task, you need to upload 1 file"))

        # URL and file upload are mutually exclusive
        if import_url and len(files) > 0:
            raise exceptions.ValidationError(detail=_("Cannot create task, either specify a URL or upload 1 file."))

        with transaction.atomic():
            # "file://all.zip" signals the worker to import from the local upload
            task = models.Task.objects.create(project=project,
                                              auto_processing_node=False,
                                              name=task_name,
                                              import_url=import_url if import_url else "file://all.zip",
                                              status=status_codes.RUNNING,
                                              pending_action=pending_actions.IMPORT)
            task.create_task_directories()

            if len(files) > 0:
                destination_file = task.assets_path("all.zip")

                # Copy the upload into the task's assets folder; in-memory uploads
                # are written chunk by chunk, disk-backed uploads are copied from
                # their temporary file
                with open(destination_file, 'wb+') as fd:
                    if isinstance(files[0], InMemoryUploadedFile):
                        for chunk in files[0].chunks():
                            fd.write(chunk)
                    else:
                        with open(files[0].temporary_file_path(), 'rb') as file:
                            copyfileobj(file, fd)

        # Schedule the import once the task record is committed
        worker_tasks.process_task.delay(task.id)

        serializer = TaskSerializer(task)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
|