Merge pull request #673 from pierotofy/presetfix

Bug fixes and improvements
pull/684/head
Piero Toffanin 2019-06-14 11:52:11 -04:00 zatwierdzone przez GitHub
commit abc555cba9
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
3 zmienionych plików z 40 dodań i 14 usunięć

Wyświetl plik

@@ -111,12 +111,10 @@ def add_default_presets():
{'name': 'depthmap-resolution', 'value': '1000'},
{'name': 'texturing-nadir-weight', 'value': "28"}]})
Preset.objects.update_or_create(name='Point of Interest', system=True,
defaults={'options': [{'name': 'matcher-neighbors', 'value': "24"},
{'name': 'mesh-size', 'value': '600000'},
defaults={'options': [{'name': 'mesh-size', 'value': '600000'},
{'name': 'use-3dmesh', 'value': True}]})
Preset.objects.update_or_create(name='Forest', system=True,
defaults={'options': [{'name': 'min-num-features', 'value': "18000"},
{'name': 'matcher-neighbors', 'value': "21"},
{'name': 'texturing-data-term', 'value': "area"}]})
Preset.objects.update_or_create(name='DSM + DTM', system=True,
defaults={

Wyświetl plik

@@ -1,6 +1,6 @@
{
"name": "WebODM",
"version": "1.0.0",
"version": "1.0.1",
"description": "Open Source Drone Image Processing",
"main": "index.js",
"scripts": {

Wyświetl plik

@@ -3,6 +3,7 @@ import shutil
import traceback
import time
from threading import Event, Thread
from celery.utils.log import get_task_logger
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Count
@@ -58,19 +59,42 @@ def cleanup_tmp_directory():
logger.info('Cleaned up: %s (%s)' % (f, modified))
# Based on https://stackoverflow.com/questions/22498038/improve-current-implementation-of-a-setinterval-python/22498708#22498708
def setInterval(interval, func, *args):
    """Invoke ``func(*args)`` every ``interval`` seconds on a daemon thread.

    Based on https://stackoverflow.com/a/22498708. The timer keeps firing
    until the returned callable is invoked.

    :param interval: seconds to wait between invocations
    :param func: callable to invoke on each tick
    :param args: positional arguments forwarded to ``func``
    :return: a zero-argument callable that permanently stops the timer
    """
    halt = Event()

    def _ticker():
        # Event.wait returns False on timeout (keep ticking) and True
        # as soon as the stop callable (halt.set) has been invoked.
        while not halt.wait(interval):
            func(*args)

    # Daemon thread so a pending timer never blocks interpreter shutdown.
    worker = Thread(target=_ticker, daemon=True)
    worker.start()
    return halt.set
@app.task
def process_task(taskId):
try:
lock = redis_client.lock('task_lock_{}'.format(taskId))
have_lock = lock.acquire(blocking=False)
lock_id = 'task_lock_{}'.format(taskId)
cancel_monitor = None
if not have_lock:
return
try:
task_lock_last_update = redis_client.getset(lock_id, time.time())
if task_lock_last_update is not None:
# Check if lock has expired
if time.time() - float(task_lock_last_update) <= 30:
# Locked
return
else:
# Expired
logger.warning("Task {} has an expired lock! This could mean that WebODM is running out of memory. Check your server configuration.")
# Set lock
def update_lock():
redis_client.set(lock_id, time.time())
cancel_monitor = setInterval(10, update_lock)
try:
task = Task.objects.get(pk=taskId)
except ObjectDoesNotExist:
logger.info("Task id {} has already been deleted.".format(taskId))
logger.info("Task {} has already been deleted.".format(taskId))
return
try:
@@ -81,13 +105,17 @@ def process_task(taskId):
e, traceback.format_exc()))
if settings.TESTING: raise e
finally:
if cancel_monitor is not None:
cancel_monitor()
try:
if have_lock:
lock.release()
except redis.exceptions.LockError:
# A lock could have expired
redis_client.delete(lock_id)
except redis.exceptions.RedisError:
# Ignore errors, the lock will expire at some point
pass
def get_pending_tasks():
# All tasks that have a processing node assigned
# Or that need one assigned (via auto)