Merge pull request #838 from pierotofy/custom

Refactored GCP resize code to python
pull/843/head
Piero Toffanin 2020-03-23 20:47:07 -04:00 zatwierdzone przez GitHub
commit 432c034640
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
5 zmienionych plików z 107 dodań i 45 usunięć

88
app/classes/gcp.py 100644
Wyświetl plik

@@ -0,0 +1,88 @@
import glob
import os
import logging
logger = logging.getLogger('app.logger')
class GCPFile:
    """Reads and manipulates Ground Control Point (GCP) files.

    A GCP file has the SRS definition on its first line, followed by one
    entry per line in the form: "x y z px py filename [extras...]".
    Lines starting with "#" are treated as comments.
    """

    def __init__(self, gcp_path):
        """
        :param gcp_path: path to the GCP file (the file may not exist)
        """
        self.gcp_path = gcp_path
        self.entries = []  # raw (unparsed) entry lines
        self.raw_srs = ""  # SRS definition (first non-blank line of the file)
        self.read()

    def read(self):
        """Loads the SRS and entry lines from the GCP file, if it exists."""
        if not self.exists():
            # Lazy %-args so the message is only formatted when emitted
            logger.warning("GCP file %s does not exist", self.gcp_path)
            return

        with open(self.gcp_path, 'r') as f:
            contents = f.read().strip()

        lines = [line.strip() for line in contents.split('\n')]
        if lines:
            self.raw_srs = lines[0]  # SRS

            for line in lines[1:]:
                # Skip blank lines and "#" comments
                if line == "" or line[0] == "#":
                    continue
                # A valid entry has at least: x y z px py filename
                if len(line.split()) >= 6:
                    self.entries.append(line)
                else:
                    logger.warning("Malformed GCP line: %s", line)

    def iter_entries(self):
        """Yields each raw entry parsed into a GCPEntry."""
        for entry in self.entries:
            yield self.parse_entry(entry)

    def parse_entry(self, entry):
        """
        Parses a raw GCP line.
        :param entry: raw line ("x y z px py filename [extras...]")
        :return GCPEntry, or None when entry is empty
        """
        if entry:
            parts = entry.split()
            x, y, z, px, py, filename = parts[:6]
            extras = " ".join(parts[6:])
            return GCPEntry(float(x), float(y), float(z), float(px), float(py), filename, extras)

    def get_entry(self, n):
        """
        :param n: zero-based entry index
        :return parsed GCPEntry, or None when n is out of range
        """
        if n < self.entries_count():
            return self.parse_entry(self.entries[n])

    def entries_count(self):
        """:return number of valid entries read from the file"""
        return len(self.entries)

    def exists(self):
        """:return True when a GCP path was given and the file exists on disk"""
        return bool(self.gcp_path and os.path.exists(self.gcp_path))

    def create_resized_copy(self, gcp_file_output, image_ratios):
        """
        Creates a new resized GCP file from an existing GCP file. If one already exists, it will be removed.
        :param gcp_file_output output path of new GCP file
        :param image_ratios dictionary with "imagename" --> "resize_ratio" values
        :return path to new GCP file
        """
        output = [self.raw_srs]

        for entry in self.iter_entries():
            # Look the ratio up once per entry; images missing from the
            # map keep their original pixel coordinates (ratio 1.0).
            ratio = image_ratios.get(entry.filename.lower(), 1.0)
            entry.px *= ratio
            entry.py *= ratio
            output.append(str(entry))

        with open(gcp_file_output, 'w') as f:
            f.write('\n'.join(output) + '\n')

        return gcp_file_output
class GCPEntry:
    """A single ground control point record from a GCP file."""

    def __init__(self, x, y, z, px, py, filename, extras=""):
        # Geographic coordinates (in the file's SRS)
        self.x = x
        self.y = y
        self.z = z
        # Pixel coordinates within the referenced image
        self.px = px
        self.py = py
        self.filename = filename
        # Any trailing fields beyond the standard six, joined as one string
        self.extras = extras

    def __str__(self):
        # Serialize back to the GCP file line format; rstrip drops the
        # trailing space left when extras is empty.
        fields = (self.x, self.y, self.z, self.px, self.py, self.filename, self.extras)
        return " ".join(str(f) for f in fields).rstrip()

Wyświetl plik

@ -35,6 +35,7 @@ from nodeodm import status_codes
from nodeodm.models import ProcessingNode from nodeodm.models import ProcessingNode
from pyodm.exceptions import NodeResponseError, NodeConnectionError, NodeServerError, OdmError from pyodm.exceptions import NodeResponseError, NodeConnectionError, NodeServerError, OdmError
from webodm import settings from webodm import settings
from app.classes.gcp import GCPFile
from .project import Project from .project import Project
from functools import partial from functools import partial
@ -628,7 +629,7 @@ class Task(models.Model):
os.makedirs(assets_dir) os.makedirs(assets_dir)
# Download and try to extract results up to 4 times # Download and try to extract results up to 4 times
# (~95% of the times, on large downloads, the archive could be corrupted) # (~5% of the times, on large downloads, the archive could be corrupted)
retry_num = 0 retry_num = 0
extracted = False extracted = False
last_update = 0 last_update = 0
@ -648,7 +649,7 @@ class Task(models.Model):
logger.info("Downloading all.zip for {}".format(self)) logger.info("Downloading all.zip for {}".format(self))
# Download all assets # Download all assets
zip_path = self.processing_node.download_task_assets(self.uuid, assets_dir, progress_callback=callback) zip_path = self.processing_node.download_task_assets(self.uuid, assets_dir, progress_callback=callback, parallel_downloads=max(1, int(16 / (2 ** retry_num))))
# Rename to all.zip # Rename to all.zip
all_zip_path = self.assets_path("all.zip") all_zip_path = self.assets_path("all.zip")
@ -660,7 +661,7 @@ class Task(models.Model):
self.extract_assets_and_complete() self.extract_assets_and_complete()
extracted = True extracted = True
except zipfile.BadZipFile: except zipfile.BadZipFile:
if retry_num < 4: if retry_num < 5:
logger.warning("{} seems corrupted. Retrying...".format(all_zip_path)) logger.warning("{} seems corrupted. Retrying...".format(all_zip_path))
retry_num += 1 retry_num += 1
os.remove(all_zip_path) os.remove(all_zip_path)
@ -889,21 +890,19 @@ class Task(models.Model):
# Assume we only have a single GCP file per task # Assume we only have a single GCP file per task
gcp_path = gcp_path[0] gcp_path = gcp_path[0]
resize_script_path = os.path.join(settings.BASE_DIR, 'app', 'scripts', 'resize_gcp.js')
dict = {} image_ratios = {}
for ri in resized_images: for ri in resized_images:
dict[os.path.basename(ri['path'])] = ri['resize_ratio'] image_ratios[os.path.basename(ri['path']).lower()] = ri['resize_ratio']
try: try:
new_gcp_content = subprocess.check_output("node {} {} '{}'".format(quote(resize_script_path), quote(gcp_path), json.dumps(dict)), shell=True) gcpFile = GCPFile(gcp_path)
with open(gcp_path, 'w') as f: gcpFile.create_resized_copy(gcp_path, image_ratios)
f.write(new_gcp_content.decode('utf-8'))
logger.info("Resized GCP file {}".format(gcp_path)) logger.info("Resized GCP file {}".format(gcp_path))
return gcp_path return gcp_path
except subprocess.CalledProcessError as e: except Exception as e:
logger.warning("Could not resize GCP file {}: {}".format(gcp_path, str(e))) logger.warning("Could not resize GCP file {}: {}".format(gcp_path, str(e)))
return None
def create_task_directories(self): def create_task_directories(self):
""" """

Wyświetl plik

@@ -1,25 +0,0 @@
#!/usr/bin/env node
const fs = require('fs');
const Gcp = require('../static/app/js/classes/Gcp');
const argv = process.argv.slice(2);
function die(s){
console.log(s);
process.exit(1);
}
if (argv.length != 2){
die(`Usage: ./resize_gcp.js <path/to/gcp_file.txt> <JSON encoded image-->ratio map>`);
}
const [inputFile, jsonMap] = argv;
if (!fs.existsSync(inputFile)){
die('File does not exist: ' + inputFile);
}
const originalGcp = new Gcp(fs.readFileSync(inputFile, 'utf8'));
try{
const map = JSON.parse(jsonMap);
const newGcp = originalGcp.resize(map, true);
console.log(newGcp.toString());
}catch(e){
die("Not a valid JSON string: " + jsonMap);
}

Wyświetl plik

@ -169,14 +169,14 @@ class TestApiTask(BootTransactionTestCase):
[x, y, z, px, py, imagename, *extras] = lines[1].split(' ') [x, y, z, px, py, imagename, *extras] = lines[1].split(' ')
self.assertTrue(imagename == "tiny_drone_image.JPG") # case insensitive self.assertTrue(imagename == "tiny_drone_image.JPG") # case insensitive
self.assertTrue(float(px) == 2.0) # scaled by half self.assertEqual(float(px), 2.0) # scaled by half
self.assertTrue(float(py) == 3.0) # scaled by half self.assertEqual(float(py), 3.0) # scaled by half
self.assertTrue(float(x) == 576529.22) # Didn't change self.assertEqual(float(x), 576529.22) # Didn't change
[x, y, z, px, py, imagename, *extras] = lines[5].split(' ') [x, y, z, px, py, imagename, *extras] = lines[5].split(' ')
self.assertTrue(imagename == "missing_image.jpg") self.assertEqual(imagename, "missing_image.jpg")
self.assertTrue(float(px) == 8.0) # Didn't change self.assertEqual(float(px), 8.0) # Didn't change
self.assertTrue(float(py) == 8.0) # Didn't change self.assertEqual(float(py), 8.0) # Didn't change
# Resize progress is 100% # Resize progress is 100%
resized_task.refresh_from_db() resized_task.refresh_from_db()
@ -795,7 +795,7 @@ class TestApiTask(BootTransactionTestCase):
worker.tasks.process_pending_tasks() worker.tasks.process_pending_tasks()
task = Task.objects.get(pk=res.data['id']) task = Task.objects.get(pk=res.data['id'])
self.assertTrue(task.status == status_codes.COMPLETED) self.assertEqual(task.status, status_codes.COMPLETED)
# Orthophoto files/directories should be missing # Orthophoto files/directories should be missing
self.assertFalse(os.path.exists(task.assets_path("odm_orthophoto", "odm_orthophoto.tif"))) self.assertFalse(os.path.exists(task.assets_path("odm_orthophoto", "odm_orthophoto.tif")))

Wyświetl plik

@ -160,13 +160,13 @@ class ProcessingNode(models.Model):
task = api_client.get_task(uuid) task = api_client.get_task(uuid)
return task.remove() return task.remove()
def download_task_assets(self, uuid, destination, progress_callback): def download_task_assets(self, uuid, destination, progress_callback, parallel_downloads=16):
""" """
Downloads a task asset Downloads a task asset
""" """
api_client = self.api_client() api_client = self.api_client()
task = api_client.get_task(uuid) task = api_client.get_task(uuid)
return task.download_zip(destination, progress_callback) return task.download_zip(destination, progress_callback, parallel_downloads=parallel_downloads)
def restart_task(self, uuid, options = None): def restart_task(self, uuid, options = None):
""" """