Resize handling for TIFFs

pull/746/head
Piero Toffanin 2019-12-02 12:57:16 -05:00
parent cff90587e3
commit 90b5a9ce91
10 changed files with 716 additions and 694 deletions

Show file

@@ -1,5 +1,6 @@
import rasterio
import urllib
import os
from django.http import HttpResponse
from rio_tiler.errors import TileOutsideBounds
from rio_tiler.mercator import get_zooms
@@ -92,6 +93,9 @@ class TileJson(TaskNestedView):
task = self.get_and_check_task(request, pk)
raster_path = get_raster_path(task, tile_type)
if not os.path.isfile(raster_path):
raise exceptions.NotFound()
with rasterio.open(raster_path) as src_dst:
minzoom, maxzoom = get_zooms(src_dst)
@@ -136,6 +140,9 @@ class Metadata(TaskNestedView):
pmin, pmax = 2.0, 98.0
raster_path = get_raster_path(task, tile_type)
if not os.path.isfile(raster_path):
raise exceptions.NotFound()
try:
info = main.metadata(raster_path, pmin=pmin, pmax=pmax, histogram_bins=255, histogram_range=hrange, expr=expr)
except IndexError as e:
@@ -253,7 +260,7 @@ class Tiles(TaskNestedView):
expr, _ = lookup_formula(formula, bands)
if tile_type in ['dsm', 'dtm'] and rescale is None:
raise exceptions.ValidationError("Cannot get tiles without rescale parameter. Add ?rescale=min,max to the URL.")
rescale = "0,1000"
if tile_type in ['dsm', 'dtm'] and color_map is None:
color_map = "gray"
@@ -263,6 +270,8 @@
tilesize = scale * 256
url = get_raster_path(task, tile_type)
if not os.path.isfile(url):
raise exceptions.NotFound()
try:
if expr is not None:
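
All three views touched above (TileJson, Metadata, Tiles) gain the same guard: if the raster for the requested tile type is missing on disk, the endpoint answers with a 404 instead of letting rasterio throw. A minimal sketch of the pattern as a hypothetical helper, using Django REST Framework's exceptions module and the get_raster_path function already called above:

    import os
    from rest_framework import exceptions

    def checked_raster_path(task, tile_type):
        # Resolve the raster for this task/tile type and refuse to go further
        # if the file is not there; the caller gets a clean HTTP 404.
        raster_path = get_raster_path(task, tile_type)
        if not os.path.isfile(raster_path):
            raise exceptions.NotFound()
        return raster_path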

Show file

@@ -104,14 +104,14 @@ def add_default_presets():
defaults={'options': [{'name': 'mesh-octree-depth', 'value': "11"},
{'name': 'use-3dmesh', 'value': True},
{'name': 'depthmap-resolution', 'value': '1000'},
{'name': 'mesh-size', 'value': '600000'}]})
{'name': 'mesh-size', 'value': '300000'}]})
Preset.objects.update_or_create(name='Buildings', system=True,
defaults={'options': [{'name': 'mesh-octree-depth', 'value': "10"},
{'name': 'mesh-size', 'value': '300000'},
{'name': 'depthmap-resolution', 'value': '1000'},
{'name': 'texturing-nadir-weight', 'value': "28"}]})
Preset.objects.update_or_create(name='Point of Interest', system=True,
defaults={'options': [{'name': 'mesh-size', 'value': '600000'},
defaults={'options': [{'name': 'mesh-size', 'value': '300000'},
{'name': 'use-3dmesh', 'value': True}]})
Preset.objects.update_or_create(name='Forest', system=True,
defaults={'options': [{'name': 'min-num-features', 'value': "18000"},

Show file

@@ -8,20 +8,18 @@ import os
from webodm import settings
def find_and_assure_cogeo(apps, schema_editor):
print("===============================================")
print("Optimizing TIFFs: this is going to take a while!")
print("===============================================")
print("=====================================================================")
print("Migrating TIFFs to Cloud Optimized GeoTIFFs, this might take a while!")
print("=====================================================================")
for asset_filename in ["odm_orthophoto.tif", "dsm.tif", "dtm.tif"]:
for asset in glob.glob(os.path.join(settings.MEDIA_ROOT, "**", asset_filename), recursive=True):
for asset in glob.glob(os.path.join(settings.MEDIA_ROOT, "project", "**", asset_filename), recursive=True):
try:
print("Optimizing %s" % asset)
assure_cogeo(asset)
except Exception as e:
print("WARNING: cannot check/optimize %s (%s), skipping..." % (asset, str(e)))
print("Done optimizing TIFFs!")
class Migration(migrations.Migration):
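
The migration above walks MEDIA_ROOT/project recursively for the known raster assets and passes each one to assure_cogeo, which is defined elsewhere in the codebase and not shown in this diff. For a rough idea of what such a conversion involves, a hedged sketch using the rio_cogeo package (an assumption; the actual assure_cogeo implementation may differ):

    from rio_cogeo.cogeo import cog_translate, cog_validate
    from rio_cogeo.profiles import cog_profiles

    def make_cog(path):
        # rio_cogeo 1.x returns a bool from cog_validate; skip files that
        # are already valid Cloud Optimized GeoTIFFs.
        if cog_validate(path):
            return
        # Rewrite the raster with internal tiling and overviews.
        cog_translate(path, path + ".cog.tif", cog_profiles.get("deflate"))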

Show file

@@ -85,6 +85,35 @@ def resize_image(image_path, resize_to, done=None):
:return: path and resize ratio
"""
try:
can_resize = False
# Check if this image can be resized
# There's no easy way to resize multispectral 16bit images
# (Support should be added to PIL)
is_jpeg = re.match(r'.*\.jpe?g$', image_path, re.IGNORECASE)
if is_jpeg:
# We can always resize these
can_resize = True
else:
try:
bps = piexif.load(image_path)['0th'][piexif.ImageIFD.BitsPerSample]
if isinstance(bps, int):
# Always resize single band images
can_resize = True
elif isinstance(bps, tuple) and len(bps) > 1:
# Only resize multiband images if depth is 8bit
can_resize = bps == (8, ) * len(bps)
else:
logger.warning("Cannot determine if image %s can be resized, hoping for the best!" % image_path)
can_resize = True
except KeyError:
logger.warning("Cannot find BitsPerSample tag for %s" % image_path)
if not can_resize:
logger.warning("Cannot resize %s" % image_path)
return {'path': image_path, 'resize_ratio': 1}
im = Image.open(image_path)
path, ext = os.path.splitext(image_path)
resized_image_path = os.path.join(path + '.resized' + ext)
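
The block above is the core of the change: JPEGs are always resizable, while other formats (in practice TIFFs) are only resized when every band is 8-bit, because PIL has no reliable way to resample multispectral 16-bit imagery. Extracted from the diff into a standalone sketch, the decision reads roughly as follows (same piexif calls as above):

    import re
    import piexif

    def can_resize_image(image_path):
        # JPEGs can always be handled by PIL.
        if re.match(r'.*\.jpe?g$', image_path, re.IGNORECASE):
            return True
        try:
            bps = piexif.load(image_path)['0th'][piexif.ImageIFD.BitsPerSample]
        except KeyError:
            return False  # no BitsPerSample tag: the code above logs a warning and skips resizing
        if isinstance(bps, int):
            return True  # single band image
        if isinstance(bps, tuple) and len(bps) > 1:
            return bps == (8,) * len(bps)  # multiband: only when every band is 8-bit
        return True  # unknown layout: the code above "hopes for the best"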
@@ -100,15 +129,18 @@ def resize_image(image_path, resize_to, done=None):
resized_width = int(width * ratio)
resized_height = int(height * ratio)
im.thumbnail((resized_width, resized_height), Image.LANCZOS)
im = im.resize((resized_width, resized_height), Image.BILINEAR)
params = {}
if is_jpeg:
params['quality'] = 100
if 'exif' in im.info:
exif_dict = piexif.load(im.info['exif'])
exif_dict['Exif'][piexif.ExifIFD.PixelXDimension] = resized_width
exif_dict['Exif'][piexif.ExifIFD.PixelYDimension] = resized_height
im.save(resized_image_path, "JPEG", exif=piexif.dump(exif_dict), quality=100)
im.save(resized_image_path, exif=piexif.dump(exif_dict), **params)
else:
im.save(resized_image_path, "JPEG", quality=100)
im.save(resized_image_path, **params)
im.close()
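
Two behavioural changes are worth calling out in the hunk above: Image.thumbnail (which resizes in place and never upscales) is replaced with Image.resize, which returns a new image at exactly the requested dimensions, and the hard-coded "JPEG" format argument is dropped so PIL infers the output format from the file extension, letting TIFFs flow through the same path; the quality parameter is now only passed for JPEGs. Condensed into a hypothetical helper (im is the already-open PIL image):

    from PIL import Image
    import piexif

    def save_resized(im, resized_image_path, resized_width, resized_height, is_jpeg):
        im = im.resize((resized_width, resized_height), Image.BILINEAR)
        params = {'quality': 100} if is_jpeg else {}
        if 'exif' in im.info:
            # Keep the EXIF block but update the pixel dimensions to the new size.
            exif_dict = piexif.load(im.info['exif'])
            exif_dict['Exif'][piexif.ExifIFD.PixelXDimension] = resized_width
            exif_dict['Exif'][piexif.ExifIFD.PixelYDimension] = resized_height
            im.save(resized_image_path, exif=piexif.dump(exif_dict), **params)
        else:
            im.save(resized_image_path, **params)  # format inferred from the extension
        im.close()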
@@ -821,7 +853,7 @@ class Task(models.Model):
logger.warning("We were asked to resize images to {}, this might be an error.".format(self.resize_to))
return []
images_path = self.find_all_files_matching(r'.*\.jpe?g$')
images_path = self.find_all_files_matching(r'.*\.(jpe?g|tiff?)$')
total_images = len(images_path)
resized_images_count = 0
last_update = 0

Show file

@@ -97,7 +97,7 @@ class Map extends React.Component {
let metaUrl = url + "metadata";
if (type == "plant") metaUrl += "?formula=NDVI&bands=RGN&color_map=rdylgn";
if (type == "dsm") metaUrl += "?hillshade=3&color_map=jet_r";
if (type == "dsm" || type == "dtm") metaUrl += "?hillshade=3&color_map=jet_r";
this.tileJsonRequests.push($.getJSON(metaUrl)
.done(mres => {
@@ -121,7 +121,7 @@
params["rescale"] = encodeURIComponent("-1,1");
}
tileUrl = tileUrl.slice(0, tileUrl.indexOf("?")) + Utils.toSearchQuery(params);
tileUrl = Utils.buildUrlWithQuery(tileUrl, params);
}
const layer = Leaflet.tileLayer(tileUrl, {

Show file

@@ -235,7 +235,7 @@ class ProjectListItem extends React.Component {
this.setUploadState({
totalCount: this.state.upload.totalCount - remainingFilesCount,
uploading: false,
error: `${remainingFilesCount} files cannot be uploaded. As a reminder, only images (.jpg, .png) and GCP files (.txt) can be uploaded. Try again.`
error: `${remainingFilesCount} files cannot be uploaded. As a reminder, only images (.jpg, .tif, .png) and GCP files (.txt) can be uploaded. Try again.`
});
}
})

This diff is too large to display.

Show file

@@ -26,143 +26,138 @@ class TestApiTask(BootTransactionTestCase):
def test_task(self):
client = APIClient()
node_odm = start_processing_node()
with start_processing_node():
user = User.objects.get(username="testuser")
self.assertFalse(user.is_superuser)
project = Project.objects.create(
owner=user,
name="test project"
)
user = User.objects.get(username="testuser")
self.assertFalse(user.is_superuser)
project = Project.objects.create(
owner=user,
name="test project"
)
image1 = open("app/fixtures/tiny_drone_image.jpg", 'rb')
image2 = open("app/fixtures/tiny_drone_image_2.jpg", 'rb')
image1 = open("app/fixtures/tiny_drone_image.jpg", 'rb')
image2 = open("app/fixtures/tiny_drone_image_2.jpg", 'rb')
# Create processing node
pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
client.login(username="testuser", password="test1234")
# Create processing node
pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
client.login(username="testuser", password="test1234")
# Create task
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2]
}, format="multipart")
image1.close()
image2.close()
task = Task.objects.get(id=res.data['id'])
# Create task
res = client.post("/api/projects/{}/tasks/".format(project.id), {
'images': [image1, image2]
}, format="multipart")
image1.close()
image2.close()
task = Task.objects.get(id=res.data['id'])
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
task.refresh_from_db()
if task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
task.refresh_from_db()
if task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
self.assertEqual(task.status, status_codes.COMPLETED)
self.assertEqual(task.status, status_codes.COMPLETED)
# Download task assets
task_uuid = task.uuid
res = client.get("/api/projects/{}/tasks/{}/download/all.zip".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
# Download task assets
task_uuid = task.uuid
res = client.get("/api/projects/{}/tasks/{}/download/all.zip".format(project.id, task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
if not os.path.exists(settings.MEDIA_TMP):
os.mkdir(settings.MEDIA_TMP)
if not os.path.exists(settings.MEDIA_TMP):
os.mkdir(settings.MEDIA_TMP)
assets_path = os.path.join(settings.MEDIA_TMP, "all.zip")
assets_path = os.path.join(settings.MEDIA_TMP, "all.zip")
with open(assets_path, 'wb') as f:
f.write(res.content)
with open(assets_path, 'wb') as f:
f.write(res.content)
remove_perm('change_project', user, project)
remove_perm('change_project', user, project)
assets_file = open(assets_path, 'rb')
assets_file = open(assets_path, 'rb')
# Cannot import unless we have permission
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'file': [assets_file]
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
# Cannot import unless we have permission
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'file': [assets_file]
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
assign_perm('change_project', user, project)
assign_perm('change_project', user, project)
# Import with file upload method
assets_file.seek(0)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'file': [assets_file]
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
assets_file.close()
# Import with file upload method
assets_file.seek(0)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'file': [assets_file]
}, format="multipart")
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
assets_file.close()
file_import_task = Task.objects.get(id=res.data['id'])
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
file_import_task.refresh_from_db()
if file_import_task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
file_import_task = Task.objects.get(id=res.data['id'])
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
file_import_task.refresh_from_db()
if file_import_task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
self.assertEqual(file_import_task.import_url, "file://all.zip")
self.assertEqual(file_import_task.images_count, 1)
self.assertEqual(file_import_task.processing_node, None)
self.assertEqual(file_import_task.auto_processing_node, False)
self.assertEqual(file_import_task.import_url, "file://all.zip")
self.assertEqual(file_import_task.images_count, 1)
self.assertEqual(file_import_task.processing_node, None)
self.assertEqual(file_import_task.auto_processing_node, False)
# Can access assets
res = client.get("/api/projects/{}/tasks/{}/assets/odm_orthophoto/odm_orthophoto.tif".format(project.id, file_import_task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
# Can access assets
res = client.get("/api/projects/{}/tasks/{}/assets/odm_orthophoto/odm_orthophoto.tif".format(project.id, file_import_task.id))
self.assertEqual(res.status_code, status.HTTP_200_OK)
# Set task public so we can download from it without auth
file_import_task.public = True
file_import_task.save()
# Set task public so we can download from it without auth
file_import_task.public = True
file_import_task.save()
# Import with URL method
assets_import_url = "http://{}:{}/task/{}/download/all.zip".format(pnode.hostname, pnode.port, task_uuid)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'url': assets_import_url
})
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
url_task = Task.objects.get(id=res.data['id'])
# Import with URL method
assets_import_url = "http://{}:{}/task/{}/download/all.zip".format(pnode.hostname, pnode.port, task_uuid)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'url': assets_import_url
})
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
url_task = Task.objects.get(id=res.data['id'])
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
url_task.refresh_from_db()
if url_task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
url_task.refresh_from_db()
if url_task.status == status_codes.COMPLETED:
break
c += 1
time.sleep(1)
self.assertEqual(url_task.import_url, assets_import_url)
self.assertEqual(url_task.images_count, 1)
self.assertEqual(url_task.import_url, assets_import_url)
self.assertEqual(url_task.images_count, 1)
# Import corrupted file
assets_import_url = "http://{}:{}/task/{}/download/orthophoto.tif".format(pnode.hostname, pnode.port, task_uuid)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'url': assets_import_url
})
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
corrupted_task = Task.objects.get(id=res.data['id'])
# Import corrupted file
assets_import_url = "http://{}:{}/task/{}/download/orthophoto.tif".format(pnode.hostname, pnode.port, task_uuid)
res = client.post("/api/projects/{}/tasks/import".format(project.id), {
'url': assets_import_url
})
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
corrupted_task = Task.objects.get(id=res.data['id'])
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
corrupted_task.refresh_from_db()
if corrupted_task.status == status_codes.FAILED:
break
c += 1
time.sleep(1)
self.assertEqual(corrupted_task.status, status_codes.FAILED)
self.assertTrue("Invalid" in corrupted_task.last_error)
# Stop processing node
node_odm.terminate()
# Wait for completion
c = 0
while c < 10:
worker.tasks.process_pending_tasks()
corrupted_task.refresh_from_db()
if corrupted_task.status == status_codes.FAILED:
break
c += 1
time.sleep(1)
self.assertEqual(corrupted_task.status, status_codes.FAILED)
self.assertTrue("Invalid" in corrupted_task.last_error)
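
The test drives the same polling loop four times: process pending tasks with the worker, refresh the model from the database, and stop once the task reaches the expected status or ten attempts pass. A hypothetical helper (not part of this commit) that captures the pattern:

    import time
    import worker  # same module the test drives above

    def wait_for_status(task, target_status, attempts=10):
        # Run the worker and poll until the task reaches target_status
        # (e.g. status_codes.COMPLETED above) or attempts are exhausted.
        for _ in range(attempts):
            worker.tasks.process_pending_tasks()
            task.refresh_from_db()
            if task.status == target_status:
                return True
            time.sleep(1)
        return False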

Show file

@@ -29,36 +29,33 @@ class TestWorker(BootTestCase):
pnode = ProcessingNode.objects.create(hostname="localhost", port=11223)
self.assertTrue(pnode.api_version is None)
pnserver = start_processing_node()
with start_processing_node():
worker.tasks.update_nodes_info()
worker.tasks.update_nodes_info()
pnode.refresh_from_db()
self.assertTrue(pnode.api_version is not None)
pnode.refresh_from_db()
self.assertTrue(pnode.api_version is not None)
# Create task
task = Task.objects.create(project=project)
# Create task
task = Task.objects.create(project=project)
# Delete project
project.deleting = True
project.save()
# Delete project
project.deleting = True
project.save()
worker.tasks.cleanup_projects()
worker.tasks.cleanup_projects()
# Task and project should still be here (since task still exists)
self.assertTrue(Task.objects.filter(pk=task.id).exists())
self.assertTrue(Project.objects.filter(pk=project.id).exists())
# Task and project should still be here (since task still exists)
self.assertTrue(Task.objects.filter(pk=task.id).exists())
self.assertTrue(Project.objects.filter(pk=project.id).exists())
# Remove task
task.delete()
# Remove task
task.delete()
worker.tasks.cleanup_projects()
worker.tasks.cleanup_projects()
# Task and project should have been removed (now that task count is zero)
self.assertFalse(Task.objects.filter(pk=task.id).exists())
self.assertFalse(Project.objects.filter(pk=project.id).exists())
pnserver.terminate()
# Task and project should have been removed (now that task count is zero)
self.assertFalse(Task.objects.filter(pk=task.id).exists())
self.assertFalse(Project.objects.filter(pk=project.id).exists())
tmpdir = os.path.join(settings.MEDIA_TMP, 'test')
os.mkdir(tmpdir)

Show file

@@ -15,12 +15,15 @@ from webodm import settings
logger = logging.getLogger('app.logger')
@contextmanager
def start_processing_node(*args):
current_dir = os.path.dirname(os.path.realpath(__file__))
node_odm = subprocess.Popen(['node', 'index.js', '--port', '11223', '--test'] + list(args), shell=False,
cwd=os.path.join(current_dir, "..", "..", "nodeodm", "external", "NodeODM"))
time.sleep(2) # Wait for the server to launch
return node_odm
yield node_odm
node_odm.terminate()
time.sleep(1) # Wait for the server to stop
# We need to clear previous media_root content
# This points to the test directory, but just in case
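
start_processing_node is now a generator-based context manager: everything before the yield runs on entry (spawn NodeODM and wait for it to come up) and everything after it runs on exit (terminate the process), which is why the tests above can wrap their bodies in with start_processing_node(): and drop the explicit terminate() calls. The pattern in isolation, as a hypothetical generic helper; wrapping the yield in try/finally is a small addition that also guarantees teardown when the body raises:

    import subprocess
    import time
    from contextlib import contextmanager

    @contextmanager
    def managed_process(args, **popen_kwargs):
        proc = subprocess.Popen(args, shell=False, **popen_kwargs)
        time.sleep(2)  # give the server time to launch
        try:
            yield proc
        finally:
            proc.terminate()
            time.sleep(1)  # give the server time to stop

Used as with managed_process(['node', 'index.js', '--port', '11223', '--test']) as proc: ..., the process is cleaned up on both success and failure.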