Mirror of https://github.com/OpenDroneMap/WebODM
commit
c157eb358a
|
@ -20,7 +20,10 @@ from .common import get_and_check_project, get_tile_json, path_traversal_check
|
|||
class TaskIDsSerializer(serializers.BaseSerializer):
    """Read-only serializer that represents a task by its primary key alone."""

    def to_representation(self, obj):
        # The entire representation of a task is just its ID.
        return obj.id
|
||||
|
||||
|
||||
class geojsonSerializer(serializers.Serializer):
    """Validates a GeoJSON-like payload carrying a polygon geometry.

    Used by the TaskVolume endpoint to validate the polygon contour
    for which a volume is computed.
    """
    geometry = serializers.JSONField(help_text="polygon contour to get volume")
|
||||
|
||||
class TaskSerializer(serializers.ModelSerializer):
|
||||
project = serializers.PrimaryKeyRelatedField(queryset=models.Project.objects.all())
|
||||
|
@ -302,3 +305,16 @@ class TaskAssets(TaskNestedView):
|
|||
content_type=(mimetypes.guess_type(asset_filename)[0] or "application/zip"))
|
||||
response['Content-Disposition'] = "inline; filename={}".format(asset_filename)
|
||||
return response
|
||||
|
||||
class TaskVolume(TaskNestedView):
    """Endpoint computing the volume enclosed by a polygon drawn over a task's DSM."""

    def post(self, request, pk=None, project_pk=None):
        """Compute the volume for the polygon in the request body.

        Expects a JSON payload with a ``geometry`` member (a GeoJSON
        polygon).  Returns the computed volume with HTTP 200, or a
        validation error (HTTP 400) for malformed input.
        """
        task = self.get_and_check_task(request, pk, project_pk)

        # Validate the payload; raises a 400 response on bad input.
        serializer = geojsonSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        # Pass the validated data, not the raw request body: previously
        # validation ran but its result was ignored.
        result = task.get_volume(serializer.validated_data)
        return Response(result, status=status.HTTP_200_OK)
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ from django.conf.urls import url, include
|
|||
|
||||
from app.api.presets import PresetViewSet
|
||||
from .projects import ProjectViewSet
|
||||
from .tasks import TaskViewSet, TaskTiles, TaskTilesJson, TaskDownloads, TaskAssets
|
||||
from .tasks import TaskViewSet, TaskTiles, TaskTilesJson, TaskDownloads, TaskAssets, TaskVolume
|
||||
from .processingnodes import ProcessingNodeViewSet, ProcessingNodeOptionsView
|
||||
from rest_framework_nested import routers
|
||||
from rest_framework_jwt.views import obtain_jwt_token
|
||||
|
@ -28,7 +28,8 @@ urlpatterns = [
|
|||
url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/download/(?P<asset>.+)$', TaskDownloads.as_view()),
|
||||
|
||||
url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/assets/(?P<unsafe_asset_path>.+)$', TaskAssets.as_view()),
|
||||
url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/volume$', TaskVolume.as_view()),
|
||||
|
||||
url(r'^auth/', include('rest_framework.urls')),
|
||||
url(r'^token-auth/', obtain_jwt_token),
|
||||
]
|
||||
]
|
||||
|
|
|
@ -0,0 +1,158 @@
|
|||
from osgeo import gdal, gdalnumeric, ogr
|
||||
from PIL import Image, ImageDraw
|
||||
import os
|
||||
import numpy as np
|
||||
import json
|
||||
|
||||
def clip_raster(raster, geojson, gt=None, nodata=-9999):
    '''
    Clips a raster to the polygon held in a GeoJSON-like mapping.

    The raster is opened from *raster* (a file path) and read into a
    NumPy array; the clip polygon is serialized to JSON and opened with
    OGR's GeoJSON driver.  Clip features must be a dissolved,
    single-part geometry (not multi-part).  Modified from:

    http://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html
    #clip-a-geotiff-with-shapefile

    Arguments:
        raster   Path to a GDAL-readable raster file
        geojson  A GeoJSON-like dict describing the clip polygon
        gt       An optional GDAL GeoTransform to use instead
        nodata   The NoData value; defaults to -9999.

    Returns the clipped array, with cells outside the polygon set to
    *nodata*.
    '''
    def image_to_array(img):
        '''
        Converts a Python Imaging Library (PIL) image to a writable
        NumPy byte array.  (np.frombuffer replaces the removed
        gdalnumeric.fromstring; .copy() makes the buffer writable so
        mask.resize() below works.)
        '''
        arr = np.frombuffer(img.tobytes(), dtype='b').copy()
        arr.shape = img.im.size[1], img.im.size[0]
        return arr

    def world_to_pixel(geo_matrix, x, y):
        '''
        Uses a GDAL geomatrix (gdal.GetGeoTransform()) to calculate
        the pixel location of a geospatial coordinate; from:
        http://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html#clip-a-geotiff-with-shapefile
        '''
        ulX = geo_matrix[0]
        ulY = geo_matrix[3]
        xDist = geo_matrix[1]
        yDist = geo_matrix[5]  # negative for north-up rasters
        pixel = int((x - ulX) / xDist)
        # Use the vertical pixel size for rows; the cookbook original
        # divided by xDist, which is only correct for square pixels.
        line = int((ulY - y) / abs(yDist))
        return (pixel, line)

    rast = gdal.Open(raster)

    # Can accept either a gdal.Dataset or numpy.array instance
    if not isinstance(rast, np.ndarray):
        gt = rast.GetGeoTransform()
        rast = rast.ReadAsArray()

    # Create an OGR layer from the GeoJSON clip features.  The input is
    # always a mapping here, so the GeoJSON driver applies; the former
    # ESRI-Shapefile branch referenced an undefined name (features_path)
    # and could only raise NameError.
    features = ogr.Open(json.dumps(geojson))
    lyr = features.GetLayer()

    # Get the first feature
    poly = lyr.GetNextFeature()

    # Convert the layer extent to image pixel coordinates
    minX, maxX, minY, maxY = lyr.GetExtent()
    ulX, ulY = world_to_pixel(gt, minX, maxY)
    lrX, lrY = world_to_pixel(gt, maxX, minY)

    # Calculate the pixel size of the new image
    pxWidth = int(lrX - ulX)
    pxHeight = int(lrY - ulY)

    # If the clipping features extend out-of-bounds and ABOVE the raster...
    if gt[3] < maxY:
        # In such a case... ulY ends up being negative--can't have that!
        iY = ulY
        ulY = 0

    # Multi-band image?
    try:
        clip = rast[:, ulY:lrY, ulX:lrX]
    except IndexError:
        clip = rast[ulY:lrY, ulX:lrX]

    # Create a new geomatrix for the image
    gt2 = list(gt)
    gt2[0] = minX
    gt2[3] = maxY

    # Map points to pixels for drawing the boundary on a blank 8-bit,
    # black and white, mask image.
    points = []
    pixels = []
    geom = poly.GetGeometryRef()
    pts = geom.GetGeometryRef(0)

    for p in range(pts.GetPointCount()):
        points.append((pts.GetX(p), pts.GetY(p)))

    for p in points:
        pixels.append(world_to_pixel(gt2, p[0], p[1]))

    raster_poly = Image.new('L', (pxWidth, pxHeight), 1)
    rasterize = ImageDraw.Draw(raster_poly)
    rasterize.polygon(pixels, 0)  # Fill with zeroes

    # If the clipping features extend out-of-bounds and ABOVE the raster...
    if gt[3] < maxY:
        # The clip features were "pushed down" to match the bounds of the
        # raster; this step "pulls" them back up
        premask = image_to_array(raster_poly)
        # We slice out the piece of our clip features that are "off the map"
        mask = np.ndarray((premask.shape[-2] - abs(iY), premask.shape[-1]), premask.dtype)
        mask[:] = premask[abs(iY):, :]
        mask.resize(premask.shape)  # Then fill in from the bottom

        # Most importantly, push the clipped piece down
        gt2[3] = maxY - (maxY - gt[3])

    else:
        mask = image_to_array(raster_poly)

    # Clip the image using the mask
    try:
        clip = gdalnumeric.choose(mask, (clip, nodata))

    # If the clipping features extend out-of-bounds and BELOW the raster...
    except ValueError:
        # We have to cut the clipping features to the raster!
        rshp = list(mask.shape)
        if mask.shape[-2] != clip.shape[-2]:
            rshp[0] = clip.shape[-2]

        if mask.shape[-1] != clip.shape[-1]:
            rshp[1] = clip.shape[-1]

        mask.resize(*rshp, refcheck=False)

        clip = gdalnumeric.choose(mask, (clip, nodata))

    return clip
|
|
@ -0,0 +1,35 @@
|
|||
import json
|
||||
from osgeo import osr
|
||||
|
||||
|
||||
def spatialref(epsg_code):
    """Build an osr.SpatialReference from the given EPSG code."""
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(epsg_code)
    return srs
|
||||
|
||||
def spatialrefWQT(dataset):
    """Build an osr.SpatialReference from a GDAL dataset's projection WKT."""
    srs = osr.SpatialReference()
    srs.ImportFromWkt(dataset.GetProjectionRef())
    return srs
|
||||
|
||||
def reprojson(geojson, dataset):
    """Reproject a GeoJSON polygon from EPSG:4326 to the dataset's CRS.

    Arguments:
        geojson  A GeoJSON-like dict with geometry.coordinates rings in
                 EPSG:4326 (lon/lat)
        dataset  A gdal.Dataset whose projection defines the target CRS

    Returns a dict of the form
    {"geometry": {"type": "Polygon", "coordinates": [...]}} with the
    transformed coordinates.
    """
    crs_in = spatialref(4326)
    crs_out = spatialrefWQT(dataset)
    transform = osr.CoordinateTransformation(crs_in, crs_out)

    # Dictionary representation of the output feature
    fc_out = {"geometry": {"type": "Polygon", "coordinates": []}}

    # Transform each ring separately: the original accumulated every
    # ring into one shared list, which flattened polygon holes
    # (interior rings) into the exterior ring.
    for ring in geojson['geometry']['coordinates']:
        new_ring = []
        for entry in ring:
            x2, y2, _z = transform.TransformPoint(entry[0], entry[1])
            new_ring.append([x2, y2])
        fc_out['geometry']['coordinates'].append(new_ring)

    return fc_out
|
|
@ -3,6 +3,15 @@ import os
|
|||
import shutil
|
||||
import zipfile
|
||||
import uuid as uuid_module
|
||||
import json
|
||||
|
||||
import osgeo.ogr
|
||||
import gdal
|
||||
import struct
|
||||
import statistics
|
||||
from .vertex import rings
|
||||
from .repro_json import reprojson
|
||||
from .cliprasterpol import clip_raster
|
||||
|
||||
import json
|
||||
from shlex import quote
|
||||
|
@ -578,6 +587,25 @@ class Task(models.Model):
|
|||
self.status = status_codes.FAILED
|
||||
self.pending_action = None
|
||||
self.save()
|
||||
|
||||
def get_volume(self, geojson):
    """Compute the volume (in cubic units of the DSM's CRS, typically m3)
    enclosed by the given GeoJSON polygon over this task's DSM.

    The base plane elevation is the median DSM value sampled at the
    polygon's vertices; the volume is the sum of (height - base) over
    every clipped DSM cell, scaled by the pixel area.

    Returns the volume as a number, or None if the DSM asset is missing.
    """
    try:
        raster_path = self.assets_path("odm_dem", "dsm.tif")
        raster = gdal.Open(raster_path)
        gt = raster.GetGeoTransform()

        # Reproject the polygon (EPSG:4326) into the raster's CRS.
        geosom = reprojson(geojson, raster)

        # Base elevation: median of DSM values at the polygon vertices.
        # (Previously rings() was evaluated twice; once is enough.)
        med = statistics.median(entry[2] for entry in rings(raster_path, geosom))

        # Ground sampling distance — assumes square pixels (gt[1]).
        GSD = gt[1]

        # Clip the DSM to the polygon and sum per-cell volumes,
        # ignoring nodata cells.
        clip = clip_raster(raster_path, geosom, gt=None, nodata=-9999)
        return ((clip - med) * GSD * GSD)[clip != -9999.0].sum()

    except FileNotFoundError as e:
        # DSM asset not generated for this task; report no volume.
        logger.warning(e)
        return None
|
||||
|
||||
def find_all_files_matching(self, regex):
|
||||
directory = full_task_directory_path(self.id, self.project.id)
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
from osgeo import ogr
|
||||
import gdal
|
||||
import struct
|
||||
import json
|
||||
|
||||
def convertJson(jsdata):
    """Serialize *jsdata* to its JSON string representation."""
    serialized = json.dumps(jsdata)
    return serialized
|
||||
|
||||
def rings(raster, geojson):
    """Sample the raster at each vertex of the given GeoJSON polygon.

    Arguments:
        raster   Path to a GDAL-readable raster file
        geojson  A GeoJSON-like dict with the polygon to sample

    Returns a list of (px, py, value) tuples, one per polygon vertex
    that falls inside the raster; vertices that cannot be read are
    skipped.
    """
    src = gdal.Open(raster)
    gtx = src.GetGeoTransform()
    band = src.GetRasterBand(1)
    gdal.UseExceptions()

    # OGR's GeoJSON driver reads the serialized mapping directly.
    datasource = ogr.Open(json.dumps(geojson))
    layer = datasource.GetLayer(0)

    vertices = []
    for feat in layer:
        geom = feat.GetGeometryRef()
        ring = geom.GetGeometryRef(0)

        for p in range(ring.GetPointCount()):
            lon, lat, _z = ring.GetPoint(p)
            # World -> pixel coordinates via the geotransform
            px = int((lon - gtx[0]) / gtx[1])  # x pixel
            py = int((lat - gtx[3]) / gtx[5])  # y pixel
            try:
                # Read one 32-bit float pixel at (px, py)
                raw = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
                val = struct.unpack('f', raw)[0]
                vertices.append((px, py, val))
            except (RuntimeError, TypeError, struct.error):
                # Vertex falls outside the raster (ReadRaster raises or
                # returns None).  The original bare except assigned a
                # sentinel value that was never recorded; keep the
                # skip-the-vertex behaviour but catch only read errors.
                continue

    return vertices
|
|
@ -3,6 +3,12 @@ import '../css/Map.scss';
|
|||
import 'leaflet/dist/leaflet.css';
|
||||
import Leaflet from 'leaflet';
|
||||
import async from 'async';
|
||||
|
||||
import 'leaflet-measure/dist/leaflet-measure.css';
|
||||
import 'leaflet-measure/dist/leaflet-measure';
|
||||
import 'leaflet-draw/dist/leaflet.draw.css';
|
||||
import 'leaflet-draw/dist/leaflet.draw';
|
||||
|
||||
import '../vendor/leaflet/L.Control.MousePosition.css';
|
||||
import '../vendor/leaflet/L.Control.MousePosition';
|
||||
import '../vendor/leaflet/Leaflet.Autolayers/css/leaflet.auto-layers.css';
|
||||
|
@ -136,7 +142,7 @@ class Map extends React.Component {
|
|||
<i class="fa fa-cube"></i> 3D
|
||||
</button>`;
|
||||
|
||||
layer.bindPopup(popup);
|
||||
//layer.bindPopup(popup);
|
||||
|
||||
$('#layerOpacity', popup).on('change input', function() {
|
||||
layer.setOpacity($('#layerOpacity', popup).val());
|
||||
|
@ -179,6 +185,79 @@ class Map extends React.Component {
|
|||
map: this.map
|
||||
});
|
||||
|
||||
measureControl.addTo(this.map);
|
||||
|
||||
const featureGroup = L.featureGroup();
|
||||
featureGroup.addTo(this.map);
|
||||
|
||||
new L.Control.Draw({
|
||||
draw: {
|
||||
polygon: {
|
||||
allowIntersection: false, // Restricts shapes to simple polygons
|
||||
shapeOptions: {
|
||||
color: '#707070'
|
||||
}
|
||||
},
|
||||
rectangle: false,
|
||||
circle: false,
|
||||
circlemarker: false,
|
||||
marker: false
|
||||
},
|
||||
edit: {
|
||||
featureGroup: featureGroup,
|
||||
edit: {
|
||||
selectedPathOptions: {
|
||||
maintainColor: true,
|
||||
dashArray: '10, 10'
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addTo(this.map);
|
||||
|
||||
this.map.on(L.Draw.Event.CREATED, function(e) {
|
||||
e.layer.feature = {geometry: {type: 'Polygon'} };
|
||||
featureGroup.addLayer(e.layer);
|
||||
|
||||
var paramList;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
async: false,
|
||||
url: '/api/projects/4/tasks/7/volume',
|
||||
data: JSON.stringify(e.layer.toGeoJSON()),
|
||||
contentType: "application/json",
|
||||
success: function (msg) {
|
||||
paramList = msg;
|
||||
},
|
||||
error: function (jqXHR, textStatus, errorThrown) {
|
||||
alert("get session failed " + errorThrown);
|
||||
}
|
||||
});
|
||||
|
||||
e.layer.bindPopup('Volume: ' + paramList.toFixed(2) + 'Mètres Cubes (m3)');
|
||||
});
|
||||
|
||||
this.map.on(L.Draw.Event.EDITED, function(e) {
|
||||
e.layers.eachLayer(function(layer) {
|
||||
var paramList = null;
|
||||
$.ajax({
|
||||
type: 'POST',
|
||||
async: false,
|
||||
url: '/api/projects/1/tasks/4/volume',
|
||||
data: JSON.stringify(layer.toGeoJSON()),
|
||||
contentType: "application/json",
|
||||
success: function (msg) {
|
||||
paramList = msg;
|
||||
},
|
||||
error: function (jqXHR, textStatus, errorThrown) {
|
||||
alert("get session failed " + errorThrown);
|
||||
}
|
||||
});
|
||||
|
||||
layer.setPopupContent('Volume: ' + paramList.toFixed(2) + 'Mètres Cubes (m3)');
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
Leaflet.control.scale({
|
||||
maxWidth: 250,
|
||||
}).addTo(this.map);
|
||||
|
|
Loading…
Reference in New Issue