mirror of https://github.com/OpenDroneMap/WebODM
npm install support for plugins, started moving volume code into plugin, API url mountpoints
parent
c157eb358a
commit
2005194d94
@@ -20,10 +20,6 @@ from .common import get_and_check_project, get_tile_json, path_traversal_check
 class TaskIDsSerializer(serializers.BaseSerializer):
     def to_representation(self, obj):
         return obj.id

-class geojsonSerializer(serializers.Serializer):
-    """docstring for geojsonSeria"""
-    geometry = serializers.JSONField(help_text="polygon contour to get volume")
-
 class TaskSerializer(serializers.ModelSerializer):
     project = serializers.PrimaryKeyRelatedField(queryset=models.Project.objects.all())

@@ -196,15 +192,15 @@ class TaskNestedView(APIView):
     queryset = models.Task.objects.all().defer('orthophoto_extent', 'dtm_extent', 'dsm_extent', 'console_output', )
     permission_classes = (IsAuthenticatedOrReadOnly, )

-    def get_and_check_task(self, request, pk, project_pk, annotate={}):
+    def get_and_check_task(self, request, pk, annotate={}):
         try:
-            task = self.queryset.annotate(**annotate).get(pk=pk, project=project_pk)
+            task = self.queryset.annotate(**annotate).get(pk=pk)
         except (ObjectDoesNotExist, ValidationError):
             raise exceptions.NotFound()

         # Check for permissions, unless the task is public
         if not task.public:
-            get_and_check_project(request, project_pk)
+            get_and_check_project(request, task.project.id)

         return task

@@ -214,7 +210,7 @@ class TaskTiles(TaskNestedView):
         """
         Get a tile image
         """
-        task = self.get_and_check_task(request, pk, project_pk)
+        task = self.get_and_check_task(request, pk)
         tile_path = task.get_tile_path(tile_type, z, x, y)
         if os.path.isfile(tile_path):
             tile = open(tile_path, "rb")

@@ -228,7 +224,7 @@ class TaskTilesJson(TaskNestedView):
         """
         Get tile.json for this tasks's asset type
         """
-        task = self.get_and_check_task(request, pk, project_pk)
+        task = self.get_and_check_task(request, pk)

         extent_map = {
             'orthophoto': task.orthophoto_extent,

@@ -259,7 +255,7 @@ class TaskDownloads(TaskNestedView):
         """
         Downloads a task asset (if available)
         """
-        task = self.get_and_check_task(request, pk, project_pk)
+        task = self.get_and_check_task(request, pk)

         # Check and download
         try:

@@ -287,7 +283,7 @@ class TaskAssets(TaskNestedView):
         """
         Downloads a task asset (if available)
         """
-        task = self.get_and_check_task(request, pk, project_pk)
+        task = self.get_and_check_task(request, pk)

         # Check for directory traversal attacks
         try:

@@ -305,16 +301,3 @@ class TaskAssets(TaskNestedView):
                                 content_type=(mimetypes.guess_type(asset_filename)[0] or "application/zip"))
         response['Content-Disposition'] = "inline; filename={}".format(asset_filename)
         return response
-
-class TaskVolume(TaskNestedView):
-    def post(self, request, pk=None, project_pk=None):
-        task = self.get_and_check_task(request, pk, project_pk)
-        serializer = geojsonSerializer(data=request.data)
-        serializer.is_valid(raise_exception=True)
-        # geometry = serializer.data.get('geometry')
-        # if geometry is None:
-        #     raise exceptions.ValidationError("A geoson file are not available.")
-        result=task.get_volume(request.data)
-        response = Response(result, status=status.HTTP_200_OK)
-        return response
-
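Note: the hunks above drop the core `/volume` endpoint (being relocated into the volume plugin later in this commit) and remove the `project_pk` argument from `get_and_check_task`, which now resolves the project from the task itself. As a hedged sketch only, this is roughly how a client exercised the endpoint that is removed here; the host, token and IDs are placeholders, not values from this diff:

```python
# Illustrative sketch of the removed core endpoint; host, token and IDs are placeholders.
import requests

geojson = {
    "geometry": {
        "type": "Polygon",
        "coordinates": [[[-91.99, 46.84], [-91.99, 46.85], [-91.98, 46.85], [-91.99, 46.84]]]
    }
}

res = requests.post(
    "http://localhost:8000/api/projects/1/tasks/1/volume",   # route removed by this commit
    json=geojson,
    headers={"Authorization": "JWT <token>"}                  # token from /api/token-auth/
)
print(res.json())  # volume estimate computed by Task.get_volume()
```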
@@ -1,8 +1,9 @@
 from django.conf.urls import url, include

 from app.api.presets import PresetViewSet
+from app.plugins import get_api_url_patterns
 from .projects import ProjectViewSet
-from .tasks import TaskViewSet, TaskTiles, TaskTilesJson, TaskDownloads, TaskAssets, TaskVolume
+from .tasks import TaskViewSet, TaskTiles, TaskTilesJson, TaskDownloads, TaskAssets
 from .processingnodes import ProcessingNodeViewSet, ProcessingNodeOptionsView
 from rest_framework_nested import routers
 from rest_framework_jwt.views import obtain_jwt_token

@@ -28,8 +29,9 @@ urlpatterns = [
     url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/download/(?P<asset>.+)$', TaskDownloads.as_view()),

     url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/assets/(?P<unsafe_asset_path>.+)$', TaskAssets.as_view()),
-    url(r'projects/(?P<project_pk>[^/.]+)/tasks/(?P<pk>[^/.]+)/volume$', TaskVolume.as_view()),

     url(r'^auth/', include('rest_framework.urls')),
     url(r'^token-auth/', obtain_jwt_token),
 ]
+
+urlpatterns += get_api_url_patterns()
@@ -1,158 +0,0 @@
-from osgeo import gdal, gdalnumeric, ogr
-from PIL import Image, ImageDraw
-import os
-import numpy as np
-import json
-
-def clip_raster(raster, geojson, gt=None, nodata=-9999):
-    '''
-    Clips a raster (given as either a gdal.Dataset or as a numpy.array
-    instance) to a polygon layer provided by a Shapefile (or other vector
-    layer). If a numpy.array is given, a "GeoTransform" must be provided
-    (via dataset.GetGeoTransform() in GDAL). Returns an array. Clip features
-    must be a dissolved, single-part geometry (not multi-part). Modified from:
-
-    http://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html
-    #clip-a-geotiff-with-shapefile
-
-    Arguments:
-        rast            A gdal.Dataset or a NumPy array
-        features_path   The path to the clipping features
-        gt              An optional GDAL GeoTransform to use instead
-        nodata          The NoData value; defaults to -9999.
-    '''
-    def array_to_image(a):
-        '''
-        Converts a gdalnumeric array to a Python Imaging Library (PIL) Image.
-        '''
-        i = Image.fromstring('L',(a.shape[1], a.shape[0]),
-            (a.astype('b')).tostring())
-        return i
-
-    def convertJson(jsdata):
-        return json.dumps(jsdata)
-
-    def image_to_array(i):
-        '''
-        Converts a Python Imaging Library (PIL) array to a gdalnumeric image.
-        '''
-        a = gdalnumeric.fromstring(i.tobytes(), 'b')
-        a.shape = i.im.size[1], i.im.size[0]
-        return a
-
-    def world_to_pixel(geo_matrix, x, y):
-        '''
-        Uses a gdal geomatrix (gdal.GetGeoTransform()) to calculate
-        the pixel location of a geospatial coordinate; from:
-        http://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html#clip-a-geotiff-with-shapefile
-        '''
-        ulX = geo_matrix[0]
-        ulY = geo_matrix[3]
-        xDist = geo_matrix[1]
-        yDist = geo_matrix[5]
-        rtnX = geo_matrix[2]
-        rtnY = geo_matrix[4]
-        pixel = int((x - ulX) / xDist)
-        line = int((ulY - y) / xDist)
-        return (pixel, line)
-
-    rast=gdal.Open(raster)
-
-    # Can accept either a gdal.Dataset or numpy.array instance
-    if not isinstance(rast, np.ndarray):
-        gt = rast.GetGeoTransform()
-        rast = rast.ReadAsArray()
-
-    # Create an OGR layer from a boundary shapefile
-
-    geo = convertJson(geojson)
-    features = ogr.Open(geo)
-    if features.GetDriver().GetName() == 'ESRI Shapefile':
-        lyr = features.GetLayer(os.path.split(os.path.splitext(features_path)[0])[1])
-
-    else:
-        lyr = features.GetLayer()
-
-    # Get the first feature
-    poly = lyr.GetNextFeature()
-
-    # Convert the layer extent to image pixel coordinates
-    minX, maxX, minY, maxY = lyr.GetExtent()
-    ulX, ulY = world_to_pixel(gt, minX, maxY)
-    lrX, lrY = world_to_pixel(gt, maxX, minY)
-
-    # Calculate the pixel size of the new image
-    pxWidth = int(lrX - ulX)
-    pxHeight = int(lrY - ulY)
-
-    # If the clipping features extend out-of-bounds and ABOVE the raster...
-    if gt[3] < maxY:
-        # In such a case... ulY ends up being negative--can't have that!
-        iY = ulY
-        ulY = 0
-
-    # Multi-band image?
-    try:
-        clip = rast[:, ulY:lrY, ulX:lrX]
-
-    except IndexError:
-        clip = rast[ulY:lrY, ulX:lrX]
-
-    # Create a new geomatrix for the image
-    gt2 = list(gt)
-    gt2[0] = minX
-    gt2[3] = maxY
-
-    # Map points to pixels for drawing the boundary on a blank 8-bit,
-    # black and white, mask image.
-    points = []
-    pixels = []
-    geom = poly.GetGeometryRef()
-    pts = geom.GetGeometryRef(0)
-
-    for p in range(pts.GetPointCount()):
-        points.append((pts.GetX(p), pts.GetY(p)))
-
-    for p in points:
-        pixels.append(world_to_pixel(gt2, p[0], p[1]))
-
-    raster_poly = Image.new('L', (pxWidth, pxHeight), 1)
-    rasterize = ImageDraw.Draw(raster_poly)
-    rasterize.polygon(pixels, 0) # Fill with zeroes
-
-    # If the clipping features extend out-of-bounds and ABOVE the raster...
-    if gt[3] < maxY:
-        # The clip features were "pushed down" to match the bounds of the
-        # raster; this step "pulls" them back up
-        premask = image_to_array(raster_poly)
-        # We slice out the piece of our clip features that are "off the map"
-        mask = np.ndarray((premask.shape[-2] - abs(iY), premask.shape[-1]), premask.dtype)
-        mask[:] = premask[abs(iY):, :]
-        mask.resize(premask.shape) # Then fill in from the bottom
-
-        # Most importantly, push the clipped piece down
-        gt2[3] = maxY - (maxY - gt[3])
-
-    else:
-        mask = image_to_array(raster_poly)
-
-    # Clip the image using the mask
-    try:
-        clip = gdalnumeric.choose(mask, (clip, nodata))
-
-    # If the clipping features extend out-of-bounds and BELOW the raster...
-    except ValueError:
-        # We have to cut the clipping features to the raster!
-        rshp = list(mask.shape)
-        if mask.shape[-2] != clip.shape[-2]:
-            rshp[0] = clip.shape[-2]
-
-        if mask.shape[-1] != clip.shape[-1]:
-            rshp[1] = clip.shape[-1]
-
-        mask.resize(*rshp, refcheck=False)
-
-        clip = gdalnumeric.choose(mask, (clip, nodata))
-
-    # return (clip, ulX, ulY, gt2)
-    return clip
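The core of the deleted clip_raster() helper is its final masking step: the polygon is rasterized into a 0/1 mask and gdalnumeric.choose() keeps raster values where the mask is 0 (inside the polygon) and writes the NoData value elsewhere. A small standalone sketch of that selection rule, using numpy only and toy values:

```python
import numpy as np

nodata = -9999
clip = np.array([[1.0, 2.0],
                 [3.0, 4.0]])   # clipped DSM window
mask = np.array([[0, 1],
                 [1, 0]])       # 0 = inside polygon, 1 = outside

# Same selection rule as gdalnumeric.choose(mask, (clip, nodata)):
# index 0 picks from clip, index 1 picks the nodata scalar.
result = np.choose(mask, (clip, nodata))
print(result)  # [[1., -9999.], [-9999., 4.]]
```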
@@ -1,35 +0,0 @@
-import json
-from osgeo import osr
-
-
-def spatialref(epsg_code):
-    spatialref = osr.SpatialReference()
-    spatialref.ImportFromEPSG(epsg_code)
-    return spatialref
-
-def spatialrefWQT(dataset):
-    spatialref = osr.SpatialReference()
-    spatialref.ImportFromWkt(dataset.GetProjectionRef())
-    return spatialref
-
-def reprojson(geojson, dataset):
-
-    crsin= spatialref(4326)
-    crsout = spatialrefWQT(dataset)
-
-    coordinate_transformation = osr.CoordinateTransformation(crsin, crsout)
-
-    # Define dictionary representation of output feature collection
-    fc_out = {"geometry":{"type":"Polygon","coordinates":[]}}
-
-    # Iterate through each feature of the feature collection
-    new_coords = []
-    # Project/transform coordinate pairs of each ring
-    # (iteration required in case geometry type is MultiPolygon, or there are holes)
-    for ring in geojson['geometry']['coordinates']:
-        coords=[(entry[0],entry[1]) for entry in ring]
-        for i in range(len(coords)):
-            x2, y2, z= coordinate_transformation.TransformPoint(coords[i][0], coords[i][1])
-            new_coords.append([x2, y2])
-    fc_out['geometry']['coordinates'] = [new_coords]
-    return fc_out
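The deleted reprojson() helper reprojects the polygon from EPSG:4326 into the raster's own CRS before any pixel math. A minimal sketch of the same osr transformation for a single point; the target EPSG code is an arbitrary example (the real code reads the CRS from the DSM), and on newer GDAL builds the axis order caveat below applies:

```python
from osgeo import osr

src = osr.SpatialReference()
src.ImportFromEPSG(4326)    # GeoJSON coordinates are lon/lat WGS84

dst = osr.SpatialReference()
dst.ImportFromEPSG(32632)   # example projected CRS (UTM 32N); assumption, not from this diff

# On GDAL >= 3 the default axis order for EPSG:4326 is lat/lon; setting
# osr.OAMS_TRADITIONAL_GIS_ORDER on both SpatialReference objects restores lon/lat.
ct = osr.CoordinateTransformation(src, dst)
x, y, z = ct.TransformPoint(9.0, 45.0)
print(x, y)
```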
@@ -3,15 +3,6 @@ import os
 import shutil
 import zipfile
 import uuid as uuid_module
 import json

-import osgeo.ogr
-import gdal
-import struct
-import statistics
-from .vertex import rings
-from .repro_json import reprojson
-from .cliprasterpol import clip_raster
-
-import json
 from shlex import quote

@@ -588,25 +579,6 @@ class Task(models.Model):
         self.pending_action = None
         self.save()

-    def get_volume(self, geojson):
-        try:
-            raster_path= self.assets_path("odm_dem", "dsm.tif")
-            raster=gdal.Open(raster_path)
-            gt=raster.GetGeoTransform()
-            rb=raster.GetRasterBand(1)
-            gdal.UseExceptions()
-            geosom = reprojson(geojson, raster)
-            coords=[(entry[0],entry[1]) for entry in rings(raster_path, geosom)]
-            GSD=gt[1]
-            volume=0
-            med=statistics.median(entry[2] for entry in rings(raster_path, geosom))
-            clip=clip_raster(raster_path, geosom, gt=None, nodata=-9999)
-            return ((clip-med)*GSD*GSD)[clip!=-9999.0].sum()
-
-        except FileNotFoundError as e:
-            logger.warning(e)
-
-
     def find_all_files_matching(self, regex):
         directory = full_task_directory_path(self.id, self.project.id)
         return [os.path.join(directory, f) for f in os.listdir(directory) if
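The removed Task.get_volume() estimates volume as the sum of (cell height minus the median height along the polygon outline) times GSD squared, over the cells inside the clipped polygon, skipping NoData cells. A tiny worked example of that same formula with made-up numbers:

```python
import numpy as np

GSD = 0.05    # ground sampling distance in meters/pixel (example value)
med = 100.0   # median DSM height along the polygon outline, used as the base plane

# 2x2 clipped DSM window; -9999 marks a cell outside the polygon
clip = np.array([[101.0, 102.0],
                 [103.0, -9999.0]])

volume = ((clip - med) * GSD * GSD)[clip != -9999.0].sum()
print(volume)  # (1 + 2 + 3) * 0.0025 = 0.015 cubic meters
```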
@@ -1,40 +0,0 @@
-from osgeo import ogr
-import gdal
-import struct
-import json
-
-def convertJson(jsdata):
-    return json.dumps(jsdata)
-
-def rings(raster, geojson):
-
-    src=gdal.Open(raster)
-    gtx=src.GetGeoTransform()
-    rbu=src.GetRasterBand(1)
-    gdal.UseExceptions()
-
-    geo=convertJson(geojson)
-
-    geojsom= ogr.Open(geo)
-
-    layer1 = geojsom.GetLayer(0)
-
-    vertices = []
-
-    for feat in layer1:
-        geom = feat.GetGeometryRef()
-        ring = geom.GetGeometryRef(0)
-        points = ring.GetPointCount()
-
-        for p in range(points):
-            lon, lat, z = ring.GetPoint(p)
-            px = int((lon - gtx[0]) / gtx[1]) #x pixel
-            py = int((lat - gtx[3]) / gtx[5]) #y pixel
-            try:
-                structval=rbu.ReadRaster(px,py,1,1,buf_type=gdal.GDT_Float32) #Assumes 32 bit int- 'float'
-                intval = struct.unpack('f' , structval) #assume float
-                val=intval[0]
-                vertices.append((px, py, val))
-            except:
-                val=-9999 #or some value to indicate a fail
-    return vertices
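The deleted rings() helper converts each polygon vertex from map coordinates to pixel indices via the GeoTransform and reads the DSM value at that pixel with ReadRaster plus struct.unpack. A self-contained sketch of that single-pixel read; the file path is a placeholder (the deleted code pointed it at the task's odm_dem/dsm.tif):

```python
import struct
from osgeo import gdal

src = gdal.Open("dsm.tif")              # placeholder path
gt = src.GetGeoTransform()
band = src.GetRasterBand(1)

x, y = 9.0, 45.0                        # a polygon vertex, already in the raster's CRS
px = int((x - gt[0]) / gt[1])           # column from the GeoTransform
py = int((y - gt[3]) / gt[5])           # row (gt[5] is negative for north-up rasters)

raw = band.ReadRaster(px, py, 1, 1, buf_type=gdal.GDT_Float32)
(height,) = struct.unpack('f', raw)     # one 32-bit float per pixel
print(height)
```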
@@ -1,6 +1,7 @@
 import os
 import logging
 import importlib
+import subprocess

 import django
 import json

@@ -13,27 +14,48 @@ logger = logging.getLogger('app.logger')

 def register_plugins():
     for plugin in get_active_plugins():
+
+        # Check for package.json in public directory
+        # and run npm install if needed
+        if plugin.path_exists("public/package.json") and not plugin.path_exists("public/node_modules"):
+            logger.info("Running npm install for {}".format(plugin.get_name()))
+            subprocess.call(['npm', 'install'], cwd=plugin.get_path("public"))
+
         plugin.register()
         logger.info("Registered {}".format(plugin))


-def get_url_patterns():
+def get_app_url_patterns():
     """
-    @return the patterns to expose the /public directory of each plugin (if needed)
+    @return the patterns to expose the /public directory of each plugin (if needed) and
+    each mount point
     """
     url_patterns = []
     for plugin in get_active_plugins():
-        for mount_point in plugin.mount_points():
+        for mount_point in plugin.app_mount_points():
             url_patterns.append(url('^plugins/{}/{}'.format(plugin.get_name(), mount_point.url),
                                     mount_point.view,
                                     *mount_point.args,
                                     **mount_point.kwargs))

-        if plugin.has_public_path():
+        if plugin.path_exists("public"):
             url_patterns.append(url('^plugins/{}/(.*)'.format(plugin.get_name()),
                                     django.views.static.serve,
                                     {'document_root': plugin.get_path("public")}))

     return url_patterns

+def get_api_url_patterns():
+    """
+    @return the patterns to expose the plugin API mount points (if any)
+    """
+    url_patterns = []
+    for plugin in get_active_plugins():
+        for mount_point in plugin.api_mount_points():
+            url_patterns.append(url('^plugins/{}/{}'.format(plugin.get_name(), mount_point.url),
+                                    mount_point.view,
+                                    *mount_point.args,
+                                    **mount_point.kwargs))
+
+    return url_patterns
+

@@ -85,6 +107,11 @@ def get_active_plugins():

     return plugins

+def get_plugin_by_name(name):
+    plugins = get_active_plugins()
+    res = list(filter(lambda p: p.get_name() == name, plugins))
+    return res[0] if res else None
+

 def get_plugins_path():
     current_path = os.path.dirname(os.path.realpath(__file__))
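With get_app_url_patterns() and get_api_url_patterns() split as above, a plugin can expose regular Django views and REST API views separately. A minimal sketch of a hypothetical plugin.py using both hooks (the view bodies are placeholders, and the /api/ prefix assumes the API urlconf is mounted under /api/ as in WebODM):

```python
from django.http import JsonResponse
from django.shortcuts import render

from app.plugins import PluginBase, MountPoint


class Plugin(PluginBase):
    def app_mount_points(self):
        # Served under /plugins/<plugin_name>/... by get_app_url_patterns()
        return [
            MountPoint('$', lambda request: render(request, self.template_path("app.html"), {'title': 'Example'}))
        ]

    def api_mount_points(self):
        # Served under /api/plugins/<plugin_name>/... by get_api_url_patterns()
        return [
            MountPoint('info$', lambda request: JsonResponse({'name': self.get_name()}))
        ]
```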
@@ -5,7 +5,7 @@ class MountPoint:
        """

        :param url: path to mount this view to, relative to plugins directory
-       :param view: Django view
+       :param view: Django/DjangoRestFramework view
        :param args: extra args to pass to url() call
        :param kwargs: extra kwargs to pass to url() call
        """
@@ -46,8 +46,8 @@ class PluginBase(ABC):
         """
         return "plugins/{}/templates/{}".format(self.get_name(), path)

-    def has_public_path(self):
-        return os.path.isdir(self.get_path("public"))
+    def path_exists(self, path):
+        return os.path.exists(self.get_path(path))

     def include_js_files(self):
         """

@@ -73,7 +73,7 @@ class PluginBase(ABC):
         """
         return []

-    def mount_points(self):
+    def app_mount_points(self):
         """
         Should be overriden by plugins that want to connect
         custom Django views

@@ -81,5 +81,13 @@
         """
         return []

+    def api_mount_points(self):
+        """
+        Should be overriden by plugins that want to add
+        new API mount points
+        :return: [] of MountPoint objects
+        """
+        return []
+
     def __str__(self):
         return "[{}]".format(self.get_module_name())
@@ -4,8 +4,6 @@ import 'leaflet/dist/leaflet.css';
 import Leaflet from 'leaflet';
 import async from 'async';

 import 'leaflet-measure/dist/leaflet-measure.css';
 import 'leaflet-measure/dist/leaflet-measure';
-import 'leaflet-draw/dist/leaflet.draw.css';
-import 'leaflet-draw/dist/leaflet.draw';


@@ -105,6 +103,7 @@ class Map extends React.Component {

           // Associate metadata with this layer
           meta.name = info.name;
+          window.meta = meta;
           layer[Symbol.for("meta")] = meta;

           if (forceAddLayers || prevSelectedLayers.indexOf(layerId(layer)) !== -1){

@@ -185,8 +184,6 @@ class Map extends React.Component {
       map: this.map
     });

     measureControl.addTo(this.map);

-    const featureGroup = L.featureGroup();
-    featureGroup.addTo(this.map);


@@ -217,12 +214,13 @@ class Map extends React.Component {
     this.map.on(L.Draw.Event.CREATED, function(e) {
       e.layer.feature = {geometry: {type: 'Polygon'} };
       featureGroup.addLayer(e.layer);
+      const meta = window.meta;

       var paramList;
       $.ajax({
           type: 'POST',
           async: false,
-          url: '/api/projects/4/tasks/7/volume',
+          url: `/api/projects/${meta.task.project}/tasks/${meta.task.id}/volume`,
           data: JSON.stringify(e.layer.toGeoJSON()),
           contentType: "application/json",
           success: function (msg) {

@@ -238,11 +236,13 @@ class Map extends React.Component {

     this.map.on(L.Draw.Event.EDITED, function(e) {
       e.layers.eachLayer(function(layer) {
+        const meta = window.meta;
+
         var paramList = null;
         $.ajax({
             type: 'POST',
             async: false,
-            url: '/api/projects/1/tasks/4/volume',
+            url: `/api/projects/${meta.task.project}/tasks/${meta.task.id}/volume`,
             data: JSON.stringify(layer.toGeoJSON()),
             contentType: "application/json",
             success: function (msg) {
@@ -1,6 +1,9 @@
+import os
+
 from django.test import Client
 from rest_framework import status

+from app.plugins import get_plugin_by_name
 from .classes import BootTestCase

 class TestPlugins(BootTestCase):

@@ -37,6 +40,11 @@ class TestPlugins(BootTestCase):
         # And our menu entry
         self.assertContains(res, '<li><a href="/plugins/test/menu_url/"><i class="test-icon"></i> Test</a></li>', html=True)

+        # A node_modules directory has been created as a result of npm install
+        # because we have a package.json in the public directory
+        test_plugin = get_plugin_by_name("test")
+        self.assertTrue(os.path.exists(test_plugin.get_path("public/node_modules")))
+
         # TODO:
         # test API endpoints
         # test python hooks
@@ -4,7 +4,7 @@ from django.shortcuts import render_to_response
 from django.template import RequestContext

 from .views import app as app_views, public as public_views
-from .plugins import get_url_patterns
+from .plugins import get_app_url_patterns

 from app.boot import boot
 from webodm import settings

@@ -30,7 +30,7 @@ urlpatterns = [

 # TODO: is there a way to place plugins /public directories
 # into the static build directories and let nginx serve them?
-urlpatterns += get_url_patterns()
+urlpatterns += get_app_url_patterns()

 handler404 = app_views.handler404
 handler500 = app_views.handler500
@@ -6,7 +6,7 @@ class Plugin(PluginBase):
     def main_menu(self):
         return [Menu("GCP Interface", self.public_url(""), "fa fa-map-marker fa-fw")]

-    def mount_points(self):
+    def app_mount_points(self):
         return [
             MountPoint('$', lambda request: render(request, self.template_path("app.html"), {'title': 'GCP Editor'}))
         ]
@@ -12,7 +12,7 @@ class Plugin(PluginBase):
     def include_css_files(self):
         return ['test.css']

-    def mount_points(self):
+    def app_mount_points(self):
         return [
             MountPoint('/app_mountpoint/$', lambda request: render(request, self.template_path("app.html"), {'title': 'Test'}))
         ]
@@ -0,0 +1,14 @@
+{
+  "name": "public",
+  "version": "1.0.0",
+  "description": "",
+  "main": "main.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "pad-left": "^2.1.0"
+  }
+}
@@ -0,0 +1,21 @@
+from rest_framework import serializers
+from rest_framework import status
+from rest_framework.response import Response
+
+from app.api.tasks import TaskNestedView
+
+
+class GeoJSONSerializer(serializers.Serializer):
+    geometry = serializers.JSONField(help_text="Polygon contour defining the volume area to compute")
+
+
+class TaskVolume(TaskNestedView):
+    def post(self, request, pk=None):
+        task = self.get_and_check_task(request, pk)
+        serializer = GeoJSONSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        #result=task.get_volume(serializer.geometry)
+        return Response(serializer.geometry, status=status.HTTP_200_OK)
+
+
@@ -3,7 +3,7 @@
   "webodmMinVersion": "0.5.0",
   "description": "A plugin to compute volume measurements from a DSM",
   "version": "0.1.0",
-  "author": "Piero Toffanin",
+  "author": "Abdelkoddouss Izem, Piero Toffanin",
   "email": "pt@masseranolabs.com",
   "repository": "https://github.com/OpenDroneMap/WebODM",
   "tags": ["volume", "measurements"],
@@ -1,5 +1,30 @@
+from app.plugins import MountPoint
 from app.plugins import PluginBase
+from .api import TaskVolume

 class Plugin(PluginBase):
     def include_js_files(self):
         return ['hello.js']
+    def api_mount_points(self):
+        return [
+            MountPoint('task/(?P<pk>[^/.]+)/calculate$', TaskVolume.as_view())
+        ]
+
+
+    # def get_volume(self, geojson):
+    #     try:
+    #         raster_path= self.assets_path("odm_dem", "dsm.tif")
+    #         raster=gdal.Open(raster_path)
+    #         gt=raster.GetGeoTransform()
+    #         rb=raster.GetRasterBand(1)
+    #         gdal.UseExceptions()
+    #         geosom = reprojson(geojson, raster)
+    #         coords=[(entry[0],entry[1]) for entry in rings(raster_path, geosom)]
+    #         GSD=gt[1]
+    #         volume=0
+    #         print(rings(raster_path, geosom))
+    #         print(GSD)
+    #         med=statistics.median(entry[2] for entry in rings(raster_path, geosom))
+    #         clip=clip_raster(raster_path, geosom, gt=None, nodata=-9999)
+    #         return ((clip-med)*GSD*GSD)[clip!=-9999.0].sum()
+    #
+    #     except FileNotFoundError as e:
+    #         logger.warning(e)
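Given the MountPoint above and the prefix applied by get_api_url_patterns(), the volume plugin's endpoint should resolve to roughly /api/plugins/volume/task/&lt;pk&gt;/calculate, depending on where the API urlconf is mounted. A hedged sketch of posting a polygon to it; host, token and task id are placeholders:

```python
# Illustrative only: host, token, task id and the exact URL prefix are assumptions.
import requests

payload = {
    "geometry": {
        "type": "Polygon",
        "coordinates": [[[-91.99, 46.84], [-91.99, 46.85], [-91.98, 46.85], [-91.99, 46.84]]]
    }
}

res = requests.post(
    "http://localhost:8000/api/plugins/volume/task/<task-id>/calculate",
    json=payload,
    headers={"Authorization": "JWT <token>"}
)
print(res.status_code, res.json())  # at this stage the view echoes the geometry back (see api.py above)
```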
@@ -1,6 +0,0 @@
-PluginsAPI.Map.willAddControls(function(options){
-  console.log("GOT: ", options);
-});
-PluginsAPI.Map.didAddControls(function(options){
-  console.log("GOT2: ", options);
-});
@@ -0,0 +1,14 @@
+{
+  "name": "volume",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "leaflet-draw": "^1.0.2"
+  }
+}