Merge pull request #1614 from HeDo88TH/remove-laspy

Remove laspy
pull/1619/head
Piero Toffanin 2023-03-02 10:43:05 -05:00 committed by GitHub
commit ffa7871c33
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 105 additions and 51 deletions


@@ -46,30 +46,18 @@ def classify(lasFile, scalar, slope, threshold, window):
     log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
     return lasFile

-def rectify(lasFile, debug=False, reclassify_threshold=5, min_area=750, min_points=500):
+def rectify(lasFile, reclassify_threshold=5, min_area=750, min_points=500):
     start = datetime.now()

-    pcFile = lasFile
     try:
-        # Workaround for MacOS: laspy does not have support for LAZ on macOS
-        # so we first convert to normal LAS via PDAL
-        # TODO: remove LASPY and use PDAL instead
-        if sys.platform == 'darwin' and lasFile[-3:] == "laz":
-            pcFile = lasFile + ".tmp.las"
-            pdal.translate(lasFile, pcFile)
-
         log.ODM_INFO("Rectifying {} using with [reclassify threshold: {}, min area: {}, min points: {}]".format(lasFile, reclassify_threshold, min_area, min_points))
         run_rectification(
-            input=pcFile, output=pcFile, debug=debug, \
+            input=lasFile, output=lasFile, \
             reclassify_plan='median', reclassify_threshold=reclassify_threshold, \
             extend_plan='surrounding', extend_grid_distance=5, \
             min_area=min_area, min_points=min_points)

-        if sys.platform == 'darwin' and pcFile != lasFile and os.path.isfile(pcFile):
-            pdal.translate(pcFile, lasFile)
-            os.remove(pcFile)
-
         log.ODM_INFO('Created %s in %s' % (lasFile, datetime.now() - start))
     except Exception as e:
         log.ODM_WARNING("Error rectifying ground in file %s: %s" % (lasFile, str(e)))


@@ -1,47 +1,115 @@
-# TODO: Move to pylas when project migrates to python3
-import laspy
-import time
+import pdal
 import numpy as np
+from opendm import log
 from ..point_cloud import PointCloud
+import pdb
+import json

 def read_cloud(point_cloud_path):
-    # Open point cloud and read its properties
-    las_file = laspy.read(point_cloud_path)
-    header = las_file.header
+    pipeline = pdal.Pipeline('[{"type":"readers.las","filename":"%s"}]' % point_cloud_path)
+    pipeline.execute()

-    x = las_file.x.scaled_array()
-    y = las_file.y.scaled_array()
-    z = las_file.z.scaled_array()
+    arrays = pipeline.arrays[0]

-    cloud = PointCloud.with_dimensions(x, y, z, las_file.classification.array, las_file.red, las_file.green, las_file.blue)
+    # Extract point coordinates, classification, and RGB values
+    x = arrays["X"]
+    y = arrays["Y"]
+    z = arrays["Z"]
+    classification = arrays["Classification"].astype(np.uint8)
+    red = arrays["Red"]
+    green = arrays["Green"]
+    blue = arrays["Blue"]

-    # Return the result
-    return header, cloud
+    cloud = PointCloud.with_dimensions(x, y, z, classification, red, green, blue)

-def write_cloud(header, point_cloud, output_point_cloud_path, write_extra_dimensions=False):
-    # Open output file
-    output_las_file = laspy.LasData(header)
+    return pipeline.metadata["metadata"]["readers.las"], cloud

-    if write_extra_dimensions:
-        extra_dims = [laspy.ExtraBytesParams(name=name, type=dimension.get_las_type(), description="Dimension added by Ground Extend") for name, dimension in point_cloud.extra_dimensions_metadata.items()]
-        output_las_file.add_extra_dims(extra_dims)
+def safe_add_metadata(pipeline, metadata, key, sourcekey=None):
+    k = key if sourcekey is None else sourcekey
+    if k in metadata:
+        pipeline["pipeline"][0][key] = metadata[k]

-        # Assign dimension values
-        for dimension_name, values in point_cloud.extra_dimensions.items():
-            setattr(output_las_file, dimension_name, values)
+def write_cloud(metadata, point_cloud, output_point_cloud_path):
     # Adapt points to scale and offset
-    [x, y] = np.hsplit(point_cloud.xy, 2)
-    output_las_file.x = x.ravel()
-    output_las_file.y = y.ravel()
-    output_las_file.z = point_cloud.z
+    x, y = np.hsplit(point_cloud.xy, 2)

     # Set color
-    [red, green, blue] = np.hsplit(point_cloud.rgb, 3)
-    output_las_file.red = red.ravel()
-    output_las_file.green = green.ravel()
-    output_las_file.blue = blue.ravel()
+    red, green, blue = np.hsplit(point_cloud.rgb, 3)

     # Set classification
-    output_las_file.classification = point_cloud.classification.astype(np.uint8)
+    arrays = np.zeros(len(x),
+        dtype=[('X', '<f8'),
+               ('Y', '<f8'),
+               ('Z', '<f8'),
+               ('Intensity', '<u2'),
+               ('ReturnNumber', 'u1'),
+               ('NumberOfReturns', 'u1'),
+               ('ScanDirectionFlag', 'u1'),
+               ('EdgeOfFlightLine', 'u1'),
+               ('Classification', 'u1'),
+               ('ScanAngleRank', '<f4'),
+               ('UserData', 'u1'),
+               ('PointSourceId', '<u2'),
+               ('GpsTime', '<f8'),
+               ('Red', '<u2'),
+               ('Green', '<u2'),
+               ('Blue', '<u2')])
+    arrays['X'] = x.ravel()
+    arrays['Y'] = y.ravel()
+    arrays['Z'] = point_cloud.z
+    arrays['Classification'] = point_cloud.classification.astype(np.uint8).ravel()
+    arrays['Red'] = red.astype(np.uint8).ravel()
+    arrays['Green'] = green.astype(np.uint8).ravel()
+    arrays['Blue'] = blue.astype(np.uint8).ravel()

-    output_las_file.write(output_point_cloud_path)
+    writer_pipeline = {
+        "pipeline": [
+            {
+                "type": "writers.las",
+                "filename": output_point_cloud_path,
+                "compression": "laszip",
+                "extra_dims": "all"
+            }
+        ]
+    }
+
+    safe_add_metadata(writer_pipeline, metadata, "scale_x")
+    safe_add_metadata(writer_pipeline, metadata, "scale_y")
+    safe_add_metadata(writer_pipeline, metadata, "scale_z")
+    safe_add_metadata(writer_pipeline, metadata, "offset_x")
+    safe_add_metadata(writer_pipeline, metadata, "offset_y")
+    safe_add_metadata(writer_pipeline, metadata, "offset_z")
+    safe_add_metadata(writer_pipeline, metadata, "a_srs", "spatialreference")
+    safe_add_metadata(writer_pipeline, metadata, "dataformat_id")
+    safe_add_metadata(writer_pipeline, metadata, "system_id")
+    safe_add_metadata(writer_pipeline, metadata, "software_id")
+    safe_add_metadata(writer_pipeline, metadata, "creation_doy")
+    safe_add_metadata(writer_pipeline, metadata, "creation_year")
+    safe_add_metadata(writer_pipeline, metadata, "minor_version")
+    safe_add_metadata(writer_pipeline, metadata, "major_version")
+    safe_add_metadata(writer_pipeline, metadata, "file_source_id")
+    safe_add_metadata(writer_pipeline, metadata, "global_encoding")
+
+    # The metadata object contains the VLRs as fields called "vlr_N" where N is the index of the VLR
+    # We have to copy them over to the writer pipeline as a list of dictionaries in the "vlrs" field
+    writer_pipeline["pipeline"][0]["vlrs"] = []
+    i = 0
+    while True:
+        vlr_field = "vlr_%d" % i
+        if vlr_field in metadata:
+            vlr = metadata[vlr_field]
+            writer_pipeline["pipeline"][0]["vlrs"].append({
+                "record_id": vlr["record_id"],
+                "user_id": vlr["user_id"],
+                "description": vlr["description"],
+                "data": vlr["data"]
+            })
+            i += 1
+        else:
+            break
+
+    pipeline = pdal.Pipeline(json.dumps(writer_pipeline), arrays=[arrays])
+    pipeline.execute()
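Taken together, read_cloud now returns the readers.las metadata dict where it previously returned a laspy header, and write_cloud feeds that dict back into writers.las. The safe_add_metadata helper copies a header field (scales, offsets, SRS, LAS version, and so on) into the writer options only when the reader actually reported it, so an absent field never produces an invalid writer option. A hypothetical round-trip sketch, assuming the module path opendm/dem/ground_rectification/io/las_io.py from the ODM source tree:

    from opendm.dem.ground_rectification.io.las_io import read_cloud, write_cloud

    # read_cloud returns (readers.las metadata, PointCloud); handing the same
    # metadata back to write_cloud preserves scale, offset, SRS, and VLRs, so
    # the output header round-trips the input.
    metadata, cloud = read_cloud("ground.laz")
    # ... reclassify or extend the cloud here ...
    write_cloud(metadata, cloud, "ground_rectified.laz")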


@@ -23,7 +23,7 @@ def run_rectification(**kwargs):
     if 'extend_plan' in kwargs and kwargs['extend_plan'] is not None:
         point_cloud = extend_cloud(point_cloud, kwargs['extend_plan'], kwargs['extend_grid_distance'], kwargs['min_points'], kwargs['min_area'])

-    write_cloud(header, point_cloud, kwargs['output'], kwargs['debug'])
+    write_cloud(header, point_cloud, kwargs['output'])

 def reclassify_cloud(point_cloud, plan, threshold, min_points, min_area):
     # Get only ground


@@ -8,8 +8,6 @@ Fiona==1.8.17 ; sys_platform == 'linux'
 Fiona==1.9.1 ; sys_platform == 'darwin'
 https://github.com/OpenDroneMap/windows-deps/raw/main/Fiona-1.8.19-cp38-cp38-win_amd64.whl ; sys_platform == 'win32'
 joblib==1.1.0
-laspy[lazrs]==2.3.0 ; sys_platform == 'linux' or sys_platform == 'win32'
-laspy==2.3.0 ; sys_platform == 'darwin'
 lxml==4.6.1
 matplotlib==3.3.3
 networkx==2.5


@@ -68,7 +68,7 @@ class ODMDEMStage(types.ODM_Stage):
                 self.update_progress(progress)

             if args.pc_rectify:
-                commands.rectify(dem_input, False)
+                commands.rectify(dem_input)

         # Do we need to process anything here?
         if (args.dsm or args.dtm) and pc_model_found: