Adding ground rectification

Former-commit-id: cba377424f
pull/1161/head
Nicolas Chamo 2020-03-09 22:52:12 -03:00 committed by NChamo
parent 1a97b4e07b
commit 2c0787ca47
27 changed files with 830 additions and 1 deletion

1
docker.settings.yaml 100644 → 100755

@@ -62,3 +62,4 @@ project_path: '/' #DO NOT CHANGE THIS OR DOCKER WILL NOT WORK. It should be '/'
#build_overviews: FALSE
#pc-classify: none
#force_gps: False
#rectify: False

11
opendm/config.py 100644 → 100755

@@ -595,6 +595,13 @@ def config():
'This flag is useful if you have high precision GPS measurements. '
'If there are no GCPs, this flag does nothing. Default: %(default)s'))
parser.add_argument('--rectify',
action='store_true',
default=False,
help=('Perform ground rectification. This means that wrongly classified ground '
'points will be re-classified and gaps will be filled. '
'Default: %(default)s'))
args = parser.parse_args()
# check that the project path setting has been set properly
@@ -609,6 +616,10 @@ def config():
log.ODM_INFO('Fast orthophoto is turned on, automatically setting --skip-3dmodel')
args.skip_3dmodel = True
if args.rectify and not args.pc_classify:
log.ODM_INFO("Ground rectification is turned on, automatically turning on point cloud classification")
args.pc_classify = True
if args.dtm and not args.pc_classify:
log.ODM_INFO("DTM is turned on, automatically turning on point cloud classification")
args.pc_classify = True
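For reference, the same implied-flag pattern in isolation, as a minimal sketch (not part of this diff; ODM's real --pc-classify option takes a method name, so it is simplified to a boolean here):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--rectify', action='store_true', default=False)
parser.add_argument('--pc-classify', action='store_true', default=False)  # simplified stand-in
args = parser.parse_args(['--rectify'])

# Rectification re-classifies ground points, so it needs a classified cloud first
if args.rectify and not args.pc_classify:
    args.pc_classify = True

assert args.pc_classify is True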

42
opendm/dem/commands.py 100644 → 100755

@@ -8,6 +8,7 @@ import shutil
from opendm.system import run
from opendm import point_cloud
from opendm import io
from opendm import system
from opendm.concurrency import get_max_memory
from scipy import ndimage
from datetime import datetime
@@ -18,6 +19,7 @@ except:
import queue
import threading
from .ground_rectification.rectify import run_rectification
from . import pdal
def classify(lasFile, scalar, slope, threshold, window, verbose=False):
@@ -31,6 +33,46 @@ def classify(lasFile, scalar, slope, threshold, window, verbose=False):
log.ODM_INFO('Created %s in %s' % (os.path.relpath(lasFile), datetime.now() - start))
return lasFile
def rectify(lasFile, debug=False, reclassify_threshold=5, min_area=750, min_points=500):
start = datetime.now()
try:
# Currently there is no Python 2 lib that supports reading and writing LAZ, so we do it manually until ODM is migrated to Python 3
# When the migration is done, we can move to pylas and avoid using PDAL for the conversion
tempLasFile = os.path.join(os.path.dirname(lasFile), 'tmp.las')
# Convert LAZ to LAS
cmd = [
'pdal',
'translate',
'-i %s' % lasFile,
'-o %s' % tempLasFile
]
system.run(' '.join(cmd))
log.ODM_INFO("Rectifying {} using [reclassify threshold: {}, min area: {}, min points: {}]".format(lasFile, reclassify_threshold, min_area, min_points))
run_rectification(
input=tempLasFile, output=tempLasFile, debug=debug, \
reclassify_plan='median', reclassify_threshold=reclassify_threshold, \
extend_plan='surrounding', extend_grid_distance=5, \
min_area=min_area, min_points=min_points)
# Convert LAS to LAZ
cmd = [
'pdal',
'translate',
'-i %s' % tempLasFile,
'-o %s' % lasFile
]
system.run(' '.join(cmd))
os.remove(tempLasFile)
except:
raise Exception("Error rectifying ground in file %s" % lasFile)
log.ODM_INFO('Created %s in %s' % (os.path.relpath(lasFile), datetime.now() - start))
return lasFile
error = None
def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True,

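A minimal sketch of how this wrapper is driven, mirroring the call added to stages/odm_dem.py at the end of this diff (the input path below is a hypothetical example):

from opendm.dem import commands

# Hypothetical georeferenced point cloud used as DEM input
dem_input = '/path/to/odm_georeferencing/odm_georeferenced_model.laz'
# Defaults apply: reclassify_threshold=5, min_area=750, min_points=500
commands.rectify(dem_input, debug=False)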
opendm/dem/ground_rectification/bounds/types.py (new file)

@@ -0,0 +1,59 @@
import numpy as np
from scipy.spatial import Delaunay
from ..point_cloud import PointCloud
EPSILON = 0.00001
class PolyBounds(object):
def __init__(self, points):
self.__points = points
self.__delaunay = Delaunay(points)
[x_min, y_min] = np.amin(points, axis=0)
[x_max, y_max] = np.amax(points, axis=0)
self._corners = (x_min, x_max, y_min, y_max)
def keep_points_inside(self, point_cloud):
"""Return a new point cloud with the points from the given cloud that are inside the bounds"""
mask = self.calculate_mask(point_cloud)
return point_cloud[mask]
def percentage_of_points_inside(self, points):
if isinstance(points, PointCloud):
points = points.get_xy()
mask = self.calculate_mask(points)
return np.count_nonzero(mask) * 100 / points.shape[0]
def calculate_mask(self, points):
"""Calculate the mask that would filter out the points outside the bounds"""
if isinstance(points, PointCloud):
points = points.get_xy()
return self.__delaunay.find_simplex(points) >= 0
def center(self):
(x_min, x_max, y_min, y_max) = self._corners
return ((x_min + x_max) / 2, (y_min + y_max) / 2)
def corners(self):
return self._corners
def points(self):
return self.__points
class BoxBounds(PolyBounds):
def __init__(self, x_min, x_max, y_min, y_max):
super(BoxBounds, self).__init__([[x_min, y_max], [x_max, y_max], [x_max, y_min], [x_min, y_min]])
def area(self):
(x_min, x_max, y_min, y_max) = self._corners
return (x_max - x_min) * (y_max - y_min)
def divide_by_point(self, point):
"""Divide the box into four boxes, marked by the point. It is assumed that the point is inside the box"""
[x_point, y_point] = point
(x_min, x_max, y_min, y_max) = self._corners
return [
BoxBounds(x_min, x_point, y_min, y_point),
BoxBounds(x_point + EPSILON, x_max, y_min, y_point),
BoxBounds(x_min, x_point, y_point + EPSILON, y_max),
BoxBounds(x_point + EPSILON, x_max, y_point + EPSILON, y_max)
]
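A minimal usage sketch for these bounds helpers (illustrative only; the module path is assumed from the package layout used by the imports further down):

import numpy as np
from opendm.dem.ground_rectification.bounds.types import BoxBounds

box = BoxBounds(0.0, 10.0, 0.0, 10.0)
print(box.area())    # 100.0
print(box.center())  # (5.0, 5.0)

# Divide the box into four sub-boxes around an interior point
quads = box.divide_by_point([4.0, 6.0])
print(sum(q.area() for q in quads))  # ~100, minus the EPSILON seams

# Keep only the 2D points that fall inside the box
points = np.array([[1.0, 1.0], [11.0, 5.0], [5.0, 9.0]])
print(points[box.calculate_mask(points)])  # the two points inside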

opendm/dem/ground_rectification/bounds/utils.py (new file)

@@ -0,0 +1,16 @@
import numpy as np
from scipy.spatial import ConvexHull
from .types import BoxBounds, PolyBounds
def calculate_convex_hull_bounds(points):
hull = ConvexHull(points)
return PolyBounds(points[hull.vertices])
def box_from_point_and_size(center, width, height):
return BoxBounds(center[0] - width / 2, center[0] + width / 2, center[1] - height / 2, center[1] + height / 2)
def box_from_cloud(point_cloud):
xy = point_cloud.get_xy()
[x_min, y_min] = np.amin(xy, axis=0)
[x_max, y_max] = np.amax(xy, axis=0)
return BoxBounds(x_min, x_max, y_min, y_max)

opendm/dem/ground_rectification/extra_dimensions/dimension.py (new file)

@@ -0,0 +1,27 @@
import numpy as np
from abc import ABCMeta, abstractmethod
class Dimension(object):
__metaclass__ = ABCMeta
def __init__(self):
super(Dimension, self).__init__()
@abstractmethod
def assign(self, *point_clouds, **kwargs):
"Assign a value to the points on the partition"
@abstractmethod
def assign_default(self, point_cloud):
"Assign a default value"
@abstractmethod
def get_name(self):
"Return the name of the dimension"
@abstractmethod
def get_las_type(self):
"Return the type of the values stored"
def _set_values(self, point_cloud, values):
point_cloud.add_dimension(self, values)

opendm/dem/ground_rectification/extra_dimensions/distance_dimension.py (new file)

@@ -0,0 +1,45 @@
import numpy as np
from sklearn.linear_model import RANSACRegressor
from .dimension import Dimension
class DistanceDimension(Dimension):
"""Assign each point the distance to the estimated ground"""
def __init__(self):
super(DistanceDimension, self).__init__()
def assign_default(self, point_cloud):
default = np.full(point_cloud.len(), -1)
super(DistanceDimension, self)._set_values(point_cloud, default)
def assign(self, *point_clouds, **kwargs):
for point_cloud in point_clouds:
xy = point_cloud.get_xy()
# Fit a RANSAC model to estimate the ground plane
model = RANSACRegressor().fit(xy, point_cloud.get_z())
# Calculate angle between estimated plane and XY plane
angle = self.__calculate_angle(model)
if angle >= 45:
# If the angle is higher than 45 degrees, then don't calculate the difference, since it will probably be way off
diff = np.full(point_cloud.len(), 0)
else:
predicted = model.predict(xy)
diff = point_cloud.get_z() - predicted
# Clamp negative differences to zero (points below the estimated ground)
diff[diff < 0] = 0
super(DistanceDimension, self)._set_values(point_cloud, diff)
def get_name(self):
return 'distance_to_ground'
def get_las_type(self):
return 10
def __calculate_angle(self, model):
"Calculate the angle between the estimated plane and the XY plane"
a = model.estimator_.coef_[0]
b = model.estimator_.coef_[1]
angle = np.arccos(1 / np.sqrt(a ** 2 + b ** 2 + 1))
return np.degrees(angle)
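The 45-degree cut-off above uses the fact that, for a fitted plane z = a*x + b*y + c, the angle to the XY plane satisfies cos(angle) = 1 / sqrt(a^2 + b^2 + 1). A quick numeric check of that formula (illustrative only):

import numpy as np
from sklearn.linear_model import RANSACRegressor

# Sample points from the plane z = x, which is tilted 45 degrees from the XY plane
rng = np.random.RandomState(0)
xy = rng.uniform(0, 10, size=(200, 2))
z = xy[:, 0]

model = RANSACRegressor().fit(xy, z)
a, b = model.estimator_.coef_
angle = np.degrees(np.arccos(1 / np.sqrt(a ** 2 + b ** 2 + 1)))
print(round(angle))  # ~45, so such a partition would be skipped by the >= 45 check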

opendm/dem/ground_rectification/extra_dimensions/extended_dimension.py (new file)

@@ -0,0 +1,23 @@
import numpy as np
from .dimension import Dimension
class ExtendedDimension(Dimension):
"""Whether the point was added or was already on the original point cloud"""
def __init__(self):
super(ExtendedDimension, self).__init__()
def assign_default(self, point_cloud):
default = np.full(point_cloud.len(), 0, dtype=np.uint16)
super(ExtendedDimension, self)._set_values(point_cloud, default)
def assign(self, *point_clouds, **kwargs):
for point_cloud in point_clouds:
added = np.full(point_cloud.len(), 1, dtype=np.uint16)
super(ExtendedDimension, self)._set_values(point_cloud, added)
def get_name(self):
return 'extended'
def get_las_type(self):
return 3

opendm/dem/ground_rectification/extra_dimensions/partition_dimension.py (new file)

@@ -0,0 +1,25 @@
import numpy as np
from .dimension import Dimension
class PartitionDimension(Dimension):
"""Group points by partition"""
def __init__(self, name):
super(PartitionDimension, self).__init__()
self.counter = 1
self.name = name
def assign_default(self, point_cloud):
default = np.full(point_cloud.len(), 0)
super(PartitionDimension, self)._set_values(point_cloud, default)
def assign(self, *point_clouds, **kwargs):
for point_cloud in point_clouds:
super(PartitionDimension, self)._set_values(point_cloud, np.full(point_cloud.len(), self.counter))
self.counter += 1
def get_name(self):
return self.name
def get_las_type(self):
return 5

opendm/dem/ground_rectification/grid/builder.py (new file)

@@ -0,0 +1,32 @@
import numpy as np
from sklearn.neighbors import BallTree
EPSILON = 0.00001
def build_grid(bounds, point_cloud, distance):
"""First, a 2D grid is built with a distance of 'distance' between points, inside the given bounds.
Then, only points that don't have a point cloud neighbour closer than 'distance' are left. The rest are filtered out."""
# Generate a grid of 2D points inside the bounds, with a distance of 'distance' between them
grid = __build_grid(bounds, distance)
# Filter out grid points outside the bounds (makes sense if bounds are not squared)
grid_inside = bounds.keep_points_inside(grid)
# Filter out the grid points that have a neighbor closer than 'distance' from the given point cloud
return __calculate_lonely_points(grid_inside, point_cloud, distance)
def __build_grid(bounds, distance):
x_min, x_max, y_min, y_max = bounds.corners()
grid = [[x, y] for x in np.arange(x_min, x_max + distance, distance) for y in np.arange(y_min, y_max + distance, distance)]
return np.array(grid)
def __calculate_lonely_points(grid, point_cloud, distance):
# Generate BallTree for point cloud
ball_tree = BallTree(point_cloud.get_xy(), metric='manhattan')
# For each grid point, count how many neighbors it has in the original ground cloud
count = ball_tree.query_radius(grid, distance - EPSILON, count_only=True)
# Return only the points in the grid that don't have a neighbor
return grid[count == 0]
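A sketch of the grid builder on a synthetic cloud (illustrative only; module paths are assumed from the imports in rectify.py further down):

import numpy as np
from opendm.dem.ground_rectification.bounds.utils import box_from_cloud
from opendm.dem.ground_rectification.grid.builder import build_grid
from opendm.dem.ground_rectification.point_cloud import PointCloud

# A very sparse synthetic "ground" cloud: four corner points only
xy = np.array([[0.0, 0.0], [100.0, 0.0], [0.0, 100.0], [100.0, 100.0]])
cloud = PointCloud.with_xy(xy)

bounds = box_from_cloud(cloud)
new_points = build_grid(bounds, cloud, 5)  # grid spacing of 5 units
print(new_points.shape)  # (N, 2): grid points with no ground neighbor within 5 units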

opendm/dem/ground_rectification/io/las_io.py (new file)

@@ -0,0 +1,65 @@
# TODO: Move to pylas when project migrates to python3
from laspy.file import File
from laspy.header import Header
import numpy as np
from ..point_cloud import PointCloud
def read_cloud(point_cloud_path):
# Open point cloud and read its properties
las_file = File(point_cloud_path, mode='r')
header = (las_file.header.copy(), las_file.header.scale, las_file.header.offset, las_file.header.evlrs, las_file.header.vlrs)
[x_scale, y_scale, z_scale] = las_file.header.scale
[x_offset, y_offset, z_offset] = las_file.header.offset
# Calculate the real coordinates
x = las_file.X * x_scale + x_offset
y = las_file.Y * y_scale + y_offset
z = las_file.Z * z_scale + z_offset
cloud = PointCloud.with_dimensions(x, y, z, las_file.Classification, las_file.red, las_file.green, las_file.blue)
# Close the file
las_file.close()
# Return the result
return header, cloud
def write_cloud(header, point_cloud, output_point_cloud_path, write_extra_dimensions=False):
(h, scale, offset, evlrs, vlrs) = header
# Open output file
output_las_file = File(output_point_cloud_path, mode='w', header=h, evlrs=evlrs, vlrs=vlrs)
if write_extra_dimensions:
# Create new dimensions
for name, dimension in point_cloud.extra_dimensions_metadata.items():
output_las_file.define_new_dimension(name=name, data_type=dimension.get_las_type(), description="Dimension added by Ground Extend")
# Assign dimension values
for dimension_name, values in point_cloud.extra_dimensions.items():
setattr(output_las_file, dimension_name, values)
# Adapt points to scale and offset
[x_scale, y_scale, z_scale] = scale
[x_offset, y_offset, z_offset] = offset
[x, y] = np.hsplit(point_cloud.xy, 2)
output_las_file.X = (x.ravel() - x_offset) / x_scale
output_las_file.Y = (y.ravel() - y_offset) / y_scale
output_las_file.Z = (point_cloud.z - z_offset) / z_scale
# Set color
[red, green, blue] = np.hsplit(point_cloud.rgb, 3)
output_las_file.red = red.ravel()
output_las_file.green = green.ravel()
output_las_file.blue = blue.ravel()
# Set classification
output_las_file.Classification = point_cloud.classification.astype(np.uint8)
# Set header
output_las_file.header.scale = scale
output_las_file.header.offset = offset
# Close files
output_las_file.close()
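A sketch of the intended round trip (paths are hypothetical; per the comment in commands.py above, the Python 2 stack cannot read LAZ directly, so the input here is an uncompressed LAS file):

from opendm.dem.ground_rectification.io.las_io import read_cloud, write_cloud

header, cloud = read_cloud('/path/to/ground_classified.las')  # hypothetical input
print(cloud.len(), 'points,', (cloud.classification == 2).sum(), 'classified as ground')

# ... reclassify / extend the cloud here ...

# write_extra_dimensions=True also persists dimensions such as 'distance_to_ground'
write_cloud(header, cloud, '/path/to/rectified.las', write_extra_dimensions=False)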

opendm/dem/ground_rectification/partition/one_partition.py (new file)

@@ -0,0 +1,13 @@
from .partition_plan import PartitionPlan, Partition
from ..bounds.utils import box_from_cloud
class OnePartition(PartitionPlan):
"""This partition plan does nothing. It returns all the cloud points in one partition."""
def __init__(self, point_cloud):
super(OnePartition, self).__init__()
self.point_cloud = point_cloud
def execute(self, **kwargs):
bounds = box_from_cloud(self.point_cloud)
return [Partition(self.point_cloud, bounds=bounds)]

opendm/dem/ground_rectification/partition/partition_plan.py (new file)

@@ -0,0 +1,17 @@
from abc import ABCMeta, abstractmethod
class PartitionPlan(object):
"""We want to partition the ground in different areas. There are many ways to do so, and each of them will be a different partition plan."""
__metaclass__ = ABCMeta
def __init__(self):
super(PartitionPlan, self).__init__()
@abstractmethod
def execute(self):
"""This method is expected to return a list of Partition instances"""
class Partition:
def __init__(self, point_cloud, **kwargs):
self.point_cloud = point_cloud
self.bounds = kwargs['bounds']

opendm/dem/ground_rectification/partition/quad_partitions.py (new file)

@@ -0,0 +1,59 @@
import numpy as np
from abc import abstractmethod
from ..bounds.utils import box_from_cloud
from .partition_plan import PartitionPlan, Partition
class QuadPartitions(PartitionPlan):
"""This partition plan starts with one big partition that includes the whole point cloud. It then divides it into four partitions, based on some criteria.
Each of these partitions is then divided into four other partitions, and so on. The algorithm has two stopping criteria:
if subdividing a partition would leave one of the new partitions with fewer than a given number of points, or with an area smaller than the given size,
then the partition is not divided."""
def __init__(self, point_cloud):
super(QuadPartitions, self).__init__()
self.point_cloud = point_cloud
@abstractmethod
def choose_divide_point(self, point_cloud, bounding_box):
"""Given a point cloud and a bounding box, calculate the point that will be used to divide the partition by four"""
def execute(self, **kwargs):
initial_bounding_box = box_from_cloud(self.point_cloud)
return self._divide_until(self.point_cloud, initial_bounding_box, kwargs['min_points'], kwargs['min_area'])
def _divide_until(self, point_cloud, bounding_box, min_points, min_area):
dividing_point = self.choose_divide_point(point_cloud, bounding_box)
new_boxes = bounding_box.divide_by_point(dividing_point)
for new_box in new_boxes:
if new_box.area() < min_area:
return [Partition(point_cloud, bounds=bounding_box)] # If dividing would drop a partition below the minimum area, keep the current partition undivided
subdivisions = []
for new_box in new_boxes:
mask = new_box.calculate_mask(point_cloud)
if np.count_nonzero(mask) < min_points:
return [Partition(point_cloud, bounds=bounding_box)] # If dividing would leave a partition with fewer than the minimum number of points, keep the current partition undivided
subdivisions += self._divide_until(point_cloud[mask], new_box, min_points, min_area)
return subdivisions
class UniformPartitions(QuadPartitions):
"""This kind of partitioner takes the current bounding box, and divides it by four uniform partitions"""
def __init__(self, point_cloud):
super(UniformPartitions, self).__init__(point_cloud)
def choose_divide_point(self, point_cloud, bounding_box):
return bounding_box.center()
class MedianPartitions(QuadPartitions):
"""This kind of partitioner takes the current point cloud, and divides it by the median, so that all four new partitions have the same amount of points"""
def __init__(self, point_cloud):
super(MedianPartitions, self).__init__(point_cloud)
def choose_divide_point(self, point_cloud, bounding_box):
return np.median(point_cloud.get_xy(), axis=0)
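A usage sketch of the quad partitioning and its two stopping criteria (illustrative only; module paths assumed):

import numpy as np
from opendm.dem.ground_rectification.partition.quad_partitions import MedianPartitions
from opendm.dem.ground_rectification.point_cloud import PointCloud

rng = np.random.RandomState(0)
cloud = PointCloud.with_xy(rng.uniform(0, 100, size=(2000, 2)))

# Recursively split at the XY median until a further split would violate either threshold
partitions = MedianPartitions(cloud).execute(min_points=200, min_area=500)
print([p.point_cloud.len() for p in partitions])  # roughly four ~500-point partitions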

opendm/dem/ground_rectification/partition/selector.py (new file)

@@ -0,0 +1,16 @@
from .one_partition import OnePartition
from .quad_partitions import UniformPartitions, MedianPartitions
from .surrounding_partitions import SurroundingPartitions
def select_partition_plan(name, point_cloud):
if name == 'one':
return OnePartition(point_cloud)
elif name == 'uniform':
return UniformPartitions(point_cloud)
elif name == 'median':
return MedianPartitions(point_cloud)
elif name == 'surrounding':
return SurroundingPartitions(point_cloud)
else:
raise Exception('Incorrect partition name.')

opendm/dem/ground_rectification/partition/surrounding_partitions.py (new file)

@@ -0,0 +1,117 @@
from sklearn.cluster import DBSCAN
from sklearn.neighbors import BallTree
import numpy as np
import math
from ..bounds.utils import box_from_cloud, calculate_convex_hull_bounds
from ..bounds.types import BoxBounds
from ..grid.builder import build_grid
from ..point_cloud import PointCloud
from .partition_plan import PartitionPlan, Partition
DEFAULT_DISTANCE = 5
MIN_PERCENTAGE_OF_POINTS_IN_CONVEX_HULL = 90
EPSILON = 0.0001
class SurroundingPartitions(PartitionPlan):
def __init__(self, point_cloud):
super(SurroundingPartitions, self).__init__()
self.point_cloud = point_cloud
self.chebyshev_ball_tree = BallTree(point_cloud.xy, metric='chebyshev')
self.manhattan_ball_tree = BallTree(point_cloud.xy, metric='manhattan')
def execute(self, **kwargs):
distance = kwargs['distance'] if 'distance' in kwargs else DEFAULT_DISTANCE
bounds = kwargs['bounds'] if 'bounds' in kwargs else box_from_cloud(self.point_cloud)
min_points = kwargs['min_points']
min_area = kwargs['min_area']
result = ExecutionResult(self.point_cloud.len())
grid = build_grid(bounds, self.point_cloud, distance)
if grid.shape[0] >= 1:
db = DBSCAN(eps=distance + EPSILON, min_samples=1, metric='manhattan', n_jobs=-1).fit(grid)
clusters = set(db.labels_)
for cluster in clusters:
cluster_members = grid[db.labels_ == cluster]
point_cloud_neighbors, point_cloud_neighbors_mask = self.__find_cluster_neighbors(cluster_members, distance)
if self.__is_cluster_surrounded(cluster_members, point_cloud_neighbors):
result.add_cluster_partition(cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask)
else:
point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box = self.__find_points_for_non_surrounded_cluster(bounds, cluster_members, distance, min_area, min_points)
result.add_zone_partition(cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box)
return result.build_result(self.point_cloud)
def __find_points_for_non_surrounded_cluster(self, bounds, cluster_members, distance, min_area, min_points):
(center_x, center_y) = bounds.center()
[x_min, y_min] = np.amin(cluster_members, axis=0)
[x_max, y_max] = np.amax(cluster_members, axis=0)
x = [x_min - distance, x_max + distance]
y = [y_min - distance, y_max + distance]
# Find the indices of the corner closest to the center of the point cloud
closest_x_idx = np.argmin(np.abs(x - center_x))
closest_y_idx = np.argmin(np.abs(y - center_y))
# Calculate the direction to where the box should grow
x_dir = np.sign(center_x - x[closest_x_idx])
y_dir = np.sign(center_y - y[closest_y_idx])
# Handle corner case where center is between min and max
if x_min <= center_x and center_x <= x_max:
x_dir *= -1
if y_min <= center_y and center_y <= y_max:
y_dir *= -1
bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
while bounding_box.area() < min_area:
x[closest_x_idx] += distance * x_dir
y[closest_y_idx] += distance * y_dir
bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
mask = bounding_box.calculate_mask(self.point_cloud)
while np.count_nonzero(mask) < min_points:
x[closest_x_idx] += distance * x_dir
y[closest_y_idx] += distance * y_dir
bounding_box = BoxBounds(x[0], x[1], y[0], y[1])
mask = bounding_box.calculate_mask(self.point_cloud)
return self.point_cloud[mask], mask, bounding_box
def __is_cluster_surrounded(self, cluster_members, point_cloud_neighbors):
convex_hull = calculate_convex_hull_bounds(point_cloud_neighbors.get_xy())
ratio = convex_hull.percentage_of_points_inside(cluster_members)
return ratio > MIN_PERCENTAGE_OF_POINTS_IN_CONVEX_HULL
def __find_cluster_neighbors(self, cluster_members, distance):
mask_per_point = self.manhattan_ball_tree.query_radius(cluster_members, distance * 3)
all_neighbor_mask = np.concatenate(mask_per_point)
point_cloud_neighbors = self.point_cloud[all_neighbor_mask]
return point_cloud_neighbors, all_neighbor_mask
class ExecutionResult:
def __init__(self, cloud_size):
self.partitions = [ ]
self.marked_as_neighbors = np.zeros(cloud_size, dtype=bool)
def add_cluster_partition(self, cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask):
convex_hull = calculate_convex_hull_bounds(np.concatenate((point_cloud_neighbors.get_xy(), cluster_members)))
self.marked_as_neighbors[point_cloud_neighbors_mask] = True
self.partitions.append(Partition(point_cloud_neighbors, bounds=convex_hull))
def add_zone_partition(self, cluster_members, point_cloud_neighbors, point_cloud_neighbors_mask, bounding_box):
self.marked_as_neighbors[point_cloud_neighbors_mask] = True
self.partitions.append(Partition(point_cloud_neighbors, bounds=bounding_box))
def build_result(self, whole_point_cloud):
remaining_cloud = whole_point_cloud[~self.marked_as_neighbors]
new_bounds = box_from_cloud(remaining_cloud)
self.partitions.insert(0, Partition(remaining_cloud, bounds=new_bounds))
return self.partitions
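A usage sketch for the 'surrounding' plan, which also honors the distance and bounds keyword arguments that extend_cloud() in rectify.py passes (illustrative only; module paths assumed):

import numpy as np
from opendm.dem.ground_rectification.partition.surrounding_partitions import SurroundingPartitions
from opendm.dem.ground_rectification.point_cloud import PointCloud

# Synthetic ground cloud with a square hole in the middle, which becomes one grid cluster
rng = np.random.RandomState(0)
xy = rng.uniform(0, 100, size=(5000, 2))
hole = (np.abs(xy[:, 0] - 50) < 15) & (np.abs(xy[:, 1] - 50) < 15)
cloud = PointCloud.with_xy(xy[~hole])

partitions = SurroundingPartitions(cloud).execute(distance=5, min_points=500, min_area=750)
# The first partition is everything far from any gap; the rest wrap each gap's neighboring points
print(len(partitions))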

opendm/dem/ground_rectification/point_cloud.py (new file)

@@ -0,0 +1,101 @@
import numpy as np
from numpy.lib.recfunctions import append_fields
class PointCloud:
"""Representation of a 3D point cloud"""
def __init__(self, xy, z, classification, rgb, indices, extra_dimensions, extra_dimensions_metadata):
self.xy = xy
self.z = z
self.classification = classification
self.rgb = rgb
self.indices = indices
self.extra_dimensions = extra_dimensions
self.extra_dimensions_metadata = extra_dimensions_metadata
@staticmethod
def with_dimensions(x, y, z, classification, red, green, blue, indices=None):
xy = np.column_stack((x, y))
rgb = np.column_stack((red, green, blue))
indices = indices if indices is not None else np.arange(0, len(x))
return PointCloud(xy, z, classification, rgb, indices, { }, { })
@staticmethod
def with_xy(xy):
[x, y] = np.hsplit(xy, 2)
empty = np.empty(xy.shape[0])
return PointCloud.with_dimensions(x.ravel(), y.ravel(), empty, np.empty(xy.shape[0], dtype=np.uint8), empty, empty, empty)
def __getitem__(self, mask):
masked_dimensions = { name: values[mask] for name, values in self.extra_dimensions.items() }
return PointCloud(self.xy[mask], self.z[mask], self.classification[mask], self.rgb[mask], self.indices[mask], masked_dimensions, self.extra_dimensions_metadata)
def concatenate(self, other_cloud):
for name, dimension in self.extra_dimensions_metadata.items():
if name not in other_cloud.extra_dimensions:
dimension.assign_default(other_cloud)
for name, dimension in other_cloud.extra_dimensions_metadata.items():
if name not in self.extra_dimensions:
dimension.assign_default(self)
new_indices = np.arange(len(self.indices), len(self.indices) + len(other_cloud.indices))
self.xy = np.concatenate((self.xy, other_cloud.xy))
self.z = np.concatenate((self.z, other_cloud.z))
self.classification = np.concatenate((self.classification, other_cloud.classification))
self.rgb = np.concatenate((self.rgb, other_cloud.rgb))
self.indices = np.concatenate((self.indices, new_indices))
self.extra_dimensions = { name: np.concatenate((values, other_cloud.extra_dimensions[name])) for name, values in self.extra_dimensions.items() }
def update(self, other_cloud):
for name, dimension in self.extra_dimensions_metadata.items():
if name not in other_cloud.extra_dimensions:
dimension.assign_default(other_cloud)
for name, dimension in other_cloud.extra_dimensions_metadata.items():
if name not in self.extra_dimensions:
dimension.assign_default(self)
self.xy[other_cloud.indices] = other_cloud.xy
self.z[other_cloud.indices] = other_cloud.z
self.classification[other_cloud.indices] = other_cloud.classification
self.rgb[other_cloud.indices] = other_cloud.rgb
for name, values in self.extra_dimensions.items():
values[other_cloud.indices] = other_cloud.extra_dimensions[name]
def add_dimension(self, dimension, values):
self.extra_dimensions[dimension.get_name()] = values
self.extra_dimensions_metadata[dimension.get_name()] = dimension
def get_xy(self):
return self.xy
def get_z(self):
return self.z
def len(self):
return len(self.z)
def get_extra_dimension_values(self, name):
return self.extra_dimensions[name]
def get_bounding_box(self):
[x_min, y_min] = np.amin(self.xy, axis=0)
[x_max, y_max] = np.amax(self.xy, axis=0)
z_min = min(self.z)
z_max = max(self.z)
return BoundingBox3D(x_min, x_max, y_min, y_max, z_min, z_max)
class BoundingBox3D:
def __init__(self, x_min, x_max, y_min, y_max, z_min, z_max):
self.x_min = x_min
self.x_max = x_max
self.y_min = y_min
self.y_max = y_max
self.z_min = z_min
self.z_max = z_max
def keep_points_inside(self, point_cloud):
min = np.array([self.x_min, self.y_min, self.z_min])
max = np.array([self.x_max, self.y_max, self.z_max])
arr = np.column_stack((point_cloud.get_xy(), point_cloud.get_z()))
mask = np.all(np.logical_and(min <= arr, arr <= max), axis=1)
return point_cloud[mask]
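A small sketch of the PointCloud container itself (illustrative only; module path assumed):

import numpy as np
from opendm.dem.ground_rectification.point_cloud import PointCloud

x = np.array([0.0, 1.0, 2.0])
y = np.array([0.0, 1.0, 2.0])
z = np.array([10.0, 11.0, 12.0])
classification = np.array([2, 1, 2], dtype=np.uint8)  # 2 = ground
red = green = blue = np.array([255, 255, 255], dtype=np.uint16)

cloud = PointCloud.with_dimensions(x, y, z, classification, red, green, blue)
ground = cloud[cloud.classification == 2]  # boolean-mask indexing returns a new PointCloud
print(ground.len())                        # 2
print(cloud.get_bounding_box().z_max)      # 12.0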

opendm/dem/ground_rectification/rectify.py (new file)

@@ -0,0 +1,154 @@
import argparse
import numpy as np
from os import path
from sklearn.neighbors import BallTree
from sklearn.linear_model import RANSACRegressor
from .extra_dimensions.distance_dimension import DistanceDimension
from .extra_dimensions.partition_dimension import PartitionDimension
from .extra_dimensions.extended_dimension import ExtendedDimension
from .grid.builder import build_grid
from .bounds.utils import calculate_convex_hull_bounds
from .io.las_io import read_cloud, write_cloud
from .partition.selector import select_partition_plan
from .point_cloud import PointCloud
EPSILON = 0.00001
def run_rectification(**kwargs):
header, point_cloud = read_cloud(kwargs['input'])
if 'reclassify_plan' in kwargs and kwargs['reclassify_plan'] is not None:
point_cloud = reclassify_cloud(point_cloud, kwargs['reclassify_plan'], kwargs['reclassify_threshold'], kwargs['min_points'], kwargs['min_area'])
if 'extend_plan' in kwargs and kwargs['extend_plan'] is not None:
point_cloud = extend_cloud(point_cloud, kwargs['extend_plan'], kwargs['extend_grid_distance'], kwargs['min_points'], kwargs['min_area'])
write_cloud(header, point_cloud, kwargs['output'], kwargs['debug'])
def reclassify_cloud(point_cloud, plan, threshold, min_points, min_area):
# Get only ground
ground_cloud = point_cloud[point_cloud.classification == 2]
# Get the partition plan, according to the specified criteria
partition_plan = select_partition_plan(plan, ground_cloud)
# Execute the partition plan, and get all the partitions
partitions = [result for result in partition_plan.execute(min_points=min_points, min_area=min_area)]
# Add 'distance to ground' and 'partition number' dimensions to the cloud
for dimension in [DistanceDimension(), PartitionDimension('reclassify_partition')]:
# Calculate new dimension for partition
for partition in partitions:
dimension.assign(partition.point_cloud)
# Update new data to the original point cloud
point_cloud.update(partition.point_cloud)
# Calculate the points that need to be reclassified
mask = point_cloud.get_extra_dimension_values('distance_to_ground') > threshold
# Reclassify them as 'unclassified'
point_cloud.classification[mask] = 1
return point_cloud
def extend_cloud(point_cloud, plan, distance, min_points, min_area):
# Get only ground
ground_cloud = point_cloud[point_cloud.classification == 2]
# Read the bounds file
bounds = calculate_convex_hull_bounds(ground_cloud.get_xy())
# Generate a grid of 2D points inside the bounds, with a distance of 'distance' between them
grid_2d = build_grid(bounds, ground_cloud, distance)
# Create a new point cloud
grid_3d = PointCloud.with_xy(grid_2d)
# Get the partition plan, according to the specified criteria
partition_plan = select_partition_plan(plan, ground_cloud)
# Execute the partition plan, and get all the partitions
partitions = partition_plan.execute(distance=distance, min_points=min_points, min_area=min_area, bounds=bounds)
# Create dimensions
partition_dimension = PartitionDimension('extend_partition')
extended_dimension = ExtendedDimension()
for partition in partitions:
# Keep the grid point that are inside the partition
grid_inside = partition.bounds.keep_points_inside(grid_3d)
if grid_inside.len() > 0:
# In each partition, calculate the altitude of the grid points
new_points = __calculate_new_points(grid_inside, partition.point_cloud)
# Assign the dimension values
partition_dimension.assign(new_points, partition.point_cloud)
extended_dimension.assign(new_points)
# Update the original 3d grid with the new calculated points
grid_3d.update(new_points)
else:
# Assign the original points the correct partition
partition_dimension.assign(partition.point_cloud)
# Update new information to the original point cloud
point_cloud.update(partition.point_cloud)
# Calculate the bounding box of the original cloud
bbox = point_cloud.get_bounding_box()
# Remove points that might have ended up outside the bbox
grid_3d = bbox.keep_points_inside(grid_3d)
# Add the new grid points to the original cloud
point_cloud.concatenate(grid_3d)
# Add the new points to the original point cloud
return point_cloud
def __calculate_new_points(grid_points_inside, partition_point_cloud):
# Fit a RANSAC model to the partition's ground points
model = RANSACRegressor().fit(partition_point_cloud.get_xy(), partition_point_cloud.get_z())
# With the ransac model, calculate the altitude for each grid point
grid_points_altitude = model.predict(grid_points_inside.get_xy())
# Calculate color for new points
[avg_red, avg_green, avg_blue] = np.mean(partition_point_cloud.rgb, axis=0)
red = np.full(grid_points_inside.len(), avg_red)
green = np.full(grid_points_inside.len(), avg_green)
blue = np.full(grid_points_inside.len(), avg_blue)
# Classify all new points as ground
classification = np.full(grid_points_inside.len(), 2, dtype=np.uint8)
# Split xy into columns
[x, y] = np.hsplit(grid_points_inside.get_xy(), 2)
# Return point cloud
return PointCloud.with_dimensions(x.ravel(), y.ravel(), grid_points_altitude, classification, red, green, blue, grid_points_inside.indices)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='This script takes a pre-classified point cloud, re-classifies wrongly classified ground points as non-ground points and finally adds ground points where needed.')
parser.add_argument('input', type=str, help='The path where to find the pre-classified point cloud.')
parser.add_argument('output', type=str, help='The path where to save the rectified point cloud.')
parser.add_argument('--reclassify_plan', type=str, help='The partition plan to use for reclassification. Must be one of (one, uniform, median, surrounding)')
parser.add_argument('--reclassify_threshold', type=float, help='Every point with a distance to the estimated ground that is higher than the threshold will be reclassified as non-ground', default=5)
parser.add_argument('--extend_plan', type=str, help='The partition plan to use for extending the ground. Must be one of (one, uniform, median, surrounding)')
parser.add_argument('--extend_grid_distance', type=float, help='The distance between points on the grid that will be added to the point cloud.', default=5)
parser.add_argument('--min_area', type=int, help='Some partition plans need a minimum area as a stopping criteria.', default=750)
parser.add_argument('--min_points', type=int, help='Some partition plans need a minimum number of points as a stopping criteria.', default=500)
args = parser.parse_args()
if args.reclassify_plan is None and args.extend_plan is None:
raise Exception("Please set a reclassifying or extension plan. Otherwise there is nothing for me to do.")
run_rectification(input=args.input, reclassify_plan=args.reclassify_plan, reclassify_threshold=args.reclassify_threshold, \
extend_plan=args.extend_plan, extend_grid_distance=args.extend_grid_distance, \
output=args.output, min_points=args.min_points, min_area=args.min_area, debug=False)
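An example invocation of this standalone entry point, mirroring the parameters commands.rectify() passes (file names are hypothetical; running with -m assumes the package layout implied by the relative imports):

#   python -m opendm.dem.ground_rectification.rectify \
#       ground_classified.las rectified.las \
#       --reclassify_plan median --reclassify_threshold 5 \
#       --extend_plan surrounding --extend_grid_distance 5 \
#       --min_area 750 --min_points 500
#
# or, from Python, the same call commands.rectify() makes:
from opendm.dem.ground_rectification.rectify import run_rectification

run_rectification(input='ground_classified.las', output='rectified.las', debug=False,
                  reclassify_plan='median', reclassify_threshold=5,
                  extend_plan='surrounding', extend_grid_distance=5,
                  min_area=750, min_points=500)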

4
requirements.txt 100644 → 100755

@@ -17,4 +17,6 @@ psutil==5.6.3
joblib==0.13.2
Fiona==1.8.9.post2
cryptography==2.8
pyOpenSSL==19.1.0
pyOpenSSL==19.1.0
scikit-learn==0.20
laspy==1.6.0

1
settings.yaml 100644 → 100755

@@ -62,3 +62,4 @@ project_path: '' # Example: '/home/user/ODMProjects'
#build_overviews: FALSE
#pc-classify: none
#force_gps: False
#rectify: False

3
stages/odm_dem.py 100644 → 100755

@@ -75,6 +75,9 @@ class ODMDEMStage(types.ODM_Stage):
progress = 20
self.update_progress(progress)
if args.rectify:
commands.rectify(dem_input, args.debug)
# Do we need to process anything here?
if (args.dsm or args.dtm) and pc_model_found:
dsm_output_filename = os.path.join(odm_dem_root, 'dsm.tif')