Restored spaces

pull/1614/head
Luca Di Leo 2023-03-01 19:03:33 +01:00
parent 31bfa95f19
commit 7ace79cdc4
1 changed file with 11 additions and 11 deletions


@@ -102,13 +102,13 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
            w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
        else:
            w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)

        floor_ratio = prev_w / float(w)
        resolution *= floor_ratio
        radiuses = [str(float(r) * floor_ratio) for r in radiuses]

        log.ODM_WARNING("Really low resolution DEM requested %s will set floor at %s pixels. Resolution changed to %s. The scale of this reconstruction might be off." % ((prev_w, prev_h), RES_FLOOR, resolution))

    final_dem_pixels = w * h

    num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size)))/math.log(2))))
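
Note: a minimal, self-contained sketch of how the floor rescaling and the num_splits formula in the hunk above behave. RES_FLOOR, max_tile_size, the extents, and the starting resolution below are made-up placeholder values for illustration, not ODM's actual defaults.

import math

RES_FLOOR = 128          # assumed placeholder; the real constant is defined earlier in commands.py
max_tile_size = 4096     # assumed placeholder tile size

ext_width, ext_height = 12.0, 30.0   # hypothetical extent (same units as resolution)
resolution = 0.5
prev_w, prev_h = 24, 60              # hypothetical pre-floor raster size (extent / resolution)
radiuses = ['0.56']

# Same branch structure as the hunk: force the smaller dimension up to RES_FLOOR,
# keeping the aspect ratio of the extent.
if ext_width > ext_height:
    w, h = (RES_FLOOR, int(math.ceil(ext_height / ext_width * RES_FLOOR)))
else:
    w, h = (int(math.ceil(ext_width / ext_height * RES_FLOOR)), RES_FLOOR)

floor_ratio = prev_w / float(w)      # 24 / 52, so resolution and radiuses shrink by the same factor
resolution *= floor_ratio
radiuses = [str(float(r) * floor_ratio) for r in radiuses]

final_dem_pixels = w * h
num_splits = int(max(1, math.ceil(math.log(math.ceil(final_dem_pixels / float(max_tile_size * max_tile_size))) / math.log(2))))
print(w, h, round(resolution, 2), num_splits)   # -> 52 128 0.23 1 (one split: the DEM fits in a single tile)
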
@@ -144,7 +144,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
                'minx': minx,
                'maxx': maxx,
                'miny': miny,
                'maxy': maxy
            },
            'filename': filename
        })
@@ -157,7 +157,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
    def process_tile(q):
        log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % (q['filename'], output_type, q['radius'], resolution))

        d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds'])

        if dem_type == 'dtm':
@@ -175,7 +175,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
    output_path = os.path.abspath(os.path.join(outdir, output_file))

    # Verify tile results
    for t in tiles:
        if not os.path.exists(t['filename']):
            raise Exception("Error creating %s, %s failed to be created" % (output_file, t['filename']))
@@ -225,14 +225,14 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
            '"{geotiff_tmp}" "{geotiff_small}"'.format(**kwargs))

        # Fill scaled
        gdal_fillnodata(['.',
                        '-co', 'NUM_THREADS=%s' % kwargs['threads'],
                        '-co', 'BIGTIFF=IF_SAFER',
                        '--config', 'GDAL_CACHE_MAX', str(kwargs['max_memory']) + '%',
                        '-b', '1',
                        '-of', 'GTiff',
                        kwargs['geotiff_small'], kwargs['geotiff_small_filled']])

        # Merge filled scaled DEM with unfilled DEM using bilinear interpolation
        run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path))

        run('gdal_translate '
@@ -258,11 +258,11 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56']
        os.replace(geotiff_path, output_path)

    if os.path.exists(geotiff_tmp_path):
        if not keep_unfilled_copy:
            os.remove(geotiff_tmp_path)
        else:
            os.replace(geotiff_tmp_path, io.related_file_path(output_path, postfix=".unfilled"))

    for cleanup_file in [tiles_vrt_path, tiles_file_list, merged_vrt_path, geotiff_small_path, geotiff_small_filled_path]:
        if os.path.exists(cleanup_file): os.remove(cleanup_file)

    for t in tiles:
@@ -295,7 +295,7 @@ def compute_euclidean_map(geotiff_path, output_path, overwrite=False):
            log.ODM_WARNING("Cannot compute euclidean distance file: %s" % output_path)
        else:
            log.ODM_WARNING("Cannot compute euclidean map, gdal_proximity is missing")

    else:
        log.ODM_INFO("Found a euclidean distance map: %s" % output_path)
        return output_path
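
Note: a hedged sketch of the kind of gdal_proximity call the hunk above guards against being missing. The flags are standard gdal_proximity.py options, but the function name, the nodata convention, and the invocation via subprocess are illustrative assumptions, not ODM's actual compute_euclidean_map implementation.

import shutil
import subprocess

def euclidean_map_sketch(geotiff_path, output_path, nodata=-9999):
    # Distance from every cell to the nearest nodata cell, written as a new raster.
    exe = shutil.which("gdal_proximity.py")
    if exe is None:
        # Mirrors the warning branch above: the utility ships with GDAL's Python scripts.
        print("gdal_proximity is missing")
        return None
    subprocess.run([exe, geotiff_path, output_path,
                    "-values", str(nodata),    # cells equal to nodata are the proximity targets (assumed convention)
                    "-distunits", "PIXEL",
                    "-ot", "Float32"], check=True)
    return output_path
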