diff --git a/contrib/pc2dem/pc2dem.py b/contrib/pc2dem/pc2dem.py index 0441565b..3e2811c1 100755 --- a/contrib/pc2dem/pc2dem.py +++ b/contrib/pc2dem/pc2dem.py @@ -51,7 +51,6 @@ commands.create_dem(args.point_cloud, outdir=outdir, resolution=args.resolution, decimation=1, - verbose=True, max_workers=multiprocessing.cpu_count(), keep_unfilled_copy=False ) \ No newline at end of file diff --git a/opendm/config.py b/opendm/config.py index 64884676..89eb00e3 100755 --- a/opendm/config.py +++ b/opendm/config.py @@ -654,14 +654,6 @@ def config(argv=None, parser=None): default=False, help='Create Cloud-Optimized GeoTIFFs instead of normal GeoTIFFs. Default: %(default)s') - - parser.add_argument('--verbose', '-v', - action=StoreTrue, - nargs=0, - default=False, - help='Print additional messages to the console. ' - 'Default: %(default)s') - parser.add_argument('--copy-to', metavar='', action=StoreValue, diff --git a/opendm/dem/commands.py b/opendm/dem/commands.py index 6f127428..4e79edfa 100755 --- a/opendm/dem/commands.py +++ b/opendm/dem/commands.py @@ -35,11 +35,11 @@ except ModuleNotFoundError: except: pass -def classify(lasFile, scalar, slope, threshold, window, verbose=False): +def classify(lasFile, scalar, slope, threshold, window): start = datetime.now() try: - pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window, verbose) + pdal.run_pdaltranslate_smrf(lasFile, lasFile, scalar, slope, threshold, window) except: log.ODM_WARNING("Error creating classified file %s" % lasFile) @@ -90,7 +90,7 @@ error = None def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'], gapfill=True, outdir='', resolution=0.1, max_workers=1, max_tile_size=4096, - verbose=False, decimation=None, keep_unfilled_copy=False, + decimation=None, keep_unfilled_copy=False, apply_smoothing=True): """ Create DEM from multiple radii, and optionally gapfill """ @@ -187,7 +187,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', 
radiuses=['0.56'] d = pdal.json_add_decimation_filter(d, decimation) pdal.json_add_readers(d, [input_point_cloud]) - pdal.run_pipeline(d, verbose=verbose) + pdal.run_pipeline(d) parallel_map(process_tile, tiles, max_workers) diff --git a/opendm/dem/pdal.py b/opendm/dem/pdal.py index fb3edf31..8e4e5f6f 100644 --- a/opendm/dem/pdal.py +++ b/opendm/dem/pdal.py @@ -133,22 +133,13 @@ def json_add_readers(json, filenames): return json -def json_print(json): - """ Pretty print JSON """ - log.ODM_DEBUG(jsonlib.dumps(json, indent=4, separators=(',', ': '))) - - """ Run PDAL commands """ -def run_pipeline(json, verbose=False): +def run_pipeline(json): """ Run PDAL Pipeline with provided JSON """ - if verbose: - json_print(json) # write to temp file f, jsonfile = tempfile.mkstemp(suffix='.json') - if verbose: - log.ODM_INFO('Pipeline file: %s' % jsonfile) os.write(f, jsonlib.dumps(json).encode('utf8')) os.close(f) @@ -157,14 +148,11 @@ def run_pipeline(json, verbose=False): 'pipeline', '-i %s' % double_quote(jsonfile) ] - if verbose or sys.platform == 'win32': - system.run(' '.join(cmd)) - else: - system.run(' '.join(cmd) + ' > /dev/null 2>&1') + system.run(' '.join(cmd)) os.remove(jsonfile) -def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose=False): +def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window): """ Run PDAL translate """ cmd = [ 'pdal', @@ -178,12 +166,9 @@ def run_pdaltranslate_smrf(fin, fout, scalar, slope, threshold, window, verbose= '--filters.smrf.window=%s' % window, ] - if verbose: - log.ODM_INFO(' '.join(cmd)) - system.run(' '.join(cmd)) -def merge_point_clouds(input_files, output_file, verbose=False): +def merge_point_clouds(input_files, output_file): if len(input_files) == 0: log.ODM_WARNING("Cannot merge point clouds, no point clouds to merge.") return @@ -194,8 +179,5 @@ def merge_point_clouds(input_files, output_file, verbose=False): ' '.join(map(double_quote, input_files + [output_file])), ] - if verbose: 
- log.ODM_INFO(' '.join(cmd)) - system.run(' '.join(cmd)) diff --git a/opendm/mesh.py b/opendm/mesh.py index f0c524a6..fd7f21bc 100644 --- a/opendm/mesh.py +++ b/opendm/mesh.py @@ -8,7 +8,7 @@ from opendm import concurrency from scipy import signal import numpy as np -def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, verbose=False, available_cores=None, method='gridded', smooth_dsm=True): +def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, available_cores=None, method='gridded', smooth_dsm=True): # Create DSM from point cloud # Create temporary directory @@ -33,20 +33,18 @@ def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, gapfill=True, outdir=tmp_directory, resolution=dsm_resolution, - verbose=verbose, max_workers=available_cores, apply_smoothing=smooth_dsm ) if method == 'gridded': - mesh = dem_to_mesh_gridded(os.path.join(tmp_directory, 'mesh_dsm.tif'), outMesh, maxVertexCount, verbose, maxConcurrency=max(1, available_cores)) + mesh = dem_to_mesh_gridded(os.path.join(tmp_directory, 'mesh_dsm.tif'), outMesh, maxVertexCount, maxConcurrency=max(1, available_cores)) elif method == 'poisson': - dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply'), verbose) + dsm_points = dem_to_points(os.path.join(tmp_directory, 'mesh_dsm.tif'), os.path.join(tmp_directory, 'dsm_points.ply')) mesh = screened_poisson_reconstruction(dsm_points, outMesh, depth=depth, samples=samples, maxVertexCount=maxVertexCount, - threads=max(1, available_cores - 1), # poissonrecon can get stuck on some machines if --threads == all cores - verbose=verbose) + threads=max(1, available_cores - 1)) # poissonrecon can get stuck on some machines if --threads == all cores else: raise 'Not a valid method: ' + method @@ -57,14 +55,13 @@ def create_25dmesh(inPointCloud, 
outMesh, dsm_radius=0.07, dsm_resolution=0.05, return mesh -def dem_to_points(inGeotiff, outPointCloud, verbose=False): +def dem_to_points(inGeotiff, outPointCloud): log.ODM_INFO('Sampling points from DSM: %s' % inGeotiff) kwargs = { 'bin': context.dem2points_path, 'outfile': outPointCloud, - 'infile': inGeotiff, - 'verbose': '-verbose' if verbose else '' + 'infile': inGeotiff } system.run('"{bin}" -inputFile "{infile}" ' @@ -72,12 +69,12 @@ def dem_to_points(inGeotiff, outPointCloud, verbose=False): '-skirtHeightThreshold 1.5 ' '-skirtIncrements 0.2 ' '-skirtHeightCap 100 ' - ' {verbose} '.format(**kwargs)) + '-verbose '.format(**kwargs)) return outPointCloud -def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxConcurrency=1): +def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, maxConcurrency=1): log.ODM_INFO('Creating mesh from DSM: %s' % inGeotiff) mesh_path, mesh_filename = os.path.split(outMesh) @@ -99,8 +96,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo 'outfile': outMeshDirty, 'infile': inGeotiff, 'maxVertexCount': maxVertexCount, - 'maxConcurrency': maxConcurrency, - 'verbose': '-verbose' if verbose else '' + 'maxConcurrency': maxConcurrency } system.run('"{bin}" -inputFile "{infile}" ' '-outputFile "{outfile}" ' @@ -108,7 +104,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo '-maxVertexCount {maxVertexCount} ' '-maxConcurrency {maxConcurrency} ' '-edgeSwapThreshold 0.15 ' - ' {verbose} '.format(**kwargs)) + '-verbose '.format(**kwargs)) break except Exception as e: maxConcurrency = math.floor(maxConcurrency / 2) @@ -138,7 +134,7 @@ def dem_to_mesh_gridded(inGeotiff, outMesh, maxVertexCount, verbose=False, maxCo return outMesh -def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores, verbose=False): +def screened_poisson_reconstruction(inPointCloud, outMesh, 
depth = 8, samples = 1, maxVertexCount=100000, pointWeight=4, threads=context.num_cores): mesh_path, mesh_filename = os.path.split(outMesh) # mesh_path = path/to @@ -165,8 +161,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = 'depth': depth, 'samples': samples, 'pointWeight': pointWeight, - 'threads': int(threads), - 'verbose': '--verbose' if verbose else '' + 'threads': int(threads) } # Run PoissonRecon @@ -178,8 +173,7 @@ def screened_poisson_reconstruction(inPointCloud, outMesh, depth = 8, samples = '--samplesPerNode {samples} ' '--threads {threads} ' '--bType 2 ' - '--linearFit ' - '{verbose}'.format(**poissonReconArgs)) + '--linearFit '.format(**poissonReconArgs)) except Exception as e: log.ODM_WARNING(str(e)) diff --git a/opendm/point_cloud.py b/opendm/point_cloud.py index 481ef33a..8d763594 100644 --- a/opendm/point_cloud.py +++ b/opendm/point_cloud.py @@ -71,7 +71,7 @@ def split(input_point_cloud, outdir, filename_template, capacity, dims=None): return [os.path.join(outdir, f) for f in os.listdir(outdir)] -def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, boundary=None, verbose=False, max_concurrency=1): +def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank=16, sample_radius=0, boundary=None, max_concurrency=1): """ Filters a point cloud """ @@ -82,8 +82,7 @@ def filter(input_point_cloud, output_point_cloud, standard_deviation=2.5, meank= args = [ '--input "%s"' % input_point_cloud, '--output "%s"' % output_point_cloud, - '--concurrency %s' % max_concurrency, - '--verbose' if verbose else '', + '--concurrency %s' % max_concurrency ] if sample_radius > 0: diff --git a/stages/odm_app.py b/stages/odm_app.py index e05db6f2..b82228e8 100644 --- a/stages/odm_app.py +++ b/stages/odm_app.py @@ -35,8 +35,7 @@ class ODMApp: log.logger.init_json_output(json_log_paths, args) - dataset = ODMLoadDatasetStage('dataset', args, progress=5.0, - 
verbose=args.verbose) + dataset = ODMLoadDatasetStage('dataset', args, progress=5.0) split = ODMSplitStage('split', args, progress=75.0) merge = ODMMergeStage('merge', args, progress=100.0) opensfm = ODMOpenSfMStage('opensfm', args, progress=25.0) @@ -47,15 +46,12 @@ class ODMApp: oct_tree=max(1, min(14, args.mesh_octree_depth)), samples=1.0, point_weight=4.0, - max_concurrency=args.max_concurrency, - verbose=args.verbose) + max_concurrency=args.max_concurrency) texturing = ODMMvsTexStage('mvs_texturing', args, progress=70.0) georeferencing = ODMGeoreferencingStage('odm_georeferencing', args, progress=80.0, - gcp_file=args.gcp, - verbose=args.verbose) + gcp_file=args.gcp) dem = ODMDEMStage('odm_dem', args, progress=90.0, - max_concurrency=args.max_concurrency, - verbose=args.verbose) + max_concurrency=args.max_concurrency) orthophoto = ODMOrthoPhotoStage('odm_orthophoto', args, progress=98.0) report = ODMReport('odm_report', args, progress=99.0) postprocess = ODMPostProcess('odm_postprocess', args, progress=100.0) diff --git a/stages/odm_dem.py b/stages/odm_dem.py index c98eff79..f9464c0c 100755 --- a/stages/odm_dem.py +++ b/stages/odm_dem.py @@ -58,8 +58,7 @@ class ODMDEMStage(types.ODM_Stage): args.smrf_scalar, args.smrf_slope, args.smrf_threshold, - args.smrf_window, - verbose=args.verbose + args.smrf_window ) with open(pc_classify_marker, 'w') as f: @@ -103,7 +102,6 @@ class ODMDEMStage(types.ODM_Stage): outdir=odm_dem_root, resolution=resolution / 100.0, decimation=args.dem_decimation, - verbose=args.verbose, max_workers=args.max_concurrency, keep_unfilled_copy=args.dem_euclidean_map ) diff --git a/stages/odm_filterpoints.py b/stages/odm_filterpoints.py index cda5a219..b988aa90 100644 --- a/stages/odm_filterpoints.py +++ b/stages/odm_filterpoints.py @@ -53,7 +53,6 @@ class ODMFilterPoints(types.ODM_Stage): standard_deviation=args.pc_filter, sample_radius=args.pc_sample, boundary=boundary_offset(outputs.get('boundary'), reconstruction.get_proj_offset()), - 
verbose=args.verbose, max_concurrency=args.max_concurrency) # Quick check diff --git a/stages/odm_meshing.py b/stages/odm_meshing.py index b711bc94..72231edf 100644 --- a/stages/odm_meshing.py +++ b/stages/odm_meshing.py @@ -27,9 +27,7 @@ class ODMeshingStage(types.ODM_Stage): samples=self.params.get('samples'), maxVertexCount=self.params.get('max_vertex'), pointWeight=self.params.get('point_weight'), - threads=max(1, self.params.get('max_concurrency') - 1), # poissonrecon can get stuck on some machines if --threads == all cores - verbose=self.params.get('verbose')) - + threads=max(1, self.params.get('max_concurrency') - 1)) # poissonrecon can get stuck on some machines if --threads == all cores else: log.ODM_WARNING('Found a valid ODM Mesh file in: %s' % tree.odm_mesh) @@ -68,7 +66,6 @@ class ODMeshingStage(types.ODM_Stage): depth=self.params.get('oct_tree'), maxVertexCount=self.params.get('max_vertex'), samples=self.params.get('samples'), - verbose=self.params.get('verbose'), available_cores=args.max_concurrency, method='poisson' if args.fast_orthophoto else 'gridded', smooth_dsm=True) diff --git a/stages/odm_orthophoto.py b/stages/odm_orthophoto.py index a15e9728..39e2bf60 100644 --- a/stages/odm_orthophoto.py +++ b/stages/odm_orthophoto.py @@ -18,7 +18,6 @@ class ODMOrthoPhotoStage(types.ODM_Stage): def process(self, args, outputs): tree = outputs['tree'] reconstruction = outputs['reconstruction'] - verbose = '-verbose' if args.verbose else '' # define paths and create working directories system.mkdir_p(tree.odm_orthophoto) @@ -42,8 +41,7 @@ class ODMOrthoPhotoStage(types.ODM_Stage): 'corners': tree.odm_orthophoto_corners, 'res': resolution, 'bands': '', - 'depth_idx': '', - 'verbose': verbose + 'depth_idx': '' } models = [] @@ -85,7 +83,7 @@ class ODMOrthoPhotoStage(types.ODM_Stage): # run odm_orthophoto system.run('"{odm_ortho_bin}" -inputFiles {models} ' - '-logFile "{log}" -outputFile "{ortho}" -resolution {res} {verbose} ' + '-logFile "{log}" -outputFile 
"{ortho}" -resolution {res} -verbose ' '-outputCornerFile "{corners}" {bands} {depth_idx}'.format(**kwargs)) # Create georeferenced GeoTiff