# revision history
# 0.1: initial implementation
# 0.15: add pipeline architecture
+ # 0.3: refactoring
# function: clustering, filtering, make footprints, make LoD1, make spreadsheet
# license: MIT license
# reference:
@@ -292,7 +293,7 @@ def view_tin(ground_xyz, tri):
    ax.plot_trisurf(ground_xyz[:, 0], ground_xyz[:, 1], ground_xyz[:, 2], triangles=tri.simplices, cmap=plt.cm.Spectral)
    plt.show()

- import pyvista as pv, pydeck as pdk, meshio
+ import pyvista as pv  # , pydeck as pdk, meshio
from shapely.geometry import Polygon, MultiPolygon, mapping

def extrude_polygon(poly, height):
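For context on why pyvista is kept while pydeck and meshio are dropped: the commit only shows the signature of `extrude_polygon`, not its body, but a minimal sketch of how a shapely footprint could be lifted into an LoD1 prism with pyvista's `extrude` filter might look like this (the helper name and face layout here are illustrative, not this commit's implementation):

```python
# Hypothetical sketch only: one way a footprint polygon could be
# extruded into a prism with pyvista; the real extrude_polygon body
# is not shown in this hunk.
import numpy as np
import pyvista as pv
from shapely.geometry import Polygon

def extrude_polygon_sketch(poly: Polygon, height: float) -> pv.PolyData:
    xy = np.asarray(poly.exterior.coords)[:-1]   # exterior ring, closing vertex dropped
    pts = np.c_[xy, np.zeros(len(xy))]           # embed the footprint at z = 0
    face = np.r_[len(pts), np.arange(len(pts))]  # single n-gon face in VTK layout
    base = pv.PolyData(pts, face)
    return base.extrude((0.0, 0.0, height), capping=True)
```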
@@ -561,7 +562,7 @@ def get_pipeline_stage(pipeline, name):
            return stage
    return None

- def scan_to_model_process(args):
+ def scan_to_model_process(args, progress_tqdm=tqdm):
    function_map = {
        'csf': filtering_csf,
        'color': filtering_color,
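The new `progress_tqdm` parameter makes the progress reporting injectable rather than hard-wired to `tqdm`. Assuming the module-level `tqdm` import this default relies on, a caller could keep the default bar or pass a pass-through to silence it:

```python
# Assumed usage of the injectable progress hook; the lambda is a
# hypothetical pass-through that disables the tqdm bar (e.g. for tests
# or batch jobs where a progress bar would clutter the logs).
results = scan_to_model_process(args)                                      # default tqdm progress bar
results = scan_to_model_process(args, progress_tqdm=lambda it, **kw: it)  # no progress output
```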
@@ -574,60 +575,60 @@ def scan_to_model_process(args):
    outputs_result = []

-    try:
-        pipeline = load_pipeline(args.pipeline)
-        make_folders(args.output)
-
-        dataset = [{
-            "input": args.input,
-            "output": args.output,
-            "active": True}]
-
-        outputs_result = []
-        output = dataset
-        for index, stage in enumerate(pipeline):
-            name = stage['name']
-            output_tag = ''
-            if 'output_tag' in stage:
-                output_tag = stage['output_tag']
-            input_filter = ''
-            if 'input_filter' in stage:
-                input_filter = stage['input_filter']
-
-            if index == 0:
-                dataset = update_module_output(name, output_tag, output)
-            else:
-                dataset = update_output_to_input(name, output_tag, output)
-            if len(input_filter):
-                dataset = update_active_inputs(dataset, 'name', input_filter, True)
-
-            config = stage['config']
-            if 'csf.ground' in config:
-                ground_fname = get_value_from_name(outputs_result[0]['dataset'], 'name', 'ground', 'input')  # TBD: should be generalized.
-                config['ground'] = ground_fname
-
-            output = function_map[name](dataset, config)
-            result = {
-                'name': name,
-                'dataset': output.copy()
-            }
-            outputs_result.append(result)
-
-    except Exception as e:
-        print(traceback.format_exc())
-        pass
+    pipeline = load_pipeline(args.pipeline)
+    make_folders(args.output)
+
+    dataset = [{
+        "input": args.input,
+        "output": args.output,
+        "active": True}]
+
+    outputs_result = []
+    output = dataset
+    index = 0
+    for stage in progress_tqdm(pipeline, desc='scan to model processing...'):
+        name = stage['name']
+        output_tag = ''
+        if 'output_tag' in stage:
+            output_tag = stage['output_tag']
+        input_filter = ''
+        if 'input_filter' in stage:
+            input_filter = stage['input_filter']
+
+        if index == 0:
+            dataset = update_module_output(name, output_tag, output)
+        else:
+            dataset = update_output_to_input(name, output_tag, output)
+        if len(input_filter):
+            dataset = update_active_inputs(dataset, 'name', input_filter, True)
+
+        config = stage['config']
+        if 'csf.ground' in config:
+            ground_fname = get_value_from_name(outputs_result[0]['dataset'], 'name', 'ground', 'input')  # TBD: should be generalized.
+            config['ground'] = ground_fname
+
+        output = function_map[name](dataset, config)
+        result = {
+            'name': name,
+            'dataset': output.copy()
+        }
+        outputs_result.append(result)
+        index += 1

    return outputs_result
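The loop above only reads `name`, the optional `output_tag` / `input_filter`, and `config` from each stage, so a minimal pipeline could look like the sketch below. Only `'csf'` and `'color'` are confirmed stage names (they appear in `function_map`); the tag and filter values are placeholders:

```python
# Hypothetical minimal pipeline, written as the Python object that
# load_pipeline() would yield from pipeline.json. The second stage's
# 'csf.ground' key triggers the ground-file lookup from stage 0's output.
pipeline_sketch = [
    {"name": "csf",   "output_tag": "ground",   "config": {}},
    {"name": "color", "input_filter": "ground", "config": {"csf.ground": ""}},
]
```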
def main():
+    # get current module's path
+    module_path = os.path.dirname(os.path.abspath(__file__))
+
    argparser = argparse.ArgumentParser(description="CSF Filtering")
    # argparser.add_argument("--input", default="./input/belleview_group.las", required=False, help="Input file name")
    # argparser.add_argument("--output", default="./output/belleview/belleview.las", required=False, help="Output file name")
    # argparser.add_argument("--input", default="./input/downsampledlesscloudEURO3.las", required=False, help="Input file name")
    # argparser.add_argument("--output", default="./output/euro3/EURO3.las", required=False, help="Output file name")
-    argparser.add_argument("--input", default="./input/OTP_EPSG26910_5703_38_-122_ca_sunrise_memorial.las", required=False, help="Input file name")
-    argparser.add_argument("--output", default="./output/opt/sunrise.las", required=False, help="Output file name")
-    argparser.add_argument("--pipeline", default="pipeline.json", required=False, help="Pipeline file name")
+    argparser.add_argument("--input", default=f"{module_path}/input/OTP_EPSG26910_5703_38_-122_ca_sunrise_memorial.las", required=False, help="Input file name")
+    argparser.add_argument("--output", default=f"{module_path}/output/opt/sunrise.las", required=False, help="Output file name")
+    argparser.add_argument("--pipeline", default=f"{module_path}/pipeline.json", required=False, help="Pipeline file name")
    args = argparser.parse_args()

    scan_to_model_process(args)
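Since `scan_to_model_process` only reads `input`, `output`, and `pipeline` off the `args` object, it can also be driven without the CLI. A sketch, with placeholder paths:

```python
# Hypothetical programmatic call: the Namespace mimics the attributes
# argparse would provide; all three paths are placeholders.
from argparse import Namespace

args = Namespace(
    input="./input/scan.las",          # placeholder input LAS file
    output="./output/scan/scan.las",   # placeholder output path
    pipeline="./pipeline.json",        # placeholder pipeline config
)
results = scan_to_model_process(args)
```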