
Commit f6eac04

Add files via upload
1 parent 994e8d7 commit f6eac04

File tree

3 files changed: +118, -216 lines

app.py

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
# author: taewook kang
# date: 2025-02-28
# description: scan to model pipeline app
import os, shutil, zipfile
import gradio as gr
from scan_to_model_pipeline import scan_to_model_process

module_path = os.path.dirname(os.path.realpath(__file__))

class args_param:
    input = ''
    output = ''
    pipeline = ''

def process_point_cloud(pipeline_file, input_file, progress=gr.Progress()):
    zip_filename = ''  # defined up front so the function returns cleanly if processing fails
    try:
        output_dir = module_path + "/output"
        os.makedirs(output_dir, exist_ok=True)

        # Run the scan to model process
        params = args_param()
        params.input = input_file
        params.output = os.path.join(output_dir, "result.las")
        params.pipeline = pipeline_file
        scan_to_model_process(params, progress.tqdm)

        # Create a zip file of the output
        zip_filename = module_path + "/output.zip"
        with zipfile.ZipFile(zip_filename, 'w') as zipf:
            for root, dirs, files in progress.tqdm(os.walk(output_dir), desc="Zipping files"):
                for file in files:
                    zipf.write(os.path.join(root, file), file)

        # Clean up the output directory
        shutil.rmtree(output_dir)
    except Exception as e:
        print(e)
        gr.Warning(f'Error: {e}')
    return zip_filename

with gr.Blocks(title="Scan to Model Pipeline") as interface:
    gr.Markdown("# Scan to Model Pipeline (ver 0.2. prototype)")
    gr.Markdown("Upload a pipeline configuration file (JSON) and a point cloud data file (LAS, LAZ) to process the data and download the results as a zip file.")
    gr.Markdown("1. [Upload pipeline configuration (JSON)](https://github.com/mac999/scan_to_model_pipeline/blob/main/pipeline.json)</br>2. Upload [point cloud data (LAS, LAZ)](https://github.com/mac999/scan_to_model_pipeline/tree/main/input)</br>3. Click the 'Run Pipeline' button</br>For details, refer to the [github page](https://github.com/mac999/scan_to_model_pipeline.git)")
    with gr.Row(equal_height=True):
        input_config = gr.File(label="Pipeline Configuration File", file_types=['json'])
        input_files = gr.File(label="Point Cloud Data File", file_types=['pcd', 'txt', 'las', 'laz'])
        output_file = gr.File(label="Download Model Output Zip File")
    run_button = gr.Button("Run Pipeline")
    run_button.click(fn=process_point_cloud, inputs=[input_config, input_files], outputs=output_file)

interface.launch(share=True)
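
For context, the same entry point can be driven without the Gradio UI. A minimal headless sketch, assuming the repository layout above; the file paths here are illustrative placeholders, not files shipped with this commit:

# Headless sketch: drives the pipeline the same way the Gradio callback does.
from scan_to_model_pipeline import scan_to_model_process

class args_param:  # same plain parameter holder that app.py defines
    input = ''
    output = ''
    pipeline = ''

params = args_param()
params.input = 'input/sample.las'        # placeholder point cloud file
params.output = 'output/result.las'      # placeholder output path
params.pipeline = 'pipeline.json'        # stage configuration (JSON)
results = scan_to_model_process(params)  # progress falls back to plain tqdm
print([r['name'] for r in results])      # names of the executed stages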

requirements.txt

Lines changed: 17 additions & 169 deletions
@@ -1,169 +1,17 @@
-alphashape==1.3.1
-altgraph==0.17.3
-asttokens==2.4.0
-attrs==23.1.0
-backcall==0.2.0
-blinker==1.7.0
-cachetools==5.3.1
-certifi==2023.7.22
-charset-normalizer==3.2.0
-click==8.1.7
-click-log==0.4.0
-click-plugins==1.1.1
-cligj==0.7.2
-cloth-simulation-filter==1.1.4
-colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1666700638685/work
-comm==0.1.4
-comtypes==1.3.1
-contourpy==1.1.0
-cycler==0.11.0
-Cython==3.0.8
-dash==2.16.1
-dash-core-components==2.0.0
-dash-html-components==2.0.0
-dash-table==5.0.0
-debugpy==1.8.0
-decorator==5.1.1
-descartes==1.1.0
-durationpy==0.6
-et-xmlfile==1.1.0
-exceptiongroup==1.1.3
-executing==1.2.0
-fastjsonschema==2.18.0
-fiona==1.9.5
-Flask==3.0.2
-fonttools==4.42.1
-fpdf==1.7.2
-GDAL==3.7.2
-geopandas==0.14.0
-google-auth==2.23.0
-grpcio==1.58.0
-h5py==3.9.0
-idna==3.4
-importlib-metadata==6.8.0
-importlib-resources==6.0.1
-iniconfig==2.0.0
-ipykernel==6.25.2
-ipython==8.15.0
-ipython-genutils==0.2.0
-ipywidgets==8.0.4
-itsdangerous==2.1.2
-jedi==0.19.0
-Jinja2==3.1.3
-joblib==1.3.2
-json5==0.9.14
-jsonschema==4.19.0
-jsonschema-specifications==2023.7.1
-jupyter_client==8.3.1
-jupyter_core==5.3.1
-jupyterlab-widgets==3.0.9
-kiwisolver==1.4.5
-laspy==2.5.3
-littleutils==0.2.2
-llvmlite==0.41.1
-lorem==0.1.1
-Markdown==3.4.4
-markdown-it-py==3.0.0
-MarkupSafe==2.1.5
-matplotlib==3.7.3
-matplotlib-inline==0.1.6
-mdurl==0.1.2
-meshio==5.3.5
-mpmath==1.2.1
-nbformat==5.7.0
-nest-asyncio==1.5.7
-networkx==3.2.1
-numba==0.58.1
-numpy @ file:///D:/bld/numpy_1691056377689/work
-nvidia-smi==0.1.3
-open3d @ file:///F:/projects/Open3D/build/lib/python_package/pip_package/open3d-0.17.0%2Bb7f9f3ae6-cp39-cp39-win_amd64.whl#sha256=d324c1f12e0a051cd0c72069545e5a8d291c071a0c3a51548cc08b33467bc550
-opencv-python==4.8.1.78
-openpyxl==3.1.5
-packaging==23.1
-pandas @ file:///D:/bld/pandas_1693415295498/work
-parso==0.8.3
-pefile==2023.2.7
-pickleshare==0.7.5
-Pillow==10.0.0
-plotly==5.20.0
-pluggy==1.3.0
-pooch==1.7.0
-prompt-toolkit==3.0.39
-psutil==5.9.5
-pure-eval==0.2.2
-pyasn1==0.5.0
-pyasn1-modules==0.3.0
-pyautocad==0.2.0
-pybind11==2.11.1
-pybind11-global==2.11.1
-pydeck==0.8.0
-Pygments==2.16.1
-pyinstaller==5.13.2
-pyinstaller-hooks-contrib==2023.8
-PyOpenGL @ file:///C:/Program%20Files%20%28x86%29/ZED%20SDK/PyOpenGL-3.1.5-cp39-cp39-win_amd64.whl#sha256=4f6861cd8f7bd0d8b51fbd0afb10ce10102b79c620237480eaf638162bf40a1b
-PyOpenGL-accelerate @ file:///C:/Program%20Files%20%28x86%29/ZED%20SDK/PyOpenGL_accelerate-3.1.5-cp39-cp39-win_amd64.whl#sha256=1b9b2ccb587d1fbe39cc2f0146674b80962daf5f49bac98e1eea89e672fff8ba
-pyparsing==3.1.1
-pyproj==3.6.1
-pyransac3d==0.6.0
-pyreadline==2.1
-pytest==7.4.2
-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1626286286081/work
-pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1693930252784/work
-pyvista==0.42.3
-pywin32==306
-pywin32-ctypes==0.2.2
-PyYAML==6.0.1
-pyzed==1.3.0
-pyzmq==24.0.1
-referencing==0.30.2
-requests==2.31.0
-retrying==1.3.4
-rfc3339-validator==0.1.4
-rfc3986-validator==0.1.1
-rich==13.7.1
-rpds-py==0.10.3
-rsa==4.9
-Rtree==1.3.0
-scikit-learn==1.3.0
-scipy==1.11.2
-scooby==0.7.4
-seaborn==0.12.2
-Send2Trash==1.8.2
-setuptools-scm==8.0.4
-shapely==2.0.1
-simplekml==1.3.6
-six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
-sniffio==1.3.0
-sorcery==0.2.2
-soupsieve==2.5
-sspilib==0.1.0
-stack-data==0.6.2
-sympy==1.11.1
-tenacity==8.2.3
-threadpoolctl==3.2.0
-tinycss2==1.2.1
-tomli==2.0.1
-toolz==0.12.0
-torch==2.0.1+cu118
-torchaudio==2.0.2+cu118
-torchvision==0.15.2+cu118
-tornado==6.3.3
-tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1691671248568/work
-traitlets==5.10.0
-trimesh==4.2.2
-types-python-dateutil==2.8.19.14
-typing_extensions==4.10.0
-tzdata @ file:///home/conda/feedstock_root/build_artifacts/python-tzdata_1680081134351/work
-ujson==5.8.0
-uri-template==1.3.0
-urllib3==1.26.16
-vtk==9.2.6
-wcwidth==0.2.6
-webcolors==1.13
-webencodings==0.5.1
-websocket-client==1.6.4
-Werkzeug==3.0.1
-widgetsnbextension==4.0.9
-wrapt==1.15.0
-y-py==0.6.2
-zipp==3.16.2
+Gradio==4.16.0
+reportlab==3.6.11
+pandas==2.2.3
+openpyxl==3.1.2
+fpdf==1.7.2
+fastapi==0.112.4
+pydantic==2.10.6
+ifcopenshell
+alphashape
+shapely
+rtree
+laspy
+cloth-simulation-filter
+pyvista
+tqdm
+scipy
+scikit-learn
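
The replacement list drops the machine-local entries of the old freeze (conda "@ file:///" build artifacts and Windows wheel paths), so a plain "pip install -r requirements.txt" should now work in a fresh environment; the packages from ifcopenshell onward are left unpinned.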

scan_to_model_pipeline.py

Lines changed: 48 additions & 47 deletions
@@ -5,6 +5,7 @@
 # revision history
 # 0.1: initial implementation
 # 0.15: add pipeline architecture
+# 0.3: refactoring
 # function: clustering, filtering, make footprints, make LoD1, make spreadsheet
 # license: MIT license
 # reference:
@@ -292,7 +293,7 @@ def view_tin(ground_xyz, tri):
     ax.plot_trisurf(ground_xyz[:,0], ground_xyz[:,1], ground_xyz[:,2], triangles=tri.simplices, cmap=plt.cm.Spectral)
     plt.show()

-import pyvista as pv, pydeck as pdk, meshio
+import pyvista as pv  # , pydeck as pdk, meshio
 from shapely.geometry import Polygon, MultiPolygon, mapping

 def extrude_polygon(poly, height):
@@ -561,7 +562,7 @@ def get_pipeline_stage(pipeline, name):
             return stage
     return None

-def scan_to_model_process(args):
+def scan_to_model_process(args, progress_tqdm=tqdm):
     function_map = {
         'csf': filtering_csf,
         'color': filtering_color,
@@ -574,60 +575,60 @@ def scan_to_model_process(args):

     outputs_result = []

-    try:
-        pipeline = load_pipeline(args.pipeline)
-        make_folders(args.output)
-
-        dataset = [{
-            "input": args.input,
-            "output": args.output,
-            "active": True}]
-
-        outputs_result = []
-        output = dataset
-        for index, stage in enumerate(pipeline):
-            name = stage['name']
-            output_tag = ''
-            if 'output_tag' in stage:
-                output_tag = stage['output_tag']
-            input_filter = ''
-            if 'input_filter' in stage:
-                input_filter = stage['input_filter']
-
-            if index == 0:
-                dataset = update_module_output(name, output_tag, output)
-            else:
-                dataset = update_output_to_input(name, output_tag, output)
-            if len(input_filter):
-                dataset = update_active_inputs(dataset, 'name', input_filter, True)
-
-            config = stage['config']
-            if 'csf.ground' in config:
-                ground_fname = get_value_from_name(outputs_result[0]['dataset'], 'name', 'ground', 'input')  # TBD: should be generalized.
-                config['ground'] = ground_fname
-
-            output = function_map[name](dataset, config)
-            result = {
-                'name': name,
-                'dataset': output.copy()
-            }
-            outputs_result.append(result)
-
-    except Exception as e:
-        print(traceback.format_exc())
-        pass
+    pipeline = load_pipeline(args.pipeline)
+    make_folders(args.output)
+
+    dataset = [{
+        "input": args.input,
+        "output": args.output,
+        "active": True}]
+
+    outputs_result = []
+    output = dataset
+    index = 0
+    for stage in progress_tqdm(pipeline, desc='scan to model processing...'):
+        name = stage['name']
+        output_tag = ''
+        if 'output_tag' in stage:
+            output_tag = stage['output_tag']
+        input_filter = ''
+        if 'input_filter' in stage:
+            input_filter = stage['input_filter']
+
+        if index == 0:
+            dataset = update_module_output(name, output_tag, output)
+        else:
+            dataset = update_output_to_input(name, output_tag, output)
+        if len(input_filter):
+            dataset = update_active_inputs(dataset, 'name', input_filter, True)
+
+        config = stage['config']
+        if 'csf.ground' in config:
+            ground_fname = get_value_from_name(outputs_result[0]['dataset'], 'name', 'ground', 'input')  # TBD: should be generalized.
+            config['ground'] = ground_fname
+
+        output = function_map[name](dataset, config)
+        result = {
+            'name': name,
+            'dataset': output.copy()
+        }
+        outputs_result.append(result)
+        index += 1

     return outputs_result

 def main():
+    # get current module's path
+    module_path = os.path.dirname(os.path.abspath(__file__))
+
     argparser = argparse.ArgumentParser(description="CSF Filtering")
     # argparser.add_argument("--input", default="./input/belleview_group.las", required=False, help="Input file name")
     # argparser.add_argument("--output", default="./output/belleview/belleview.las", required=False, help="Output file name")
     # argparser.add_argument("--input", default="./input/downsampledlesscloudEURO3.las", required=False, help="Input file name")
     # argparser.add_argument("--output", default="./output/euro3/EURO3.las", required=False, help="Output file name")
-    argparser.add_argument("--input", default="./input/OTP_EPSG26910_5703_38_-122_ca_sunrise_memorial.las", required=False, help="Input file name")
-    argparser.add_argument("--output", default="./output/opt/sunrise.las", required=False, help="Output file name")
-    argparser.add_argument("--pipeline", default="pipeline.json", required=False, help="pipeline file name")
+    argparser.add_argument("--input", default=f"{module_path}/input/OTP_EPSG26910_5703_38_-122_ca_sunrise_memorial.las", required=False, help="Input file name")
+    argparser.add_argument("--output", default=f"{module_path}/output/opt/sunrise.las", required=False, help="Output file name")
+    argparser.add_argument("--pipeline", default=f"{module_path}/pipeline.json", required=False, help="pipeline file name")
     args = argparser.parse_args()

     scan_to_model_process(args)
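
The stage loop above consumes a list loaded by load_pipeline: each entry carries a 'name' (one of the function_map keys), an optional 'output_tag' and 'input_filter', and a 'config' dict. A hypothetical sketch of that shape as a Python literal, using the two stage names visible in this diff; the config values are placeholders, and the project's actual settings live in the pipeline.json linked from app.py:

# Hypothetical pipeline definition; mirrors the keys read in the stage loop.
pipeline = [
    {
        "name": "csf",                        # cloth-simulation ground filtering stage
        "output_tag": "ground",               # tag attached to this stage's outputs
        "config": {"cloth_resolution": 0.5},  # placeholder parameter, not the shipped value
    },
    {
        "name": "color",                      # color-based filtering stage
        "input_filter": "ground",             # consume only inputs named 'ground'
        "config": {},                         # stage-specific options go here
    },
]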
