#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
#
# Project: PROJ
# Purpose: Convert a TIN JSON to a TIN GeoPackage
# Author: Even Rouault <even.rouault at spatialys.com>
#
###############################################################################
# Copyright (c) 2025, Even Rouault <even.rouault at spatialys.com>
#
# SPDX-License-Identifier: MIT
###############################################################################

import argparse
import json
import struct

from osgeo import gdal, ogr, osr
| 21 | + |
def get_args():
    """Parse the command line and return the parsed arguments."""
    arg_parser = argparse.ArgumentParser(
        description="Convert a TIN JSON to a TIN GeoPackage."
    )
    arg_parser.add_argument("source", help="Source JSON file")
    arg_parser.add_argument("dest", help="Destination GeoPackage file")
    return arg_parser.parse_args()
| 29 | + |
| 30 | + |
def as_i32le_hex(v):
    """Return *v* packed as a little-endian signed 32-bit integer and
    rendered as an 8-character uppercase hexadecimal string."""
    return struct.pack("<i", v).hex().upper()
| 33 | + |
| 34 | + |
def convert_json_to_gpkg(source, dest):
    """Convert a PROJ triangulation JSON file into a TIN GeoPackage.

    The JSON file must have file_type 'triangulation_file' and contain
    'vertices'/'vertices_columns' and 'triangles'/'triangles_columns'
    arrays. Vertices are written as a point layer; triangle geometries are
    exposed through a SQL view that rebuilds polygon blobs from the vertex
    point blobs, backed by a manually populated RTree.

    :param source: path to the input JSON file
    :param dest: path of the GeoPackage file to create
    :raises Exception: if a mandatory JSON member is missing or the
                       file_type is not handled
    """

    # Use a context manager so the file handle is closed deterministically
    # (the previous open(...).read() left the handle to the garbage
    # collector), and let json parse the stream directly.
    with open(source, "rb") as f:
        j = json.load(f)

    if "file_type" not in j:
        raise Exception(f"'file_type' missing in {source}")
    file_type = j["file_type"]
    if file_type != "triangulation_file":
        raise Exception(f"file_type={file_type} not handled")

    if "vertices" not in j or not isinstance(j["vertices"], list):
        raise Exception(f"'vertices' array missing in {source}")
    vertices = j["vertices"]

    if "vertices_columns" not in j or not isinstance(j["vertices_columns"], list):
        raise Exception(f"'vertices_columns' array missing in {source}")
    vertices_columns = j["vertices_columns"]
    idx_source_x = vertices_columns.index("source_x")
    idx_source_y = vertices_columns.index("source_y")

    if "triangles" not in j or not isinstance(j["triangles"], list):
        raise Exception(f"'triangles' array missing in {source}")
    triangles = j["triangles"]

    if "triangles_columns" not in j or not isinstance(j["triangles_columns"], list):
        raise Exception(f"'triangles_columns' array missing in {source}")
    triangles_columns = j["triangles_columns"]
    idx_vertex1 = triangles_columns.index("idx_vertex1")
    idx_vertex2 = triangles_columns.index("idx_vertex2")
    idx_vertex3 = triangles_columns.index("idx_vertex3")

    # Small page size keeps the output compact; triggers and raster
    # support are not wanted for this vector-only content.
    with gdal.config_options(
        {
            "OGR_SQLITE_PRAGMA": "page_size=1024",
            "CREATE_TRIGGERS": "NO",
            "CREATE_RASTER_TABLES": "NO",
        }
    ):
        ds = gdal.GetDriverByName("GPKG").CreateVector(dest)
        ds.StartTransaction()

    srs = None
    if "input_crs" in j:
        srs = osr.SpatialReference()
        srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        srs.SetFromUserInput(j["input_crs"])

    # Carry over all scalar JSON members (not the bulky arrays) into the
    # gpkg_metadata entry.
    metadata = {}
    for key, value in j.items():
        if key not in (
            "vertices_columns",
            "triangles_columns",
            "vertices",
            "triangles",
        ):
            metadata[key] = value

    if "target_x" in vertices_columns and "target_y" in vertices_columns:
        idx_target_x = vertices_columns.index("target_x")
        idx_target_y = vertices_columns.index("target_y")

        # Record horizontal shift extrema so consumers get quick bounds
        # on the correction magnitude.
        shift_x = [vertex[idx_target_x] - vertex[idx_source_x] for vertex in vertices]
        shift_y = [vertex[idx_target_y] - vertex[idx_source_y] for vertex in vertices]
        metadata["min_shift_x"] = min(shift_x)
        metadata["max_shift_x"] = max(shift_x)
        metadata["min_shift_y"] = min(shift_y)
        metadata["max_shift_y"] = max(shift_y)

    metadata["num_vertices"] = len(vertices)

    # Create the (optional) GeoPackage metadata tables by hand.
    ds.ExecuteSQL(
        """CREATE TABLE gpkg_metadata (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    md_scope TEXT NOT NULL DEFAULT 'dataset',
    md_standard_uri TEXT NOT NULL,
    mime_type TEXT NOT NULL DEFAULT 'text/xml',
    metadata TEXT NOT NULL DEFAULT ''
)"""
    )
    ds.ExecuteSQL(
        """CREATE TABLE gpkg_metadata_reference (
    reference_scope TEXT NOT NULL,
    table_name TEXT,
    column_name TEXT,
    row_id_value INTEGER,
    timestamp DATETIME NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')),
    md_file_id INTEGER NOT NULL,
    md_parent_id INTEGER,
    CONSTRAINT crmr_mfi_fk FOREIGN KEY (md_file_id) REFERENCES gpkg_metadata(id),
    CONSTRAINT crmr_mpi_fk FOREIGN KEY (md_parent_id) REFERENCES gpkg_metadata(id)
)"""
    )

    # Double single quotes to embed the JSON in a SQL string literal.
    serialized_json = json.dumps(metadata).replace("'", "''")
    ds.ExecuteSQL(
        f"INSERT INTO gpkg_metadata (id, md_scope, md_standard_uri, mime_type, metadata) VALUES (1, 'dataset', 'https://proj.org', 'application/json', '{serialized_json}')"
    )
    ds.ExecuteSQL(
        "INSERT INTO gpkg_metadata_reference (reference_scope, table_name, column_name, row_id_value, md_file_id, md_parent_id) VALUES ('geopackage', NULL, NULL, NULL, 1, NULL)"
    )

    # Vertices become a plain point layer; no per-vertex spatial index,
    # spatial filtering goes through the triangles RTree built below.
    lyr = ds.CreateLayer(
        "vertices",
        geom_type=ogr.wkbPoint,
        srs=srs,
        options=["SPATIAL_INDEX=NO", "GEOMETRY_NULLABLE=NO"],
    )

    # Maps OGR field index -> JSON column index. source_x/source_y are
    # stored as the point geometry, not as attribute fields.
    ogr_to_vertices_column_idx = []
    for idx, col in enumerate(vertices_columns):
        if idx in (idx_source_x, idx_source_y):
            continue
        is_reserved_col = col in (
            "target_x",
            "target_y",
            "source_z",
            "target_z",
            "offset_z",
        )
        # Field types are inferred from the first vertex; reserved columns
        # are forced to Real even if the first value happens to be an int.
        if isinstance(vertices[0][idx], float) or is_reserved_col:
            ogr_type = ogr.OFTReal
        elif isinstance(vertices[0][idx], int):
            ogr_type = ogr.OFTInteger
        else:
            ogr_type = ogr.OFTString
        fld_defn = ogr.FieldDefn(col, ogr_type)
        if is_reserved_col:
            fld_defn.SetNullable(False)
        lyr.CreateField(fld_defn)
        ogr_to_vertices_column_idx.append(idx)
    for fid, v in enumerate(vertices):
        f = ogr.Feature(lyr.GetLayerDefn())
        for ogr_idx, idx in enumerate(ogr_to_vertices_column_idx):
            f.SetField(ogr_idx, v[idx])
        p = ogr.Geometry(ogr.wkbPoint)
        p.SetPoint_2D(0, v[idx_source_x], v[idx_source_y])
        f.SetGeometryDirectly(p)
        # FID == vertex index in the JSON array, relied on by the view
        # joins below.
        f.SetFID(fid)
        lyr.CreateFeature(f)

    # Triangle attributes go into a geometry-less table; the polygon
    # geometry is reconstructed by the 'triangles' view created below.
    lyr = ds.CreateLayer("triangles_def", geom_type=ogr.wkbNone)
    other_fields = ""
    for idx, col in enumerate(triangles_columns):
        if isinstance(triangles[0][idx], float):
            ogr_type = ogr.OFTReal
        elif isinstance(triangles[0][idx], int):
            ogr_type = ogr.OFTInteger
        else:
            ogr_type = ogr.OFTString
        fld_defn = ogr.FieldDefn(col, ogr_type)
        if col in ("idx_vertex1", "idx_vertex2", "idx_vertex3"):
            fld_defn.SetNullable(False)
        lyr.CreateField(fld_defn)
        other_fields += ", "
        other_fields += col
    for fid, triangle in enumerate(triangles):
        f = ogr.Feature(lyr.GetLayerDefn())
        for idx in range(len(triangles_columns)):
            f.SetField(idx, triangle[idx])
        f.SetFID(fid)
        lyr.CreateFeature(f)

    # Retrieve the srs_id GDAL assigned when creating the vertices layer.
    with ds.ExecuteSQL(
        "SELECT srs_id FROM gpkg_contents WHERE table_name = 'vertices'"
    ) as sql_lyr:
        f = sql_lyr.GetNextFeature()
        srs_id = f["srs_id"]

    # Generator expressions: no need to materialize throwaway lists.
    min_x = min(v[idx_source_x] for v in vertices)
    max_x = max(v[idx_source_x] for v in vertices)
    min_y = min(v[idx_source_y] for v in vertices)
    max_y = max(v[idx_source_y] for v in vertices)

    # We use a trick to generate GPKG polygon geometries from GPKG point geometries
    # of the vertices.
    srs_id_i32le = as_i32le_hex(srs_id)
    wkb_polygon_i32le = as_i32le_hex(3)
    number_rings_i32le = as_i32le_hex(1)
    number_vertices_i32le = as_i32le_hex(4)
    triangle_gpkg_prefix = f"47500001{srs_id_i32le}01{wkb_polygon_i32le}{number_rings_i32le}{number_vertices_i32le}"
    # 14 = GPKG_header_size_without_envelope (8) + WKB point header (5) + base_one_index (1)
    ds.ExecuteSQL(
        f"CREATE VIEW triangles AS SELECT triangles_def.fid AS OGC_FID{other_fields}, CAST(X'{triangle_gpkg_prefix}' || substr(v1.geom, 14) || substr(v2.geom, 14) || substr(v3.geom, 14) || substr(v1.geom, 14) AS BLOB) AS geom FROM triangles_def LEFT JOIN vertices v1 ON idx_vertex1 = v1.fid LEFT JOIN vertices v2 ON idx_vertex2 = v2.fid LEFT JOIN vertices v3 ON idx_vertex3 = v3.fid"
    )
    # Register the view as a regular feature table.
    ds.ExecuteSQL(
        f"INSERT INTO gpkg_contents (table_name, identifier, data_type, srs_id, min_x, min_y, max_x, max_y) VALUES ('triangles', 'triangles', 'features', {srs_id}, {min_x}, {min_y}, {max_x}, {max_y})"
    )
    ds.ExecuteSQL(
        f"INSERT INTO gpkg_geometry_columns (table_name, column_name, geometry_type_name, srs_id, z, m) values ('triangles', 'geom', 'POLYGON', {srs_id}, 0, 0)"
    )

    # Build the RTree for the triangles view by hand from the vertex
    # coordinates (the view has no stored geometry to index).
    ds.ExecuteSQL(
        "CREATE VIRTUAL TABLE rtree_triangles_geom USING rtree(id, minx, maxx, miny, maxy)"
    )
    for fid, triangle in enumerate(triangles):
        v1 = vertices[triangle[idx_vertex1]]
        v2 = vertices[triangle[idx_vertex2]]
        v3 = vertices[triangle[idx_vertex3]]
        tab_x = [v1[idx_source_x], v2[idx_source_x], v3[idx_source_x]]
        tab_y = [v1[idx_source_y], v2[idx_source_y], v3[idx_source_y]]
        minx = min(tab_x)
        miny = min(tab_y)
        maxx = max(tab_x)
        maxy = max(tab_y)
        ds.ExecuteSQL(
            f"INSERT INTO rtree_triangles_geom VALUES ({fid}, {minx}, {maxx}, {miny}, {maxy})"
        )

    ds.CommitTransaction()
    # Strip bookkeeping that is not needed in the final read-only artefact.
    ds.ExecuteSQL("DELETE FROM sqlite_sequence")
    ds.ExecuteSQL("DROP TRIGGER trigger_insert_feature_count_vertices")
    ds.ExecuteSQL("DROP TRIGGER trigger_delete_feature_count_vertices")
    ds.ExecuteSQL("DROP TRIGGER trigger_insert_feature_count_triangles_def")
    ds.ExecuteSQL("DROP TRIGGER trigger_delete_feature_count_triangles_def")
    ds.ExecuteSQL("DROP TABLE gpkg_ogr_contents")
    ds.ExecuteSQL("VACUUM")
    ds.Close()

    # Check that the triangle coverage is OK (no overlaps)
    # NOTE(review): lexicographic comparison of the version string works
    # here because both operands are 7-digit strings.
    if gdal.VersionInfo(None) >= "3120000":
        with gdal.alg.vector.check_coverage(
            input=dest, output="", output_format="MEM", input_layer="triangles"
        ) as alg:
            out_ds = alg.Output()
            out_lyr = out_ds.GetLayer(0)
            if out_lyr.GetFeatureCount():
                print("Coverage of triangles has issues. Invalid edges:")
                for f in out_lyr:
                    f.DumpReadable()
| 264 | + |
| 265 | + |
if __name__ == "__main__":
    # Make GDAL raise Python exceptions instead of returning error codes.
    gdal.UseExceptions()
    cli_args = get_args()
    convert_json_to_gpkg(cli_args.source, cli_args.dest)