diff --git a/app/routes/geometry.py b/app/routes/geometry.py index f317971..677c599 100644 --- a/app/routes/geometry.py +++ b/app/routes/geometry.py @@ -23,7 +23,7 @@ def get(self, query_data): @blp.arguments(GeometryStartQuerySchema, location="query") @blp.response(201, GeometrySchema) def post(self, geometry_data): - result = geometry_service.start_geometry_check_task(geometry_data["fileUploadId"]) + result = geometry_service.start_geometry_check_task(geometry_data["fileUploadId"], geometry_data["useGeometryPipeline"]) return result diff --git a/app/schemas/geometry_schema.py b/app/schemas/geometry_schema.py index b4c2829..5a86915 100644 --- a/app/schemas/geometry_schema.py +++ b/app/schemas/geometry_schema.py @@ -17,6 +17,7 @@ class GeometrySchema(Schema): class GeometryStartQuerySchema(Schema): fileUploadId = fields.Number(required=True) + useGeometryPipeline = fields.Boolean(required=True) class GeometryGetQuerySchema(Schema): diff --git a/app/services/geometry_diagnostic_log_service.py b/app/services/geometry_diagnostic_log_service.py new file mode 100644 index 0000000..acf4247 --- /dev/null +++ b/app/services/geometry_diagnostic_log_service.py @@ -0,0 +1,726 @@ +from collections import Counter, defaultdict +from dataclasses import asdict, is_dataclass +import json +import math +from typing import Any, Dict, List, Tuple + +from app.utils.geometry_utils import FaceRecord, dot, _uedge, newell_normal_from_points, sub + +def build_topology_report(vertices: List[Tuple[float, float, float]], faces: List[FaceRecord]) -> Dict[str, Any]: + """ + Build a topology summary for the current geometry state. + + Parameters + ---------- + vertices : list[tuple[float, float, float]] + Global vertex list. + faces : list[FaceRecord] + Current face records. + + Returns + ------- + dict + Counts and short samples for faces, vertices, unique edges, + boundary edges, and non-manifold edges. 
+ """ + edge_count: Dict[Tuple[int, int], int] = defaultdict(int) + for face in faces: + for edge in face.undirected_edges(): + edge_count[edge] += 1 + + boundary_edges = [edge for edge, count in edge_count.items() if count == 1] + non_manifold_edges = [edge for edge, count in edge_count.items() if count > 2] + + return { + "num_vertices": len(vertices), + "num_faces": len(faces), + "num_edges": len(edge_count), + "num_boundary_edges": len(boundary_edges), + "num_non_manifold_edges": len(non_manifold_edges), + "boundary_edge_samples": boundary_edges[:20], + "non_manifold_edge_samples": non_manifold_edges[:20], + } + +def build_issue_detection_report( + *, + vertices_before_dedup: int, + vertices_after_dedup: int, + degenerate_fatal_count: int, + degenerate_warning_count: int, + nonplanar_fatal_count: int, + nonplanar_warning_count: int, + tjunctions: List[Dict[str, Any]], + intersections: List[Dict[str, Any]], +) -> Dict[str, Any]: + """ + Build a structured issue report for the inspection stage. + + Parameters + ---------- + vertices_before_dedup : int + Number of vertices before deduplication. + vertices_after_dedup : int + Number of vertices after deduplication. + degenerate_fatal_count : int + Number of fatal degenerate faces detected. + degenerate_warning_count : int + Number of warning degenerate faces detected. + nonplanar_fatal_count : int + Number of fatal non-planar faces detected. + nonplanar_warning_count : int + Number of warning non-planar faces detected. + tjunctions : list[dict] + Raw T-junction detection records. + intersections : list[dict] + Raw segment-facet intersection records. + + Returns + ------- + dict + Structured issue summary. 
+ """ + return { + "vertex_deduplication": { + "before": vertices_before_dedup, + "after": vertices_after_dedup, + "deduplicated": max(0, vertices_before_dedup - vertices_after_dedup), + }, + "degenerate_faces": { + "fatal": degenerate_fatal_count, + "warning": degenerate_warning_count, + }, + "non_planar_faces": { + "fatal": nonplanar_fatal_count, + "warning": nonplanar_warning_count, + }, + "tjunctions": { + "count": len(tjunctions), + "samples": tjunctions[:20], + }, + "intersections": { + "count": len(intersections), + "by_type": dict(Counter(hit.get("hit_type", "unknown") for hit in intersections)), + "samples": intersections[:20], + }, + } + +def append_repair_report( + repair_report: List[Dict[str, Any]], + *, + repair_type: str, + affected_count: int = 0, + before: Any = None, + after: Any = None, + details: Any = None, +) -> None: + """ + Append one repair event to the repair report. + + Parameters + ---------- + repair_report : list[dict] + Mutable list storing all repair events. + repair_type : str + Name of the repair operation. + affected_count : int, optional + Number of affected entities. + before : Any, optional + Relevant state before the repair. + after : Any, optional + Relevant state after the repair. + details : Any, optional + Extra contextual information. + """ + repair_report.append({ + "repair_type": repair_type, + "affected_count": affected_count, + "before": before, + "after": after, + "details": details, + }) + +def build_revalidation_report( + vertices: List[Tuple[float, float, float]], + faces: List[FaceRecord], + *, + tjunctions: List[Dict[str, Any]], + intersections: List[Dict[str, Any]], +) -> Dict[str, Any]: + """ + Build the final revalidation report after repair. + + Parameters + ---------- + vertices : list[tuple[float, float, float]] + Final vertex list. + faces : list[FaceRecord] + Final face list. + tjunctions : list[dict] + Remaining T-junctions after repair. 
+ intersections : list[dict] + Remaining intersections after repair. + + Returns + ------- + dict + Final issue state and topology snapshot. + """ + holes = find_free_edge_loops(faces) + return { + "remaining_tjunctions": { + "count": len(tjunctions), + "samples": tjunctions[:20], + }, + "remaining_intersections": { + "count": len(intersections), + "by_type": dict(Counter(hit.get("hit_type", "unknown") for hit in intersections)), + "samples": intersections[:20], + }, + "remaining_holes": { + "count": len(holes), + "samples": holes[:10], + }, + "topology_after_repair": build_topology_report(vertices, faces), + } + +def create_geometry_processing_report( + *, + obj_file: str, + repaired_obj: str, + topology_before_repair: Dict[str, Any], + issue_detection_report: Dict[str, Any], + repair_report: List[Dict[str, Any]], + revalidation_report: Dict[str, Any], +) -> Dict[str, Any]: + """ + Assemble the full geometry processing report. + + Returns + ------- + dict + Full report structure ready to be serialized as JSON. + """ + return { + "input_obj": obj_file, + "output_repaired_obj": repaired_obj, + "topology_before_repair": topology_before_repair, + "issue_detection_report": issue_detection_report, + "repair_report": repair_report, + "revalidation_report": revalidation_report, + } + +def write_geometry_processing_report(report: Dict[str, Any], report_path: str) -> None: + """ + Write a geometry processing report to disk. + + Parameters + ---------- + report : dict + Report payload. + report_path : str + Output JSON path. 
+ """ + def _json_safe(value: Any) -> Any: + if isinstance(value, (str, int, float, bool)) or value is None: + return value + + if isinstance(value, FaceRecord): + return { + "fid": value.fid, + "verts": value.verts, + "group": value.group, + "group_material": value.group_material, + "material": value.material, + } + + if is_dataclass(value): + return _json_safe(asdict(value)) + + if isinstance(value, dict): + return {str(k): _json_safe(v) for k, v in value.items()} + + if isinstance(value, (list, tuple, set)): + return [_json_safe(v) for v in value] + + return str(value) + + safe_report = _json_safe(report) + with open(report_path, "w") as fp: + json.dump(safe_report, fp, indent=2) + +def find_free_edge_loops(faces): + """ + faces: list of FaceRecord (any polygon, not just triangles) + Returns: list of loops, each loop is a list of vertex ids in order (closed implied). + """ + # Count undirected edge incidence + edge_count = defaultdict(int) + for face in faces: + for u, v in face.undirected_edges(): + edge_count[_uedge(u, v)] += 1 + + free_edges = [e for e, cnt in edge_count.items() if cnt == 1] + if not free_edges: + return [] + + # Build adjacency graph of free edges: vertex -> neighboring vertices along boundary + adj = defaultdict(list) + for u, v in free_edges: + adj[u].append(v) + adj[v].append(u) + + # Extract loops by walking until we return to start + loops = [] + visited_edges = set() + + def mark_edge(u, v): + visited_edges.add(_uedge(u, v)) + + def edge_visited(u, v): + return _uedge(u, v) in visited_edges + + for (u0, v0) in free_edges: + if edge_visited(u0, v0): + continue + + # Start a new loop walk + loop = [u0, v0] + mark_edge(u0, v0) + + prev = u0 + cur = v0 + + safety = 0 + while safety < 100000: + safety += 1 + + # Choose next neighbor not equal to prev and not yet visited, if possible + nxt = None + for cand in adj[cur]: + if cand == prev: + continue + if not edge_visited(cur, cand): + nxt = cand + break + + if nxt is None: + # might be 
closing edge back to start + # try to close if possible + if loop[0] in adj[cur] and not edge_visited(cur, loop[0]): + nxt = loop[0] + else: + break + + mark_edge(cur, nxt) + + if nxt == loop[0]: + # closed + loops.append(loop[:] ) # loop without repeating start at end + break + + loop.append(nxt) + prev, cur = cur, nxt + + # Normalize loops (remove duplicates / rotate to smallest id) + norm = [] + seen = set() + for loop in loops: + if len(loop) < 3: + continue + # rotate so smallest vertex id comes first + m = min(loop) + mi = loop.index(m) + rotated = loop[mi:] + loop[:mi] + # also consider reverse as same + key1 = tuple(rotated) + key2 = tuple([rotated[0]] + list(reversed(rotated[1:]))) + key = min(key1, key2) + if key in seen: + continue + seen.add(key) + norm.append(rotated) + + return norm + +# ----------------------------- +# Polygon quality report +# ----------------------------- +def log_polygon_quality(logger, face_id, grp, mat, mapped, qrep): + logger_fn = logger.error if qrep["status"] == "fatal" else logger.warning + + logger_fn( + "[FACE POLY %s] id=%d group=%s mat=%s n=%d " + "min_edge=%.6e m max_edge=%.6e m ratio=%.3e min_angle=%.3f deg " + "self_intersects_2d=%s drop_axis=%s reasons=%s verts=%s", + qrep["status"].upper(), + face_id, + grp, + mat, + qrep["n"], + qrep["min_edge_len_m"], + qrep["max_edge_len_m"], + qrep["edge_len_ratio"], + qrep["min_angle_deg"], + qrep["self_intersects_2d"], + qrep["projection_axis_dropped"], + qrep["reasons"], + mapped, + ) + +def polygon_quality_report( + face_ids, + points, + *, + edge_len_warn_ratio=100.0, + edge_len_fatal_ratio=1000.0, + min_angle_warn_deg=10.0, + min_angle_fatal_deg=3.0, + min_edge_warn_m=1e-4, + min_edge_fatal_m=1e-6, + proj_tol=1e-12, +): + """ + Polygon-level diagnostics independent of triangulation. 
+ + Parameters + ---------- + face_ids : list[int] + 1-based vertex ids of one polygon face + points : list[(x,y,z)] + unique_vertices list; vertex id i -> points[i-1] + + Returns + ------- + dict with: + { + "n": int, + "min_edge_len_m": float, + "max_edge_len_m": float, + "edge_len_ratio": float, + "min_angle_deg": float, + "self_intersects_2d": bool, + "projection_axis_dropped": "x"|"y"|"z"|None, + "status": "ok"|"warning"|"fatal", + "reasons": [str, ...], + } + """ + + def dist(a, b): + dx = a[0] - b[0] + dy = a[1] - b[1] + dz = a[2] - b[2] + return math.sqrt(dx*dx + dy*dy + dz*dz) + + def norm(v): + return math.sqrt(dot(v, v)) + + def clamp(x, lo, hi): + return max(lo, min(hi, x)) + + def project_face(ids): + nrm = newell_normal_from_points(ids, points) + ax, ay, az = abs(nrm[0]), abs(nrm[1]), abs(nrm[2]) + + if ax == 0.0 and ay == 0.0 and az == 0.0: + return None, None + + if az >= ax and az >= ay: + # drop z -> XY + poly2d = [(points[pid - 1][0], points[pid - 1][1]) for pid in ids] + return poly2d, "z" + elif ay >= ax and ay >= az: + # drop y -> XZ + poly2d = [(points[pid - 1][0], points[pid - 1][2]) for pid in ids] + return poly2d, "y" + else: + # drop x -> YZ + poly2d = [(points[pid - 1][1], points[pid - 1][2]) for pid in ids] + return poly2d, "x" + + def orient2d(a, b, c): + return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]) + + def on_segment_2d(p, a, b, tol=1e-12): + # assumes near-collinear + return ( + min(a[0], b[0]) - tol <= p[0] <= max(a[0], b[0]) + tol and + min(a[1], b[1]) - tol <= p[1] <= max(a[1], b[1]) + tol + ) + + def seg_intersect_2d(a, b, c, d, tol=1e-12): + """ + Proper/general 2D segment intersection. + Returns True if segments intersect. 
+ """ + o1 = orient2d(a, b, c) + o2 = orient2d(a, b, d) + o3 = orient2d(c, d, a) + o4 = orient2d(c, d, b) + + # proper intersection + if ((o1 > tol and o2 < -tol) or (o1 < -tol and o2 > tol)) and \ + ((o3 > tol and o4 < -tol) or (o3 < -tol and o4 > tol)): + return True + + # collinear / touching cases + if abs(o1) <= tol and on_segment_2d(c, a, b, tol): return True + if abs(o2) <= tol and on_segment_2d(d, a, b, tol): return True + if abs(o3) <= tol and on_segment_2d(a, c, d, tol): return True + if abs(o4) <= tol and on_segment_2d(b, c, d, tol): return True + + return False + + def polygon_self_intersects_2d(poly2d): + n = len(poly2d) + if n < 4: + return False + + for i in range(n): + a = poly2d[i] + b = poly2d[(i + 1) % n] + + for j in range(i + 1, n): + # edge j + c = poly2d[j] + d = poly2d[(j + 1) % n] + + # skip same edge + if i == j: + continue + + # skip adjacent edges sharing a vertex + if (i + 1) % n == j or (j + 1) % n == i: + continue + + # skip first/last adjacency in closed polygon + if i == 0 and (j + 1) % n == 0: + continue + + if seg_intersect_2d(a, b, c, d, tol=proj_tol): + return True + return False + + n = len(face_ids) + report = { + "n": n, + "min_edge_len_m": float("inf"), + "max_edge_len_m": 0.0, + "edge_len_ratio": float("inf"), + "min_angle_deg": float("inf"), + "self_intersects_2d": False, + "projection_axis_dropped": None, + "status": "ok", + "reasons": [], + } + + if n < 3: + report["status"] = "fatal" + report["reasons"].append("fewer_than_3_vertices") + return report + + pts = [points[vid - 1] for vid in face_ids] + + # -------------------------------------------------- + # 1) minimum / maximum edge length + # -------------------------------------------------- + edge_lengths = [] + for i in range(n): + a = pts[i] + b = pts[(i + 1) % n] + L = dist(a, b) + edge_lengths.append(L) + + min_edge = min(edge_lengths) if edge_lengths else float("inf") + max_edge = max(edge_lengths) if edge_lengths else 0.0 + ratio = (max_edge / min_edge) if 
min_edge > 0.0 else float("inf") + + report["min_edge_len_m"] = min_edge + report["max_edge_len_m"] = max_edge + report["edge_len_ratio"] = ratio + + # -------------------------------------------------- + # 2) minimum internal angle + # -------------------------------------------------- + min_angle = float("inf") + for i in range(n): + p_prev = pts[(i - 1) % n] + p_cur = pts[i] + p_next = pts[(i + 1) % n] + + v1 = sub(p_prev, p_cur) + v2 = sub(p_next, p_cur) + + n1 = norm(v1) + n2 = norm(v2) + + if n1 <= 0.0 or n2 <= 0.0: + ang = 0.0 + else: + c = clamp(dot(v1, v2) / (n1 * n2), -1.0, 1.0) + ang = math.degrees(math.acos(c)) + + if ang < min_angle: + min_angle = ang + + report["min_angle_deg"] = min_angle + + # -------------------------------------------------- + # 3) self-intersection in projected loop + # -------------------------------------------------- + poly2d, dropped = project_face(face_ids) + report["projection_axis_dropped"] = dropped + if poly2d is not None: + report["self_intersects_2d"] = polygon_self_intersects_2d(poly2d) + + # -------------------------------------------------- + # 4) classify severity + # -------------------------------------------------- + fatal = False + warning = False + + if report["self_intersects_2d"]: + fatal = True + report["reasons"].append("self_intersects_2d") + + if min_edge <= min_edge_fatal_m: + fatal = True + report["reasons"].append("min_edge_too_small_fatal") + elif min_edge <= min_edge_warn_m: + warning = True + report["reasons"].append("min_edge_too_small_warn") + + if ratio >= edge_len_fatal_ratio: + fatal = True + report["reasons"].append("edge_len_ratio_fatal") + elif ratio >= edge_len_warn_ratio: + warning = True + report["reasons"].append("edge_len_ratio_warn") + + if min_angle <= min_angle_fatal_deg: + fatal = True + report["reasons"].append("min_angle_too_small_fatal") + elif min_angle <= min_angle_warn_deg: + warning = True + report["reasons"].append("min_angle_too_small_warn") + + if fatal: + 
report["status"] = "fatal" + elif warning: + report["status"] = "warning" + else: + report["status"] = "ok" + + return report + +def mesh_topology_report(faces, *, sample_n=20): + """ + faces: list of FaceRecord (any polygon, not just triangles) + Returns a dict with topology statistics. + """ + + # --- edge -> incident face fids + edge_to_fids = defaultdict(list) + for face in faces: + for u, v in face.undirected_edges(): + edge_to_fids[_uedge(u, v)].append(face.fid) + + # --- classify edges + edge_count = {e: len(fids) for e, fids in edge_to_fids.items()} + boundary_edges = [e for e, cnt in edge_count.items() if cnt == 1] + interior_ok_edges = [e for e, cnt in edge_count.items() if cnt == 2] + nonmanifold_edges = [e for e, cnt in edge_count.items() if cnt > 2] + + # --- build boundary adjacency (graph of free edges) + badj = defaultdict(set) + for u, v in boundary_edges: + badj[u].add(v) + badj[v].add(u) + + bdeg = {v: len(nbrs) for v, nbrs in badj.items()} + bdeg_hist = dict(Counter(bdeg.values())) + + boundary_endpoints = sorted([v for v, d in bdeg.items() if d == 1]) + boundary_branch_vertices = sorted([v for v, d in bdeg.items() if d > 2]) + + # --- boundary connected components classification + # component is: + # cycle: all vertices degree 2 + # chain: has deg1 endpoints and max deg <=2 + # branched: contains deg >2 + seen_v = set() + comps = [] + for v0 in badj.keys(): + if v0 in seen_v: + continue + stack = [v0] + comp_vs = set() + while stack: + v = stack.pop() + if v in comp_vs: + continue + comp_vs.add(v) + for w in badj[v]: + if w not in comp_vs: + stack.append(w) + seen_v |= comp_vs + + degs = [bdeg[v] for v in comp_vs] + dmax = max(degs) if degs else 0 + dmin = min(degs) if degs else 0 + n1 = sum(1 for d in degs if d == 1) + n2 = sum(1 for d in degs if d == 2) + ngt2 = sum(1 for d in degs if d > 2) + + if ngt2 > 0: + kind = "branched_nonmanifold_boundary" + elif n1 == 0 and dmin == 2 and dmax == 2: + kind = "cycle" + else: + kind = 
"open_chain_or_path" + + comps.append({ + "kind": kind, + "n_vertices": len(comp_vs), + "deg_hist": dict(Counter(degs)), + "sample_vertices": sorted(list(comp_vs))[:min(sample_n, len(comp_vs))] + }) + + # summary counts + comp_kind_hist = dict(Counter(c["kind"] for c in comps)) + + return { + # global + "n_faces": len(faces), + "n_unique_edges": len(edge_count), + "n_boundary_edges": len(boundary_edges), + "n_nonmanifold_edges": len(nonmanifold_edges), + + # high-level flags + # Watertighness still needs more checks (e.g self intersections) + "is_watertight": (len(boundary_edges) == 0), + "has_nonmanifold_edges": (len(nonmanifold_edges) > 0), + "has_nonmanifold_boundary": (len(boundary_branch_vertices) > 0), + + # samples + "boundary_edges_sample": boundary_edges[:sample_n], + "nonmanifold_edges_sample": nonmanifold_edges[:sample_n], + + # boundary vertex degrees + "n_boundary_vertices": len(bdeg), + "boundary_degree_hist": bdeg_hist, + "boundary_endpoints_sample": boundary_endpoints[:sample_n], + "boundary_branch_vertices_sample": boundary_branch_vertices[:sample_n], + + # boundary component breakdown + "boundary_component_kind_hist": comp_kind_hist, + "boundary_components_sample": comps[:min(10, len(comps))], + } + +def log_topology(logger, label, faces): + rep = mesh_topology_report(faces, sample_n=10) + logger.info(f"[TOPO] {label}: " + f"faces={rep['n_faces']} edges={rep['n_unique_edges']} " + f"boundary_edges={rep['n_boundary_edges']} nonmanifold_edges={rep['n_nonmanifold_edges']} " + ) + + return rep diff --git a/app/services/geometry_export_service.py b/app/services/geometry_export_service.py new file mode 100644 index 0000000..8d57406 --- /dev/null +++ b/app/services/geometry_export_service.py @@ -0,0 +1,183 @@ +from collections import defaultdict +import json +import logging +from typing import List, Dict, Any, Tuple +from app.utils.geometry_utils import FaceRecord, _uedge + +logger = logging.getLogger(__name__) + +def 
export_processed_topology_to_gmsh_geo( + faces: List[FaceRecord], + unique_vertices: List[Tuple[float, float, float]], + geo_file: str, + volume_name: str = "RoomVolume", +) -> Tuple[int, int]: + """ + Export processed topology to Gmsh GEO file. + + Parameters + ---------- + faces : List[FaceRecord] + List of processed FaceRecord objects. + unique_vertices : List[Tuple[float, float, float]] + List of unique vertex coordinates. + geo_file : str + Path to the output GEO file. + volume_name : str + Name for the physical volume. + + Returns + ------- + Tuple[int, int] + (num_lines, num_surfaces) + """ + # ----------------------------- + # Build unique edges (Lines) + signed loops + # ----------------------------- + edge_to_line: Dict = {} + line_orientation: Dict = {} + next_line_id = 1 + face_line_loops: List[List[int]] = [] + + for face in faces: + loop_line_ids = [] + n = len(face.verts) + for i in range(n): + a = face.verts[i] + b = face.verts[(i + 1) % n] + key = _uedge(a, b) + if key not in edge_to_line: + edge_to_line[key] = next_line_id + line_orientation[next_line_id] = (a, b) + next_line_id += 1 + lid = edge_to_line[key] + ori = line_orientation[lid] + loop_line_ids.append(lid if ori == (a, b) else -lid) + face_line_loops.append(loop_line_ids) + + # Physical surface groups: material -> list of 0-based face indices + physical_surfaces_dict: Dict = {} + for idx, face in enumerate(faces): + physical_surfaces_dict.setdefault(face.material, []).append(idx) + + logger.info("Physical surfaces: %s", { + mat: len(ids) for mat, ids in physical_surfaces_dict.items() + }) + + # ----------------------------- + # Write GEO + # ----------------------------- + with open(geo_file, "w") as g: + # Points + for i, v in enumerate(unique_vertices, start=1): + g.write(f"Point({i}) = {{ {v[0]}, {v[1]}, {v[2]}, 1.0 }};\n") + g.write("\n") + + # Lines + for lid in range(1, next_line_id): + a, b = line_orientation[lid] + g.write(f"Line({lid}) = {{ {a}, {b} }};\n") + g.write("\n") + + # 
Line Loops + for sid, (loop, face) in enumerate(zip(face_line_loops, faces), start=1): + loop_str = ", ".join(str(x) for x in loop) + + # g.write(f"// fid={face.fid} group={face.group} material={face.material}\n") + g.write(f"Line Loop({sid}) = {{ {loop_str} }};\n") + g.write("\n") + + # Plane Surfaces + for sid in range(1, len(face_line_loops) + 1): + g.write(f"Plane Surface({sid}) = {{ {sid} }};\n") + g.write("\n") + + # Surface Loop + Volume + total_surfaces = len(face_line_loops) + surf_list = ", ".join(str(i) for i in range(1, total_surfaces + 1)) + g.write(f"Surface Loop(1) = {{ {surf_list} }};\n") + g.write("Volume(1) = { 1 };\n") + g.write(f'Physical Volume("{volume_name}") = {{ 1 }};\n') + + # Physical Surfaces + for mat, surface_indices in physical_surfaces_dict.items(): + gmsh_ids = ", ".join(str(i + 1) for i in surface_indices) + g.write(f'Physical Surface("{mat}") = {{ {gmsh_ids} }};\n') + + # Physical Lines + lines_all = ", ".join(str(i) for i in range(1, next_line_id)) + g.write(f'Physical Line("default") = {{ {lines_all} }};\n') + + # Mesh options + g.write('Mesh.Algorithm = 6;\n') + g.write('Mesh.Algorithm3D = 1; // Delaunay3D\n') + g.write('Mesh.Optimize = 1;\n') + g.write('Mesh.CharacteristicLengthFromPoints = 1;\n') + + return next_line_id - 1, len(face_line_loops) + +def export_processed_topology_to_obj( + obj_output_path: str, + unique_vertices: list, + faces: "List[FaceRecord]", +) -> bool: + """ + Export the processed topology to an OBJ file. 
+ Args: + obj_output_path : destination path + unique_vertices : list of (x, y, z) tuples, 0-based (vertex id i -> index i-1) + faces : list of FaceRecord (each carries verts, group, material) + """ + try: + # Group faces by group first, then by group_material + faces_by_group_and_material: dict = defaultdict(lambda: defaultdict(list)) + for face in faces: + faces_by_group_and_material[face.group][face.group_material].append(face) + + with open(obj_output_path, "w") as f: + f.write("# Processed topology from geometry conversion\n\n") + + # Vertices + for x, y, z in unique_vertices: + # previously we flipped z, now flip back for OBJ export + f.write(f"v {x} {z} {-y}\n") + + f.write("\n") + + # Faces grouped by group, then by group_material + for group in sorted(faces_by_group_and_material.keys()): + f.write(f"\ng {group}\n") + for group_material in sorted(faces_by_group_and_material[group].keys()): + f.write(f"usemtl {group_material}\n") + for face in faces_by_group_and_material[group][group_material]: + f.write("f " + " ".join(f"{v}//1" for v in face.verts) + "\n") + return True + + except Exception as ex: + logger.error("Failed to export processed topology to OBJ: %s", ex) + return False + +def export_faces_to_json(faces, filepath): + """ + Export FaceRecord list to JSON for debugging. 
+ + Parameters + ---------- + faces : list[FaceRecord] + filepath : str + """ + data = [] + + for f in faces: + data.append({ + "fid": f.fid, + "verts": f.verts, + "group": getattr(f, "group", None), + "group_material": getattr(f, "group_material", None), + "material": getattr(f, "material", None), + }) + + with open(filepath, "w") as fp: + json.dump(data, fp, indent=2) + + print(f"[DEBUG] Exported {len(faces)} faces → {filepath}") diff --git a/app/services/geometry_inspection_service.py b/app/services/geometry_inspection_service.py new file mode 100644 index 0000000..9aec078 --- /dev/null +++ b/app/services/geometry_inspection_service.py @@ -0,0 +1,835 @@ +from __future__ import annotations +from collections import defaultdict +import logging +import math +from typing import List, Tuple, Dict, Any +from matplotlib.pylab import cross +from app.utils.geometry_utils import FaceRecord, _uedge, dot, sub, triangulate_face_cdt_shapely +from app.utils.geometry_validation_utils import classify_face_planarity_m, classify_face_degeneracy + +logger = logging.getLogger(__name__) + +def inspect_face_planarity_issues( + faces: List[FaceRecord], + unique_vertices: List[Tuple[float, float, float]], + *, + warn_planar_tol_m: float = 1e-4, + fatal_planar_tol_m: float = 1e-3, +) -> List[Dict[str, Any]]: + """ + Inspect faces for planarity issues and return problematic faces with their coordinates. + + Parameters + ---------- + faces : List[FaceRecord] + List of FaceRecord objects to inspect. + unique_vertices : List[Tuple[float, float, float]] + List of unique vertex coordinates. + warn_planar_tol_m : float + Warning tolerance for planarity deviation in meters. + fatal_planar_tol_m : float + Fatal tolerance for planarity deviation in meters. 
+
+    Returns
+    -------
+    List[Dict[str, Any]]
+        List of dictionaries containing problematic faces with their details:
+        - face: FaceRecord object
+        - status: "warning" or "fatal"
+        - max_dist_m: maximum distance from plane in meters
+        - rms_dist_m: RMS distance from plane in meters
+        - coordinates: list of (x, y, z) coordinates for the face vertices
+    """
+    problematic_faces = []
+
+    for face in faces:
+        status, max_dist_m, rms_dist_m = classify_face_planarity_m(
+            face.verts,
+            unique_vertices,
+            warn_planar_tol_m=warn_planar_tol_m,
+            fatal_planar_tol_m=fatal_planar_tol_m,
+        )
+
+        if status in ("warning", "fatal"):
+            # Get coordinates for the face vertices
+            coordinates = [unique_vertices[vid - 1] for vid in face.verts]
+
+            face_info = {
+                "face": face,
+                "status": status,
+                "max_dist_m": max_dist_m,
+                "rms_dist_m": rms_dist_m,
+                "coordinates": coordinates,
+            }
+            problematic_faces.append(face_info)
+
+    logger.info(f"Found {len(problematic_faces)} faces with planarity issues")
+    return problematic_faces
+
+def detect_boundary_edges(
+    faces: List["FaceRecord"],
+    unique_vertices: List[Tuple[float, float, float]],
+) -> List[Dict[str, Any]]:
+    """
+    Detect boundary / open edges in a face set.
+
+    A boundary edge is an undirected edge that is connected to exactly one face.
+
+    Parameters
+    ----------
+    faces : list[FaceRecord]
+        Face list. Each face must provide:
+        - fid : int
+        - verts : list[int]
+            Ordered 1-based vertex ids of the face loop.
+    unique_vertices : list[tuple[float, float, float]]
+        List of unique vertex coordinates in meters, 0-based indexed.
+
+    Returns
+    -------
+    list[dict]
+        One entry per boundary edge with:
+        - "edge": tuple[tuple[float, float, float], tuple[float, float, float]]
+        - "face_fids": list[int]
+
+    Notes
+    -----
+    - The returned edge is undirected, so ((x1,y1,z1), (x2,y2,z2)) and ((x2,y2,z2), (x1,y1,z1)) are treated as the same edge.
+    - For a valid closed watertight manifold, this function should return an empty list.
+    """
+    edge_to_faces: Dict[Tuple[int, int], List[int]] = defaultdict(list)
+
+    for f in faces:
+        n = len(f.verts)
+        if n < 2:
+            continue
+
+        for i in range(n):
+            a = f.verts[i]
+            b = f.verts[(i + 1) % n]
+            edge_to_faces[_uedge(a, b)].append(f.fid)
+
+    boundary_edges: List[Dict[str, Any]] = []
+    for edge, face_fids in edge_to_faces.items():
+        if len(face_fids) == 1:
+            a, b = edge
+            coord_a = unique_vertices[a - 1]
+            coord_b = unique_vertices[b - 1]
+            boundary_edges.append({
+                "edge": (coord_a, coord_b),
+                "face_fids": face_fids[:],
+            })
+
+    return boundary_edges
+
+def detect_degenerate_faces(
+    faces: List["FaceRecord"],
+    unique_vertices: List[Tuple[float, float, float]],
+    *,
+    fatal_area2_tol: float = 1e-16,
+) -> List[Dict[str, Any]]:
+    """
+    Detect degenerate faces in a face set.
+
+    A degenerate face is one classified as "fatal" by classify_face_degeneracy.
+
+    Parameters
+    ----------
+    faces : list[FaceRecord]
+        Face list.
+    unique_vertices : list[tuple[float, float, float]]
+        List of unique vertex coordinates in meters, 0-based indexed.
+    fatal_area2_tol : float, optional
+        Tolerance for fatal degeneracy (area squared proxy).
+
+    Returns
+    -------
+    list[dict]
+        One entry per degenerate face:
+        - "fid": int
+        - "verts": list[int]
+        - "status": str (currently always "fatal")
+        - "area2": float (area squared proxy)
+        - "fatal_tol": float
+
+    Notes
+    -----
+    - Uses classify_face_degeneracy for consistency with repair functions.
+ """ + degenerate_faces: List[Dict[str, Any]] = [] + + for f in faces: + status, area2 = classify_face_degeneracy( + f.verts, + unique_vertices, + fatal_area_tol=fatal_area2_tol, + ) + + if status == "fatal": + degenerate_faces.append({ + "fid": f.fid, + "verts": f.verts[:], + "status": status, + "area2": area2, + "fatal_tol": fatal_area2_tol, + }) + + return degenerate_faces + +def polygon_area_3d(coords: List[Tuple[float, float, float]]) -> float: + """ + Compute the area of a 3D polygon assuming it is planar. + + Uses the shoelace formula after projecting to the best-fit plane. + """ + if len(coords) < 3: + return 0.0 + + # Find the normal to determine projection plane + # Use first three points + a, b, c = coords[0], coords[1], coords[2] + normal = cross(sub(b, a), sub(c, a)) + normal_len = math.sqrt(dot(normal, normal)) + if normal_len < 1e-12: + # Degenerate triangle + return 0.0 + normal = (normal[0] / normal_len, normal[1] / normal_len, normal[2] / normal_len) + + # Project to plane: choose the axis with largest normal component + abs_normal = (abs(normal[0]), abs(normal[1]), abs(normal[2])) + if abs_normal[0] >= abs_normal[1] and abs_normal[0] >= abs_normal[2]: + # Project to YZ plane + proj = lambda p: (p[1], p[2]) + elif abs_normal[1] >= abs_normal[2]: + # Project to XZ plane + proj = lambda p: (p[0], p[2]) + else: + # Project to XY plane + proj = lambda p: (p[0], p[1]) + + # Shoelace + proj_coords = [proj(p) for p in coords] + n = len(proj_coords) + area = 0.0 + for i in range(n): + x1, y1 = proj_coords[i] + x2, y2 = proj_coords[(i + 1) % n] + area += x1 * y2 - x2 * y1 + return abs(area) / 2.0 + +def detect_possible_holes_from_faces( + faces: List["FaceRecord"], + unique_vertices: List[Tuple[float, float, float]], +) -> List[Dict[str, Any]]: + """ + Detect possible holes in a model using only face topology. + + A "possible hole" here means a closed loop of boundary edges that is + supported by more than one adjacent face overall. 
def detect_possible_holes_from_faces(
    faces: List["FaceRecord"],
    unique_vertices: List[Tuple[float, float, float]],
) -> List[Dict[str, Any]]:
    """
    Detect possible holes in a model from face topology alone.

    Boundary edges (edges used by exactly one face) are collected into a
    graph; each connected component that forms a simple cycle (every
    vertex has exactly two boundary neighbors) is a candidate hole loop.
    Loops bounded by a single face are discarded — they are just that
    face's open perimeter, not a hole in the surface.

    Parameters
    ----------
    faces : list[FaceRecord]
        Faces providing ordered vertex loops in ``verts``.
    unique_vertices : list[tuple[float, float, float]]
        Unique vertex coordinates in meters, ids are 1-based.

    Returns
    -------
    list[dict]
        One entry per detected loop with keys "loop_index",
        "vertex_loop", "edge_loop", "num_edges", "adjacent_face_fids".

    Notes
    -----
    - Topology-based only: every reported loop is an open boundary, but
      not necessarily a real geometric hole.
    """
    # Map each undirected edge to the ids of the faces that use it.
    edge_faces: Dict[Tuple[int, int], List[int]] = defaultdict(list)
    for face in faces:
        loop = face.verts
        if len(loop) < 2:
            continue
        for a, b in zip(loop, loop[1:] + loop[:1]):
            edge_faces[_uedge(a, b)].append(face.fid)

    # An edge used by exactly one face lies on an open boundary.
    boundary_edges = {edge for edge, fids in edge_faces.items() if len(fids) == 1}
    if not boundary_edges:
        return []

    # Vertex adjacency restricted to boundary edges.
    neighbors: Dict[int, set] = defaultdict(set)
    for a, b in boundary_edges:
        neighbors[a].add(b)
        neighbors[b].add(a)

    # Connected components of the boundary graph (iterative DFS).
    seen: set = set()
    components: List[set] = []
    for root in neighbors:
        if root in seen:
            continue
        component: set = set()
        pending = [root]
        while pending:
            node = pending.pop()
            if node in seen:
                continue
            seen.add(node)
            component.add(node)
            pending.extend(neighbors[node] - seen)
        components.append(component)

    loops: List[Dict[str, Any]] = []
    for component in components:
        # Only simple cycles qualify: every vertex has exactly two
        # boundary neighbors.
        if not all(len(neighbors[v]) == 2 for v in component):
            continue

        cycle_edges = [e for e in boundary_edges if e[0] in component and e[1] in component]
        adjacent_face_fids = sorted({fid for e in cycle_edges for fid in edge_faces[e]})

        # A loop contributed by a single face is an open face perimeter,
        # not a hole candidate.
        if len(adjacent_face_fids) < 2:
            continue

        # Walk the cycle starting from its smallest vertex id.
        start = min(component)
        vertex_loop = [unique_vertices[start - 1]]
        edge_loop: List[Tuple[Tuple[float, float, float], Tuple[float, float, float]]] = []
        prev_v, cur_v = start, next(iter(neighbors[start]))
        while cur_v != start:
            vertex_loop.append(unique_vertices[cur_v - 1])
            edge_loop.append((unique_vertices[prev_v - 1], unique_vertices[cur_v - 1]))
            step = next(n for n in neighbors[cur_v] if n != prev_v)
            prev_v, cur_v = cur_v, step
        # Close the loop back to the start vertex.
        edge_loop.append((unique_vertices[prev_v - 1], unique_vertices[cur_v - 1]))

        loops.append({
            "loop_index": len(loops),
            "vertex_loop": vertex_loop,
            "edge_loop": edge_loop,
            "num_edges": len(edge_loop),
            "adjacent_face_fids": adjacent_face_fids,
        })

    return loops
+ """ + small_faces: List[Dict[str, Any]] = [] + + for f in faces: + area = polygon_area_3d(f.verts, vertices) + if area < area_threshold_m2: + small_faces.append({ + "fid": f.fid, + "verts": f.verts[:], + "area_m2": area, + "threshold_m2": area_threshold_m2, + }) + + return small_faces + +def detect_t_junctions_from_facerecords_global_plc( + faces: "List[FaceRecord]", + points: List[Tuple[float, float, float]], + *, + tol: float = 1e-8, + max_reports: int = 2000, +) -> List[Dict[str, Any]]: + """ + PLC-grade, global T-junction detection (FaceRecord-only). + A "T-junction" here means: + - A FACE uses an edge (u,v) on its boundary + - There exists some vertex w that lies on segment u-v (within tol, interior) + - AND w is NOT a vertex of that face + This catches the exact condition that leads to Gmsh PLC errors + like "segment and facet intersect at point". + Returns dicts like: + { + "edge": (u, v), + "split_vertex": w, + "t_param": t, + "edge_face_fids": [ ...faces that use (u,v)... ], + "culprit_face_fid": , + "v_face_fids": [ ...faces that contain w... ], + } + """ + + def uedge(i, j): + return (i, j) if i < j else (j, i) + + def point_on_segment_scale_correct(P, A, B, tol_): + """ + Returns (on_segment, t) where t is param along A->B. 
+ Uses scale-correct distance-to-line check: + |AB x AP|^2 <= tol^2 * |AB|^2 + """ + AB = sub(B, A) + AP = sub(P, A) + ab2 = dot(AB, AB) + if ab2 <= 0.0: + return (False, 0.0) + + cr = cross(AB, AP) + if dot(cr, cr) > (tol_ * tol_) * ab2: + return (False, 0.0) + + t = dot(AP, AB) / ab2 + if not (-tol_ <= t <= 1.0 + tol_): + return (False, t) + + return (True, t) + + # Build: edge -> face indices that use the edge on their boundary + edge_to_face_idxs = defaultdict(list) + vert_to_face_idxs = defaultdict(list) + edge_set = set() + + for fi, face in enumerate(faces): + poly = face.verts + n = len(poly) + for v in poly: + vert_to_face_idxs[v].append(fi) + for i in range(n): + a = poly[i] + b = poly[(i + 1) % n] + e = uedge(a, b) + edge_set.add(e) + edge_to_face_idxs[e].append(fi) + + all_verts = list(range(1, len(points) + 1)) + + reports = [] + for (u, v) in edge_set: + A = points[u - 1] + B = points[v - 1] + + # faces that *actually* use this long edge + face_idxs_using_edge = edge_to_face_idxs[uedge(u, v)] + if not face_idxs_using_edge: + continue + + for w in all_verts: + if w == u or w == v: + continue + + P = points[w - 1] + ok, t = point_on_segment_scale_correct(P, A, B, tol) + if not ok: + continue + + # interior only + if not (tol < t < 1.0 - tol): + continue + + # If ANY face uses (u,v) but doesn't include w, that's a PLC T-junction + culprit_fid = None + for fi in face_idxs_using_edge: + if w not in faces[fi].verts: + culprit_fid = faces[fi].fid + break + + if culprit_fid is None: + # all faces that use (u,v) already include w (rare; usually (u,v) would disappear) + continue + + edge_face_fids = [faces[fi].fid for fi in face_idxs_using_edge] + v_face_fids = [faces[fi].fid for fi in vert_to_face_idxs.get(w, [])] + + if len(v_face_fids) > 0 : + reports.append({ + "edge": (u, v), + "split_vertex": w, + "t_param": t, + "edge_face_fids": edge_face_fids, + "culprit_face_fid": culprit_fid, + "v_face_fids": v_face_fids, + }) + + if len(reports) >= max_reports: + 
# -----------------------------
# PLC check: segment-facet intersections using CDT triangulation
# -----------------------------
def detect_segment_facet_intersections_cdt(
    faces,  # List[FaceRecord]
    points: List[Tuple[float,float,float]],  # unique_vertices
    *,
    warn_planar_tol_m=1e-4,
    fatal_planar_tol_m=1e-3,
    eps=1e-10,
    bbox_pad=1e-9,
    max_reports=2000,
    skip_warped_faces=True,
    logger=None,
) -> List[Dict[str,Any]]:
    """
    Reports intersections where a boundary segment (edge) intersects a triangle
    from a non-incident face.

    Parameters
    ----------
    faces : list[FaceRecord]
        List of FaceRecord objects (each must provide `.fid` and `.verts`,
        with `.verts` being an ordered 1-based vertex id loop).
    points : list[tuple[float, float, float]]
        Global unique vertex coordinates (0-based list; vertex id i -> points[i-1]).
    warn_planar_tol_m : float, optional
        Warning tolerance for planarity checks in meters (default 1e-4).
    fatal_planar_tol_m : float, optional
        Fatal tolerance for planarity checks in meters (default 1e-3).
    eps : float, optional
        Numerical epsilon used by the segment-triangle intersection test.
    bbox_pad : float, optional
        Padding applied to AABB overlap checks to avoid missing near-boundary hits.
    max_reports : int, optional
        Maximum number of intersection reports to collect before returning.
    skip_warped_faces : bool, optional
        If True, faces flagged as non-planar ("fatal") are skipped from
        triangulation and intersection tests.
    logger : logging.Logger or None, optional
        Logger used for informational messages.

    Returns
    -------
    list[dict]
        A list of intersection report dictionaries.  Each report contains keys
        such as:
        - "edge": (u, v) -- vertex ids defining the tested segment
        - "edge_fids": list[int] -- face ids adjacent to that edge
        - "facet_fid": int -- the face id of the triangle that was hit
        - "facet_tri": (a, b, c) -- triangle vertex ids
        - "point": (x, y, z) -- intersection point coordinates
        - "t_param", "bary_u", "bary_v", "bary_w" -- intersection params
        - "hit_type" -- classification string (vertex/edge/interior touch)
        - "facet_planarity_flag" -- planarity status of the triangle's originating face

    Notes:
    - Uses CDT triangulation internally for faces with n>3 (good for concave).
    - If skip_warped_faces=True and planarity_fn provided:
      faces with pstat=="fatal" are not triangulated (skipped).
    - Does NOT modify your exported topology.
    - NOTE(review): ``classify_face_planarity_m`` and
      ``triangulate_face_cdt_shapely`` are not in this module's visible
      imports — confirm they are imported at file top.
    """

    # 1) Triangle soup: triangulate every (sufficiently planar) face so the
    #    intersection test below runs against triangles only.
    tri_list = []  # each: {fid, tri=(a,b,c), aabb, planar_flag}
    skipped_nonplanar = 0
    tri_fail = 0

    for f in faces:
        poly = f.verts
        if len(poly) < 3:
            continue

        # Planarity is only meaningful for polygons with more than 3 vertices;
        # a triangle is planar by construction.
        planar_flag = None
        if len(poly) > 3:
            pstat, pmax_m, prms_m = classify_face_planarity_m(
                poly, points,
                warn_planar_tol_m=warn_planar_tol_m,
                fatal_planar_tol_m=fatal_planar_tol_m,
            )
            planar_flag = pstat
            if skip_warped_faces and pstat == "fatal":
                skipped_nonplanar += 1
                continue

        # triangulate (CDT handles concave polygons)
        if len(poly) == 3:
            tris = [poly[:]]
        else:
            tris = triangulate_face_cdt_shapely(poly, points)

        if not tris:
            tri_fail += 1
            continue

        for tri in tris:
            if len(tri) != 3:
                continue
            a,b,c = tri
            A, B, C = points[a-1], points[b-1], points[c-1]
            tri_list.append({
                "fid": f.fid,
                "tri": (a,b,c),
                "aabb": aabb_of_tri(A,B,C),  # precomputed for the broad phase
                "planar_flag": planar_flag,
            })

    if logger is not None:
        logger.info("[PLC] tri_soup=%d skipped_nonplanar_faces=%d tri_fail_faces=%d",
                    len(tri_list), skipped_nonplanar, tri_fail)

    # 2) Unique edges + incident face ids (undirected edges, smaller id first)
    edge_to_faces = defaultdict(set)
    edge_set = set()
    for f in faces:
        poly = f.verts
        n = len(poly)
        if n < 2:
            continue
        for i in range(n):
            u = poly[i]
            v = poly[(i+1) % n]
            e = (u,v) if u < v else (v,u)
            edge_set.add(e)
            edge_to_faces[e].add(f.fid)

    # 3) Precompute triangle vertex sets for incident skipping
    tri_vset = [set(t["tri"]) for t in tri_list]

    # 4) Intersections: broad phase = AABB overlap, narrow phase =
    #    Möller–Trumbore segment/triangle test.
    reports = []
    for (u,v) in edge_set:
        P0 = points[u-1]
        P1 = points[v-1]
        seg_bb = aabb_of_seg(P0,P1)

        for ti, tinfo in enumerate(tri_list):
            if not aabb_overlap(seg_bb, tinfo["aabb"], pad=bbox_pad):
                continue

            a,b,c = tinfo["tri"]

            # Skip triangles incident to this segment (share any vertex)
            if u in tri_vset[ti] or v in tri_vset[ti]:
                continue

            A, B, C = points[a-1], points[b-1], points[c-1]
            hit, t, uu, vv = segment_intersects_triangle(P0, P1, A, B, C, eps=eps)
            if not hit:
                continue

            hit_type = classify_segment_triangle_hit(
                t, uu, vv,
                t_eps=1e-9,
                bary_eps=1e-9,
            )

            # Intersection point: P0 + t * (P1 - P0)
            I = vadd(P0, vmul(sub(P1, P0), t))

            reports.append({
                "edge": (u, v),
                "edge_fids": sorted(edge_to_faces[(u, v) if u < v else (v, u)]),
                "facet_fid": tinfo["fid"],
                "facet_tri": (a, b, c),
                "point": I,
                "t_param": float(t),
                "bary_u": float(uu),
                "bary_v": float(vv),
                "bary_w": float(1.0 - uu - vv),
                "hit_type": hit_type,
                "facet_planarity_flag": tinfo["planar_flag"],
            })
            if len(reports) >= max_reports:
                return reports

    return reports
def classify_segment_triangle_hit(t, u, v, *, t_eps=1e-9, bary_eps=1e-9):
    """
    Classify a non-coplanar segment-triangle hit using segment parameter t
    and barycentric coordinates u, v, w (w = 1 - u - v).

    Parameters
    ----------
    t : float
        Parameter along the segment (0 = start, 1 = end).
    u, v : float
        Barycentric coordinates of the hit inside the triangle.
    t_eps, bary_eps : float, optional
        Tolerances for "at an endpoint" / "on a vertex or edge".

    Returns
    -------
    str
        One of:
        - endpoint_vertex_touch
        - endpoint_edge_touch
        - endpoint_face_interior_touch
        - segment_vertex_touch
        - segment_edge_intersection
        - segment_face_interior_intersection
        - unknown
    """
    w = 1.0 - u - v

    # ---- segment side: is the hit at one of the segment endpoints?
    at_start = abs(t) <= t_eps
    at_end = abs(t - 1.0) <= t_eps
    at_endpoint = at_start or at_end

    vals = [u, v, w]

    near_zero = [abs(x) <= bary_eps for x in vals]
    near_one = [abs(x - 1.0) <= bary_eps for x in vals]

    n_zero = sum(near_zero)
    n_one = sum(near_one)

    # ---- triangle side: vertex / edge / interior location of the hit
    if n_one == 1 and n_zero >= 2:
        tri_loc = "vertex"
    elif n_zero == 1 and n_one == 0:
        tri_loc = "edge"
    elif all((bary_eps < x < 1.0 - bary_eps) for x in vals):
        tri_loc = "interior"
    else:
        tri_loc = "unknown"

    # combine both classifications
    if at_endpoint:
        if tri_loc == "vertex":
            return "endpoint_vertex_touch"
        elif tri_loc == "edge":
            return "endpoint_edge_touch"
        elif tri_loc == "interior":
            return "endpoint_face_interior_touch"
        else:
            return "unknown"
    else:
        if tri_loc == "vertex":
            return "segment_vertex_touch"
        elif tri_loc == "edge":
            return "segment_edge_intersection"
        elif tri_loc == "interior":
            return "segment_face_interior_intersection"
        else:
            return "unknown"

# Segment facet intersection detection
# -----------------------------
# Basic vector ops
# -----------------------------
def vadd(a, b):
    """Component-wise 3-vector addition."""
    return (a[0] + b[0], a[1] + b[1], a[2] + b[2])

def vmul(a, s):
    """Scale 3-vector ``a`` by scalar ``s``."""
    return (a[0] * s, a[1] * s, a[2] * s)

def aabb_of_tri(A, B, C):
    """Axis-aligned bounding box of triangle ABC as (min xyz, max xyz)."""
    return (min(A[0],B[0],C[0]), min(A[1],B[1],C[1]), min(A[2],B[2],C[2]),
            max(A[0],B[0],C[0]), max(A[1],B[1],C[1]), max(A[2],B[2],C[2]))

def aabb_of_seg(A, B):
    """Axis-aligned bounding box of segment AB as (min xyz, max xyz)."""
    return (min(A[0],B[0]), min(A[1],B[1]), min(A[2],B[2]),
            max(A[0],B[0]), max(A[1],B[1]), max(A[2],B[2]))

def aabb_overlap(bb1, bb2, pad=0.0):
    """Return True when two AABBs overlap, expanded by ``pad`` on each side."""
    ax0,ay0,az0,ax1,ay1,az1 = bb1
    bx0,by0,bz0,bx1,by1,bz1 = bb2
    return not (ax1+pad < bx0 or bx1+pad < ax0 or
                ay1+pad < by0 or by1+pad < ay0 or
                az1+pad < bz0 or bz1+pad < az0)

def polygon_area_3d(
    loop_vids: List[int],
    vertices: List[Tuple[float, float, float]],
) -> float:
    """
    Compute polygon area in 3D using Newell's method.

    NOTE(review): this redefines ``polygon_area_3d`` declared earlier in
    this module (the one-argument coordinate-list variant); being later in
    the file, this two-argument version is the one bound at import time,
    and it matches the call sites in this module.  Consider renaming one
    of the two to avoid the shadowing.

    Parameters
    ----------
    loop_vids : list[int]
        Ordered 1-based vertex ids of one face.
    vertices : list[(x, y, z)]
        Global vertex list.

    Returns
    -------
    float
        Polygon area in square meters if input coordinates are in meters.
    """
    if len(loop_vids) < 3:
        return 0.0

    # Newell's method: accumulate the (unnormalized) face normal; the area
    # is half its magnitude.
    nx = ny = nz = 0.0
    n = len(loop_vids)

    for i in range(n):
        p = vertices[loop_vids[i] - 1]
        q = vertices[loop_vids[(i + 1) % n] - 1]

        nx += (p[1] - q[1]) * (p[2] + q[2])
        ny += (p[2] - q[2]) * (p[0] + q[0])
        nz += (p[0] - q[0]) * (p[1] + q[1])

    return 0.5 * math.sqrt(nx * nx + ny * ny + nz * nz)

# -----------------------------
# Segment-triangle intersection (Möller–Trumbore variant)
# Returns (hit, t, u, v) with t in [0,1] along segment
# -----------------------------
def segment_intersects_triangle(P0, P1, A, B, C, eps=1e-12):
    """
    Möller–Trumbore segment/triangle intersection.

    Returns (hit, t, u, v) with ``t`` in [0, 1] along segment P0->P1 and
    (u, v) barycentric coordinates in triangle ABC.  Parallel or
    near-coplanar configurations are treated as "no hit".
    """
    # Fix: the previous version called `cross`, which this module never
    # imports (NameError on first use); all vector math is inlined here.
    dx, dy, dz = P1[0] - P0[0], P1[1] - P0[1], P1[2] - P0[2]
    e1x, e1y, e1z = B[0] - A[0], B[1] - A[1], B[2] - A[2]
    e2x, e2y, e2z = C[0] - A[0], C[1] - A[1], C[2] - A[2]

    # H = D x E2
    hx = dy * e2z - dz * e2y
    hy = dz * e2x - dx * e2z
    hz = dx * e2y - dy * e2x
    det = e1x * hx + e1y * hy + e1z * hz

    # parallel / coplanar-ish (we treat as "no hit" here)
    if abs(det) < eps:
        return (False, None, None, None)

    inv_det = 1.0 / det
    sx, sy, sz = P0[0] - A[0], P0[1] - A[1], P0[2] - A[2]
    u = inv_det * (sx * hx + sy * hy + sz * hz)
    if u < -eps or u > 1.0 + eps:
        return (False, None, None, None)

    # Q = S x E1
    qx = sy * e1z - sz * e1y
    qy = sz * e1x - sx * e1z
    qz = sx * e1y - sy * e1x
    v = inv_det * (dx * qx + dy * qy + dz * qz)
    if v < -eps or (u + v) > 1.0 + eps:
        return (False, None, None, None)

    t = inv_det * (e2x * qx + e2y * qy + e2z * qz)
    if t < -eps or t > 1.0 + eps:
        return (False, None, None, None)

    return (True, t, u, v)
logger = logging.getLogger(__name__)

def extract_rhino_materials(rhino3dm_path: str) -> List[str]:
    """
    Extract mesh object ids carrying material names from a Rhino .3dm file.

    Reads the .3dm model and, for every mesh object, collects the
    "material_name" user string stored on the geometry, keyed by the
    object id so it can later be associated with the corresponding OBJ
    faces.

    Parameters
    ----------
    rhino3dm_path : str
        Path to the Rhino .3dm file.

    Returns
    -------
    list[str]
        Object ids (stringified) of mesh objects that carry a
        "material_name" user string.  (Fix: the previous docstring said
        "material IDs", but the returned values are the object-id keys.)
    """
    model = rhino3dm.File3dm.Read(rhino3dm_path)
    material_to_id = {}
    for obj in model.Objects:
        if isinstance(obj.Geometry, rhino3dm.Mesh):
            material_name = obj.Geometry.GetUserString("material_name")
            if material_name:
                material_to_id[f"{obj.Attributes.Id}"] = material_name

    return list(material_to_id.keys())

def parse_obj_file(obj_file: str) -> Tuple[List[Tuple[float, float, float]], List[List[int]], List[str], List[str]]:
    """
    Parse an OBJ file to extract vertices, faces, groups, and materials.

    Parameters
    ----------
    obj_file : str
        Path to the OBJ file.

    Returns
    -------
    tuple
        (vertices, raw_faces, face_groups, face_group_materials)
        - vertices: list of (x, y, z) tuples, axis-converted (see Notes)
        - raw_faces: list of 1-based vertex index lists, one per face
        - face_groups: group name for each face
        - face_group_materials: material name for each face

    Notes
    -----
    - Vertices are converted from SketchUp's Y-up convention to the
      right-handed target as (x, -z, y).
    - NOTE(review): ``created_by_sketchup`` is detected from the header
      comment but never used — the axis conversion is applied to every
      file unconditionally.  Confirm whether the flag should gate it.
    """
    vertices = []
    raw_faces = []
    face_groups = []
    face_group_materials = []
    current_group = "default"
    current_group_material = "default"

    with open(obj_file, "r") as f:
        created_by_sketchup = False
        for raw in f:
            line = raw.strip()
            if not line:
                continue
            if (line.startswith("#") and "SketchUp" in line):
                created_by_sketchup = True
            if line.startswith("v "):
                parts = line.split()
                x, y, z = map(float, parts[1:4])
                # SketchUp (Y-up, left-handed) -> Gmsh (right-handed), flip Z
                vertices.append((x, -z, y))
            elif line.startswith("g "):
                parts = line.split()[1:]
                # Drop auto-generated "Mesh*"/"Model*" tokens from group names.
                parts = [p for p in parts if not p.startswith("Mesh") and not p.startswith("Model")]
                current_group = parts[0] if parts else "default"
            elif line.startswith("usemtl "):
                parts = line.split()[1:]
                # Lazy %-style logging args (was an eagerly formatted f-string).
                logger.info("Material directive: %s", parts[0])
                current_group_material = parts[0] if parts else "default"
            elif line.startswith("f "):
                parts = line.split()[1:]
                # "f v/vt/vn ..." -> keep only the vertex index of each corner.
                idxs = [int(p.split("/")[0]) for p in parts]
                raw_faces.append(idxs)
                face_groups.append(current_group)
                face_group_materials.append(current_group_material)

    return vertices, raw_faces, face_groups, face_group_materials
def deduplicate_vertices(vertices: List[Tuple[float, float, float]], tol: float = 1e-2) -> Tuple[List[Tuple[float, float, float]], Dict[int, int]]:
    """
    Deduplicate vertices within a per-axis tolerance.

    Parameters
    ----------
    vertices : list[tuple[float, float, float]]
        Vertex coordinates; 1-based ids are implied by position.
    tol : float
        Two vertices are duplicates when every coordinate differs by
        less than ``tol``.

    Returns
    -------
    tuple
        (unique_vertices, orig_to_unique)
        - unique_vertices: unique coordinates in first-seen order
        - orig_to_unique: 1-based original index -> 1-based unique index

    Notes
    -----
    - O(n^2) pairwise scan.  A spatial hash would scale better for very
      large inputs, but could change which duplicate becomes the
      representative, so the simple first-match scan is kept.
    """
    unique_vertices = []
    orig_to_unique = {}

    for i, v in enumerate(vertices, start=1):
        found = None

        for j, uv in enumerate(unique_vertices, start=1):
            if (abs(uv[0] - v[0]) < tol and
                    abs(uv[1] - v[1]) < tol and
                    abs(uv[2] - v[2]) < tol):
                found = j
                break
        if found is None:
            unique_vertices.append(v)
            orig_to_unique[i] = len(unique_vertices)
        else:
            orig_to_unique[i] = found

    logger.info("03. Deduplicated vertices:")
    logger.info("    unique_vertices: %d", len(unique_vertices))

    return unique_vertices, orig_to_unique

def clean_face_loop(verts: List[int]) -> List[int]:
    """
    Remove redundant vertices from a face loop.

    Drops:
      - consecutive duplicates (..., a, a, ...)
      - a closing duplicate (first == last)

    Keeps polygon order intact.
    """

    if not verts:
        return verts

    cleaned = [verts[0]]

    # Remove consecutive duplicates
    for v in verts[1:]:
        if v != cleaned[-1]:
            cleaned.append(v)

    # Remove closing duplicate
    if len(cleaned) >= 2 and cleaned[0] == cleaned[-1]:
        cleaned.pop()

    return cleaned

def process_and_instantiate_faces(
    raw_faces: List[List[int]],
    face_groups: List[str],
    face_group_materials: List[str],
    material_id_array: List[str],
    orig_to_unique: Optional[Dict[int, int]] = None,
) -> List["FaceRecord"]:
    """
    Map raw face indices to unique vertex ids and build FaceRecord objects.

    Parameters
    ----------
    raw_faces : List[List[int]]
        Raw face vertex index loops (1-based).
    face_groups : List[str]
        Group name for each face.
    face_group_materials : List[str]
        Material name for each face.
    material_id_array : List[str]
        Material id for each face.
    orig_to_unique : Dict[int, int], optional
        Mapping from original vertex indices to unique vertex indices.
        If omitted, original indices are assumed to already match the
        unique indices.

    Returns
    -------
    List[FaceRecord]
        One FaceRecord per raw face, in input order.

    Notes
    -----
    - ``zip`` silently truncates to the shortest input; callers must pass
      equally long per-face lists.
    """

    faces = []
    face_id = 0

    if orig_to_unique is None:
        # Identity mapping over the highest vertex id present.
        max_vertex_id = max((vid for face in raw_faces for vid in face), default=0)
        orig_to_unique = {i: i for i in range(1, max_vertex_id + 1)}

    for raw_face, grp, grp_mat, mat in zip(raw_faces, face_groups, face_group_materials, material_id_array):
        mapped = [orig_to_unique[i] for i in raw_face]
        mapped = clean_face_loop(mapped)

        # Bug fix: the previous guard was `if not sub_faces:` on the literal
        # `[mapped]`, which is always truthy, so this error log could never
        # fire.  Test the cleaned loop itself instead.  The record is still
        # appended, preserving the previous output behavior.
        if not mapped:
            logger.error("[FACE] face_id=%d produced 0 sub_faces (mapped=%s)", face_id, mapped)

        faces.append(FaceRecord(fid=face_id, verts=mapped, group=grp, group_material=grp_mat, material=mat))
        face_id += 1
    return faces
def remove_degenerate_faces(
    faces: List["FaceRecord"],
    unique_vertices: List[Tuple[float, float, float]],
    *,
    fatal_area_tol: float = 1e-12,
    logger: Optional[logging.Logger] = None,
) -> Tuple[List["FaceRecord"], int]:
    """
    Remove fatally degenerate faces from a list of FaceRecord objects.

    Parameters
    ----------
    faces : List[FaceRecord]
        Faces to filter.
    unique_vertices : List[Tuple[float, float, float]]
        Unique vertex coordinates (1-based ids in face loops).
    fatal_area_tol : float
        Squared-area tolerance below which a face counts as fatally
        degenerate.
    logger : logging.Logger, optional
        Logger instance; defaults to this module's logger.

    Returns
    -------
    Tuple[List[FaceRecord], int]
        (clean_faces, fatal_removed) — the surviving faces and the number
        of fatally degenerate faces dropped.  (The previous docstring and
        annotation advertised a third "warn" counter that was never
        computed or returned.)
    """
    if logger is None:
        logger = logging.getLogger(__name__)

    faces_clean = []
    fatal_removed = 0

    for f in faces:
        # Bug fix: classify_face_degeneracy returns a (status, area2) pair
        # (see its other call sites in this codebase); the previous code
        # compared the whole tuple to "fatal", so no face was ever removed.
        status, _area2 = classify_face_degeneracy(
            f.verts,
            unique_vertices,
            fatal_area_tol=fatal_area_tol,
        )

        if status == "fatal":
            fatal_removed += 1
            continue

        faces_clean.append(f)

    return faces_clean, fatal_removed

def sort_vertices_deterministically(
    unique_vertices: List[Tuple[float, float, float]],
    faces: List["FaceRecord"]
) -> Tuple[List[Tuple[float, float, float]], List["FaceRecord"]]:
    """
    Sort vertices deterministically and remap face indices to match.

    Vertices are ordered by their coordinates rounded to 8 decimals so the
    ordering is reproducible across runs regardless of input order.

    Parameters
    ----------
    unique_vertices : List[Tuple[float, float, float]]
        Unique vertex coordinates (1-based ids in face loops).
    faces : List[FaceRecord]
        Faces whose ``verts`` reference the old ordering; updated in place.

    Returns
    -------
    Tuple[List[Tuple[float, float, float]], List[FaceRecord]]
        (sorted_vertices, remapped_faces)
    """
    ordered = sorted(
        enumerate(unique_vertices, start=1),
        key=lambda kv: (
            round(kv[1][0], 8),
            round(kv[1][1], 8),
            round(kv[1][2], 8),
        ),
    )
    # old 1-based id -> new 1-based id after sorting
    index_map = {old: new for new, (old, _) in enumerate(ordered, start=1)}
    sorted_vertices = [v for _, v in ordered]

    # Remap all face vertex indices (in place).
    for face in faces:
        face.verts = [index_map[i] for i in face.verts]

    return sorted_vertices, faces
TJUNCs", max_iters) + return faces, changed_any + +def fix_t_junctions_by_edge_splitting_facerecords( + faces: List[FaceRecord], + tjunc_reports: List[Dict[str, Any]], + points: List[Tuple[float, float, float]], + *, + tol: float = 1e-8, + max_passes: int = 10, + logger: Optional[logging.Logger] = None, +) -> Tuple[List[FaceRecord], bool]: + """ + Fix PLC T-junctions WITHOUT triangulation by splitting polygon edges. + faces: List[FaceRecord] (modified in place; returned too) + tjunc_reports: output of detect_t_junctions_from_facerecords_local_plc or global_plc + each report has: + - edge=(u,v) + - split_vertex=w + - t_param=t + - edge_face_fids=[...] + - culprit_face_fid= (if you use plc detector) + If your report doesn't include culprit_face_fid, we'll apply to all edge_face_fids. + points: unique_vertices list (1-based id -> points[id-1]) + tol: used to reject near-endpoint inserts + max_passes: repeat because inserting vertices may create new detectable cases + """ + + # Build fid -> face object map + fid_to_face = {f.fid: f for f in faces} + + def uedge(a, b): + return (a, b) if a < b else (b, a) + + # Param along directed edge (u->v) + def edge_param_t(u, v, w): + Ax, Ay, Az = points[u - 1] + Bx, By, Bz = points[v - 1] + Px, Py, Pz = points[w - 1] + ABx, ABy, ABz = (Bx - Ax, By - Ay, Bz - Az) + APx, APy, APz = (Px - Ax, Py - Ay, Pz - Az) + ab2 = ABx*ABx + ABy*ABy + ABz*ABz + if ab2 <= 0.0: + return 0.0 + return (APx*ABx + APy*ABy + APz*ABz) / ab2 + + changed_any = False + + for pass_i in range(max_passes): + # Collect insert operations: + # (fid, undirected_edge) -> list of (u,v,w,t) + ops = defaultdict(list) + + for r in tjunc_reports: + u, v = r["edge"] + w = r["split_vertex"] + + # choose target faces: + if "culprit_face_fid" in r and r["culprit_face_fid"] is not None: + target_fids = [r["culprit_face_fid"]] + else: + target_fids = r.get("edge_face_fids") or r.get("edge_fids") or [] + + for fid in target_fids: + if fid not in fid_to_face: + continue + # 
store (directed u,v) as in report; we'll handle reverse in insertion + t = edge_param_t(u, v, w) + if not (tol < t < 1.0 - tol): + continue + ops[(fid, uedge(u, v))].append((u, v, w, t)) + + if not ops: + if logger: + logger.info("[TJUNC FIX] pass=%d: no ops; done", pass_i) + break + + pass_changed = 0 + + for (fid, _e), items in ops.items(): + face = fid_to_face[fid] + poly = face.verts + + # If multiple w on the same edge, insert in correct order + # Need consistent direction along the edge as it appears in the polygon + # We'll just sort by t from the report direction; insertion function handles either direction. + items_sorted = sorted(items, key=lambda x: x[3]) + + for (u, v, w, _t) in items_sorted: + new_poly, did = _insert_vertex_on_edge_in_poly(poly, u, v, w) + if did: + poly = new_poly + pass_changed += 1 + + face.verts = poly + + if logger: + logger.info("[TJUNC FIX] pass=%d: faces_changed=%d", pass_i, pass_changed) + + if pass_changed == 0: + break + + changed_any = True + + # Re-run detector each pass so we only apply what still exists + # (You provide your detector; call it outside if you prefer) + # Here we stop early and let caller re-run if desired. + # If you want automatic multi-pass with detection, see the wrapper below. + break + + return faces, changed_any + +def _insert_vertex_on_edge_in_poly(poly, u, v, w): + """ + poly: list of vertex ids (cycle, no repeated start/end) + Insert w between u and v if edge (u->v) or (v->u) appears. + Returns (new_poly, changed_bool). + Does NOT insert if w is already in poly. 
+ """ + if w in poly: + return poly, False + + n = len(poly) + if n < 2: + return poly, False + + # Look for directed edge u->v in the cyclic list + for i in range(n): + a = poly[i] + b = poly[(i + 1) % n] + if a == u and b == v: + # Insert w after a + return poly[: i + 1] + [w] + poly[i + 1 :], True + + # Look for directed edge v->u (reverse direction) + for i in range(n): + a = poly[i] + b = poly[(i + 1) % n] + if a == v and b == u: + return poly[: i + 1] + [w] + poly[i + 1 :], True + + return poly, False + +def flip_all_faces_if_majority_inward( + faces: "List[FaceRecord]", + unique_vertices: List[Tuple[float,float,float]], + room_center: Tuple[float,float,float], + logger=None, +) -> bool: + """ + Uses Newell normal + centroid->room_center dot test. + If majority of faces look inward, flip all faces. + Returns True if flipped all, else False. + """ + + inward = 0 + outward = 0 + + for f in faces: + pts = [unique_vertices[i - 1] for i in f.verts] + cx = sum(p[0] for p in pts) / len(pts) + cy = sum(p[1] for p in pts) / len(pts) + cz = sum(p[2] for p in pts) / len(pts) + + nx, ny, nz = newell_normal_from_points(f.verts, unique_vertices) + to_center = (room_center[0] - cx, room_center[1] - cy, room_center[2] - cz) + dotp = nx*to_center[0] + ny*to_center[1] + nz*to_center[2] + + # Your rule: outward if dotp < 0 + if dotp < 0: + outward += 1 + else: + inward += 1 + + if inward > outward: + for f in faces: + f.verts.reverse() + if logger: + logger.info("[ORIENT] flipped ALL faces (majority inward: inward=%d outward=%d)", inward, outward) + return True + else: + if logger: + logger.info("[ORIENT] kept orientation (majority outward: inward=%d outward=%d)", inward, outward) + return False + +def trim_component_against_facet_plane( + faces: List["FaceRecord"], + points: List[Tuple[float, float, float]], + *, + clipping_facet_fid: int, + seed_face_fids: List[int], + room_center: Tuple[float, float, float], + tol: float = 1e-9, + logger=None, +) -> Tuple[List["FaceRecord"], 
List[Tuple[float, float, float]], bool, Dict[str, Any]]: + """ + Trim a connected face component against the plane of a clipping facet. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + points : list[(x, y, z)] + Global mutable point list. New intersection vertices may be appended. + clipping_facet_fid : int + Face id whose plane defines the clipping boundary. + seed_face_fids : list[int] + One or more faces belonging to the protruding component that should be clipped. + For PLC reports, ``edge_fids`` is usually a good seed. + room_center : tuple[float, float, float] + A point known to lie on the side of the clipping plane that should be kept. + In CHORAS this is usually the room center. + tol : float + Numerical tolerance for clipping and vertex reuse. + logger : logging.Logger | None + Optional logger. + + Returns + ------- + tuple + (updated_faces, updated_points, changed, diagnostics) + + diagnostics contains: + - status + - clipping_facet_fid + - seed_face_fids + - component_face_fids + - keep_sign + - faces_removed + - faces_clipped + - new_vertices_added + + Purpose + ------- + This helper performs a best-effort half-space clip of a selected connected + component. It is useful when a protruding object crosses a room boundary face and + the desired behavior is to keep only the part on the room side of that face plane. + + Limitation + ---------- + The clipping facet itself is not split or reconstructed. Therefore, this helper is + best used as a controlled trimming utility for clearly unwanted outside geometry. 
+ """ + diag: Dict[str, Any] = { + "status": "noop", + "clipping_facet_fid": clipping_facet_fid, + "seed_face_fids": list(seed_face_fids), + "component_face_fids": [], + "keep_sign": None, + "faces_removed": 0, + "faces_clipped": 0, + "new_vertices_added": 0, + } + + clipping_face = _find_face_by_fid(faces, clipping_facet_fid) + if clipping_face is None: + diag["status"] = "clipping_face_not_found" + return faces, points, False, diag + + plane = _plane_from_face(clipping_face, points) + if plane is None: + diag["status"] = "invalid_clipping_plane" + return faces, points, False, diag + + plane_point, plane_normal = plane + room_sd = _signed_distance_to_plane(room_center, plane_point, plane_normal) + keep_sign = 1.0 if room_sd >= 0.0 else -1.0 + diag["keep_sign"] = keep_sign + + component_face_fids = collect_face_component_from_seed_faces( + faces, + seed_face_fids, + excluded_face_fids=[clipping_facet_fid], + ) + diag["component_face_fids"] = component_face_fids + + if not component_face_fids: + diag["status"] = "empty_component" + return faces, points, False, diag + + start_n_points = len(points) + changed = False + updated_faces: List[FaceRecord] = [] + + for face in faces: + if face.fid not in component_face_fids: + updated_faces.append(face) + continue + + clipped_loop = _clip_face_loop_against_plane( + face.verts, + points, + plane_point, + plane_normal, + keep_sign, + tol=tol, + ) + + if len(clipped_loop) < 3: + diag["faces_removed"] += 1 + changed = True + continue + + if clipped_loop != clean_face_loop(face.verts): + diag["faces_clipped"] += 1 + changed = True + + status, _area2 = classify_face_degeneracy( + clipped_loop, + points, + fatal_area_tol=1e-18, + ) + if status == "fatal": + diag["faces_removed"] += 1 + changed = True + continue + + updated_faces.append( + FaceRecord( + fid=face.fid, + verts=clipped_loop, + group=getattr(face, "group", "default"), + group_material=getattr(face, "group_material", "default"), + material=getattr(face, "material", 
"unknown"), + ) + ) + + diag["new_vertices_added"] = len(points) - start_n_points + diag["status"] = "ok" if changed else "no_effect" + + if logger is not None: + logger.info( + "[TRIM] facet_fid=%d component_faces=%d clipped=%d removed=%d new_vertices=%d status=%s", + clipping_facet_fid, + len(component_face_fids), + diag["faces_clipped"], + diag["faces_removed"], + diag["new_vertices_added"], + diag["status"], + ) + + return updated_faces, points, changed, diag + +def trim_component_from_segment_face_intersection_report( + faces: List["FaceRecord"], + points: List[Tuple[float, float, float]], + plc_report: Dict[str, Any], + room_center: Tuple[float, float, float], + *, + tol: float = 1e-9, + logger=None, +) -> Tuple[List["FaceRecord"], List[Tuple[float, float, float]], bool, Dict[str, Any]]: + """ + Convenience wrapper that trims a protruding component using one PLC report. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + points : list[(x, y, z)] + Global mutable point list. + plc_report : dict + One PLC report generated by ``detect_segment_facet_intersections_cdt``. + The report must contain ``facet_fid`` and ``edge_fids``. + room_center : tuple[float, float, float] + Reference point used to choose the kept half-space. + tol : float + Numerical tolerance for clipping. + logger : logging.Logger | None + Optional logger. + + Returns + ------- + tuple + (updated_faces, updated_points, changed, diagnostics) + + Purpose + ------- + This wrapper is especially useful for PLC reports of type + ``segment_face_interior_intersection`` where the edge's incident faces belong to a + protruding component that should be clipped back to the clipping facet plane. 
+ """ + clipping_facet_fid = plc_report.get("facet_fid") + seed_face_fids = list(plc_report.get("edge_fids") or []) + + if clipping_facet_fid is None or not seed_face_fids: + diag = { + "status": "invalid_plc_report", + "clipping_facet_fid": clipping_facet_fid, + "seed_face_fids": seed_face_fids, + } + return faces, points, False, diag + + return trim_component_against_facet_plane( + faces, + points, + clipping_facet_fid=clipping_facet_fid, + seed_face_fids=seed_face_fids, + room_center=room_center, + tol=tol, + logger=logger, + ) + +def compact_vertices_and_remove_unused( + faces: List["FaceRecord"], + points: List[Tuple[float, float, float]], +) -> Tuple[List["FaceRecord"], List[Tuple[float, float, float]], bool, Dict[str, Any]]: + """ + Remove unused vertices after clipping/repair and remap face indices. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + points : list[(x, y, z)] + Global vertex list. + + Returns + ------- + tuple + (updated_faces, updated_points, changed, diagnostics) + + Purpose + ------- + Local clipping repairs may leave vertices that are no longer referenced by any face. + This helper removes such dangling vertices and remaps all face vertex indices so the + geometry remains compact and consistent. 
+ """ + used = sorted({vid for f in faces for vid in f.verts}) + mapping = {old_vid: new_vid for new_vid, old_vid in enumerate(used, start=1)} + new_points = [points[old_vid - 1] for old_vid in used] + changed = len(used) != len(points) or any(old_vid != mapping[old_vid] for old_vid in used) + + new_faces: List[FaceRecord] = [] + for f in faces: + new_faces.append( + FaceRecord( + fid=f.fid, + verts=[mapping[vid] for vid in f.verts], + group=getattr(f, "group", "default"), + group_material=getattr(f, "group_material", "default"), + material=getattr(f, "material", "unknown"), + ) + ) + + diag = { + "status": "ok", + "vertices_before": len(points), + "vertices_after": len(new_points), + "removed_unused_vertices": len(points) - len(new_points), + } + return new_faces, new_points, changed, diag + +def trim_segment_face_intersections_iterative( + faces: List["FaceRecord"], + points: List[Tuple[float, float, float]], + room_center: Tuple[float, float, float], + *, + max_iters: int = 20, + tol: float = 1e-9, + logger=None, +) -> Tuple[List["FaceRecord"], List[Tuple[float, float, float]], bool, Dict[str, Any]]: + """ + Iteratively trim components for remaining segment-face interior intersections. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + points : list[(x, y, z)] + Global mutable vertex list. + room_center : tuple[float, float, float] + Reference point used to choose the kept side of the clipping plane. + max_iters : int + Maximum number of trim attempts. + tol : float + Numerical tolerance for clipping. + logger : logging.Logger | None + Optional logger. + + Returns + ------- + tuple + (updated_faces, updated_points, changed_any, diagnostics) + + Purpose + ------- + A single trim usually resolves only one offending connected component. 
This helper + repeatedly detects remaining `segment_face_interior_intersection` hits, trims one + component at a time, compacts the vertex list, and revalidates until no further + supported hits remain or no progress can be made. + """ + changed_any = False + actions = [] + + for it in range(1, max_iters + 1): + plc_hits = detect_segment_facet_intersections_cdt( + faces, + points, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + seg_face_hits = [r for r in plc_hits if r.get("hit_type") == "segment_face_interior_intersection"] + if not seg_face_hits: + diag = { + "status": "ok" if changed_any else "no_supported_hits", + "iterations": it - 1, + "applied_repairs": len(actions), + "actions": actions, + } + return faces, points, changed_any, diag + + target = seg_face_hits[0] + faces2, points2, changed, trim_diag = trim_component_from_segment_face_intersection_report( + faces, + points, + target, + room_center, + tol=tol, + logger=logger, + ) + if not changed: + diag = { + "status": "stalled", + "iterations": it, + "applied_repairs": len(actions), + "last_trim": trim_diag, + "actions": actions, + } + return faces, points, changed_any, diag + + faces2, points2, compact_changed, compact_diag = compact_vertices_and_remove_unused(faces2, points2) + action = { + "iteration": it, + "target_hit": { + "edge": target.get("edge"), + "edge_fids": target.get("edge_fids"), + "facet_fid": target.get("facet_fid"), + "point": target.get("point"), + }, + "trim_diag": trim_diag, + "compact_diag": compact_diag, + } + actions.append(action) + faces, points = faces2, points2 + changed_any = True + + if logger is not None: + logger.info( + "[PLC TRIM LOOP] iter=%d trim_status=%s removed_unused_vertices=%d", + it, + trim_diag.get("status"), + compact_diag.get("removed_unused_vertices", 0), + ) + + diag = { + "status": "max_iters_reached", + "iterations": max_iters, + "applied_repairs": 
len(actions), + "actions": actions, + } + return faces, points, changed_any, diag + +# ------ Trimming Helper + +def _clip_face_loop_against_plane( + face_loop: List[int], + points: List[Tuple[float, float, float]], + plane_point: Tuple[float, float, float], + plane_normal: Tuple[float, float, float], + keep_sign: float, + *, + tol: float = 1e-9, +) -> List[int]: + """ + Clip one polygon against a plane and keep the selected half-space. + + Parameters + ---------- + face_loop : list[int] + Ordered 1-based vertex ids of one polygon face. + points : list[(x, y, z)] + Global mutable point list. New intersection vertices may be appended. + plane_point : tuple[float, float, float] + A point on the clipping plane. + plane_normal : tuple[float, float, float] + Unit plane normal. + keep_sign : float + The kept side of the plane. Distances with the same sign as ``keep_sign`` + are kept. + tol : float + Numerical tolerance used for clipping and vertex reuse. + + Returns + ------- + list[int] + The clipped polygon loop. Returns an empty list when the face is completely + removed by the clip. + + Notes + ----- + This is a Sutherland-Hodgman style half-space clip performed directly in 3D. 
+ """ + if len(face_loop) < 3: + return [] + + def is_inside(sd: float) -> bool: + return sd * keep_sign >= -tol + + out: List[int] = [] + n = len(face_loop) + + for i in range(n): + a_vid = face_loop[i] + b_vid = face_loop[(i + 1) % n] + A = points[a_vid - 1] + B = points[b_vid - 1] + + da = _signed_distance_to_plane(A, plane_point, plane_normal) + db = _signed_distance_to_plane(B, plane_point, plane_normal) + a_inside = is_inside(da) + b_inside = is_inside(db) + + if a_inside and b_inside: + if not out or out[-1] != b_vid: + out.append(b_vid) + elif a_inside and not b_inside: + denom = da - db + if abs(denom) > tol: + t = da / denom + X = ( + A[0] + t * (B[0] - A[0]), + A[1] + t * (B[1] - A[1]), + A[2] + t * (B[2] - A[2]), + ) + x_vid = _get_or_create_vertex_id(points, X, tol=tol) + if not out or out[-1] != x_vid: + out.append(x_vid) + elif (not a_inside) and b_inside: + denom = da - db + if abs(denom) > tol: + t = da / denom + X = ( + A[0] + t * (B[0] - A[0]), + A[1] + t * (B[1] - A[1]), + A[2] + t * (B[2] - A[2]), + ) + x_vid = _get_or_create_vertex_id(points, X, tol=tol) + if not out or out[-1] != x_vid: + out.append(x_vid) + if not out or out[-1] != b_vid: + out.append(b_vid) + + out = clean_face_loop(out) + if len(out) >= 2 and out[0] == out[-1]: + out = out[:-1] + return out + +def _find_face_by_fid(faces: List["FaceRecord"], fid: int) -> "FaceRecord | None": + """Return the face with the requested face id, or ``None`` if it does not exist.""" + for face in faces: + if face.fid == fid: + return face + return None + +def _plane_from_face(face: "FaceRecord", points: List[Tuple[float, float, float]]): + """ + Build a clipping plane from a face using its Newell normal. + + Parameters + ---------- + face : FaceRecord + Face that defines the plane. + points : list[(x, y, z)] + Global point list. + + Returns + ------- + tuple[(x, y, z), (nx, ny, nz)] | None + A point on the plane and a unit normal, or ``None`` if the face is degenerate. 
+ """ + if len(face.verts) < 3: + return None + + nrm = newell_normal_from_points(face.verts, points) + nn = math.sqrt(nrm[0] * nrm[0] + nrm[1] * nrm[1] + nrm[2] * nrm[2]) + if nn <= 1e-18: + return None + + plane_point = points[face.verts[0] - 1] + plane_normal = (nrm[0] / nn, nrm[1] / nn, nrm[2] / nn) + return plane_point, plane_normal + + +def _signed_distance_to_plane( + p: Tuple[float, float, float], + plane_point: Tuple[float, float, float], + plane_normal: Tuple[float, float, float], +) -> float: + """Return the signed distance from ``p`` to a plane.""" + return ( + (p[0] - plane_point[0]) * plane_normal[0] + + (p[1] - plane_point[1]) * plane_normal[1] + + (p[2] - plane_point[2]) * plane_normal[2] + ) + + +def collect_face_component_from_seed_faces( + faces: List["FaceRecord"], + seed_face_fids: List[int], + *, + excluded_face_fids: List[int] | None = None, +) -> List[int]: + """ + Collect one connected face component starting from seed faces. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + seed_face_fids : list[int] + Starting faces belonging to the component that should be trimmed. + excluded_face_fids : list[int] | None + Faces that must not be entered during traversal, e.g. the clipping facet. + + Returns + ------- + list[int] + Sorted face ids belonging to the connected component. + + Notes + ----- + Connectivity is defined through shared undirected edges. 
+ """ + excluded = set(excluded_face_fids or []) + valid_fids = {f.fid for f in faces} + seeds = [fid for fid in seed_face_fids if fid in valid_fids and fid not in excluded] + if not seeds: + return [] + + edge_to_fids = _build_edge_face_adjacency(faces) + face_adj: Dict[int, set] = defaultdict(set) + for fids in edge_to_fids.values(): + for a in fids: + for b in fids: + if a != b: + face_adj[a].add(b) + + component = set() + q = deque(seeds) + while q: + fid = q.popleft() + if fid in component or fid in excluded: + continue + component.add(fid) + for nbr in face_adj.get(fid, []): + if nbr not in component and nbr not in excluded: + q.append(nbr) + + return sorted(component) +def _build_edge_face_adjacency(faces: List["FaceRecord"]) -> Dict[Tuple[int, int], List[int]]: + """Build a map from undirected edge to incident face ids.""" + edge_to_fids: Dict[Tuple[int, int], List[int]] = defaultdict(list) + for face in faces: + for u, v in face.undirected_edges(): + edge_to_fids[_uedge(u, v)].append(face.fid) + return edge_to_fids +def _get_or_create_vertex_id( + points: List[Tuple[float, float, float]], + xyz: Tuple[float, float, float], + *, + tol: float = 1e-9, +) -> int: + """ + Reuse an existing vertex if the coordinates already exist within tolerance; + otherwise append a new vertex and return its 1-based id. 
+ """ + for i, p in enumerate(points, start=1): + if ( + abs(p[0] - xyz[0]) <= tol + and abs(p[1] - xyz[1]) <= tol + and abs(p[2] - xyz[2]) <= tol + ): + return i + + points.append(xyz) + return len(points) + +# ------ End of Trimming Helper + + +# ------ Repair Segment-Face Intersection + +def repair_plc_single_splits_iterative( + faces: List[FaceRecord], + points: List[Tuple[float, float, float]], + room_center: Tuple[float, float, float], + *, + logger=None, + max_iters: int = 20, + planarity_tol_m: float = 1e-6, +): + """ + Iteratively repair PLC endpoint_face_interior_touch cases using: + 1) multi-point same-face triangulation repair + 2) single-point triangulation repair + + Strategy + -------- + Per iteration: + - detect PLC hits + - group endpoint_face_interior_touch by touched face + - if any face has >1 hit, repair that face first with multi-point repair + - else if any face has exactly 1 hit, repair one single-hit face + - re-orient and repeat + + Returns + ------- + Tuple[List[FaceRecord], bool, dict] + (updated_faces, changed_any, summary) + """ + summary = { + "iterations": 0, + "applied_repairs": 0, + "stopped_reason": "unknown", + "remaining_plc_hits": 0, + "remaining_endpoint_face_hits": 0, + "remaining_single_hit_candidates": 0, + "remaining_multi_hit_faces": 0, + } + + changed_any = False + + for it in range(1, max_iters + 1): + summary["iterations"] = it + + plc_hits = detect_segment_facet_intersections_cdt( + faces, + points, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + summary["remaining_plc_hits"] = len(plc_hits) + + if not plc_hits: + summary["remaining_plc_hits"] = 0 + summary["remaining_endpoint_face_hits"] = 0 + summary["remaining_single_hit_candidates"] = 0 + summary["remaining_multi_hit_faces"] = 0 + summary["stopped_reason"] = "no_plc_hits" + if logger: + logger.info("[PLC REPAIR] stable after %d iterations: no PLC hits", it - 
1) + return faces, points, changed_any, summary + + endpoint_face_hits = [ + r for r in plc_hits + if r.get("hit_type") == "endpoint_face_interior_touch" + ] + summary["remaining_endpoint_face_hits"] = len(endpoint_face_hits) + + if not endpoint_face_hits: + summary["remaining_endpoint_face_hits"] = 0 + summary["remaining_single_hit_candidates"] = 0 + summary["remaining_multi_hit_faces"] = 0 + summary["stopped_reason"] = "no_endpoint_face_hits" + if logger: + logger.info("[PLC REPAIR] stop: PLC hits remain, but none are endpoint_face_interior_touch") + return faces, points, changed_any, summary + + hits_by_face = defaultdict(list) + for r in endpoint_face_hits: + hits_by_face[r["facet_fid"]].append(r) + + multi_hit_faces = [fid for fid, rs in hits_by_face.items() if len(rs) > 1] + single_hit_candidates = [rs[0] for fid, rs in hits_by_face.items() if len(rs) == 1] + + summary["remaining_multi_hit_faces"] = len(multi_hit_faces) + summary["remaining_single_hit_candidates"] = len(single_hit_candidates) + + if logger: + logger.info( + "[PLC REPAIR] iter=%d plc_hits=%d endpoint_face_hits=%d multi_hit_faces=%d single_hit_candidates=%d", + it, + len(plc_hits), + len(endpoint_face_hits), + len(multi_hit_faces), + len(single_hit_candidates), + ) + + changed = False + diag = None + + # --------------------------------------------------------- + # Priority 1: multi-hit same-face repair + # --------------------------------------------------------- + if multi_hit_faces: + + chosen_fid = max(multi_hit_faces, key=lambda fid: len(hits_by_face[fid])) + + chosen_reports = hits_by_face[chosen_fid] + + chosen_face = _find_face_by_fid(faces, chosen_fid) + + cls = classify_multi_hit_face_collinear( + + chosen_face, + + chosen_reports, + + points, + + tol_m=0.01, + + ) + + if cls["is_collinear"]: + + if logger: + + logger.info( + + "[PLC REPAIR] multi-hit face=%d classified as COLLINEAR (max_dev=%.6g)", + + chosen_fid, + + cls["max_dev"], + + ) + + faces, points, changed, diag = 
repair_multi_hit_face_collinear_chain( + + faces, + + chosen_reports, + + points, + + logger=logger, + + ) + + else: + + if logger: + + logger.info( + + "CURRENTLY ONLY COLLINEAR multi-hit repair is implemented; face=%d classified as NONCOLLINEAR (max_dev=%.6g); skipping for now", + chosen_fid, + cls["max_dev"], + ) + # --------------------------------------------------------- + # Priority 2: single-hit repair + # --------------------------------------------------------- + elif single_hit_candidates: + chosen_report = single_hit_candidates[0] + + if logger: + logger.info( + "[PLC REPAIR] chosen single-hit iter=%d facet_fid=%d edge=%s point=(%.6f,%.6f,%.6f)", + it, + chosen_report["facet_fid"], + chosen_report["edge"], + chosen_report["point"][0], + chosen_report["point"][1], + chosen_report["point"][2], + ) + + faces, changed, diag = repair_single_endpoint_face_interior_touch_by_triangulation( + faces, + chosen_report, + points, + logger=logger, + planarity_tol_m=planarity_tol_m, + ) + + else: + summary["stopped_reason"] = "no_candidates" + if logger: + logger.info("[PLC REPAIR] stop: no repair candidates") + return faces, points, changed_any, summary + + if not changed: + summary["stopped_reason"] = "selected_candidate_not_changed" + if logger: + logger.info("[PLC REPAIR] stop: selected candidate produced no topology change; diag=%s", diag) + return faces, points, changed_any, summary + + changed_any = True + summary["applied_repairs"] += 1 + + if logger: + logger.info("[PLC REPAIR] applied iter=%d diag=%s", it, diag) + + # re-orient after topology change + diag_orient = orient_faces_consistently_by_adjacency(faces, logger=logger) + if logger: + logger.info("[ORIENT AFTER PLC REPAIR] iter=%d diag=%s", it, diag_orient) + + flip_all_faces_if_majority_inward( + faces, + points, + room_center, + logger=logger, + ) + + summary["stopped_reason"] = "max_iters_reached" + if logger: + logger.warning("[PLC REPAIR] reached max_iters=%d", max_iters) + + return faces, points, 
changed_any, summary + +def repair_multi_endpoint_face_touch_same_face_by_triangulation( + faces: List[FaceRecord], + plc_reports: List[Dict[str, Any]], + points: List[Tuple[float, float, float]], + *, + logger=None, + planarity_tol_m: float = 1e-6, +): + """ + Repair one face hit by multiple endpoint_face_interior_touch intersections + by building one split chain across the face and triangulating both sides. + + Intention + --------- + This targets the "small room" pattern: + - many endpoint_face_interior_touch hits + - same facet_fid + - points roughly form one ordered chain across the face + + Strategy + -------- + 1. Collect all unique inserted endpoints on the touched face. + 2. Project face and points to 2D. + 3. Fit one dominant chain direction through the inserted points. + 4. Extend that line to the boundary in both directions. + 5. Insert/reuse two boundary points. + 6. Split the face into 2 polygons using: + boundary_point_A -> inserted_points_sorted -> boundary_point_B + 7. Triangulate both polygons with triangulate_face_cdt_shapely(...). + + Parameters + ---------- + faces : List[FaceRecord] + Current face list. + plc_reports : list[dict] + PLC reports that must all belong to the same facet_fid. + points : list[(x,y,z)] + Global point list. + logger : logging.Logger | None + Optional logger. + planarity_tol_m : float + Maximum allowed planarity deviation of the touched face. 
+ + Returns + ------- + tuple[list[FaceRecord], list[(x,y,z)], bool, dict] + (updated_faces, updated_points, changed, diagnostics) + """ + diag = { + "status": "noop", + "facet_fid": None, + "n_hits": 0, + "n_inserted_points": 0, + "created_boundary_points": 0, + "n_output_tris": 0, + } + + if not plc_reports: + diag["status"] = "no_reports" + return faces, points, False, diag + + facet_ids = {r["facet_fid"] for r in plc_reports} + if len(facet_ids) != 1: + diag["status"] = "reports_not_same_face" + return faces, points, False, diag + + facet_fid = next(iter(facet_ids)) + diag["facet_fid"] = facet_fid + diag["n_hits"] = len(plc_reports) + + fid_to_face = {f.fid: f for f in faces} + touched_face = fid_to_face.get(facet_fid) + if touched_face is None: + diag["status"] = "missing_face" + return faces, points, False, diag + + pstat, pmax_m, prms_m = classify_face_planarity_m(touched_face.verts, points) + if pstat == "fatal": + diag["status"] = "face_nonplanar" + return faces, points, False, diag + + # -------------------------------------------------- + # 1) Extract unique inserted endpoint vids + # -------------------------------------------------- + inserted_vids = [] + seen = set() + + for r in plc_reports: + if r.get("hit_type") != "endpoint_face_interior_touch": + continue + + edge = r["edge"] + t = r["t_param"] + + if abs(t) <= 1e-9: + vid = edge[0] + elif abs(t - 1.0) <= 1e-9: + vid = edge[1] + else: + continue + + if vid not in seen: + seen.add(vid) + inserted_vids.append(vid) + + if len(inserted_vids) < 2: + diag["status"] = "need_at_least_two_points" + return faces, points, False, diag + + diag["n_inserted_points"] = len(inserted_vids) + + # -------------------------------------------------- + # 2) Project touched face + inserted points to 2D + # -------------------------------------------------- + poly_ids = clean_face_loop(touched_face.verts) + poly2d, dropped_axis = project_face_to_2d(poly_ids, points) + + if area2(poly2d) < 0: + poly_ids.reverse() + 
poly2d.reverse() + + chain_pts_2d = [project_vid_to_2d(vid, points, dropped_axis) for vid in inserted_vids] + + # -------------------------------------------------- + # 3) Fit one dominant chain direction and sort points + # -------------------------------------------------- + c2, d2 = fit_chain_direction_2d(chain_pts_2d) + + proj_vals = [] + for vid in inserted_vids: + p2 = project_vid_to_2d(vid, points, dropped_axis) + s = (p2[0] - c2[0]) * d2[0] + (p2[1] - c2[1]) * d2[1] + proj_vals.append((s, vid)) + + proj_vals.sort(key=lambda x: x[0]) + inserted_vids_sorted = [vid for _, vid in proj_vals] + + # -------------------------------------------------- + # 4) Extend fitted line to both boundary sides + # -------------------------------------------------- + best_neg = None + best_pos = None + edge_neg = None + edge_pos = None + + n = len(poly2d) + for i in range(n): + a2 = poly2d[i] + b2 = poly2d[(i + 1) % n] + + hit = line_segment_intersection_signed(c2, d2, a2, b2, tol=1e-12) + if hit is None: + continue + + s, tseg = hit + if s < 0.0: + if best_neg is None or s > best_neg[0]: + best_neg = (s, tseg) + edge_neg = i + elif s > 0.0: + if best_pos is None or s < best_pos[0]: + best_pos = (s, tseg) + edge_pos = i + + if best_neg is None or best_pos is None: + diag["status"] = "failed_boundary_extension" + return faces, points, False, diag + + # -------------------------------------------------- + # 5) Create/reuse two boundary points + # -------------------------------------------------- + bneg_vid, points, used_neg_existing = create_or_reuse_boundary_point_on_edge( + poly_ids, poly2d, edge_neg, best_neg[1], points + ) + bpos_vid, points, used_pos_existing = create_or_reuse_boundary_point_on_edge( + poly_ids, poly2d, edge_pos, best_pos[1], points + ) + + diag["created_boundary_points"] = int(not used_neg_existing) + int(not used_pos_existing) + + # refresh polygon references after possible point insertion + poly_ids = clean_face_loop(touched_face.verts) + poly2d, 
dropped_axis = project_face_to_2d(poly_ids, points) + if area2(poly2d) < 0: + poly_ids.reverse() + poly2d.reverse() + + # insert boundary point A if new + if bneg_vid not in poly_ids: + poly_ids = insert_vertex_on_polygon_edge(poly_ids, edge_neg, bneg_vid) + + # insert boundary point B if new + if bpos_vid not in poly_ids: + poly2d, dropped_axis = project_face_to_2d(poly_ids, points) + if area2(poly2d) < 0: + poly_ids.reverse() + poly2d.reverse() + + p_bpos_2d = project_vid_to_2d(bpos_vid, points, dropped_axis) + found_edge = None + for i in range(len(poly_ids)): + a2 = poly2d[i] + b2 = poly2d[(i + 1) % len(poly_ids)] + if point_on_segment_2d(p_bpos_2d, a2, b2, tol=1e-8): + found_edge = i + break + + if found_edge is None: + diag["status"] = "cannot_reinsert_second_boundary_point" + return faces, points, False, diag + + poly_ids = insert_vertex_on_polygon_edge(poly_ids, found_edge, bpos_vid) + + if bneg_vid not in poly_ids or bpos_vid not in poly_ids: + diag["status"] = "boundary_points_not_in_loop" + return faces, points, False, diag + + # -------------------------------------------------- + # 6) Build chain split and 2 polygons + # -------------------------------------------------- + chain_vids = [bneg_vid] + inserted_vids_sorted + [bpos_vid] + + idx_neg = poly_ids.index(bneg_vid) + idx_pos = poly_ids.index(bpos_vid) + + boundary_a = boundary_chain(poly_ids, idx_neg, idx_pos) + boundary_b = boundary_chain(poly_ids, idx_pos, idx_neg) + + # remove duplicated boundary endpoints before concatenation + poly_a = clean_face_loop(boundary_a + list(reversed(chain_vids[1:-1]))) + poly_b = clean_face_loop(boundary_b + chain_vids[1:-1]) + + split_polys = [] + for poly in (poly_a, poly_b): + if len(poly) < 3: + continue + if polygon_area2_newell(poly, points) <= 1e-22: + continue + split_polys.append(poly) + + if len(split_polys) != 2: + diag["status"] = "invalid_split_polys" + return faces, points, False, diag + + # -------------------------------------------------- + # 7) 
Triangulate both polygons + # -------------------------------------------------- + out_tris = [] + for poly in split_polys: + if len(poly) == 3: + tris = [poly] + else: + tris = triangulate_face_cdt_shapely(poly, points) + + if not tris: + diag["status"] = "triangulation_failed" + return faces, points, False, diag + + for tri in tris: + if len(set(tri)) < 3: + continue + if tri_area2(tri[0], tri[1], tri[2], points) <= 1e-22: + continue + out_tris.append(tri) + + if not out_tris: + diag["status"] = "no_output_tris" + return faces, points, False, diag + + diag["n_output_tris"] = len(out_tris) + + # -------------------------------------------------- + # 8) Replace touched face with new triangles + # -------------------------------------------------- + next_fid = max((f.fid for f in faces), default=-1) + 1 + new_faces = [] + + for f in faces: + if f.fid != facet_fid: + new_faces.append(f) + continue + + for tri in out_tris: + new_faces.append( + FaceRecord( + fid=next_fid, + verts=tri, + group=f.group, + group_material=f.group_material, + material=f.material, + ) + ) + next_fid += 1 + + diag["status"] = "ok" + + if logger: + logger.info( + "[PLC MULTI TRI REPAIR] facet_fid=%d hits=%d inserted=%d created_boundary_points=%d output_tris=%d", + facet_fid, + len(plc_reports), + len(inserted_vids_sorted), + diag["created_boundary_points"], + diag["n_output_tris"], + ) + + return new_faces, points, True, diag + +# Segment Facet Intersection helper math (2D projection, orientation, segment intersection) +# Note: we want to be robust to near-collinear cases, so we use a tolerance-based approach + +def point_on_segment_2d(p, a, b, tol=1e-12): + """ + Check whether a 2D point p lies on segment ab. + + Parameters + ---------- + p : tuple[float, float] + Query point. + a, b : tuple[float, float] + Segment endpoints. + tol : float + Numerical tolerance. + + Returns + ------- + bool + True if p lies on the segment within tolerance. 
+ """ + if abs(orient(a, b, p)) > tol: + return False + + return ( + min(a[0], b[0]) - tol <= p[0] <= max(a[0], b[0]) + tol and + min(a[1], b[1]) - tol <= p[1] <= max(a[1], b[1]) + tol + ) + +def tri_area2(i, j, k, points): + a = points[i - 1] + b = points[j - 1] + c = points[k - 1] + ab = sub(b, a) + ac = sub(c, a) + cr = cross(ab, ac) + return dot(cr, cr) + + +def segments_intersect_2d(a, b, c, d, tol=1e-12): + """ + General 2D segment intersection test, including touching cases. + + Parameters + ---------- + a, b : tuple[float, float] + Endpoints of first segment. + c, d : tuple[float, float] + Endpoints of second segment. + tol : float + Numerical tolerance. + + Returns + ------- + bool + True if the two segments intersect. + """ + o1 = orient(a, b, c) + o2 = orient(a, b, d) + o3 = orient(c, d, a) + o4 = orient(c, d, b) + + # proper crossing + if ((o1 > tol and o2 < -tol) or (o1 < -tol and o2 > tol)) and \ + ((o3 > tol and o4 < -tol) or (o3 < -tol and o4 > tol)): + return True + + # touching / collinear cases + if abs(o1) <= tol and point_on_segment_2d(c, a, b, tol): + return True + if abs(o2) <= tol and point_on_segment_2d(d, a, b, tol): + return True + if abs(o3) <= tol and point_on_segment_2d(a, c, d, tol): + return True + if abs(o4) <= tol and point_on_segment_2d(b, c, d, tol): + return True + + return False + + +def point_in_polygon_2d(poly, p, tol=1e-12): + """ + Classify a 2D point relative to a simple polygon. + + Parameters + ---------- + poly : list[tuple[float, float]] + Polygon vertices in order. + p : tuple[float, float] + Query point. + tol : float + Numerical tolerance. 
+ + Returns + ------- + str + One of: + - "inside" + - "boundary" + - "outside" + """ + n = len(poly) + + # boundary check first + for i in range(n): + a = poly[i] + b = poly[(i + 1) % n] + if point_on_segment_2d(p, a, b, tol): + return "boundary" + + x, y = p + inside = False + + for i in range(n): + x1, y1 = poly[i] + x2, y2 = poly[(i + 1) % n] + + crosses = ((y1 > y) != (y2 > y)) + if crosses: + xinters = (x2 - x1) * (y - y1) / (y2 - y1) + x1 + if x < xinters: + inside = not inside + + return "inside" if inside else "outside" + + +def project_point_by_dropped_axis(p3, dropped_axis): + """ + Project one 3D point to 2D using the same dropped axis as the face projection. + + Parameters + ---------- + p3 : tuple[float, float, float] + 3D point. + dropped_axis : str + One of "x", "y", "z". + + Returns + ------- + tuple[float, float] + Projected 2D point. + """ + if dropped_axis == "z": + return (p3[0], p3[1]) + elif dropped_axis == "y": + return (p3[0], p3[2]) + else: + return (p3[1], p3[2]) + + +def project_face_to_2d(face_ids, points): + """ + Project a planar polygon to 2D by dropping the dominant normal axis. + + Parameters + ---------- + face_ids : list[int] + Ordered 1-based vertex ids of the polygon. + points : list[tuple[float, float, float]] + Global point list. + + Returns + ------- + tuple[list[tuple[float, float]], str] + (projected_polygon, dropped_axis) + """ + nrm = newell_normal_from_points(face_ids, points) + ax, ay, az = abs(nrm[0]), abs(nrm[1]), abs(nrm[2]) + + if az >= ax and az >= ay: + return ([(points[pid - 1][0], points[pid - 1][1]) for pid in face_ids], "z") + elif ay >= ax and ay >= az: + return ([(points[pid - 1][0], points[pid - 1][2]) for pid in face_ids], "y") + else: + return ([(points[pid - 1][1], points[pid - 1][2]) for pid in face_ids], "x") + + +def boundary_chain(poly_ids, i, j): + """ + Return the ordered boundary chain from polygon index i to j inclusive. 
+ + Parameters + ---------- + poly_ids : list[int] + Polygon vertex ids in order. + i, j : int + Indices into poly_ids. + + Returns + ------- + list[int] + Boundary chain from i to j inclusive. + """ + n = len(poly_ids) + out = [poly_ids[i]] + k = i + while k != j: + k = (k + 1) % n + out.append(poly_ids[k]) + return out + + +def visible_boundary_vertices_from_point(poly2d, p2, tol=1e-12): + """ + Compute polygon vertices visible from an interior point. + + Parameters + ---------- + poly2d : list[tuple[float, float]] + Simple polygon vertices in order. + p2 : tuple[float, float] + Interior point. + tol : float + Numerical tolerance. + + Returns + ------- + list[int] + Indices of visible boundary vertices. + """ + n = len(poly2d) + visible = [] + + for i in range(n): + vi = poly2d[i] + ok = True + + for k in range(n): + a = poly2d[k] + b = poly2d[(k + 1) % n] + + # skip edges incident to vertex i + if k == i or (k + 1) % n == i: + continue + + if segments_intersect_2d(p2, vi, a, b, tol): + ok = False + break + + if ok: + visible.append(i) + + return visible + + +def split_face_at_single_interior_vertex( + face: FaceRecord, + inserted_vid: int, + points: List[Tuple[float, float, float]], + *, + planarity_tol_m: float = 1e-6, + boundary_tol_2d: float = 1e-10, +): + """ + Split one planar polygon face using one inserted interior vertex. + + Intention + --------- + This is the preferred single-split repair for one + `endpoint_face_interior_touch` case. + + Strategy + -------- + 1. Check the touched face is planar enough. + 2. Project the face and inserted point to 2D. + 3. Require the inserted point to lie strictly inside the polygon. + 4. Find visible boundary vertices from the inserted point. + 5. Prefer a 2-polygon split using two non-adjacent visible vertices. + 6. If that fails, fall back to a triangle fan. + + Parameters + ---------- + face : FaceRecord + Touched face to split. + inserted_vid : int + 1-based vertex id of the touching endpoint. 
+ points : list[tuple[float, float, float]] + Global point list. + planarity_tol_m : float + Maximum allowed planarity deviation for trying polygon split. + boundary_tol_2d : float + 2D tolerance for point-in-polygon and visibility checks. + + Returns + ------- + list[list[int]] | None + Replacement faces as ordered vertex-id loops, or None if split fails. + """ + if inserted_vid in face.verts: + return None + + # 1) planarity check + max_abs, rms, nu, c = planarity_deviation_m(face.verts, points) + if not math.isfinite(max_abs) or max_abs > planarity_tol_m: + return None + + # 2) project face to 2D + poly_ids = clean_face_loop(face.verts) + poly2d, dropped_axis = project_face_to_2d(poly_ids, points) + + # ensure CCW for consistency + if area2(poly2d) < 0: + poly_ids.reverse() + poly2d.reverse() + + p2 = project_point_by_dropped_axis(points[inserted_vid - 1], dropped_axis) + + # 3) inserted point must be strictly inside, not boundary + pos = point_in_polygon_2d(poly2d, p2, tol=boundary_tol_2d) + if pos != "inside": + return None + + # 4) visible boundary vertices + visible = visible_boundary_vertices_from_point(poly2d, p2, tol=boundary_tol_2d) + if len(visible) < 2: + return None + + # 5) preferred split: choose farthest non-adjacent visible pair + best_pair = None + best_score = -1.0 + n = len(poly_ids) + + for a in visible: + for b in visible: + if a >= b: + continue + if (a + 1) % n == b or (b + 1) % n == a: + continue # adjacent pair is weak; prefer real split + + pa = poly2d[a] + pb = poly2d[b] + score = (pa[0] - pb[0])**2 + (pa[1] - pb[1])**2 + if score > best_score: + best_score = score + best_pair = (a, b) + + if best_pair is not None: + i, j = best_pair + + chain_ij = boundary_chain(poly_ids, i, j) + chain_ji = boundary_chain(poly_ids, j, i) + + poly_a = clean_face_loop(chain_ij + [inserted_vid]) + poly_b = clean_face_loop(chain_ji + [inserted_vid]) + + out = [] + for poly in (poly_a, poly_b): + if len(poly) < 3: + continue + if polygon_area2_newell(poly, 
points) <= 1e-22: + continue + out.append(poly) + + if len(out) == 2: + return out + + # 6) fallback: triangle fan + out = [] + for i in range(len(poly_ids)): + a = poly_ids[i] + b = poly_ids[(i + 1) % len(poly_ids)] + tri = [a, b, inserted_vid] + + if len(set(tri)) < 3: + continue + if tri_area2(tri[0], tri[1], tri[2], points) <= 1e-22: + continue + + out.append(tri) + + return out if out else None + + +def polygon_area2_newell(face_ids, points): + """ + Squared area proxy of polygon using Newell normal. + + Parameters + ---------- + face_ids : list[int] + Ordered polygon vertex ids. + points : list[tuple[float, float, float]] + Global point list. + + Returns + ------- + float + Squared norm of Newell normal. + """ + nrm = newell_normal_from_points(face_ids, points) + return dot(nrm, nrm) + + +def repair_single_endpoint_face_interior_touch( + faces: List[FaceRecord], + plc_report: Dict[str, Any], + points: List[Tuple[float, float, float]], + *, + logger=None, + planarity_tol_m: float = 1e-6, +): + """ + Repair one single `endpoint_face_interior_touch` PLC report. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + plc_report : dict + One classified PLC report containing at least: + - hit_type + - edge + - t_param + - facet_fid + points : list[tuple[float, float, float]] + Global point list. + logger : logging.Logger | None + Optional logger. + planarity_tol_m : float + Maximum allowed planarity deviation for polygon split. 
+ + Returns + ------- + tuple[list[FaceRecord], bool, dict] + (updated_faces, changed, diagnostics) + """ + diag = { + "status": "noop", + "touched_fid": None, + "inserted_vid": None, + "n_new_faces": 0, + } + + if plc_report.get("hit_type") != "endpoint_face_interior_touch": + diag["status"] = "wrong_type" + return faces, False, diag + + facet_fid = plc_report["facet_fid"] + edge = plc_report["edge"] + t = plc_report["t_param"] + + if abs(t) <= 1e-9: + inserted_vid = edge[0] + elif abs(t - 1.0) <= 1e-9: + inserted_vid = edge[1] + else: + diag["status"] = "bad_t_param" + return faces, False, diag + + diag["touched_fid"] = facet_fid + diag["inserted_vid"] = inserted_vid + + fid_to_face = {f.fid: f for f in faces} + touched_face = fid_to_face.get(facet_fid) + if touched_face is None: + diag["status"] = "missing_face" + return faces, False, diag + + split_polys = split_face_at_single_interior_vertex( + touched_face, + inserted_vid, + points, + planarity_tol_m=planarity_tol_m, + ) + + if not split_polys: + diag["status"] = "split_failed" + if logger: + logger.warning( + "[SINGLE SPLIT] failed facet_fid=%d inserted_vid=%d verts=%s", + facet_fid, inserted_vid, touched_face.verts + ) + return faces, False, diag + + next_fid = max((f.fid for f in faces), default=-1) + 1 + new_faces = [] + + for f in faces: + if f.fid != facet_fid: + new_faces.append(f) + continue + + for poly in split_polys: + new_faces.append( + FaceRecord( + fid=next_fid, + verts=poly, + group=f.group, + group_material=f.group_material, + material=f.material, + ) + ) + next_fid += 1 + + diag["status"] = "ok" + diag["n_new_faces"] = len(split_polys) + + if logger: + logger.info( + "[SINGLE SPLIT] repaired facet_fid=%d inserted_vid=%d -> %d new faces", + facet_fid, inserted_vid, len(split_polys) + ) + + return new_faces, True, diag + +# ----------------------------- +# Repair Mode for single endpoint-face interior touch +# ----------------------------- + +def point_segment_distance_2d(p, a, b): + """ + 
Distance from 2D point p to segment ab. + + Parameters + ---------- + p, a, b : tuple[float, float] + 2D points. + + Returns + ------- + float + Euclidean distance from p to segment ab. + """ + abx = b[0] - a[0] + aby = b[1] - a[1] + apx = p[0] - a[0] + apy = p[1] - a[1] + + ab2 = abx * abx + aby * aby + if ab2 <= 0.0: + dx = p[0] - a[0] + dy = p[1] - a[1] + return math.sqrt(dx * dx + dy * dy) + + t = (apx * abx + apy * aby) / ab2 + t = max(0.0, min(1.0, t)) + + qx = a[0] + t * abx + qy = a[1] + t * aby + + dx = p[0] - qx + dy = p[1] - qy + return math.sqrt(dx * dx + dy * dy) + + +def project_face_and_point_to_2d(face_ids, point_vid, points): + """ + Project one face and one point to 2D using dominant-axis projection. + + Parameters + ---------- + face_ids : list[int] + Ordered 1-based vertex ids of the face. + point_vid : int + 1-based vertex id of query point. + points : list[(x,y,z)] + Global point list. + + Returns + ------- + tuple[list[(x,y)], (x,y), str] + (projected face polygon, projected point, dropped_axis) + """ + nrm = newell_normal_from_points(face_ids, points) + ax, ay, az = abs(nrm[0]), abs(nrm[1]), abs(nrm[2]) + + if az >= ax and az >= ay: + poly2d = [(points[pid - 1][0], points[pid - 1][1]) for pid in face_ids] + p2 = (points[point_vid - 1][0], points[point_vid - 1][1]) + return poly2d, p2, "z" + elif ay >= ax and ay >= az: + poly2d = [(points[pid - 1][0], points[pid - 1][2]) for pid in face_ids] + p2 = (points[point_vid - 1][0], points[point_vid - 1][2]) + return poly2d, p2, "y" + else: + poly2d = [(points[pid - 1][1], points[pid - 1][2]) for pid in face_ids] + p2 = (points[point_vid - 1][1], points[point_vid - 1][2]) + return poly2d, p2, "x" + + +def classify_endpoint_face_touch_repair_mode( + plc_report, + faces, + points, + *, + coplanar_tol_m=1e-6, + min_interior_clearance_m=1e-4, + min_face_area2=1e-20, + min_cross_fraction=0.15, +): + """ + Decide whether one endpoint_face_interior_touch should use + structural split or local split. 
+
+ Intention
+ ---------
+ This is the decision rule between:
+ - structural_split
+ - local_split
+
+ It is designed for the OBJ->Gmsh preprocessing pipeline and focuses
+ on planar architectural faces.
+
+ Parameters
+ ----------
+ plc_report : dict
+ One PLC report from detect_segment_facet_intersections_cdt(...).
+ Must contain:
+ - hit_type
+ - edge
+ - t_param
+ - facet_fid
+ faces : list[FaceRecord]
+ Current face list.
+ points : list[(x,y,z)]
+ Global point list.
+ coplanar_tol_m : float
+ Max allowed distance of the opposite endpoint to the touched face plane
+ for the edge to be treated as coplanar with the face.
+ min_interior_clearance_m : float
+ Minimum 2D distance from inserted point to any boundary edge/vertex
+ to consider it a real interior point instead of near-boundary noise.
+ min_face_area2 : float
+ Minimum polygon area proxy from Newell normal squared.
+ min_cross_fraction : float
+ Minimum fraction of the face bbox span covered by the edge projection
+ to count as a meaningful cross-face structural cut.
+
+ Returns
+ -------
+ tuple[str, dict]
+ ("structural_split" | "local_split" | "reject", diagnostics)
+
+ Diagnostics
+ -----------
+ Returns a dict with reasons and measured quantities for logging and debugging.
+ """ + diag = { + "facet_fid": None, + "inserted_vid": None, + "other_vid": None, + "coplanar": False, + "face_ok": False, + "clear_of_boundary": False, + "cross_fraction": 0.0, + "decision": "reject", + "reasons": [], + } + + if plc_report.get("hit_type") != "endpoint_face_interior_touch": + diag["reasons"].append("wrong_hit_type") + return "reject", diag + + facet_fid = plc_report["facet_fid"] + edge = plc_report["edge"] + t = plc_report["t_param"] + diag["facet_fid"] = facet_fid + + fid_to_face = {f.fid: f for f in faces} + face = fid_to_face.get(facet_fid) + if face is None: + diag["reasons"].append("missing_face") + return "reject", diag + + if abs(t) <= 1e-9: + inserted_vid = edge[0] + other_vid = edge[1] + elif abs(t - 1.0) <= 1e-9: + inserted_vid = edge[1] + other_vid = edge[0] + else: + diag["reasons"].append("bad_t_param") + return "reject", diag + + diag["inserted_vid"] = inserted_vid + diag["other_vid"] = other_vid + + # -------------------------------------------------- + # 1) face must be valid and reasonably planar + # -------------------------------------------------- + area2_face = polygon_area2_newell(face.verts, points) + if area2_face <= min_face_area2: + diag["reasons"].append("face_too_small_or_degenerate") + return "reject", diag + + pstat, pmax_m, prms_m = classify_face_planarity_m(face.verts, points) + if pstat == "fatal": + diag["reasons"].append("face_nonplanar") + return "reject", diag + + diag["face_ok"] = True + + # -------------------------------------------------- + # 2) coplanarity test for the touching edge vs face plane + # -------------------------------------------------- + max_abs, rms, nu, c = planarity_deviation_m(face.verts, points) + qx, qy, qz = points[other_vid - 1] + dx = qx - c[0] + dy = qy - c[1] + dz = qz - c[2] + q_dist = abs(nu[0] * dx + nu[1] * dy + nu[2] * dz) + + if q_dist <= coplanar_tol_m: + diag["coplanar"] = True + else: + diag["reasons"].append("edge_not_coplanar_with_face") + + # 
-------------------------------------------------- + # 3) inserted point should be clearly interior, + # not just almost on an existing boundary + # -------------------------------------------------- + poly_ids = clean_face_loop(face.verts) + poly2d, p2, dropped_axis = project_face_and_point_to_2d(poly_ids, inserted_vid, points) + + if area2(poly2d) < 0: + poly_ids.reverse() + poly2d.reverse() + + min_edge_dist = float("inf") + min_vert_dist = float("inf") + n = len(poly2d) + + for i in range(n): + a = poly2d[i] + b = poly2d[(i + 1) % n] + de = point_segment_distance_2d(p2, a, b) + if de < min_edge_dist: + min_edge_dist = de + + dvx = p2[0] - a[0] + dvy = p2[1] - a[1] + dv = math.sqrt(dvx * dvx + dvy * dvy) + if dv < min_vert_dist: + min_vert_dist = dv + + if min(min_edge_dist, min_vert_dist) >= min_interior_clearance_m: + diag["clear_of_boundary"] = True + else: + diag["reasons"].append("point_near_existing_boundary") + + # -------------------------------------------------- + # 4) structural-crossing strength: + # does the edge projection span a meaningful fraction of face size? 
+ # -------------------------------------------------- + p_inserted = p2 + p_other = project_face_and_point_to_2d(poly_ids, other_vid, points)[1] + + minx = min(x for x, y in poly2d) + maxx = max(x for x, y in poly2d) + miny = min(y for x, y in poly2d) + maxy = max(y for x, y in poly2d) + + face_span = max(maxx - minx, maxy - miny) + edge_span = math.sqrt( + (p_inserted[0] - p_other[0]) ** 2 + + (p_inserted[1] - p_other[1]) ** 2 + ) + + cross_fraction = (edge_span / face_span) if face_span > 0.0 else 0.0 + diag["cross_fraction"] = cross_fraction + + if cross_fraction < min_cross_fraction: + diag["reasons"].append("edge_does_not_meaningfully_cross_face") + + # -------------------------------------------------- + # Final decision + # -------------------------------------------------- + if diag["face_ok"] and diag["clear_of_boundary"]: + diag["decision"] = "structural_split" + return "structural_split", diag + + if diag["face_ok"]: + diag["decision"] = "local_split" + return "local_split", diag + + diag["decision"] = "reject" + return "reject", diag + + +def repair_single_endpoint_face_interior_touch_by_triangulation( + faces: List[FaceRecord], + plc_report: Dict[str, Any], + points: List[Tuple[float, float, float]], + *, + logger=None, + planarity_tol_m: float = 1e-6, +): + """ + Repair one endpoint_face_interior_touch by: + 1) inserting touching point P through polygon split logic + 2) triangulating the resulting polygon pieces with triangulate_face_cdt_shapely(...) + + Intention + --------- + This uses your existing CDT triangulation, but only AFTER the touching + point has been added to the touched face topology. + + Parameters + ---------- + faces : List[FaceRecord] + Current face list. + plc_report : Dict[str, Any] + One PLC report with: + - hit_type + - edge + - t_param + - facet_fid + points : List[(x,y,z)] + Global point list. + logger : logging.Logger | None + Optional logger. 
+ planarity_tol_m : float + Maximum allowed face planarity deviation before split attempt. + + Returns + ------- + Tuple[List[FaceRecord], bool, Dict[str, Any]] + (updated_faces, changed, diagnostics) + """ + diag = { + "status": "noop", + "touched_fid": None, + "inserted_vid": None, + "n_split_polys": 0, + "n_output_tris": 0, + } + + if plc_report.get("hit_type") != "endpoint_face_interior_touch": + diag["status"] = "wrong_type" + return faces, False, diag + + facet_fid = plc_report["facet_fid"] + edge = plc_report["edge"] + t = plc_report["t_param"] + + if abs(t) <= 1e-9: + inserted_vid = edge[0] + elif abs(t - 1.0) <= 1e-9: + inserted_vid = edge[1] + else: + diag["status"] = "bad_t_param" + return faces, False, diag + + diag["touched_fid"] = facet_fid + diag["inserted_vid"] = inserted_vid + + fid_to_face = {f.fid: f for f in faces} + touched_face = fid_to_face.get(facet_fid) + if touched_face is None: + diag["status"] = "missing_face" + return faces, False, diag + + # First: split the touched face so inserted_vid becomes part of topology + split_polys = split_face_at_single_interior_vertex( + touched_face, + inserted_vid, + points, + planarity_tol_m=planarity_tol_m, + ) + + if not split_polys: + diag["status"] = "split_failed" + return faces, False, diag + + diag["n_split_polys"] = len(split_polys) + + # Then: triangulate each resulting polygon using your existing CDT function + out_tris = [] + for poly in split_polys: + if len(poly) < 3: + continue + + if len(poly) == 3: + tris = [poly] + else: + tris = triangulate_face_cdt_shapely(poly, points) + + if not tris: + diag["status"] = "triangulation_failed" + if logger: + logger.warning( + "[PLC TRI REPAIR] triangulation failed facet_fid=%d poly=%s", + facet_fid, poly + ) + return faces, False, diag + + for tri in tris: + if len(set(tri)) < 3: + continue + if tri_area2(tri[0], tri[1], tri[2], points) <= 1e-22: + continue + out_tris.append(tri) + + if not out_tris: + diag["status"] = "no_output_tris" + return faces, 
False, diag + + diag["n_output_tris"] = len(out_tris) + + next_fid = max((f.fid for f in faces), default=-1) + 1 + new_faces = [] + + for f in faces: + if f.fid != facet_fid: + new_faces.append(f) + continue + + for tri in out_tris: + new_faces.append( + FaceRecord( + fid=next_fid, + verts=tri, + group=f.group, + group_material=f.group_material, + material=f.material, + ) + ) + next_fid += 1 + + diag["status"] = "ok" + + if logger: + logger.info( + "[PLC TRI REPAIR] repaired facet_fid=%d inserted_vid=%d split_polys=%d output_tris=%d", + facet_fid, + inserted_vid, + diag["n_split_polys"], + diag["n_output_tris"], + ) + + return new_faces, True, diag + +# Helpers for Multi Point-Face Intersection Classification and Repair +def insert_vertex_on_polygon_edge(poly_ids, edge_index, new_vid): + """ + Insert a vertex into a polygon loop on a specific edge. + + Intention + --------- + Used when a new vertex lies on an existing polygon boundary edge. + The vertex must be inserted into the polygon vertex order so that + the polygon boundary remains correct. + + Parameters + ---------- + poly_ids : list[int] + Polygon vertex ids in order (cyclic). + edge_index : int + Index i meaning the edge is: + poly_ids[i] -> poly_ids[(i+1) % n] + new_vid : int + Vertex id to insert on that edge. + + Returns + ------- + list[int] + Updated polygon vertex list with the new vertex inserted. + """ + n = len(poly_ids) + + if n < 2: + return poly_ids + + return ( + poly_ids[: edge_index + 1] + + [new_vid] + + poly_ids[edge_index + 1 :] + ) + +def project_vid_to_2d(vid, points, dropped_axis): + """ + Project one vertex id to 2D using the same dropped axis. + + Parameters + ---------- + vid : int + 1-based vertex id. + points : list[(x,y,z)] + Global point list. + dropped_axis : str + One of "x", "y", "z". + + Returns + ------- + tuple[float, float] + Projected 2D point. 
+ """ + x, y, z = points[vid - 1] + if dropped_axis == "z": + return (x, y) + elif dropped_axis == "y": + return (x, z) + else: + return (y, z) + +def fit_chain_direction_2d(chain_pts_2d): + """ + Fit dominant 2D direction through a set of points using PCA. + + Parameters + ---------- + chain_pts_2d : list[(x,y)] + 2D points. + + Returns + ------- + tuple[(float,float), (float,float)] + (centroid, unit_direction) + """ + arr = np.asarray(chain_pts_2d, dtype=float) + c = arr.mean(axis=0) + + if len(arr) == 1: + return (float(c[0]), float(c[1])), (1.0, 0.0) + + if len(arr) == 2: + d = arr[1] - arr[0] + n = np.linalg.norm(d) + if n <= 0.0: + return (float(c[0]), float(c[1])), (1.0, 0.0) + d /= n + return (float(c[0]), float(c[1])), (float(d[0]), float(d[1])) + + centered = arr - c + cov = centered.T @ centered + vals, vecs = np.linalg.eigh(cov) + d = vecs[:, np.argmax(vals)] + n = np.linalg.norm(d) + if n <= 0.0: + return (float(c[0]), float(c[1])), (1.0, 0.0) + d /= n + return (float(c[0]), float(c[1])), (float(d[0]), float(d[1])) + +def line_segment_intersection_signed(p0, d, a, b, tol=1e-12): + """ + Intersect infinite line p = p0 + s*d with segment a + t*(b-a). + + Parameters + ---------- + p0 : tuple[float, float] + Point on the line. + d : tuple[float, float] + Line direction. + a, b : tuple[float, float] + Segment endpoints. + tol : float + Numerical tolerance. + + Returns + ------- + tuple[float, float] | None + (s, t) where: + - s is signed parameter on the infinite line + - t is segment parameter in [0,1] + Returns None if no segment intersection. 
+ """ + ex = b[0] - a[0] + ey = b[1] - a[1] + + den = d[0] * ey - d[1] * ex + if abs(den) <= tol: + return None + + apx = a[0] - p0[0] + apy = a[1] - p0[1] + + s = (apx * ey - apy * ex) / den + t = (apx * d[1] - apy * d[0]) / den + + if t < -tol or t > 1.0 + tol: + return None + + return (s, t) + +def create_or_reuse_boundary_point_on_edge( + poly_ids, + poly2d, + edge_index, + tseg, + points, + *, + merge_tol_2d=1e-8, +): + """ + Create or reuse a boundary point on one polygon edge. + + Parameters + ---------- + poly_ids : list[int] + Polygon vertex ids in order. + poly2d : list[(x,y)] + Matching projected polygon vertices. + edge_index : int + Edge index i means poly_ids[i] -> poly_ids[i+1]. + tseg : float + Segment parameter on the edge. + points : list[(x,y,z)] + Global point list. + merge_tol_2d : float + Reuse endpoint if hit is within this 2D tolerance. + + Returns + ------- + tuple[int, list[(x,y,z)], bool] + (boundary_vid, updated_points, used_existing_vertex) + """ + n = len(poly_ids) + + a_vid = poly_ids[edge_index] + b_vid = poly_ids[(edge_index + 1) % n] + + a2 = poly2d[edge_index] + b2 = poly2d[(edge_index + 1) % n] + + hit2 = ( + a2[0] + tseg * (b2[0] - a2[0]), + a2[1] + tseg * (b2[1] - a2[1]), + ) + + da = math.sqrt((hit2[0] - a2[0]) ** 2 + (hit2[1] - a2[1]) ** 2) + db = math.sqrt((hit2[0] - b2[0]) ** 2 + (hit2[1] - b2[1]) ** 2) + + if da <= merge_tol_2d: + return a_vid, points, True + if db <= merge_tol_2d: + return b_vid, points, True + + a3 = points[a_vid - 1] + b3 = points[b_vid - 1] + new_p3 = ( + a3[0] + tseg * (b3[0] - a3[0]), + a3[1] + tseg * (b3[1] - a3[1]), + a3[2] + tseg * (b3[2] - a3[2]), + ) + + points = points + [new_p3] + return len(points), points, False + +# --------------------------------------- +# Remove Intersection problem with offset +# --------------------------------------- + +def dist3(a, b): + """ + Euclidean distance between two 3D points. + + Parameters + ---------- + a, b : tuple[float, float, float] + 3D points. 
+ + Returns + ------- + float + Distance between a and b. + """ + dx = a[0] - b[0] + dy = a[1] - b[1] + dz = a[2] - b[2] + return math.sqrt(dx * dx + dy * dy + dz * dz) + + +def get_touching_endpoint_vid_from_plc_report(plc_report, *, t_eps=1e-9): + """ + Return the touching endpoint vertex id from a PLC report. + + Parameters + ---------- + plc_report : dict + One PLC report from detect_segment_facet_intersections_cdt(...). + Expected keys: + - "edge": tuple[int, int] + - "t_param": float + t_eps : float + Tolerance for deciding whether the hit is at the start or end. + + Returns + ------- + int | None + Touching endpoint vertex id, or None if not an endpoint hit. + """ + u, v = plc_report["edge"] + t = plc_report["t_param"] + + if abs(t) <= t_eps: + return u + if abs(t - 1.0) <= t_eps: + return v + return None + + +def get_other_endpoint_vid_from_plc_report(plc_report, *, t_eps=1e-9): + """ + Return the non-touching endpoint vertex id from a PLC report. + + Parameters + ---------- + plc_report : dict + One PLC report from detect_segment_facet_intersections_cdt(...). + t_eps : float + Tolerance for deciding whether the hit is at the start or end. + + Returns + ------- + int | None + Other endpoint vertex id, or None if not an endpoint hit. + """ + u, v = plc_report["edge"] + t = plc_report["t_param"] + + if abs(t) <= t_eps: + return v + if abs(t - 1.0) <= t_eps: + return u + return None + + +def compute_face_unit_normal(face_verts, points, *, eps=1e-30): + """ + Compute a unit normal for one face using Newell's method. + + Parameters + ---------- + face_verts : list[int] + Ordered face vertex ids. + points : list[(x, y, z)] + Global point list. + eps : float + Degeneracy threshold. + + Returns + ------- + tuple[float, float, float] | None + Unit normal if valid, otherwise None. 
+ """ + nx, ny, nz = newell_normal_from_points(face_verts, points) + nlen = math.sqrt(nx * nx + ny * ny + nz * nz) + + if nlen <= eps: + return None + + return (nx / nlen, ny / nlen, nz / nlen) + + +def offset_point_along_vector(point, direction, distance): + """ + Move a 3D point by a signed distance along a direction vector. + + Parameters + ---------- + point : tuple[float, float, float] + Original point. + direction : tuple[float, float, float] + Direction vector (assumed normalized). + distance : float + Signed offset distance in meters. + + Returns + ------- + tuple[float, float, float] + Shifted point. + """ + return ( + point[0] + direction[0] * distance, + point[1] + direction[1] * distance, + point[2] + direction[2] * distance, + ) + + +def move_touching_endpoint_off_face( + faces, + points, + plc_report, + *, + offset_m=0.01, + logger=None, + t_eps=1e-9, +): + """ + Resolve one endpoint_face_interior_touch by moving the touching endpoint + along the face normal in the direction that SHORTENS the edge. + + Intuition + --------- + Two candidate moves are tested: + P_plus = P + offset * n + P_minus = P - offset * n + + The direction that produces the shorter edge to the other endpoint is chosen. + This usually moves the point to the same side of the face as the other endpoint. + + Important + --------- + This is a geometric workaround, not a topological repair. + + Parameters + ---------- + faces : list[FaceRecord] + Current face list. + points : list[(x,y,z)] + Global point list. Updated in place. + plc_report : dict + One PLC report from detect_segment_facet_intersections_cdt(...). + Required keys: + - "hit_type" + - "edge" + - "facet_fid" + - "t_param" + offset_m : float + Offset distance in meters. Default 0.01 m = 1 cm. + logger : logging.Logger | None + Optional logger. + t_eps : float + Endpoint classification tolerance. 
+ + Returns + ------- + tuple[list[(x,y,z)], bool, dict] + (updated_points, changed, diagnostics) + """ + diag = { + "status": "noop", + "touched_vid": None, + "other_vid": None, + "facet_fid": plc_report.get("facet_fid"), + "old_point": None, + "new_point": None, + "normal": None, + "offset_m": offset_m, + "len_plus": None, + "len_minus": None, + "chosen_direction": None, + } + + if plc_report.get("hit_type") != "endpoint_face_interior_touch": + diag["status"] = "wrong_hit_type" + return points, False, diag + + touched_vid = get_touching_endpoint_vid_from_plc_report(plc_report, t_eps=t_eps) + other_vid = get_other_endpoint_vid_from_plc_report(plc_report, t_eps=t_eps) + + if touched_vid is None or other_vid is None: + diag["status"] = "not_an_endpoint_hit" + return points, False, diag + + diag["touched_vid"] = touched_vid + diag["other_vid"] = other_vid + + facet_fid = plc_report["facet_fid"] + fid_to_face = {f.fid: f for f in faces} + touched_face = fid_to_face.get(facet_fid) + + if touched_face is None: + diag["status"] = "missing_facet_face" + return points, False, diag + + normal = compute_face_unit_normal(touched_face.verts, points) + if normal is None: + diag["status"] = "invalid_face_normal" + return points, False, diag + + old_point = points[touched_vid - 1] + other_point = points[other_vid - 1] + + p_plus = offset_point_along_vector(old_point, normal, +offset_m) + p_minus = offset_point_along_vector(old_point, normal, -offset_m) + + len_plus = dist3(p_plus, other_point) + len_minus = dist3(p_minus, other_point) + + diag["old_point"] = old_point + diag["normal"] = normal + diag["len_plus"] = len_plus + diag["len_minus"] = len_minus + + if len_plus < len_minus: + new_point = p_plus + chosen = "+normal" + else: + new_point = p_minus + chosen = "-normal" + + points[touched_vid - 1] = new_point + + diag["status"] = "ok" + diag["new_point"] = new_point + diag["chosen_direction"] = chosen + + if logger: + logger.info( + "[PLC OFFSET] moved vid=%d away from 
facet_fid=%d by %.6f m "
+ "using shorter-edge rule other_vid=%d "
+ "len(+n)=%.6f len(-n)=%.6f chosen=%s "
+ "old=(%.6f,%.6f,%.6f) new=(%.6f,%.6f,%.6f) normal=(%.6f,%.6f,%.6f)",
+ touched_vid,
+ facet_fid,
+ offset_m,
+ other_vid,
+ len_plus,
+ len_minus,
+ chosen,
+ old_point[0], old_point[1], old_point[2],
+ new_point[0], new_point[1], new_point[2],
+ normal[0], normal[1], normal[2],
+ )
+
+ return points, True, diag
+
+def repair_plc_by_offset_iterative(
+ faces,
+ points,
+ *,
+ logger=None,
+ max_iters=20,
+ offset_m=0.1,
+):
+ """
+ Iteratively remove endpoint_face_interior_touch intersections by offsetting
+ the touching endpoint away from the touched face.
+
+ Strategy
+ --------
+ - detect PLC intersections
+ - select endpoint_face_interior_touch cases
+ - move one touching endpoint by offset_m along the touched face normal
+ - re-run detection
+ - repeat until stable or max_iters reached
+
+ Parameters
+ ----------
+ faces : list[FaceRecord]
+ Current face list.
+ points : list[(x,y,z)]
+ Global point list. Updated in place.
+ logger : logging.Logger | None
+ Optional logger.
+ max_iters : int
+ Maximum number of offset-repair iterations.
+ offset_m : float
+ Offset distance in meters. Default 0.1 m = 10 cm.
+ + Returns + ------- + tuple[list[(x,y,z)], bool, dict] + (updated_points, changed_any, summary) + + Summary keys + ------------ + - iterations + - applied_repairs + - stopped_reason + - remaining_plc_hits + - remaining_endpoint_face_hits + """ + summary = { + "iterations": 0, + "applied_repairs": 0, + "stopped_reason": "unknown", + "remaining_plc_hits": 0, + "remaining_endpoint_face_hits": 0, + } + + changed_any = False + + for it in range(1, max_iters + 1): + summary["iterations"] = it + + plc_hits = detect_segment_facet_intersections_cdt( + faces, + points, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + summary["remaining_plc_hits"] = len(plc_hits) + + if not plc_hits: + summary["remaining_endpoint_face_hits"] = 0 + summary["stopped_reason"] = "no_plc_hits" + if logger: + logger.info("[PLC OFFSET] stable after %d iterations: no PLC hits", it - 1) + return points, changed_any, summary + + endpoint_face_hits = [ + r for r in plc_hits + if r.get("hit_type") == "endpoint_face_interior_touch" + ] + summary["remaining_endpoint_face_hits"] = len(endpoint_face_hits) + + if not endpoint_face_hits: + summary["stopped_reason"] = "no_endpoint_face_interior_touch" + if logger: + logger.info("[PLC OFFSET] stop: PLC hits remain, but none are endpoint_face_interior_touch") + return points, changed_any, summary + + # Apply one repair at a time, then re-detect. 
+ target = endpoint_face_hits[0] + + points, changed, diag = move_touching_endpoint_off_face( + faces, + points, + target, + offset_m=offset_m, + logger=logger, + ) + + if not changed: + summary["stopped_reason"] = "selected_offset_not_applied" + if logger: + logger.info("[PLC OFFSET] stop: selected report was not changed; diag=%s", diag) + return points, changed_any, summary + + changed_any = True + summary["applied_repairs"] += 1 + + if logger: + logger.info("[PLC OFFSET] applied iter=%d diag=%s", it, diag) + + summary["stopped_reason"] = "max_iters_reached" + if logger: + logger.warning("[PLC OFFSET] reached max_iters=%d", max_iters) + + return points, changed_any, summary + +# -------- REPAIR MULTI HIT POINT-FACE INTERSECTION BY SPLITTING WITH NEW VERTEX -------- +def _find_face_by_fid(faces: List["FaceRecord"], fid: int): + + for f in faces: + + if f.fid == fid: + + return f + + return None + +def _norm(v): + + return math.sqrt(sum(x * x for x in v)) + +def _sub(a, b): + + return (a[0] - b[0], a[1] - b[1], a[2] - b[2]) + +def _dot(a, b): + + return a[0]*b[0] + a[1]*b[1] + a[2]*b[2] + +def _cross(a, b): + + return ( + + a[1]*b[2] - a[2]*b[1], + + a[2]*b[0] - a[0]*b[2], + + a[0]*b[1] - a[1]*b[0], + + ) + +def _unit(v): + + n = _norm(v) + + if n <= 1e-30: + + return (0.0, 0.0, 0.0) + + return (v[0]/n, v[1]/n, v[2]/n) + +def _distance(a, b): + + return _norm(_sub(a, b)) + +def _face_plane_basis(face, points): + + """ + + Return centroid + orthonormal basis (u,v,n) for face plane. + + """ + + n = newell_normal_from_points(face.verts, points) + + n = _unit(n) + + c = polygon_centroid(face.verts, points) + + # choose tangent axis + + ref = (1.0, 0.0, 0.0) + + if abs(_dot(ref, n)) > 0.9: + + ref = (0.0, 1.0, 0.0) + + u = _unit(_cross(ref, n)) + + v = _unit(_cross(n, u)) + + return c, u, v, n + +def _project_to_face_2d(p, c, u, v): + + """ + + Project 3D point to local face 2D coordinates. 
+ + """ + + d = _sub(p, c) + + return (_dot(d, u), _dot(d, v)) + +def _point_from_2d(xy, c, u, v): + + """ + + Convert local 2D back to 3D point. + + """ + + return ( + + c[0] + xy[0]*u[0] + xy[1]*v[0], + + c[1] + xy[0]*u[1] + xy[1]*v[1], + + c[2] + xy[0]*u[2] + xy[1]*v[2], + + ) + +def polygon_centroid( + + loop_vids: List[int], + + points: List[Tuple[float, float, float]], + +) -> Tuple[float, float, float]: + + """ + + Compute the centroid of a polygon from its vertex coordinates. + + Parameters + + ---------- + + loop_vids : list[int] + + Ordered 1-based vertex ids of the polygon. + + points : list[(x, y, z)] + + Global vertex list. + + Returns + + ------- + + tuple[float, float, float] + + Approximate centroid of the polygon. + + Notes + + ----- + + - Uses arithmetic mean of polygon vertices. + + - Stable and sufficient for: + + * face-plane reference point + + * orientation tests + + * local projection basis origin + + - Does not require triangulation. + + """ + + if not loop_vids: + + raise ValueError("polygon_centroid(): empty vertex loop") + + sx = sy = sz = 0.0 + + n = len(loop_vids) + + for vid in loop_vids: + + p = points[vid - 1] # 1-based ids + + sx += p[0] + + sy += p[1] + + sz += p[2] + + return (sx / n, sy / n, sz / n) + +def _get_or_create_vertex(points, p, tol=1e-9): + + for i, q in enumerate(points, start=1): + + if _distance(p, q) <= tol: + + return i + + points.append(p) + + return len(points) + +# ---------------------------------------------------------- + +# 1. Classify multi-hit same-face collinearity + +# ---------------------------------------------------------- + +def classify_multi_hit_face_collinear( + + face: "FaceRecord", + + reports: List[Dict[str, Any]], + + points: List[Tuple[float, float, float]], + + *, + + tol_m: float = 1e-4, + +): + + """ + + Determine whether touch points on one face are approximately collinear. 
+ + Returns + + ------- + + dict: + + { + + "is_collinear": bool, + + "ordered_points_2d": [...], + + "ordered_points_3d": [...], + + "max_dev": float + + } + + """ + + if len(reports) < 2: + + return { + + "is_collinear": False, + + "reason": "need_at_least_2_points", + + } + + c, u, v, n = _face_plane_basis(face, points) + + pts3 = [r["point"] for r in reports] + + pts2 = [_project_to_face_2d(p, c, u, v) for p in pts3] + + # choose farthest pair + + best_i = 0 + + best_j = 1 + + best_d = -1.0 + + for i in range(len(pts2)): + + for j in range(i + 1, len(pts2)): + + d = math.dist(pts2[i], pts2[j]) + + if d > best_d: + + best_d = d + + best_i, best_j = i, j + + a = pts2[best_i] + + b = pts2[best_j] + + dx = b[0] - a[0] + + dy = b[1] - a[1] + + L = math.sqrt(dx*dx + dy*dy) + + if L <= 1e-12: + + return { + + "is_collinear": False, + + "reason": "degenerate_points", + + } + + # perpendicular distances + + max_dev = 0.0 + + params = [] + + for p in pts2: + + px = p[0] - a[0] + + py = p[1] - a[1] + + t = (px*dx + py*dy) / (L*L) + + params.append(t) + + perp = abs(px*dy - py*dx) / L + + max_dev = max(max_dev, perp) + + ordered = sorted(zip(params, pts2, pts3), key=lambda x: x[0]) + logger.info("IM HERE 2") + logger.info("max_dev: %f", max_dev) + logger.info("tol_m: %f", tol_m) + + return { + + "is_collinear": max_dev <= 0.01, + + "max_dev": max_dev, + + "ordered_points_2d": [x[1] for x in ordered], + + "ordered_points_3d": [x[2] for x in ordered], + + } + +# ---------------------------------------------------------- + +# 2. Structured connection repair + +# ---------------------------------------------------------- + +def repair_multi_hit_face_collinear_chain( + + faces: List["FaceRecord"], + + reports: List[Dict[str, Any]], + + points: List[Tuple[float, float, float]], + + *, + + logger=None, + +): + + """ + + Replace one touched face by structured split following collinear chain. 
+ + Strategy: + + - take touched face + + - insert ordered touch vertices + + - connect chain to nearest two boundary vertices + + - split into two polygons + + Returns + + ------- + + faces, points, changed, diag + + """ + + facet_fid = reports[0]["facet_fid"] + + face = _find_face_by_fid(faces, facet_fid) + + if face is None: + + return faces, points, False, {"status": "face_not_found"} + + cls = classify_multi_hit_face_collinear(face, reports, points) + logger.info("im heree") + logger.info("cls=%s", cls) + if not cls["is_collinear"]: + + return faces, points, False, {"status": "not_collinear"} + + chain_vids = [] + + for p in cls["ordered_points_3d"]: + + vid = _get_or_create_vertex(points, p) + + chain_vids.append(vid) + + # choose nearest boundary vertices to chain ends + + face_pts = [points[v - 1] for v in face.verts] + + start_vid = min( + + face.verts, + + key=lambda vid: _distance(points[vid - 1], points[chain_vids[0] - 1]) + + ) + + end_vid = min( + + face.verts, + + key=lambda vid: _distance(points[vid - 1], points[chain_vids[-1] - 1]) + + ) + + split_chain = [start_vid] + chain_vids + [end_vid] + + # build two loops along original boundary + + verts = face.verts + + i0 = verts.index(start_vid) + + i1 = verts.index(end_vid) + + if i0 <= i1: + + path1 = verts[i0:i1 + 1] + + path2 = verts[i1:] + verts[:i0 + 1] + + else: + + path1 = verts[i0:] + verts[:i1 + 1] + + path2 = verts[i1:i0 + 1] + + new_loop1 = clean_face_loop(path1 + list(reversed(split_chain[1:-1]))) + + new_loop2 = clean_face_loop(path2 + split_chain[1:-1]) + + if len(new_loop1) < 3 or len(new_loop2) < 3: + + return faces, points, False, {"status": "bad_split"} + + # replace face + + new_faces = [] + + for f in faces: + + if f.fid != facet_fid: + + new_faces.append(f) + + else: + + new_faces.append(FaceRecord( + + fid=f.fid, + + verts=new_loop1, + + group=f.group, + + group_material=f.group_material, + + material=f.material, + + )) + + new_faces.append(FaceRecord( + + fid=max(ff.fid for ff in 
faces) + 1, + + verts=new_loop2, + + group=f.group, + + group_material=f.group_material, + + material=f.material, + + )) + + diag = { + + "status": "ok", + + "repair_type": "collinear_chain_split", + + "facet_fid": facet_fid, + + "n_chain_points": len(chain_vids), + + } + + if logger: + + logger.info("[PLC REPAIR] structured collinear split face=%d", facet_fid) + + return new_faces, points, True, diag + +def orient_faces_consistently_by_adjacency( + faces: "List[FaceRecord]", + logger=None, +) -> Dict[str, Any]: + """ + Make polygon winding globally consistent across shared edges. + Guarantee: for every manifold shared edge (used by exactly 2 faces), + the edge direction is opposite in the two faces. + Returns diagnostics: + { + "components": int, + "flipped_faces": int, + "boundary_edges": int, + "nonmanifold_edges": int + } + """ + + def uedge(a: int, b: int) -> Tuple[int, int]: + return (a, b) if a < b else (b, a) + + # For each face, we need to know its directed edges. + # We'll store per undirected edge the directed form as it appears in the face. + # Example: if face uses edge as (a->b) we store dir=(a,b). 
+ edge_to_uses: Dict[Tuple[int, int], List[Tuple[int, Tuple[int, int]]]] = defaultdict(list) + + for fi, f in enumerate(faces): + vs = f.verts + n = len(vs) + if n < 2: + continue + for i in range(n): + a = vs[i] + b = vs[(i + 1) % n] + edge_to_uses[uedge(a, b)].append((fi, (a, b))) + + boundary_edges = 0 + nonmanifold_edges = 0 + for e, uses in edge_to_uses.items(): + if len(uses) == 1: + boundary_edges += 1 + elif len(uses) > 2: + nonmanifold_edges += 1 + + if logger: + logger.info( + "[ORIENT] edges=%d boundary=%d nonmanifold=%d", + len(edge_to_uses), boundary_edges, nonmanifold_edges + ) + + # Build face adjacency graph via manifold edges (exactly 2 incident faces) + face_adj: Dict[int, List[Tuple[int, Tuple[int,int], Tuple[int,int]]]] = defaultdict(list) + # entry: face_adj[fa].append((fb, undirected_edge, (fa_dir), (fb_dir))) but we can keep dirs separately. + + for e, uses in edge_to_uses.items(): + if len(uses) != 2: + continue # skip boundary and nonmanifold for propagation (can't enforce) + (f0, dir0), (f1, dir1) = uses + face_adj[f0].append((f1, e, dir0, dir1)) + face_adj[f1].append((f0, e, dir1, dir0)) + + # BFS over connected components + visited = [False] * len(faces) + flipped = [False] * len(faces) # whether we have flipped face fi relative to its original + flipped_count = 0 + components = 0 + + def flip_face(fi: int): + nonlocal flipped_count + faces[fi].verts.reverse() + flipped[fi] = not flipped[fi] + flipped_count += 1 + + for seed in range(len(faces)): + if visited[seed]: + continue + + # If isolated face (no manifold adjacency), still counts as a component + components += 1 + visited[seed] = True + q = deque([seed]) + + while q: + fa = q.popleft() + for (fb, e, dir_a, dir_b) in face_adj.get(fa, []): + if not visited[fb]: + # We want edge direction opposite between faces. + # If fa uses e as (u->v), then fb must use (v->u). + # Our stored dir_* are directed pairs as they appear in the *current* face ordering. 
+ # BUT note: if we already flipped fa earlier, dir_a from the map is stale. + # So we must recompute the current directed edge in each face on the fly. + # + # To do that, we find the direction of e in the current loop of fa and fb. + + def current_edge_dir(face_verts: List[int], undirected_edge: Tuple[int,int]) -> Optional[Tuple[int,int]]: + u, v = undirected_edge + n = len(face_verts) + for i in range(n): + a = face_verts[i] + b = face_verts[(i + 1) % n] + if (a == u and b == v) or (a == v and b == u): + return (a, b) + return None + + da = current_edge_dir(faces[fa].verts, e) + db = current_edge_dir(faces[fb].verts, e) + + if da is None or db is None: + # should not happen unless face loops changed strangely + if logger: + logger.warning("[ORIENT] missing edge %s in fa=%d or fb=%d", e, fa, fb) + visited[fb] = True + q.append(fb) + continue + + # If same direction, flip neighbor to make it opposite + if da == db: + flip_face(fb) + if logger: + logger.debug("[ORIENT] flipped face fid=%d to fix shared edge %s", faces[fb].fid, e) + + visited[fb] = True + q.append(fb) + else: + # already visited; we could optionally verify consistency + pass + + return { + "components": components, + "flipped_faces": flipped_count, + "boundary_edges": boundary_edges, + "nonmanifold_edges": nonmanifold_edges, + } diff --git a/app/services/geometry_service.py b/app/services/geometry_service.py index 3801686..5be3259 100644 --- a/app/services/geometry_service.py +++ b/app/services/geometry_service.py @@ -1,12 +1,18 @@ +from __future__ import annotations +from typing import List, Dict, Any import logging import math import os import zipfile -import math import rhino3dm from flask_smorest import abort +from app.services.geometry_inspection_service import detect_boundary_edges, detect_degenerate_faces, detect_possible_holes_from_faces, detect_segment_facet_intersections_cdt, detect_t_junctions_from_facerecords_global_plc, inspect_face_planarity_issues +from 
app.services.geometry_export_service import export_processed_topology_to_gmsh_geo, export_processed_topology_to_obj +from app.services.geometry_diagnostic_log_service import append_repair_report, build_issue_detection_report, build_revalidation_report, build_topology_report, create_geometry_processing_report, log_topology, write_geometry_processing_report +from app.services.geometry_parsing_service import extract_rhino_materials, process_and_instantiate_faces, parse_obj_file, deduplicate_vertices +from app.services.geometry_repair_service import fix_t_junctions_iterative, flip_all_faces_if_majority_inward, remove_degenerate_faces, repair_plc_by_offset_iterative, repair_plc_single_splits_iterative, sort_vertices_deterministically, trim_segment_face_intersections_iterative import config from app.db import db from app.factory.geometry_converter_factory.GeometryConversionFactory import ( @@ -24,12 +30,13 @@ def get_geometry_by_id(geometry_id): return results -def start_geometry_check_task(file_upload_id): +def start_geometry_check_task(file_upload_id, use_geometry_pipeline): """ This function is a wrapper over 3dm mapper. It creates a task and geometry given a file upload id. Then calls the map_to_3dm function to map the given geometry file format to a rhino model. :param file_upload_id: represents an id related to the uploaded file + :param use_geometry_pipeline: boolean indicating whether to use the geometry pipeline for the conversion :return: Geometry: returns an object of Geometry model corresponding to the uploaded file """ try: @@ -41,7 +48,7 @@ def start_geometry_check_task(file_upload_id): db.session.add(geometry) db.session.commit() - result = map_to_3dm_and_geo(geometry.id) + result = map_to_3dm_and_geo(geometry.id, use_geometry_pipeline) if not result: task.status = Status.Error task.message = "An error is encountered during the geometry processing!" 
@@ -67,7 +74,7 @@ def get_geometry_result(task_id): return Geometry.query.filter_by(taskId=task_id).first() -def map_to_3dm_and_geo(geometry_id): +def map_to_3dm_and_geo(geometry_id, use_geometry_pipeline): geometry = Geometry.query.filter_by(id=geometry_id).first() file = File.query.filter_by(id=geometry.inputModelUploadId).first() task = Task.query.filter_by(id=geometry.taskId).first() @@ -118,9 +125,25 @@ def map_to_3dm_and_geo(geometry_id): if config.FeatureToggle.is_enabled("enable_geo_conversion"): try: - if not obj_to_gmsh_geo_precise(obj_path, geo_path, rhino3dm_path): - logger.error("Can not generate a geo file") - return False + if use_geometry_pipeline: + # repair the obj + repaired_obj_path, issue_report_path = generate_repaired_obj_and_issue_report(obj_path, rhino3dm_path, tol=1e-2, conformize_tol=None) + # recreate the 3dm from the repaired obj + if not conversion_strategy.generate_3dm(repaired_obj_path, rhino3dm_path): + logger.error("Can not generate a 3dm file") + return False + if not convert_repaired_obj_to_gmsh_geo(repaired_obj_path, geo_path, rhino3dm_path): + logger.error("Can not generate a geo file") + return False + + # create a zip file fromm the repaired version + with zipfile.ZipFile(zip_file_path, "w") as zipf: + zipf.write(rhino3dm_path, arcname=f"{file_name}.3dm") + logger.warning("Geometry pipeline is enabled, obj repaired and converted to geo with the pipeline.") + else: + if not obj_to_gmsh_geo_precise(obj_path, geo_path, rhino3dm_path): + logger.error("Can not generate a geo file") + return False file_geo = File(fileName=f"{file_name}.geo") db.session.add(file_geo) @@ -777,3 +800,549 @@ def is_outward_facing(face): print(f"Wrote {geo_file}: {len(unique_vertices)} points, {next_line_id-1} lines, {len(face_line_loops)} surfaces.") return True + +def obj_to_gmsh_geo_precise_with_repair_pipeline( + obj_file, + geo_file, + rhino3dm_path, + volume_name="RoomVolume", + tol=1e-2, + conformize_tol=None, +): + """ + OBJ -> Gmsh .geo + This 
version: + 1) Reads Rhino .3dm file to extract material names associated with mesh objects. + 2) Parses OBJ file: extracts vertices (with Y-up to Z-up coordinate flip), faces, groups, and materials. + 3) Deduplicates vertices within tolerance. + 4) Maps faces to unique vertices, checks for degeneracy and planarity (no triangulation of ngons). + 5) Detects T-junctions in the geometry. + 6) Removes degenerate faces (fatal cases). + 7) Sorts vertices deterministically for reproducibility. + 8) Iteratively fixes T-junctions by splitting edges. + 9) Orients faces consistently using adjacency. + 10) Detects and repairs piecewise linear complex (PLC) violations (segment-facet intersections). + 11) Builds edges, line loops, and surface loops. + Output: + - Writes Gmsh .geo file with Point, Line, Line Loop, Plane Surface entities. + - Creates Physical Volume, Physical Surfaces (by material), and Physical Lines. + - Generates processing report and processed OBJ export. + """ + + if conformize_tol is None: + conformize_tol = max(1e-7, tol) + + # ----------------------------- + # 1) Rhino materials + # ----------------------------- + # Read the Rhino .3dm file and extract material names assigned to each mesh object. + # These material names are stored as user strings on the geometry and are mapped + # by object ID so they can later be associated with the corresponding OBJ faces. 
+ material_id_array = extract_rhino_materials(rhino3dm_path) + + + # ----------------------------- + # 2) Parse OBJ file + # ----------------------------- + vertices, raw_faces, face_groups, face_group_materials = parse_obj_file(obj_file) + + # ----------------------------- + # 3) Deduplicate vertices + # ----------------------------- + unique_vertices, orig_to_unique = deduplicate_vertices(vertices, tol) + + + # -------- 4) Map faces to unique vertices + planarirt check for every face + remove degenerate face + optional triangulation -------- + faces = process_and_instantiate_faces( + raw_faces, + face_groups, + face_group_materials, + material_id_array, + orig_to_unique + ) + logger.warning(faces[:5]) + problematic_faces = inspect_face_planarity_issues(faces, unique_vertices) + + tjs = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + + if tjs: + logger.warning("[RAW FACES TJUNC] found %d (showing up to 20)", len(tjs)) + for r in tjs[:20]: + logger.warning("[RAW FACES TJUNC] edge=%s split_v=%d t=%.6f edge_fids=%s v_fids=%s", + r["edge"], r["split_vertex"], r["t_param"], + r["edge_face_fids"], r["v_face_fids"]) + + topology_before_repair = {} + topology_before_repair.update(build_topology_report(unique_vertices, faces)) + issue_detection_report = { + "non_coplanar_faces": problematic_faces, + "T-junctions": tjs, + "possible_holes": detect_possible_holes_from_faces(faces, unique_vertices), + "boundary_edges": detect_boundary_edges(faces, unique_vertices), + "degenerate_faces": detect_degenerate_faces(faces, unique_vertices) + } + + + log_topology(logger, "after_mapping", faces) + + faces, fatal_removed = remove_degenerate_faces( + faces, + unique_vertices, + fatal_area_tol=1e-18, + logger=logger, + ) + + report_path = geo_file.replace(".geo", "_report.json") + repair_report: List[Dict[str, Any]] = [] + append_repair_report( + repair_report, + 
repair_type="remove_degenerate_faces", + affected_count=fatal_removed, + before={"faces": len(faces) + fatal_removed}, + after={"faces": len(faces)}, + ) + log_topology(logger, "after_mapping", faces) + + tjs = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + + if tjs: + logger.warning("[CLEAN FACES TJUNC] found %d (showing up to 20)", len(tjs)) + for r in tjs[:20]: + logger.warning("[CLEAN FACES TJUNC] edge=%s split_v=%d t=%.6f edge_fids=%s v_fids=%s", + r["edge"], r["split_vertex"], r["t_param"], + r["edge_face_fids"], r["v_face_fids"]) + + + before_repair_tjs = tjs + # ----------------------------- + # 5) Sort vertices deterministically + # ----------------------------- + unique_vertices, faces = sort_vertices_deterministically(unique_vertices, faces) + + # 6) Iteratively fix T-junctions by splitting edges + if len(tjs) > 0: + faces, changed = fix_t_junctions_iterative( + faces, + unique_vertices, + tol=conformize_tol, + max_iters=100, + logger=logger, + ) + tjs = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + append_repair_report( + repair_report, + repair_type="fix_t_junctions_iterative", + after=len(tjs), + before=len(before_repair_tjs), + affected_count=len(before_repair_tjs) - len(tjs), + ) + if tjs: + logger.warning("[AFTER FIX TJUNC FACES TJUNC] found %d (showing up to 20)", len(tjs)) + for r in tjs[:20]: + logger.warning("[AFTER FIX TJUNC FACES TJUNC] edge=%s split_v=%d t=%.6f edge_fids=%s v_fids=%s", + r["edge"], r["split_vertex"], r["t_param"], + r["edge_face_fids"], r["v_face_fids"]) + + # 7) GLOBAL ORIENTATION (adjacency) + optional outward flip + # ----------------------------- + room_center = tuple( + sum(v[i] for v in unique_vertices) / len(unique_vertices) + for i in range(3) + ) + + # diag = 
orient_faces_consistently_by_adjacency(faces, logger=logger) + # append_repair_report( + # repair_report, + # repair_type="orient_faces_consistently", + # details=diag, + # ) + # logger.info("[ORIENT] diag=%s", diag) + + flip_all_faces_if_majority_inward(faces, unique_vertices, room_center, logger=logger) + + # Check for segment-facet intersections (PLC violations) after orientation fix + plc_hits = detect_segment_facet_intersections_cdt( + faces, + unique_vertices, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + issue_detection_report["intersections"] = { + "count": len(plc_hits), + "intersections_hits": plc_hits, + } + + + # --------------------------------------------------------- + # 8) PLC detection after orientation fix + # --------------------------------------------------------- + + if plc_hits: + logger.error("[PLC] segment–facet intersections found: %d (showing up to 20)", len(plc_hits)) + for r in plc_hits[:20]: + x,y,z = r["point"] + logger.error( + "[PLC] type=%s edge=%s edge_fids=%s intersects facet_fid=%d tri=%s " + "at P=(%.6f,%.6f,%.6f) t=%.6f bary=(%.6f,%.6f,%.6f) planar=%s", + r["hit_type"], + r["edge"], + r["edge_fids"], + r["facet_fid"], + r["facet_tri"], + r["point"][0], r["point"][1], r["point"][2], + r["t_param"], + r["bary_u"], r["bary_v"], r["bary_w"], + r["facet_planarity_flag"], + ) + + # issue_detection_report["intersections"] = { + # "count": len(plc_hits), + # "by_type": dict(Counter(hit.get("hit_type", "unknown") for hit in plc_hits)), + # "samples": plc_hits[:20], + # } + # --------------------------------------------------------- + # 8.2) Iterative PLC repair + # --------------------------------------------------------- + # faces, unique_vertices, plc_repair_changed, plc_repair_summary = repair_plc_single_splits_iterative( + # faces, + # unique_vertices, + # room_center, + # logger=logger, + # max_iters=20, + # 
planarity_tol_m=1e-6, + # ) + + # append_repair_report( + # repair_report, + # repair_type="repair_plc_single_splits_iterative", + # affected_count=len(plc_hits), + # before={"intersections": len(plc_hits)}, + # after=None, + # details=plc_repair_summary, + # ) + + # logger.info("[PLC REPAIR SUMMARY] %s", plc_repair_summary) + + # optional final PLC report after all attempted repairs + plc_hits = detect_segment_facet_intersections_cdt( + faces, + unique_vertices, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + if plc_hits: + faces, unique_vertices, trim_changed, trim_summary = trim_segment_face_intersections_iterative( + faces, + unique_vertices, + room_center, + max_iters=20, + tol=1e-9, + logger=logger, + ) + + if plc_hits: + logger.error("[PLC FINAL] remaining intersections: %d (showing up to 20)", len(plc_hits)) + for r in plc_hits[:20]: + logger.error( + "[PLC FINAL] type=%s edge=%s edge_fids=%s intersects facet_fid=%d tri=%s " + "at P=(%.6f,%.6f,%.6f) t=%.6f bary=(%.6f,%.6f,%.6f) planar=%s", + r["hit_type"], + r["edge"], + r["edge_fids"], + r["facet_fid"], + r["facet_tri"], + r["point"][0], r["point"][1], r["point"][2], + r["t_param"], + r["bary_u"], r["bary_v"], r["bary_w"], + r["facet_planarity_flag"], + ) + + faces, unique_vertices, plc_repair_changed, plc_repair_summary = repair_plc_single_splits_iterative( + faces, + unique_vertices, + room_center, + logger=logger, + max_iters=20, + planarity_tol_m=1e-6, + ) + + append_repair_report( + repair_report, + repair_type="repair_plc_single_splits_iterative", + affected_count=len(plc_hits) - plc_repair_summary["remaining_plc_hits"], + before={"intersections": len(plc_hits)}, + after={"intersections": plc_repair_summary["remaining_plc_hits"]}, + ) + + + # 9.2) Export processed topology to OBJ + obj_output_path = geo_file.replace(".geo", "_processed.obj") + export_processed_topology_to_obj(obj_output_path, 
unique_vertices, faces) + + num_lines, num_surfaces = export_processed_topology_to_gmsh_geo(faces, unique_vertices, geo_file, volume_name) + + tjs_final = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + + plc_hits_final = detect_segment_facet_intersections_cdt( + faces, + unique_vertices, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + revalidation_report = { + "non_coplanar_faces": inspect_face_planarity_issues(faces, unique_vertices), + "T-junctions": tjs_final, + "intersections": { + "count": len(plc_hits_final), + "intersection_hits": plc_hits_final + }, + "possible_holes": detect_possible_holes_from_faces(faces, unique_vertices), + "boundary_edges": detect_boundary_edges(faces, unique_vertices), + "degenerate_faces": detect_degenerate_faces(faces, unique_vertices) + } + + report = create_geometry_processing_report( + obj_file=obj_file, + repaired_obj=obj_output_path, + topology_before_repair=topology_before_repair, + issue_detection_report=issue_detection_report, + repair_report=repair_report, + revalidation_report=revalidation_report, + ) + write_geometry_processing_report(report, report_path) + + return True + +def generate_repaired_obj_and_issue_report(obj_file,rhino3dm_path, tol=1e-2, conformize_tol=None): + if conformize_tol is None: + conformize_tol = max(1e-7, tol) + + material_id_array = extract_rhino_materials(rhino3dm_path) + + vertices, raw_faces, face_groups, face_group_materials = parse_obj_file(obj_file) + unique_vertices, orig_to_unique = deduplicate_vertices(vertices, tol) + faces = process_and_instantiate_faces( + raw_faces, + face_groups, + face_group_materials, + material_id_array, + orig_to_unique + ) + problematic_faces = inspect_face_planarity_issues(faces, unique_vertices) + tjs = 
detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + before_repair_tjs = tjs + unique_vertices, faces = sort_vertices_deterministically(unique_vertices, faces) + report_path = obj_file.replace(".obj", "_report.json") + repair_report: List[Dict[str, Any]] = [] + + + topology_before_repair = {} + topology_before_repair.update(build_topology_report(unique_vertices, faces)) + issue_detection_report = { + "non_coplanar_faces": problematic_faces, + "T-junctions": tjs, + "possible_holes": detect_possible_holes_from_faces(faces, unique_vertices), + "boundary_edges": detect_boundary_edges(faces, unique_vertices), + "degenerate_faces": detect_degenerate_faces(faces, unique_vertices) + } + + faces, fatal_removed = remove_degenerate_faces( + faces, + unique_vertices, + fatal_area_tol=1e-18, + logger=logger, + ) + if fatal_removed > 0: + append_repair_report( + repair_report, + repair_type="remove_degenerate_faces", + affected_count=fatal_removed, + before={"faces": len(faces) + fatal_removed}, + after={"faces": len(faces)}, + ) + + if len(tjs) > 0: + faces, changed = fix_t_junctions_iterative( + faces, + unique_vertices, + tol=conformize_tol, + max_iters=100, + logger=logger, + ) + tjs = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + append_repair_report( + repair_report, + repair_type="fix_t_junctions_iterative", + after=len(tjs), + before=len(before_repair_tjs), + affected_count=len(before_repair_tjs) - len(tjs), + ) + + append_repair_report( + repair_report, + repair_type="fix_t_junctions_iterative", + after=len(tjs), + before=len(before_repair_tjs), + affected_count=len(before_repair_tjs) - len(tjs), + ) + + room_center = tuple( + sum(v[i] for v in unique_vertices) / len(unique_vertices) + for i in range(3) + ) + + 
flip_all_faces_if_majority_inward(faces, unique_vertices, room_center, logger=logger) + + # Check for segment-facet intersections (PLC violations) after orientation fix + plc_hits = detect_segment_facet_intersections_cdt( + faces, + unique_vertices, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + if plc_hits: + issue_detection_report["intersections"] = { + "count": len(plc_hits), + "intersections_hits": plc_hits, + } + faces, unique_vertices, trim_changed, trim_summary = trim_segment_face_intersections_iterative( + faces, + unique_vertices, + room_center, + max_iters=20, + tol=1e-9, + logger=logger, + ) + faces, unique_vertices, plc_repair_changed, plc_repair_summary = repair_plc_single_splits_iterative( + faces, + unique_vertices, + room_center, + logger=logger, + max_iters=20, + planarity_tol_m=1e-6, + ) + append_repair_report( + repair_report, + repair_type="repair_plc_single_splits_iterative", + affected_count=len(plc_hits) - plc_repair_summary["remaining_plc_hits"], + before={"intersections": len(plc_hits)}, + after={"intersections": plc_repair_summary["remaining_plc_hits"]}, + ) + obj_output_path = obj_file.replace(".obj", "_repaired.obj") + export_processed_topology_to_obj(obj_output_path, unique_vertices, faces) + tjs_final = detect_t_junctions_from_facerecords_global_plc( + faces, + unique_vertices, + tol=conformize_tol if conformize_tol is not None else max(1e-7, tol), + max_reports=2000, + ) + + plc_hits_final = detect_segment_facet_intersections_cdt( + faces, + unique_vertices, + warn_planar_tol_m=1e-4, + fatal_planar_tol_m=1e-3, + eps=1e-10, + bbox_pad=1e-9, + max_reports=2000, + skip_warped_faces=True, + logger=logger, + ) + + revalidation_report = { + "non_coplanar_faces": inspect_face_planarity_issues(faces, unique_vertices), + "T-junctions": tjs_final, + "intersections": { + "count": len(plc_hits_final), + "intersection_hits": plc_hits_final + 
}, + "possible_holes": detect_possible_holes_from_faces(faces, unique_vertices), + "boundary_edges": detect_boundary_edges(faces, unique_vertices), + "degenerate_faces": detect_degenerate_faces(faces, unique_vertices) + } + + report = create_geometry_processing_report( + obj_file=obj_file, + repaired_obj=obj_output_path, + topology_before_repair=topology_before_repair, + issue_detection_report=issue_detection_report, + repair_report=repair_report, + revalidation_report=revalidation_report, + ) + write_geometry_processing_report(report, report_path) + return obj_output_path, report_path + +def convert_repaired_obj_to_gmsh_geo(repaired_obj_path, geo_file, rhino3dm_path, volume_name="RoomVolume"): + try: + + material_id_array = extract_rhino_materials(rhino3dm_path) + vertices, raw_faces, face_groups, face_group_materials = parse_obj_file(repaired_obj_path) + faces = process_and_instantiate_faces( + raw_faces, + face_groups, + face_group_materials, + material_id_array + ) + room_center = tuple( + sum(v[i] for v in vertices) / len(vertices) + for i in range(3) + ) + flip_all_faces_if_majority_inward(faces, vertices, room_center, logger=logger) + num_lines, num_surfaces = export_processed_topology_to_gmsh_geo(faces, vertices, geo_file, volume_name) + return num_lines, num_surfaces + except Exception as ex: + logger.error("Failed to convert repaired OBJ to Gmsh GEO: %s", ex) + return False \ No newline at end of file diff --git a/app/utils/geometry_utils.py b/app/utils/geometry_utils.py new file mode 100644 index 0000000..eac0f06 --- /dev/null +++ b/app/utils/geometry_utils.py @@ -0,0 +1,198 @@ +from dataclasses import dataclass +from typing import List, Tuple, Dict, Any +from shapely.geometry import Polygon +from shapely import constrained_delaunay_triangles + +@dataclass +class FaceRecord: + """ + A single polygon face with all associated metadata. 
+ Attributes: + fid : unique integer face id + verts : ordered list of 1-based vertex indices + group : OBJ group name (from the 'g' directive) + group_material : material UUID string associated with the group (from the usemtl) + material : material UUID string (from the Rhino3dm file) + """ + fid: int + verts: List[int] + group: str = "default" + group_material: str = "default_group_material" + material: str = "unknown" + + # Convenience: iterate over edges (pairs of consecutive vertex ids) + def edges(self): + n = len(self.verts) + return [(self.verts[i], self.verts[(i + 1) % n]) for i in range(n)] + + def undirected_edges(self): + return [_uedge(u, v) for u, v in self.edges()] + +def _uedge(u, v): + return (u, v) if u < v else (v, u) + +def remove_duplicate_faces(faces: List[FaceRecord]) -> List[FaceRecord]: + """Remove faces whose vertex sets (regardless of order) are duplicates.""" + seen: dict = {} + out: List[FaceRecord] = [] + for face in faces: + key = tuple(sorted(face.verts)) + if key in seen: + continue + seen[key] = True + out.append(face) + return out + +def newell_normal_from_points(face_ids, points): + """ + face_ids: list of vertex ids (1-based) + points: list[(x,y,z)] 0-based + returns (nx, ny, nz) unnormalized + """ + nx = ny = nz = 0.0 + n = len(face_ids) + for i in range(n): + p = points[face_ids[i] - 1] + q = points[face_ids[(i + 1) % n] - 1] + nx += (p[1] - q[1]) * (p[2] + q[2]) + ny += (p[2] - q[2]) * (p[0] + q[0]) + nz += (p[0] - q[0]) * (p[1] + q[1]) + return (nx, ny, nz) + +# HELPER MATH + +def sub(a, b): + return (a[0] - b[0], a[1] - b[1], a[2] - b[2]) + +def cross(a, b): + return ( + a[1] * b[2] - a[2] * b[1], + a[2] * b[0] - a[0] * b[2], + a[0] * b[1] - a[1] * b[0], + ) + +def dot(a, b): + return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] + +def triangulate_face_cdt_shapely( + face: List[int], + points: List[Tuple[float, float, float]], + *, + tol: float = 1e-12, + round_ndigits: int = 12, +) -> List[List[int]]: + """ + Constrained 
Delaunay triangulation of a polygon face using Shapely 2.1+. + face: list of vertex indices (>=3), 1-based indices into points + points: list[(x,y,z)] 0-based; vertex id i -> points[i-1] + Returns: list of triangles [[i,j,k], ...] with original 1-based vertex ids. + Notes: + - Does NOT add vertices (triangles use existing polygon vertices). + - Polygon must be simple (non-self-intersecting) for reliable results. + """ + if len(face) < 3: + return [] + if len(face) == 3: + return [face[:]] + + # --- choose projection plane (drop dominant normal axis) + nrm = newell_normal_from_points(face, points) + ax, ay, az = abs(nrm[0]), abs(nrm[1]), abs(nrm[2]) + + if az >= ax and az >= ay: + proj = lambda pid: (points[pid - 1][0], points[pid - 1][1]) # drop z + elif ay >= ax and ay >= az: + proj = lambda pid: (points[pid - 1][0], points[pid - 1][2]) # drop y + else: + proj = lambda pid: (points[pid - 1][1], points[pid - 1][2]) # drop x + + face_ids = face[:] + poly2d = [proj(pid) for pid in face_ids] + + # Ensure CCW for consistency + if area2(poly2d) < 0: + face_ids.reverse() + poly2d.reverse() + + # Remove consecutive duplicate points (Shapely dislikes them) + cleaned_ids = [face_ids[0]] + cleaned_2d = [poly2d[0]] + for pid, p2 in zip(face_ids[1:], poly2d[1:]): + if abs(p2[0] - cleaned_2d[-1][0]) <= tol and abs(p2[1] - cleaned_2d[-1][1]) <= tol: + continue + cleaned_ids.append(pid) + cleaned_2d.append(p2) + + # If last equals first, drop last + if len(cleaned_2d) >= 2: + if abs(cleaned_2d[0][0] - cleaned_2d[-1][0]) <= tol and abs(cleaned_2d[0][1] - cleaned_2d[-1][1]) <= tol: + cleaned_ids.pop() + cleaned_2d.pop() + + if len(cleaned_ids) < 3: + return [] + if len(cleaned_ids) == 3: + return [cleaned_ids] + + # Map rounded 2D coords -> original vertex id (1-based) + key = lambda x, y: (round(x, round_ndigits), round(y, round_ndigits)) + coord_to_vid = {} + for vid, (x, y) in zip(cleaned_ids, cleaned_2d): + coord_to_vid[key(x, y)] = vid + + + + poly = Polygon(cleaned_2d) + + # 
Fix minor invalidities (e.g., nearly-collinear artifacts) + if not poly.is_valid: + poly = poly.buffer(0) + + if poly.is_empty or (not poly.is_valid): + return [] + + # Shapely returns a GeometryCollection of triangle polygons + tris_geom = constrained_delaunay_triangles(poly) + + geoms = getattr(tris_geom, "geoms", []) + if not geoms: + return [] + + out: List[List[int]] = [] + for tri in geoms: + # triangle polygon coords includes closing point == first point + coords = list(tri.exterior.coords) + if len(coords) < 4: + continue + coords = coords[:-1] # drop closing coordinate + if len(coords) != 3: + continue + + vids = [] + ok = True + for (x, y) in coords: + vid = coord_to_vid.get(key(x, y)) + if vid is None: + ok = False + break + vids.append(vid) + + if not ok or len(set(vids)) < 3: + continue + + out.append(vids) + + return out + +# Triangulation helper math +def orient(a, b, c): + return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]) + +def area2(poly2d): + s = 0.0 + m = len(poly2d) + for i in range(m): + x1, y1 = poly2d[i] + x2, y2 = poly2d[(i + 1) % m] + s += x1 * y2 - x2 * y1 + return s \ No newline at end of file diff --git a/app/utils/geometry_validation_utils.py b/app/utils/geometry_validation_utils.py new file mode 100644 index 0000000..b762068 --- /dev/null +++ b/app/utils/geometry_validation_utils.py @@ -0,0 +1,131 @@ +import logging +import math +from typing import List, Tuple +from app.utils.geometry_utils import newell_normal_from_points + +logger = logging.getLogger(__name__) + +def classify_face_degeneracy( + verts_ids: List[int], + points: List[Tuple[float, float, float]], + *, + fatal_area_tol: float = 1e-12, +) -> Tuple[str, float]: + """ + Classify polygon degeneracy using Newell area. 
def classify_face_degeneracy(
    verts_ids: List[int],
    points: List[Tuple[float, float, float]],
    *,
    fatal_area_tol: float = 1e-12,
) -> Tuple[str, float]:
    """
    Classify polygon degeneracy using Newell area.

    Returns:
        ("ok" | "warning" | "fatal", area2)
        area2 = squared area proxy (||normal||^2)

    NOTE(review): as written the function only ever returns "ok" or
    "fatal"; the "warning" status in the contract is never produced —
    confirm whether a warning band was intended.
    """

    # Fewer than 3 vertices can never span an area.
    if len(verts_ids) < 3:
        return "fatal", 0.0

    # Drop repeated vertex ids, keeping the first occurrence of each.
    # NOTE(review): this removes ALL duplicates, not just consecutive
    # ones, so a face that legitimately revisits a vertex is simplified
    # before the area test — confirm this is the intended behavior.
    unique = []
    seen = set()
    for vid in verts_ids:
        if vid not in seen:
            unique.append(vid)
            seen.add(vid)

    if len(unique) < 3:
        return "fatal", 0.0

    # ---- Newell normal (same formula as geometry_utils.newell_normal_from_points)
    nx = ny = nz = 0.0
    n = len(unique)

    for i in range(n):
        # 1-based vertex ids index into the 0-based points list.
        p = points[unique[i] - 1]
        q = points[unique[(i + 1) % n] - 1]

        nx += (p[1] - q[1]) * (p[2] + q[2])
        ny += (p[2] - q[2]) * (p[0] + q[0])
        nz += (p[0] - q[0]) * (p[1] + q[1])

    area2 = nx*nx + ny*ny + nz*nz  # proportional to area^2

    # Near-zero squared area means the vertices are (nearly) collinear.
    if area2 <= fatal_area_tol:
        return "fatal", area2

    return "ok", area2

def classify_face_planarity_m(
    face_ids,
    points,
    *,
    # meters:
    # warn at 0.1 mm, fatal at 1.0 mm
    warn_planar_tol_m=1e-4,
    fatal_planar_tol_m=1e-3,
):
    """
    Classify how far a face's vertices deviate from their best-fit plane.

    Distances are measured in meters against a plane through the vertex
    centroid with a Newell-method normal (see planarity_deviation_m).

    Returns: (status, max_abs_dist_m, rms_dist_m)
    status:
      - "ok"
      - "warning"
      - "fatal"
      - "skip" (triangles: always planar; degeneracy handled elsewhere)
    """
    n = len(face_ids)
    if n < 3:
        return ("fatal", float("inf"), float("inf"))

    if n == 3:
        # Triangle is always planar (if non-degenerate).
        return ("skip", 0.0, 0.0)

    max_abs, rms, _, _ = planarity_deviation_m(face_ids, points)
    # Non-finite deviation means the plane itself was undefined (degenerate face).
    if not math.isfinite(max_abs):
        return ("fatal", max_abs, rms)
    if max_abs > fatal_planar_tol_m:
        return ("fatal", max_abs, rms)
    if max_abs > warn_planar_tol_m:
        return ("warning", max_abs, rms)
    return ("ok", max_abs, rms)

def planarity_deviation_m(face_ids, points, *, eps=1e-30):
    """
    Measure each vertex's signed distance to the face's reference plane.

    Returns:
        (max_abs_dist_m, rms_dist_m, normal_unit, plane_point_centroid)
    Plane:
        - normal via Newell
        - plane through centroid of face vertices

    A degenerate face (undefined normal) yields infinite distances and
    zero vectors for the normal and centroid.
    """
    n = len(face_ids)
    if n < 3:
        return (float("inf"), float("inf"), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0))

    nrm = newell_normal_from_points(face_ids, points)
    nlen = math.sqrt(nrm[0]*nrm[0] + nrm[1]*nrm[1] + nrm[2]*nrm[2])
    if nlen <= eps:
        # collinear / degenerate -> plane undefined
        return (float("inf"), float("inf"), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0))

    # Unit normal.
    nu = (nrm[0]/nlen, nrm[1]/nlen, nrm[2]/nlen)

    # centroid of the face vertices (anchors the reference plane)
    cx = cy = cz = 0.0
    for vid in face_ids:
        x, y, z = points[vid - 1]
        cx += x; cy += y; cz += z
    inv = 1.0 / n
    c = (cx * inv, cy * inv, cz * inv)

    # Accumulate max and sum-of-squares of signed plane distances.
    max_abs = 0.0
    s2 = 0.0
    for vid in face_ids:
        x, y, z = points[vid - 1]
        dx = x - c[0]
        dy = y - c[1]
        dz = z - c[2]
        d = nu[0]*dx + nu[1]*dy + nu[2]*dz  # signed distance in meters
        ad = abs(d)
        if ad > max_abs:
            max_abs = ad
        s2 += d*d

    rms = math.sqrt(s2 / n)
    return (max_abs, rms, nu, c)