import glob
import pickle

import numpy as np
import torch
from torch_geometric.nn import knn
import gradio as gr
import trimesh

def createPlane(normal, point_on_plane):
    """Build a 2x2 quad mesh centered at point_on_plane and orthogonal to normal."""
    normal = normal / np.linalg.norm(normal)
    # Find two orthonormal vectors spanning the plane (abs() also covers normals
    # close to the negative x-axis, where the cross product would degenerate)
    if np.allclose(np.abs(normal), [1, 0, 0]):
        v1 = np.cross(normal, [0, 1, 0])
    else:
        v1 = np.cross(normal, [1, 0, 0])
    v1 = v1 / np.linalg.norm(v1)
    v2 = np.cross(normal, v1)
    v2 = v2 / np.linalg.norm(v2)
    half_width = 1
    half_height = 1
    # Calculate the four corners of the quad
    corner1 = point_on_plane + half_width * v1 + half_height * v2
    corner2 = point_on_plane - half_width * v1 + half_height * v2
    corner3 = point_on_plane - half_width * v1 - half_height * v2
    corner4 = point_on_plane + half_width * v1 - half_height * v2
    vertices = np.array([corner1, corner2, corner3, corner4])
    # Two triangles per side; the reversed copies make the quad visible from both sides
    faces = np.array([
        [0, 1, 2],
        [0, 2, 3],
        [2, 1, 0],
        [3, 2, 0]
    ])
    # Define the color (sky blue) in RGBA, with 128 alpha for half opacity
    sky_blue_with_alpha = [135, 206, 235, 128]
    # Set the same semi-transparent color on every vertex
    vertex_colors = np.tile(sky_blue_with_alpha, (vertices.shape[0], 1))
    # Create a mesh for the rectangle
    plane_mesh = trimesh.Trimesh(vertices=vertices, faces=faces, vertex_colors=vertex_colors)
    return plane_mesh
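
# Example with made-up values: a 2x2 square patch lying in the plane z = 0.5:
#   quad = createPlane(np.array([0.0, 0.0, 1.0]), np.array([0.0, 0.0, 0.5]))
#   quad.vertices.shape  # -> (4, 3)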

def reflect_points_multiple_3d(d, n, p):
    """Reflect every point in p across every plane (n[i], d[i]).

    A plane is parameterised as {x : <n_i, x> = d_i} with unit normal n_i, so the
    reflection of a point is p' = p - 2 * (<n_i, p> - d_i) * n_i.
    d: (m,) plane offsets, n: (m, 3) unit normals, p: (n, 3) points.
    """
    points_expanded = p.unsqueeze(0).expand(n.size(0), -1, -1)    # (m, n, 3)
    normals_expanded = n.unsqueeze(1).expand(-1, p.size(0), -1)   # (m, n, 3)
    distances_expanded = d.unsqueeze(1).expand(-1, p.size(0))     # (m, n)
    dot_products = torch.sum(points_expanded * normals_expanded, dim=2)
    # reflections: (m, n, 3)
    reflections = points_expanded - 2 * (dot_products - distances_expanded).unsqueeze(2) * normals_expanded
    return reflections
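
# A quick sanity check with made-up values (not part of the app flow): reflecting
# the point (2, 0, 0) across the plane x = 1, i.e. n = (1, 0, 0), d = 1, lands on the origin:
#   d = torch.tensor([1.0]); n = torch.tensor([[1.0, 0.0, 0.0]])
#   p = torch.tensor([[2.0, 0.0, 0.0]])
#   reflect_points_multiple_3d(d, n, p)  # -> tensor([[[0., 0., 0.]]])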

def reflection_point_association_3d(d, n, q, threshold):
    # d in shape (m,), n in shape (m, 3), q in shape (n, 3)
    reflections = reflect_points_multiple_3d(d, n, q)  # shape: (m, n, 3)
    reflections = reflections.view(-1, 3)  # flatten to (m*n, 3)
    # knn(x, y, k) returns a (2, k*|y|) index tensor: row 0 indexes y (the reflections),
    # row 1 indexes x (the original points q); we only need the row of q indices
    _, indices = knn(q, reflections, k=1, batch_x=None, batch_y=None)  # indices shape: (m*n,)
    # Gather the nearest original point for each reflection
    nearest_points = q[indices]  # shape: (m*n, 3)
    # Distance from each reflection to its nearest neighbour
    distances = (nearest_points - reflections).norm(dim=1)  # shape: (m*n,)
    # Reshape back to (m, n): entry (i, j) says whether point j maps back near the
    # cloud under plane i's reflection (sum along axis 1 for the number of associated points)
    distances = distances.view(d.size(0), -1)  # shape: (m, n)
    within_threshold = distances <= threshold  # shape: (m, n)
    return within_threshold
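
# Example with made-up values: a two-point cloud that is mirror-symmetric about the
# plane x = 0 (n = (1, 0, 0), d = 0), so both points associate with that plane:
#   d = torch.tensor([0.0]); n = torch.tensor([[1.0, 0.0, 0.0]])
#   q = torch.tensor([[1.0, 0.0, 0.0], [-1.0, 0.0, 0.0]])
#   reflection_point_association_3d(d, n, q, 0.01)  # -> tensor([[True, True]])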

def get_patches(points, centroids):
    # Each centroid encodes a symmetry plane as c = n * (d + 1): the unit normal
    # is c / |c| and the plane offset is d = |c| - 1
    points = np.asarray(points)
    centroids = np.asarray(centroids)
    norm = np.linalg.norm(centroids, axis=1)
    n = centroids / norm[:, None]
    d = norm - 1
    association = reflection_point_association_3d(torch.tensor(d), torch.tensor(n),
                                                  torch.tensor(points), 0.03)
    return np.array(association)

def left_right(allpoints, patchbool, planepoints):
    """Split the points selected by patchbool into the two sides of a plane.

    inputs:  allpoints: (n, 3), patchbool: (n,) boolean mask,
             planepoints: (4, 3) corners of the plane quad
    outputs: left_idx, right_idx: index arrays into allpoints, (k,) and (k',)
    """
    patchpoints = allpoints[patchbool]
    # Plane normal from the first three corners
    p1, p2, p3 = planepoints[0], planepoints[1], planepoints[2]
    v1 = p2 - p1
    v2 = p3 - p1
    normal = np.cross(v1, v2)
    # Signed distance of each patch point to the plane (epsilon guards a degenerate normal)
    distances = (patchpoints - p1) @ normal / (np.linalg.norm(normal) + 1e-6)
    l_idx = distances < 0
    # Map the patch mask back to indices into the full point set
    allidcs = np.arange(len(allpoints))[patchbool]
    left_idx = allidcs[l_idx]
    right_idx = allidcs[~l_idx]
    return left_idx, right_idx
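
# Example with made-up values: two points straddling the z = 0 plane; the point
# below the plane ends up on the left, the point above on the right:
#   pts = np.array([[0.0, 0.0, 1.0], [0.0, 0.0, -1.0]])
#   corners = np.array([[1.0, 1.0, 0.0], [-1.0, 1.0, 0.0], [-1.0, -1.0, 0.0], [1.0, -1.0, 0.0]])
#   left_right(pts, np.array([True, True]), corners)  # -> (array([1]), array([0]))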

def dbscan(D, eps, MinPts):
    """Plain DBSCAN over the points in D; returns one label per point.

    Labels: 0 = unvisited (only during the scan), -1 = noise, 1..C = cluster ids.
    """
    labels = [0] * len(D)
    C = 0
    for P in range(0, len(D)):
        if not (labels[P] == 0):
            continue
        NeighborPts = region_query(D, P, eps)
        if len(NeighborPts) < MinPts:
            labels[P] = -1  # too few neighbours: mark as noise (may be rescued later)
        else:
            C += 1
            grow_cluster(D, labels, P, NeighborPts, C, eps, MinPts)
    return labels

def grow_cluster(D, labels, P, NeighborPts, C, eps, MinPts):
    """Expand cluster C from seed point P, mutating labels in place."""
    labels[P] = C
    i = 0
    # NeighborPts grows while we iterate, so use an index rather than a for-loop
    while i < len(NeighborPts):
        Pn = NeighborPts[i]
        if labels[Pn] == -1:
            labels[Pn] = C  # rescue a border point previously marked as noise
        elif labels[Pn] == 0:
            labels[Pn] = C
            PnNeighborPts = region_query(D, Pn, eps)
            if len(PnNeighborPts) >= MinPts:
                # Pn is a core point: its neighbourhood also belongs to the cluster
                NeighborPts = NeighborPts + PnNeighborPts
        i += 1

def region_query(D, P, eps):
    """All indices within Euclidean distance eps of D[P] (brute force, O(n) per query)."""
    neighbors = []
    for Pn in range(0, len(D)):
        # Euclidean metric; a geodesic distance could be swapped in here instead
        if np.linalg.norm(D[P] - D[Pn]) < eps:
            neighbors.append(Pn)
    return neighbors
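
# Example with made-up 2D values: three nearby points form one cluster and the
# far-away point is labelled noise (every point counts itself as a neighbour):
#   pts = np.array([[0.0, 0.0], [0.05, 0.0], [0.0, 0.05], [5.0, 5.0]])
#   dbscan(pts, eps=0.1, MinPts=2)  # -> [1, 1, 1, -1]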

def compute_centroids(data, labels):
    """Mean point of every cluster, skipping the noise label -1."""
    labels = np.asarray(labels)  # dbscan returns a plain list; boolean masking needs an array
    unique = np.unique(labels)
    unique_labels = unique[unique != -1]
    centroids = []
    for label in unique_labels:
        mask = labels == label
        points = data[mask]
        centroids.append(points.mean(axis=0))
    return np.stack(centroids)
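
# Example with made-up values: two points in cluster 1 and one noise point (-1)
# that is excluded from the average:
#   data = np.array([[0.0, 0.0], [2.0, 0.0], [10.0, 10.0]])
#   compute_centroids(data, [1, 1, -1])  # -> array([[1., 0.]])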

def proc_all(mesh_path, mode_path):
    # mode_path holds pickled (n, 3) points encoding candidate symmetry planes
    with open(mode_path, 'rb') as f:
        fin = pickle.load(f)
    mesh = trimesh.load(mesh_path)
    verts = np.array(mesh.vertices)
    # Cluster the mode points and keep one centroid (i.e. one plane) per cluster
    my_labels = dbscan(fin, eps=0.1, MinPts=1)
    centroid = compute_centroids(fin, my_labels)
    # Decode each centroid c = n * (d + 1) into a unit normal n and offset d,
    # plus a point on the plane for visualisation
    norm = np.linalg.norm(centroid, axis=-1)
    n = centroid / norm[:, None]
    d = norm - 1
    point = n * d[..., None]
    # Boolean (m, n) association of mesh vertices to each candidate plane
    pats = get_patches(verts, centroid)
    alldicts = []
    for i in range(len(n)):
        plane = createPlane(n[i], point[i])
        l, r = left_right(verts, pats[i], plane.vertices)
        alldicts.append({'plane': plane, 'left': l, 'right': r})
    return alldicts
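
# Intended offline use (hypothetical file names; the pipeline that produced the
# *_planes.pickle / *_dicts.pickle files consumed below is not shown in this file):
#   dicts = proc_all("shape.obj", "shape_modes.pickle")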

def create_scene(allscenes, plane_idx):
    # Export one scene to a temporary .obj so gr.Model3D can display it
    # (plane_idx may be negative to index from the largest end)
    scene = allscenes[plane_idx]
    temp_file = f"/tmp/scene_{plane_idx}.obj"
    scene.export(temp_file)
    return temp_file

def load_mesh_max(scene_path):
    # The companion "<name>_dicts.pickle" (expected next to the app) stores one dict
    # per plane; 'len' is the number of points associated with that plane
    dict_path = scene_path.split('/')[-1].split('.')[0][:-7] + "_dicts.pickle"
    with open(dict_path, 'rb') as f:
        dicts = pickle.load(f)
    with open(scene_path, 'rb') as f:
        scenes = pickle.load(f)
    # Sort scenes by patch size, ascending
    alllengths = [dic['len'] for dic in dicts]
    sort_idcs = np.argsort(alllengths)
    sorted_planes = [scenes[i] for i in sort_idcs]
    allmesh = []
    for i in range(min(5, len(dicts))):  # limit to at most 5 outputs
        temp_file = create_scene(sorted_planes, -(i + 1))  # largest first
        allmesh.append(temp_file)
    return allmesh + [None] * (5 - len(allmesh))  # pad with None if fewer than 5

def load_mesh_min(scene_path):
    # Same lookup as load_mesh_max, but showing the 5 smallest patches
    dict_path = scene_path.split('/')[-1].split('.')[0][:-7] + "_dicts.pickle"
    with open(dict_path, 'rb') as f:
        dicts = pickle.load(f)
    with open(scene_path, 'rb') as f:
        scenes = pickle.load(f)
    alllengths = [dic['len'] for dic in dicts]
    sort_idcs = np.argsort(alllengths)
    sorted_planes = [scenes[i] for i in sort_idcs]
    allmesh = []
    for i in range(min(5, len(dicts))):  # limit to at most 5 outputs
        temp_file = create_scene(sorted_planes, i)  # smallest first
        allmesh.append(temp_file)
    return allmesh + [None] * (5 - len(allmesh))  # pad with None if fewer than 5
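
# Expected inputs, inferred from the two loaders above (an assumption, since the
# files are produced elsewhere): "<name>_planes.pickle" holds a list of exportable
# trimesh scenes and "<name>_dicts.pickle" a parallel list of dicts with a 'len' entry.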

def reset_outputs():
    # One None per Model3D component below, clearing all ten viewers
    return [None] * 10

examples = glob.glob("*_planes.pickle")

with gr.Blocks() as demo:
    with gr.Row():
        file_input = gr.File(label="Upload processed planes here")
        examples_component = gr.Examples(examples=examples, inputs=file_input, outputs=None, examples_per_page=25)
    with gr.Row():
        with gr.Column(scale=1, min_width=600):
            gr.Markdown("Top 5 largest")
            model1 = gr.Model3D(label="3D Model 1", height=500)
            model2 = gr.Model3D(label="3D Model 2", height=500)
            model3 = gr.Model3D(label="3D Model 3", height=500)
            model4 = gr.Model3D(label="3D Model 4", height=500)
            model5 = gr.Model3D(label="3D Model 5", height=500)
        with gr.Column(scale=1, min_width=600):
            gr.Markdown("Top 5 smallest")
            model6 = gr.Model3D(label="3D Model 6", height=500)
            model7 = gr.Model3D(label="3D Model 7", height=500)
            model8 = gr.Model3D(label="3D Model 8", height=500)
            model9 = gr.Model3D(label="3D Model 9", height=500)
            model10 = gr.Model3D(label="3D Model 10", height=500)

    # Whenever a file is uploaded or an example is chosen: clear all viewers, then
    # fill the left column with the largest patches and the right with the smallest
    file_input.change(fn=reset_outputs, inputs=[], outputs=[model1, model2, model3, model4, model5, model6, model7, model8, model9, model10])
    file_input.change(fn=load_mesh_max, inputs=file_input, outputs=[model1, model2, model3, model4, model5])
    file_input.change(fn=load_mesh_min, inputs=file_input, outputs=[model6, model7, model8, model9, model10])

if __name__ == "__main__":
    demo.launch(debug=True)