Voxelize A Mesh in Blender and sample the color from the texture
import bpy
import bmesh
import numpy as np
import json
from mathutils import Vector
from collections import defaultdict

def texture_surface_voxelizer(obj, particle_count, output_filepath="voxel_data.json"):
    """
    Voxelizes the surface of a Blender object, extracts texture colors,
    and saves the data to a JSON file.

    Args:
        obj (bpy.types.Object): The Blender object to be voxelized.
        particle_count (int): The number of particles to sample.
        output_filepath (str): The path where the JSON file will be saved.
    """
    if obj.type != 'MESH':
        print(f"Object '{obj.name}' is not a mesh. Please select a mesh object.")
        return

    # Ensure the object is in Object Mode and its scale is applied
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.transform_apply(location=False, rotation=False, scale=True)

    # Get the mesh data
    mesh = obj.data

    # Check that UV maps exist
    if not mesh.uv_layers:
        print(f"Mesh '{obj.name}' has no UV maps. Cannot extract texture colors.")
        # Fall back to exporting positions only with the default color
        return extract_positions_only(obj, particle_count, output_filepath)

    uv_layer = mesh.uv_layers.active.data
    # Find the object's texture
    image_texture = None
    if obj.material_slots:
        for slot in obj.material_slots:
            if slot.material:
                # Search for an Image Texture node in the shader tree (Principled BSDF)
                if slot.material.node_tree:
                    for node in slot.material.node_tree.nodes:
                        if node.type == 'BSDF_PRINCIPLED':
                            # Try to find the Base Color input
                            if node.inputs.get('Base Color') and node.inputs['Base Color'].links:
                                linked_node = node.inputs['Base Color'].links[0].from_node
                                if linked_node.type == 'TEX_IMAGE':
                                    image_texture = linked_node.image
                                    break
                    if image_texture:
                        break
            if image_texture:
                break

    if not image_texture:
        print(f"No image texture (Image Texture node) connected to 'Base Color' found in the material of '{obj.name}'.")
        print("Cannot extract texture colors. Voxels will be exported with the default grey color.")
        # Fall back to exporting positions only with the default color
        return extract_positions_only(obj, particle_count, output_filepath)
    if image_texture.source == 'FILE':
        # Ensure the image is loaded and has pixel data.
        # reload() makes sure it is up to date, especially if modified externally.
        image_texture.reload()

        # Check if the image has pixel data after the reload
        if not image_texture.pixels:
            print(f"Image '{image_texture.name}' has no pixel data after reload.")
            print("Cannot extract texture colors. Voxels will be exported with the default grey color.")
            return extract_positions_only(obj, particle_count, output_filepath)

        # Access pixel data
        width, height = image_texture.size

        # Check if the image dimensions are valid
        if width == 0 or height == 0:
            print(f"Image '{image_texture.name}' has invalid dimensions ({width}x{height}).")
            print("Cannot extract texture colors. Voxels will be exported with the default grey color.")
            return extract_positions_only(obj, particle_count, output_filepath)

        pixels = np.array(image_texture.pixels).reshape(height, width, -1)  # RGBA or RGB
    else:
        print(f"Image source for texture '{image_texture.name}' is not 'FILE'.")
        print("Can only use external image files for color sampling.")
        return extract_positions_only(obj, particle_count, output_filepath)
    # Prepare for sampling using bmesh (for vertex/face data) and obj.evaluated_get (for world space)
    bm = bmesh.new()
    bm.from_mesh(mesh)
    bm.verts.ensure_lookup_table()
    bm.faces.ensure_lookup_table()

    # Apply the object's world matrix to get global positions
    obj_eval = obj.evaluated_get(bpy.context.evaluated_depsgraph_get())
    matrix_world = obj_eval.matrix_world

    positions = []
    colors = []
    # Simple sampling approach: randomly pick faces and points on them
    for _ in range(particle_count):
        # Randomly select a face
        face = bm.faces[np.random.randint(0, len(bm.faces))]

        # Quick and dirty: pick a random vertex of the face for sampling.
        # A more advanced surface sampler would use barycentric coordinates
        # to pick points *within* the face, not just at its vertices
        # (see the sketch after the script).
        v_idx = np.random.randint(0, len(face.verts))
        v = face.verts[v_idx]
        world_pos = matrix_world @ v.co
        positions.extend([world_pos.x, world_pos.y, world_pos.z])

        # Query the UV coordinates for the vertex
        uv = None
        for loop in face.loops:
            if loop.vert == v:
                uv = uv_layer[loop.index].uv
                break

        # Initialize the default color to grey
        sampled_r, sampled_g, sampled_b = 100, 100, 100

        if uv:
            # Transform UV to image coordinates.
            # UVs range from 0 to 1; pixel indices range from 0 to width-1 / height-1.
            # Blender's Image.pixels is stored row by row starting at the bottom-left,
            # which matches the UV origin (0,0), so uv.y maps directly to the row index
            # and no vertical flip is needed.
            px = int(uv.x * width)
            py = int(uv.y * height)

            # Clamp coordinates to the image boundaries
            px = max(0, min(px, width - 1))
            py = max(0, min(py, height - 1))

            # Check that the calculated coordinates are valid
            if 0 <= py < height and 0 <= px < width:
                # Extract the color from the pixel data (RGBA).
                # `pixels` is the reshaped numpy array; `pixels[py, px]` is [R, G, B, A].
                pixel_color = pixels[py, px]

                # Blender stores colors in the 0-1 range.
                # The consuming JS code expects 0-250 (0-1 scaled by 250).
                sampled_r = pixel_color[0] * 250
                sampled_g = pixel_color[1] * 250
                sampled_b = pixel_color[2] * 250

                # Ensure values stay within the 0-250 range
                sampled_r = max(0, min(250, sampled_r))
                sampled_g = max(0, min(250, sampled_g))
                sampled_b = max(0, min(250, sampled_b))
            else:
                print(f"Warning: Pixel coordinates ({px},{py}) out of image bounds ({width}x{height}) for UV: ({uv.x},{uv.y}).")
        else:
            print(f"Warning: Could not find UV coordinates for vertex {v.index} on face {face.index}.")

        colors.extend([sampled_r, sampled_g, sampled_b])
    bm.free()  # Free the BMesh data

    # Data structure for JSON
    json_data = {
        "positions": positions,
        "colors": colors
    }
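    # Note: both arrays are flat and parallel. "positions" holds x, y, z triples
    # and "colors" holds r, g, b triples in the 0-250 range, so the data for
    # particle i sits at indices 3*i .. 3*i+2 of each array.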
    # Save the JSON file
    with open(output_filepath, 'w') as f:
        json.dump(json_data, f, indent=4)

    print(f"Voxel data for '{obj.name}' with {len(positions) // 3} particles exported to '{output_filepath}'.")
    return json_data

def extract_positions_only(obj, particle_count, output_filepath="voxel_data.json"):
    """
    Extracts only particle positions on the surface of a Blender object.
    A fallback when texture colors cannot be extracted.
    """
    print(f"Extracting positions only for object '{obj.name}'.")

    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.transform_apply(location=False, rotation=False, scale=True)

    mesh = obj.data
    bm = bmesh.new()
    bm.from_mesh(mesh)
    bm.verts.ensure_lookup_table()
    bm.faces.ensure_lookup_table()

    obj_eval = obj.evaluated_get(bpy.context.evaluated_depsgraph_get())
    matrix_world = obj_eval.matrix_world

    positions = []
    colors = []  # Default color, e.g. grey

    for _ in range(particle_count):
        face = bm.faces[np.random.randint(0, len(bm.faces))]
        v_idx = np.random.randint(0, len(face.verts))
        v = face.verts[v_idx]
        world_pos = matrix_world @ v.co
        positions.extend([world_pos.x, world_pos.y, world_pos.z])
        colors.extend([100, 100, 100])  # Grey

    bm.free()

    json_data = {
        "positions": positions,
        "colors": colors
    }
    with open(output_filepath, 'w') as f:
        json.dump(json_data, f, indent=4)

    print(f"Voxel positions (without texture colors) for '{obj.name}' with {len(positions) // 3} particles exported to '{output_filepath}'.")
    return json_data

# --- SCRIPT USAGE ---
if __name__ == "__main__":
    # Ensure an object is selected
    if bpy.context.active_object:
        selected_object = bpy.context.active_object

        # Configure particle count and output path.
        # Adjust the path for your operating system!
        # Example Windows: "C:\\temp\\voxel_data.json"
        # Example macOS/Linux: "/tmp/voxel_data.json"
        output_file = bpy.path.abspath("//voxel_data.json")  # Saves next to the .blend file
        particle_count = 10000  # Adjust the number of particles

        print(f"\nStarting voxelization for: {selected_object.name}")
        texture_surface_voxelizer(selected_object, particle_count, output_file)
        print("Voxelization completed.")
    else:
        print("No object selected. Please select a mesh object.")