diff --git a/README.md b/README.md index 1126a87..b2de26f 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,10 @@ Important note: For sparse point cloud to import you should change in `Structure Additional option: by searching [F3] for `Meshroom update cameras`, you can copy settings from active camera to all meshroom cameras. +## Changelog: +- 0.1.0 - undisorted is now active, info when no .ply format, update point cloud vis, views named by filenames. +- 0.0.1 - initial working functionality + ## TODO: - seach through node tree and give option which elements import; - if node is not computed then put some info about it; diff --git a/__init__.py b/__init__.py index 1d61ae0..d088a6d 100644 --- a/__init__.py +++ b/__init__.py @@ -29,7 +29,7 @@ "name": "Meshroom importer", "description": "Imports from .mg file cameras, images, sparse and obj", "author": "Dawid HuczyƄski", - "version": (0, 0, 1), + "version": (0, 1, 0), "blender": (2, 80, 0), "location": "File > Import > Import Meshroom", "warning": "This addon is still in development.", @@ -37,12 +37,10 @@ "category": "Import-Export"} -filepath = r'D:\Koszyk\koszyk.mg' - # module import https://github.com/uhlik/bpy/blob/master/view3d_point_cloud_visualizer.py # thanks to Jakub Uhlik vis_mod = 'view3d_point_cloud_visualizer' -if vis_mod in sys.modules.keys() and sys.modules[vis_mod].bl_info['version'] <= (0, 7, 0): +if vis_mod in sys.modules.keys() and sys.modules[vis_mod].bl_info['version'] <= (0, 8, 12): local_visualizer = False else: from . import view3d_point_cloud_visualizer as point_cloud @@ -63,11 +61,11 @@ def find_view_layer(coll, lay_coll=None): return None -def read_meshlab(filepath): - 'Handle meshlab file' - +def get_meshroom_paths(filepath): + 'Handle meshroom file' cache = os.path.join(os.path.dirname(filepath), 'MeshroomCache') data = json.load(open(filepath, 'r')) + data try: nodeSFM = data['graph']['StructureFromMotion_1'] nodeType = nodeSFM['nodeType'] @@ -76,9 +74,19 @@ def read_meshlab(filepath): # cache=cache, nodeType=nodeType, uid0=uid0) cameras_sfm = nodeSFM['outputs']['outputViewsAndPoses'].format( cache=cache, nodeType=nodeType, uid0=uid0) - cloud = os.path.join(cache, nodeType, uid0, 'cloud_and_poses.ply') + # sparse = nodeSFM['outputs']['output'].format( + # cache=cache, nodeType=nodeType, uid0=uid0) + sparse = nodeSFM['outputs']['extraInfoFolder'].format( + cache=cache, nodeType=nodeType, uid0=uid0) + 'cloud_and_poses.ply' except KeyError: - cameras_sfm = cloud = None + cameras_sfm = sparse = None + try: + prepDense = data['graph']['PrepareDenseScene_1'] + nodeType = prepDense['nodeType'] + uid0 = prepDense['uids']['0'] + exr_folder = prepDense['outputs']['output'].format(cache=cache, nodeType=nodeType, uid0=uid0) + except KeyError: + exr_folder = None try: nodeMesh = data['graph']['Meshing_1'] dense_obj = nodeMesh['outputs']['output'].format( @@ -91,22 +99,26 @@ def read_meshlab(filepath): cache=cache, nodeType=nodeTex['nodeType'], uid0=nodeTex['uids']['0']) except KeyError: tex_obj = None - return (cameras_sfm, cloud, dense_obj, tex_obj) + return (cameras_sfm, sparse, dense_obj, tex_obj, exr_folder) -def import_cameras(cameras_sfm, img_depth): +def import_cameras(cameras_sfm, img_depth, undistorted, exr_folder): 'read camera sfm and imports to blender' data = json.load(open(cameras_sfm, 'r')) poses = {x['poseId']: x['pose'] for x in data['poses']} intrinsics = {x['intrinsicId']: x for x in data['intrinsics']} + #TODO dimensions per camera render = bpy.context.scene.render render.resolution_x = 
int(data['views'][0]['width']) render.resolution_y = int(data['views'][0]['height']) for view in data['views']: view_id = view['viewId'] - path = view['path'] + if undistorted: + path = os.path.join(exr_folder, f'{view_id}.exr') + else: + path = view['path'] width, height = int(view['width']), int(view['height']) focal_length = float(view['metadata']['Exif:FocalLength']) pose = poses[view['poseId']]['transform'] @@ -130,7 +142,12 @@ def import_cameras(cameras_sfm, img_depth): bg.display_depth = img_depth # camera object - ob = bpy.data.objects.new(f'View {view_id}', bcam) + if undistorted: + # TODO: if undistorted then apply some resize, or add separate images in right place. + name = f'View {view_id}' + else: + name = 'View {}'.format(os.path.splitext(os.path.basename(path))) + ob = bpy.data.objects.new(name, bcam) bpy.context.collection.objects.link(ob) loc = [float(x) for x in pose['center']] rot = [float(x) for x in pose['rotation']] @@ -140,45 +157,6 @@ def import_cameras(cameras_sfm, img_depth): ob.location = Vector(loc) -def import_sparse_depricated(cloud): - '''Depricated. Use view3d_point_cloud_visualizer instead.''' - # read .ply file - f = open(cloud, 'r') - ply = f.read() - header = ply[:1000].split('end_header\n')[0].split('\n') - header - assert header[0] == 'ply' - assert header[1].startswith('format ascii') - elements = [] - tmp_prop = [] - for x in header[2:]: - a = x.split(' ') - if a[0] == 'element': - if tmp_prop: - elements[-1]['props'] = list(tmp_prop) - tmp_prop = [] - el = {'name': a[1], 'nr': a[2]} - elements.append(el) - elif a[0] == 'property': - prop = {'name': a[2], 'type': a[1]} - tmp_prop.append(prop) - - elements[-1]['props'] = list(tmp_prop) - - points = ply.split('end_header\n')[1].split('\n') - if points[-1] == '': - points.pop() - - verts = [] - for point in points: - verts.append((float(x) for x in point.split()[:3])) - - mesh = bpy.data.meshes.new('sparse cloud SFM') - mesh.from_pydata(verts, [], []) - obj = bpy.data.objects.new('sparse cloud SFM', mesh) - bpy.context.collection.objects.link(obj) - - def import_object(filepath): bpy.ops.import_scene.obj(filepath=filepath) bpy.context.selected_objects[0].matrix_world = Matrix() @@ -197,21 +175,21 @@ class import_meshroom(bpy.types.Operator): directory: bpy.props.StringProperty( maxlen=1024, subtype='FILE_PATH', options={'HIDDEN', 'SKIP_SAVE'}) - cameras: bpy.props.BoolProperty(default=True, name='Views', description='Import views as cameras and images') + import_views: bpy.props.BoolProperty(default=True, name='Views', description='Import views as cameras and images') - undistorted: bpy.props.BoolProperty(default=True, name='Undistorted', description='Better, but heavy images') + undistorted: bpy.props.BoolProperty(default=False, name='Undistorted', description='Better, but heavy images') DEPTH = [ ('FRONT', 'FRONT', 'Preview semi transparent image in front of the objects', '', 0), ('BACK', 'BACK', 'Preview image behing objects', '', 1) ] - img_front: bpy.props.EnumProperty(items=DEPTH, name='Depth', description='', default='FRONT') + img_front: bpy.props.EnumProperty(items=DEPTH, name='Image View Depth', description='', default='FRONT') - sparse: bpy.props.BoolProperty(default=True, name='Import SFM', description='') + import_sparse: bpy.props.BoolProperty(default=True, name='Import StructureFromMotion', description='') - dense: bpy.props.BoolProperty(default=False, name='Import dense mesh', description='') + import_dense: bpy.props.BoolProperty(default=False, name='Import Meshing Node', 
description='') - textured: bpy.props.BoolProperty(default=True, name='Import textured mesh', description='') + import_textured: bpy.props.BoolProperty(default=True, name='Import Texturing Node', description='') def invoke(self, context, event): context.window_manager.fileselect_add(self) @@ -233,22 +211,34 @@ def execute(self, context): lay_col = find_view_layer(camera_col) context.view_layer.active_layer_collection = lay_col # filepath = PATH - cameras_sfm, cloud, dense_obj, tex_obj = read_meshlab(filepath) - if self.cameras: - import_cameras(cameras_sfm, self.img_front) + cameras_sfm, sparse, dense_obj, tex_obj, exr_folder = get_meshroom_paths(filepath) + if self.import_views: + import_cameras(cameras_sfm, self.img_front, self.undistorted, exr_folder) lay_col = find_view_layer(col) context.view_layer.active_layer_collection = lay_col - if self.sparse: - empty = bpy.data.objects.new('sparse cloud SFM', None) - col.objects.link(empty) - empty.select_set(True) - context.view_layer.objects.active = empty - bpy.ops.point_cloud_visualizer.load_ply_to_cache(filepath=cloud) - bpy.ops.point_cloud_visualizer.draw() - if self.dense and dense_obj: - import_object(dense_obj) - if self.textured and tex_obj: - import_object(tex_obj) + if self.import_sparse: + if os.path.exists(sparse): + empty = bpy.data.objects.new('sparse cloud SFM', None) + col.objects.link(empty) + empty.select_set(True) + context.view_layer.objects.active = empty + bpy.ops.point_cloud_visualizer.load_ply_to_cache(filepath=sparse) + bpy.ops.point_cloud_visualizer.draw() + elif os.path.exists(sparse.replace('.ply', '.abc')): + self.report({'ERROR_INVALID_INPUT'}, "You need to use .ply format instead of .abc to use colored pointcloud. "\ + "You can always import .abc through Blender alembic importer.") + else: + self.report({'ERROR_INVALID_INPUT'}, "Missing Meshroom reconstruction: StructureFromMotion (.ply format).") + if self.import_dense: + if dense_obj: + import_object(dense_obj) + else: + self.report({'ERROR_INVALID_INPUT'}, "Missing Meshroom reconstruction: Meshing.") + if self.import_textured and tex_obj: + if tex_obj: + import_object(tex_obj) + else: + self.report({'ERROR_INVALID_INPUT'}, "Missing Meshroom reconstruction: Texturing.") return {"FINISHED"} diff --git a/view3d_point_cloud_visualizer.py b/view3d_point_cloud_visualizer.py index 514d7c1..43fbe44 100644 --- a/view3d_point_cloud_visualizer.py +++ b/view3d_point_cloud_visualizer.py @@ -17,14 +17,14 @@ # ##### END GPL LICENSE BLOCK ##### bl_info = {"name": "Point Cloud Visualizer", - "description": "Display colored point cloud PLY files in 3D viewport.", + "description": "Display, render and convert to mesh colored point cloud PLY files.", "author": "Jakub Uhlik", - "version": (0, 7, 0), + "version": (0, 8, 12), "blender": (2, 80, 0), "location": "3D Viewport > Sidebar > Point Cloud Visualizer", "warning": "", - "wiki_url": "", - "tracker_url": "", + "wiki_url": "https://github.com/uhlik/bpy", + "tracker_url": "https://github.com/uhlik/bpy/issues", "category": "3D View", } @@ -37,18 +37,26 @@ import numpy as np import bpy -from bpy.props import PointerProperty, BoolProperty, StringProperty, FloatProperty, IntProperty, FloatVectorProperty -from bpy.types import PropertyGroup, Panel, Operator +from bpy.props import PointerProperty, BoolProperty, StringProperty, FloatProperty, IntProperty, FloatVectorProperty, EnumProperty +from bpy.types import PropertyGroup, Panel, Operator, AddonPreferences import gpu from gpu.types import GPUOffScreen, GPUShader, GPUBatch, 
GPUVertBuf, GPUVertFormat from gpu_extras.batch import batch_for_shader from bpy.app.handlers import persistent import bgl -from mathutils import Matrix, Vector +from mathutils import Matrix, Vector, Quaternion from bpy_extras.object_utils import world_to_camera_view from bpy_extras.io_utils import axis_conversion +# FIXME undo still doesn't work in some cases, from what i've seen, only when i am undoing operations on parent object, especially when you undo/redo e.g. transforms around load/draw operators, filepath property gets reset and the whole thing is drawn, but ui looks like loding never happened, i've added a quick fix storing path in cache, but it all depends on object name and this is bad. +# FIXME ply loading might not work with all ply files, for example, file spec seems does not forbid having two or more blocks of vertices with different props, currently i load only first block of vertices. maybe construct some messed up ply and test how for example meshlab behaves +# FIXME checking for normals/colors in points is kinda scattered all over +# TODO better docs, some gifs would be the best, i personally hate watching video tutorials when i need just sigle bit of information buried in 10+ minutes video, what a waste of time +# NOTE ~2k lines, maybe time to break into modules, but having sigle file is not a bad thing.. +# NOTE $ pycodestyle --ignore=W293,E501,E741,E402 --exclude='io_mesh_fast_obj/blender' . + + DEBUG = False @@ -58,14 +66,347 @@ def log(msg, indent=0, ): print(m) -def human_readable_number(num, suffix='', ): - # https://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size - f = 1000.0 - for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', ]: - if(abs(num) < f): - return "{:3.1f}{}{}".format(num, unit, suffix) - num /= f - return "{:.1f}{}{}".format(num, 'Y', suffix) +class InstanceMeshGenerator(): + def __init__(self): + self.def_verts, self.def_edges, self.def_faces = self.generate() + + def generate(self): + return [(0, 0, 0, ), ], [], [] + + +class VertexMeshGenerator(InstanceMeshGenerator): + def __init__(self): + log("{}:".format(self.__class__.__name__), 0, ) + super(VertexMeshGenerator, self).__init__() + + +class TetrahedronMeshGenerator(InstanceMeshGenerator): + def __init__(self, length=1.0, ): + log("{}:".format(self.__class__.__name__), 0, ) + if(length <= 0): + log("length is (or less than) 0, which is ridiculous. setting to 0.001..", 1) + length = 0.001 + self.length = length + super(TetrahedronMeshGenerator, self).__init__() + + def generate(self): + def circle2d_coords(radius, steps, offset, ox, oy): + r = [] + angstep = 2 * math.pi / steps + for i in range(steps): + x = math.sin(i * angstep + offset) * radius + ox + y = math.cos(i * angstep + offset) * radius + oy + r.append((x, y)) + return r + + l = self.length + excircle_radius = math.sqrt(3) / 3 * l + c = circle2d_coords(excircle_radius, 3, 0, 0, 0) + h = l / 3 * math.sqrt(6) + dv = [(c[0][0], c[0][1], 0, ), + (c[1][0], c[1][1], 0, ), + (c[2][0], c[2][1], 0, ), + (0, 0, h, ), ] + df = ([(0, 1, 2), + (3, 2, 1), + (3, 1, 0), + (3, 0, 2), ]) + return dv, [], df + + +class EquilateralTriangleMeshGenerator(InstanceMeshGenerator): + def __init__(self, length=1.0, offset=0.0, ): + log("{}:".format(self.__class__.__name__), 0, ) + if(length <= 0): + log("got ridiculous length value (smaller or equal to 0).. 
setting to 0.001", 1) + length = 0.001 + self.length = length + self.offset = offset + super(EquilateralTriangleMeshGenerator, self).__init__() + + def generate(self): + def circle2d_coords(radius, steps, offset, ox, oy): + r = [] + angstep = 2 * math.pi / steps + for i in range(steps): + x = math.sin(i * angstep + offset) * radius + ox + y = math.cos(i * angstep + offset) * radius + oy + r.append((x, y)) + return r + + r = math.sqrt(3) / 3 * self.length + c = circle2d_coords(r, 3, self.offset, 0, 0) + dv = [] + for i in c: + dv.append((i[0], i[1], 0, )) + df = [(0, 2, 1, ), ] + return dv, [], df + + +class IcoSphereMeshGenerator(InstanceMeshGenerator): + def __init__(self, radius=1, subdivision=2, ): + log("{}:".format(self.__class__.__name__), 0, ) + if(radius <= 0): + log("radius is (or less than) 0, which is ridiculous. setting to 0.001..", 1) + radius = 0.001 + self.radius = radius + subdivision = int(subdivision) + if(not (0 < subdivision <= 2)): + log("subdivision 1 or 2 allowed, not {}, setting to 1".format(subdivision), 1) + subdivision = 1 + self.subdivision = subdivision + super(IcoSphereMeshGenerator, self).__init__() + + def generate(self): + if(self.subdivision == 1): + dv = [(0.0, 0.0, -0.5), (0.3617999851703644, -0.2628600001335144, -0.22360749542713165), (-0.13819250464439392, -0.42531999945640564, -0.22360749542713165), (-0.44721248745918274, 0.0, -0.22360749542713165), (-0.13819250464439392, 0.42531999945640564, -0.22360749542713165), (0.3617999851703644, 0.2628600001335144, -0.22360749542713165), (0.13819250464439392, -0.42531999945640564, 0.22360749542713165), (-0.3617999851703644, -0.2628600001335144, 0.22360749542713165), (-0.3617999851703644, 0.2628600001335144, 0.22360749542713165), (0.13819250464439392, 0.42531999945640564, 0.22360749542713165), (0.44721248745918274, 0.0, 0.22360749542713165), (0.0, 0.0, 0.5)] + df = [(0, 1, 2), (1, 0, 5), (0, 2, 3), (0, 3, 4), (0, 4, 5), (1, 5, 10), (2, 1, 6), (3, 2, 7), (4, 3, 8), (5, 4, 9), (1, 10, 6), (2, 6, 7), (3, 7, 8), (4, 8, 9), (5, 9, 10), (6, 10, 11), (7, 6, 11), (8, 7, 11), (9, 8, 11), (10, 9, 11)] + elif(self.subdivision == 2): + dv = [(0.0, 0.0, -0.5), (0.36180365085601807, -0.2628626525402069, -0.22360976040363312), (-0.1381940096616745, -0.42532461881637573, -0.22360992431640625), (-0.4472131133079529, 0.0, -0.22360780835151672), (-0.1381940096616745, 0.42532461881637573, -0.22360992431640625), (0.36180365085601807, 0.2628626525402069, -0.22360976040363312), (0.1381940096616745, -0.42532461881637573, 0.22360992431640625), (-0.36180365085601807, -0.2628626525402069, 0.22360976040363312), (-0.36180365085601807, 0.2628626525402069, 0.22360976040363312), (0.1381940096616745, 0.42532461881637573, 0.22360992431640625), (0.4472131133079529, 0.0, 0.22360780835151672), (0.0, 0.0, 0.5), (-0.08122777938842773, -0.24999763071537018, -0.42532721161842346), (0.21266134083271027, -0.15450569987297058, -0.4253270924091339), (0.13143441081047058, -0.40450581908226013, -0.26286882162094116), (0.4253239333629608, 0.0, -0.2628679573535919), (0.21266134083271027, 0.15450569987297058, -0.4253270924091339), (-0.262864887714386, 0.0, -0.42532584071159363), (-0.3440946936607361, -0.24999846518039703, -0.26286810636520386), (-0.08122777938842773, 0.24999763071537018, -0.42532721161842346), (-0.3440946936607361, 0.24999846518039703, -0.26286810636520386), (0.13143441081047058, 0.40450581908226013, -0.26286882162094116), (0.47552892565727234, -0.15450631082057953, 0.0), (0.47552892565727234, 0.15450631082057953, 0.0), (0.0, 
-0.4999999701976776, 0.0), (0.2938928008079529, -0.4045083522796631, 0.0), (-0.47552892565727234, -0.15450631082057953, 0.0), (-0.2938928008079529, -0.4045083522796631, 0.0), (-0.2938928008079529, 0.4045083522796631, 0.0), (-0.47552892565727234, 0.15450631082057953, 0.0), (0.2938928008079529, 0.4045083522796631, 0.0), (0.0, 0.4999999701976776, 0.0), (0.3440946936607361, -0.24999846518039703, 0.26286810636520386), (-0.13143441081047058, -0.40450581908226013, 0.26286882162094116), (-0.4253239333629608, 0.0, 0.2628679573535919), (-0.13143441081047058, 0.40450581908226013, 0.26286882162094116), (0.3440946936607361, 0.24999846518039703, 0.26286810636520386), (0.08122777938842773, -0.24999763071537018, 0.4253271818161011), (0.262864887714386, 0.0, 0.42532584071159363), (-0.21266134083271027, -0.15450569987297058, 0.4253270924091339), (-0.21266134083271027, 0.15450569987297058, 0.4253270924091339), (0.08122777938842773, 0.24999763071537018, 0.4253271818161011)] + df = [(0, 13, 12), (1, 13, 15), (0, 12, 17), (0, 17, 19), (0, 19, 16), (1, 15, 22), (2, 14, 24), (3, 18, 26), (4, 20, 28), (5, 21, 30), (1, 22, 25), (2, 24, 27), (3, 26, 29), (4, 28, 31), (5, 30, 23), (6, 32, 37), (7, 33, 39), (8, 34, 40), (9, 35, 41), (10, 36, 38), (38, 41, 11), (38, 36, 41), (36, 9, 41), (41, 40, 11), (41, 35, 40), (35, 8, 40), (40, 39, 11), (40, 34, 39), (34, 7, 39), (39, 37, 11), (39, 33, 37), (33, 6, 37), (37, 38, 11), (37, 32, 38), (32, 10, 38), (23, 36, 10), (23, 30, 36), (30, 9, 36), (31, 35, 9), (31, 28, 35), (28, 8, 35), (29, 34, 8), (29, 26, 34), (26, 7, 34), (27, 33, 7), (27, 24, 33), (24, 6, 33), (25, 32, 6), (25, 22, 32), (22, 10, 32), (30, 31, 9), (30, 21, 31), (21, 4, 31), (28, 29, 8), (28, 20, 29), (20, 3, 29), (26, 27, 7), (26, 18, 27), (18, 2, 27), (24, 25, 6), (24, 14, 25), (14, 1, 25), (22, 23, 10), (22, 15, 23), (15, 5, 23), (16, 21, 5), (16, 19, 21), (19, 4, 21), (19, 20, 4), (19, 17, 20), (17, 3, 20), (17, 18, 3), (17, 12, 18), (12, 2, 18), (15, 16, 5), (15, 13, 16), (13, 0, 16), (12, 14, 2), (12, 13, 14), (13, 1, 14)] + else: + raise ValueError("IcoSphereMeshGenerator: unsupported subdivision: {}".format(self.subdivision)) + return dv, [], df + + +class CubeMeshGenerator(InstanceMeshGenerator): + def __init__(self, length=1.0, ): + log("{}:".format(self.__class__.__name__), 0, ) + if(length <= 0): + log("less is (or less than) 0, which is ridiculous. 
setting to 0.001..", 1) + radius = 0.001 + self.length = length + super(CubeMeshGenerator, self).__init__() + + def generate(self): + l = self.length / 2 + dv = [(+l, +l, -l), + (+l, -l, -l), + (-l, -l, -l), + (-l, +l, -l), + (+l, +l, +l), + (+l, -l, +l), + (-l, -l, +l), + (-l, +l, +l), ] + df = [(0, 1, 2, 3), + (4, 7, 6, 5), + (0, 4, 5, 1), + (1, 5, 6, 2), + (2, 6, 7, 3), + (4, 0, 3, 7), ] + return dv, [], df + + +class PCMeshInstancer(): + def __init__(self, name, points, generator=None, matrix=None, size=0.01, normal_align=False, vcols=False, ): + log("{}:".format(self.__class__.__name__), 0, ) + + self.name = name + self.points = points + if(generator is None): + generator = InstanceMeshGenerator() + self.generator = generator + if(matrix is None): + matrix = Matrix() + self.matrix = matrix + self.size = size + self.normal_align = normal_align + self.vcols = vcols + + self.uuid = uuid.uuid1() + + log("calculating matrices..", 1) + self.calc_matrices() + + log("calculating mesh..", 1) + self.calc_mesh_data() + + log("creating mesh..", 1) + self.mesh = bpy.data.meshes.new(self.name) + self.mesh.from_pydata(self.verts, self.edges, self.faces) + self.object = self.add_object(self.name, self.mesh) + self.object.matrix_world = self.matrix + self.activate_object(self.object) + + if(self.vcols): + log("making vertex colors..", 1) + self.make_vcols() + + log("cleanup..", 1) + + context = bpy.context + view_layer = context.view_layer + collection = view_layer.active_layer_collection.collection + collection.objects.unlink(self.def_object) + bpy.data.objects.remove(self.def_object) + bpy.data.meshes.remove(self.def_mesh) + + log("done.", 1) + + def add_object(self, name, data, ): + so = bpy.context.scene.objects + for i in so: + i.select_set(False) + o = bpy.data.objects.new(name, data) + context = bpy.context + view_layer = context.view_layer + collection = view_layer.active_layer_collection.collection + collection.objects.link(o) + o.select_set(True) + view_layer.objects.active = o + return o + + def activate_object(self, obj, ): + bpy.ops.object.select_all(action='DESELECT') + context = bpy.context + view_layer = context.view_layer + obj.select_set(True) + view_layer.objects.active = obj + + def calc_matrices(self): + def split(p): + co = (p[0], p[1], p[2]) + no = (p[3], p[4], p[5]) + rgb = (p[6], p[7], p[8]) + return co, no, rgb + + def rotation_to(a, b): + # http://stackoverflow.com/questions/1171849/finding-quaternion-representing-the-rotation-from-one-vector-to-another + # https://github.com/toji/gl-matrix/blob/f0583ef53e94bc7e78b78c8a24f09ed5e2f7a20c/src/gl-matrix/quat.js#L54 + + a = a.normalized() + b = b.normalized() + q = Quaternion() + + tmpvec3 = Vector() + xUnitVec3 = Vector((1, 0, 0)) + yUnitVec3 = Vector((0, 1, 0)) + + dot = a.dot(b) + if(dot < -0.999999): + tmpvec3 = xUnitVec3.cross(a) + if(tmpvec3.length < 0.000001): + tmpvec3 = yUnitVec3.cross(a) + tmpvec3.normalize() + q = Quaternion(tmpvec3, math.pi) + elif(dot > 0.999999): + q.x = 0 + q.y = 0 + q.z = 0 + q.w = 1 + else: + tmpvec3 = a.cross(b) + q.x = tmpvec3[0] + q.y = tmpvec3[1] + q.z = tmpvec3[2] + q.w = 1 + dot + q.normalize() + return q + + _, _, osv = self.matrix.decompose() + osm = Matrix(((osv.x, 0.0, 0.0, 0.0), (0.0, osv.y, 0.0, 0.0), (0.0, 0.0, osv.z, 0.0), (0.0, 0.0, 0.0, 1.0))).inverted() + + # calculate instance matrices from points.. 
+ self.matrices = [] + for i, p in enumerate(self.points): + co, no, rgb = split(p) + # location + ml = Matrix.Translation(co).to_4x4() + if(self.normal_align): + # rotation from normal + quat = rotation_to(Vector((0, 0, 1)), Vector(no)) + mr = quat.to_matrix().to_4x4() + else: + mr = Matrix.Rotation(0.0, 4, 'Z') + # scale + s = self.size + ms = Matrix(((s, 0.0, 0.0, 0.0), (0.0, s, 0.0, 0.0), (0.0, 0.0, s, 0.0), (0.0, 0.0, 0.0, 1.0))) + # combine + m = ml @ mr @ ms @ osm + self.matrices.append(m) + + def calc_mesh_data(self): + # initialize lists + l = len(self.matrices) + self.verts = [(0, 0, 0)] * (l * len(self.generator.def_verts)) + self.edges = [(0, 0)] * (l * len(self.generator.def_edges)) + self.faces = [(0)] * (l * len(self.generator.def_faces)) + self.colors = [None] * l + + # generator data + v, e, f = self.generator.generate() + self.def_verts = v + self.def_edges = e + self.def_faces = f + + # def object + self.def_mesh = bpy.data.meshes.new("PCInstancer-def_mesh-{}".format(self.uuid)) + self.def_mesh.from_pydata(v, e, f) + self.def_object = self.add_object("PCInstancer-def_object-{}".format(self.uuid), self.def_mesh) + + # loop over matrices + for i, m in enumerate(self.matrices): + # transform mesh + self.def_mesh.transform(m) + # store + self.write_pydata_chunk(i) + # reset mesh + for j, v in enumerate(self.def_object.data.vertices): + v.co = Vector(self.def_verts[j]) + + def write_pydata_chunk(self, i, ): + # exponents + ev = len(self.generator.def_verts) + ee = len(self.generator.def_edges) + ef = len(self.generator.def_faces) + # vertices + for j in range(ev): + self.verts[(i * ev) + j] = self.def_mesh.vertices[j].co.to_tuple() + # edges + if(len(self.def_edges) is not 0): + for j in range(ee): + self.edges[(i * ee) + j] = ((i * ev) + self.def_edges[j][0], + (i * ev) + self.def_edges[j][1], ) + # faces + if(len(self.def_faces) is not 0): + for j in range(ef): + # tris + if(len(self.def_faces[j]) == 3): + self.faces[(i * ef) + j] = ((i * ev) + self.def_faces[j][0], + (i * ev) + self.def_faces[j][1], + (i * ev) + self.def_faces[j][2], ) + # quads + elif(len(self.def_faces[j]) == 4): + self.faces[(i * ef) + j] = ((i * ev) + self.def_faces[j][0], + (i * ev) + self.def_faces[j][1], + (i * ev) + self.def_faces[j][2], + (i * ev) + self.def_faces[j][3], ) + # ngons + else: + ngon = [] + for a in range(len(self.def_faces[j])): + ngon.append((i * ev) + self.def_faces[j][a]) + self.faces[(i * ef) + j] = tuple(ngon) + + def make_vcols(self): + if(len(self.mesh.loops) != 0): + colors = [] + for i, v in enumerate(self.points): + rgb = (v[6], v[7], v[8]) + col = (rgb[0] / 255.0, rgb[1] / 255.0, rgb[2] / 255.0) + # colors.append(Color(col)) + colors.append(col) + + num = len(self.def_verts) + vc = self.mesh.vertex_colors.new() + for l in self.mesh.loops: + vi = l.vertex_index + li = l.index + c = colors[int(vi / num)] + # vc.data[li].color = (c.r, c.g, c.b, 1.0, ) + vc.data[li].color = c + (1.0, ) + else: + log("no mesh loops in mesh", 2, ) class BinPlyPointCloudReader(): @@ -200,6 +541,23 @@ def __init__(self, path, ): else: self._data_binary() log("loaded {} vertices".format(len(self.points)), 1) + # remove alpha if present + self.points = self.points[[b for b in list(self.points.dtype.names) if b != 'alpha']] + # some info + nms = self.points.dtype.names + self.has_vertices = True + self.has_normals = True + self.has_colors = True + if(not set(('x', 'y', 'z')).issubset(nms)): + self.has_vertices = False + if(not set(('nx', 'ny', 'nz')).issubset(nms)): + self.has_normals = False + 
if(not set(('red', 'green', 'blue')).issubset(nms)): + self.has_colors = False + log('has_vertices: {}'.format(self.has_vertices), 2) + log('has_normals: {}'.format(self.has_normals), 2) + log('has_colors: {}'.format(self.has_colors), 2) + log("done.", 1) def _header(self): @@ -266,9 +624,6 @@ def _header(self): current_element['props'].append((n, self._types[c], self._types[t], )) else: _, t, n = l.split(' ') - if(n == 'alpha'): - # skip alpha, maybe use it in future versions, but now it is useless - continue if(self._ply_format == 'ascii'): current_element['props'].append((n, self._types[t])) else: @@ -421,173 +776,56 @@ class PCVShaders(): fragColor = col; } ''' - - -def load_ply_to_cache(operator, context, ): - pcv = context.object.point_cloud_visualizer - filepath = pcv.filepath - - __t = time.time() - - log('load data..') - _t = time.time() - - points = [] - try: - # points = BinPlyPointCloudReader(filepath).points - points = PlyPointCloudReader(filepath).points - except Exception as e: - if(operator is not None): - operator.report({'ERROR'}, str(e)) - else: - raise e - if(len(points) == 0): - operator.report({'ERROR'}, "No vertices loaded from file at {}".format(filepath)) - return False - - _d = datetime.timedelta(seconds=time.time() - _t) - log("completed in {}.".format(_d)) - - log('shuffle data..') - _t = time.time() - - np.random.shuffle(points) - - _d = datetime.timedelta(seconds=time.time() - _t) - log("completed in {}.".format(_d)) - - log('process data..') - _t = time.time() - - if(not set(('x', 'y', 'z')).issubset(points.dtype.names)): - # this is very unlikely.. - operator.report({'ERROR'}, "Loaded data seems to miss vertex locations.") - return False - normals = True - if(not set(('nx', 'ny', 'nz')).issubset(points.dtype.names)): - normals = False - pcv.has_normals = normals - if(not pcv.has_normals): - pcv.light_enabled = False - vcols = True - if(not set(('red', 'green', 'blue')).issubset(points.dtype.names)): - vcols = False - pcv.has_vcols = vcols - - vs = np.column_stack((points['x'], points['y'], points['z'], )) - - if(normals): - ns = np.column_stack((points['nx'], points['ny'], points['nz'], )) - else: - n = len(points) - ns = np.column_stack((np.full(n, 0.0, dtype=np.float32, ), - np.full(n, 0.0, dtype=np.float32, ), - np.full(n, 1.0, dtype=np.float32, ), )) - - if(vcols): - cs = np.column_stack((points['red'] / 255, points['green'] / 255, points['blue'] / 255, np.ones(len(points), dtype=float, ), )) - cs = cs.astype(np.float32) - else: - n = len(points) - default_color = 0.65 - cs = np.column_stack((np.full(n, default_color, dtype=np.float32, ), - np.full(n, default_color, dtype=np.float32, ), - np.full(n, default_color, dtype=np.float32, ), - np.ones(n, dtype=np.float32, ), )) - - u = str(uuid.uuid1()) - o = context.object - - pcv.uuid = u - - d = PCVManager.new() - d['uuid'] = u - d['stats'] = len(vs) - d['vertices'] = vs - d['colors'] = cs - d['normals'] = ns - - d['length'] = len(vs) - dp = pcv.display_percent - l = int((len(vs) / 100) * dp) - if(dp >= 99): - l = len(vs) - d['display_percent'] = l - d['current_display_percent'] = l - shader = GPUShader(PCVShaders.vertex_shader, PCVShaders.fragment_shader) - batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], "normal": ns[:l], }) - - d['shader'] = shader - d['batch'] = batch - d['ready'] = True - d['object'] = o - d['name'] = o.name - - PCVManager.add(d) - - _d = datetime.timedelta(seconds=time.time() - _t) - log("completed in {}.".format(_d)) - - log("-" * 50) - __d = 
datetime.timedelta(seconds=time.time() - __t) - log("load and process completed in {}.".format(__d)) - log("-" * 50) - - return True - - -def save_render(operator, scene, image, render_suffix, render_zeros, ): - f = False - n = render_suffix - rs = bpy.context.scene.render - op = rs.filepath - if(len(op) > 0): - if(not op.endswith(os.path.sep)): - f = True - op, n = os.path.split(op) - else: - log("error: output path is not set".format(e)) - operator.report({'ERROR'}, "Output path is not set.") - return - - if(f): - n = "{}_{}".format(n, render_suffix) - - fnm = "{}_{:0{z}d}.png".format(n, scene.frame_current, z=render_zeros) - p = os.path.join(os.path.realpath(bpy.path.abspath(op)), fnm) - - s = rs.image_settings - ff = s.file_format - cm = s.color_mode - cd = s.color_depth - - vs = scene.view_settings - vsvt = vs.view_transform - vsl = vs.look - vs.view_transform = 'Default' - vs.look = 'None' - - s.file_format = 'PNG' - s.color_mode = 'RGBA' - s.color_depth = '8' - - try: - image.save_render(p) - log("image '{}' saved".format(p)) - except Exception as e: - s.file_format = ff - s.color_mode = cm - s.color_depth = cd - - log("error: {}".format(e)) - operator.report({'ERROR'}, "Unable to save render image, see console for details.") - return - - s.file_format = ff - s.color_mode = cm - s.color_depth = cd - vs.view_transform = vsvt - vs.look = vsl + vertex_shader_simple = ''' + in vec3 position; + in vec4 color; + uniform mat4 perspective_matrix; + uniform mat4 object_matrix; + uniform float point_size; + uniform float alpha_radius; + out vec4 f_color; + out float f_alpha_radius; + void main() + { + gl_Position = perspective_matrix * object_matrix * vec4(position, 1.0f); + gl_PointSize = point_size; + f_color = color; + f_alpha_radius = alpha_radius; + } + ''' + fragment_shader_simple = ''' + in vec4 f_color; + in float f_alpha_radius; + out vec4 fragColor; + void main() + { + float r = 0.0f; + float a = 1.0f; + vec2 cxy = 2.0f * gl_PointCoord - 1.0f; + r = dot(cxy, cxy); + if(r > f_alpha_radius){ + discard; + } + fragColor = f_color * a; + } + ''' + vertex_shader_normals = ''' + uniform mat4 perspective_matrix; + uniform mat4 object_matrix; + in vec3 position; + void main() + { + gl_Position = perspective_matrix * object_matrix * vec4(position, 1.0f); + } + ''' + fragment_shader_normals = ''' + uniform vec4 color; + out vec4 fragColor; + void main() + { + fragColor = color; + } + ''' class PCVManager(): @@ -595,6 +833,136 @@ class PCVManager(): handle = None initialized = False + @classmethod + def load_ply_to_cache(cls, operator, context, ): + pcv = context.object.point_cloud_visualizer + filepath = pcv.filepath + + __t = time.time() + + log('load data..') + _t = time.time() + + points = [] + try: + # points = BinPlyPointCloudReader(filepath).points + points = PlyPointCloudReader(filepath).points + except Exception as e: + if(operator is not None): + operator.report({'ERROR'}, str(e)) + else: + raise e + if(len(points) == 0): + operator.report({'ERROR'}, "No vertices loaded from file at {}".format(filepath)) + return False + + _d = datetime.timedelta(seconds=time.time() - _t) + log("completed in {}.".format(_d)) + + log('shuffle data..') + _t = time.time() + + np.random.shuffle(points) + + _d = datetime.timedelta(seconds=time.time() - _t) + log("completed in {}.".format(_d)) + + log('process data..') + _t = time.time() + + if(not set(('x', 'y', 'z')).issubset(points.dtype.names)): + # this is very unlikely.. 
+ operator.report({'ERROR'}, "Loaded data seems to miss vertex locations.") + return False + normals = True + if(not set(('nx', 'ny', 'nz')).issubset(points.dtype.names)): + normals = False + pcv.has_normals = normals + if(not pcv.has_normals): + pcv.illumination = False + vcols = True + if(not set(('red', 'green', 'blue')).issubset(points.dtype.names)): + vcols = False + pcv.has_vcols = vcols + + vs = np.column_stack((points['x'], points['y'], points['z'], )) + + if(normals): + ns = np.column_stack((points['nx'], points['ny'], points['nz'], )) + else: + n = len(points) + ns = np.column_stack((np.full(n, 0.0, dtype=np.float32, ), + np.full(n, 0.0, dtype=np.float32, ), + np.full(n, 1.0, dtype=np.float32, ), )) + + if(vcols): + cs = np.column_stack((points['red'] / 255, points['green'] / 255, points['blue'] / 255, np.ones(len(points), dtype=float, ), )) + cs = cs.astype(np.float32) + else: + n = len(points) + # default_color = 0.65 + # cs = np.column_stack((np.full(n, default_color, dtype=np.float32, ), + # np.full(n, default_color, dtype=np.float32, ), + # np.full(n, default_color, dtype=np.float32, ), + # np.ones(n, dtype=np.float32, ), )) + + preferences = bpy.context.preferences + addon_prefs = preferences.addons[__name__].preferences + col = addon_prefs.default_vertex_color[:] + col = tuple([c ** (1 / 2.2) for c in col]) + (1.0, ) + cs = np.column_stack((np.full(n, col[0], dtype=np.float32, ), + np.full(n, col[1], dtype=np.float32, ), + np.full(n, col[2], dtype=np.float32, ), + np.ones(n, dtype=np.float32, ), )) + + u = str(uuid.uuid1()) + o = context.object + + pcv.uuid = u + + d = PCVManager.new() + d['filepath'] = filepath + d['uuid'] = u + d['stats'] = len(vs) + d['vertices'] = vs + d['colors'] = cs + d['normals'] = ns + + d['length'] = len(vs) + dp = pcv.display_percent + l = int((len(vs) / 100) * dp) + if(dp >= 99): + l = len(vs) + d['display_percent'] = l + d['current_display_percent'] = l + + ienabled = pcv.illumination + d['illumination'] = ienabled + if(ienabled): + shader = GPUShader(PCVShaders.vertex_shader, PCVShaders.fragment_shader) + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], "normal": ns[:l], }) + else: + shader = GPUShader(PCVShaders.vertex_shader_simple, PCVShaders.fragment_shader_simple) + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], }) + + d['shader'] = shader + d['batch'] = batch + d['ready'] = True + d['object'] = o + d['name'] = o.name + + PCVManager.add(d) + + _d = datetime.timedelta(seconds=time.time() - _t) + log("completed in {}.".format(_d)) + + log("-" * 50) + __d = datetime.timedelta(seconds=time.time() - __t) + log("load and process completed in {}.".format(__d)) + log("-" * 50) + + return True + @classmethod def render(cls, uuid, ): bgl.glEnable(bgl.GL_PROGRAM_POINT_SIZE) @@ -610,7 +978,10 @@ def render(cls, uuid, ): vs = ci['vertices'] cs = ci['colors'] ns = ci['normals'] - batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], "normal": ns[:l], }) + if(ci['illumination']): + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], "normal": ns[:l], }) + else: + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], }) ci['batch'] = batch o = ci['object'] @@ -624,6 +995,26 @@ def render(cls, uuid, ): # update stored reference ci['object'] = o pcv = o.point_cloud_visualizer + # push back correct uuid, since undo changed it, why? WHY? why do i even bother? 
+ pcv.uuid = uuid + # push back filepath, it might get lost during undo/redo + pcv.filepath = ci['filepath'] + + if(ci['illumination'] != pcv.illumination): + vs = ci['vertices'] + cs = ci['colors'] + ns = ci['normals'] + l = ci['current_display_percent'] + if(pcv.illumination): + shader = GPUShader(PCVShaders.vertex_shader, PCVShaders.fragment_shader) + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], "normal": ns[:l], }) + ci['illumination'] = True + else: + shader = GPUShader(PCVShaders.vertex_shader_simple, PCVShaders.fragment_shader_simple) + batch = batch_for_shader(shader, 'POINTS', {"position": vs[:l], "color": cs[:l], }) + ci['illumination'] = False + ci['shader'] = shader + ci['batch'] = batch shader.bind() pm = bpy.context.region_data.perspective_matrix @@ -632,7 +1023,7 @@ def render(cls, uuid, ): shader.uniform_float("point_size", pcv.point_size) shader.uniform_float("alpha_radius", pcv.alpha_radius) - if(pcv.light_enabled and pcv.has_normals): + if(pcv.illumination and pcv.has_normals and ci['illumination']): cm = Matrix(((-1.0, 0.0, 0.0, 0.0, ), (0.0, -0.0, 1.0, 0.0, ), (0.0, -1.0, -0.0, 0.0, ), (0.0, 0.0, 0.0, 1.0, ), )) _, obrot, _ = o.matrix_world.decompose() mr = obrot.to_matrix().to_4x4() @@ -663,17 +1054,78 @@ def render(cls, uuid, ): c = pcv.shadow_intensity shader.uniform_float("shadow_intensity", (c, c, c, )) shader.uniform_float("show_normals", float(pcv.show_normals)) - shader.uniform_float("show_illumination", float(pcv.light_enabled)) + shader.uniform_float("show_illumination", float(pcv.illumination)) else: - z = (0, 0, 0) - shader.uniform_float("light_direction", z) - shader.uniform_float("light_intensity", z) - shader.uniform_float("shadow_direction", z) - shader.uniform_float("shadow_intensity", z) - shader.uniform_float("show_normals", float(False)) - shader.uniform_float("show_illumination", float(False)) + # z = (0, 0, 0) + # shader.uniform_float("light_direction", z) + # shader.uniform_float("light_intensity", z) + # shader.uniform_float("shadow_direction", z) + # shader.uniform_float("shadow_intensity", z) + # shader.uniform_float("show_normals", float(False)) + # shader.uniform_float("show_illumination", float(False)) + pass batch.draw(shader) + + if(pcv.vertex_normals and pcv.has_normals): + + def make_arrays(vs, ns, s, ): + l = len(vs) + coords = [None] * (l * 2) + indices = [None] * l + for i, v in enumerate(vs): + n = Vector(ns[i]) + v = Vector(v) + coords[i * 2 + 0] = v + coords[i * 2 + 1] = v + (n.normalized() * s) + indices[i] = (i * 2 + 0, i * 2 + 1, ) + return coords, indices + + def make(ci): + s = pcv.vertex_normals_size + l = ci['current_display_percent'] + vs = ci['vertices'][:l] + ns = ci['normals'][:l] + coords, indices = make_arrays(vs, ns, s, ) + # shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR') + shader = GPUShader(PCVShaders.vertex_shader_normals, PCVShaders.fragment_shader_normals) + batch = batch_for_shader(shader, 'LINES', {'position': coords}, indices=indices, ) + d = {'shader': shader, + 'batch': batch, + 'coords': coords, + 'indices': indices, + 'current_display_percent': l, + 'size': s, + 'current_size': s, } + ci['vertex_normals'] = d + return shader, batch + + if("vertex_normals" not in ci.keys()): + shader, batch = make(ci) + else: + d = ci['vertex_normals'] + shader = d['shader'] + batch = d['batch'] + ok = True + if(ci['current_display_percent'] != d['current_display_percent']): + ok = False + if(d['current_size'] != pcv.vertex_normals_size): + ok = False + if(not ok): + shader, batch 
= make(ci) + + shader.bind() + pm = bpy.context.region_data.perspective_matrix + shader.uniform_float("perspective_matrix", pm) + shader.uniform_float("object_matrix", o.matrix_world) + + preferences = bpy.context.preferences + addon_prefs = preferences.addons[__name__].preferences + col = addon_prefs.normal_color[:] + col = tuple([c ** (1 / 2.2) for c in col]) + (1.0, ) + # shader.uniform_float("color", (35 / 255, 97 / 255, 221 / 255, 1, ), ) + shader.uniform_float("color", col, ) + batch.draw(shader) @classmethod def handler(cls): @@ -730,6 +1182,7 @@ def new(cls): 'colors': None, 'display_percent': None, 'current_display_percent': None, + 'illumination': False, 'shader': False, 'batch': False, 'ready': False, @@ -796,7 +1249,7 @@ def execute(self, context): if(pcv.uuid not in PCVManager.cache): pcv.uuid = "" - ok = load_ply_to_cache(self, context) + ok = PCVManager.load_ply_to_cache(self, context) if(not ok): return {'CANCELLED'} @@ -866,7 +1319,7 @@ def execute(self, context): PCVManager.cache[pcv.uuid]['kill'] = True PCVManager.gc() - ok = load_ply_to_cache(self, context) + ok = PCVManager.load_ply_to_cache(self, context) if(not ok): return {'CANCELLED'} @@ -947,12 +1400,17 @@ def execute(self, context): cs = [b for _, a, b, c in sps][::-1] ns = [c for _, a, b, c in sps][::-1] - shader = GPUShader(PCVShaders.vertex_shader, PCVShaders.fragment_shader) - batch = batch_for_shader(shader, 'POINTS', {"position": vs, "color": cs, "normal": ns, }) + if(pcv.illumination): + shader = GPUShader(PCVShaders.vertex_shader, PCVShaders.fragment_shader) + batch = batch_for_shader(shader, 'POINTS', {"position": vs, "color": cs, "normal": ns, }) + else: + shader = GPUShader(PCVShaders.vertex_shader_simple, PCVShaders.fragment_shader_simple) + batch = batch_for_shader(shader, 'POINTS', {"position": vs, "color": cs, }) shader.bind() view_matrix = cam.matrix_world.inverted() - camera_matrix = cam.calc_matrix_camera(bpy.context.depsgraph, x=render.resolution_x, y=render.resolution_y, scale_x=render.pixel_aspect_x, scale_y=render.pixel_aspect_y, ) + depsgraph = bpy.context.evaluated_depsgraph_get() + camera_matrix = cam.calc_matrix_camera(depsgraph, x=render.resolution_x, y=render.resolution_y, scale_x=render.pixel_aspect_x, scale_y=render.pixel_aspect_y, ) perspective_matrix = camera_matrix @ view_matrix shader.uniform_float("perspective_matrix", perspective_matrix) @@ -960,7 +1418,7 @@ def execute(self, context): shader.uniform_float("point_size", pcv.render_point_size) shader.uniform_float("alpha_radius", pcv.alpha_radius) - if(pcv.light_enabled and pcv.has_normals): + if(pcv.illumination and pcv.has_normals and cloud['illumination']): cm = Matrix(((-1.0, 0.0, 0.0, 0.0, ), (0.0, -0.0, 1.0, 0.0, ), (0.0, -1.0, -0.0, 0.0, ), (0.0, 0.0, 0.0, 1.0, ), )) _, obrot, _ = o.matrix_world.decompose() mr = obrot.to_matrix().to_4x4() @@ -978,15 +1436,16 @@ def execute(self, context): c = pcv.shadow_intensity shader.uniform_float("shadow_intensity", (c, c, c, )) shader.uniform_float("show_normals", float(pcv.show_normals)) - shader.uniform_float("show_illumination", float(pcv.light_enabled)) + shader.uniform_float("show_illumination", float(pcv.illumination)) else: - z = (0, 0, 0) - shader.uniform_float("light_direction", z) - shader.uniform_float("light_intensity", z) - shader.uniform_float("shadow_direction", z) - shader.uniform_float("shadow_intensity", z) - shader.uniform_float("show_normals", float(False)) - shader.uniform_float("show_illumination", float(False)) + # z = (0, 0, 0) + # 
shader.uniform_float("light_direction", z) + # shader.uniform_float("light_intensity", z) + # shader.uniform_float("shadow_direction", z) + # shader.uniform_float("shadow_intensity", z) + # shader.uniform_float("show_normals", float(False)) + # shader.uniform_float("show_illumination", float(False)) + pass batch.draw(shader) @@ -1011,6 +1470,59 @@ def execute(self, context): image.pixels = [v / 255 for v in buffer] # save as image file + def save_render(operator, scene, image, render_suffix, render_zeros, ): + f = False + n = render_suffix + rs = bpy.context.scene.render + op = rs.filepath + if(len(op) > 0): + if(not op.endswith(os.path.sep)): + f = True + op, n = os.path.split(op) + else: + log("error: output path is not set".format(e)) + operator.report({'ERROR'}, "Output path is not set.") + return + + if(f): + n = "{}_{}".format(n, render_suffix) + + fnm = "{}_{:0{z}d}.png".format(n, scene.frame_current, z=render_zeros) + p = os.path.join(os.path.realpath(bpy.path.abspath(op)), fnm) + + s = rs.image_settings + ff = s.file_format + cm = s.color_mode + cd = s.color_depth + + vs = scene.view_settings + vsvt = vs.view_transform + vsl = vs.look + vs.view_transform = 'Standard' + vs.look = 'None' + + s.file_format = 'PNG' + s.color_mode = 'RGBA' + s.color_depth = '8' + + try: + image.save_render(p) + log("image '{}' saved".format(p)) + except Exception as e: + s.file_format = ff + s.color_mode = cm + s.color_depth = cd + + log("error: {}".format(e)) + operator.report({'ERROR'}, "Unable to save render image, see console for details.") + return + + s.file_format = ff + s.color_mode = cm + s.color_depth = cd + vs.view_transform = vsvt + vs.look = vsl + save_render(self, scene, image, render_suffix, render_zeros, ) # restore @@ -1043,13 +1555,190 @@ def execute(self, context): return {'CANCELLED'} fc = scene.frame_current - for i in range(scene.frame_start, scene.frame_end, 1): + for i in range(scene.frame_start, scene.frame_end + 1, 1): scene.frame_set(i) bpy.ops.point_cloud_visualizer.render() scene.frame_set(fc) return {'FINISHED'} +class PCV_OT_convert(Operator): + bl_idname = "point_cloud_visualizer.convert" + bl_label = "Convert" + bl_description = "Convert point cloud to mesh" + + @classmethod + def poll(cls, context): + pcv = context.object.point_cloud_visualizer + ok = False + for k, v in PCVManager.cache.items(): + if(v['uuid'] == pcv.uuid): + if(v['ready']): + if(v['draw']): + ok = True + return ok + + def execute(self, context): + scene = context.scene + pcv = context.object.point_cloud_visualizer + o = context.object + + cache = PCVManager.cache[pcv.uuid] + + l = cache['stats'] + if(not pcv.mesh_all): + nump = l + mps = pcv.mesh_percentage + l = int((nump / 100) * mps) + if(mps >= 99): + l = nump + + vs = cache['vertices'][:l] + ns = cache['normals'][:l] + cs = cache['colors'][:l] + + points = [] + for i in range(l): + c = tuple([int(255 * cs[i][j]) for j in range(3)]) + points.append(tuple(vs[i]) + tuple(ns[i]) + c) + # dtype = np.dtype([('x', '= 99): + _l = _nump + _vs = _cache['vertices'][:_l] + _ns = _cache['normals'][:_l] + _cs = _cache['colors'][:_l] + + points = [] + for i in range(_l): + _r = 255 * _cs[i][0] + _g = 255 * _cs[i][1] + _b = 255 * _cs[i][2] + points.append(tuple(_vs[i]) + tuple(_ns[i]) + (_r, _g, _b, )) + + _dtype = np.dtype([('x', '>> file selector def prop_name(cls, prop, colon=False, ): for p in cls.bl_rna.properties: if(p.identifier == prop): @@ -1078,8 +1767,11 @@ def prop_name(cls, prop, colon=False, ): return p.name return '' + # f = 0.275 + f = 0.33 + r 
= sub.row(align=True, ) - s = r.split(factor=0.33) + s = r.split(factor=f) s.label(text=prop_name(pcv, 'filepath', True, )) s = s.split(factor=1.0) r = s.row(align=True, ) @@ -1087,12 +1779,61 @@ def prop_name(cls, prop, colon=False, ): c.prop(pcv, 'filepath', text='', ) c.enabled = False r.operator('point_cloud_visualizer.load_ply_to_cache', icon='FILEBROWSER', text='', ) - # -------------- file selector + # <<<----------- file selector + + # ----------->>> info block + def human_readable_number(num, suffix='', ): + # https://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size + f = 1000.0 + for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', ]: + if(abs(num) < f): + return "{:3.1f}{}{}".format(num, unit, suffix) + num /= f + return "{:.1f}{}{}".format(num, 'Y', suffix) + + l0c0 = "Selected: " + l0c1 = "{}".format("n/a") + l1c0 = "Displayed: " + l1c1 = "{} of {}".format("0.0", "n/a") + + if(pcv.filepath != ""): + _, t = os.path.split(pcv.filepath) + l0c1 = "{}".format(t) + if(pcv.uuid in PCVManager.cache): + l0c0 = "Loaded: " + l0c1 = "{}".format(t) + cache = PCVManager.cache[pcv.uuid] + n = human_readable_number(cache['display_percent']) + if(not cache['draw']): + n = "0.0" + nn = human_readable_number(cache['stats']) + l1c1 = "{} of {}".format(n, nn) + + f = 0.33 + c = sub.column() + c.scale_y = 0.66 + r = c.row() + s = r.split(factor=f) + s.label(text=l0c0) + s = s.split(factor=1.0) + s.label(text=l0c1) + r = c.row() + s = r.split(factor=f) + s.label(text=l1c0) + s = s.split(factor=1.0) + s.label(text=l1c1) + + sub.separator() + # <<<----------- info block + + # sub.prop(pcv, 'ply_info', text="", emboss=False, ) + # sub.prop(pcv, 'ply_display_info', text="", emboss=False, ) e = not (pcv.filepath == "") r = sub.row(align=True) r.operator('point_cloud_visualizer.draw') r.operator('point_cloud_visualizer.erase') + r.scale_y = 1.5 r.enabled = e r = sub.row() r.prop(pcv, 'display_percent') @@ -1104,6 +1845,13 @@ def prop_name(cls, prop, colon=False, ): # r.prop(pcv, 'alpha_radius') # r.enabled = e + r = sub.row(align=True) + r.prop(pcv, 'vertex_normals', toggle=True, icon_only=True, icon='SNAP_NORMAL', ) + r.prop(pcv, 'vertex_normals_size') + r.enabled = e + if(not pcv.has_normals): + r.enabled = False + sub.separator() pcv = context.object.point_cloud_visualizer @@ -1115,14 +1863,17 @@ def prop_name(cls, prop, colon=False, ): ok = True c = sub.column() - c.prop(pcv, 'light_enabled', toggle=True, ) + r = c.row(align=True) + r.prop(pcv, 'illumination', toggle=True, ) + r.prop(pcv, 'illumination_edit', toggle=True, icon_only=True, icon='TOOL_SETTINGS', ) + # r.prop(pcv, 'illumination_edit', toggle=True, icon_only=True, icon='SETTINGS', ) if(ok): if(not pcv.has_normals): c.label(text="Missing vertex normals.", icon='ERROR', ) c.enabled = False else: c.enabled = False - if(pcv.light_enabled): + if(pcv.illumination_edit): cc = c.column() cc.prop(pcv, 'light_direction', text="", ) ccc = cc.column(align=True) @@ -1131,85 +1882,153 @@ def prop_name(cls, prop, colon=False, ): if(not pcv.has_normals): cc.enabled = e + # if(pcv.uuid in PCVManager.cache): + # sub.separator() + # # r = sub.row() + # # h, t = os.path.split(pcv.filepath) + # # n = human_readable_number(PCVManager.cache[pcv.uuid]['stats']) + # # r.label(text='{}: {} points'.format(t, n)) + # sub.prop(pcv, 'ply_info', text="", emboss=False, ) + # sub.prop(pcv, 'ply_display_info', text="", emboss=False, ) + + +class PCV_PT_render(Panel): + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + 
bl_category = "View" + bl_label = "Render" + bl_parent_id = "PCV_PT_panel" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self, context): + pcv = context.object.point_cloud_visualizer + l = self.layout + sub = l.column() + + c = sub.column() + c.prop(pcv, 'render_display_percent') + c.prop(pcv, 'render_point_size') + c.prop(pcv, 'render_suffix') + c.prop(pcv, 'render_zeros') + sub.separator() + r = sub.row(align=True) + r.operator('point_cloud_visualizer.render') + r.operator('point_cloud_visualizer.animation') + + sub.enabled = PCV_OT_render.poll(context) + + +class PCV_PT_convert(Panel): + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "View" + bl_label = "Convert" + bl_parent_id = "PCV_PT_panel" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self, context): + pcv = context.object.point_cloud_visualizer + l = self.layout + sub = l.column() + c = sub.column() + c.prop(pcv, 'mesh_type') + + f = 0.245 + r = c.row(align=True) + s = r.split(factor=f, align=True, ) + s.prop(pcv, 'mesh_all', toggle=True, ) + s = s.split(factor=1.0, align=True, ) + s.prop(pcv, 'mesh_percentage') + if(pcv.mesh_all): + s.enabled = False + + cc = c.column() + cc.prop(pcv, 'mesh_size') + + cc_n = cc.row() + cc_n.prop(pcv, 'mesh_normal_align') + if(not pcv.has_normals): + cc_n.enabled = False + + cc_c = cc.row() + cc_c.prop(pcv, 'mesh_vcols') + if(not pcv.has_vcols): + cc_c.enabled = False + + if(pcv.mesh_type == 'VERTEX'): + cc.enabled = False + + # c.separator() + c.operator('point_cloud_visualizer.convert') + c.enabled = PCV_OT_convert.poll(context) + + +class PCV_PT_debug(Panel): + bl_space_type = 'VIEW_3D' + bl_region_type = 'UI' + bl_category = "View" + bl_label = "Debug" + bl_parent_id = "PCV_PT_panel" + bl_options = {'DEFAULT_CLOSED'} + + def draw(self, context): + pcv = context.object.point_cloud_visualizer + l = self.layout + sub = l.column() + + sub.label(text="properties:") b = sub.box() - r = b.row() - r.prop(pcv, 'render_expanded', icon='TRIA_DOWN' if pcv.render_expanded else 'TRIA_RIGHT', icon_only=True, emboss=False, ) - r.label(text="Render") - if(pcv.render_expanded): - c = b.column() - r = c.row(align=True) - r.operator('point_cloud_visualizer.render') - r.operator('point_cloud_visualizer.animation') - c = b.column() - c.prop(pcv, 'render_display_percent') - c.prop(pcv, 'render_point_size') - c.separator() - c.prop(pcv, 'render_suffix') - c.prop(pcv, 'render_zeros') - c.enabled = PCV_OT_render.poll(context) - - if(pcv.uuid in PCVManager.cache): - r = sub.row() - h, t = os.path.split(pcv.filepath) - n = human_readable_number(PCVManager.cache[pcv.uuid]['stats']) - r.label(text='{}: {} points'.format(t, n)) - - if(pcv.debug): - sub.separator() - - sub.label(text="properties:") - b = sub.box() - c = b.column() - c.label(text="uuid: {}".format(pcv.uuid)) - c.label(text="filepath: {}".format(pcv.filepath)) - c.label(text="point_size: {}".format(pcv.point_size)) - c.label(text="alpha_radius: {}".format(pcv.alpha_radius)) - c.label(text="display_percent: {}".format(pcv.display_percent)) - c.label(text="render_expanded: {}".format(pcv.render_expanded)) - c.label(text="render_point_size: {}".format(pcv.render_point_size)) - c.label(text="render_display_percent: {}".format(pcv.render_display_percent)) - c.label(text="render_suffix: {}".format(pcv.render_suffix)) - c.label(text="render_zeros: {}".format(pcv.render_zeros)) - - c.label(text="has_normals: {}".format(pcv.has_normals)) - c.label(text="has_vcols: {}".format(pcv.has_vcols)) - c.label(text="light_enabled: 
{}".format(pcv.light_enabled)) - c.label(text="light_direction: {}".format(pcv.light_direction)) - c.label(text="light_intensity: {}".format(pcv.light_intensity)) - c.label(text="shadow_intensity: {}".format(pcv.shadow_intensity)) - - c.label(text="debug: {}".format(pcv.debug)) - c.scale_y = 0.5 - - sub.label(text="manager:") - c = sub.column(align=True) - c.operator('point_cloud_visualizer.init') - c.operator('point_cloud_visualizer.deinit') - c.operator('point_cloud_visualizer.gc') - b = sub.box() - c = b.column() - c.label(text="cache: {} item(s)".format(len(PCVManager.cache.items()))) - c.label(text="handle: {}".format(PCVManager.handle)) - c.label(text="initialized: {}".format(PCVManager.initialized)) - c.scale_y = 0.5 - - if(len(PCVManager.cache)): - sub.label(text="cache details:") - for k, v in PCVManager.cache.items(): - b = sub.box() - c = b.column() - c.scale_y = 0.5 - for ki, vi in sorted(v.items()): - if(type(vi) == np.ndarray): - c.label(text="{}: numpy.ndarray ({} items)".format(ki, len(vi))) - else: - c.label(text="{}: {}".format(ki, vi)) + c = b.column() + c.label(text="uuid: {}".format(pcv.uuid)) + c.label(text="filepath: {}".format(pcv.filepath)) + c.label(text="point_size: {}".format(pcv.point_size)) + c.label(text="alpha_radius: {}".format(pcv.alpha_radius)) + c.label(text="display_percent: {}".format(pcv.display_percent)) + c.label(text="render_expanded: {}".format(pcv.render_expanded)) + c.label(text="render_point_size: {}".format(pcv.render_point_size)) + c.label(text="render_display_percent: {}".format(pcv.render_display_percent)) + c.label(text="render_suffix: {}".format(pcv.render_suffix)) + c.label(text="render_zeros: {}".format(pcv.render_zeros)) + + c.label(text="has_normals: {}".format(pcv.has_normals)) + c.label(text="has_vcols: {}".format(pcv.has_vcols)) + c.label(text="illumination: {}".format(pcv.illumination)) + c.label(text="light_direction: {}".format(pcv.light_direction)) + c.label(text="light_intensity: {}".format(pcv.light_intensity)) + c.label(text="shadow_intensity: {}".format(pcv.shadow_intensity)) + + c.label(text="debug: {}".format(pcv.debug)) + c.scale_y = 0.5 + + sub.label(text="manager:") + c = sub.column(align=True) + c.operator('point_cloud_visualizer.init') + c.operator('point_cloud_visualizer.deinit') + c.operator('point_cloud_visualizer.gc') + b = sub.box() + c = b.column() + c.label(text="cache: {} item(s)".format(len(PCVManager.cache.items()))) + c.label(text="handle: {}".format(PCVManager.handle)) + c.label(text="initialized: {}".format(PCVManager.initialized)) + c.scale_y = 0.5 + + if(len(PCVManager.cache)): + sub.label(text="cache details:") + for k, v in PCVManager.cache.items(): + b = sub.box() + c = b.column() + c.scale_y = 0.5 + for ki, vi in sorted(v.items()): + if(type(vi) == np.ndarray): + c.label(text="{}: numpy.ndarray ({} items)".format(ki, len(vi))) + else: + c.label(text="{}: {}".format(ki, vi)) class PCV_properties(PropertyGroup): - filepath: StringProperty(name="PLY file", default="", description="", ) + filepath: StringProperty(name="PLY File", default="", description="", ) uuid: StringProperty(default="", options={'HIDDEN', }, ) # point_size: FloatProperty(name="Size", default=3.0, min=0.001, max=100.0, precision=3, subtype='FACTOR', description="Point size", ) # point_size: IntProperty(name="Size", default=3, min=1, max=100, subtype='PIXEL', description="Point size", ) @@ -1229,23 +2048,52 @@ def _display_percent_update(self, context, ): display_percent: FloatProperty(name="Display", default=100.0, min=0.0, 
+    # ply_info: StringProperty(name="PLY Info", default="", description="", )
+    # ply_display_info: StringProperty(name="PLY Display Info", default="Display:", description="", )
+
+    vertex_normals: BoolProperty(name="Normals", description="Draw normals of points", default=False, )
+    vertex_normals_size: FloatProperty(name="Length", description="Length of point normal line", default=0.01, min=0.00001, max=1.0, soft_min=0.001, soft_max=0.2, step=1, precision=3, )
+
     render_expanded: BoolProperty(default=False, options={'HIDDEN', }, )
     # render_point_size: FloatProperty(name="Size", default=3.0, min=0.001, max=100.0, precision=3, subtype='FACTOR', description="Render point size", )
     render_point_size: IntProperty(name="Size", default=3, min=1, max=100, subtype='PIXEL', description="Point size", )
     render_display_percent: FloatProperty(name="Count", default=100.0, min=0.0, max=100.0, precision=0, subtype='PERCENTAGE', description="Adjust percentage of points rendered", )
     render_suffix: StringProperty(name="Suffix", default="pcv_frame", description="Render filename or suffix, depends on render output path. Frame number will be appended automatically", )
-    render_zeros: IntProperty(name="Leading Zeros", default=6, min=3, max=10, subtype='FACTOR', description="Number of leading zeros in render filename", )
+    # render_zeros: IntProperty(name="Leading Zeros", default=6, min=3, max=10, subtype='FACTOR', description="Number of leading zeros in render filename", )
+    render_zeros: IntProperty(name="Leading Zeros", default=6, min=3, max=10, description="Number of leading zeros in render filename", )

-    has_normals: BoolProperty(default=False)
-    has_vcols: BoolProperty(default=False)
-    light_enabled: BoolProperty(name="Illumination", description="Enable extra illumination on point cloud", default=False, )
+    has_normals: BoolProperty(default=False, options={'HIDDEN', }, )
+    has_vcols: BoolProperty(default=False, options={'HIDDEN', }, )
+    illumination: BoolProperty(name="Illumination", description="Enable extra illumination on point cloud", default=False, )
+    illumination_edit: BoolProperty(name="Edit", description="Edit illumination properties", default=False, )
     light_direction: FloatVectorProperty(name="Light Direction", description="Light direction", default=(0.0, 1.0, 0.0), subtype='DIRECTION', size=3, )
     # light_color: FloatVectorProperty(name="Light Color", description="", default=(0.2, 0.2, 0.2), min=0, max=1, subtype='COLOR', size=3, )
     light_intensity: FloatProperty(name="Light Intensity", description="Light intensity", default=0.3, min=0, max=1, subtype='FACTOR', )
     shadow_intensity: FloatProperty(name="Shadow Intensity", description="Shadow intensity", default=0.2, min=0, max=1, subtype='FACTOR', )
     show_normals: BoolProperty(name="Colorize By Vertex Normals", description="", default=False, )

-    debug: BoolProperty(default=DEBUG, options={'HIDDEN', }, )
+    mesh_type: EnumProperty(name="Type", items=[('VERTEX', "Vertex", ""),
+                                                ('TRIANGLE', "Equilateral Triangle", ""),
+                                                ('TETRAHEDRON', "Tetrahedron", ""),
+                                                ('CUBE', "Cube", ""),
+                                                ('ICOSPHERE', "Ico Sphere", ""), ], default='CUBE', description="Instance mesh type", )
+    mesh_size: FloatProperty(name="Size", description="Mesh instance size, instanced mesh has size 1.0", default=0.01, min=0.000001, precision=4, max=100.0, )
+    mesh_normal_align: BoolProperty(name="Align To Normal", description="Align instance to point normal", default=True, )
+    mesh_vcols: BoolProperty(name="Colors", description="Assign point color to instance vertex colors", default=True, )
+    mesh_all: BoolProperty(name="All", description="Convert all points", default=True, )
+    mesh_percentage: FloatProperty(name="Subset", default=100.0, min=0.0, max=100.0, precision=0, subtype='PERCENTAGE', description="Convert random subset of points by given percentage", )
+
+    def _debug_update(self, context, ):
+        global DEBUG, debug_classes
+        DEBUG = self.debug
+        if(DEBUG):
+            for cls in debug_classes:
+                bpy.utils.register_class(cls)
+        else:
+            for cls in reversed(debug_classes):
+                bpy.utils.unregister_class(cls)
+
+    debug: BoolProperty(default=DEBUG, options={'HIDDEN', }, update=_debug_update, )

     @classmethod
     def register(cls):
@@ -1256,6 +2104,19 @@ def unregister(cls):
         del bpy.types.Object.point_cloud_visualizer


+class PCV_preferences(AddonPreferences):
+    bl_idname = __name__
+
+    default_vertex_color: FloatVectorProperty(name="Default Color", default=(0.65, 0.65, 0.65, ), min=0, max=1, subtype='COLOR', size=3, description="Default color to be used upon loading PLY to cache when vertex colors are missing", )
+    normal_color: FloatVectorProperty(name="Normal Color", default=((35 / 255) ** 2.2, (97 / 255) ** 2.2, (221 / 255) ** 2.2, ), min=0, max=1, subtype='COLOR', size=3, description="Display color for vertex normals", )
+
+    def draw(self, context):
+        l = self.layout
+        r = l.row()
+        r.prop(self, "default_vertex_color")
+        r.prop(self, "normal_color")
+
+
 @persistent
 def watcher(scene):
     PCVManager.deinit()
@@ -1263,19 +2124,25 @@ def watcher(scene):

 classes = (
     PCV_properties,
+    PCV_preferences,
     PCV_PT_panel,
+    PCV_PT_render,
+    PCV_PT_convert,
     PCV_OT_load,
     PCV_OT_draw,
     PCV_OT_erase,
     PCV_OT_render,
     PCV_OT_animation,
+    PCV_OT_convert,
+)
+debug_classes = (
+    PCV_PT_debug,
+    PCV_OT_init,
+    PCV_OT_deinit,
+    PCV_OT_gc,
 )
 if(DEBUG):
-    classes = classes + (
-        PCV_OT_init,
-        PCV_OT_deinit,
-        PCV_OT_gc,
-    )
+    classes = classes + debug_classes


 def register():
@@ -1291,4 +2158,10 @@ def unregister():


 if __name__ == "__main__":
+    """
+    > Well, that doesn't explain... why you've come all the way out here, all the way out here to hell.
+    > I, uh, have a job out in the town of Machine.
+    > Machine? That's the end of the line.
+    Jim Jarmusch, Dead Man (1995)
+    """
     register()
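
# ---------------------------------------------------------------------------
# A minimal standalone sketch of the debug-toggle pattern introduced above:
# a hidden BoolProperty whose `update` callback registers or unregisters a
# group of debug-only classes at runtime, instead of deciding once at import
# time via the DEBUG constant. The names EXAMPLE_PT_debug, ExampleProps and
# example_props are placeholders for illustration, not identifiers from the
# addon; it assumes the toggle starts at False, so the debug classes begin
# unregistered.

import bpy


class EXAMPLE_PT_debug(bpy.types.Panel):
    # a panel that should only exist while debugging is switched on
    bl_label = "Debug"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = "View"

    def draw(self, context):
        self.layout.label(text="debug tools")


# classes toggled at runtime; kept outside the always-registered set
debug_classes = (EXAMPLE_PT_debug, )


def _debug_update(self, context):
    # update callback: `self` is the property group that owns `debug`
    if self.debug:
        for cls in debug_classes:
            bpy.utils.register_class(cls)
    else:
        for cls in reversed(debug_classes):
            bpy.utils.unregister_class(cls)


class ExampleProps(bpy.types.PropertyGroup):
    # hidden toggle; flipping it (de)registers the debug classes immediately
    debug: bpy.props.BoolProperty(default=False, options={'HIDDEN'}, update=_debug_update)


def register():
    bpy.utils.register_class(ExampleProps)
    bpy.types.Object.example_props = bpy.props.PointerProperty(type=ExampleProps)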