From 4a127e617cecb180aacc9d5271fb9a1e0d30fa88 Mon Sep 17 00:00:00 2001 From: Swann Martinez Date: Fri, 13 Mar 2020 15:05:00 +0100 Subject: [PATCH 01/33] feat: cleanup --- multi_user/bl_types/bl_datablock.py | 16 ---------------- multi_user/libs/replication | 2 +- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index be6c8ed..f370bc6 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -77,22 +77,6 @@ class BlDatablock(ReplicatedDatablock): self.diff_method = DIFF_BINARY - def library_apply(self): - """Apply stored data - """ - # UP in case we want to reset our pointer data - self.state = UP - - def bl_diff(self): - """Generic datablock diff""" - return self.pointer.name != self.data['name'] - - def diff_library(self): - return False - - def resolve_deps_library(self): - return [self.pointer.library] - def resolve(self): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) diff --git a/multi_user/libs/replication b/multi_user/libs/replication index 99bf948..f24d796 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit 99bf94874a07890dc747ec53519aa34443a95146 +Subproject commit f24d796fe30c601d1ecee6231dd845ceab4707e1 From 717a2da3de74b2b230adbd9ac589820fa27c31f1 Mon Sep 17 00:00:00 2001 From: Swann Martinez Date: Fri, 13 Mar 2020 17:13:39 +0100 Subject: [PATCH 02/33] refactor: cleanup progression --- multi_user/bl_types/bl_action.py | 6 +++--- multi_user/bl_types/bl_armature.py | 2 +- multi_user/bl_types/bl_camera.py | 2 +- multi_user/bl_types/bl_collection.py | 4 ++-- multi_user/bl_types/bl_curve.py | 2 +- multi_user/bl_types/bl_datablock.py | 9 ++++----- multi_user/bl_types/bl_gpencil.py | 2 +- multi_user/bl_types/bl_image.py | 6 +++--- multi_user/bl_types/bl_lattice.py | 2 +- multi_user/bl_types/bl_library.py | 4 ++-- multi_user/bl_types/bl_light.py | 4 ++-- multi_user/bl_types/bl_lightprobe.py | 2 +- multi_user/bl_types/bl_material.py | 2 +- multi_user/bl_types/bl_mesh.py | 2 +- multi_user/bl_types/bl_metaball.py | 2 +- multi_user/bl_types/bl_object.py | 2 +- multi_user/bl_types/bl_scene.py | 4 ++-- multi_user/bl_types/bl_speaker.py | 2 +- multi_user/bl_types/bl_world.py | 7 ++----- multi_user/libs/replication | 2 +- 20 files changed, 32 insertions(+), 36 deletions(-) diff --git a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index 9d4c28f..5a45c04 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -15,10 +15,10 @@ class BlAction(BlDatablock): bl_automatic_push = True bl_icon = 'ACTION_TWEAK' - def construct(self, data): + def _construct(self, data): return bpy.data.actions.new(data["name"]) - def load(self, data, target): + def _load(self, data, target): begin_frame = 100000 end_frame = -100000 @@ -90,7 +90,7 @@ class BlAction(BlDatablock): target.fcurves.remove(fcurve) target.id_root= data['id_root'] - def dump(self, pointer=None): + def _dump(self, pointer=None): assert(pointer) dumper = utils.dump_anything.Dumper() dumper.exclude_filter =[ diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index 8aca65c..ed271a6 100644 --- a/multi_user/bl_types/bl_armature.py +++ b/multi_user/bl_types/bl_armature.py @@ -17,7 +17,7 @@ class BlArmature(BlDatablock): bl_automatic_push = True bl_icon = 'ARMATURE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.armatures.new(data["name"]) 
def load_implementation(self, data, target): diff --git a/multi_user/bl_types/bl_camera.py b/multi_user/bl_types/bl_camera.py index 72700a2..3dbd8f2 100644 --- a/multi_user/bl_types/bl_camera.py +++ b/multi_user/bl_types/bl_camera.py @@ -22,7 +22,7 @@ class BlCamera(BlDatablock): if dof_settings: utils.dump_anything.load(target.dof, dof_settings) - def construct(self, data): + def _construct(self, data): return bpy.data.cameras.new(data["name"]) def dump_implementation(self, data, pointer=None): diff --git a/multi_user/bl_types/bl_collection.py b/multi_user/bl_types/bl_collection.py index 38ac0b1..b9b114d 100644 --- a/multi_user/bl_types/bl_collection.py +++ b/multi_user/bl_types/bl_collection.py @@ -13,7 +13,7 @@ class BlCollection(BlDatablock): bl_delay_apply = 1 bl_automatic_push = True - def construct(self, data): + def _construct(self, data): if self.is_library: with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData): targetData.collections = [ @@ -28,7 +28,7 @@ class BlCollection(BlDatablock): instance.uuid = self.uuid return instance - def load(self, data, target): + def load_implementation(self, data, target): # Load other meshes metadata # dump_anything.load(target, data) target.name = data["name"] diff --git a/multi_user/bl_types/bl_curve.py b/multi_user/bl_types/bl_curve.py index 4a498e9..44f687d 100644 --- a/multi_user/bl_types/bl_curve.py +++ b/multi_user/bl_types/bl_curve.py @@ -15,7 +15,7 @@ class BlCurve(BlDatablock): bl_automatic_push = True bl_icon = 'CURVE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.curves.new(data["name"], data["type"]) def load_implementation(self, data, target): diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index f370bc6..3a891db 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -61,7 +61,6 @@ class BlDatablock(ReplicatedDatablock): bl_automatic_push : boolean bl_icon : type icon (blender icon name) """ - bl_id = "scenes" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -77,7 +76,7 @@ class BlDatablock(ReplicatedDatablock): self.diff_method = DIFF_BINARY - def resolve(self): + def _resolve(self): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) @@ -92,7 +91,7 @@ class BlDatablock(ReplicatedDatablock): self.pointer = datablock_ref - def dump(self, pointer=None): + def _dump(self, pointer=None): data = {} # Dump animation data if utils.has_action(pointer): @@ -118,7 +117,7 @@ class BlDatablock(ReplicatedDatablock): def dump_implementation(self, data, target): raise NotImplementedError - def load(self, data, target): + def _load(self, data, target): # Load animation data if 'animation_data' in data.keys(): if target.animation_data is None: @@ -150,7 +149,7 @@ class BlDatablock(ReplicatedDatablock): if not self.is_library: dependencies.extend(self.resolve_deps_implementation()) - print(dependencies) + return dependencies def resolve_deps_implementation(self): diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index d593e6d..a1796eb 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -40,7 +40,7 @@ class BlGpencil(BlDatablock): bl_automatic_push = True bl_icon = 'GREASEPENCIL' - def construct(self, data): + def _construct(self, data): return bpy.data.grease_pencils.new(data["name"]) 
def load_implementation(self, data, target): diff --git a/multi_user/bl_types/bl_image.py b/multi_user/bl_types/bl_image.py index 8b965f1..bdf561d 100644 --- a/multi_user/bl_types/bl_image.py +++ b/multi_user/bl_types/bl_image.py @@ -35,14 +35,14 @@ class BlImage(BlDatablock): bl_automatic_push = False bl_icon = 'IMAGE_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.images.new( name=data['name'], width=data['size'][0], height=data['size'][1] ) - def load(self, data, target): + def _load(self, data, target): image = target prefs = utils.get_preferences() @@ -59,7 +59,7 @@ class BlImage(BlDatablock): image.colorspace_settings.name = data["colorspace_settings"]["name"] - def dump(self, data, pointer=None): + def _dump(self, data, pointer=None): assert(pointer) data = {} data['pixels'] = dump_image(pointer) diff --git a/multi_user/bl_types/bl_lattice.py b/multi_user/bl_types/bl_lattice.py index 5a7ac28..160e819 100644 --- a/multi_user/bl_types/bl_lattice.py +++ b/multi_user/bl_types/bl_lattice.py @@ -18,7 +18,7 @@ class BlLattice(BlDatablock): for point in data['points']: utils.dump_anything.load(target.points[point], data["points"][point]) - def construct(self, data): + def _construct(self, data): return bpy.data.lattices.new(data["name"]) def dump_implementation(self, data, pointer=None): diff --git a/multi_user/bl_types/bl_library.py b/multi_user/bl_types/bl_library.py index 8e909ac..0d76396 100644 --- a/multi_user/bl_types/bl_library.py +++ b/multi_user/bl_types/bl_library.py @@ -13,11 +13,11 @@ class BlLibrary(BlDatablock): bl_automatic_push = True bl_icon = 'LIBRARY_DATA_DIRECT' - def construct(self, data): + def _construct(self, data): with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData): targetData = sourceData return sourceData - def load(self, data, target): + def _load(self, data, target): pass def dump(self, pointer=None): diff --git a/multi_user/bl_types/bl_light.py b/multi_user/bl_types/bl_light.py index 6dfdd80..ac5cecc 100644 --- a/multi_user/bl_types/bl_light.py +++ b/multi_user/bl_types/bl_light.py @@ -13,10 +13,10 @@ class BlLight(BlDatablock): bl_automatic_push = True bl_icon = 'LIGHT_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.lights.new(data["name"], data["type"]) - def load(self, data, target): + def load_implementation(self, data, target): utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): diff --git a/multi_user/bl_types/bl_lightprobe.py b/multi_user/bl_types/bl_lightprobe.py index bc8edfd..3fae44c 100644 --- a/multi_user/bl_types/bl_lightprobe.py +++ b/multi_user/bl_types/bl_lightprobe.py @@ -18,7 +18,7 @@ class BlLightprobe(BlDatablock): def load_implementation(self, data, target): utils.dump_anything.load(target, data) - def construct(self, data): + def _construct(self, data): type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type'] # See https://developer.blender.org/D6396 if bpy.app.version[1] >= 83: diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index fa33f40..4389037 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -76,7 +76,7 @@ class BlMaterial(BlDatablock): bl_automatic_push = True bl_icon = 'MATERIAL_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.materials.new(data["name"]) def load_implementation(self, data, target): diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py 
index f502cae..68976db 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -86,7 +86,7 @@ class BlMesh(BlDatablock): bl_automatic_push = True bl_icon = 'MESH_DATA' - def construct(self, data): + def _construct(self, data): instance = bpy.data.meshes.new(data["name"]) instance.uuid = self.uuid return instance diff --git a/multi_user/bl_types/bl_metaball.py b/multi_user/bl_types/bl_metaball.py index 7dec312..4dac138 100644 --- a/multi_user/bl_types/bl_metaball.py +++ b/multi_user/bl_types/bl_metaball.py @@ -13,7 +13,7 @@ class BlMetaball(BlDatablock): bl_automatic_push = True bl_icon = 'META_BALL' - def construct(self, data): + def _construct(self, data): return bpy.data.metaballs.new(data["name"]) def load(self, data, target): diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index 26b5469..4f68938 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -38,7 +38,7 @@ class BlObject(BlDatablock): bl_automatic_push = True bl_icon = 'OBJECT_DATA' - def construct(self, data): + def _construct(self, data): pointer = None if self.is_library: diff --git a/multi_user/bl_types/bl_scene.py b/multi_user/bl_types/bl_scene.py index bac83f8..ad0740f 100644 --- a/multi_user/bl_types/bl_scene.py +++ b/multi_user/bl_types/bl_scene.py @@ -12,12 +12,12 @@ class BlScene(BlDatablock): bl_automatic_push = True bl_icon = 'SCENE_DATA' - def construct(self, data): + def _construct(self, data): instance = bpy.data.scenes.new(data["name"]) instance.uuid = self.uuid return instance - def load(self, data, target): + def load_implementation(self, data, target): target = self.pointer # Load other meshes metadata utils.dump_anything.load(target, data) diff --git a/multi_user/bl_types/bl_speaker.py b/multi_user/bl_types/bl_speaker.py index d675ee2..8116c01 100644 --- a/multi_user/bl_types/bl_speaker.py +++ b/multi_user/bl_types/bl_speaker.py @@ -16,7 +16,7 @@ class BlSpeaker(BlDatablock): def load_implementation(self, data, target): utils.dump_anything.load(target, data) - def construct(self, data): + def _construct(self, data): return bpy.data.speakers.new(data["name"]) def dump_implementation(self, data, pointer=None): diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index 588ef64..ac5cb1c 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -14,10 +14,10 @@ class BlWorld(BlDatablock): bl_automatic_push = True bl_icon = 'WORLD_DATA' - def construct(self, data): + def _construct(self, data): return bpy.data.worlds.new(data["name"]) - def load(self, data, target): + def load_implementation(self, data, target): if data["use_nodes"]: if target.node_tree is None: target.use_nodes = True @@ -101,6 +101,3 @@ class BlWorld(BlDatablock): deps.append(self.pointer.library) return deps - def is_valid(self): - return bpy.data.worlds.get(self.data['name']) - diff --git a/multi_user/libs/replication b/multi_user/libs/replication index f24d796..546fe5d 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit f24d796fe30c601d1ecee6231dd845ceab4707e1 +Subproject commit 546fe5d6394f46bff00d81e1c7658a34341aef16 From 2fcb4615be3d2d584854a5800452077590d8eb97 Mon Sep 17 00:00:00 2001 From: Swann Date: Fri, 20 Mar 2020 14:56:50 +0100 Subject: [PATCH 03/33] feat: GPL headers --- multi_user/__init__.py | 18 ++++++++++++++++++ multi_user/bl_types/__init__.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_action.py | 18 ++++++++++++++++++ 
multi_user/bl_types/bl_armature.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_camera.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_collection.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_curve.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_datablock.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_gpencil.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_image.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_lattice.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_library.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_light.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_lightprobe.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_material.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_mesh.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_metaball.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_object.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_scene.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_speaker.py | 18 ++++++++++++++++++ multi_user/bl_types/bl_world.py | 18 ++++++++++++++++++ multi_user/delayable.py | 17 +++++++++++++++++ multi_user/environment.py | 18 ++++++++++++++++++ multi_user/libs/replication | 2 +- multi_user/operators.py | 18 ++++++++++++++++++ multi_user/preferences.py | 18 ++++++++++++++++++ multi_user/presence.py | 18 ++++++++++++++++++ multi_user/ui.py | 18 ++++++++++++++++++ multi_user/utils.py | 18 ++++++++++++++++++ 29 files changed, 504 insertions(+), 1 deletion(-) diff --git a/multi_user/__init__.py b/multi_user/__init__.py index 2990789..b401231 100644 --- a/multi_user/__init__.py +++ b/multi_user/__init__.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + bl_info = { "name": "Multi-User", "author": "Swann Martinez", diff --git a/multi_user/bl_types/__init__.py b/multi_user/bl_types/__init__.py index c3e9605..b5427c2 100644 --- a/multi_user/bl_types/__init__.py +++ b/multi_user/bl_types/__init__.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + __all__ = [ 'bl_object', 'bl_mesh', diff --git a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index 5a45c04..d903a4f 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import copy diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index ed271a6..c609be8 100644 --- a/multi_user/bl_types/bl_armature.py +++ b/multi_user/bl_types/bl_armature.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_camera.py b/multi_user/bl_types/bl_camera.py index 3dbd8f2..88112a3 100644 --- a/multi_user/bl_types/bl_camera.py +++ b/multi_user/bl_types/bl_camera.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_collection.py b/multi_user/bl_types/bl_collection.py index b9b114d..3d3f9f7 100644 --- a/multi_user/bl_types/bl_collection.py +++ b/multi_user/bl_types/bl_collection.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_curve.py b/multi_user/bl_types/bl_curve.py index 44f687d..ac6fbdc 100644 --- a/multi_user/bl_types/bl_curve.py +++ b/multi_user/bl_types/bl_curve.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import bpy.types as T import mathutils diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index 3a891db..3a5d8b3 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index a1796eb..8fc8a86 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_image.py b/multi_user/bl_types/bl_image.py index bdf561d..c758fb0 100644 --- a/multi_user/bl_types/bl_image.py +++ b/multi_user/bl_types/bl_image.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import os diff --git a/multi_user/bl_types/bl_lattice.py b/multi_user/bl_types/bl_lattice.py index 160e819..ca6de99 100644 --- a/multi_user/bl_types/bl_lattice.py +++ b/multi_user/bl_types/bl_lattice.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_library.py b/multi_user/bl_types/bl_library.py index 0d76396..f53e6de 100644 --- a/multi_user/bl_types/bl_library.py +++ b/multi_user/bl_types/bl_library.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_light.py b/multi_user/bl_types/bl_light.py index ac5cecc..3d216f6 100644 --- a/multi_user/bl_types/bl_light.py +++ b/multi_user/bl_types/bl_light.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_lightprobe.py b/multi_user/bl_types/bl_lightprobe.py index 3fae44c..1a3968a 100644 --- a/multi_user/bl_types/bl_lightprobe.py +++ b/multi_user/bl_types/bl_lightprobe.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index 4389037..89f3f3d 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 68976db..d9195ae 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import bmesh import mathutils diff --git a/multi_user/bl_types/bl_metaball.py b/multi_user/bl_types/bl_metaball.py index 4dac138..36c998f 100644 --- a/multi_user/bl_types/bl_metaball.py +++ b/multi_user/bl_types/bl_metaball.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index 4f68938..f4bfd76 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils import logging diff --git a/multi_user/bl_types/bl_scene.py b/multi_user/bl_types/bl_scene.py index ad0740f..48ad025 100644 --- a/multi_user/bl_types/bl_scene.py +++ b/multi_user/bl_types/bl_scene.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_speaker.py b/multi_user/bl_types/bl_speaker.py index 8116c01..9d2f87c 100644 --- a/multi_user/bl_types/bl_speaker.py +++ b/multi_user/bl_types/bl_speaker.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index ac5cb1c..d23a65d 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import mathutils diff --git a/multi_user/delayable.py b/multi_user/delayable.py index 6c100ab..78598d4 100644 --- a/multi_user/delayable.py +++ b/multi_user/delayable.py @@ -1,3 +1,20 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + import logging import bpy diff --git a/multi_user/environment.py b/multi_user/environment.py index 077f451..716fcf8 100644 --- a/multi_user/environment.py +++ b/multi_user/environment.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + import collections import logging import os diff --git a/multi_user/libs/replication b/multi_user/libs/replication index 546fe5d..7077c12 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit 546fe5d6394f46bff00d81e1c7658a34341aef16 +Subproject commit 7077c125627d6cb2e7160db1d0e882d35980e4c9 diff --git a/multi_user/operators.py b/multi_user/operators.py index 424418f..22ac9cb 100644 --- a/multi_user/operators.py +++ b/multi_user/operators.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import asyncio import logging import os diff --git a/multi_user/preferences.py b/multi_user/preferences.py index 3230b6d..7efd6ba 100644 --- a/multi_user/preferences.py +++ b/multi_user/preferences.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import logging import bpy diff --git a/multi_user/presence.py b/multi_user/presence.py index 5494e19..975193d 100644 --- a/multi_user/presence.py +++ b/multi_user/presence.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import copy import logging import math diff --git a/multi_user/ui.py b/multi_user/ui.py index 21f4d48..26f5c8d 100644 --- a/multi_user/ui.py +++ b/multi_user/ui.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import bpy from . import operators, utils diff --git a/multi_user/utils.py b/multi_user/utils.py index db96e92..5da0db8 100644 --- a/multi_user/utils.py +++ b/multi_user/utils.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# ##### END GPL LICENSE BLOCK ##### + + import json import logging import os From 2484028b5a08fbfa3e596657fbd1c79e326fdf82 Mon Sep 17 00:00:00 2001 From: Swann Date: Fri, 20 Mar 2020 16:14:54 +0100 Subject: [PATCH 04/33] feat: cleanup --- multi_user/libs/dump_anything.py | 18 +++ multi_user/libs/overrider.py | 219 ------------------------------- 2 files changed, 18 insertions(+), 219 deletions(-) delete mode 100644 multi_user/libs/overrider.py diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index 9c4f3fc..47a77df 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -1,3 +1,21 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+# +# ##### END GPL LICENSE BLOCK ##### + + import bpy import bpy.types as T import mathutils diff --git a/multi_user/libs/overrider.py b/multi_user/libs/overrider.py deleted file mode 100644 index 964833d..0000000 --- a/multi_user/libs/overrider.py +++ /dev/null @@ -1,219 +0,0 @@ -""" -Context Manager allowing temporary override of attributes - -````python -import bpy -from overrider import Overrider - -with Overrider(name='bpy_', parent=bpy) as bpy_: - # set preview render settings - bpy_.context.scene.render.use_file_extension = False - bpy_.context.scene.render.resolution_x = 512 - bpy_.context.scene.render.resolution_y = 512 - bpy_.context.scene.render.use_file_extension = False - bpy_.context.scene.render.image_settings.file_format = "JPEG" - bpy_.context.scene.layers[10] = False - - frame_start = action.frame_range[0] - frame_end = action.frame_range[1] - if begin_frame is not None: - frame_start = begin_frame - if end_frame is not None: - frame_end = end_frame - - # render - window = bpy_.data.window_managers[0].windows[0] - screen = bpy_.data.window_managers[0].windows[0].screen - area = next(area for area in screen.areas if area.type == 'VIEW_3D') - space = next(space for space in area.spaces if space.type == 'VIEW_3D') - - space.viewport_shade = 'MATERIAL' - space.region_3d.view_perspective = 'CAMERA' - - override_context = { - "window": window._real_value_(), - "screen": screen._real_value_() - } - - if frame_start == frame_end: - bpy.context.scene.frame_set(int(frame_start)) - bpy_.context.scene.render.filepath = os.path.join(directory, "icon.jpg") - bpy.ops.render.opengl(override_context, write_still=True) - - else: - for icon_index, frame_number in enumerate(range(int(frame_start), int(frame_end) + 1)): - bpy.context.scene.frame_set(frame_number) - bpy.context.scene.render.filepath = os.path.join(directory, "icon", "{:04d}.jpg".format(icon_index)) - bpy.ops.render.opengl(override_context, write_still=True) -```` -""" -from collections import OrderedDict - - -class OverrideIter: - - def __init__(self, parent): - self.parent = parent - self.index = -1 - - def __next__(self): - self.index += 1 - try: - return self.parent[self.index] - except IndexError as e: - raise StopIteration - - -class OverrideBase: - - def __init__(self, context_manager, name=None, parent=None): - self._name__ = name - self._context_manager_ = context_manager - self._parent_ = parent - self._changed_attributes_ = OrderedDict() - self._changed_items_ = OrderedDict() - self._children_ = list() - self._original_value_ = self._real_value_() - - def __repr__(self): - return "<{}({})>".format(self.__class__.__name__, self._path_) - - @property - def _name_(self): - raise NotImplementedError() - - @property - def _path_(self): - if isinstance(self._parent_, OverrideBase): - return self._parent_._path_ + self._name_ - - return self._name_ - - def _real_value_(self): - raise NotImplementedError() - - def _restore_(self): - for attribute, original_value in reversed(self._changed_attributes_.items()): - setattr(self._real_value_(), attribute, original_value) - - for item, original_value in reversed(self._changed_items_.items()): - self._real_value_()[item] = original_value - - def __getattr__(self, attr): - new_attribute = OverrideAttribute(self._context_manager_, name=attr, parent=self) - self._children_.append(new_attribute) - return new_attribute - - def __getitem__(self, item): - new_item = OverrideItem(self._context_manager_, name=item, parent=self) - self._children_.append(new_item) - return new_item - - def 
__iter__(self): - return OverrideIter(self) - - def __setattr__(self, attr, value): - if attr in ( - '_name__', - '_context_manager_', - '_parent_', - '_children_', - '_original_value_', - '_changed_attributes_', - '_changed_items_' - ): - self.__dict__[attr] = value - return - - if attr not in self._changed_attributes_.keys(): - self._changed_attributes_[attr] = getattr(self._real_value_(), attr) - self._context_manager_.register_as_changed(self) - - setattr(self._real_value_(), attr, value) - - def __setitem__(self, item, value): - if item not in self._changed_items_.keys(): - self._changed_items_[item] = self._real_value_()[item] - self._context_manager_.register_as_changed(self) - - self._real_value_()[item] = value - - def __eq__(self, other): - return self._real_value_() == other - - def __gt__(self, other): - return self._real_value_() > other - - def __lt__(self, other): - return self._real_value_() < other - - def __ge__(self, other): - return self._real_value_() >= other - - def __le__(self, other): - return self._real_value_() <= other - - def __call__(self, *args, **kwargs): - # TODO : surround str value with quotes - arguments = list([str(arg) for arg in args]) + ['{}={}'.format(key, value) for key, value in kwargs.items()] - arguments = ', '.join(arguments) - raise RuntimeError('Overrider does not allow call to {}({})'.format(self._path_, arguments)) - - -class OverrideRoot(OverrideBase): - - @property - def _name_(self): - return self._name__ - - def _real_value_(self): - return self._parent_ - - -class OverrideAttribute(OverrideBase): - - @property - def _name_(self): - return '.{}'.format(self._name__) - - def _real_value_(self): - return getattr(self._parent_._real_value_(), self._name__) - - -class OverrideItem(OverrideBase): - - @property - def _name_(self): - if isinstance(self._name__, str): - return '["{}"]'.format(self._name__) - - return '[{}]'.format(self._name__) - - def _real_value_(self): - return self._parent_._real_value_()[self._name__] - - -class Overrider: - def __init__(self, name, parent): - self.name = name - self.parent = parent - self.override = None - self.registered_overrides = list() - - def __enter__(self): - self.override = OverrideRoot( - context_manager=self, - parent=self.parent, - name=self.name - ) - return self.override - - def __exit__(self, exc_type, exc_val, exc_tb): - self.restore() - - def register_as_changed(self, override): - self.registered_overrides.append(override) - - def restore(self): - for override in reversed(self.registered_overrides): - override._restore_() From 8ce53b8413db1f51792a4006d94179e6b79ac1a1 Mon Sep 17 00:00:00 2001 From: Swann Date: Fri, 20 Mar 2020 19:31:48 +0100 Subject: [PATCH 05/33] feat: bl_object clean Related to #29 --- multi_user/bl_types/bl_armature.py | 1 - multi_user/bl_types/bl_object.py | 62 ++---------------------------- multi_user/libs/dump_anything.py | 24 +++++++++--- multi_user/libs/replication | 2 +- 4 files changed, 23 insertions(+), 66 deletions(-) diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index c609be8..c4abaf7 100644 --- a/multi_user/bl_types/bl_armature.py +++ b/multi_user/bl_types/bl_armature.py @@ -19,7 +19,6 @@ import bpy import mathutils -from ..libs.overrider import Overrider from .. import utils from .. 
import presence, operators from .bl_datablock import BlDatablock diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index f4bfd76..9f17eb1 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -26,22 +26,6 @@ from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) -def load_constraints(target, data): - for local_constraint in target.constraints: - if local_constraint.name not in data: - target.constraints.remove(local_constraint) - - for constraint in data: - target_constraint = target.constraints.get(constraint) - - if not target_constraint: - target_constraint = target.constraints.new( - data[constraint]['type']) - - utils.dump_anything.load( - target_constraint, data[constraint]) - - def load_pose(target_bone, data): target_bone.rotation_mode = data['rotation_mode'] @@ -105,32 +89,7 @@ class BlObject(BlDatablock): def load_implementation(self, data, target): # Load transformation data - rot_mode = 'rotation_quaternion' if data['rotation_mode'] == 'QUATERNION' else 'rotation_euler' - target.rotation_mode = data['rotation_mode'] - target.location = data['location'] - setattr(target, rot_mode, data[rot_mode]) - target.scale = data['scale'] - - target.name = data["name"] - # Load modifiers - if hasattr(target, 'modifiers'): - # TODO: smarter selective update - target.modifiers.clear() - - for modifier in data['modifiers']: - target_modifier = target.modifiers.get(modifier) - - if not target_modifier: - target_modifier = target.modifiers.new( - data['modifiers'][modifier]['name'], data['modifiers'][modifier]['type']) - - utils.dump_anything.load( - target_modifier, data['modifiers'][modifier]) - - # Load constraints - # Object - if hasattr(target, 'constraints') and 'constraints' in data: - load_constraints(target, data['constraints']) + utils.dump_anything.load(target, data) # Pose if 'pose' in data: @@ -153,28 +112,14 @@ class BlObject(BlDatablock): bone_data = data['pose']['bones'].get(bone) if 'constraints' in bone_data.keys(): - load_constraints( - target_bone, bone_data['constraints']) + utils.dump_anything.load(target_bone, bone_data['constraints']) + load_pose(target_bone, bone_data) if 'bone_index' in bone_data.keys(): target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']] - # Load relations - if 'children' in data.keys(): - for child in data['children']: - bpy.data.objects[child].parent = self.pointer - - # Load empty representation - target.empty_display_size = data['empty_display_size'] - target.empty_display_type = data['empty_display_type'] - - # Instancing - target.instance_type = data['instance_type'] - if data['instance_type'] == 'COLLECTION': - target.instance_collection = bpy.data.collections[data['instance_collection']] - # vertex groups if 'vertex_groups' in data: target.vertex_groups.clear() @@ -238,7 +183,6 @@ class BlObject(BlDatablock): data["modifiers"] = {} for index, modifier in enumerate(pointer.modifiers): data["modifiers"][modifier.name] = dumper.dump(modifier) - data["modifiers"][modifier.name]['m_index'] = index # CONSTRAINTS # OBJECT diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index 47a77df..caa705e 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -15,11 +15,13 @@ # # ##### END GPL LICENSE BLOCK ##### +import logging import bpy import bpy.types as T import mathutils +logger = logging.getLogger(__name__) def remove_items_from_dict(d, keys, recursive=False): copy = dict(d) @@ 
-316,19 +318,29 @@ class Loader: CONSTRUCTOR_ADD = "add" constructors = { - T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]), - T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []) + T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"], True), + T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, [], False), + T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"], True), + T.Constraint: (CONSTRUCTOR_NEW, ["type"], True), + # T.VertexGroup: (CONSTRUCTOR_NEW, ["name"], True), } element_type = element.bl_rna_property.fixed_type + constructor = constructors.get(type(element_type)) + if constructor is None: # collection type not supported return + + # Try to clear existing + if constructor[2]: + getattr(element.read(), 'clear')() + for dumped_element in dump.values(): try: constructor_parameters = [dumped_element[name] for name in constructor[1]] except KeyError: - print("Collection load error, missing parameters.") + logger.error("Collection load error, missing parameters.") continue # TODO handle error new_element = getattr(element.read(), constructor[0])( *constructor_parameters) @@ -354,6 +366,8 @@ class Loader: pointer.write(bpy.data.meshes.get(dump)) elif isinstance(rna_property_type, T.Material): pointer.write(bpy.data.materials.get(dump)) + elif isinstance(rna_property_type, T.Collection): + pointer.write(bpy.data.collections.get(dump)) def _load_matrix(self, matrix, dump): matrix.write(mathutils.Matrix(dump)) @@ -386,8 +400,8 @@ class Loader: continue # TODO error handling try: self._load_any(default.extend(k), v) - except: - pass + except Exception as e: + logger.error(e) @property def match_subset_all(self): diff --git a/multi_user/libs/replication b/multi_user/libs/replication index 7077c12..42b3a31 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit 7077c125627d6cb2e7160db1d0e882d35980e4c9 +Subproject commit 42b3a31b8ec44115f8fbc3697bdeeee74e608c6f From fb0760928e1e125875c4db17dee929e20d85c784 Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 11:04:06 +0100 Subject: [PATCH 06/33] feat: verbose errors --- multi_user/bl_types/bl_object.py | 10 +++++----- multi_user/libs/dump_anything.py | 33 +++++++++++++++++--------------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index 9f17eb1..0387a65 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -261,18 +261,18 @@ class BlObject(BlDatablock): data['vertex_groups'] = vg_data # SHAPE KEYS - pointer_data = pointer.data - if hasattr(pointer_data, 'shape_keys') and pointer_data.shape_keys: + object_data = pointer.data + if hasattr(object_data, 'shape_keys') and object_data.shape_keys: dumper = utils.dump_anything.Dumper() dumper.depth = 2 dumper.include_filter = [ 'reference_key', 'use_relative' ] - data['shape_keys'] = dumper.dump(pointer_data.shape_keys) - data['shape_keys']['reference_key'] = pointer_data.shape_keys.reference_key.name + data['shape_keys'] = dumper.dump(object_data.shape_keys) + data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name key_blocks = {} - for key in pointer_data.shape_keys.key_blocks: + for key in object_data.shape_keys.key_blocks: dumper.depth = 3 dumper.include_filter = [ 'name', diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index caa705e..de239ed 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -93,8 +93,9 @@ def 
_load_filter_default(default): class Dumper: + # TODO: support occlude readonly def __init__(self): - self.verbose = False + self.verbose = True self.depth = 1 self.keep_compounds_as_leaves = False self.accept_read_only = True @@ -103,7 +104,6 @@ class Dumper: self.type_subset = self.match_subset_all self.include_filter = [] self.exclude_filter = [] - # self._atomic_types = [] # TODO future option? def dump(self, any): return self._dump_any(any, 0) @@ -195,7 +195,8 @@ class Dumper: if (self.include_filter and p not in self.include_filter): return False getattr(default, p) - except AttributeError: + except AttributeError as err: + logger.error(err) return False if p.startswith("__"): return False @@ -258,14 +259,15 @@ class BlenderAPIElement: def write(self, value): # take precaution if property is read-only - try: - if self.sub_element_name: - setattr(self.api_element, self.sub_element_name, value) - else: - self.api_element = value - except AttributeError as err: - if not self.occlude_read_only: - raise err + if self.api_element.is_property_readonly(self.sub_element_name) and \ + self.occlude_read_only: + logger.error(f"Skipping {self.sub_element_name}") + return + + if self.sub_element_name: + setattr(self.api_element, self.sub_element_name, value) + else: + self.api_element = value def extend(self, element_name): return BlenderAPIElement(self.read(), element_name) @@ -282,7 +284,7 @@ class BlenderAPIElement: class Loader: def __init__(self): self.type_subset = self.match_subset_all - self.occlude_read_only = True + self.occlude_read_only = False self.order = ['*'] def load(self, dst_data, src_dumped_data): @@ -307,6 +309,7 @@ class Loader: for i in range(len(dump)): element.read()[i] = dump[i] except AttributeError as err: + logger.error(err) if not self.occlude_read_only: raise err @@ -397,11 +400,11 @@ class Loader: for k in self._ordered_keys(dump.keys()): v = dump[k] if not hasattr(default.read(), k): - continue # TODO error handling + logger.error(f"Load default, skipping {default} : {k}") try: self._load_any(default.extend(k), v) - except Exception as e: - logger.error(e) + except Exception as err: + logger.error(f"Cannot load {k}: {err}") @property def match_subset_all(self): From 90a44eb5dbc3bda4a0c560f555de4ef9b9248211 Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 12:09:59 +0100 Subject: [PATCH 07/33] fix resolve --- multi_user/bl_types/bl_datablock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index 3a5d8b3..a645488 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -94,7 +94,7 @@ class BlDatablock(ReplicatedDatablock): self.diff_method = DIFF_BINARY - def _resolve(self): + def resolve(self): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) From b55700862f6ead1e3827b3f918da974e0fdcd41d Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 13:49:47 +0100 Subject: [PATCH 08/33] fix: loadimg error --- multi_user/libs/dump_anything.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index de239ed..bd72dea 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -259,12 +259,9 @@ class BlenderAPIElement: def write(self, value): # take precaution if property is read-only - if 
self.api_element.is_property_readonly(self.sub_element_name) and \ - self.occlude_read_only: - logger.error(f"Skipping {self.sub_element_name}") - return + if self.sub_element_name and \ + not self.api_element.is_property_readonly(self.sub_element_name): - if self.sub_element_name: setattr(self.api_element, self.sub_element_name, value) else: self.api_element = value From 01faa94a9a369641f809f337ec5be01a14bcd767 Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 15:29:34 +0100 Subject: [PATCH 09/33] fix: resolve error --- multi_user/bl_types/bl_datablock.py | 2 +- multi_user/bl_types/bl_object.py | 2 +- multi_user/libs/dump_anything.py | 10 +++++----- multi_user/operators.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index a645488..3a5d8b3 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -94,7 +94,7 @@ class BlDatablock(ReplicatedDatablock): self.diff_method = DIFF_BINARY - def resolve(self): + def _resolve(self): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index 0387a65..2abb6ad 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -52,7 +52,7 @@ class BlObject(BlDatablock): instance.uuid = self.uuid return instance - # Object specific constructor... + # TODO: refactoring if "data" not in data: pass elif data["data"] in bpy.data.meshes.keys(): diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index bd72dea..051e0e9 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -196,7 +196,7 @@ class Dumper: return False getattr(default, p) except AttributeError as err: - logger.error(err) + logger.debug(err) return False if p.startswith("__"): return False @@ -306,7 +306,7 @@ class Loader: for i in range(len(dump)): element.read()[i] = dump[i] except AttributeError as err: - logger.error(err) + logger.debug(err) if not self.occlude_read_only: raise err @@ -340,7 +340,7 @@ class Loader: constructor_parameters = [dumped_element[name] for name in constructor[1]] except KeyError: - logger.error("Collection load error, missing parameters.") + logger.debug("Collection load error, missing parameters.") continue # TODO handle error new_element = getattr(element.read(), constructor[0])( *constructor_parameters) @@ -397,11 +397,11 @@ class Loader: for k in self._ordered_keys(dump.keys()): v = dump[k] if not hasattr(default.read(), k): - logger.error(f"Load default, skipping {default} : {k}") + logger.debug(f"Load default, skipping {default} : {k}") try: self._load_any(default.extend(k), v) except Exception as err: - logger.error(f"Cannot load {k}: {err}") + logger.debug(f"Cannot load {k}: {err}") @property def match_subset_all(self): diff --git a/multi_user/operators.py b/multi_user/operators.py index 22ac9cb..1d302c1 100644 --- a/multi_user/operators.py +++ b/multi_user/operators.py @@ -488,7 +488,7 @@ def sanitize_deps_graph(dummy): if client and client.state['STATE'] in [STATE_ACTIVE]: for node_key in client.list(): - client.get(node_key).resolve() + client.get(node_key)._resolve() @persistent From 90d4bb0e47628af6aacef1726d4d4f9af277a2c4 Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 17:55:10 +0100 Subject: [PATCH 10/33] feat: mesh dump refactoring wip --- 
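Note: this refactor swaps the per-element bmesh dump for flat numpy buffers gathered with foreach_get() and serialized with tobytes(); on load they are rebuilt with np.frombuffer() and written back with foreach_set(). A minimal round-trip sketch of that pattern, assuming an existing mesh datablock named "Cube":

    import bpy
    import numpy as np

    mesh = bpy.data.meshes["Cube"]       # hypothetical mesh name
    vert_count = len(mesh.vertices)

    # dump: gather every vertex coordinate into one flat float64 buffer
    verts_co = np.empty(vert_count * 3, dtype=np.float64)
    mesh.vertices.foreach_get("co", verts_co)
    blob = verts_co.tobytes()            # compact bytes, ready to replicate

    # load: rebuild the buffer and write it back in a single call
    mesh.vertices.foreach_set("co", np.frombuffer(blob, dtype=np.float64))
    mesh.update()
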
multi_user/bl_types/bl_mesh.py | 112 ++++++++++++++++++--------------- multi_user/utils.py | 6 +- 2 files changed, 67 insertions(+), 51 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index d9195ae..6842623 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -19,82 +19,90 @@ import bpy import bmesh import mathutils +import logging +import numpy as np from .. import utils from ..libs.replication.replication.constants import DIFF_BINARY from .bl_datablock import BlDatablock +logger = logging.getLogger(__name__) def dump_mesh(mesh, data={}): import bmesh mesh_data = data - mesh_buffer = bmesh.new() - # https://blog.michelanders.nl/2016/02/copying-vertices-to-numpy-arrays-in_4.html - mesh_buffer.from_mesh(mesh) - uv_layer = mesh_buffer.loops.layers.uv.verify() - bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify() - skin_layer = mesh_buffer.verts.layers.skin.verify() + # VERTICES + start = utils.current_milli_time() - verts = {} - for vert in mesh_buffer.verts: - v = {} - v["co"] = list(vert.co) + vert_count = len(mesh.vertices) + shape = (vert_count, 3) - # vert metadata - v['bevel'] = vert[bevel_layer] - v['normal'] = list(vert.normal) - # v['skin'] = list(vert[skin_layer]) + verts_co = np.empty(vert_count*3, dtype=np.float64) + mesh.vertices.foreach_get('co', verts_co) + # verts_co.shape = shape + mesh_data["verts_co"] = verts_co.tobytes() - verts[str(vert.index)] = v + verts_normal = np.empty(vert_count*3, dtype=np.float64) + mesh.vertices.foreach_get('normal', verts_normal) + # verts_normal.shape = shape + mesh_data["verts_normal"] = verts_normal.tobytes() + + verts_bevel = np.empty(vert_count, dtype=np.float64) + mesh.vertices.foreach_get('bevel_weight', verts_bevel) + mesh_data["verts_bevel"] = verts_bevel.tobytes() - mesh_data["verts"] = verts + logger.error(f"verts {utils.current_milli_time()-start} ms") + + # EDGES + start = utils.current_milli_time() + edge_count = len(mesh.edges) + + edges_vert = np.empty(edge_count*2, dtype=np.int) + mesh.edges.foreach_get('vertices', edges_vert) + # edges_vert.shape = (edge_count, 2) + mesh_data["egdes_vert"] = edges_vert.tobytes() - edges = {} - for edge in mesh_buffer.edges: - e = {} - e["verts"] = [edge.verts[0].index, edge.verts[1].index] + logger.error(f"edges {utils.current_milli_time()-start} ms") - # Edge metadata - e["smooth"] = edge.smooth + start = utils.current_milli_time() - edges[edge.index] = e - mesh_data["edges"] = edges + # POLYGONS + start = utils.current_milli_time() + poly_count = len(mesh.polygons) - faces = {} - for face in mesh_buffer.faces: - f = {} - fverts = [] - for vert in face.verts: - fverts.append(vert.index) + poly_mat = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("material_index", poly_mat) + mesh_data["poly_mat"] = poly_mat.tobytes() - f["verts"] = fverts - f["material_index"] = face.material_index - f["smooth"] = face.smooth - f["normal"] = list(face.normal) - f["index"] = face.index + poly_loop_start = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("loop_start", poly_loop_start) + mesh_data["poly_loop_start"] = poly_loop_start.tobytes() - uvs = [] - # Face metadata - for loop in face.loops: - loop_uv = loop[uv_layer] + poly_loop_total = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("loop_total", poly_loop_total) + mesh_data["poly_loop_total"] = poly_loop_total.tobytes() - uvs.append(list(loop_uv.uv)) + poly_smooth = np.empty(poly_count, dtype=np.bool) + 
mesh.polygons.foreach_get("use_smooth", poly_smooth) + mesh_data["poly_smooth"] = poly_smooth.tobytes() + + logger.error(f"polygons {utils.current_milli_time()-start} ms") - f["uv"] = uvs - faces[face.index] = f + # UV + start = utils.current_milli_time() + mesh_data['uv_layers'] = {} + for layer in mesh.uv_layers: + mesh_data['uv_layers'][layer.name] = {} + + uv_layer = np.empty(len(layer.data)*2, dtype=np.float64) + layer.data.foreach_get("uv", uv_layer) - mesh_data["faces"] = faces - - uv_layers = [] - for uv_layer in mesh.uv_layers: - uv_layers.append(uv_layer.name) - - mesh_data["uv_layers"] = uv_layers - # return mesh_data + mesh_data['uv_layers'][layer.name]['data'] = uv_layer.tobytes() + logger.error(f"uvs {utils.current_milli_time()-start} ms") class BlMesh(BlDatablock): bl_id = "meshes" @@ -174,8 +182,12 @@ class BlMesh(BlDatablock): 'use_auto_smooth', 'auto_smooth_angle' ] + + data = dumper.dump(pointer) + dump_mesh(pointer, data) + # Fix material index m_list = [] for material in pointer.materials: diff --git a/multi_user/utils.py b/multi_user/utils.py index 5da0db8..cd69456 100644 --- a/multi_user/utils.py +++ b/multi_user/utils.py @@ -22,6 +22,7 @@ import os import random import string import sys +import time from uuid import uuid4 from collections.abc import Iterable @@ -175,4 +176,7 @@ def resolve_from_id(id, optionnal_type=None): def get_preferences(): - return bpy.context.preferences.addons[__package__].preferences \ No newline at end of file + return bpy.context.preferences.addons[__package__].preferences + +def current_milli_time(): + return int(round(time.time() * 1000)) \ No newline at end of file From 01fdf7b35b187c4a6840dcddb9b936eae295fb2a Mon Sep 17 00:00:00 2001 From: Swann Date: Mon, 23 Mar 2020 21:49:28 +0100 Subject: [PATCH 11/33] feat: mesh implementation cleanup progress --- multi_user/bl_types/bl_mesh.py | 115 ++++++++++++++++++++------------- 1 file changed, 70 insertions(+), 45 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 6842623..be5ac14 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -29,10 +29,7 @@ from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) def dump_mesh(mesh, data={}): - import bmesh - mesh_data = data - # https://blog.michelanders.nl/2016/02/copying-vertices-to-numpy-arrays-in_4.html # VERTICES start = utils.current_milli_time() @@ -45,14 +42,14 @@ def dump_mesh(mesh, data={}): # verts_co.shape = shape mesh_data["verts_co"] = verts_co.tobytes() - verts_normal = np.empty(vert_count*3, dtype=np.float64) - mesh.vertices.foreach_get('normal', verts_normal) - # verts_normal.shape = shape - mesh_data["verts_normal"] = verts_normal.tobytes() + # verts_normal = np.empty(vert_count*3, dtype=np.float64) + # mesh.vertices.foreach_get('normal', verts_normal) + # # verts_normal.shape = shape + # mesh_data["verts_normal"] = verts_normal.tobytes() - verts_bevel = np.empty(vert_count, dtype=np.float64) - mesh.vertices.foreach_get('bevel_weight', verts_bevel) - mesh_data["verts_bevel"] = verts_bevel.tobytes() + # verts_bevel = np.empty(vert_count, dtype=np.float64) + # mesh.vertices.foreach_get('bevel_weight', verts_bevel) + # mesh_data["verts_bevel"] = verts_bevel.tobytes() logger.error(f"verts {utils.current_milli_time()-start} ms") @@ -64,7 +61,7 @@ def dump_mesh(mesh, data={}): mesh.edges.foreach_get('vertices', edges_vert) # edges_vert.shape = (edge_count, 2) mesh_data["egdes_vert"] = edges_vert.tobytes() - + mesh_data["egdes_count"] = 
len(mesh.edges) logger.error(f"edges {utils.current_milli_time()-start} ms") start = utils.current_milli_time() @@ -104,6 +101,27 @@ def dump_mesh(mesh, data={}): logger.error(f"uvs {utils.current_milli_time()-start} ms") + # LOOPS + start = utils.current_milli_time() + loop_count = len(mesh.loops) + + # loop_bitangent = np.empty(loop_count*3, dtype=np.float64) + # mesh.loops.foreach_get("bitangent", loop_bitangent) + + loop_tangent = np.empty(loop_count*3, dtype=np.float64) + mesh.loops.foreach_get("tangent", loop_tangent) + mesh_data["loop_tangent"] = loop_tangent.tobytes() + + loop_normal = np.empty(loop_count*3, dtype=np.float64) + mesh.loops.foreach_get("normal", loop_normal) + mesh_data["loop_normal"] = loop_normal.tobytes() + + loop_vertex_index = np.empty(loop_count, dtype=np.int) + mesh.loops.foreach_get("vertex_index", loop_vertex_index) + mesh_data["loop_vertex_index"] = loop_vertex_index.tobytes() + + logger.error(f"loops {utils.current_milli_time()-start} ms") + class BlMesh(BlDatablock): bl_id = "meshes" bl_class = bpy.types.Mesh @@ -127,49 +145,56 @@ class BlMesh(BlDatablock): target.materials.append(bpy.data.materials[m]) # 2 - LOAD GEOMETRY - mesh_buffer = bmesh.new() + # 2.a - VERTS + vertices = np.frombuffer(data["verts_co"], dtype=np.float64) + vert_count = int(len(vertices)/3) - for i in data["verts"]: - v = mesh_buffer.verts.new(data["verts"][i]["co"]) - v.normal = data["verts"][i]["normal"] - mesh_buffer.verts.ensure_lookup_table() + nb_vert_to_add = vert_count - len(target.vertices) + target.vertices.add(nb_vert_to_add) + target.vertices.foreach_set('co', vertices) - for i in data["edges"]: - verts = mesh_buffer.verts - v1 = data["edges"][i]["verts"][0] - v2 = data["edges"][i]["verts"][1] - edge = mesh_buffer.edges.new([verts[v1], verts[v2]]) - edge.smooth = data["edges"][i]["smooth"] - - mesh_buffer.edges.ensure_lookup_table() - for p in data["faces"]: - verts = [] - for v in data["faces"][p]["verts"]: - verts.append(mesh_buffer.verts[v]) + # 2.b - EDGES + egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int) + edge_count = data["egdes_count"] - if len(verts) > 0: - f = mesh_buffer.faces.new(verts) + nb_edges_to_add = edge_count - len(target.edges) + target.edges.add(nb_edges_to_add) - uv_layer = mesh_buffer.loops.layers.uv.verify() + target.edges.foreach_set("vertices", egdes_vert) - f.smooth = data["faces"][p]["smooth"] - f.normal = data["faces"][p]["normal"] - f.index = data["faces"][p]["index"] - f.material_index = data["faces"][p]['material_index'] - # UV loading - for i, loop in enumerate(f.loops): - loop_uv = loop[uv_layer] - loop_uv.uv = data["faces"][p]["uv"][i] - mesh_buffer.faces.ensure_lookup_table() - mesh_buffer.to_mesh(target) + # 2.b - POLY + poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.int) + poly_count = len(poly_smooth) + + nb_poly_to_add = poly_count - len(target.polygons) + target.polygons.add(nb_poly_to_add) + + poly_loop_start = np.frombuffer(data["poly_loop_start"], dtype=np.int) + target.polygons.foreach_set("loop_start", poly_loop_start) + + poly_loop_total = np.frombuffer(data["poly_loop_total"], dtype=np.int) + target.polygons.foreach_set("loop_total", poly_loop_total) + + + # 2.c - LOOPS + loop_vertex_index = np.frombuffer(data['loop_vertex_index'], dtype=np.float64) + loops_count = len(loop_vertex_index) + + nb_loop_to_add = loops_count - len(target.loops) + target.loops.add(nb_loop_to_add) + + target.loops.foreach_set("vertex_intex", loop_vertex_index) + + + # target.loops.foreach_set("vertex_index", 
loops_vert_idx) + # target.polygons.foreach_set("loop_start", faces_loop_start) + # target.polygons.foreach_set("loop_total", faces_loop_total) # 3 - LOAD METADATA # uv's - utils.dump_anything.load(target.uv_layers, data['uv_layers']) - - bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify() - skin_layer = mesh_buffer.verts.layers.skin.verify() + # utils.dump_anything.load(target.uv_layers, data['uv_layers']) + target.update() utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): From 757dbfd6ab723367137f39a0915280b71f7be671 Mon Sep 17 00:00:00 2001 From: Swann Date: Tue, 24 Mar 2020 19:09:02 +0100 Subject: [PATCH 12/33] feat: mesh loading progress --- multi_user/bl_types/bl_mesh.py | 110 ++++++++++++++++----------------- 1 file changed, 53 insertions(+), 57 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index be5ac14..2c44377 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -59,7 +59,6 @@ def dump_mesh(mesh, data={}): edges_vert = np.empty(edge_count*2, dtype=np.int) mesh.edges.foreach_get('vertices', edges_vert) - # edges_vert.shape = (edge_count, 2) mesh_data["egdes_vert"] = edges_vert.tobytes() mesh_data["egdes_count"] = len(mesh.edges) logger.error(f"edges {utils.current_milli_time()-start} ms") @@ -69,10 +68,11 @@ def dump_mesh(mesh, data={}): # POLYGONS start = utils.current_milli_time() poly_count = len(mesh.polygons) + mesh_data["poly_count"] = poly_count - poly_mat = np.empty(poly_count, dtype=np.int) - mesh.polygons.foreach_get("material_index", poly_mat) - mesh_data["poly_mat"] = poly_mat.tobytes() + # poly_mat = np.empty(poly_count, dtype=np.int) + # mesh.polygons.foreach_get("material_index", poly_mat) + # mesh_data["poly_mat"] = poly_mat.tobytes() poly_loop_start = np.empty(poly_count, dtype=np.int) mesh.polygons.foreach_get("loop_start", poly_loop_start) @@ -88,6 +88,27 @@ def dump_mesh(mesh, data={}): logger.error(f"polygons {utils.current_milli_time()-start} ms") + # LOOPS + start = utils.current_milli_time() + loop_count = len(mesh.loops) + mesh_data["loop_count"] = loop_count + # loop_bitangent = np.empty(loop_count*3, dtype=np.float64) + # mesh.loops.foreach_get("bitangent", loop_bitangent) + + # loop_tangent = np.empty(loop_count*3, dtype=np.float64) + # mesh.loops.foreach_get("tangent", loop_tangent) + # mesh_data["loop_tangent"] = loop_tangent.tobytes() + + loop_normal = np.empty(loop_count*3, dtype=np.float64) + mesh.loops.foreach_get("normal", loop_normal) + mesh_data["loop_normal"] = loop_normal.tobytes() + + loop_vertex_index = np.empty(loop_count, dtype=np.int) + mesh.loops.foreach_get("vertex_index", loop_vertex_index) + mesh_data["loop_vertex_index"] = loop_vertex_index.tobytes() + + logger.error(f"loops {utils.current_milli_time()-start} ms") + # UV start = utils.current_milli_time() mesh_data['uv_layers'] = {} @@ -101,26 +122,7 @@ def dump_mesh(mesh, data={}): logger.error(f"uvs {utils.current_milli_time()-start} ms") - # LOOPS - start = utils.current_milli_time() - loop_count = len(mesh.loops) - - # loop_bitangent = np.empty(loop_count*3, dtype=np.float64) - # mesh.loops.foreach_get("bitangent", loop_bitangent) - - loop_tangent = np.empty(loop_count*3, dtype=np.float64) - mesh.loops.foreach_get("tangent", loop_tangent) - mesh_data["loop_tangent"] = loop_tangent.tobytes() - - loop_normal = np.empty(loop_count*3, dtype=np.float64) - mesh.loops.foreach_get("normal", loop_normal) - mesh_data["loop_normal"] = loop_normal.tobytes() - - 
loop_vertex_index = np.empty(loop_count, dtype=np.int) - mesh.loops.foreach_get("vertex_index", loop_vertex_index) - mesh_data["loop_vertex_index"] = loop_vertex_index.tobytes() - - logger.error(f"loops {utils.current_milli_time()-start} ms") + class BlMesh(BlDatablock): bl_id = "meshes" @@ -145,57 +147,51 @@ class BlMesh(BlDatablock): target.materials.append(bpy.data.materials[m]) # 2 - LOAD GEOMETRY + if target.vertices: + target.clear_geometry() + # 2.a - VERTS vertices = np.frombuffer(data["verts_co"], dtype=np.float64) vert_count = int(len(vertices)/3) - - nb_vert_to_add = vert_count - len(target.vertices) - target.vertices.add(nb_vert_to_add) - target.vertices.foreach_set('co', vertices) + target.vertices.add(vert_count) # 2.b - EDGES egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int) edge_count = data["egdes_count"] - - nb_edges_to_add = edge_count - len(target.edges) - target.edges.add(nb_edges_to_add) - - target.edges.foreach_set("vertices", egdes_vert) - - # 2.b - POLY - poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.int) - poly_count = len(poly_smooth) - - nb_poly_to_add = poly_count - len(target.polygons) - target.polygons.add(nb_poly_to_add) - - poly_loop_start = np.frombuffer(data["poly_loop_start"], dtype=np.int) - target.polygons.foreach_set("loop_start", poly_loop_start) - - poly_loop_total = np.frombuffer(data["poly_loop_total"], dtype=np.int) - target.polygons.foreach_set("loop_total", poly_loop_total) - + target.edges.add(edge_count) # 2.c - LOOPS - loop_vertex_index = np.frombuffer(data['loop_vertex_index'], dtype=np.float64) - loops_count = len(loop_vertex_index) - - nb_loop_to_add = loops_count - len(target.loops) - target.loops.add(nb_loop_to_add) + loops_count = data["loop_count"] + target.loops.add(loops_count) - target.loops.foreach_set("vertex_intex", loop_vertex_index) + loop_vertex_index = np.frombuffer(data['loop_vertex_index'], dtype=np.int) + loop_normal = np.frombuffer(data['loop_normal'], dtype=np.float64) + + # 2.b - POLY + poly_count = data["poly_count"] + target.polygons.add(poly_count) + + poly_loop_start = np.frombuffer(data["poly_loop_start"], dtype=np.int) + poly_loop_total = np.frombuffer(data["poly_loop_total"], dtype=np.int) + poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.bool) + + + target.vertices.foreach_set('co', vertices) + target.edges.foreach_set("vertices", egdes_vert) + target.loops.foreach_set("vertex_index", loop_vertex_index) + target.loops.foreach_set("normal", loop_normal) + target.polygons.foreach_set("loop_total", poly_loop_total) + target.polygons.foreach_set("loop_start", poly_loop_start) + target.polygons.foreach_set("use_smooth", poly_smooth) - # target.loops.foreach_set("vertex_index", loops_vert_idx) - # target.polygons.foreach_set("loop_start", faces_loop_start) - # target.polygons.foreach_set("loop_total", faces_loop_total) # 3 - LOAD METADATA # uv's # utils.dump_anything.load(target.uv_layers, data['uv_layers']) target.update() - utils.dump_anything.load(target, data) + # utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): assert(pointer) From daff548010b0e1f6966927bbb7765a16b2d50e2f Mon Sep 17 00:00:00 2001 From: Swann Date: Tue, 24 Mar 2020 19:40:18 +0100 Subject: [PATCH 13/33] feat: uv_layer loading --- multi_user/bl_types/bl_mesh.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 2c44377..8178270 100644 --- 
a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -70,9 +70,9 @@ def dump_mesh(mesh, data={}): poly_count = len(mesh.polygons) mesh_data["poly_count"] = poly_count - # poly_mat = np.empty(poly_count, dtype=np.int) - # mesh.polygons.foreach_get("material_index", poly_mat) - # mesh_data["poly_mat"] = poly_mat.tobytes() + poly_mat = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("material_index", poly_mat) + mesh_data["poly_mat"] = poly_mat.tobytes() poly_loop_start = np.empty(poly_count, dtype=np.int) mesh.polygons.foreach_get("loop_start", poly_loop_start) @@ -160,14 +160,14 @@ class BlMesh(BlDatablock): edge_count = data["egdes_count"] target.edges.add(edge_count) - # 2.c - LOOPS + # # 2.c - LOOPS loops_count = data["loop_count"] target.loops.add(loops_count) loop_vertex_index = np.frombuffer(data['loop_vertex_index'], dtype=np.int) loop_normal = np.frombuffer(data['loop_normal'], dtype=np.float64) - # 2.b - POLY + # # 2.b - POLY poly_count = data["poly_count"] target.polygons.add(poly_count) @@ -175,6 +175,7 @@ class BlMesh(BlDatablock): poly_loop_total = np.frombuffer(data["poly_loop_total"], dtype=np.int) poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.bool) + poly_mat = np.frombuffer(data["poly_mat"], dtype=np.int) target.vertices.foreach_set('co', vertices) target.edges.foreach_set("vertices", egdes_vert) @@ -183,14 +184,23 @@ class BlMesh(BlDatablock): target.polygons.foreach_set("loop_total", poly_loop_total) target.polygons.foreach_set("loop_start", poly_loop_start) target.polygons.foreach_set("use_smooth", poly_smooth) - + target.polygons.foreach_set("material_index", poly_mat) # 3 - LOAD METADATA # uv's # utils.dump_anything.load(target.uv_layers, data['uv_layers']) + + for layer in data['uv_layers']: + if layer not in target.uv_layers: + target.uv_layers.new(name=layer) + + uv_buffer = np.frombuffer(data["uv_layers"][layer]['data']) - target.update() + target.uv_layers[layer].data.foreach_set('uv', uv_buffer) + + + target.validate () # utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): From c3ae56abd2e850af6a8c09dc5cdac5d38099c6d8 Mon Sep 17 00:00:00 2001 From: Swann Date: Wed, 25 Mar 2020 09:47:20 +0100 Subject: [PATCH 14/33] refactor: cleanup progress --- multi_user/bl_types/bl_mesh.py | 202 +++++++++++++-------------------- 1 file changed, 81 insertions(+), 121 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 8178270..4337493 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -28,101 +28,6 @@ from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) -def dump_mesh(mesh, data={}): - mesh_data = data - - # VERTICES - start = utils.current_milli_time() - - vert_count = len(mesh.vertices) - shape = (vert_count, 3) - - verts_co = np.empty(vert_count*3, dtype=np.float64) - mesh.vertices.foreach_get('co', verts_co) - # verts_co.shape = shape - mesh_data["verts_co"] = verts_co.tobytes() - - # verts_normal = np.empty(vert_count*3, dtype=np.float64) - # mesh.vertices.foreach_get('normal', verts_normal) - # # verts_normal.shape = shape - # mesh_data["verts_normal"] = verts_normal.tobytes() - - # verts_bevel = np.empty(vert_count, dtype=np.float64) - # mesh.vertices.foreach_get('bevel_weight', verts_bevel) - # mesh_data["verts_bevel"] = verts_bevel.tobytes() - - logger.error(f"verts {utils.current_milli_time()-start} ms") - - # EDGES - start = utils.current_milli_time() - edge_count = len(mesh.edges) - - 
edges_vert = np.empty(edge_count*2, dtype=np.int) - mesh.edges.foreach_get('vertices', edges_vert) - mesh_data["egdes_vert"] = edges_vert.tobytes() - mesh_data["egdes_count"] = len(mesh.edges) - logger.error(f"edges {utils.current_milli_time()-start} ms") - - start = utils.current_milli_time() - - # POLYGONS - start = utils.current_milli_time() - poly_count = len(mesh.polygons) - mesh_data["poly_count"] = poly_count - - poly_mat = np.empty(poly_count, dtype=np.int) - mesh.polygons.foreach_get("material_index", poly_mat) - mesh_data["poly_mat"] = poly_mat.tobytes() - - poly_loop_start = np.empty(poly_count, dtype=np.int) - mesh.polygons.foreach_get("loop_start", poly_loop_start) - mesh_data["poly_loop_start"] = poly_loop_start.tobytes() - - poly_loop_total = np.empty(poly_count, dtype=np.int) - mesh.polygons.foreach_get("loop_total", poly_loop_total) - mesh_data["poly_loop_total"] = poly_loop_total.tobytes() - - poly_smooth = np.empty(poly_count, dtype=np.bool) - mesh.polygons.foreach_get("use_smooth", poly_smooth) - mesh_data["poly_smooth"] = poly_smooth.tobytes() - - logger.error(f"polygons {utils.current_milli_time()-start} ms") - - # LOOPS - start = utils.current_milli_time() - loop_count = len(mesh.loops) - mesh_data["loop_count"] = loop_count - # loop_bitangent = np.empty(loop_count*3, dtype=np.float64) - # mesh.loops.foreach_get("bitangent", loop_bitangent) - - # loop_tangent = np.empty(loop_count*3, dtype=np.float64) - # mesh.loops.foreach_get("tangent", loop_tangent) - # mesh_data["loop_tangent"] = loop_tangent.tobytes() - - loop_normal = np.empty(loop_count*3, dtype=np.float64) - mesh.loops.foreach_get("normal", loop_normal) - mesh_data["loop_normal"] = loop_normal.tobytes() - - loop_vertex_index = np.empty(loop_count, dtype=np.int) - mesh.loops.foreach_get("vertex_index", loop_vertex_index) - mesh_data["loop_vertex_index"] = loop_vertex_index.tobytes() - - logger.error(f"loops {utils.current_milli_time()-start} ms") - - # UV - start = utils.current_milli_time() - mesh_data['uv_layers'] = {} - for layer in mesh.uv_layers: - mesh_data['uv_layers'][layer.name] = {} - - uv_layer = np.empty(len(layer.data)*2, dtype=np.float64) - layer.data.foreach_get("uv", uv_layer) - - mesh_data['uv_layers'][layer.name]['data'] = uv_layer.tobytes() - - logger.error(f"uvs {utils.current_milli_time()-start} ms") - - class BlMesh(BlDatablock): bl_id = "meshes" @@ -139,44 +44,46 @@ class BlMesh(BlDatablock): def load_implementation(self, data, target): if not target or not target.is_editmode: - # 1 - LOAD MATERIAL SLOTS - # SLots + # MATERIAL SLOTS i = 0 for m in data["material_list"]: target.materials.append(bpy.data.materials[m]) - # 2 - LOAD GEOMETRY + # CLEAR GEOMETRY if target.vertices: target.clear_geometry() - # 2.a - VERTS + # VERTS vertices = np.frombuffer(data["verts_co"], dtype=np.float64) vert_count = int(len(vertices)/3) target.vertices.add(vert_count) - # 2.b - EDGES + # EDGES egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int) edge_count = data["egdes_count"] target.edges.add(edge_count) - # # 2.c - LOOPS + # LOOPS loops_count = data["loop_count"] target.loops.add(loops_count) - loop_vertex_index = np.frombuffer(data['loop_vertex_index'], dtype=np.int) + loop_vertex_index = np.frombuffer( + data['loop_vertex_index'], dtype=np.int) loop_normal = np.frombuffer(data['loop_normal'], dtype=np.float64) - # # 2.b - POLY + # POLY poly_count = data["poly_count"] target.polygons.add(poly_count) - poly_loop_start = np.frombuffer(data["poly_loop_start"], dtype=np.int) - poly_loop_total = 
np.frombuffer(data["poly_loop_total"], dtype=np.int) + poly_loop_start = np.frombuffer( + data["poly_loop_start"], dtype=np.int) + poly_loop_total = np.frombuffer( + data["poly_loop_total"], dtype=np.int) poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.bool) poly_mat = np.frombuffer(data["poly_mat"], dtype=np.int) - + target.vertices.foreach_set('co', vertices) target.edges.foreach_set("vertices", egdes_vert) target.loops.foreach_set("vertex_index", loop_vertex_index) @@ -185,40 +92,93 @@ class BlMesh(BlDatablock): target.polygons.foreach_set("loop_start", poly_loop_start) target.polygons.foreach_set("use_smooth", poly_smooth) target.polygons.foreach_set("material_index", poly_mat) - - - # 3 - LOAD METADATA - # uv's - # utils.dump_anything.load(target.uv_layers, data['uv_layers']) + # UV Layers for layer in data['uv_layers']: if layer not in target.uv_layers: target.uv_layers.new(name=layer) - + uv_buffer = np.frombuffer(data["uv_layers"][layer]['data']) target.uv_layers[layer].data.foreach_set('uv', uv_buffer) - - target.validate () - # utils.dump_anything.load(target, data) + target.validate() + utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): assert(pointer) + mesh = pointer + dumper = utils.dump_anything.Dumper() - dumper.depth = 2 + dumper.depth = 1 dumper.include_filter = [ 'name', 'use_auto_smooth', 'auto_smooth_angle' ] - - data = dumper.dump(pointer) - - dump_mesh(pointer, data) - + data = dumper.dump(mesh) + + # VERTICES + vert_count = len(mesh.vertices) + + verts_co = np.empty(vert_count*3, dtype=np.float64) + mesh.vertices.foreach_get('co', verts_co) + data["verts_co"] = verts_co.tobytes() + + # EDGES + edge_count = len(mesh.edges) + + edges_vert = np.empty(edge_count*2, dtype=np.int) + mesh.edges.foreach_get('vertices', edges_vert) + data["egdes_vert"] = edges_vert.tobytes() + data["egdes_count"] = len(mesh.edges) + + # TODO: edge crease, bevel_weight + + # POLYGONS + poly_count = len(mesh.polygons) + data["poly_count"] = poly_count + + poly_mat = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("material_index", poly_mat) + data["poly_mat"] = poly_mat.tobytes() + + poly_loop_start = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("loop_start", poly_loop_start) + data["poly_loop_start"] = poly_loop_start.tobytes() + + poly_loop_total = np.empty(poly_count, dtype=np.int) + mesh.polygons.foreach_get("loop_total", poly_loop_total) + data["poly_loop_total"] = poly_loop_total.tobytes() + + poly_smooth = np.empty(poly_count, dtype=np.bool) + mesh.polygons.foreach_get("use_smooth", poly_smooth) + data["poly_smooth"] = poly_smooth.tobytes() + + # LOOPS + loop_count = len(mesh.loops) + data["loop_count"] = loop_count + + loop_normal = np.empty(loop_count*3, dtype=np.float64) + mesh.loops.foreach_get("normal", loop_normal) + data["loop_normal"] = loop_normal.tobytes() + + loop_vertex_index = np.empty(loop_count, dtype=np.int) + mesh.loops.foreach_get("vertex_index", loop_vertex_index) + data["loop_vertex_index"] = loop_vertex_index.tobytes() + + # UV Layers + data['uv_layers'] = {} + for layer in mesh.uv_layers: + data['uv_layers'][layer.name] = {} + + uv_layer = np.empty(len(layer.data)*2, dtype=np.float64) + layer.data.foreach_get("uv", uv_layer) + + data['uv_layers'][layer.name]['data'] = uv_layer.tobytes() + # Fix material index m_list = [] for material in pointer.materials: From d7964b645a7069f00cb4357eaaab09045f012aa2 Mon Sep 17 00:00:00 2001 From: Swann Date: Wed, 25 Mar 2020 10:35:31 +0100 Subject: 
[PATCH 15/33] fix: generated image path error --- multi_user/bl_types/bl_image.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/multi_user/bl_types/bl_image.py b/multi_user/bl_types/bl_image.py index c758fb0..8497725 100644 --- a/multi_user/bl_types/bl_image.py +++ b/multi_user/bl_types/bl_image.py @@ -28,8 +28,10 @@ def dump_image(image): if image.source == "GENERATED": prefs = utils.get_preferences() img_name = "{}.png".format(image.name) - + + # Cache the image on the disk image.filepath_raw = os.path.join(prefs.cache_directory, img_name) + os.makedirs(prefs.cache_directory, exist_ok=True) image.file_format = "PNG" image.save() @@ -77,7 +79,7 @@ class BlImage(BlDatablock): image.colorspace_settings.name = data["colorspace_settings"]["name"] - def _dump(self, data, pointer=None): + def _dump(self, pointer=None): assert(pointer) data = {} data['pixels'] = dump_image(pointer) From f9222d84eab0f33eb6e2eee06f0612d1dc38e393 Mon Sep 17 00:00:00 2001 From: Swann Date: Wed, 25 Mar 2020 11:36:29 +0100 Subject: [PATCH 16/33] feat: crease and bevel_wieght support --- multi_user/bl_types/bl_mesh.py | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 4337493..d6a7a3b 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -44,6 +44,8 @@ class BlMesh(BlDatablock): def load_implementation(self, data, target): if not target or not target.is_editmode: + utils.dump_anything.load(target, data) + # MATERIAL SLOTS i = 0 @@ -60,9 +62,13 @@ class BlMesh(BlDatablock): target.vertices.add(vert_count) # EDGES + egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int) + edge_count = data["egdes_count"] target.edges.add(edge_count) + + # LOOPS loops_count = data["loop_count"] @@ -84,8 +90,18 @@ class BlMesh(BlDatablock): poly_mat = np.frombuffer(data["poly_mat"], dtype=np.int) + # LOADING target.vertices.foreach_set('co', vertices) target.edges.foreach_set("vertices", egdes_vert) + + if data['use_customdata_edge_crease']: + edges_crease = np.frombuffer(data["edges_crease"], dtype=np.float64) + target.edges.foreach_set("crease", edges_crease) + + if data['use_customdata_edge_bevel']: + edges_bevel = np.frombuffer(data["edges_bevel"], dtype=np.float64) + target.edges.foreach_set("bevel_weight", edges_bevel) + target.loops.foreach_set("vertex_index", loop_vertex_index) target.loops.foreach_set("normal", loop_normal) target.polygons.foreach_set("loop_total", poly_loop_total) @@ -93,6 +109,7 @@ class BlMesh(BlDatablock): target.polygons.foreach_set("use_smooth", poly_smooth) target.polygons.foreach_set("material_index", poly_mat) + # UV Layers for layer in data['uv_layers']: if layer not in target.uv_layers: @@ -103,7 +120,7 @@ class BlMesh(BlDatablock): target.uv_layers[layer].data.foreach_set('uv', uv_buffer) target.validate() - utils.dump_anything.load(target, data) + def dump_implementation(self, data, pointer=None): assert(pointer) @@ -115,11 +132,14 @@ class BlMesh(BlDatablock): dumper.include_filter = [ 'name', 'use_auto_smooth', - 'auto_smooth_angle' + 'auto_smooth_angle', + 'use_customdata_edge_bevel', + 'use_customdata_edge_crease' ] data = dumper.dump(mesh) + # TODO: selective dump # VERTICES vert_count = len(mesh.vertices) @@ -135,7 +155,15 @@ class BlMesh(BlDatablock): data["egdes_vert"] = edges_vert.tobytes() data["egdes_count"] = len(mesh.edges) - # TODO: edge crease, bevel_weight + if mesh.use_customdata_edge_crease: + 
edges_crease = np.empty(edge_count, dtype=np.float64) + mesh.edges.foreach_get('crease', edges_crease) + data["edges_crease"] = edges_crease.tobytes() + + if mesh.use_customdata_edge_bevel: + edges_bevel = np.empty(edge_count, dtype=np.float64) + mesh.edges.foreach_get('bevel_weight', edges_bevel) + data["edges_bevel"] = edges_bevel.tobytes() # POLYGONS poly_count = len(mesh.polygons) From a84fccb3ced6e82400ab099bbb7460206a44e984 Mon Sep 17 00:00:00 2001 From: Swann Date: Wed, 25 Mar 2020 13:24:12 +0100 Subject: [PATCH 17/33] feat: camera cleanup --- multi_user/bl_types/bl_camera.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/multi_user/bl_types/bl_camera.py b/multi_user/bl_types/bl_camera.py index 88112a3..4876010 100644 --- a/multi_user/bl_types/bl_camera.py +++ b/multi_user/bl_types/bl_camera.py @@ -31,6 +31,10 @@ class BlCamera(BlDatablock): bl_automatic_push = True bl_icon = 'CAMERA_DATA' + def _construct(self, data): + return bpy.data.cameras.new(data["name"]) + + def load_implementation(self, data, target): utils.dump_anything.load(target, data) @@ -40,12 +44,11 @@ class BlCamera(BlDatablock): if dof_settings: utils.dump_anything.load(target.dof, dof_settings) - def _construct(self, data): - return bpy.data.cameras.new(data["name"]) - def dump_implementation(self, data, pointer=None): assert(pointer) + # TODO: background image support + dumper = utils.dump_anything.Dumper() dumper.depth = 2 dumper.include_filter = [ @@ -67,6 +70,14 @@ class BlCamera(BlDatablock): 'aperture_blades', 'aperture_rotation', 'aperture_ratio', + 'display_size', + 'show_limits', + 'show_mist', + 'show_sensor', + 'show_name', + 'sensor_fit', + 'sensor_height', + 'sensor_width', ] return dumper.dump(pointer) From 6a98e749f9148a5f10184f4c3dea1dd1b1561c4d Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 09:20:41 +0100 Subject: [PATCH 18/33] feat: gpencil dump refactoring --- multi_user/bl_types/bl_gpencil.py | 204 ++++++++++++++++++++++++----- multi_user/bl_types/bl_material.py | 27 +++- 2 files changed, 199 insertions(+), 32 deletions(-) diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index 8fc8a86..0b9bc1d 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -18,36 +18,170 @@ import bpy import mathutils +import numpy as np from ..libs import dump_anything from .bl_datablock import BlDatablock +# GPencil data api is structured as it follow: +# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points -def load_gpencil_layer(target=None, data=None, create=False): +def dump_stroke(stroke): + """ Dump a grease pencil stroke to a dict - dump_anything.load(target, data) - for k,v in target.frames.items(): - target.frames.remove(v) + :param stroke: target grease pencil stroke + :type stroke: bpy.types.GPencilStroke + :return: dict + """ +< + assert(stroke) + + dumper = dump_anything.Dumper() + dumper.include_filter = [ + "aspect", + "display_mode", + "draw_cyclic", + "end_cap_mode", + "hardeness", + "line_width", + "material_index", + "start_cap_mode", + "uv_rotation", + "uv_scale", + "uv_translation", + "vertex_color_fill", + ] + dumped_stroke = dump_anything.dump(stroke) + + # Stoke points + p_count = len(stroke.points) + dumped_stroke['p_count'] = p_count + + p_co = np.empty(p_count*3, dtype=np.float64) + stroke.points.foreach_get('co', p_co) + + p_pressure = np.empty(p_count, dtype=np.float64) + stroke.points.foreach_get('pressure', p_pressure) + + p_strength = 
np.empty(p_count, dtype=np.float64) + stroke.points.foreach_get('strength', p_strength) + + p_vertex_color = np.empty(p_count*4, dtype=np.float64) + stroke.points.foreach_get('vertex_color', p_vertex_color) + + # TODO: uv_factor, uv_rotation + + dumped_stroke['p_co'] = p_co.tobytes() + dumped_stroke['p_pressure'] = p_pressure.tobytes() + dumped_stroke['p_strength'] = p_strength.tobytes() + dumped_stroke['p_vertex_color'] = p_vertex_color.tobytes() + + return dumped_stroke + +def load_stroke(stroke_data, stroke): + """ Load a grease pencil stroke from a dict + + :param stroke_data: dumped grease pencil stroke + :type stroke_data: dict + :param stroke: target grease pencil stroke + :type stroke: bpy.types.GPencilStroke + """ + assert(stroke and stroke_data) + + dump_anything.load(stroke, stroke_data) + + p_co = np.frombuffer(stroke_data["p_co"], dtype=np.float64) + p_pressure = np.frombuffer(stroke_data["p_pressure"], dtype=np.float64) + p_strength = np.frombuffer(stroke_data["p_strength"], dtype=np.float64) + p_vertex_color = np.frombuffer(stroke_data["p_vertex_color"], dtype=np.float64) + + stroke.points.add(stroke_data["p_count"]) + + stroke.points.foreach_set('co', p_co) + stroke.points.foreach_set('pressure', p_pressure) + stroke.points.foreach_set('strength', p_strength) + stroke.points.foreach_set('vertex_color', p_vertex_color) + + +def dump_frame(frame): + """ Dump a grease pencil frame to a dict + + :param frame: target grease pencil stroke + :type frame: bpy.types.GPencilFrame + :return: dict + """ + + assert(frame) + + dumped_frame = dict() + dumped_frame['frame_number'] = frame.frame_number + dumped_frame['strokes'] = [] + + # TODO: took existing strokes in account + for stroke in frame.strokes: + dumped_frame['strokes'].append(dump_stroke(stroke)) + + return dumped_frame + +def load_frame(frame_data, frame): + """ Load a grease pencil frame from a dict + + :param frame_data: source grease pencil frame + :type frame_data: dict + :param frame: target grease pencil stroke + :type frame: bpy.types.GPencilFrame + """ + + assert(frame and frame_data) + + frame.frame_number = frame_data['frame_number'] + + # TODO: took existing stroke in account + + for stroke_data in frame_data['strokes']: + target_stroke = frame.strokes.new() - for frame in data["frames"]: - - tframe = target.frames.new(data["frames"][frame]['frame_number']) + load_stroke(stroke_data, target_stroke) - for stroke in data["frames"][frame]["strokes"]: - try: - tstroke = tframe.strokes[stroke] - except: - tstroke = tframe.strokes.new() - dump_anything.load( - tstroke, data["frames"][frame]["strokes"][stroke]) +def dump_layer(layer): + """ Dump a grease pencil layer - for point in data["frames"][frame]["strokes"][stroke]["points"]: - p = data["frames"][frame]["strokes"][stroke]["points"][point] + :param layer: target grease pencil stroke + :type layer: bpy.types.GPencilFrame + """ - tstroke.points.add(1) - tpoint = tstroke.points[len(tstroke.points)-1] + assert(layer) + + dumper = dump_anything.Dumper() + + dumper.exclude_filter = [ + 'parent_type' + ] + dumped_layer = dumper.dump(layer) + + dumped_layer['frames'] = [] + + for frame in layer.frames: + dumped_layer['frames'].append(dump_frame(frame)) + + return dumped_layer + +def load_layer(layer_data, layer): + """ Load a grease pencil layer from a dict + + :param layer_data: source grease pencil layer data + :type layer_data: dict + :param layer: target grease pencil stroke + :type layer: bpy.types.GPencilFrame + """ + # TODO: take existing data in account + 
dump_anything.load(layer, layer_data) + + for frame_data in layer_data["frames"]: + target_frame = layer.frames.new(frame_data['frame_number']) + + load_frame(frame_data, target_frame) - dump_anything.load(tpoint, p) class BlGpencil(BlDatablock): @@ -65,26 +199,34 @@ class BlGpencil(BlDatablock): for layer in target.layers: target.layers.remove(layer) - if "layers" in data.keys(): - for layer in data["layers"]: - if layer not in target.layers.keys(): - gp_layer = target.layers.new(data["layers"][layer]["info"]) - else: - gp_layer = target.layers[layer] - load_gpencil_layer( - target=gp_layer, data=data["layers"][layer], create=True) - - dump_anything.load(target, data) - target.materials.clear() if "materials" in data.keys(): for mat in data['materials']: target.materials.append(bpy.data.materials[mat]) + if "layers" in data.keys(): + for layer in data["layers"]: + layer_data = data["layers"].get(layer) + + if layer not in target.layers.keys(): + target_layer = target.layers.new(data["layers"][layer]["info"]) + else: + target_layer = target.layers[layer] + + load_layer(layer_data, target_layer) + + dump_anything.load(target, data) + + + def dump_implementation(self, data, pointer=None): assert(pointer) data = dump_anything.dump(pointer, 2) - data['layers'] = dump_anything.dump(pointer.layers, 9) + + data['layers'] = {} + + for layer in pointer.layers: + data['layers'][layer.info] = dump_layer(layer) return data diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index 89f3f3d..196aa33 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -206,7 +206,32 @@ class BlMaterial(BlDatablock): data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links) elif pointer.is_grease_pencil: - data['grease_pencil'] = dump_anything.dump(pointer.grease_pencil, 3) + gp_mat_dumper = dump_anything.Dumper() + gp_mat_dumper.depth = 3 + + gp_mat_dumper.include_filter = [ + 'show_stroke', + 'mode', + 'stroke_style', + 'color', + 'use_overlap_strokes', + 'show_fill', + 'fill_style', + 'fill_color', + 'pass_index', + 'alignment_mode', + # 'fill_image', + 'texture_opacity', + 'mix_factor', + 'texture_offset', + 'texture_angle', + 'texture_scale', + 'texture_clamp', + 'gradient_type', + 'mix_color', + 'flip' + ] + data['grease_pencil'] = gp_mat_dumper.dump(pointer.grease_pencil) return data def resolve_deps_implementation(self): From 73019fc0b092e25dfe69668ef920764d440d04f3 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 11:12:26 +0100 Subject: [PATCH 19/33] feat: grease pencil progress --- multi_user/bl_types/bl_gpencil.py | 59 ++++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 13 deletions(-) diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index 0b9bc1d..81682a3 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -33,7 +33,7 @@ def dump_stroke(stroke): :type stroke: bpy.types.GPencilStroke :return: dict """ -< + assert(stroke) dumper = dump_anything.Dumper() @@ -51,7 +51,7 @@ def dump_stroke(stroke): "uv_translation", "vertex_color_fill", ] - dumped_stroke = dump_anything.dump(stroke) + dumped_stroke = dumper.dump(stroke) # Stoke points p_count = len(stroke.points) @@ -134,13 +134,12 @@ def load_frame(frame_data, frame): assert(frame and frame_data) - frame.frame_number = frame_data['frame_number'] + # frame.frame_number = frame_data['frame_number'] # TODO: took existing stroke in account for stroke_data in frame_data['strokes']: 
target_stroke = frame.strokes.new() - load_stroke(stroke_data, target_stroke) def dump_layer(layer): @@ -154,8 +153,40 @@ def dump_layer(layer): dumper = dump_anything.Dumper() - dumper.exclude_filter = [ - 'parent_type' + dumper.include_filter = [ + 'info', + 'opacity', + 'channel_color', + 'color', + # 'thickness', + 'tint_color', + 'tint_factor', + 'vertex_paint_opacity', + 'line_change', + 'use_onion_skinning', + # 'use_annotation_onion_skinning', + # 'annotation_onion_before_range', + # 'annotation_onion_after_range', + # 'annotation_onion_before_color', + # 'annotation_onion_after_color', + 'pass_index', + # 'viewlayer_render', + 'blend_mode', + 'hide', + 'annotation_hide', + 'lock', + # 'lock_frame', + # 'lock_material', + # 'use_mask_layer', + 'use_lights', + 'use_solo_mode', + 'select', + 'show_points', + 'show_in_front', + # 'parent', + # 'parent_type', + # 'parent_bone', + # 'matrix_inverse', ] dumped_layer = dumper.dump(layer) @@ -196,22 +227,24 @@ class BlGpencil(BlDatablock): return bpy.data.grease_pencils.new(data["name"]) def load_implementation(self, data, target): - for layer in target.layers: - target.layers.remove(layer) - target.materials.clear() if "materials" in data.keys(): for mat in data['materials']: target.materials.append(bpy.data.materials[mat]) + # TODO: reuse existing layer + for layer in target.layers: + target.layers.remove(layer) + if "layers" in data.keys(): for layer in data["layers"]: layer_data = data["layers"].get(layer) - if layer not in target.layers.keys(): - target_layer = target.layers.new(data["layers"][layer]["info"]) - else: - target_layer = target.layers[layer] + # if layer not in target.layers.keys(): + target_layer = target.layers.new(data["layers"][layer]["info"]) + # else: + # target_layer = target.layers[layer] + # target_layer.clear() load_layer(layer_data, target_layer) From 25825f7aeb7df64d3d3c5e35b304419acc54d821 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 11:45:41 +0100 Subject: [PATCH 20/33] feat: blender 2.83 api changes --- multi_user/bl_types/bl_gpencil.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index 81682a3..7ae4983 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -59,23 +59,23 @@ def dump_stroke(stroke): p_co = np.empty(p_count*3, dtype=np.float64) stroke.points.foreach_get('co', p_co) + dumped_stroke['p_co'] = p_co.tobytes() p_pressure = np.empty(p_count, dtype=np.float64) stroke.points.foreach_get('pressure', p_pressure) + dumped_stroke['p_pressure'] = p_pressure.tobytes() p_strength = np.empty(p_count, dtype=np.float64) stroke.points.foreach_get('strength', p_strength) + dumped_stroke['p_strength'] = p_strength.tobytes() - p_vertex_color = np.empty(p_count*4, dtype=np.float64) - stroke.points.foreach_get('vertex_color', p_vertex_color) + if bpy.app.version[1] >= 83: # new in blender 2.83 + p_vertex_color = np.empty(p_count*4, dtype=np.float64) + stroke.points.foreach_get('vertex_color', p_vertex_color) + dumped_stroke['p_vertex_color'] = p_vertex_color.tobytes() # TODO: uv_factor, uv_rotation - dumped_stroke['p_co'] = p_co.tobytes() - dumped_stroke['p_pressure'] = p_pressure.tobytes() - dumped_stroke['p_strength'] = p_strength.tobytes() - dumped_stroke['p_vertex_color'] = p_vertex_color.tobytes() - return dumped_stroke def load_stroke(stroke_data, stroke): @@ -91,16 +91,12 @@ def load_stroke(stroke_data, stroke): dump_anything.load(stroke, 
stroke_data) p_co = np.frombuffer(stroke_data["p_co"], dtype=np.float64) - p_pressure = np.frombuffer(stroke_data["p_pressure"], dtype=np.float64) - p_strength = np.frombuffer(stroke_data["p_strength"], dtype=np.float64) - p_vertex_color = np.frombuffer(stroke_data["p_vertex_color"], dtype=np.float64) - - stroke.points.add(stroke_data["p_count"]) - - stroke.points.foreach_set('co', p_co) - stroke.points.foreach_set('pressure', p_pressure) + p_pressure = np.frombuffer(strokD stroke.points.foreach_set('strength', p_strength) - stroke.points.foreach_set('vertex_color', p_vertex_color) + + if "p_vertex_color" in stroke_data: + p_vertex_color = np.frombuffer(stroke_data["p_vertex_color"], dtype=np.float64) + stroke.points.foreach_set('vertex_color', p_vertex_color) def dump_frame(frame): From 441378590343aa6e51046395bb03aa75badc0307 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 12:07:12 +0100 Subject: [PATCH 21/33] feat: missing lines --- multi_user/bl_types/bl_gpencil.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index 7ae4983..a6c032d 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -91,7 +91,13 @@ def load_stroke(stroke_data, stroke): dump_anything.load(stroke, stroke_data) p_co = np.frombuffer(stroke_data["p_co"], dtype=np.float64) - p_pressure = np.frombuffer(strokD + p_pressure = np.frombuffer(stroke_data["p_pressure"], dtype=np.float64) + p_strength = np.frombuffer(stroke_data["p_strength"], dtype=np.float64) + + stroke.points.add(stroke_data["p_count"]) + + stroke.points.foreach_set('co', p_co) + stroke.points.foreach_set('pressure', p_pressure) stroke.points.foreach_set('strength', p_strength) if "p_vertex_color" in stroke_data: From f0a2659b4377241e6db45b3fddc5491b9f938858 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 16:10:27 +0100 Subject: [PATCH 22/33] fix: collection destructor during loading --- multi_user/libs/dump_anything.py | 50 ++++++++++++++++++++++---------- 1 file changed, 35 insertions(+), 15 deletions(-) diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index 051e0e9..b580779 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -317,13 +317,22 @@ class Loader: CONSTRUCTOR_NEW = "new" CONSTRUCTOR_ADD = "add" + DESTRUCTOR_REMOVE = "remove" + DESTRUCTOR_CLEAR = "clear" + constructors = { - T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"], True), - T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, [], False), - T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"], True), - T.Constraint: (CONSTRUCTOR_NEW, ["type"], True), + T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]), + T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []), + T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]), + T.Constraint: (CONSTRUCTOR_NEW, ["type"]), # T.VertexGroup: (CONSTRUCTOR_NEW, ["name"], True), } + + destructors = { + T.ColorRampElement:DESTRUCTOR_REMOVE, + T.Modifier: DESTRUCTOR_CLEAR, + T.Constraint: CONSTRUCTOR_NEW, + } element_type = element.bl_rna_property.fixed_type constructor = constructors.get(type(element_type)) @@ -331,19 +340,30 @@ class Loader: if constructor is None: # collection type not supported return + destructor = destructors.get(type(element_type)) + # Try to clear existing - if constructor[2]: - getattr(element.read(), 'clear')() + if destructor: + if destructor == DESTRUCTOR_REMOVE: + collection = element.read() + for i in 
range(len(collection)-1): + collection.remove(collection[0]) + else: + getattr(element.read(), DESTRUCTOR_CLEAR)() - for dumped_element in dump.values(): - try: - constructor_parameters = [dumped_element[name] - for name in constructor[1]] - except KeyError: - logger.debug("Collection load error, missing parameters.") - continue # TODO handle error - new_element = getattr(element.read(), constructor[0])( - *constructor_parameters) + for dump_idx, dumped_element in enumerate(dump.values()): + if dump_idx == 0 and len(element.read())>0: + new_element = element.read()[0] + else: + try: + constructor_parameters = [dumped_element[name] + for name in constructor[1]] + except KeyError: + logger.debug("Collection load error, missing parameters.") + continue # TODO handle error + + new_element = getattr(element.read(), constructor[0])( + *constructor_parameters) self._load_any( BlenderAPIElement( new_element, occlude_read_only=self.occlude_read_only), From 2016af33b7931aeabcc17e98b6813bb77d147af1 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 17:13:30 +0100 Subject: [PATCH 23/33] fix: missing light shape --- multi_user/bl_types/bl_light.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/multi_user/bl_types/bl_light.py b/multi_user/bl_types/bl_light.py index 3d216f6..67f8af9 100644 --- a/multi_user/bl_types/bl_light.py +++ b/multi_user/bl_types/bl_light.py @@ -59,7 +59,8 @@ class BlLight(BlDatablock): "contact_shadow_distance", "contact_shadow_soft_size", "contact_shadow_bias", - "contact_shadow_thickness" + "contact_shadow_thickness", + "shape" ] data = dumper.dump(pointer) return data From 6334bfdc011f18762a3a42b3ce5b36fba99394ba Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 17:13:48 +0100 Subject: [PATCH 24/33] fix: node_tree dump --- multi_user/bl_types/bl_material.py | 55 +++++++++++++++++++++--------- multi_user/bl_types/bl_world.py | 11 +++--- 2 files changed, 44 insertions(+), 22 deletions(-) diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index 196aa33..b56d3c4 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -77,14 +77,40 @@ def load_node(target_node_tree, source): except: logger.error("{} not supported, skipping".format(input)) -def load_link(target_node_tree, source): - input_socket = target_node_tree.nodes[source['to_node'] - ['name']].inputs[source['to_socket']['name']] - output_socket = target_node_tree.nodes[source['from_node'] - ['name']].outputs[source['from_socket']['name']] +def load_links(links_data, node_tree): + """ Load node_tree links from a list + + :arg links_data: dumped node links + :type links_data: list + :arg node_tree: node links collection + :type node_tree: bpy.types.NodeTree + """ - target_node_tree.links.new(input_socket, output_socket) + for link in links_data: + input_socket = node_tree.nodes[link['to_node']].inputs[int(link['to_socket'])] + output_socket = node_tree.nodes[link['from_node']].outputs[int(link['from_socket'])] + node_tree.links.new(input_socket, output_socket) + +def dump_links(links): + """ Dump node_tree links collection to a list + + :arg links: node links collection + :type links: bpy.types.NodeLinks + :retrun: list + """ + + links_data = [] + + for link in links: + links_data.append({ + 'to_node':link.to_node.name, + 'to_socket':link.to_socket.path_from_id()[-2:-1], + 'from_node':link.from_node.name, + 'from_socket':link.from_socket.path_from_id()[-2:-1], + }) + + return links_data class BlMaterial(BlDatablock): 
bl_id = "materials" @@ -123,8 +149,7 @@ class BlMaterial(BlDatablock): # Load nodes links target.node_tree.links.clear() - for link in data["node_tree"]["links"]: - load_link(target.node_tree, data["node_tree"]["links"][link]) + load_links(data["node_tree"]["links"], target.node_tree) def dump_implementation(self, data, pointer=None): assert(pointer) @@ -160,14 +185,7 @@ class BlMaterial(BlDatablock): input_dumper = dump_anything.Dumper() input_dumper.depth = 2 input_dumper.include_filter = ["default_value"] - links_dumper = dump_anything.Dumper() - links_dumper.depth = 3 - links_dumper.include_filter = [ - "name", - "to_node", - "from_node", - "from_socket", - "to_socket"] + data = mat_dumper.dump(pointer) if pointer.use_nodes: @@ -202,8 +220,11 @@ class BlMaterial(BlDatablock): 'location' ] nodes[node.name]['mapping'] = curve_dumper.dump(node.mapping) + data["node_tree"]['nodes'] = nodes - data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links) + + + data["node_tree"]["links"] = dump_links(pointer.node_tree.links) elif pointer.is_grease_pencil: gp_mat_dumper = dump_anything.Dumper() diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index d23a65d..21dace2 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -21,7 +21,7 @@ import mathutils from .. import utils from .bl_datablock import BlDatablock -from .bl_material import load_link, load_node +from .bl_material import load_links, load_node, dump_links class BlWorld(BlDatablock): @@ -48,8 +48,8 @@ class BlWorld(BlDatablock): # Load nodes links target.node_tree.links.clear() - for link in data["node_tree"]["links"]: - load_link(target.node_tree, data["node_tree"]["links"][link]) + + load_links(data["node_tree"]["links"], target.node_tree) def dump_implementation(self, data, pointer=None): assert(pointer) @@ -104,8 +104,9 @@ class BlWorld(BlDatablock): nodes[node.name]['inputs'][i.name] = input_dumper.dump( i) data["node_tree"]['nodes'] = nodes - utils.dump_datablock_attibutes( - pointer.node_tree, ["links"], 3, data['node_tree']) + + data["node_tree"]['links'] = dump_links(pointer.node_tree.links) + return data def resolve_deps_implementation(self): From 667c3cd04d84e99f2f7a0e818679e87382c6e9d3 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 17:13:59 +0100 Subject: [PATCH 25/33] fix: mesh update --- multi_user/bl_types/bl_mesh.py | 1 + multi_user/bl_types/bl_scene.py | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index d6a7a3b..5d772cf 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -120,6 +120,7 @@ class BlMesh(BlDatablock): target.uv_layers[layer].data.foreach_set('uv', uv_buffer) target.validate() + target.update() def dump_implementation(self, data, pointer=None): diff --git a/multi_user/bl_types/bl_scene.py b/multi_user/bl_types/bl_scene.py index 48ad025..5b7b9e5 100644 --- a/multi_user/bl_types/bl_scene.py +++ b/multi_user/bl_types/bl_scene.py @@ -73,7 +73,13 @@ class BlScene(BlDatablock): scene_dumper = utils.dump_anything.Dumper() scene_dumper.depth = 1 - scene_dumper.include_filter = ['name','world', 'id', 'camera', 'grease_pencil'] + scene_dumper.include_filter = [ + 'name', + 'world', + 'id', + 'camera', + 'grease_pencil' + ] data = scene_dumper.dump(pointer) scene_dumper.depth = 3 From ea9ee4ead1d351ada5fde2212353e7a45dc6fb5e Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 17:23:42 +0100 Subject: [PATCH 
26/33] fix: mesh materials --- multi_user/bl_types/bl_mesh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 5d772cf..185eca0 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -47,7 +47,7 @@ class BlMesh(BlDatablock): utils.dump_anything.load(target, data) # MATERIAL SLOTS - i = 0 + target.materials.clear() for m in data["material_list"]: target.materials.append(bpy.data.materials[m]) From d19932cc3b17391b2b533e8be51968abd90ec765 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 17:30:45 +0100 Subject: [PATCH 27/33] refactor: change default timers for mesh and gpencil --- multi_user/bl_types/bl_gpencil.py | 8 ++++++-- multi_user/bl_types/bl_mesh.py | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index a6c032d..79ada32 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ b/multi_user/bl_types/bl_gpencil.py @@ -78,6 +78,7 @@ def dump_stroke(stroke): return dumped_stroke + def load_stroke(stroke_data, stroke): """ Load a grease pencil stroke from a dict @@ -125,6 +126,7 @@ def dump_frame(frame): return dumped_frame + def load_frame(frame_data, frame): """ Load a grease pencil frame from a dict @@ -144,6 +146,7 @@ def load_frame(frame_data, frame): target_stroke = frame.strokes.new() load_stroke(stroke_data, target_stroke) + def dump_layer(layer): """ Dump a grease pencil layer @@ -199,6 +202,7 @@ def dump_layer(layer): return dumped_layer + def load_layer(layer_data, layer): """ Load a grease pencil layer from a dict @@ -220,8 +224,8 @@ def load_layer(layer_data, layer): class BlGpencil(BlDatablock): bl_id = "grease_pencils" bl_class = bpy.types.GreasePencil - bl_delay_refresh = 5 - bl_delay_apply = 5 + bl_delay_refresh = 2 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'GREASEPENCIL' diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 185eca0..51ca774 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ -32,8 +32,8 @@ logger = logging.getLogger(__name__) class BlMesh(BlDatablock): bl_id = "meshes" bl_class = bpy.types.Mesh - bl_delay_refresh = 10 - bl_delay_apply = 10 + bl_delay_refresh = 2 + bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'MESH_DATA' From cab4a8876b1cd2efcf571c3f933555d4e38ef99b Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 18:51:19 +0100 Subject: [PATCH 28/33] feat: CurveMapping support --- multi_user/libs/dump_anything.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/multi_user/libs/dump_anything.py b/multi_user/libs/dump_anything.py index b580779..d686085 100644 --- a/multi_user/libs/dump_anything.py +++ b/multi_user/libs/dump_anything.py @@ -67,7 +67,7 @@ def _load_filter_type(t, use_bl_rna=True): if use_bl_rna and x.bl_rna_property: return isinstance(x.bl_rna_property, t) else: - isinstance(x.read(), t) + return isinstance(x.read(), t) return filter_function @@ -370,6 +370,24 @@ class Loader: dumped_element ) + def _load_curve_mapping(self, element, dump): + mapping = element.read() + # cleanup existing curve + for curve in mapping.curves: + for idx in range(len(curve.points)): + if idx == 0: + break + + curve.points.remove(curve.points[1]) + for curve_index, curve in dump['curves'].items(): + for point_idx, point in curve['points'].items(): + pos = point['location'] + + if len(mapping.curves[curve_index].points) == 1: 
+ mapping.curves[curve_index].points[int(point_idx)].location = pos + else: + mapping.curves[curve_index].points.new(pos[0],pos[1]) + def _load_pointer(self, pointer, dump): rna_property_type = pointer.bl_rna_property.fixed_type if not rna_property_type: @@ -434,6 +452,7 @@ class Loader: (_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector), (_load_filter_type(mathutils.Quaternion, use_bl_rna=False), self._load_quaternion), (_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler), + (_load_filter_type(T.CurveMapping, use_bl_rna=False), self._load_curve_mapping), (_load_filter_type(T.FloatProperty), self._load_identity), (_load_filter_type(T.StringProperty), self._load_identity), (_load_filter_type(T.EnumProperty), self._load_identity), From d7e47e5c14d317a0ae597f2cc85f3d026324e628 Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 18:51:35 +0100 Subject: [PATCH 29/33] refactor: clean material --- multi_user/bl_types/bl_material.py | 54 +++++++----------------------- multi_user/bl_types/bl_world.py | 2 +- 2 files changed, 13 insertions(+), 43 deletions(-) diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index b56d3c4..fb84a63 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -25,55 +25,25 @@ from ..libs import dump_anything from .bl_datablock import BlDatablock logger = logging.getLogger(__name__) -def clean_color_ramp(target_ramp): - # clear existing - try: - for key in target_ramp.elements: - target_ramp.elements.remove(key) - except: - pass - -def load_mapping(target_apping, source_mapping): - # clear existing curves - for curve in target_apping.curves: - for point in curve.points: - try: - curve.remove(point) - except: - continue - - # Load curves - for curve in source_mapping['curves']: - for point in source_mapping['curves'][curve]['points']: - pos = source_mapping['curves'][curve]['points'][point]['location'] - target_apping.curves[curve].points.new(pos[0],pos[1]) +def load_node(node_data, node_tree): + """ Load a node into a node_tree from a dict -def load_node(target_node_tree, source): - target_node = target_node_tree.nodes.get(source["name"]) + :arg node_data: dumped node data + :type node_data: dict + :arg node_tree: target node_tree + :type node_tree: bpy.types.NodeTree + """ + target_node = node_tree.nodes.new(type=node_data["bl_idname"]) - if target_node is None: - node_type = source["bl_idname"] + dump_anything.load(target_node, node_data) - target_node = target_node_tree.nodes.new(type=node_type) - - # Clean color ramp before loading it - if source['type'] == 'VALTORGB': - clean_color_ramp(target_node.color_ramp) - if source['type'] == 'CURVE_RGB': - load_mapping(target_node.mapping, source['mapping']) - dump_anything.load( - target_node, - source) - - if source['type'] == 'TEX_IMAGE': - target_node.image = bpy.data.images[source['image']] - for input in source["inputs"]: + for input in node_data["inputs"]: if hasattr(target_node.inputs[input], "default_value"): try: - target_node.inputs[input].default_value = source["inputs"][input]["default_value"] + target_node.inputs[input].default_value = node_data["inputs"][input]["default_value"] except: logger.error("{} not supported, skipping".format(input)) @@ -144,7 +114,7 @@ class BlMaterial(BlDatablock): # Load nodes for node in data["node_tree"]["nodes"]: - load_node(target.node_tree, data["node_tree"]["nodes"][node]) + load_node(data["node_tree"]["nodes"][node], target.node_tree) # Load nodes links 
target.node_tree.links.clear() diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index 21dace2..8f79dc1 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -43,7 +43,7 @@ class BlWorld(BlDatablock): target.node_tree.nodes.clear() for node in data["node_tree"]["nodes"]: - load_node(target.node_tree, data["node_tree"]["nodes"][node]) + load_node(data["node_tree"]["nodes"][node], target.node_tree) # Load nodes links target.node_tree.links.clear() From 3f0c31d771c5239cdf49e93eecdc3139f1e7dd4b Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 19:10:26 +0100 Subject: [PATCH 30/33] refactor: cleanup implementation --- multi_user/bl_types/bl_material.py | 59 ++++++++++++++++++------------ 1 file changed, 35 insertions(+), 24 deletions(-) diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index fb84a63..228283d 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -126,6 +126,13 @@ class BlMaterial(BlDatablock): mat_dumper = dump_anything.Dumper() mat_dumper.depth = 2 mat_dumper.exclude_filter = [ + "is_embed_data", + "is_evaluated", + "name_full", + "bl_description", + "bl_icon", + "bl_idname", + "bl_label", "preview", "original", "uuid", @@ -134,40 +141,44 @@ class BlMaterial(BlDatablock): "line_color", "view_center", ] - node_dumper = dump_anything.Dumper() - node_dumper.depth = 1 - node_dumper.exclude_filter = [ - "dimensions", - "show_expanded" - "select", - "bl_height_min", - "bl_height_max", - "bl_width_min", - "bl_width_max", - "bl_width_default", - "hide", - "show_options", - "show_tetxures", - "show_preview", - "outputs", - "width_hidden" - ] - input_dumper = dump_anything.Dumper() - input_dumper.depth = 2 - input_dumper.include_filter = ["default_value"] - data = mat_dumper.dump(pointer) if pointer.use_nodes: nodes = {} - + node_dumper = dump_anything.Dumper() + node_dumper.depth = 1 + node_dumper.exclude_filter = [ + "dimensions", + "show_expanded", + "name_full", + "select", + "bl_height_min", + "bl_height_max", + "bl_width_min", + "bl_width_max", + "type", + "bl_icon", + "bl_width_default", + "bl_static_type", + "show_tetxure", + "hide", + "show_options", + "show_preview", + "outputs", + "width_hidden" + ] for node in pointer.node_tree.nodes: + nodes[node.name] = node_dumper.dump(node) if hasattr(node, 'inputs'): nodes[node.name]['inputs'] = {} - for i in node.inputs: + for i in node.inputs: + input_dumper = dump_anything.Dumper() + input_dumper.depth = 2 + input_dumper.include_filter = ["default_value"] + if hasattr(i, 'default_value'): nodes[node.name]['inputs'][i.name] = input_dumper.dump( i) From 98d86c050b3999031021b70de9404e37c561758b Mon Sep 17 00:00:00 2001 From: Swann Date: Thu, 26 Mar 2020 21:11:21 +0100 Subject: [PATCH 31/33] feat: update submodules --- multi_user/libs/replication | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/multi_user/libs/replication b/multi_user/libs/replication index 42b3a31..70b2d24 160000 --- a/multi_user/libs/replication +++ b/multi_user/libs/replication @@ -1 +1 @@ -Subproject commit 42b3a31b8ec44115f8fbc3697bdeeee74e608c6f +Subproject commit 70b2d24d15690540c0e0bee43bd82bf338f986c9 From ef9e9dbae8791773ca4c7ae8667def354b661ce1 Mon Sep 17 00:00:00 2001 From: Swann Date: Fri, 27 Mar 2020 15:50:25 +0100 Subject: [PATCH 32/33] feat: action refactoring --- multi_user/bl_types/bl_action.py | 261 +++++++++++++++++++++---------- 1 file changed, 179 insertions(+), 82 deletions(-) diff --git 
a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index d903a4f..7d95e87 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -19,11 +19,179 @@ import bpy import mathutils import copy +import logging +import numpy as np +from enum import Enum from .. import utils from .bl_datablock import BlDatablock -# WIP +logger = logging.getLogger(__name__) + +ENUM_EASING_TYPE = [ + 'AUTO', + 'EAS_IN', + 'EASE_OUT', + 'EASE_IN_OUT'] + +ENUM_HANDLE_TYPE = [ + 'FREE', + 'ALIGNED', + 'VECTOR', + 'AUTO', + 'AUTO_CLAMPED'] + +ENUM_INTERPOLATION_TYPE = [ + 'CONSTANT', + 'LINEAR', + 'BEZIER', + 'SINE', + 'QUAD', + 'CUBIC', + 'QUART', + 'QUINT', + 'EXPO', + 'CIRC', + 'BACK', + 'BOUNCE', + 'ELASTIC'] + +ENUM_KEY_TYPE = [ + 'KEYFRAME', + 'BREAKDOWN', + 'MOVING_HOLD', + 'EXTREME', + 'JITTER'] + +#TODO: Automatic enum and numpy dump and loading + +def dump_fcurve(fcurve, use_numpy=True): + """ Dump a sigle curve to a dict + + :arg fcurve: fcurve to dump + :type fcurve: bpy.types.FCurve + :arg use_numpy: use numpy to eccelerate dump + :type use_numpy: bool + :return: dict + """ + fcurve_data = { + "data_path": fcurve.data_path, + "dumped_array_index": fcurve.array_index, + "use_numpy": use_numpy + } + + if use_numpy: + keyframes_count = len(fcurve.keyframe_points) + + k_amplitude = np.empty(keyframes_count, dtype=np.float64) + fcurve.keyframe_points.foreach_get('amplitude', k_amplitude) + k_co = np.empty(keyframes_count*2, dtype=np.float64) + fcurve.keyframe_points.foreach_get('co', k_co) + k_back = np.empty(keyframes_count, dtype=np.float64) + fcurve.keyframe_points.foreach_get('back', k_back) + k_handle_left = np.empty(keyframes_count*2, dtype=np.float64) + fcurve.keyframe_points.foreach_get('handle_left', k_handle_left) + k_handle_right = np.empty(keyframes_count*2, dtype=np.float64) + fcurve.keyframe_points.foreach_get('handle_right', k_handle_right) + + fcurve_data['amplitude'] = k_amplitude.tobytes() + fcurve_data['co'] = k_co.tobytes() + fcurve_data['back'] = k_back.tobytes() + fcurve_data['handle_left'] = k_handle_left.tobytes() + fcurve_data['handle_right'] = k_handle_right.tobytes() + + fcurve_data['easing'] = [ENUM_EASING_TYPE.index(p.easing) for p in fcurve.keyframe_points] + fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(p.handle_left_type) for p in fcurve.keyframe_points] + fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(p.handle_right_type) for p in fcurve.keyframe_points] + fcurve_data['type'] = [ENUM_KEY_TYPE.index(p.type) for p in fcurve.keyframe_points] + fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(p.interpolation) for p in fcurve.keyframe_points] + + else: # Legacy method + dumper = utils.dump_anything.Dumper() + fcurve_data["keyframe_points"] = [] + + for k in fcurve.keyframe_points: + fcurve_data["keyframe_points"].append( + dumper.dump(k) + ) + + return fcurve_data + +def load_fcurve(fcurve_data, fcurve): + """ Load a dumped fcurve + + :arg fcurve_data: a dumped fcurve + :type fcurve_data: dict + :arg fcurve: fcurve to dump + :type fcurve: bpy.types.FCurve + """ + use_numpy = fcurve_data.get('use_numpy') + + keyframe_points = fcurve.keyframe_points + + # Remove all keyframe points + for i in range(len(keyframe_points)): + keyframe_points.remove(keyframe_points[0], fast=True) + + if use_numpy: + k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64) + + keyframe_count = len(k_amplitude) + + k_co = np.frombuffer(fcurve_data['co'], dtype=np.float64) + k_back = 
np.frombuffer(fcurve_data['back'], dtype=np.float64) + k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64) + k_handle_left= np.frombuffer(fcurve_data['handle_left'], dtype=np.float64) + k_handle_right= np.frombuffer(fcurve_data['handle_right'], dtype=np.float64) + + keyframe_points.add(keyframe_count) + + keyframe_points.foreach_set('co',k_co) + keyframe_points.foreach_set('back',k_back) + keyframe_points.foreach_set('amplitude',k_amplitude) + keyframe_points.foreach_set('handle_left',k_handle_left) + keyframe_points.foreach_set('handle_right',k_handle_right) + + for index, point in enumerate(keyframe_points): + point.type = ENUM_KEY_TYPE[fcurve_data['type'][index]] + point.easing = ENUM_EASING_TYPE[fcurve_data['easing'][index]] + point.handle_left_type = ENUM_HANDLE_TYPE[fcurve_data['handle_left_type'][index]] + point.handle_right_type = ENUM_HANDLE_TYPE[fcurve_data['handle_right_type'][index]] + point.interpolation = ENUM_INTERPOLATION_TYPE[fcurve_data['interpolation'][index]] + + else: + # paste dumped keyframes + for dumped_keyframe_point in fcurve_data["keyframe_points"]: + if dumped_keyframe_point['type'] == '': + dumped_keyframe_point['type'] = 'KEYFRAME' + + new_kf = keyframe_points.insert( + dumped_keyframe_point["co"][0], + dumped_keyframe_point["co"][1], + options={'FAST', 'REPLACE'} + ) + + keycache = copy.copy(dumped_keyframe_point) + keycache = utils.dump_anything.remove_items_from_dict( + keycache, + ["co", "handle_left", "handle_right", 'type'] + ) + + utils.dump_anything.load(new_kf, keycache) + + new_kf.type = dumped_keyframe_point['type'] + new_kf.handle_left = [ + dumped_keyframe_point["handle_left"][0], + dumped_keyframe_point["handle_left"][1] + ] + new_kf.handle_right = [ + dumped_keyframe_point["handle_right"][0], + dumped_keyframe_point["handle_right"][1] + ] + + fcurve.update() + + class BlAction(BlDatablock): bl_id = "actions" @@ -32,30 +200,11 @@ class BlAction(BlDatablock): bl_delay_apply = 1 bl_automatic_push = True bl_icon = 'ACTION_TWEAK' - + def _construct(self, data): return bpy.data.actions.new(data["name"]) def _load(self, data, target): - begin_frame = 100000 - end_frame = -100000 - - for dumped_fcurve in data["fcurves"]: - begin_frame = min( - begin_frame, - min( - [begin_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]] - ) - ) - end_frame = max( - end_frame, - max( - [end_frame] + [dkp["co"][0] for dkp in dumped_fcurve["keyframe_points"]] - ) - ) - begin_frame = 0 - - loader = utils.dump_anything.Loader() for dumped_fcurve in data["fcurves"]: dumped_data_path = dumped_fcurve["data_path"] dumped_array_index = dumped_fcurve["dumped_array_index"] @@ -65,53 +214,14 @@ class BlAction(BlDatablock): if fcurve is None: fcurve = target.fcurves.new(dumped_data_path, index=dumped_array_index) - - # remove keyframes within dumped_action range - for keyframe in reversed(fcurve.keyframe_points): - if end_frame >= (keyframe.co[0] + begin_frame ) >= begin_frame: - fcurve.keyframe_points.remove(keyframe, fast=True) - - # paste dumped keyframes - for dumped_keyframe_point in dumped_fcurve["keyframe_points"]: - if dumped_keyframe_point['type'] == '': - dumped_keyframe_point['type'] = 'KEYFRAME' - - new_kf = fcurve.keyframe_points.insert( - dumped_keyframe_point["co"][0] - begin_frame, - dumped_keyframe_point["co"][1], - options={'FAST', 'REPLACE'} - ) - - keycache = copy.copy(dumped_keyframe_point) - keycache = utils.dump_anything.remove_items_from_dict( - keycache, - ["co", "handle_left", "handle_right",'type'] - ) - - 
loader.load( - new_kf, - keycache - ) - - new_kf.type = dumped_keyframe_point['type'] - new_kf.handle_left = [ - dumped_keyframe_point["handle_left"][0] - begin_frame, - dumped_keyframe_point["handle_left"][1] - ] - new_kf.handle_right = [ - dumped_keyframe_point["handle_right"][0] - begin_frame, - dumped_keyframe_point["handle_right"][1] - ] - - # clearing (needed for blender to update well) - if len(fcurve.keyframe_points) == 0: - target.fcurves.remove(fcurve) - target.id_root= data['id_root'] + load_fcurve(dumped_fcurve, fcurve) + target.id_root = data['id_root'] def _dump(self, pointer=None): + start = utils.current_milli_time() assert(pointer) dumper = utils.dump_anything.Dumper() - dumper.exclude_filter =[ + dumper.exclude_filter = [ 'name_full', 'original', 'use_fake_user', @@ -124,27 +234,14 @@ class BlAction(BlDatablock): 'users' ] dumper.depth = 1 - data = dumper.dump(pointer) + data = dumper.dump(pointer) - data["fcurves"] = [] - dumper.depth = 2 + for fcurve in self.pointer.fcurves: - fc = { - "data_path": fcurve.data_path, - "dumped_array_index": fcurve.array_index, - "keyframe_points": [] - } - - for k in fcurve.keyframe_points: - fc["keyframe_points"].append( - dumper.dump(k) - ) - - data["fcurves"].append(fc) + data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True)) + logger.error( + f"{self.pointer.name} dumping time: {utils.current_milli_time()-start} ms") return data - - - From 289a49251e1288d1b3fe78e1e8a24e9febec1f76 Mon Sep 17 00:00:00 2001 From: Swann Date: Fri, 27 Mar 2020 18:18:36 +0100 Subject: [PATCH 33/33] refactor: cleanup --- multi_user/bl_types/bl_action.py | 11 ++++++----- multi_user/bl_types/bl_armature.py | 2 -- multi_user/bl_types/bl_lightprobe.py | 8 ++++---- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index 7d95e87..2ca2cf3 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -19,14 +19,12 @@ import bpy import mathutils import copy -import logging import numpy as np from enum import Enum from .. import utils from .bl_datablock import BlDatablock -logger = logging.getLogger(__name__) ENUM_EASING_TYPE = [ 'AUTO', @@ -34,6 +32,7 @@ ENUM_EASING_TYPE = [ 'EASE_OUT', 'EASE_IN_OUT'] + ENUM_HANDLE_TYPE = [ 'FREE', 'ALIGNED', @@ -41,6 +40,7 @@ ENUM_HANDLE_TYPE = [ 'AUTO', 'AUTO_CLAMPED'] + ENUM_INTERPOLATION_TYPE = [ 'CONSTANT', 'LINEAR', @@ -56,6 +56,7 @@ ENUM_INTERPOLATION_TYPE = [ 'BOUNCE', 'ELASTIC'] + ENUM_KEY_TYPE = [ 'KEYFRAME', 'BREAKDOWN', @@ -63,8 +64,10 @@ ENUM_KEY_TYPE = [ 'EXTREME', 'JITTER'] + #TODO: Automatic enum and numpy dump and loading + def dump_fcurve(fcurve, use_numpy=True): """ Dump a sigle curve to a dict @@ -218,7 +221,6 @@ class BlAction(BlDatablock): target.id_root = data['id_root'] def _dump(self, pointer=None): - start = utils.current_milli_time() assert(pointer) dumper = utils.dump_anything.Dumper() dumper.exclude_filter = [ @@ -241,7 +243,6 @@ class BlAction(BlDatablock): for fcurve in self.pointer.fcurves: data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True)) - logger.error( - f"{self.pointer.name} dumping time: {utils.current_milli_time()-start} ms") + return data diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index c4abaf7..81d464f 100644 --- a/multi_user/bl_types/bl_armature.py +++ b/multi_user/bl_types/bl_armature.py @@ -23,8 +23,6 @@ from .. import utils from .. 
import presence, operators from .bl_datablock import BlDatablock -# WIP - class BlArmature(BlDatablock): bl_id = "armatures" diff --git a/multi_user/bl_types/bl_lightprobe.py b/multi_user/bl_types/bl_lightprobe.py index 1a3968a..abd20e9 100644 --- a/multi_user/bl_types/bl_lightprobe.py +++ b/multi_user/bl_types/bl_lightprobe.py @@ -33,16 +33,16 @@ class BlLightprobe(BlDatablock): bl_automatic_push = True bl_icon = 'LIGHTPROBE_GRID' - def load_implementation(self, data, target): - utils.dump_anything.load(target, data) - def _construct(self, data): type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type'] # See https://developer.blender.org/D6396 if bpy.app.version[1] >= 83: return bpy.data.lightprobes.new(data["name"], type) else: - logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") + logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396") + + def load_implementation(self, data, target): + utils.dump_anything.load(target, data) def dump_implementation(self, data, pointer=None): assert(pointer)
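The numpy-based F-Curve serialization introduced in PATCH 32/33 and tidied in PATCH 33/33 can be exercised on its own, outside the replication machinery. A rough sketch, assuming the add-on modules are importable and an action with at least one F-Curve exists (the name "CubeAction" is only illustrative):

    import bpy
    from multi_user.bl_types.bl_action import dump_fcurve, load_fcurve

    src_action = bpy.data.actions["CubeAction"]    # hypothetical existing action
    src = src_action.fcurves[0]

    data = dump_fcurve(src, use_numpy=True)        # co/handles as raw float64 buffers, enums as index lists

    dst_action = bpy.data.actions.new("CubeAction.copy")
    dst = dst_action.fcurves.new(data["data_path"], index=data["dumped_array_index"])
    load_fcurve(data, dst)

    assert len(dst.keyframe_points) == len(src.keyframe_points)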