diff --git a/multi_user/__init__.py b/multi_user/__init__.py index 7168169..4a56a6e 100644 --- a/multi_user/__init__.py +++ b/multi_user/__init__.py @@ -44,7 +44,7 @@ from . import environment DEPENDENCIES = { - ("replication", '0.1.17'), + ("replication", '0.1.25'), } diff --git a/multi_user/bl_types/__init__.py b/multi_user/bl_types/__init__.py index 11e41d5..96a5e38 100644 --- a/multi_user/bl_types/__init__.py +++ b/multi_user/bl_types/__init__.py @@ -39,7 +39,7 @@ __all__ = [ 'bl_font', 'bl_sound', 'bl_file', - 'bl_sequencer', + # 'bl_sequencer', 'bl_node_group', 'bl_texture', ] # Order here defines execution order diff --git a/multi_user/bl_types/bl_action.py b/multi_user/bl_types/bl_action.py index 0bee448..8672fb6 100644 --- a/multi_user/bl_types/bl_action.py +++ b/multi_user/bl_types/bl_action.py @@ -132,9 +132,6 @@ def load_fcurve(fcurve_data, fcurve): class BlAction(BlDatablock): bl_id = "actions" bl_class = bpy.types.Action - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'ACTION_TWEAK' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_armature.py b/multi_user/bl_types/bl_armature.py index f106c5b..f39776f 100644 --- a/multi_user/bl_types/bl_armature.py +++ b/multi_user/bl_types/bl_armature.py @@ -38,9 +38,6 @@ def get_roll(bone: bpy.types.Bone) -> float: class BlArmature(BlDatablock): bl_id = "armatures" bl_class = bpy.types.Armature - bl_delay_refresh = 1 - bl_delay_apply = 0 - bl_automatic_push = True bl_check_common = False bl_icon = 'ARMATURE_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_camera.py b/multi_user/bl_types/bl_camera.py index 78b9968..486726c 100644 --- a/multi_user/bl_types/bl_camera.py +++ b/multi_user/bl_types/bl_camera.py @@ -26,9 +26,6 @@ from .bl_datablock import BlDatablock class BlCamera(BlDatablock): bl_id = "cameras" bl_class = bpy.types.Camera - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False 
bl_icon = 'CAMERA_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_collection.py b/multi_user/bl_types/bl_collection.py index 7b78989..6393847 100644 --- a/multi_user/bl_types/bl_collection.py +++ b/multi_user/bl_types/bl_collection.py @@ -85,9 +85,6 @@ class BlCollection(BlDatablock): bl_id = "collections" bl_icon = 'FILE_FOLDER' bl_class = bpy.types.Collection - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = True bl_reload_parent = False @@ -114,6 +111,10 @@ class BlCollection(BlDatablock): # Link childrens load_collection_childrens(data['children'], target) + # FIXME: Find a better way after the replication big refacotoring + # Keep other user from deleting collection object by flushing their history + utils.flush_history() + def _dump_implementation(self, data, instance=None): assert(instance) diff --git a/multi_user/bl_types/bl_curve.py b/multi_user/bl_types/bl_curve.py index 7121b3a..ec7954e 100644 --- a/multi_user/bl_types/bl_curve.py +++ b/multi_user/bl_types/bl_curve.py @@ -27,6 +27,7 @@ from .dump_anything import (Dumper, Loader, np_load_collection, np_dump_collection) from .bl_datablock import get_datablock_from_uuid +from .bl_material import dump_materials_slots, load_materials_slots SPLINE_BEZIER_POINT = [ # "handle_left_type", @@ -136,9 +137,6 @@ SPLINE_METADATA = [ class BlCurve(BlDatablock): bl_id = "curves" bl_class = bpy.types.Curve - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'CURVE_DATA' bl_reload_parent = False @@ -173,18 +171,9 @@ class BlCurve(BlDatablock): loader.load(new_spline, spline) # MATERIAL SLOTS - target.materials.clear() - for mat_uuid, mat_name in data["material_list"]: - mat_ref = None - if mat_uuid is not None: - mat_ref = get_datablock_from_uuid(mat_uuid, None) - else: - mat_ref = bpy.data.materials.get(mat_name, None) - - if mat_ref is None: - raise Exception("Material doesn't exist") - - 
target.materials.append(mat_ref) + src_materials = data.get('materials', None) + if src_materials: + load_materials_slots(src_materials, target.materials) def _dump_implementation(self, data, instance=None): assert(instance) @@ -229,8 +218,7 @@ class BlCurve(BlDatablock): elif isinstance(instance, T.Curve): data['type'] = 'CURVE' - data['material_list'] = [(m.uuid, m.name) - for m in instance.materials if m] + data['materials'] = dump_materials_slots(instance.materials) return data diff --git a/multi_user/bl_types/bl_datablock.py b/multi_user/bl_types/bl_datablock.py index 022bbc3..b7cc450 100644 --- a/multi_user/bl_types/bl_datablock.py +++ b/multi_user/bl_types/bl_datablock.py @@ -106,9 +106,6 @@ class BlDatablock(ReplicatedDatablock): bl_id : blender internal storage identifier bl_class : blender internal type - bl_delay_refresh : refresh rate in second for observers - bl_delay_apply : refresh rate in sec for apply - bl_automatic_push : boolean bl_icon : type icon (blender icon name) bl_check_common: enable check even in common rights bl_reload_parent: reload parent @@ -129,28 +126,30 @@ class BlDatablock(ReplicatedDatablock): if instance and hasattr(instance, 'uuid'): instance.uuid = self.uuid - if logging.getLogger().level == logging.DEBUG: - self.diff_method = DIFF_JSON - else: - self.diff_method = DIFF_BINARY - - def resolve(self): + def resolve(self, construct = True): datablock_ref = None datablock_root = getattr(bpy.data, self.bl_id) - datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) + + try: + datablock_ref = datablock_root[self.data['name']] + except Exception: + pass if not datablock_ref: - try: - datablock_ref = datablock_root[self.data['name']] - except Exception: + datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root) + + if construct and not datablock_ref: name = self.data.get('name') logging.debug(f"Constructing {name}") datablock_ref = self._construct(data=self.data) - if datablock_ref: - 
setattr(datablock_ref, 'uuid', self.uuid) - - self.instance = datablock_ref + if datablock_ref is not None: + setattr(datablock_ref, 'uuid', self.uuid) + self.instance = datablock_ref + return True + else: + return False + def remove_instance(self): """ diff --git a/multi_user/bl_types/bl_file.py b/multi_user/bl_types/bl_file.py index 1dd8919..5801306 100644 --- a/multi_user/bl_types/bl_file.py +++ b/multi_user/bl_types/bl_file.py @@ -54,9 +54,6 @@ class BlFile(ReplicatedDatablock): bl_id = 'file' bl_name = "file" bl_class = Path - bl_delay_refresh = 2 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'FILE' bl_reload_parent = True @@ -69,9 +66,8 @@ class BlFile(ReplicatedDatablock): raise FileNotFoundError(str(self.instance)) self.preferences = utils.get_preferences() - self.diff_method = DIFF_BINARY - def resolve(self): + def resolve(self, construct = True): if self.data: self.instance = Path(get_filepath(self.data['name'])) @@ -79,8 +75,8 @@ class BlFile(ReplicatedDatablock): logging.debug("File don't exist, loading it.") self._load(self.data, self.instance) - def push(self, socket, identity=None): - super().push(socket, identity=None) + def push(self, socket, identity=None, check_data=False): + super().push(socket, identity=None, check_data=False) if self.preferences.clear_memory_filecache: del self.data['file'] diff --git a/multi_user/bl_types/bl_font.py b/multi_user/bl_types/bl_font.py index 0f3a532..c10ba10 100644 --- a/multi_user/bl_types/bl_font.py +++ b/multi_user/bl_types/bl_font.py @@ -30,9 +30,6 @@ from .dump_anything import Dumper, Loader class BlFont(BlDatablock): bl_id = "fonts" bl_class = bpy.types.VectorFont - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'FILE_FONT' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_gpencil.py b/multi_user/bl_types/bl_gpencil.py index be6b649..02b55d6 100644 --- a/multi_user/bl_types/bl_gpencil.py +++ 
b/multi_user/bl_types/bl_gpencil.py @@ -109,7 +109,9 @@ def load_stroke(stroke_data, stroke): stroke.points.add(stroke_data["p_count"]) np_load_collection(stroke_data['points'], stroke.points, STROKE_POINT) - + # HACK: Temporary fix to trigger a BKE_gpencil_stroke_geometry_update to + # fix fill issues + stroke.uv_scale = stroke_data["uv_scale"] def dump_frame(frame): """ Dump a grease pencil frame to a dict @@ -204,7 +206,7 @@ def dump_layer(layer): for frame in layer.frames: dumped_layer['frames'].append(dump_frame(frame)) - + return dumped_layer @@ -226,13 +228,9 @@ def load_layer(layer_data, layer): load_frame(frame_data, target_frame) - class BlGpencil(BlDatablock): bl_id = "grease_pencils" bl_class = bpy.types.GreasePencil - bl_delay_refresh = 2 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'GREASEPENCIL' bl_reload_parent = False @@ -265,6 +263,7 @@ class BlGpencil(BlDatablock): load_layer(layer_data, target_layer) + target.layers.update() @@ -287,6 +286,8 @@ class BlGpencil(BlDatablock): for layer in instance.layers: data['layers'][layer.info] = dump_layer(layer) + data["active_layers"] = instance.layers.active.info + data["eval_frame"] = bpy.context.scene.frame_current return data def _resolve_deps_implementation(self): @@ -296,3 +297,18 @@ class BlGpencil(BlDatablock): deps.append(material) return deps + + def layer_changed(self): + return self.instance.layers.active.info != self.data["active_layers"] + + def frame_changed(self): + return bpy.context.scene.frame_current != self.data["eval_frame"] + + def diff(self): + if self.layer_changed() \ + or self.frame_changed() \ + or bpy.context.mode == 'OBJECT' \ + or self.preferences.sync_flags.sync_during_editmode: + return super().diff() + else: + return False diff --git a/multi_user/bl_types/bl_image.py b/multi_user/bl_types/bl_image.py index 6435551..c559938 100644 --- a/multi_user/bl_types/bl_image.py +++ b/multi_user/bl_types/bl_image.py @@ -51,9 +51,6 @@ format_to_ext = 
{ class BlImage(BlDatablock): bl_id = "images" bl_class = bpy.types.Image - bl_delay_refresh = 2 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'IMAGE_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_lattice.py b/multi_user/bl_types/bl_lattice.py index ecb527d..64560ac 100644 --- a/multi_user/bl_types/bl_lattice.py +++ b/multi_user/bl_types/bl_lattice.py @@ -29,9 +29,6 @@ POINT = ['co', 'weight_softbody', 'co_deform'] class BlLattice(BlDatablock): bl_id = "lattices" bl_class = bpy.types.Lattice - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'LATTICE_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_library.py b/multi_user/bl_types/bl_library.py index 7e4b837..33a111e 100644 --- a/multi_user/bl_types/bl_library.py +++ b/multi_user/bl_types/bl_library.py @@ -26,9 +26,6 @@ from .bl_datablock import BlDatablock class BlLibrary(BlDatablock): bl_id = "libraries" bl_class = bpy.types.Library - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'LIBRARY_DATA_DIRECT' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_light.py b/multi_user/bl_types/bl_light.py index c0b7530..2bb1095 100644 --- a/multi_user/bl_types/bl_light.py +++ b/multi_user/bl_types/bl_light.py @@ -26,9 +26,6 @@ from .bl_datablock import BlDatablock class BlLight(BlDatablock): bl_id = "lights" bl_class = bpy.types.Light - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'LIGHT_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_lightprobe.py b/multi_user/bl_types/bl_lightprobe.py index 00b72f3..61052d0 100644 --- a/multi_user/bl_types/bl_lightprobe.py +++ b/multi_user/bl_types/bl_lightprobe.py @@ -27,9 +27,6 @@ from .bl_datablock import BlDatablock class BlLightprobe(BlDatablock): bl_id = "lightprobes" bl_class = bpy.types.LightProbe - 
bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'LIGHTPROBE_GRID' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_material.py b/multi_user/bl_types/bl_material.py index 1080912..8e62ed2 100644 --- a/multi_user/bl_types/bl_material.py +++ b/multi_user/bl_types/bl_material.py @@ -29,7 +29,7 @@ from .bl_datablock import BlDatablock, get_datablock_from_uuid NODE_SOCKET_INDEX = re.compile('\[(\d*)\]') -def load_node(node_data, node_tree): +def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree): """ Load a node into a node_tree from a dict :arg node_data: dumped node data @@ -70,9 +70,11 @@ def load_node(node_data, node_tree): try: outputs[idx].default_value = output except: - logging.warning(f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})") + logging.warning( + f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})") else: - logging.warning(f"Node {target_node.name} output length mismatch.") + logging.warning( + f"Node {target_node.name} output length mismatch.") def load_links(links_data, node_tree): @@ -117,7 +119,7 @@ def dump_links(links): return links_data -def dump_node(node): +def dump_node(node: bpy.types.ShaderNode) -> dict: """ Dump a single node to a dict :arg node: target node @@ -155,7 +157,7 @@ def dump_node(node): dumped_node = node_dumper.dump(node) - dump_io_needed = (node.type not in ['REROUTE','OUTPUT_MATERIAL']) + dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL']) if dump_io_needed: io_dumper = Dumper() @@ -166,13 +168,15 @@ def dump_node(node): dumped_node['inputs'] = [] for idx, inpt in enumerate(node.inputs): if hasattr(inpt, 'default_value'): - dumped_node['inputs'].append(io_dumper.dump(inpt.default_value)) + dumped_node['inputs'].append( + io_dumper.dump(inpt.default_value)) if hasattr(node, 'outputs'): dumped_node['outputs'] = [] for idx, output in 
enumerate(node.outputs): if hasattr(output, 'default_value'): - dumped_node['outputs'].append(io_dumper.dump(output.default_value)) + dumped_node['outputs'].append( + io_dumper.dump(output.default_value)) if hasattr(node, 'color_ramp'): ramp_dumper = Dumper() @@ -223,7 +227,7 @@ def dump_shader_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict: return node_tree_data -def dump_node_tree_sockets(sockets: bpy.types.Collection)->dict: +def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict: """ dump sockets of a shader_node_tree :arg target_node_tree: target node_tree @@ -244,6 +248,7 @@ def dump_node_tree_sockets(sockets: bpy.types.Collection)->dict: return sockets_data + def load_node_tree_sockets(sockets: bpy.types.Collection, sockets_data: dict): """ load sockets of a shader_node_tree @@ -263,7 +268,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection, # Check for new sockets for idx, socket_data in enumerate(sockets_data): try: - checked_socket = sockets[idx] + checked_socket = sockets[idx] if checked_socket.name != socket_data[0]: checked_socket.name = socket_data[0] except Exception: @@ -271,7 +276,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection, s['uuid'] = socket_data[2] -def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.ShaderNodeTree)->dict: +def load_shader_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict: """Load a shader node_tree from dumped data :arg node_tree_data: dumped node data @@ -291,7 +296,7 @@ def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.Shader if 'outputs' in node_tree_data: socket_collection = getattr(target_node_tree, 'outputs') - load_node_tree_sockets(socket_collection,node_tree_data['outputs']) + load_node_tree_sockets(socket_collection, node_tree_data['outputs']) # Load nodes for node in node_tree_data["nodes"]: @@ -305,8 +310,11 @@ def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.Shader 
def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list: - has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image) - has_node_group = lambda node : (hasattr(node,'node_tree') and node.node_tree) + def has_image(node): return ( + node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image) + + def has_node_group(node): return ( + hasattr(node, 'node_tree') and node.node_tree) deps = [] @@ -319,12 +327,43 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list: return deps +def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list: + """ Dump material slots collection + + :arg materials: material slots collection to dump + :type materials: bpy.types.bpy_prop_collection + :return: list of tuples (mat_uuid, mat_name) + """ + return [(m.uuid, m.name) for m in materials if m] + + +def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_collection): + """ Load material slots + + :arg src_materials: dumped material collection (ex: object.materials) + :type src_materials: list of tuples (uuid, name) + :arg dst_materials: target material collection pointer + :type dst_materials: bpy.types.bpy_prop_collection + """ + # MATERIAL SLOTS + dst_materials.clear() + + for mat_uuid, mat_name in src_materials: + mat_ref = None + if mat_uuid is not None: + mat_ref = get_datablock_from_uuid(mat_uuid, None) + else: + mat_ref = bpy.data.materials.get(mat_name, None) + + if mat_ref is None: + raise Exception(f"Material {mat_name} doesn't exist") + + dst_materials.append(mat_ref) + + class BlMaterial(BlDatablock): bl_id = "materials" bl_class = bpy.types.Material - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'MATERIAL_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py index 26611e6..9ffda41 100644 --- a/multi_user/bl_types/bl_mesh.py +++ b/multi_user/bl_types/bl_mesh.py @@ 
-26,6 +26,7 @@ from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dum from replication.constants import DIFF_BINARY from replication.exception import ContextError from .bl_datablock import BlDatablock, get_datablock_from_uuid +from .bl_material import dump_materials_slots, load_materials_slots VERTICE = ['co'] @@ -49,9 +50,6 @@ POLYGON = [ class BlMesh(BlDatablock): bl_id = "meshes" bl_class = bpy.types.Mesh - bl_delay_refresh = 2 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'MESH_DATA' bl_reload_parent = False @@ -69,19 +67,9 @@ class BlMesh(BlDatablock): loader.load(target, data) # MATERIAL SLOTS - target.materials.clear() - - for mat_uuid, mat_name in data["material_list"]: - mat_ref = None - if mat_uuid is not None: - mat_ref = get_datablock_from_uuid(mat_uuid, None) - else: - mat_ref = bpy.data.materials.get(mat_name, None) - - if mat_ref is None: - raise Exception("Material doesn't exist") - - target.materials.append(mat_ref) + src_materials = data.get('materials', None) + if src_materials: + load_materials_slots(src_materials, target.materials) # CLEAR GEOMETRY if target.vertices: @@ -126,7 +114,7 @@ class BlMesh(BlDatablock): def _dump_implementation(self, data, instance=None): assert(instance) - if instance.is_editmode and not self.preferences.sync_flags.sync_during_editmode: + if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode: raise ContextError("Mesh is in edit mode") mesh = instance @@ -172,9 +160,8 @@ class BlMesh(BlDatablock): data['vertex_colors'][color_map.name] = {} data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color') - # Fix material index - data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m] - + # Materials + data['materials'] = dump_materials_slots(instance.materials) return data def _resolve_deps_implementation(self): diff --git 
a/multi_user/bl_types/bl_metaball.py b/multi_user/bl_types/bl_metaball.py index 4cc146c..2a156a7 100644 --- a/multi_user/bl_types/bl_metaball.py +++ b/multi_user/bl_types/bl_metaball.py @@ -65,9 +65,6 @@ def load_metaball_elements(elements_data, elements): class BlMetaball(BlDatablock): bl_id = "metaballs" bl_class = bpy.types.MetaBall - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'META_BALL' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_node_group.py b/multi_user/bl_types/bl_node_group.py index 1d38969..a353659 100644 --- a/multi_user/bl_types/bl_node_group.py +++ b/multi_user/bl_types/bl_node_group.py @@ -28,9 +28,6 @@ from .bl_material import (dump_shader_node_tree, class BlNodeGroup(BlDatablock): bl_id = "node_groups" bl_class = bpy.types.ShaderNodeTree - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'NODETREE' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_object.py b/multi_user/bl_types/bl_object.py index 262dd0a..dceb7f6 100644 --- a/multi_user/bl_types/bl_object.py +++ b/multi_user/bl_types/bl_object.py @@ -107,9 +107,6 @@ def find_textures_dependencies(collection): class BlObject(BlDatablock): bl_id = "objects" bl_class = bpy.types.Object - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'OBJECT_DATA' bl_reload_parent = False @@ -135,6 +132,10 @@ class BlObject(BlDatablock): data_uuid, find_data_from_name(data_id), ignore=['images']) # TODO: use resolve_from_id + + if object_data is None and data_uuid: + raise Exception(f"Fail to load object {data['name']}({self.uuid})") + instance = bpy.data.objects.new(object_name, object_data) instance.uuid = self.uuid @@ -372,29 +373,32 @@ class BlObject(BlDatablock): # VERTEx GROUP if len(instance.vertex_groups) > 0: - points_attr = 'vertices' if isinstance( - instance.data, bpy.types.Mesh) else 'points' - vg_data = [] - 
for vg in instance.vertex_groups: - vg_idx = vg.index - dumped_vg = {} - dumped_vg['name'] = vg.name + if isinstance( instance.data, bpy.types.GreasePencil): + logging.warning("Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161") + else: + points_attr = 'vertices' if isinstance( + instance.data, bpy.types.Mesh) else 'points' + vg_data = [] + for vg in instance.vertex_groups: + vg_idx = vg.index + dumped_vg = {} + dumped_vg['name'] = vg.name - vertices = [] + vertices = [] - for i, v in enumerate(getattr(instance.data, points_attr)): - for vg in v.groups: - if vg.group == vg_idx: - vertices.append({ - 'index': i, - 'weight': vg.weight - }) + for i, v in enumerate(getattr(instance.data, points_attr)): + for vg in v.groups: + if vg.group == vg_idx: + vertices.append({ + 'index': i, + 'weight': vg.weight + }) - dumped_vg['vertices'] = vertices + dumped_vg['vertices'] = vertices - vg_data.append(dumped_vg) + vg_data.append(dumped_vg) - data['vertex_groups'] = vg_data + data['vertex_groups'] = vg_data # SHAPE KEYS object_data = instance.data diff --git a/multi_user/bl_types/bl_scene.py b/multi_user/bl_types/bl_scene.py index 575f515..08a3d69 100644 --- a/multi_user/bl_types/bl_scene.py +++ b/multi_user/bl_types/bl_scene.py @@ -17,16 +17,19 @@ import logging +from pathlib import Path import bpy import mathutils from deepdiff import DeepDiff from replication.constants import DIFF_JSON, MODIFIED +from ..utils import flush_history from .bl_collection import (dump_collection_children, dump_collection_objects, load_collection_childrens, load_collection_objects, resolve_collection_dependencies) from .bl_datablock import BlDatablock +from .bl_file import get_filepath from .dump_anything import Dumper, Loader RENDER_SETTINGS = [ @@ -265,28 +268,116 @@ VIEW_SETTINGS = [ ] +def dump_sequence(sequence: bpy.types.Sequence) -> dict: + """ Dump a sequence to a dict + + :arg sequence: sequence to dump + :type sequence: 
bpy.types.Sequence + :return dict: + """ + dumper = Dumper() + dumper.exclude_filter = [ + 'lock', + 'select', + 'select_left_handle', + 'select_right_handle', + 'strobe' + ] + dumper.depth = 1 + data = dumper.dump(sequence) + # TODO: Support multiple images + if sequence.type == 'IMAGE': + data['filenames'] = [e.filename for e in sequence.elements] + # Effect strip inputs + input_count = getattr(sequence, 'input_count', None) + if input_count: + for n in range(input_count): + input_name = f"input_{n+1}" + data[input_name] = getattr(sequence, input_name).name + + return data + + +def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor): + """ Load sequence from dumped data + + :arg sequence_data: sequence to dump + :type sequence_data:dict + :arg sequence_editor: root sequence editor + :type sequence_editor: bpy.types.SequenceEditor + """ + strip_type = sequence_data.get('type') + strip_name = sequence_data.get('name') + strip_channel = sequence_data.get('channel') + strip_frame_start = sequence_data.get('frame_start') + + sequence = sequence_editor.sequences_all.get(strip_name, None) + + if sequence is None: + if strip_type == 'SCENE': + strip_scene = bpy.data.scenes.get(sequence_data.get('scene')) + sequence = sequence_editor.sequences.new_scene(strip_name, + strip_scene, + strip_channel, + strip_frame_start) + elif strip_type == 'MOVIE': + filepath = get_filepath(Path(sequence_data['filepath']).name) + sequence = sequence_editor.sequences.new_movie(strip_name, + filepath, + strip_channel, + strip_frame_start) + elif strip_type == 'SOUND': + filepath = bpy.data.sounds[sequence_data['sound']].filepath + sequence = sequence_editor.sequences.new_sound(strip_name, + filepath, + strip_channel, + strip_frame_start) + elif strip_type == 'IMAGE': + images_name = sequence_data.get('filenames') + filepath = get_filepath(images_name[0]) + sequence = sequence_editor.sequences.new_image(strip_name, + filepath, + strip_channel, + strip_frame_start) + # 
load other images + if len(images_name)>1: + for img_idx in range(1,len(images_name)): + sequence.elements.append((images_name[img_idx])) + else: + seq = {} + + for i in range(sequence_data['input_count']): + seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None)) + + sequence = sequence_editor.sequences.new_effect(name=strip_name, + type=strip_type, + channel=strip_channel, + frame_start=strip_frame_start, + frame_end=sequence_data['frame_final_end'], + **seq) + + loader = Loader() + # TODO: Support filepath updates + loader.exclure_filter = ['filepath', 'sound', 'filenames','fps'] + loader.load(sequence, sequence_data) + sequence.select = False + class BlScene(BlDatablock): bl_id = "scenes" bl_class = bpy.types.Scene - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = True bl_icon = 'SCENE_DATA' bl_reload_parent = False - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.diff_method = DIFF_JSON - def _construct(self, data): instance = bpy.data.scenes.new(data["name"]) + instance.uuid = self.uuid + return instance def _load_implementation(self, data, target): @@ -328,6 +419,29 @@ class BlScene(BlDatablock): 'view_settings']['curve_mapping']['black_level'] target.view_settings.curve_mapping.update() + # Sequencer + sequences = data.get('sequences') + + if sequences: + # Create sequencer data + target.sequence_editor_create() + vse = target.sequence_editor + + # Clear removed sequences + for seq in vse.sequences_all: + if seq.name not in sequences: + vse.sequences.remove(seq) + # Load existing sequences + for seq_name, seq_data in sequences.items(): + load_sequence(seq_data, vse) + # If the sequence is no longer used, clear it + elif target.sequence_editor and not sequences: + target.sequence_editor_clear() + + # FIXME: Find a better way after the replication big refacotoring + # Keep other user from deleting collection object by flushing their history + 
flush_history() + def _dump_implementation(self, data, instance=None): assert(instance) @@ -386,10 +500,14 @@ class BlScene(BlDatablock): data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump( instance.view_settings.curve_mapping.curves) - if instance.sequence_editor: - data['has_sequence'] = True - else: - data['has_sequence'] = False + # Sequence + vse = instance.sequence_editor + if vse: + dumped_sequences = {} + for seq in vse.sequences_all: + dumped_sequences[seq.name] = dump_sequence(seq) + data['sequences'] = dumped_sequences + return data @@ -408,9 +526,18 @@ class BlScene(BlDatablock): deps.append(self.instance.grease_pencil) # Sequences - # deps.extend(list(self.instance.sequence_editor.sequences_all)) - if self.instance.sequence_editor: - deps.append(self.instance.sequence_editor) + vse = self.instance.sequence_editor + if vse: + for sequence in vse.sequences_all: + if sequence.type == 'MOVIE' and sequence.filepath: + deps.append(Path(bpy.path.abspath(sequence.filepath))) + elif sequence.type == 'SOUND' and sequence.sound: + deps.append(sequence.sound) + elif sequence.type == 'IMAGE': + for elem in sequence.elements: + sequence.append( + Path(bpy.path.abspath(sequence.directory), + elem.filename)) return deps diff --git a/multi_user/bl_types/bl_sequencer.py b/multi_user/bl_types/bl_sequencer.py deleted file mode 100644 index 6d74aa3..0000000 --- a/multi_user/bl_types/bl_sequencer.py +++ /dev/null @@ -1,198 +0,0 @@ -# ##### BEGIN GPL LICENSE BLOCK ##### -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# ##### END GPL LICENSE BLOCK ##### - - -import bpy -import mathutils -from pathlib import Path -import logging - -from .bl_file import get_filepath -from .dump_anything import Loader, Dumper -from .bl_datablock import BlDatablock, get_datablock_from_uuid - -def dump_sequence(sequence: bpy.types.Sequence) -> dict: - """ Dump a sequence to a dict - - :arg sequence: sequence to dump - :type sequence: bpy.types.Sequence - :return dict: - """ - dumper = Dumper() - dumper.exclude_filter = [ - 'lock', - 'select', - 'select_left_handle', - 'select_right_handle', - 'strobe' - ] - dumper.depth = 1 - data = dumper.dump(sequence) - - - # TODO: Support multiple images - if sequence.type == 'IMAGE': - data['filenames'] = [e.filename for e in sequence.elements] - - - # Effect strip inputs - input_count = getattr(sequence, 'input_count', None) - if input_count: - for n in range(input_count): - input_name = f"input_{n+1}" - data[input_name] = getattr(sequence, input_name).name - - return data - - -def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor): - """ Load sequence from dumped data - - :arg sequence_data: sequence to dump - :type sequence_data:dict - :arg sequence_editor: root sequence editor - :type sequence_editor: bpy.types.SequenceEditor - """ - strip_type = sequence_data.get('type') - strip_name = sequence_data.get('name') - strip_channel = sequence_data.get('channel') - strip_frame_start = sequence_data.get('frame_start') - - sequence = sequence_editor.sequences_all.get(strip_name, None) - - if sequence is None: - if strip_type == 'SCENE': - strip_scene = bpy.data.scenes.get(sequence_data.get('scene')) - sequence = sequence_editor.sequences.new_scene(strip_name, - strip_scene, - strip_channel, - strip_frame_start) - elif strip_type == 'MOVIE': - filepath = 
get_filepath(Path(sequence_data['filepath']).name) - sequence = sequence_editor.sequences.new_movie(strip_name, - filepath, - strip_channel, - strip_frame_start) - elif strip_type == 'SOUND': - filepath = bpy.data.sounds[sequence_data['sound']].filepath - sequence = sequence_editor.sequences.new_sound(strip_name, - filepath, - strip_channel, - strip_frame_start) - elif strip_type == 'IMAGE': - images_name = sequence_data.get('filenames') - filepath = get_filepath(images_name[0]) - sequence = sequence_editor.sequences.new_image(strip_name, - filepath, - strip_channel, - strip_frame_start) - # load other images - if len(images_name)>1: - for img_idx in range(1,len(images_name)): - sequence.elements.append((images_name[img_idx])) - else: - seq = {} - - for i in range(sequence_data['input_count']): - seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(sequence_data.get(f"input_{i+1}", None)) - - sequence = sequence_editor.sequences.new_effect(name=strip_name, - type=strip_type, - channel=strip_channel, - frame_start=strip_frame_start, - frame_end=sequence_data['frame_final_end'], - **seq) - - loader = Loader() - loader.load(sequence, sequence_data) - sequence.select = False - - -class BlSequencer(BlDatablock): - bl_id = "scenes" - bl_class = bpy.types.SequenceEditor - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True - bl_check_common = True - bl_icon = 'SEQUENCE' - bl_reload_parent = False - - def _construct(self, data): - # Get the scene - scene_id = data.get('name') - scene = bpy.data.scenes.get(scene_id, None) - - # Create sequencer data - scene.sequence_editor_clear() - scene.sequence_editor_create() - - return scene.sequence_editor - - def resolve(self): - scene = bpy.data.scenes.get(self.data['name'], None) - if scene: - if scene.sequence_editor is None: - self.instance = self._construct(self.data) - else: - self.instance = scene.sequence_editor - else: - logging.warning("Sequencer editor scene not found") - - def _load_implementation(self, 
data, target): - loader = Loader() - # Sequencer - sequences = data.get('sequences') - if sequences: - for seq in target.sequences_all: - if seq.name not in sequences: - target.sequences.remove(seq) - for seq_name, seq_data in sequences.items(): - load_sequence(seq_data, target) - - def _dump_implementation(self, data, instance=None): - assert(instance) - sequence_dumper = Dumper() - sequence_dumper.depth = 1 - sequence_dumper.include_filter = [ - 'proxy_storage', - ] - data = {}#sequence_dumper.dump(instance) - # Sequencer - sequences = {} - - for seq in instance.sequences_all: - sequences[seq.name] = dump_sequence(seq) - - data['sequences'] = sequences - data['name'] = instance.id_data.name - - return data - - - def _resolve_deps_implementation(self): - deps = [] - - for seq in self.instance.sequences_all: - if seq.type == 'MOVIE' and seq.filepath: - deps.append(Path(bpy.path.abspath(seq.filepath))) - elif seq.type == 'SOUND' and seq.sound: - deps.append(seq.sound) - elif seq.type == 'IMAGE': - for e in seq.elements: - deps.append(Path(bpy.path.abspath(seq.directory), e.filename)) - return deps diff --git a/multi_user/bl_types/bl_sound.py b/multi_user/bl_types/bl_sound.py index 514b2a9..b81a0b4 100644 --- a/multi_user/bl_types/bl_sound.py +++ b/multi_user/bl_types/bl_sound.py @@ -30,9 +30,6 @@ from .dump_anything import Dumper, Loader class BlSound(BlDatablock): bl_id = "sounds" bl_class = bpy.types.Sound - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'SOUND' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_speaker.py b/multi_user/bl_types/bl_speaker.py index 037bc05..c5f7b6c 100644 --- a/multi_user/bl_types/bl_speaker.py +++ b/multi_user/bl_types/bl_speaker.py @@ -26,9 +26,6 @@ from .bl_datablock import BlDatablock class BlSpeaker(BlDatablock): bl_id = "speakers" bl_class = bpy.types.Speaker - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon 
= 'SPEAKER' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_texture.py b/multi_user/bl_types/bl_texture.py index 98d7898..f132e15 100644 --- a/multi_user/bl_types/bl_texture.py +++ b/multi_user/bl_types/bl_texture.py @@ -26,9 +26,6 @@ from .bl_datablock import BlDatablock class BlTexture(BlDatablock): bl_id = "textures" bl_class = bpy.types.Texture - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'TEXTURE' bl_reload_parent = False diff --git a/multi_user/bl_types/bl_volume.py b/multi_user/bl_types/bl_volume.py index fc67012..90a5a56 100644 --- a/multi_user/bl_types/bl_volume.py +++ b/multi_user/bl_types/bl_volume.py @@ -22,14 +22,11 @@ from pathlib import Path from .dump_anything import Loader, Dumper from .bl_datablock import BlDatablock, get_datablock_from_uuid - +from .bl_material import dump_materials_slots, load_materials_slots class BlVolume(BlDatablock): bl_id = "volumes" bl_class = bpy.types.Volume - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = False bl_icon = 'VOLUME_DATA' bl_reload_parent = False @@ -40,19 +37,9 @@ class BlVolume(BlDatablock): loader.load(target.display, data['display']) # MATERIAL SLOTS - target.materials.clear() - - for mat_uuid, mat_name in data["material_list"]: - mat_ref = None - if mat_uuid is not None: - mat_ref = get_datablock_from_uuid(mat_uuid, None) - else: - mat_ref = bpy.data.materials.get(mat_name, None) - - if mat_ref is None: - raise Exception("Material doesn't exist") - - target.materials.append(mat_ref) + src_materials = data.get('materials', None) + if src_materials: + load_materials_slots(src_materials, target.materials) def _construct(self, data): return bpy.data.volumes.new(data["name"]) @@ -78,7 +65,7 @@ class BlVolume(BlDatablock): data['display'] = dumper.dump(instance.display) # Fix material index - data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m] + data['materials'] = 
dump_materials_slots(instance.materials) return data diff --git a/multi_user/bl_types/bl_world.py b/multi_user/bl_types/bl_world.py index eba2959..ee0f15d 100644 --- a/multi_user/bl_types/bl_world.py +++ b/multi_user/bl_types/bl_world.py @@ -29,9 +29,6 @@ from .bl_material import (load_shader_node_tree, class BlWorld(BlDatablock): bl_id = "worlds" bl_class = bpy.types.World - bl_delay_refresh = 1 - bl_delay_apply = 1 - bl_automatic_push = True bl_check_common = True bl_icon = 'WORLD_DATA' bl_reload_parent = False diff --git a/multi_user/bl_types/dump_anything.py b/multi_user/bl_types/dump_anything.py index 4d43180..30c2a13 100644 --- a/multi_user/bl_types/dump_anything.py +++ b/multi_user/bl_types/dump_anything.py @@ -465,6 +465,7 @@ class Loader: self.type_subset = self.match_subset_all self.occlude_read_only = False self.order = ['*'] + self.exclure_filter = [] def load(self, dst_data, src_dumped_data): self._load_any( @@ -475,7 +476,8 @@ class Loader: def _load_any(self, any, dump): for filter_function, load_function in self.type_subset: - if filter_function(any): + if filter_function(any) and \ + any.sub_element_name not in self.exclure_filter: load_function(any, dump) return diff --git a/multi_user/operators.py b/multi_user/operators.py index 24942e2..a8f024b 100644 --- a/multi_user/operators.py +++ b/multi_user/operators.py @@ -45,7 +45,7 @@ from bpy_extras.io_utils import ExportHelper, ImportHelper from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE, STATE_INITIAL, STATE_SYNCING, UP) from replication.data import ReplicatedDataFactory -from replication.exception import NonAuthorizedOperationError +from replication.exception import NonAuthorizedOperationError, ContextError from replication.interface import session from . 
import bl_types, environment, timers, ui, utils @@ -74,30 +74,42 @@ def session_callback(name): def initialize_session(): """Session connection init hander """ + logging.info("Intializing the scene") settings = utils.get_preferences() runtime_settings = bpy.context.window_manager.session # Step 1: Constrect nodes + logging.info("Constructing nodes") for node in session._graph.list_ordered(): - node_ref = session.get(node) - if node_ref.state == FETCHED: + node_ref = session.get(uuid=node) + if node_ref is None: + logging.error(f"Can't construct node {node}") + elif node_ref.state == FETCHED: node_ref.resolve() - + # Step 2: Load nodes + logging.info("Loading nodes") for node in session._graph.list_ordered(): - node_ref = session.get(node) - if node_ref.state == FETCHED: + node_ref = session.get(uuid=node) + + if node_ref is None: + logging.error(f"Can't load node {node}") + elif node_ref.state == FETCHED: node_ref.apply() + logging.info("Registering timers") # Step 4: Register blender timers for d in deleyables: d.register() - if settings.update_method == 'DEPSGRAPH': - bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation) - bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT') + # Step 5: Clearing history + utils.flush_history() + + # Step 6: Launch deps graph update handling + bpy.app.handlers.depsgraph_update_post.append(depsgraph_evaluation) + @session_callback('on_exit') def on_connection_end(reason="none"): @@ -116,9 +128,8 @@ def on_connection_end(reason="none"): stop_modal_executor = True - if settings.update_method == 'DEPSGRAPH': - bpy.app.handlers.depsgraph_update_post.remove( - depsgraph_evaluation) + if depsgraph_evaluation in bpy.app.handlers.depsgraph_update_post: + bpy.app.handlers.depsgraph_update_post.remove(depsgraph_evaluation) # Step 3: remove file handled logger = logging.getLogger() @@ -148,7 +159,7 @@ class SessionStartOperator(bpy.types.Operator): runtime_settings = context.window_manager.session users = 
bpy.data.window_managers['WinMan'].online_users admin_pass = runtime_settings.password - use_extern_update = settings.update_method == 'DEPSGRAPH' + users.clear() deleyables.clear() @@ -159,9 +170,10 @@ class SessionStartOperator(bpy.types.Operator): datefmt='%H:%M:%S' ) + start_time = datetime.now().strftime('%Y_%m_%d_%H-%M-%S') log_directory = os.path.join( settings.cache_directory, - "multiuser_client.log") + f"multiuser_{start_time}.log") os.makedirs(settings.cache_directory, exist_ok=True) @@ -196,16 +208,9 @@ class SessionStartOperator(bpy.types.Operator): bpy_factory.register_type( type_module_class.bl_class, type_module_class, - timer=type_local_config.bl_delay_refresh*1000, - automatic=type_local_config.auto_push, check_common=type_module_class.bl_check_common) - if settings.update_method == 'DEFAULT': - if type_local_config.bl_delay_apply > 0: - deleyables.append( - timers.ApplyTimer( - timeout=type_local_config.bl_delay_apply, - target_type=type_module_class)) + deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate)) if bpy.app.version[1] >= 91: python_binary_path = sys.executable @@ -215,11 +220,7 @@ class SessionStartOperator(bpy.types.Operator): session.configure( factory=bpy_factory, python_path=python_binary_path, - external_update_handling=use_extern_update) - - if settings.update_method == 'DEPSGRAPH': - deleyables.append(timers.ApplyTimer( - settings.depsgraph_update_rate/1000)) + external_update_handling=True) # Host a session if self.host: @@ -271,7 +272,10 @@ class SessionStartOperator(bpy.types.Operator): # Background client updates service deleyables.append(timers.ClientUpdate()) deleyables.append(timers.DynamicRightSelectTimer()) - + # deleyables.append(timers.PushTimer( + # queue=stagging, + # timeout=settings.depsgraph_update_rate + # )) session_update = timers.SessionStatusUpdate() session_user_sync = timers.SessionUserSync() session_background_executor = timers.MainThreadExecutor( @@ -698,6 +702,31 @@ class 
SessionClearCache(bpy.types.Operator): row = self.layout row.label(text=f" Do you really want to remove local cache ? ") +class SessionPurgeOperator(bpy.types.Operator): + "Remove node with lost references" + bl_idname = "session.purge" + bl_label = "Purge session data" + + @classmethod + def poll(cls, context): + return True + + def execute(self, context): + try: + sanitize_deps_graph(remove_nodes=True) + except Exception as e: + self.report({'ERROR'}, repr(e)) + + return {"FINISHED"} + + def invoke(self, context, event): + return context.window_manager.invoke_props_dialog(self) + + def draw(self, context): + row = self.layout + row.label(text=f" Do you really want to purge the session data ? ") + + class SessionNotifyOperator(bpy.types.Operator): """Dialog only operator""" bl_idname = "session.notify" @@ -722,37 +751,6 @@ class SessionNotifyOperator(bpy.types.Operator): return context.window_manager.invoke_props_dialog(self) -def dump_db(filepath): - # Replication graph - nodes_ids = session.list() - #TODO: add dump graph to replication - - nodes =[] - for n in nodes_ids: - nd = session.get(uuid=n) - nodes.append(( - n, - { - 'owner': nd.owner, - 'str_type': nd.str_type, - 'data': nd.data, - 'dependencies': nd.dependencies, - } - )) - - db = dict() - db['nodes'] = nodes - db['users'] = copy.copy(session.online_users) - - stime = datetime.now().strftime('%Y_%m_%d_%H-%M-%S') - - filepath = Path(filepath) - filepath = filepath.with_name(f"{filepath.stem}_{stime}{filepath.suffix}") - with gzip.open(filepath, "wb") as f: - logging.info(f"Writing session snapshot to {filepath}") - pickle.dump(db, f, protocol=4) - - class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper): bl_idname = "session.save" bl_label = "Save session data" @@ -786,7 +784,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper): recorder.register() deleyables.append(recorder) else: - dump_db(self.filepath) + session.save(self.filepath) return {'FINISHED'} @@ -914,21 +912,48 @@ 
classes = ( SessionSaveBackupOperator, SessionLoadSaveOperator, SessionStopAutoSaveOperator, + SessionPurgeOperator, ) +def update_external_dependencies(): + nodes_ids = session.list(filter=bl_types.bl_file.BlFile) + for node_id in nodes_ids: + node = session.get(node_id) + if node and node.owner in [session.id, RP_COMMON] \ + and node.has_changed(): + session.commit(node_id) + session.push(node_id, check_data=False) + +def sanitize_deps_graph(remove_nodes: bool = False): + """ Cleanup the replication graph + """ + if session and session.state['STATE'] == STATE_ACTIVE: + start = utils.current_milli_time() + rm_cpt = 0 + for node_key in session.list(): + node = session.get(node_key) + if node is None \ + or (node.state == UP and not node.resolve(construct=False)): + if remove_nodes: + try: + session.remove(node.uuid, remove_dependencies=False) + logging.info(f"Removing {node.uuid}") + rm_cpt += 1 + except NonAuthorizedOperationError: + continue + logging.info(f"Sanitize took { utils.current_milli_time()-start} ms") + @persistent -def sanitize_deps_graph(dummy): - """sanitize deps graph +def resolve_deps_graph(dummy): + """Resolve deps graph Temporary solution to resolve each node pointers after a Undo. A future solution should be to avoid storing dataclock reference... """ if session and session.state['STATE'] == STATE_ACTIVE: - for node_key in session.list(): - session.get(node_key).resolve() - + sanitize_deps_graph(remove_nodes=True) @persistent def load_pre_handler(dummy): @@ -952,41 +977,58 @@ def depsgraph_evaluation(scene): dependency_updates = [u for u in blender_depsgraph.updates] settings = utils.get_preferences() - # NOTE: maybe we don't need to check each update but only the first + update_external_dependencies() + # NOTE: maybe we don't need to check each update but only the first for update in reversed(dependency_updates): # Is the object tracked ? 
if update.id.uuid: # Retrieve local version - node = session.get(update.id.uuid) - + node = session.get(uuid=update.id.uuid) + # Check our right on this update: # - if its ours or ( under common and diff), launch the # update process - # - if its to someone else, ignore the update (go deeper ?) - if node and node.owner in [session.id, RP_COMMON] and node.state == UP: - # Avoid slow geometry update - if 'EDIT' in context.mode and \ - not settings.sync_flags.sync_during_editmode: - break + # - if its to someone else, ignore the update + if node and node.owner in [session.id, RP_COMMON]: + if node.state == UP: + # Avoid slow geometry update + if 'EDIT' in context.mode and \ + not settings.sync_flags.sync_during_editmode: + break - session.stash(node.uuid) + try: + if node.has_changed(): + session.commit(node.uuid) + session.push(node.uuid, check_data=False) + except ReferenceError: + logging.debug(f"Reference error {node.uuid}") + if not node.is_valid(): + session.remove(node.uuid) + except ContextError as e: + logging.debug(e) + except Exception as e: + logging.error(e) else: - # Distant update continue - # else: - # # New items ! 
- # logger.error("UPDATE: ADD") - - + # A new scene is created + elif isinstance(update.id, bpy.types.Scene): + ref = session.get(reference=update.id) + if ref: + ref.resolve() + else: + scn_uuid = session.add(update.id) + session.commit(scn_uuid) + session.push(scn_uuid, check_data=False) def register(): from bpy.utils import register_class for cls in classes: register_class(cls) - bpy.app.handlers.undo_post.append(sanitize_deps_graph) - bpy.app.handlers.redo_post.append(sanitize_deps_graph) + + bpy.app.handlers.undo_post.append(resolve_deps_graph) + bpy.app.handlers.redo_post.append(resolve_deps_graph) bpy.app.handlers.load_pre.append(load_pre_handler) bpy.app.handlers.frame_change_pre.append(update_client_frame) @@ -1000,8 +1042,8 @@ def unregister(): for cls in reversed(classes): unregister_class(cls) - bpy.app.handlers.undo_post.remove(sanitize_deps_graph) - bpy.app.handlers.redo_post.remove(sanitize_deps_graph) + bpy.app.handlers.undo_post.remove(resolve_deps_graph) + bpy.app.handlers.redo_post.remove(resolve_deps_graph) bpy.app.handlers.load_pre.remove(load_pre_handler) bpy.app.handlers.frame_change_pre.remove(update_client_frame) diff --git a/multi_user/preferences.py b/multi_user/preferences.py index 3a2d816..1757c1b 100644 --- a/multi_user/preferences.py +++ b/multi_user/preferences.py @@ -97,8 +97,6 @@ def get_log_level(self): class ReplicatedDatablock(bpy.types.PropertyGroup): type_name: bpy.props.StringProperty() bl_name: bpy.props.StringProperty() - bl_delay_refresh: bpy.props.FloatProperty() - bl_delay_apply: bpy.props.FloatProperty() use_as_filter: bpy.props.BoolProperty(default=True) auto_push: bpy.props.BoolProperty(default=True) icon: bpy.props.StringProperty() @@ -199,20 +197,11 @@ class SessionPrefs(bpy.types.AddonPreferences): description='connection timeout before disconnection', default=1000 ) - update_method: bpy.props.EnumProperty( - name='update method', - description='replication update method', - items=[ - ('DEFAULT', "Default", 
"Default: Use threads to monitor databloc changes"), - ('DEPSGRAPH', "Depsgraph", - "Experimental: Use the blender dependency graph to trigger updates"), - ], - ) # Replication update settings - depsgraph_update_rate: bpy.props.IntProperty( - name='depsgraph update rate', - description='Dependency graph uppdate rate (milliseconds)', - default=1000 + depsgraph_update_rate: bpy.props.FloatProperty( + name='depsgraph update rate (s)', + description='Dependency graph update rate (s)', + default=1 ) clear_memory_filecache: bpy.props.BoolProperty( name="Clear memory filecache", @@ -282,11 +271,6 @@ class SessionPrefs(bpy.types.AddonPreferences): description="Rights", default=False ) - conf_session_timing_expanded: bpy.props.BoolProperty( - name="timings", - description="timings", - default=False - ) conf_session_cache_expanded: bpy.props.BoolProperty( name="Cache", description="cache", @@ -390,28 +374,7 @@ class SessionPrefs(bpy.types.AddonPreferences): row = box.row() row.label(text="Init the session from:") row.prop(self, "init_method", text="") - row = box.row() - row.label(text="Update method:") - row.prop(self, "update_method", text="") - table = box.box() - table.row().prop( - self, "conf_session_timing_expanded", text="Refresh rates", - icon=get_expanded_icon(self.conf_session_timing_expanded), - emboss=False) - - if self.conf_session_timing_expanded: - line = table.row() - line.label(text=" ") - line.separator() - line.label(text="refresh (sec)") - line.label(text="apply (sec)") - - for item in self.supported_datablocks: - line = table.row(align=True) - line.label(text="", icon=item.icon) - line.prop(item, "bl_delay_refresh", text="") - line.prop(item, "bl_delay_apply", text="") # HOST SETTINGS box = grid.box() box.prop( @@ -467,11 +430,8 @@ class SessionPrefs(bpy.types.AddonPreferences): type_module_class = getattr(type_module, type_impl_name) new_db.name = type_impl_name new_db.type_name = type_impl_name - new_db.bl_delay_refresh = 
type_module_class.bl_delay_refresh - new_db.bl_delay_apply = type_module_class.bl_delay_apply new_db.use_as_filter = True new_db.icon = type_module_class.bl_icon - new_db.auto_push = type_module_class.bl_automatic_push new_db.bl_name = type_module_class.bl_id diff --git a/multi_user/timers.py b/multi_user/timers.py index 9fd6dda..9e1d2e6 100644 --- a/multi_user/timers.py +++ b/multi_user/timers.py @@ -22,7 +22,7 @@ import bpy from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE, STATE_INITIAL, STATE_LOBBY, STATE_QUITTING, STATE_SRV_SYNC, STATE_SYNCING, UP) -from replication.exception import NonAuthorizedOperationError +from replication.exception import NonAuthorizedOperationError, ContextError from replication.interface import session from . import operators, utils @@ -98,20 +98,12 @@ class SessionBackupTimer(Timer): def execute(self): - operators.dump_db(self._filepath) + session.save(self._filepath) class ApplyTimer(Timer): - def __init__(self, timeout=1, target_type=None): - self._type = target_type - super().__init__(timeout) - self.id = target_type.__name__ - def execute(self): if session and session.state['STATE'] == STATE_ACTIVE: - if self._type: - nodes = session.list(filter=self._type) - else: - nodes = session.list() + nodes = session.list() for node in nodes: node_ref = session.get(uuid=node) @@ -122,13 +114,11 @@ class ApplyTimer(Timer): except Exception as e: logging.error(f"Fail to apply {node_ref.uuid}: {e}") else: - if self._type.bl_reload_parent: - parents = [] + if node_ref.bl_reload_parent: + for parent in session._graph.find_parents(node): + logging.debug("Refresh parent {node}") + session.apply(parent, force=True) - for n in session.list(): - deps = session.get(uuid=n).dependencies - if deps and node in deps: - session.apply(n, force=True) class DynamicRightSelectTimer(Timer): def __init__(self, timeout=.1): @@ -149,6 +139,9 @@ class DynamicRightSelectTimer(Timer): ctx = bpy.context annotation_gp = ctx.scene.grease_pencil + if 
annotation_gp and not annotation_gp.uuid: + ctx.scene.update_tag() + # if an annotation exist and is tracked if annotation_gp and annotation_gp.uuid: registered_gp = session.get(uuid=annotation_gp.uuid) @@ -163,6 +156,13 @@ class DynamicRightSelectTimer(Timer): settings.username, ignore_warnings=True, affect_dependencies=False) + + if registered_gp.owner == settings.username: + gp_node = session.get(uuid=annotation_gp.uuid) + if gp_node.has_changed(): + session.commit(gp_node.uuid) + session.push(gp_node.uuid, check_data=False) + elif self._annotating: session.change_owner( registered_gp.uuid, diff --git a/multi_user/ui.py b/multi_user/ui.py index bca2e0e..b8f423b 100644 --- a/multi_user/ui.py +++ b/multi_user/ui.py @@ -269,7 +269,6 @@ class SESSION_PT_advanced_settings(bpy.types.Panel): if settings.sidebar_advanced_rep_expanded: replication_section_row = replication_section.row() - replication_section_row.label(text="Sync flags", icon='COLLECTION_NEW') replication_section_row = replication_section.row() replication_section_row.prop(settings.sync_flags, "sync_render_settings") replication_section_row = replication_section.row() @@ -282,34 +281,8 @@ class SESSION_PT_advanced_settings(bpy.types.Panel): warning = replication_section_row.box() warning.label(text="Don't use this with heavy meshes !", icon='ERROR') replication_section_row = replication_section.row() + replication_section_row.prop(settings, "depsgraph_update_rate", text="Apply delay") - replication_section_row.label(text="Update method", icon='RECOVER_LAST') - replication_section_row = replication_section.row() - replication_section_row.prop(settings, "update_method", expand=True) - replication_section_row = replication_section.row() - replication_timers = replication_section_row.box() - replication_timers.label(text="Replication timers", icon='TIME') - if settings.update_method == "DEFAULT": - replication_timers = replication_timers.row() - # Replication frequencies - flow = replication_timers.grid_flow( 
- row_major=True, columns=0, even_columns=True, even_rows=False, align=True) - line = flow.row(align=True) - line.label(text=" ") - line.separator() - line.label(text="refresh (sec)") - line.label(text="apply (sec)") - - for item in settings.supported_datablocks: - line = flow.row(align=True) - line.prop(item, "auto_push", text="", icon=item.icon) - line.separator() - line.prop(item, "bl_delay_refresh", text="") - line.prop(item, "bl_delay_apply", text="") - else: - replication_timers = replication_timers.row() - replication_timers.label(text="Update rate (ms):") - replication_timers.prop(settings, "depsgraph_update_rate", text="") cache_section = layout.row().box() cache_section.prop( diff --git a/multi_user/utils.py b/multi_user/utils.py index 57ed532..25444f9 100644 --- a/multi_user/utils.py +++ b/multi_user/utils.py @@ -65,6 +65,15 @@ def get_datablock_users(datablock): return users +def flush_history(): + try: + logging.debug("Flushing history") + for i in range(bpy.context.preferences.edit.undo_steps+1): + bpy.ops.ed.undo_push(message="Multiuser history flush") + except RuntimeError: + logging.error("Fail to overwrite history") + + def get_state_str(state): state_str = 'UNKOWN' if state == STATE_WAITING: