From 5ffb05f46a91da3ce1d57b43d0cd3cbc3420f312 Mon Sep 17 00:00:00 2001
From: Swann
Date: Thu, 8 Apr 2021 11:22:01 +0200
Subject: [PATCH] feat: draw user curve

---
 multi_user/bl_types/bl_mesh.py       |   2 +-
 multi_user/bl_types/dump_anything.py |   4 +
 multi_user/operators.py              | 253 ++++++++++++++++++---------
 3 files changed, 180 insertions(+), 79 deletions(-)

diff --git a/multi_user/bl_types/bl_mesh.py b/multi_user/bl_types/bl_mesh.py
index 8297737..2011e08 100644
--- a/multi_user/bl_types/bl_mesh.py
+++ b/multi_user/bl_types/bl_mesh.py
@@ -69,7 +69,7 @@ class BlMesh(BlDatablock):
         loader.load(target, data)
 
         # MATERIAL SLOTS
-        src_materials = data.get('materials', None)
+        src_materials = data.get('materials', data.get('material_list'))
 
         if src_materials:
             load_materials_slots(src_materials, target.materials)
diff --git a/multi_user/bl_types/dump_anything.py b/multi_user/bl_types/dump_anything.py
index 4765fbf..f6cee25 100644
--- a/multi_user/bl_types/dump_anything.py
+++ b/multi_user/bl_types/dump_anything.py
@@ -55,6 +55,10 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
 
     for attr in attributes:
         attr_type = collection[0].bl_rna.properties.get(attr).type
+
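+        # A dump may not contain every attribute of the collection;
+        # skip the missing ones instead of failing on dikt[attr] below.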
") # OPERATORS @@ -270,7 +273,8 @@ class SessionStartOperator(bpy.types.Operator): # Background client updates service deleyables.append(timers.ClientUpdate()) deleyables.append(timers.DynamicRightSelectTimer()) - deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate)) + deleyables.append(timers.ApplyTimer( + timeout=settings.depsgraph_update_rate)) # deleyables.append(timers.PushTimer( # queue=stagging, # timeout=settings.depsgraph_update_rate @@ -288,8 +292,6 @@ class SessionStartOperator(bpy.types.Operator): deleyables.append(session_update) deleyables.append(session_user_sync) - - self.report( {'INFO'}, f"connecting to tcp://{settings.ip}:{settings.port}") @@ -609,7 +611,7 @@ class SessionApply(bpy.types.Operator): session.apply(parent, force=True) except Exception as e: self.report({'ERROR'}, repr(e)) - return {"CANCELED"} + return {"CANCELED"} return {"FINISHED"} @@ -635,6 +637,7 @@ class SessionCommit(bpy.types.Operator): self.report({'ERROR'}, repr(e)) return {"CANCELED"} + class ApplyArmatureOperator(bpy.types.Operator): """Operator which runs its self from a timer""" bl_idname = "session.apply_armature_operator" @@ -706,6 +709,7 @@ class SessionClearCache(bpy.types.Operator): row = self.layout row.label(text=f" Do you really want to remove local cache ? ") + class SessionPurgeOperator(bpy.types.Operator): "Remove node with lost references" bl_idname = "session.purge" @@ -750,7 +754,6 @@ class SessionNotifyOperator(bpy.types.Operator): layout = self.layout layout.row().label(text=self.message) - def invoke(self, context, event): return context.window_manager.invoke_props_dialog(self) @@ -796,6 +799,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper): def poll(cls, context): return session.state['STATE'] == STATE_ACTIVE + class SessionStopAutoSaveOperator(bpy.types.Operator): bl_idname = "session.cancel_autosave" bl_label = "Cancel auto-save" @@ -839,6 +843,12 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper): default=False, ) + animate: bpy.props.BoolProperty( + name="Animate history", + description="Animate history versions", + default=False, + ) + clear_datablocks: bpy.props.BoolProperty( name="Removes existing data", description="Remove all exisitng datablocks", @@ -850,17 +860,99 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper): options={'HIDDEN', 'SKIP_SAVE'}, ) + def draw_users_as_curves(self, user_list): + users_collection = bpy.data.collections.new("users") + for username, user_data in user_list.items(): + metadata = user_data.get('metadata', None) + if metadata: + logging.info(f"Loading user {username}") + + # Curve creation + location = metadata.get('view_corners') + positions = [coord for coord in location] + + curve = bpy.data.curves.new(username, 'CURVE') + obj = bpy.data.objects.new(username, curve) + + for p in positions: + p.append(0.0) + + sight = curve.splines.new('POLY') + sight.points.add(1) + sight.points[0].co = positions[4] + sight.points[1].co = positions[5] + + cadre = curve.splines.new('POLY') + cadre.points.add(4) + cadre.points[0].co = positions[0] + cadre.points[1].co = positions[2] + cadre.points[2].co = positions[1] + cadre.points[3].co = positions[3] + cadre.points[4].co = positions[0] + + frust = curve.splines.new('POLY') + frust.points.add(2) + frust.points[0].co = positions[0] + frust.points[1].co = positions[6] + frust.points[2].co = positions[1] + + frust2 = curve.splines.new('POLY') + frust2.points.add(2) + frust2.points[0].co = positions[2] + frust2.points[1].co = 
+                sight = curve.splines.new('POLY')
+                sight.points.add(1)
+                sight.points[0].co = positions[4]
+                sight.points[1].co = positions[5]
+
+                cadre = curve.splines.new('POLY')
+                cadre.points.add(4)
+                cadre.points[0].co = positions[0]
+                cadre.points[1].co = positions[2]
+                cadre.points[2].co = positions[1]
+                cadre.points[3].co = positions[3]
+                cadre.points[4].co = positions[0]
+
+                frust = curve.splines.new('POLY')
+                frust.points.add(2)
+                frust.points[0].co = positions[0]
+                frust.points[1].co = positions[6]
+                frust.points[2].co = positions[1]
+
+                frust2 = curve.splines.new('POLY')
+                frust2.points.add(2)
+                frust2.points[0].co = positions[2]
+                frust2.points[1].co = positions[6]
+                frust2.points[2].co = positions[3]
+
+                curve.bevel_depth = 0.02
+
+                # Material creation
+                color = metadata.get('color')
+                material = bpy.data.materials.new(username)
+                material.use_nodes = True
+                material.node_tree.nodes[0].inputs['Emission'].default_value = color
+
+                curve.materials.append(material)
+
+                users_collection.objects.link(obj)
+        return users_collection
+
+    def draw_users_meshes(self, user_list):
+        for username, user_data in user_list.items():
+            metadata = user_data.get('metadata', None)
+            if metadata:
+                logging.info(f"Loading user {username}")
+                location = metadata.get('view_corners')
+                color = metadata.get('color')
+
+                positions = [tuple(coord) for coord in location]
+                edges = ((1, 3), (2, 1), (3, 0),
+                         (2, 0), (4, 5), (1, 6),
+                         (2, 6), (3, 6), (0, 6))
+
+                mesh = bpy.data.meshes.new("toto")
+                obj = bpy.data.objects.new("toto", mesh)
+                bm = bmesh.new()
+
+                for p in positions:
+                    bm.verts.new(p)
+                bm.verts.ensure_lookup_table()
+
+                for v1, v2 in edges:
+                    bm.edges.new((bm.verts[v1], bm.verts[v2]))
+                bm.to_mesh(mesh)
+
+                bpy.context.scene.collection.objects.link(obj)
+
     def execute(self, context):
         from replication.graph import ReplicationGraph
 
         # Initialisation
         # init the factory with supported types
         bpy_factory = ReplicatedDataFactory()
         for type in bl_types.types_to_register():
             type_module = getattr(bl_types, type)
             name = [e.capitalize() for e in type.split('_')[1:]]
             type_impl_name = 'Bl' + ''.join(name)
             type_module_class = getattr(type_module, type_impl_name)
 
             bpy_factory.register_type(
@@ -870,86 +962,91 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
         if self.clear_datablocks:
             utils.clean_scene()
 
         dir_path = Path(self.filepath).parent
         for db in self.files:
-            filepath = os.path.join(dir_path,db.name)
+            filepath = os.path.join(dir_path, db.name)
 
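+            # A snapshot file is expected to be a gzip-compressed pickle;
+            # fall back to reading it as a plain pickle if gzip rejects it.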
             try:
                 f = gzip.open(filepath, "rb")
                 db = pickle.load(f)
             except OSError as e:
                 f = open(filepath, "rb")
                 db = pickle.load(f)
 
-            if db:
-
-                created = os.path.getctime(filepath)
-                logging.info(f"Reading {filepath}")
-                nodes = db.get("nodes")
-
+            if db:
+                created = os.path.getctime(filepath)
+                logging.info(f"Reading {filepath}")
+                nodes = db.get("nodes")
+                users = db.get("users")
+                users_collection = self.draw_users_as_curves(users)
                 logging.info(f"{len(nodes)} Nodes to load")
 
                 graph = ReplicationGraph()
 
                 for node, node_data in nodes:
                     node_type = node_data.get('str_type')
 
                     impl = bpy_factory.get_implementation_from_net(node_type)
 
                     if impl:
                         logging.info(f"Loading {node}")
                         instance = impl(owner=node_data['owner'],
                                         uuid=node,
                                         dependencies=node_data['dependencies'],
                                         data=node_data['data'])
                         instance.store(graph)
                         instance.state = FETCHED
 
-                logging.info("Graph succefully loaded")
+                logging.info("Graph successfully loaded")
 
                 # Find scene
-                scenes = [n for n in graph.values() if isinstance(n, bl_types.bl_scene.BlScene)]
+                scenes = [n for n in graph.values() if isinstance(
+                    n, bl_types.bl_scene.BlScene)]
                 scene = scenes[0]
 
-                collection_data = {
-                    'instance_offset': [0.0, 0.0, 0.0],
-                    'name': str(created),
-                    'objects': scene.data['collection']['objects'],
-                    'children': scene.data['collection']['children']}
-                collection_node = bl_types.bl_collection.BlCollection()
-                collection_node.dependencies = scene.dependencies
-                collection_node.data = collection_data
-                graph[collection_node.uuid] = collection_node
-                del graph[scene.uuid]
+                # collection_data = {
+                #     'instance_offset': [0.0, 0.0, 0.0],
+                #     'name': str(created),
+                #     'objects': scene.data['collection']['objects'],
+                #     'children': scene.data['collection']['children']}
+                # collection_node = bl_types.bl_collection.BlCollection()
+                # collection_node.dependencies = scene.dependencies
+                # collection_node.data = collection_data
+                # graph[collection_node.uuid] = collection_node
+                # del graph[scene.uuid]
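+                # Rather than relinking the snapshot under a timestamped
+                # collection (old code kept above for reference), rename the
+                # loaded scene after the snapshot's creation time.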
+                scene.data['name'] = str(created)
 
                 # Step 1: Construct nodes
                 for node in graph.list_ordered():
                     node_inst = graph[node]
                     try:
                         node_inst.instance = node_inst._construct(node_inst.data)
                         node_inst.instance.uuid = node_inst.uuid
                     except Exception as e:
                         continue
 
                 # Step 2: Load nodes
                 for node in graph.list_ordered():
                     graph[node].state = FETCHED
                     graph[node].apply()
 
-                bpy.context.scene.collection.children.link(collection_node.instance)
+                scene.instance.collection.children.link(users_collection)
+
+                # bpy.context.scene.collection.children.link(collection_node.instance)
 
         return {'FINISHED'}
 
     @classmethod
     def poll(cls, context):
         return True
 
 
 def menu_func_import(self, context):
-    self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
+    self.layout.operator(SessionLoadSaveOperator.bl_idname,
+                         text='Multi-user session snapshot (.db)')
 
 
 classes = (
     SessionStartOperator,
     SessionStopOperator,
     SessionPropertyRemoveOperator,
@@ -970,22 +1067,22 @@ classes = (
 )
 
 
 def update_external_dependencies():
     nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
     for node_id in nodes_ids:
         node = session.get(node_id)
         if node and node.owner in [session.id, RP_COMMON] \
                 and node.has_changed():
             session.commit(node_id)
             session.push(node_id, check_data=False)
 
 
 def sanitize_deps_graph(remove_nodes: bool = False):
     """ Cleanup the replication graph
     """
     if session and session.state['STATE'] == STATE_ACTIVE:
         start = utils.current_milli_time()
         rm_cpt = 0
         for node_key in session.list():
             node = session.get(node_key)
             if node is None \
                     or (node.state == UP and not node.resolve(construct=False)):
                 if remove_nodes:
@@ -998,7 +1095,7 @@ def sanitize_deps_graph(remove_nodes: bool = False):
     logging.info(f"Sanitize took {utils.current_milli_time()-start} ms")
 
 
 @persistent
 def resolve_deps_graph(dummy):
     """Resolve deps graph
 
@@ -1009,13 +1106,13 @@ def resolve_deps_graph(dummy):
     if session and session.state['STATE'] == STATE_ACTIVE:
         sanitize_deps_graph(remove_nodes=True)
 
 
 @persistent
 def load_pre_handler(dummy):
     if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
         bpy.ops.session.stop()
 
 
 @persistent
 def update_client_frame(scene):
     if session and session.state['STATE'] == STATE_ACTIVE:
         session.update_user_metadata({
@@ -1023,13 +1120,13 @@ def update_client_frame(scene):
             'frame_current': scene.frame_current
         })
 
 
 @persistent
 def depsgraph_evaluation(scene):
     if session and session.state['STATE'] == STATE_ACTIVE:
         context = bpy.context
         blender_depsgraph = bpy.context.view_layer.depsgraph
         dependency_updates = [u for u in blender_depsgraph.updates]
         settings = utils.get_preferences()
 
         update_external_dependencies()
 
@@ -1038,8 +1135,8 @@ def depsgraph_evaluation(scene):
             # Is the object tracked?
             if update.id.uuid:
                 # Retrieve local version
                 node = session.get(uuid=update.id.uuid)
 
                 # Check our rights on this update:
                 #   - if it's ours or (under common and diff), launch the
                 #     update process
@@ -1055,24 +1152,24 @@ def depsgraph_evaluation(scene):
                         if not node.is_valid():
                             session.remove(node.uuid)
                     except ContextError as e:
-                         logging.debug(e)
+                        logging.debug(e)
                     except Exception as e:
                         logging.error(e)
                 else:
                     continue
             # A new scene is created
             elif isinstance(update.id, bpy.types.Scene):
                 ref = session.get(reference=update.id)
                 if ref:
                     ref.resolve()
                 else:
                     scn_uuid = session.add(update.id)
                     session.commit(scn_uuid)
                     session.push(scn_uuid, check_data=False)
 
 
 def register():
     from bpy.utils import register_class
     for cls in classes:
         register_class(cls)
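
Note on the payload consumed above: a minimal sketch of the snapshot "users"
structure that draw_users_as_curves and draw_users_meshes expect. Field names
are taken from the code in this patch; the exact schema is owned by the
replication layer, and the sample values are only illustrative.

    users = {
        "alice": {
            "metadata": {
                # Seven points: 0-3 the view rectangle, 4-5 the sight line,
                # 6 the frustum apex. Lists rather than tuples, because
                # draw_users_as_curves pads each point in place with a w value.
                "view_corners": [
                    [0.0, 0.0, 0.0], [1.0, 0.0, 0.0],
                    [1.0, 1.0, 0.0], [0.0, 1.0, 0.0],
                    [0.5, 0.5, 0.0], [0.5, 0.5, 1.0],
                    [0.5, 0.5, -1.0],
                ],
                # RGBA, written to the material's Emission input.
                "color": [0.8, 0.2, 0.2, 1.0],
                "frame_current": 42,
            },
        },
    }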