Compare commits

...

2 Commits

Author SHA1 Message Date
Swann
5ffb05f46a
feat: draw user curve 2021-04-08 11:22:01 +02:00
Swann
5817c9110b
feat: basic collection loading 2021-04-07 10:06:38 +02:00
3 changed files with 238 additions and 87 deletions

View File

@ -69,7 +69,7 @@ class BlMesh(BlDatablock):
loader.load(target, data)
# MATERIAL SLOTS
src_materials = data.get('materials', None)
src_materials = data.get('materials', data.get('material_list'))
if src_materials:
load_materials_slots(src_materials, target.materials)

View File

@ -56,6 +56,10 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
for attr in attributes:
attr_type = collection[0].bl_rna.properties.get(attr).type
if attr not in dikt:
logging.warning(f"No data for {attr}, skipping.")
continue
if attr_type in PRIMITIVE_TYPES:
np_load_collection_primitives(collection, attr, dikt[attr])
elif attr_type == 'ENUM':

View File

@ -39,6 +39,7 @@ except ImportError:
import pickle
import bpy
import bmesh
import mathutils
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
@ -56,6 +57,7 @@ background_execution_queue = Queue()
deleyables = []
stop_modal_executor = False
def session_callback(name):
""" Session callback wrapper
@ -137,7 +139,8 @@ def on_connection_end(reason="none"):
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
bpy.ops.session.notify(
'INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS
@ -270,7 +273,8 @@ class SessionStartOperator(bpy.types.Operator):
# Background client updates service
deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
deleyables.append(timers.ApplyTimer(
timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer(
# queue=stagging,
# timeout=settings.depsgraph_update_rate
@ -288,8 +292,6 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(session_update)
deleyables.append(session_user_sync)
self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
@ -635,6 +637,7 @@ class SessionCommit(bpy.types.Operator):
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator"
@ -706,6 +709,7 @@ class SessionClearCache(bpy.types.Operator):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references"
bl_idname = "session.purge"
@ -750,7 +754,6 @@ class SessionNotifyOperator(bpy.types.Operator):
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
@ -796,6 +799,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
def poll(cls, context):
return session.state['STATE'] == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
@ -827,79 +831,222 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
maxlen=255, # Max internal buffer length, longer would be clamped.
)
# Load the snapshot into its own collection instead of the scene root.
load_to_collection: bpy.props.BoolProperty(
    name="Load to collection",
    description="Load the snapshot into a collection",
    default=False,
)
# Visualize each user's viewport recorded in the snapshot.
draw_users: bpy.props.BoolProperty(
    name="Draw users",
    description="Draw a mesh representing each user position and selected object",
    default=False,
)
# NOTE(review): presumably keys successive snapshot files as animation
# frames — confirm against execute().
animate: bpy.props.BoolProperty(
    name="Animate history",
    description="Animate history versions",
    default=False,
)
clear_datablocks: bpy.props.BoolProperty(
    name="Removes existing data",
    # typo fix: "exisitng" -> "existing"
    description="Remove all existing datablocks",
    default=True,
)
# Multi-selection support for the file browser (hidden from the UI).
files: bpy.props.CollectionProperty(
    type=bpy.types.OperatorFileListElement,
    options={'HIDDEN', 'SKIP_SAVE'},
)
def draw_users_as_curves(self, user_list):
    """Build a collection of curve objects visualizing each user's viewport.

    For every user entry carrying ``metadata``, a poly-curve object named
    after the user is built from the 7 ``view_corners`` points (frame
    corners, sight segment, frustum apex) and an emissive material tinted
    with the user's ``color`` is assigned to it.

    :param user_list: mapping ``username -> user data dict``; each entry is
        expected to carry ``metadata['view_corners']`` (7 XYZ coordinates)
        and ``metadata['color']``.
    :return: a new ``bpy`` collection holding one curve object per user.
    """
    users_collection = bpy.data.collections.new("users")
    for username, user_data in user_list.items():
        metadata = user_data.get('metadata', None)
        if not metadata:
            continue
        logging.info(f"Loading user {username}")
        # Curve creation.
        # Pad each corner to 4D (x, y, z, w) on a COPY: the original code
        # appended 0.0 to the metadata lists themselves, mutating the
        # caller's data (and corrupting it if the function ran twice).
        location = metadata.get('view_corners')
        positions = [list(coord) + [0.0] for coord in location]
        curve = bpy.data.curves.new(username, 'CURVE')
        obj = bpy.data.objects.new(username, curve)
        # Sight line between points 4 and 5.
        sight = curve.splines.new('POLY')
        sight.points.add(1)
        sight.points[0].co = positions[4]
        sight.points[1].co = positions[5]
        # Frame outline, closed by repeating the first corner.
        cadre = curve.splines.new('POLY')
        cadre.points.add(4)
        cadre.points[0].co = positions[0]
        cadre.points[1].co = positions[2]
        cadre.points[2].co = positions[1]
        cadre.points[3].co = positions[3]
        cadre.points[4].co = positions[0]
        # Frustum edges running through the apex (point 6).
        frust = curve.splines.new('POLY')
        frust.points.add(2)
        frust.points[0].co = positions[0]
        frust.points[1].co = positions[6]
        frust.points[2].co = positions[1]
        frust2 = curve.splines.new('POLY')
        frust2.points.add(2)
        frust2.points[0].co = positions[2]
        frust2.points[1].co = positions[6]
        frust2.points[2].co = positions[3]
        curve.bevel_depth = 0.02
        # Material creation: emission tinted with the user color.
        # NOTE(review): assumes node_tree.nodes[0] exposes an 'Emission'
        # input — confirm this holds for the target Blender version.
        color = metadata.get('color')
        material = bpy.data.materials.new(username)
        material.use_nodes = True
        material.node_tree.nodes[0].inputs['Emission'].default_value = color
        curve.materials.append(material)
        users_collection.objects.link(obj)
    return users_collection
def draw_users_meshes(self, user_list):
    """Draw each user's viewport as a wireframe mesh in the active scene.

    For every user entry carrying ``metadata``, a mesh object named after
    the user is built from the 7 ``view_corners`` vertices and linked to
    the current scene collection.

    :param user_list: mapping ``username -> user data dict``; each entry is
        expected to carry ``metadata['view_corners']`` (7 XYZ coordinates).
    """
    # Edge topology over the 7 view_corners vertices: frame outline,
    # sight segment (4-5) and frustum edges to the apex (6). Hoisted out
    # of the loop — it is loop-invariant.
    edges = ((1, 3), (2, 1), (3, 0),
             (2, 0), (4, 5), (1, 6),
             (2, 6), (3, 6), (0, 6))
    for username, user_data in user_list.items():
        metadata = user_data.get('metadata', None)
        if not metadata:
            continue
        logging.info(f"Loading user {username}")
        location = metadata.get('view_corners')
        positions = [tuple(coord) for coord in location]
        # Name the datablocks after the user (was a leftover "toto"
        # placeholder), consistent with draw_users_as_curves.
        mesh = bpy.data.meshes.new(username)
        obj = bpy.data.objects.new(username, mesh)
        bm = bmesh.new()
        for p in positions:
            bm.verts.new(p)
        bm.verts.ensure_lookup_table()
        for v1, v2 in edges:
            bm.edges.new((bm.verts[v1], bm.verts[v2]))
        bm.to_mesh(mesh)
        # Free the bmesh once written back — the original leaked it.
        bm.free()
        bpy.context.scene.collection.objects.link(obj)
def execute(self, context):
from replication.graph import ReplicationGraph
# TODO: add filechecks
try:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
except OSError as e:
f = open(self.filepath, "rb")
db = pickle.load(f)
if db:
logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes")
logging.info(f"{len(nodes)} Nodes to load")
# Initialisation
# init the factory with supported types
bpy_factory = ReplicatedDataFactory()
bpy_factory=ReplicatedDataFactory()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
type_module=getattr(bl_types, type)
name=[e.capitalize() for e in type.split('_')[1:]]
type_impl_name='Bl'+''.join(name)
type_module_class=getattr(type_module, type_impl_name)
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class)
# Optionnaly clear the scene
if self.clear_datablocks:
utils.clean_scene()
graph = ReplicationGraph()
dir_path=Path(self.filepath).parent
for db in self.files:
filepath=os.path.join(dir_path, db.name)
try:
f=gzip.open(filepath, "rb")
db=pickle.load(f)
except OSError as e:
f=open(filepath, "rb")
db=pickle.load(f)
if db:
created=os.path.getctime(filepath)
logging.info(f"Reading {filepath}")
nodes=db.get("nodes")
users=db.get("users")
users_collection = self.draw_users_as_curves(users)
logging.info(f"{len(nodes)} Nodes to load")
graph=ReplicationGraph()
for node, node_data in nodes:
node_type = node_data.get('str_type')
node_type=node_data.get('str_type')
impl = bpy_factory.get_implementation_from_net(node_type)
impl=bpy_factory.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
instance = impl(owner=node_data['owner'],
instance=impl(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
instance.store(graph)
instance.state = FETCHED
instance.state=FETCHED
logging.info("Graph succefully loaded")
utils.clean_scene()
# Find scene
scenes=[n for n in graph.values() if isinstance(
n, bl_types.bl_scene.BlScene)]
scene=scenes[0]
# collection_data = {
# 'instance_offset': [0.0, 0.0, 0.0],
# 'name': str(created),
# 'objects': scene.data['collection']['objects'],
# 'children': scene.data['collection']['children']}
# collection_node = bl_types.bl_collection.BlCollection()
# collection_node.dependencies = scene.dependencies
# collection_node.data = collection_data
# graph[collection_node.uuid] = collection_node
# del graph[scene.uuid]
scene.data['name']=str(created)
# Step 1: Construct nodes
for node in graph.list_ordered():
graph[node].resolve()
node_inst=graph[node]
try:
node_inst.instance=node_inst._construct(node_inst.data)
node_inst.instance.uuid=node_inst.uuid
except Exception as e:
continue
# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].state=FETCHED
graph[node].apply()
scene.instance.collection.children.link(users_collection)
# bpy.context.scene.collection.children.link(collection_node.instance)
return {'FINISHED'}
@classmethod
@ classmethod
def poll(cls, context):
return True
def menu_func_import(self, context):
self.layout.operator(SessionLoadSaveOperator.bl_idname, text='Multi-user session snapshot (.db)')
self.layout.operator(SessionLoadSaveOperator.bl_idname,
text='Multi-user session snapshot (.db)')
classes = (
classes=(
SessionStartOperator,
SessionStopOperator,
SessionPropertyRemoveOperator,
@ -920,22 +1067,22 @@ classes = (
)
def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
nodes_ids=session.list(filter=bl_types.bl_file.BlFile)
for node_id in nodes_ids:
node = session.get(node_id)
node=session.get(node_id)
if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed():
session.commit(node_id)
session.push(node_id, check_data=False)
def sanitize_deps_graph(remove_nodes: bool = False):
def sanitize_deps_graph(remove_nodes: bool=False):
""" Cleanup the replication graph
"""
if session and session.state['STATE'] == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
start=utils.current_milli_time()
rm_cpt=0
for node_key in session.list():
node = session.get(node_key)
node=session.get(node_key)
if node is None \
or (node.state == UP and not node.resolve(construct=False)):
if remove_nodes:
@ -948,7 +1095,7 @@ def sanitize_deps_graph(remove_nodes: bool = False):
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms")
@persistent
@ persistent
def resolve_deps_graph(dummy):
"""Resolve deps graph
@ -959,13 +1106,13 @@ def resolve_deps_graph(dummy):
if session and session.state['STATE'] == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)
@persistent
@ persistent
def load_pre_handler(dummy):
if session and session.state['STATE'] in [STATE_ACTIVE, STATE_SYNCING]:
bpy.ops.session.stop()
@persistent
@ persistent
def update_client_frame(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
session.update_user_metadata({
@ -973,13 +1120,13 @@ def update_client_frame(scene):
})
@persistent
@ persistent
def depsgraph_evaluation(scene):
if session and session.state['STATE'] == STATE_ACTIVE:
context = bpy.context
blender_depsgraph = bpy.context.view_layer.depsgraph
dependency_updates = [u for u in blender_depsgraph.updates]
settings = utils.get_preferences()
context=bpy.context
blender_depsgraph=bpy.context.view_layer.depsgraph
dependency_updates=[u for u in blender_depsgraph.updates]
settings=utils.get_preferences()
update_external_dependencies()
@ -988,7 +1135,7 @@ def depsgraph_evaluation(scene):
# Is the object tracked ?
if update.id.uuid:
# Retrieve local version
node = session.get(uuid=update.id.uuid)
node=session.get(uuid=update.id.uuid)
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
@ -1012,11 +1159,11 @@ def depsgraph_evaluation(scene):
continue
# A new scene is created
elif isinstance(update.id, bpy.types.Scene):
ref = session.get(reference=update.id)
ref=session.get(reference=update.id)
if ref:
ref.resolve()
else:
scn_uuid = session.add(update.id)
scn_uuid=session.add(update.id)
session.commit(scn_uuid)
session.push(scn_uuid, check_data=False)
def register():