Compare commits

...

2 Commits

Author SHA1 Message Date
Swann
5ffb05f46a
feat: draw user curve 2021-04-08 11:22:01 +02:00
Swann
5817c9110b
feat: basic collection loading 2021-04-07 10:06:38 +02:00
3 changed files with 238 additions and 87 deletions

View File

@ -69,7 +69,7 @@ class BlMesh(BlDatablock):
loader.load(target, data)
# MATERIAL SLOTS
src_materials = data.get('materials', None)
src_materials = data.get('materials', data.get('material_list'))
if src_materials:
load_materials_slots(src_materials, target.materials)

View File

@ -56,6 +56,10 @@ def np_load_collection(dikt: dict, collection: bpy.types.CollectionProperty, att
for attr in attributes:
attr_type = collection[0].bl_rna.properties.get(attr).type
if attr not in dikt:
logging.warning(f"No data for {attr}, skipping.")
continue
if attr_type in PRIMITIVE_TYPES:
np_load_collection_primitives(collection, attr, dikt[attr])
elif attr_type == 'ENUM':

View File

@ -39,6 +39,7 @@ except ImportError:
import pickle
import bpy
import bmesh
import mathutils
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
@ -56,6 +57,7 @@ background_execution_queue = Queue()
deleyables = []
stop_modal_executor = False
def session_callback(name):
""" Session callback wrapper
@ -137,7 +139,8 @@ def on_connection_end(reason="none"):
if isinstance(handler, logging.FileHandler):
logger.removeHandler(handler)
if reason != "user":
bpy.ops.session.notify('INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
bpy.ops.session.notify(
'INVOKE_DEFAULT', message=f"Disconnected from session. Reason: {reason}. ")
# OPERATORS
@ -270,7 +273,8 @@ class SessionStartOperator(bpy.types.Operator):
# Background client updates service
deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
deleyables.append(timers.ApplyTimer(
timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer(
# queue=stagging,
# timeout=settings.depsgraph_update_rate
@ -288,8 +292,6 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(session_update)
deleyables.append(session_user_sync)
self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
@ -635,6 +637,7 @@ class SessionCommit(bpy.types.Operator):
self.report({'ERROR'}, repr(e))
return {"CANCELED"}
class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator"
@ -706,6 +709,7 @@ class SessionClearCache(bpy.types.Operator):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")
class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references"
bl_idname = "session.purge"
@ -750,7 +754,6 @@ class SessionNotifyOperator(bpy.types.Operator):
layout = self.layout
layout.row().label(text=self.message)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
@ -796,6 +799,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
def poll(cls, context):
return session.state['STATE'] == STATE_ACTIVE
class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
@ -827,26 +831,121 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
maxlen=255, # Max internal buffer length, longer would be clamped.
)
load_to_collection: bpy.props.BoolProperty(
name="Load to collection",
description="Load the snapshot into a collection",
default=False,
)
draw_users: bpy.props.BoolProperty(
name="Draw users",
description="Draw a mesh representing each user position and selected object",
default=False,
)
animate: bpy.props.BoolProperty(
name="Animate history",
description="Animate history versions",
default=False,
)
clear_datablocks: bpy.props.BoolProperty(
name="Removes existing data",
description="Remove all exisitng datablocks",
default=True,
)
files: bpy.props.CollectionProperty(
type=bpy.types.OperatorFileListElement,
options={'HIDDEN', 'SKIP_SAVE'},
)
def draw_users_as_curves(self, user_list):
    """Build a collection of curve objects visualizing each user's viewport.

    :param user_list: mapping of username -> user data dict; each user's
        'metadata' (when present) must provide 'view_corners' (7 corner
        points — presumably sight line endpoints [4], [5], frame corners
        [0]-[3] and frustum apex [6]; TODO confirm against the sender) and
        'color' (node socket default value).
    :return: a newly created ``bpy`` collection named "users" containing one
        bevelled poly-curve object per user that has metadata.
    """
    users_collection = bpy.data.collections.new("users")
    for username, user_data in user_list.items():
        metadata = user_data.get('metadata')
        if not metadata:
            continue
        logging.info(f"Loading user {username}")

        # Curve creation.
        # Copy each corner and append w=0.0 on the copy: spline point
        # `co` is 4-component, and copying (instead of appending to the
        # original lists) avoids mutating the caller's metadata in place.
        location = metadata.get('view_corners')
        positions = [list(coord) + [0.0] for coord in location]

        curve = bpy.data.curves.new(username, 'CURVE')
        obj = bpy.data.objects.new(username, curve)

        # Sight line between points 4 and 5.
        sight = curve.splines.new('POLY')
        sight.points.add(1)
        sight.points[0].co = positions[4]
        sight.points[1].co = positions[5]

        # View frame: closed rectangle through corners 0-2-1-3 back to 0.
        cadre = curve.splines.new('POLY')
        cadre.points.add(4)
        cadre.points[0].co = positions[0]
        cadre.points[1].co = positions[2]
        cadre.points[2].co = positions[1]
        cadre.points[3].co = positions[3]
        cadre.points[4].co = positions[0]

        # Frustum edges joining opposite frame corners through the apex (6).
        frust = curve.splines.new('POLY')
        frust.points.add(2)
        frust.points[0].co = positions[0]
        frust.points[1].co = positions[6]
        frust.points[2].co = positions[1]

        frust2 = curve.splines.new('POLY')
        frust2.points.add(2)
        frust2.points[0].co = positions[2]
        frust2.points[1].co = positions[6]
        frust2.points[2].co = positions[3]

        curve.bevel_depth = 0.02

        # Material creation: emissive node material tinted with the
        # user's color, assigned to the curve datablock.
        color = metadata.get('color')
        material = bpy.data.materials.new(username)
        material.use_nodes = True
        material.node_tree.nodes[0].inputs['Emission'].default_value = color
        curve.materials.append(material)

        users_collection.objects.link(obj)
    return users_collection
def draw_users_meshes(self, user_list):
    """Draw a wireframe mesh representing each user's viewport frustum.

    :param user_list: mapping of username -> user data dict; each user's
        'metadata' (when present) must provide 'view_corners' (7 corner
        points — TODO confirm layout against the sender).

    Mesh objects are linked directly into the active scene collection.
    """
    for username, user_data in user_list.items():
        metadata = user_data.get('metadata')
        if not metadata:
            continue
        logging.info(f"Loading user {username}")

        location = metadata.get('view_corners')
        positions = [tuple(coord) for coord in location]

        # Wireframe topology: frame rectangle (edges among 0-3), sight
        # line (4, 5) and edges joining each frame corner to the apex (6).
        edges = ((1, 3), (2, 1), (3, 0),
                 (2, 0), (4, 5), (1, 6),
                 (2, 6), (3, 6), (0, 6))

        # Name datablocks after the user (replaces the "toto" placeholder),
        # consistent with draw_users_as_curves.
        mesh = bpy.data.meshes.new(username)
        obj = bpy.data.objects.new(username, mesh)

        bm = bmesh.new()
        for p in positions:
            bm.verts.new(p)
        bm.verts.ensure_lookup_table()
        for v1, v2 in edges:
            bm.edges.new((bm.verts[v1], bm.verts[v2]))
        bm.to_mesh(mesh)

        bpy.context.scene.collection.objects.link(obj)
def execute(self, context):
from replication.graph import ReplicationGraph
# TODO: add filechecks
try:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
except OSError as e:
f = open(self.filepath, "rb")
db = pickle.load(f)
if db:
logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes")
logging.info(f"{len(nodes)} Nodes to load")
# Initialisation
# init the factory with supported types
bpy_factory=ReplicatedDataFactory()
for type in bl_types.types_to_register():
@ -859,6 +958,29 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
bpy_factory.register_type(
type_module_class.bl_class,
type_module_class)
# Optionnaly clear the scene
if self.clear_datablocks:
utils.clean_scene()
dir_path=Path(self.filepath).parent
for db in self.files:
filepath=os.path.join(dir_path, db.name)
try:
f=gzip.open(filepath, "rb")
db=pickle.load(f)
except OSError as e:
f=open(filepath, "rb")
db=pickle.load(f)
if db:
created=os.path.getctime(filepath)
logging.info(f"Reading {filepath}")
nodes=db.get("nodes")
users=db.get("users")
users_collection = self.draw_users_as_curves(users)
logging.info(f"{len(nodes)} Nodes to load")
graph=ReplicationGraph()
@ -878,16 +1000,40 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
logging.info("Graph succefully loaded")
utils.clean_scene()
# Find scene
scenes=[n for n in graph.values() if isinstance(
n, bl_types.bl_scene.BlScene)]
scene=scenes[0]
# collection_data = {
# 'instance_offset': [0.0, 0.0, 0.0],
# 'name': str(created),
# 'objects': scene.data['collection']['objects'],
# 'children': scene.data['collection']['children']}
# collection_node = bl_types.bl_collection.BlCollection()
# collection_node.dependencies = scene.dependencies
# collection_node.data = collection_data
# graph[collection_node.uuid] = collection_node
# del graph[scene.uuid]
scene.data['name']=str(created)
# Step 1: Construct nodes
for node in graph.list_ordered():
graph[node].resolve()
node_inst=graph[node]
try:
node_inst.instance=node_inst._construct(node_inst.data)
node_inst.instance.uuid=node_inst.uuid
except Exception as e:
continue
# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].state=FETCHED
graph[node].apply()
scene.instance.collection.children.link(users_collection)
# bpy.context.scene.collection.children.link(collection_node.instance)
return {'FINISHED'}
@ -896,7 +1042,8 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
return True
def menu_func_import(self, context):
    """File > Import menu entry registering the snapshot loader operator."""
    # Single registration: the flattened diff showed both the old and the
    # re-wrapped call, which would add the menu entry twice.
    self.layout.operator(SessionLoadSaveOperator.bl_idname,
                         text='Multi-user session snapshot (.db)')
classes=(