feat: basic collection loading
parent 5e30e215ab
commit 5817c9110b
@@ -827,26 +827,33 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    load_to_collection: bpy.props.BoolProperty(
        name="Load to collection",
        description="Load the snapshot into a collection",
        default=False,
    )

    draw_users: bpy.props.BoolProperty(
        name="Draw users",
        description="Draw a mesh representing each user position and selected object",
        default=False,
    )

    clear_datablocks: bpy.props.BoolProperty(
        name="Removes existing data",
        description="Remove all existing datablocks",
        default=True,
    )

    files: bpy.props.CollectionProperty(
        type=bpy.types.OperatorFileListElement,
        options={'HIDDEN', 'SKIP_SAVE'},
    )
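
    # ImportHelper fills `filepath` from the file browser, and the `files`
    # collection receives one OperatorFileListElement per selected file,
    # so several snapshot files can be loaded in a single run.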

    def execute(self, context):
        from replication.graph import ReplicationGraph

        # TODO: add filechecks

        try:
            f = gzip.open(self.filepath, "rb")
            db = pickle.load(f)
        except OSError as e:
            f = open(self.filepath, "rb")
            db = pickle.load(f)

        if db:
            logging.info(f"Reading {self.filepath}")
            nodes = db.get("nodes")

            logging.info(f"{len(nodes)} Nodes to load")

            # Initialisation
            # init the factory with supported types
            bpy_factory = ReplicatedDataFactory()
            for type in bl_types.types_to_register():
@@ -859,6 +866,29 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
                bpy_factory.register_type(
                    type_module_class.bl_class,
                    type_module_class)
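
        # With every supported type registered, the factory can map each
        # pickled node back to its replicated implementation.
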
        # Optionally clear the scene
        if self.clear_datablocks:
            utils.clean_scene()

        dir_path = Path(self.filepath).parent

        for db in self.files:
            filepath = os.path.join(dir_path, db.name)
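
            # Snapshots are stored as gzip-compressed pickles; fall back to a
            # plain pickle read if the file is not gzip-compressed.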
            try:
                f = gzip.open(filepath, "rb")
                db = pickle.load(f)
            except OSError as e:
                f = open(filepath, "rb")
                db = pickle.load(f)

            if db:
                created = os.path.getctime(filepath)
                logging.info(f"Reading {filepath}")
                nodes = db.get("nodes")

                logging.info(f"{len(nodes)} Nodes to load")

                graph = ReplicationGraph()
@@ -878,16 +908,36 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):

                logging.info("Graph successfully loaded")

                utils.clean_scene()
                # Find scene
                scenes = [n for n in graph.values() if isinstance(n, bl_types.bl_scene.BlScene)]
                scene = scenes[0]
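
                # Repackage the snapshot's scene content as a collection named
                # after the file's creation time, so each snapshot loads into
                # its own collection instead of replacing the current scene.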
                collection_data = {
                    'instance_offset': [0.0, 0.0, 0.0],
                    'name': str(created),
                    'objects': scene.data['collection']['objects'],
                    'children': scene.data['collection']['children']}
                collection_node = bl_types.bl_collection.BlCollection()
                collection_node.dependencies = scene.dependencies
                collection_node.data = collection_data
                graph[collection_node.uuid] = collection_node
                del graph[scene.uuid]

                # Step 1: Construct nodes
                for node in graph.list_ordered():
                    graph[node].resolve()

                    node_inst = graph[node]
                    try:
                        node_inst.instance = node_inst._construct(node_inst.data)
                        node_inst.instance.uuid = node_inst.uuid
                    except Exception as e:
                        continue

                # Step 2: Load nodes
                for node in graph.list_ordered():
                    graph[node].state = FETCHED
                    graph[node].apply()
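
                # Every node has now been constructed and applied; link the
                # rebuilt collection into the active scene to make it visible.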
                bpy.context.scene.collection.children.link(collection_node.instance)

        return {'FINISHED'}