refactor: protocol refactoring part 1 (mesh, object, action, scene)

This commit is contained in:
Swann 2021-05-18 23:14:09 +02:00
parent 26140eefb2
commit ffb70ab74c
No known key found for this signature in database
GPG Key ID: E1D3641A7C43AACB
32 changed files with 500 additions and 545 deletions

View File

@ -49,8 +49,18 @@ if bpy.app.version[1] >= 91:
__all__.append('bl_volume')
from . import *
from replication.data import DataTranslationProtocol
def types_to_register():
return __all__
from replication.protocol import DataTranslationProtocol
def get_data_translation_protocol() -> DataTranslationProtocol:
    """Build a data translation protocol from the implemented bpy types.

    Scans every module named in ``__all__`` and registers each one that
    exposes both a ``_type`` (the bpy type) and a ``_class`` (its
    ReplicatedDatablock implementation).

    :return: the populated protocol
    :rtype: DataTranslationProtocol
    """
    bpy_protocol = DataTranslationProtocol()
    for module_name in __all__:
        impl = globals().get(module_name)
        # Bug fix: the second hasattr previously re-checked "_type";
        # "_class" is what register_implementation consumes below.
        if impl and hasattr(impl, "_type") and hasattr(impl, "_class"):
            bpy_protocol.register_implementation(impl._type, impl._class)
    return bpy_protocol

View File

@ -25,8 +25,8 @@ from enum import Enum
from .. import utils
from .dump_anything import (
Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock, has_action, has_driver, dump_driver, load_driver
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
KEYFRAME = [
'amplitude',
@ -41,6 +41,66 @@ KEYFRAME = [
'interpolation',
]
def has_action(datablock):
    """Return the action assigned to *datablock*'s animation data.

    Falsy result (False/None) means the datablock has no animation data
    or no action attached.
    """
    if not hasattr(datablock, 'animation_data'):
        return False
    anim = datablock.animation_data
    return anim and anim.action
def has_driver(datablock):
    """Return *datablock*'s driver collection if it is driven.

    Falsy result (False/None/empty) means the datablock has no animation
    data or no drivers.
    """
    if not hasattr(datablock, 'animation_data'):
        return False
    anim = datablock.animation_data
    return anim and anim.drivers
def dump_driver(driver):
    """Serialize *driver* into a plain dict using a depth-6 Dumper."""
    dumper = Dumper()
    dumper.depth = 6
    return dumper.dump(driver)
def load_driver(target_datablock, src_driver):
    """Rebuild a driver FCurve on *target_datablock* from a dumped driver dict.

    NOTE(review): assumes target_datablock.animation_data already exists —
    confirm callers create it before invoking this.
    """
    loader = Loader()
    drivers = target_datablock.animation_data.drivers
    src_driver_data = src_driver['driver']
    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])

    # Settings: type and expression are set explicitly, the rest via loader.
    new_driver.driver.type = src_driver_data['type']
    new_driver.driver.expression = src_driver_data['expression']
    loader.load(new_driver, src_driver)

    # Variables: recreate each dumped variable and its targets.
    for src_variable in src_driver_data['variables']:
        src_var_data = src_driver_data['variables'][src_variable]
        new_var = new_driver.driver.variables.new()
        new_var.name = src_var_data['name']
        new_var.type = src_var_data['type']

        for src_target in src_var_data['targets']:
            src_target_data = src_var_data['targets'][src_target]
            src_id = src_target_data.get('id')
            # Re-link the target's id datablock before loading its other fields.
            if src_id:
                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
            loader.load(new_var.targets[src_target], src_target_data)

    # Fcurve: clear the auto-created keyframes (reversed so removal does not
    # shift indices), then pre-allocate and fill from the dumped points.
    new_fcurve = new_driver.keyframe_points
    for p in reversed(new_fcurve):
        new_fcurve.remove(p, fast=True)

    new_fcurve.add(len(src_driver['keyframe_points']))
    for index, src_point in enumerate(src_driver['keyframe_points']):
        new_point = new_fcurve[index]
        loader.load(new_point, src_driver['keyframe_points'][src_point])
def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
""" Dump a sigle curve to a dict
@ -198,26 +258,28 @@ def resolve_animation_dependencies(datablock):
return []
class BlAction(BlDatablock):
class BlAction(ReplicatedDatablock):
bl_id = "actions"
bl_class = bpy.types.Action
bl_check_common = False
bl_icon = 'ACTION_TWEAK'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.actions.new(data["name"])
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
for dumped_fcurve in data["fcurves"]:
dumped_data_path = dumped_fcurve["data_path"]
dumped_array_index = dumped_fcurve["dumped_array_index"]
# create fcurve if needed
fcurve = target.fcurves.find(
fcurve = datablock.fcurves.find(
dumped_data_path, index=dumped_array_index)
if fcurve is None:
fcurve = target.fcurves.new(
fcurve = datablock.fcurves.new(
dumped_data_path, index=dumped_array_index)
load_fcurve(dumped_fcurve, fcurve)
@ -225,9 +287,10 @@ class BlAction(BlDatablock):
id_root = data.get('id_root')
if id_root:
target.id_root = id_root
datablock.id_root = id_root
def _dump_implementation(self, data, instance=None):
@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.exclude_filter = [
'name_full',
@ -242,11 +305,26 @@ class BlAction(BlDatablock):
'users'
]
dumper.depth = 1
data = dumper.dump(instance)
data = dumper.dump(datablock)
data["fcurves"] = []
for fcurve in instance.fcurves:
for fcurve in datablock.fcurves:
data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
return data
@staticmethod
def resolve(data: dict) -> object:
    """Find the local bpy action matching *data*.

    Bug fix: the name-based fallback previously discarded its result, so
    resolve() always returned None. Mirror BlMesh.resolve: assign the
    fallback lookup and return the datablock.
    """
    uuid = data.get('uuid')
    name = data.get('name')
    datablock = resolve_datablock_from_uuid(uuid, bpy.data.actions)
    if datablock is None:
        datablock = bpy.data.actions.get(name)
    return datablock
@staticmethod
def resolve_deps(datablock: object) -> [object]:
    # Actions reference no other datablocks.
    return []
_type = bpy.types.Action
_class = BlAction

View File

@ -22,7 +22,7 @@ import mathutils
from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
def get_roll(bone: bpy.types.Bone) -> float:
@ -35,17 +35,17 @@ def get_roll(bone: bpy.types.Bone) -> float:
return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]
class BlArmature(BlDatablock):
class BlArmature(ReplicatedDatablock):
bl_id = "armatures"
bl_class = bpy.types.Armature
bl_check_common = False
bl_icon = 'ARMATURE_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.armatures.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
# Load parent object
parent_object = utils.find_from_attr(
'uuid',
@ -119,7 +119,7 @@ class BlArmature(BlDatablock):
if 'EDIT' in current_mode:
bpy.ops.object.mode_set(mode='EDIT')
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()

View File

@ -20,21 +20,21 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlCamera(BlDatablock):
class BlCamera(ReplicatedDatablock):
bl_id = "cameras"
bl_class = bpy.types.Camera
bl_check_common = False
bl_icon = 'CAMERA_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.cameras.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
@ -61,7 +61,7 @@ class BlCamera(BlDatablock):
loader.load(target_img.image_user, img_user)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
# TODO: background image support
@ -119,7 +119,7 @@ class BlCamera(BlDatablock):
if image.image_user:
data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
for background in self.instance.background_images:
if background.image:

View File

@ -20,7 +20,7 @@ import bpy
import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Loader, Dumper
@ -81,14 +81,14 @@ def resolve_collection_dependencies(collection):
return deps
class BlCollection(BlDatablock):
class BlCollection(ReplicatedDatablock):
bl_id = "collections"
bl_icon = 'FILE_FOLDER'
bl_class = bpy.types.Collection
bl_check_common = True
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.collections = [
@ -101,7 +101,7 @@ class BlCollection(BlDatablock):
instance = bpy.data.collections.new(data["name"])
return instance
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
@ -115,7 +115,7 @@ class BlCollection(BlDatablock):
# Keep other user from deleting collection object by flushing their history
utils.flush_history()
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
@ -134,5 +134,5 @@ class BlCollection(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
return resolve_collection_dependencies(self.instance)

View File

@ -22,11 +22,10 @@ import mathutils
import logging
from .. import utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import (Dumper, Loader,
np_load_collection,
np_dump_collection)
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
SPLINE_BEZIER_POINT = [
@ -134,17 +133,17 @@ SPLINE_METADATA = [
]
class BlCurve(BlDatablock):
class BlCurve(ReplicatedDatablock):
bl_id = "curves"
bl_class = bpy.types.Curve
bl_check_common = False
bl_icon = 'CURVE_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.curves.new(data["name"], data["type"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
@ -175,7 +174,7 @@ class BlCurve(BlDatablock):
if src_materials:
load_materials_slots(src_materials, target.materials)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
# Conflicting attributes
@ -222,7 +221,7 @@ class BlCurve(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []
curve = self.instance

View File

@ -22,73 +22,11 @@ from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader
def has_action(target):
""" Check if the target datablock has actions
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.action)
def has_driver(target):
""" Check if the target datablock is driven
"""
return (hasattr(target, 'animation_data')
and target.animation_data
and target.animation_data.drivers)
def dump_driver(driver):
dumper = Dumper()
dumper.depth = 6
data = dumper.dump(driver)
return data
def load_driver(target_datablock, src_driver):
loader = Loader()
drivers = target_datablock.animation_data.drivers
src_driver_data = src_driver['driver']
new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
loader.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
src_var_data = src_driver_data['variables'][src_variable]
new_var = new_driver.driver.variables.new()
new_var.name = src_var_data['name']
new_var.type = src_var_data['type']
for src_target in src_var_data['targets']:
src_target_data = src_var_data['targets'][src_target]
src_id = src_target_data.get('id')
if src_id:
new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
loader.load(new_var.targets[src_target], src_target_data)
# Fcurve
new_fcurve = new_driver.keyframe_points
for p in reversed(new_fcurve):
new_fcurve.remove(p, fast=True)
new_fcurve.add(len(src_driver['keyframe_points']))
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
loader.load(new_point, src_driver['keyframe_points'][src_point])
def get_datablock_from_uuid(uuid, default, ignore=[]):
if not uuid:
return default
@ -100,133 +38,30 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
return item
return default
def resolve_datablock_from_uuid(uuid, bpy_collection):
    """Return the first item of *bpy_collection* whose ``uuid`` attribute
    equals *uuid*, or None when no item matches."""
    matches = (item for item in bpy_collection
               if getattr(item, 'uuid', None) == uuid)
    return next(matches, None)
class BlDatablock(ReplicatedDatablock):
"""BlDatablock
def resolve_from_root(data: dict, root: str, construct = True):
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
bl_id : blender internal storage identifier
bl_class : blender internal type
bl_icon : type icon (blender icon name)
bl_check_common: enable check even in common rights
bl_reload_parent: reload parent
"""
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
pass
if construct and not datablock_ref:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance', None)
self.preferences = utils.get_preferences()
# TODO: use is_library_indirect
self.is_library = (instance and hasattr(instance, 'library') and
instance.library) or \
(hasattr(self,'data') and self.data and 'library' in self.data)
if instance and hasattr(instance, 'uuid'):
instance.uuid = self.uuid
def resolve(self, construct = True):
datablock_root = getattr(bpy.data, self.bl_id)
datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)
if not datablock_ref:
try:
datablock_ref = datablock_root[self.data['name']]
except Exception:
pass
if construct and not datablock_ref:
name = self.data.get('name')
logging.debug(f"Constructing {name}")
datablock_ref = self._construct(data=self.data)
if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False
def remove_instance(self):
"""
Remove instance from blender data
"""
assert(self.instance)
datablock_root = getattr(bpy.data, self.bl_id)
datablock_root.remove(self.instance)
def _dump(self, instance=None):
dumper = Dumper()
data = {}
animation_data = {}
# Dump animation data
if has_action(instance):
animation_data['action'] = instance.animation_data.action.name
if has_driver(instance):
animation_data['drivers'] = []
for driver in instance.animation_data.drivers:
animation_data['drivers'].append(dump_driver(driver))
if animation_data:
data['animation_data'] = animation_data
if self.is_library:
data.update(dumper.dump(instance))
else:
data.update(self._dump_implementation(data, instance=instance))
return data
def _dump_implementation(self, data, target):
raise NotImplementedError
def _load(self, data, target):
# Load animation data
if 'animation_data' in data.keys():
if target.animation_data is None:
target.animation_data_create()
for d in target.animation_data.drivers:
target.animation_data.drivers.remove(d)
if 'drivers' in data['animation_data']:
for driver in data['animation_data']['drivers']:
load_driver(target, driver)
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
elif target.animation_data.action:
target.animation_data.action = None
# Remove existing animation data if there is not more to load
elif hasattr(target, 'animation_data') and target.animation_data:
target.animation_data_clear()
if self.is_library:
return
else:
self._load_implementation(data, target)
def _load_implementation(self, data, target):
raise NotImplementedError
def resolve_deps(self):
dependencies = []
if has_action(self.instance):
dependencies.append(self.instance.animation_data.action)
if not self.is_library:
dependencies.extend(self._resolve_deps_implementation())
logging.debug(f"{self.instance} dependencies: {dependencies}")
return dependencies
def _resolve_deps_implementation(self):
return []
def is_valid(self):
return getattr(bpy.data, self.bl_id).get(self.data['name'])
if datablock_ref is not None:
setattr(datablock_ref, 'uuid', self.uuid)
self.instance = datablock_ref
return True
else:
return False

View File

@ -24,7 +24,7 @@ from pathlib import Path
import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock
from .. import utils
from .dump_anything import Dumper, Loader

View File

@ -22,19 +22,19 @@ from pathlib import Path
import bpy
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
class BlFont(BlDatablock):
class BlFont(ReplicatedDatablock):
bl_id = "fonts"
bl_class = bpy.types.VectorFont
bl_check_common = False
bl_icon = 'FILE_FONT'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
filename = data.get('filename')
if filename == '<builtin>':
@ -62,7 +62,7 @@ class BlFont(BlDatablock):
def diff(self):
return False
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)

View File

@ -24,7 +24,7 @@ from .dump_anything import (Dumper,
Loader,
np_dump_collection,
np_load_collection)
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
# GPencil data api is structured as it follow:
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points
@ -228,17 +228,17 @@ def load_layer(layer_data, layer):
load_frame(frame_data, target_frame)
class BlGpencil(BlDatablock):
class BlGpencil(ReplicatedDatablock):
bl_id = "grease_pencils"
bl_class = bpy.types.GreasePencil
bl_check_common = False
bl_icon = 'GREASEPENCIL'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.grease_pencils.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
target.materials.clear()
if "materials" in data.keys():
for mat in data['materials']:
@ -267,7 +267,7 @@ class BlGpencil(BlDatablock):
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
dumper.depth = 2
@ -290,7 +290,7 @@ class BlGpencil(BlDatablock):
data["eval_frame"] = bpy.context.scene.frame_current
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
for material in self.instance.materials:

View File

@ -24,7 +24,7 @@ import bpy
import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
@ -48,14 +48,14 @@ format_to_ext = {
}
class BlImage(BlDatablock):
class BlImage(ReplicatedDatablock):
bl_id = "images"
bl_class = bpy.types.Image
bl_check_common = False
bl_icon = 'IMAGE_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.images.new(
name=data['name'],
width=data['size'][0],
@ -105,7 +105,7 @@ class BlImage(BlDatablock):
else:
return None
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.packed_file:

View File

@ -20,23 +20,23 @@ import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from replication.exception import ContextError
POINT = ['co', 'weight_softbody', 'co_deform']
class BlLattice(BlDatablock):
class BlLattice(ReplicatedDatablock):
bl_id = "lattices"
bl_class = bpy.types.Lattice
bl_check_common = False
bl_icon = 'LATTICE_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.lattices.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
if target.is_editmode:
raise ContextError("lattice is in edit mode")
@ -45,7 +45,7 @@ class BlLattice(BlDatablock):
np_load_collection(data['points'], target.points, POINT)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
if instance.is_editmode:
raise ContextError("lattice is in edit mode")

View File

@ -20,17 +20,17 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlLibrary(BlDatablock):
class BlLibrary(ReplicatedDatablock):
bl_id = "libraries"
bl_class = bpy.types.Library
bl_check_common = False
bl_icon = 'LIBRARY_DATA_DIRECT'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
targetData = sourceData
return sourceData

View File

@ -20,24 +20,24 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlLight(BlDatablock):
class BlLight(ReplicatedDatablock):
bl_id = "lights"
bl_class = bpy.types.Light
bl_check_common = False
bl_icon = 'LIGHT_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.lights.new(data["name"], data["type"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
dumper.depth = 3

View File

@ -21,17 +21,17 @@ import mathutils
import logging
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlLightprobe(BlDatablock):
class BlLightprobe(ReplicatedDatablock):
bl_id = "lightprobes"
bl_class = bpy.types.LightProbe
bl_check_common = False
bl_icon = 'LIGHTPROBE_GRID'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
# See https://developer.blender.org/D6396
if bpy.app.version[1] >= 83:
@ -39,11 +39,11 @@ class BlLightprobe(BlDatablock):
else:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
if bpy.app.version[1] < 83:
logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

View File

@ -24,7 +24,9 @@ import re
from uuid import uuid4
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@ -389,17 +391,17 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
dst_materials.append(mat_ref)
class BlMaterial(BlDatablock):
class BlMaterial(ReplicatedDatablock):
bl_id = "materials"
bl_class = bpy.types.Material
bl_check_common = False
bl_icon = 'MATERIAL_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.materials.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
is_grease_pencil = data.get('is_grease_pencil')
@ -417,7 +419,7 @@ class BlMaterial(BlDatablock):
load_node_tree(data['node_tree'], target.node_tree)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
mat_dumper = Dumper()
mat_dumper.depth = 2
@ -486,7 +488,7 @@ class BlMaterial(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve node group deps
deps = []

View File

@ -25,8 +25,13 @@ import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from ..utils import get_preferences
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
VERTICE = ['co']
@ -49,76 +54,79 @@ POLYGON = [
'material_index',
]
class BlMesh(BlDatablock):
class BlMesh(ReplicatedDatablock):
bl_id = "meshes"
bl_class = bpy.types.Mesh
bl_check_common = False
bl_icon = 'MESH_DATA'
bl_reload_parent = True
def _construct(self, data):
instance = bpy.data.meshes.new(data["name"])
instance.uuid = self.uuid
@staticmethod
def construct(data: dict) -> object:
    # Create a fresh mesh and tag it with the replicated uuid so later
    # resolve() lookups can find it by uuid.
    instance = bpy.data.meshes.new(data.get("name"))
    instance.uuid = data.get("uuid")
    return instance
@staticmethod
def load(data: dict, datablock: object):
    """Load dumped mesh *data* into the mesh *datablock*.

    Raises ContextError when the mesh is missing or in edit mode (its
    geometry cannot be rebuilt then).

    Bug fix: animation data must be read from the dumped payload
    (``data.get``) — ``datablock.get`` would query the mesh's ID custom
    properties instead, which never hold the dumped animation.
    """
    if not datablock or datablock.is_editmode:
        raise ContextError
    else:
        load_animation_data(data.get('animation_data'), datablock)

    loader = Loader()
    loader.load(datablock, data)

    # MATERIAL SLOTS
    src_materials = data.get('materials', None)
    if src_materials:
        load_materials_slots(src_materials, datablock.materials)

    # CLEAR GEOMETRY: existing geometry must go before re-adding elements.
    if datablock.vertices:
        datablock.clear_geometry()

    datablock.vertices.add(data["vertex_count"])
    # NOTE(review): 'egdes_count' is the key the dump side writes; keep the
    # typo for wire compatibility.
    datablock.edges.add(data["egdes_count"])
    datablock.loops.add(data["loop_count"])
    datablock.polygons.add(data["poly_count"])

    # LOADING: bulk-load the numpy-dumped element attributes.
    np_load_collection(data['vertices'], datablock.vertices, VERTICE)
    np_load_collection(data['edges'], datablock.edges, EDGE)
    np_load_collection(data['loops'], datablock.loops, LOOP)
    np_load_collection(data["polygons"], datablock.polygons, POLYGON)

    # UV Layers: create missing layers, then load their packed 'uv' data.
    if 'uv_layers' in data.keys():
        for layer in data['uv_layers']:
            if layer not in datablock.uv_layers:
                datablock.uv_layers.new(name=layer)

            np_load_collection_primitives(
                datablock.uv_layers[layer].data,
                'uv',
                data["uv_layers"][layer]['data'])

    # Vertex color: same pattern as UV layers for 'color' data.
    if 'vertex_colors' in data.keys():
        for color_layer in data['vertex_colors']:
            if color_layer not in datablock.vertex_colors:
                datablock.vertex_colors.new(name=color_layer)

            np_load_collection_primitives(
                datablock.vertex_colors[color_layer].data,
                'color',
                data["vertex_colors"][color_layer]['data'])

    datablock.validate()
    datablock.update()
def _dump_implementation(self, data, instance=None):
assert(instance)
if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode:
@staticmethod
def dump(datablock: object) -> dict:
if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
raise ContextError("Mesh is in edit mode")
mesh = instance
mesh = datablock
dumper = Dumper()
dumper.depth = 1
@ -132,6 +140,8 @@ class BlMesh(BlDatablock):
data = dumper.dump(mesh)
data['animation_data'] = dump_animation_data(datablock)
# VERTICES
data["vertex_count"] = len(mesh.vertices)
data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -163,21 +173,37 @@ class BlMesh(BlDatablock):
data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
# Materials
data['materials'] = dump_materials_slots(instance.materials)
data['materials'] = dump_materials_slots(datablock.materials)
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
    """Collect the datablocks this mesh depends on: its non-empty
    material slots plus any action/driver animation dependencies."""
    deps = []

    # Skip empty material slots (None entries).
    for material in datablock.materials:
        if material:
            deps.append(material)

    deps.extend(resolve_animation_dependencies(datablock))

    return deps
@staticmethod
def resolve(data: dict) -> object:
    # Prefer the session uuid lookup; fall back to a name lookup when no
    # mesh carries the uuid. Returns None when neither matches.
    uuid = data.get('uuid')
    name = data.get('name')
    datablock = resolve_datablock_from_uuid(uuid, bpy.data.meshes)
    if datablock is None:
        datablock = bpy.data.meshes.get(name)

    return datablock
def diff(self):
    """Report no change while the user edits the mesh (unless edit-mode
    sync is enabled in the preferences); otherwise defer to the default
    diff implementation."""
    if 'EDIT' in bpy.context.mode \
            and not get_preferences().sync_flags.sync_during_editmode:
        return False
    else:
        return super().diff()
_type = bpy.types.Mesh
_class = BlMesh

View File

@ -23,7 +23,7 @@ from .dump_anything import (
Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
np_dump_collection, np_load_collection)
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
ELEMENT = [
@ -62,17 +62,17 @@ def load_metaball_elements(elements_data, elements):
np_load_collection(elements_data, elements, ELEMENT)
class BlMetaball(BlDatablock):
class BlMetaball(ReplicatedDatablock):
bl_id = "metaballs"
bl_class = bpy.types.MetaBall
bl_check_common = False
bl_icon = 'META_BALL'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.metaballs.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
@ -83,7 +83,7 @@ class BlMetaball(BlDatablock):
load_metaball_elements(data['elements'], target.elements)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
dumper.depth = 1

View File

@ -20,26 +20,26 @@ import bpy
import mathutils
from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (dump_node_tree,
load_node_tree,
get_node_tree_dependencies)
class BlNodeGroup(BlDatablock):
class BlNodeGroup(ReplicatedDatablock):
bl_id = "node_groups"
bl_class = bpy.types.NodeTree
bl_check_common = False
bl_icon = 'NODETREE'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.node_groups.new(data["name"], data["type"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
load_node_tree(data, target)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
return dump_node_tree(instance)
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
return get_node_tree_dependencies(self.instance)

View File

@ -22,7 +22,8 @@ import bpy
import mathutils
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import (
@ -44,6 +45,8 @@ SHAPEKEY_BLOCK_ATTR = [
'slider_min',
'slider_max',
]
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
@ -51,6 +54,7 @@ else:
logging.warning("Geometry node Float parameter not supported in \
blender 2.92.")
def get_node_group_inputs(node_group):
inputs = []
for inpt in node_group.inputs:
@ -89,6 +93,7 @@ def dump_physics(target: bpy.types.Object)->dict:
return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
@ -114,7 +119,8 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
@ -295,6 +301,7 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
def dump_shape_keys(target_key: bpy.types.Key)->dict:
""" Dump the target shape_keys datablock to a dict using numpy
@ -436,25 +443,17 @@ def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_
else:
logging.error("Could't load projector target object {projector_object}")
class BlObject(BlDatablock):
class BlObject(ReplicatedDatablock):
bl_id = "objects"
bl_class = bpy.types.Object
bl_check_common = False
bl_icon = 'OBJECT_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
instance = None
if self.is_library:
with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
targetData.objects = [
name for name in sourceData.objects if name == self.data['name']]
instance = bpy.data.objects[self.data['name']]
instance.uuid = self.uuid
return instance
# TODO: refactoring
object_name = data.get("name")
data_uuid = data.get("data_uuid")
@ -467,70 +466,71 @@ class BlObject(BlDatablock):
ignore=['images']) # TODO: use resolve_from_id
if data_type != 'EMPTY' and object_data is None:
raise Exception(f"Fail to load object {data['name']}({self.uuid})")
raise Exception(f"Fail to load object {data['name']})")
instance = bpy.data.objects.new(object_name, object_data)
instance.uuid = self.uuid
instance.uuid = data.get("uuid")
return instance
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
data_uuid = data.get("data_uuid")
data_id = data.get("data")
if target.data and (target.data.name != data_id):
target.data = get_datablock_from_uuid(
if datablock.data and (datablock.data.name != data_id):
datablock.data = get_datablock_from_uuid(
data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
vertex_groups = data.get('vertex_groups', None)
if vertex_groups:
load_vertex_groups(vertex_groups, target)
load_vertex_groups(vertex_groups, datablock)
object_data = target.data
object_data = datablock.data
# SHAPE KEYS
shape_keys = data.get('shape_keys')
if shape_keys:
load_shape_keys(shape_keys, target)
load_shape_keys(shape_keys, datablock)
# Load transformation data
loader.load(target, data)
loader.load(datablock, data)
# Object display fields
if 'display' in data:
loader.load(target.display, data['display'])
loader.load(datablock.display, data['display'])
# Parenting
parent_id = data.get('parent_uid')
if parent_id:
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading
if target.parent != parent and parent is not None:
target.parent = parent
elif target.parent:
target.parent = None
if datablock.parent != parent and parent is not None:
datablock.parent = parent
elif datablock.parent:
datablock.parent = None
# Pose
if 'pose' in data:
if not target.pose:
if not datablock.pose:
raise Exception('No pose data yet (Fixed in a near futur)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_target = target.pose.bone_groups.get(bg_name)
bg_target = datablock.pose.bone_groups.get(bg_name)
if not bg_target:
bg_target = target.pose.bone_groups.new(name=bg_name)
bg_target = datablock.pose.bone_groups.new(name=bg_name)
loader.load(bg_target, bg_data)
# target.pose.bone_groups.get
# datablock.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
target_bone = target.pose.bones.get(bone)
target_bone = datablock.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
@ -539,13 +539,13 @@ class BlObject(BlDatablock):
load_pose(target_bone, bone_data)
if 'bone_index' in bone_data.keys():
target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]
# TODO: find another way...
if target.empty_display_type == "IMAGE":
if datablock.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid')
if target.data is None and img_uuid:
target.data = get_datablock_from_uuid(img_uuid, None)
if datablock.data is None and img_uuid:
datablock.data = get_datablock_from_uuid(img_uuid, None)
if hasattr(object_data, 'skin_vertices') \
and object_data.skin_vertices\
@ -556,30 +556,29 @@ class BlObject(BlDatablock):
skin_data.data,
SKIN_DATA)
if hasattr(target, 'cycles_visibility') \
if hasattr(datablock, 'cycles_visibility') \
and 'cycles_visibility' in data:
loader.load(target.cycles_visibility, data['cycles_visibility'])
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
if hasattr(target, 'modifiers'):
load_modifiers_custom_data(data['modifiers'], target.modifiers)
if hasattr(datablock, 'modifiers'):
load_modifiers_custom_data(data['modifiers'], datablock.modifiers)
# PHYSICS
load_physics(data, target)
load_physics(data, datablock)
transform = data.get('transforms', None)
if transform:
target.matrix_parent_inverse = mathutils.Matrix(
datablock.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
target.matrix_local = mathutils.Matrix(transform['matrix_local'])
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])
def _dump_implementation(self, data, instance=None):
assert(instance)
if _is_editmode(instance):
@staticmethod
def dump(datablock: object) -> dict:
if _is_editmode(datablock):
if self.preferences.sync_flags.sync_during_editmode:
instance.update_from_editmode()
datablock.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
@ -618,32 +617,30 @@ class BlObject(BlDatablock):
'type'
]
data = dumper.dump(instance)
data = dumper.dump(datablock)
dumper.include_filter = [
'matrix_parent_inverse',
'matrix_local',
'matrix_basis']
data['transforms'] = dumper.dump(instance)
data['transforms'] = dumper.dump(datablock)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(instance.display)
data['display'] = dumper.dump(datablock.display)
data['data_uuid'] = getattr(instance.data, 'uuid', None)
if self.is_library:
return data
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
# PARENTING
if instance.parent:
data['parent_uid'] = (instance.parent.uuid, instance.parent.name)
if datablock.parent:
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
# MODIFIERS
modifiers = getattr(instance, 'modifiers', None)
if hasattr(instance, 'modifiers'):
modifiers = getattr(datablock, 'modifiers', None)
if hasattr(datablock, 'modifiers'):
data['modifiers'] = dump_modifiers(modifiers)
gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
if gp_modifiers:
dumper.include_filter = None
@ -666,16 +663,16 @@ class BlObject(BlDatablock):
# CONSTRAINTS
if hasattr(instance, 'constraints'):
if hasattr(datablock, 'constraints'):
dumper.include_filter = None
dumper.depth = 3
data["constraints"] = dumper.dump(instance.constraints)
data["constraints"] = dumper.dump(datablock.constraints)
# POSE
if hasattr(instance, 'pose') and instance.pose:
if hasattr(datablock, 'pose') and datablock.pose:
# BONES
bones = {}
for bone in instance.pose.bones:
for bone in datablock.pose.bones:
bones[bone.name] = {}
dumper.depth = 1
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@ -700,7 +697,7 @@ class BlObject(BlDatablock):
# GROUPS
bone_groups = {}
for group in instance.pose.bone_groups:
for group in datablock.pose.bone_groups:
dumper.depth = 3
dumper.include_filter = [
'name',
@ -710,11 +707,11 @@ class BlObject(BlDatablock):
data['pose']['bone_groups'] = bone_groups
# VERTEx GROUP
if len(instance.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(instance)
if len(datablock.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(datablock)
# SHAPE KEYS
object_data = instance.data
object_data = datablock.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
data['shape_keys'] = dump_shape_keys(object_data.shape_keys)
@ -727,7 +724,7 @@ class BlObject(BlDatablock):
data['skin_vertices'] = skin_vertices
# CYCLE SETTINGS
if hasattr(instance, 'cycles_visibility'):
if hasattr(datablock, 'cycles_visibility'):
dumper.include_filter = [
'camera',
'diffuse',
@ -736,38 +733,49 @@ class BlObject(BlDatablock):
'scatter',
'shadow',
]
data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)
data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)
# PHYSICS
data.update(dump_physics(instance))
data.update(dump_physics(datablock))
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
# Avoid Empty case
if self.instance.data:
deps.append(self.instance.data)
if datablock.data:
deps.append(datablock.data)
# Particle systems
for particle_slot in self.instance.particle_systems:
for particle_slot in datablock.particle_systems:
deps.append(particle_slot.settings)
if self.is_library:
deps.append(self.instance.library)
if datablock.parent:
deps.append(datablock.parent)
if self.instance.parent:
deps.append(self.instance.parent)
if self.instance.instance_type == 'COLLECTION':
if datablock.instance_type == 'COLLECTION':
# TODO: uuid based
deps.append(self.instance.instance_collection)
deps.append(datablock.instance_collection)
if self.instance.modifiers:
deps.extend(find_textures_dependencies(self.instance.modifiers))
deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
if datablock.modifiers:
deps.extend(find_textures_dependencies(datablock.modifiers))
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
if hasattr(self.instance.data, 'shape_keys') and self.instance.data.shape_keys:
deps.extend(resolve_animation_dependencies(self.instance.data.shape_keys))
if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))
return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
name = data.get('name')
datablock = resolve_datablock_from_uuid(uuid, bpy.data.objects)
if datablock is None:
datablock = bpy.data.objects.get(name)
return datablock
_type = bpy.types.Object
_class = BlObject

View File

@ -2,7 +2,8 @@ import bpy
import mathutils
from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
@ -37,19 +38,19 @@ IGNORED_ATTR = [
"users"
]
class BlParticle(BlDatablock):
class BlParticle(ReplicatedDatablock):
bl_id = "particles"
bl_class = bpy.types.ParticleSettings
bl_icon = "PARTICLES"
bl_check_common = False
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
instance = bpy.data.particles.new(data["name"])
instance.uuid = self.uuid
return instance
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
dump_anything.load(target, data)
dump_anything.load(target.effector_weights, data["effector_weights"])
@ -66,7 +67,7 @@ class BlParticle(BlDatablock):
# Texture slots
load_texture_slots(data["texture_slots"], target.texture_slots)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert instance
dumper = dump_anything.Dumper()
@ -86,5 +87,5 @@ class BlParticle(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
return [t.texture for t in self.instance.texture_slots if t and t.texture]

View File

@ -18,7 +18,7 @@
import logging
from pathlib import Path
from uuid import uuid4
import bpy
import mathutils
from deepdiff import DeepDiff, Delta
@ -28,9 +28,12 @@ from ..utils import flush_history
from .bl_collection import (dump_collection_children, dump_collection_objects,
load_collection_childrens, load_collection_objects,
resolve_collection_dependencies)
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_file import get_filepath
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import Dumper, Loader
from ..utils import get_preferences
from .bl_datablock import resolve_datablock_from_uuid
RENDER_SETTINGS = [
'dither_intensity',
@ -367,7 +370,7 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
sequence.select = False
class BlScene(BlDatablock):
class BlScene(ReplicatedDatablock):
is_root = True
bl_id = "scenes"
@ -376,58 +379,62 @@ class BlScene(BlDatablock):
bl_icon = 'SCENE_DATA'
bl_reload_parent = False
def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
instance = bpy.data.scenes.new(data["name"])
instance.uuid = self.uuid
instance.uuid = data.get('uuid')
return instance
def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(datablock.get('animation_data'), datablock)
# Load other meshes metadata
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
# Load master collection
load_collection_objects(
data['collection']['objects'], target.collection)
data['collection']['objects'], datablock.collection)
load_collection_childrens(
data['collection']['children'], target.collection)
data['collection']['children'], datablock.collection)
if 'world' in data.keys():
target.world = bpy.data.worlds[data['world']]
datablock.world = bpy.data.worlds[data['world']]
# Annotation
if 'grease_pencil' in data.keys():
target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
if self.preferences.sync_flags.sync_render_settings:
if get_preferences().sync_flags.sync_render_settings:
if 'eevee' in data.keys():
loader.load(target.eevee, data['eevee'])
loader.load(datablock.eevee, data['eevee'])
if 'cycles' in data.keys():
loader.load(target.cycles, data['cycles'])
loader.load(datablock.cycles, data['cycles'])
if 'render' in data.keys():
loader.load(target.render, data['render'])
loader.load(datablock.render, data['render'])
if 'view_settings' in data.keys():
loader.load(target.view_settings, data['view_settings'])
if target.view_settings.use_curve_mapping and \
loader.load(datablock.view_settings, data['view_settings'])
if datablock.view_settings.use_curve_mapping and \
'curve_mapping' in data['view_settings']:
# TODO: change this ugly fix
target.view_settings.curve_mapping.white_level = data[
datablock.view_settings.curve_mapping.white_level = data[
'view_settings']['curve_mapping']['white_level']
target.view_settings.curve_mapping.black_level = data[
datablock.view_settings.curve_mapping.black_level = data[
'view_settings']['curve_mapping']['black_level']
target.view_settings.curve_mapping.update()
datablock.view_settings.curve_mapping.update()
# Sequencer
sequences = data.get('sequences')
if sequences:
# Create sequencer data
target.sequence_editor_create()
vse = target.sequence_editor
datablock.sequence_editor_create()
vse = datablock.sequence_editor
# Clear removed sequences
for seq in vse.sequences_all:
@ -437,15 +444,17 @@ class BlScene(BlDatablock):
for seq_name, seq_data in sequences.items():
load_sequence(seq_data, vse)
# If the sequence is no longer used, clear it
elif target.sequence_editor and not sequences:
target.sequence_editor_clear()
elif datablock.sequence_editor and not sequences:
datablock.sequence_editor_clear()
# FIXME: Find a better way after the replication big refacotoring
# Keep other user from deleting collection object by flushing their history
flush_history()
def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:
data = {}
data['animation_data'] = dump_animation_data(datablock)
# Metadata
scene_dumper = Dumper()
@ -459,40 +468,40 @@ class BlScene(BlDatablock):
'frame_end',
'frame_step',
]
if self.preferences.sync_flags.sync_active_camera:
if get_preferences().sync_flags.sync_active_camera:
scene_dumper.include_filter.append('camera')
data.update(scene_dumper.dump(instance))
data.update(scene_dumper.dump(datablock))
# Master collection
data['collection'] = {}
data['collection']['children'] = dump_collection_children(
instance.collection)
datablock.collection)
data['collection']['objects'] = dump_collection_objects(
instance.collection)
datablock.collection)
scene_dumper.depth = 1
scene_dumper.include_filter = None
# Render settings
if self.preferences.sync_flags.sync_render_settings:
if get_preferences().sync_flags.sync_render_settings:
scene_dumper.include_filter = RENDER_SETTINGS
data['render'] = scene_dumper.dump(instance.render)
data['render'] = scene_dumper.dump(datablock.render)
if instance.render.engine == 'BLENDER_EEVEE':
if datablock.render.engine == 'BLENDER_EEVEE':
scene_dumper.include_filter = EVEE_SETTINGS
data['eevee'] = scene_dumper.dump(instance.eevee)
elif instance.render.engine == 'CYCLES':
data['eevee'] = scene_dumper.dump(datablock.eevee)
elif datablock.render.engine == 'CYCLES':
scene_dumper.include_filter = CYCLES_SETTINGS
data['cycles'] = scene_dumper.dump(instance.cycles)
data['cycles'] = scene_dumper.dump(datablock.cycles)
scene_dumper.include_filter = VIEW_SETTINGS
data['view_settings'] = scene_dumper.dump(instance.view_settings)
data['view_settings'] = scene_dumper.dump(datablock.view_settings)
if instance.view_settings.use_curve_mapping:
if datablock.view_settings.use_curve_mapping:
data['view_settings']['curve_mapping'] = scene_dumper.dump(
instance.view_settings.curve_mapping)
datablock.view_settings.curve_mapping)
scene_dumper.depth = 5
scene_dumper.include_filter = [
'curves',
@ -500,10 +509,10 @@ class BlScene(BlDatablock):
'location',
]
data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
instance.view_settings.curve_mapping.curves)
datablock.view_settings.curve_mapping.curves)
# Sequence
vse = instance.sequence_editor
vse = datablock.sequence_editor
if vse:
dumped_sequences = {}
for seq in vse.sequences_all:
@ -513,22 +522,25 @@ class BlScene(BlDatablock):
return data
def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
# Master Collection
deps.extend(resolve_collection_dependencies(self.instance.collection))
deps.extend(resolve_collection_dependencies(datablock.collection))
# world
if self.instance.world:
deps.append(self.instance.world)
if datablock.world:
deps.append(datablock.world)
# annotations
if self.instance.grease_pencil:
deps.append(self.instance.grease_pencil)
if datablock.grease_pencil:
deps.append(datablock.grease_pencil)
deps.extend(resolve_animation_dependencies(datablock))
# Sequences
vse = self.instance.sequence_editor
vse = datablock.sequence_editor
if vse:
for sequence in vse.sequences_all:
if sequence.type == 'MOVIE' and sequence.filepath:
@ -543,6 +555,16 @@ class BlScene(BlDatablock):
return deps
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
name = data.get('name')
datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
if datablock is None:
datablock = bpy.data.scenes.get(name)
return datablock
def diff(self):
exclude_path = []
@ -564,4 +586,6 @@ class BlScene(BlDatablock):
'mutate':True
}
return super().diff(diff_params=diff_params)
# return Delta(DeepDiff(self.data, self._dump(instance=self.instance),))
_type = bpy.types.Scene
_class = BlScene

View File

@ -23,18 +23,18 @@ from pathlib import Path
import bpy
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
class BlSound(BlDatablock):
class BlSound(ReplicatedDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_check_common = False
bl_icon = 'SOUND'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
filename = data.get('filename')
return bpy.data.sounds.load(get_filepath(filename))
@ -57,7 +57,7 @@ class BlSound(BlDatablock):
'name': instance.name
}
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)

View File

@ -20,24 +20,24 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlSpeaker(BlDatablock):
class BlSpeaker(ReplicatedDatablock):
bl_id = "speakers"
bl_class = bpy.types.Speaker
bl_check_common = False
bl_icon = 'SPEAKER'
bl_reload_parent = False
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.speakers.new(data["name"])
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
@ -60,7 +60,7 @@ class BlSpeaker(BlDatablock):
return dumper.dump(instance)
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []

View File

@ -20,24 +20,24 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
class BlTexture(BlDatablock):
class BlTexture(ReplicatedDatablock):
bl_id = "textures"
bl_class = bpy.types.Texture
bl_check_common = False
bl_icon = 'TEXTURE'
bl_reload_parent = False
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"])
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
@ -61,7 +61,7 @@ class BlTexture(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []

View File

@ -21,17 +21,18 @@ import mathutils
from pathlib import Path
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
class BlVolume(BlDatablock):
class BlVolume(ReplicatedDatablock):
bl_id = "volumes"
bl_class = bpy.types.Volume
bl_check_common = False
bl_icon = 'VOLUME_DATA'
bl_reload_parent = False
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(target.display, data['display'])
@ -41,10 +42,10 @@ class BlVolume(BlDatablock):
if src_materials:
load_materials_slots(src_materials, target.materials)
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.volumes.new(data["name"])
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
dumper = Dumper()
@ -69,7 +70,7 @@ class BlVolume(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []

View File

@ -20,23 +20,23 @@ import bpy
import mathutils
from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree,
dump_node_tree,
get_node_tree_dependencies)
class BlWorld(BlDatablock):
class BlWorld(ReplicatedDatablock):
bl_id = "worlds"
bl_class = bpy.types.World
bl_check_common = True
bl_icon = 'WORLD_DATA'
bl_reload_parent = False
def _construct(self, data):
def construct(data: dict) -> object:
return bpy.data.worlds.new(data["name"])
def _load_implementation(self, data, target):
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
@ -46,7 +46,7 @@ class BlWorld(BlDatablock):
load_node_tree(data['node_tree'], target.node_tree)
def _dump_implementation(self, data, instance=None):
def dump(datablock: object) -> dict:
assert(instance)
world_dumper = Dumper()
@ -62,7 +62,7 @@ class BlWorld(BlDatablock):
return data
def _resolve_deps_implementation(self):
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.use_nodes:

@ -1 +1 @@
Subproject commit 95166bfae41990e18d16a76b9a0bb4a25489d5fa
Subproject commit 0eccf69957e52442e9837b22c820f67187ee0e64

View File

@ -45,11 +45,12 @@ from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import DataTranslationProtocol
from replication.protocol import DataTranslationProtocol
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from replication import porcelain
from replication.repository import Repository
from replication.objects import Node
from . import bl_types, environment, timers, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find
@ -88,8 +89,10 @@ def initialize_session():
if node_ref is None:
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
node_ref.resolve()
node_ref.instance = session.repository.rdp.resolve(node_ref.data)
if node_ref.instance is None:
node_ref.instance = session.repository.rdp.construct(node_ref.data)
# Step 2: Load nodes
logging.info("Loading nodes")
for node in session.repository.list_ordered():
@ -184,29 +187,16 @@ class SessionStartOperator(bpy.types.Operator):
handler.setFormatter(formatter)
bpy_protocol = DataTranslationProtocol()
supported_bl_types = []
bpy_protocol = bl_types.get_data_translation_protocol()
# init the factory with supported types
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
supported_bl_types.append(type_module_class.bl_id)
if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
# Check if supported_datablocks are up to date before starting the
# the session
for impl in bpy_protocol.implementations.values():
if impl.__name__ not in settings.supported_datablocks:
logging.info(f"{impl.__name__} not found, \
regenerate type settings...")
settings.generate_supported_types()
type_local_config = settings.supported_datablocks[type_impl_name]
bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class,
check_common=type_module_class.bl_check_common)
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
@ -214,7 +204,7 @@ class SessionStartOperator(bpy.types.Operator):
python_binary_path = bpy.app.binary_path_python
repo = Repository(
data_protocol=bpy_protocol,
rdp=bpy_protocol,
username=settings.username)
# Host a session
@ -850,28 +840,13 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
# init the factory with supported types
bpy_protocol = DataTranslationProtocol()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class)
bpy_protocol = bl_types.get_data_translation_protocol()
graph = Repository()
for node, node_data in nodes:
node_type = node_data.get('str_type')
impl = bpy_protocol.get_implementation_from_net(node_type)
if impl:
logging.info(f"Loading {node}")
instance = impl(owner=node_data['owner'],
instance = Node(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
@ -990,20 +965,20 @@ def depsgraph_evaluation(scene):
if update.id.uuid:
# Retrieve local version
node = session.repository.get_node(update.id.uuid)
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
# Check our right on this update:
# - if its ours or ( under common and diff), launch the
# update process
# - if its to someone else, ignore the update
if node and (node.owner == session.id or node.bl_check_common):
if node and (node.owner == session.id or check_common):
if node.state == UP:
try:
porcelain.commit(session.repository, node.uuid)
porcelain.push(session.repository, 'origin', node.uuid)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
if not node.is_valid():
session.remove(node.uuid)
# if not node.is_valid():
# session.remove(node.uuid)
except ContextError as e:
logging.debug(e)
except Exception as e:

View File

@ -407,18 +407,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self):
self.supported_datablocks.clear()
for type in bl_types.types_to_register():
bpy_protocol = bl_types.get_data_translation_protocol()
# init the factory with supported types
for impl in bpy_protocol.implementations.values():
new_db = self.supported_datablocks.add()
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.name = impl.__name__
new_db.type_name = impl.__name__
new_db.use_as_filter = True
new_db.icon = type_module_class.bl_icon
new_db.bl_name = type_module_class.bl_id
new_db.icon = impl.bl_icon
new_db.bl_name = impl.bl_id
def client_list_callback(scene, context):

View File

@ -121,7 +121,8 @@ class ApplyTimer(Timer):
logging.error(f"Fail to apply {node_ref.uuid}")
traceback.print_exc()
else:
if node_ref.bl_reload_parent:
impl = session.repository.rdp.get_implementation(node_ref.instance)
if impl.bl_reload_parent:
for parent in session.repository.get_parents(node):
logging.debug("Refresh parent {node}")
porcelain.apply(session.repository,

View File

@ -453,8 +453,8 @@ def draw_property(context, parent, property_uuid, level=0):
detail_item_box = line.row(align=True)
detail_item_box.label(text="",
icon=settings.supported_datablocks[item.str_type].icon)
detail_item_box.label(text="")
# icon=settings.supported_datablocks].icon)
detail_item_box.label(text=f"{name}")
# Operations
@ -561,12 +561,7 @@ class SESSION_PT_repository(bpy.types.Panel):
types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
client_keys = [key for key in key_to_filter
if session.repository.get_node(key).str_type
in types_filter]
client_keys = session.list()
if client_keys:
col = layout.column(align=True)