Merge branch '188-intgrate-replication-as-a-submodule' into develop

commit b203d9dffd

.gitignore (vendored)
@@ -14,3 +14,4 @@ _build
# ignore generated zip generated from blender_addon_tester
*.zip
libs
@@ -8,3 +8,5 @@ build:
    name: multi_user
    paths:
      - multi_user
  variables:
    GIT_SUBMODULE_STRATEGY: recursive

@@ -5,6 +5,7 @@ deploy:
  variables:
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: "/certs"
    GIT_SUBMODULE_STRATEGY: recursive

  services:
    - docker:19.03.12-dind

@@ -3,3 +3,5 @@ test:
  image: slumber/blender-addon-testing:latest
  script:
    - python3 scripts/test_addon.py
  variables:
    GIT_SUBMODULE_STRATEGY: recursive

.gitmodules (vendored)
@@ -0,0 +1,3 @@
[submodule "multi_user/libs/replication"]
	path = multi_user/libs/replication
	url = https://gitlab.com/slumber/replication.git

@@ -19,7 +19,7 @@
bl_info = {
    "name": "Multi-User",
    "author": "Swann Martinez",
    "version": (0, 4, 0),
    "version": (0, 5, 0),
    "description": "Enable real-time collaborative workflow inside blender",
    "blender": (2, 82, 0),
    "location": "3D View > Sidebar > Multi-User tab",
@@ -43,13 +43,10 @@ from bpy.app.handlers import persistent
from . import environment


DEPENDENCIES = {
    ("replication", '0.1.36'),
}


module_error_msg = "Insufficient rights to install the multi-user \
    dependencies, launch blender with administrator rights."


def register():
    # Setup logging policy
    logging.basicConfig(
@@ -58,12 +55,7 @@ def register():
        level=logging.INFO)

    try:
        if bpy.app.version[1] >= 91:
            python_binary_path = sys.executable
        else:
            python_binary_path = bpy.app.binary_path_python

        environment.setup(DEPENDENCIES, python_binary_path)
        environment.register()

        from . import presence
        from . import operators
@@ -111,3 +103,5 @@ def unregister():
    del bpy.types.ID.uuid
    del bpy.types.WindowManager.online_users
    del bpy.types.WindowManager.user_index

    environment.unregister()
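Note: the interpreter lookup in register() above keys on bpy.app.version[1] >= 91, i.e. the minor version alone. A sketch of a more defensive check — an editor's illustration, not part of this commit — compares the full version tuple so Blender 3.x (minor == 0) does not fall into the legacy branch:

import sys

import bpy

# Compare the whole (major, minor, patch) tuple rather than version[1] alone.
if bpy.app.version >= (2, 91, 0):
    python_binary_path = sys.executable
else:
    # legacy attribute; deprecated and removed in newer Blender builds
    python_binary_path = bpy.app.binary_path_python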

@@ -28,7 +28,6 @@ __all__ = [
    'bl_light',
    'bl_scene',
    'bl_material',
    'bl_library',
    'bl_armature',
    'bl_action',
    'bl_world',
@@ -39,7 +38,6 @@ __all__ = [
    'bl_font',
    'bl_sound',
    'bl_file',
    # 'bl_sequencer',
    'bl_node_group',
    'bl_texture',
    "bl_particle",
@@ -49,8 +47,18 @@ if bpy.app.version[1] >= 91:
    __all__.append('bl_volume')

from . import *
from replication.data import DataTranslationProtocol

def types_to_register():
    return __all__

from replication.protocol import DataTranslationProtocol


def get_data_translation_protocol() -> DataTranslationProtocol:
    """ Return a data translation protocol from implemented bpy types
    """
    bpy_protocol = DataTranslationProtocol()
    for module_name in __all__:
        impl = globals().get(module_name)
        if impl and hasattr(impl, "_type") and hasattr(impl, "_class"):
            bpy_protocol.register_implementation(impl._type, impl._class)
    return bpy_protocol
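Note: each bl_* module now exposes a module-level _type (the bpy type, or a list of types) and _class (its ReplicatedDatablock implementation), and get_data_translation_protocol() pairs them up. A minimal usage sketch, assuming a running Blender with the add-on installed (the module path is an assumption):

from multi_user import bl_types

# Build the protocol once; the replication session uses it to map each
# bpy datablock type to the implementation that can dump/load/resolve it.
bpy_protocol = bl_types.get_data_translation_protocol()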

@@ -25,8 +25,8 @@ from enum import Enum
from .. import utils
from .dump_anything import (
    Dumper, Loader, np_dump_collection, np_load_collection, remove_items_from_dict)
from .bl_datablock import BlDatablock, has_action, has_driver, dump_driver, load_driver

from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid

KEYFRAME = [
    'amplitude',
@@ -41,6 +41,66 @@ KEYFRAME = [
    'interpolation',
]

def has_action(datablock):
    """ Check if the datablock has actions
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.action)


def has_driver(datablock):
    """ Check if the datablock is driven
    """
    return (hasattr(datablock, 'animation_data')
            and datablock.animation_data
            and datablock.animation_data.drivers)


def dump_driver(driver):
    dumper = Dumper()
    dumper.depth = 6
    data = dumper.dump(driver)

    return data


def load_driver(target_datablock, src_driver):
    loader = Loader()
    drivers = target_datablock.animation_data.drivers
    src_driver_data = src_driver['driver']
    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])

    # Settings
    new_driver.driver.type = src_driver_data['type']
    new_driver.driver.expression = src_driver_data['expression']
    loader.load(new_driver, src_driver)

    # Variables
    for src_variable in src_driver_data['variables']:
        src_var_data = src_driver_data['variables'][src_variable]
        new_var = new_driver.driver.variables.new()
        new_var.name = src_var_data['name']
        new_var.type = src_var_data['type']

        for src_target in src_var_data['targets']:
            src_target_data = src_var_data['targets'][src_target]
            src_id = src_target_data.get('id')
            if src_id:
                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
            loader.load(new_var.targets[src_target], src_target_data)

    # Fcurve
    new_fcurve = new_driver.keyframe_points
    for p in reversed(new_fcurve):
        new_fcurve.remove(p, fast=True)

    new_fcurve.add(len(src_driver['keyframe_points']))

    for index, src_point in enumerate(src_driver['keyframe_points']):
        new_point = new_fcurve[index]
        loader.load(new_point, src_driver['keyframe_points'][src_point])


def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:
    """ Dump a single curve to a dict
@@ -198,26 +258,28 @@ def resolve_animation_dependencies(datablock):
    return []


class BlAction(BlDatablock):
class BlAction(ReplicatedDatablock):
    bl_id = "actions"
    bl_class = bpy.types.Action
    bl_check_common = False
    bl_icon = 'ACTION_TWEAK'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.actions.new(data["name"])

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        for dumped_fcurve in data["fcurves"]:
            dumped_data_path = dumped_fcurve["data_path"]
            dumped_array_index = dumped_fcurve["dumped_array_index"]

            # create fcurve if needed
            fcurve = target.fcurves.find(
            fcurve = datablock.fcurves.find(
                dumped_data_path, index=dumped_array_index)
            if fcurve is None:
                fcurve = target.fcurves.new(
                fcurve = datablock.fcurves.new(
                    dumped_data_path, index=dumped_array_index)

            load_fcurve(dumped_fcurve, fcurve)
@@ -225,9 +287,10 @@ class BlAction(BlDatablock):
        id_root = data.get('id_root')

        if id_root:
            target.id_root = id_root
            datablock.id_root = id_root

    def _dump_implementation(self, data, instance=None):
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.exclude_filter = [
            'name_full',
@@ -242,11 +305,23 @@ class BlAction(BlDatablock):
            'users'
        ]
        dumper.depth = 1
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        data["fcurves"] = []

        for fcurve in instance.fcurves:
        for fcurve in datablock.fcurves:
            data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))

        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.actions)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return []

    _type = bpy.types.Action
    _class = BlAction
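Note: after this refactor the implementations are stateless — the replication layer drives them entirely through static methods. A rough round trip under that contract (runs inside Blender; an editor's illustration, not code from this commit):

import bpy

from multi_user.bl_types.bl_action import BlAction

action = bpy.data.actions.new("walk_cycle")
data = BlAction.dump(action)                       # datablock -> plain dict
copy = BlAction.resolve(data) or BlAction.construct(data)
BlAction.load(data, copy)                          # plain dict -> datablock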

@@ -22,8 +22,9 @@ import mathutils

from .dump_anything import Loader, Dumper
from .. import presence, operators, utils
from .bl_datablock import BlDatablock

from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

def get_roll(bone: bpy.types.Bone) -> float:
    """ Compute the actual roll of a pose bone
@@ -35,17 +36,19 @@ def get_roll(bone: bpy.types.Bone) -> float:
    return bone.AxisRollFromMatrix(bone.matrix_local.to_3x3())[1]


class BlArmature(BlDatablock):
class BlArmature(ReplicatedDatablock):
    bl_id = "armatures"
    bl_class = bpy.types.Armature
    bl_check_common = False
    bl_icon = 'ARMATURE_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.armatures.new(data["name"])

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        # Load parent object
        parent_object = utils.find_from_attr(
            'uuid',
@@ -55,7 +58,7 @@ class BlArmature(BlDatablock):

        if parent_object is None:
            parent_object = bpy.data.objects.new(
                data['user_name'], target)
                data['user_name'], datablock)
            parent_object.uuid = data['user']

        is_object_in_master = (
@@ -90,10 +93,10 @@ class BlArmature(BlDatablock):
        bpy.ops.object.mode_set(mode='EDIT')

        for bone in data['bones']:
            if bone not in target.edit_bones:
                new_bone = target.edit_bones.new(bone)
            if bone not in datablock.edit_bones:
                new_bone = datablock.edit_bones.new(bone)
            else:
                new_bone = target.edit_bones[bone]
                new_bone = datablock.edit_bones[bone]

            bone_data = data['bones'].get(bone)

@@ -104,7 +107,7 @@ class BlArmature(BlDatablock):
            new_bone.roll = bone_data['roll']

            if 'parent' in bone_data:
                new_bone.parent = target.edit_bones[data['bones']
                new_bone.parent = datablock.edit_bones[data['bones']
                                                      [bone]['parent']]
            new_bone.use_connect = bone_data['use_connect']

@@ -119,9 +122,10 @@ class BlArmature(BlDatablock):
        if 'EDIT' in current_mode:
            bpy.ops.object.mode_set(mode='EDIT')

    def _dump_implementation(self, data, instance=None):
        assert(instance)
        load_animation_data(data.get('animation_data'), datablock)

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 4
        dumper.include_filter = [
@@ -135,14 +139,14 @@ class BlArmature(BlDatablock):
            'name',
            'layers',
        ]
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        for bone in instance.bones:
        for bone in datablock.bones:
            if bone.parent:
                data['bones'][bone.name]['parent'] = bone.parent.name
        # get the parent Object
        # TODO: Use id_data instead
        object_users = utils.get_datablock_users(instance)[0]
        object_users = utils.get_datablock_users(datablock)[0]
        data['user'] = object_users.uuid
        data['user_name'] = object_users.name

@@ -153,7 +157,25 @@ class BlArmature(BlDatablock):
        data['user_scene'] = [
            item.name for item in container_users if isinstance(item, bpy.types.Scene)]

        for bone in instance.bones:
        for bone in datablock.bones:
            data['bones'][bone.name]['roll'] = get_roll(bone)

        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        name = data.get('name')
        datablock = resolve_datablock_from_uuid(uuid, bpy.data.armatures)
        if datablock is None:
            datablock = bpy.data.armatures.get(name)

        return datablock

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return resolve_animation_dependencies(datablock)

    _type = bpy.types.Armature
    _class = BlArmature

@@ -20,39 +20,46 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


class BlCamera(BlDatablock):
class BlCamera(ReplicatedDatablock):
    bl_id = "cameras"
    bl_class = bpy.types.Camera
    bl_check_common = False
    bl_icon = 'CAMERA_DATA'
    bl_reload_parent = False

    def _construct(self, data):

    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.cameras.new(data["name"])


    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

        dof_settings = data.get('dof')

        load_animation_data(data.get('animation_data'), datablock)

        # DOF settings
        if dof_settings:
            loader.load(target.dof, dof_settings)
            loader.load(datablock.dof, dof_settings)

        background_images = data.get('background_images')

        target.background_images.clear()
        datablock.background_images.clear()

        if background_images:
            for img_name, img_data in background_images.items():
                img_id = img_data.get('image')
                if img_id:
                    target_img = target.background_images.new()
                    target_img = datablock.background_images.new()
                    target_img.image = bpy.data.images[img_id]
                    loader.load(target_img, img_data)

@@ -61,11 +68,8 @@ class BlCamera(BlDatablock):
                    loader.load(target_img.image_user, img_user)


    def _dump_implementation(self, data, instance=None):
        assert(instance)

        # TODO: background image support

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 3
        dumper.include_filter = [
@@ -114,15 +118,29 @@ class BlCamera(BlDatablock):
            'use_cyclic',
            'use_auto_refresh'
        ]
        data = dumper.dump(instance)
        for index, image in enumerate(instance.background_images):
        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)

        for index, image in enumerate(datablock.background_images):
            if image.image_user:
                data['background_images'][index]['image_user'] = dumper.dump(image.image_user)
        return data

    def _resolve_deps_implementation(self):

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.cameras)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []
        for background in self.instance.background_images:
        for background in datablock.background_images:
            if background.image:
                deps.append(background.image)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

    _type = bpy.types.Camera
    _class = BlCamera

@@ -19,10 +19,12 @@
import bpy
import mathutils

from .. import utils
from .bl_datablock import BlDatablock
from .dump_anything import Loader, Dumper
from deepdiff import DeepDiff, Delta

from .. import utils
from replication.protocol import ReplicatedDatablock
from .dump_anything import Loader, Dumper
from .bl_datablock import resolve_datablock_from_uuid

def dump_collection_children(collection):
    collection_children = []
@@ -81,58 +83,82 @@ def resolve_collection_dependencies(collection):

    return deps

class BlCollection(BlDatablock):
class BlCollection(ReplicatedDatablock):
    bl_id = "collections"
    bl_icon = 'FILE_FOLDER'
    bl_class = bpy.types.Collection
    bl_check_common = True
    bl_reload_parent = False

    def _construct(self, data):
        if self.is_library:
            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
                targetData.collections = [
                    name for name in sourceData.collections if name == self.data['name']]

            instance = bpy.data.collections[self.data['name']]

            return instance
    use_delta = True

    @staticmethod
    def construct(data: dict) -> object:
        instance = bpy.data.collections.new(data["name"])
        return instance

    def _load_implementation(self, data, target):

    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

        # Objects
        load_collection_objects(data['objects'], target)
        load_collection_objects(data['objects'], datablock)

        # Link children
        load_collection_childrens(data['children'], target)
        load_collection_childrens(data['children'], datablock)

        # FIXME: Find a better way after the replication big refactoring
        # Keep other users from deleting collection objects by flushing their history
        utils.flush_history()

    def _dump_implementation(self, data, instance=None):
        assert(instance)

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
            "name",
            "instance_offset"
        ]
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        # dump objects
        data['objects'] = dump_collection_objects(instance)
        data['objects'] = dump_collection_objects(datablock)

        # dump children collections
        data['children'] = dump_collection_children(instance)
        data['children'] = dump_collection_children(datablock)

        return data

    def _resolve_deps_implementation(self):
        return resolve_collection_dependencies(self.instance)

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.collections)


    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return resolve_collection_dependencies(datablock)

    @staticmethod
    def compute_delta(last_data: dict, current_data: dict) -> Delta:
        diff_params = {
            'ignore_order': True,
            'report_repetition': True
        }
        delta_params = {
            # 'mutate': True
        }

        return Delta(
            DeepDiff(last_data,
                     current_data,
                     cache_size=5000,
                     **diff_params),
            **delta_params)

    _type = bpy.types.Collection
    _class = BlCollection
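Note: compute_delta leans on the deepdiff package: DeepDiff records the differences between the previous and current dump, and Delta wraps them as a patch that can be added onto the old state. A standalone illustration with plain dicts (no Blender required):

from deepdiff import DeepDiff, Delta

last = {'name': 'Collection', 'objects': ['Cube']}
current = {'name': 'Collection', 'objects': ['Cube', 'Light']}

delta = Delta(DeepDiff(last, current, ignore_order=True, report_repetition=True))
assert last + delta == current  # applying the delta reproduces the new dump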

@@ -21,13 +21,15 @@ import bpy.types as T
import mathutils
import logging

from .. import utils
from .bl_datablock import BlDatablock
from ..utils import get_preferences
from replication.protocol import ReplicatedDatablock
from .dump_anything import (Dumper, Loader,
                            np_load_collection,
                            np_dump_collection)
from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


SPLINE_BEZIER_POINT = [
    # "handle_left_type",
@@ -134,25 +136,29 @@ SPLINE_METADATA = [
]


class BlCurve(BlDatablock):
class BlCurve(ReplicatedDatablock):
    bl_id = "curves"
    bl_class = bpy.types.Curve
    bl_check_common = False
    bl_icon = 'CURVE_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.curves.new(data["name"], data["type"])

    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)

        target.splines.clear()
        loader = Loader()
        loader.load(datablock, data)

        datablock.splines.clear()

        # load splines
        for spline in data['splines'].values():
            new_spline = target.splines.new(spline['type'])
            new_spline = datablock.splines.new(spline['type'])

            # Load curve geometry data
            if new_spline.type == 'BEZIER':
@@ -173,15 +179,14 @@ class BlCurve(BlDatablock):
        # MATERIAL SLOTS
        src_materials = data.get('materials', None)
        if src_materials:
            load_materials_slots(src_materials, target.materials)
            load_materials_slots(src_materials, datablock.materials)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        # Conflicting attributes
        # TODO: remove them with the NURBS support
        dumper.include_filter = CURVE_METADATA

        dumper.exclude_filter = [
            'users',
            'order_u',
@@ -190,14 +195,16 @@ class BlCurve(BlDatablock):
            'point_count_u',
            'active_textbox'
        ]
        if instance.use_auto_texspace:
        if datablock.use_auto_texspace:
            dumper.exclude_filter.extend([
                'texspace_location',
                'texspace_size'])
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        data['animation_data'] = dump_animation_data(datablock)
        data['splines'] = {}

        for index, spline in enumerate(instance.splines):
        for index, spline in enumerate(datablock.splines):
            dumper.depth = 2
            dumper.include_filter = SPLINE_METADATA
            spline_data = dumper.dump(spline)
@@ -211,21 +218,27 @@ class BlCurve(BlDatablock):
                    spline.bezier_points, SPLINE_BEZIER_POINT)
            data['splines'][index] = spline_data

        if isinstance(instance, T.SurfaceCurve):
        if isinstance(datablock, T.SurfaceCurve):
            data['type'] = 'SURFACE'
        elif isinstance(instance, T.TextCurve):
        elif isinstance(datablock, T.TextCurve):
            data['type'] = 'FONT'
        elif isinstance(instance, T.Curve):
        elif isinstance(datablock, T.Curve):
            data['type'] = 'CURVE'

        data['materials'] = dump_materials_slots(instance.materials)
        data['materials'] = dump_materials_slots(datablock.materials)

        return data

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.curves)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        # TODO: resolve material
        deps = []
        curve = self.instance
        curve = datablock

        if isinstance(curve, T.TextCurve):
            deps.extend([
@@ -234,15 +247,19 @@ class BlCurve(BlDatablock):
                curve.font_bold_italic,
                curve.font_italic])

        for material in self.instance.materials:
        for material in datablock.materials:
            if material:
                deps.append(material)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

    def diff(self):
        if 'EDIT' in bpy.context.mode \
                and not self.preferences.sync_flags.sync_during_editmode:
            return False
        else:
            return super().diff()
    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        return 'EDIT' not in bpy.context.mode \
            or get_preferences().sync_flags.sync_during_editmode


    _type = [bpy.types.Curve, bpy.types.TextCurve]
    _class = BlCurve

@@ -22,73 +22,11 @@ from collections.abc import Iterable
import bpy
import mathutils
from replication.constants import DIFF_BINARY, DIFF_JSON, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock

from .. import utils
from .dump_anything import Dumper, Loader


def has_action(target):
    """ Check if the target datablock has actions
    """
    return (hasattr(target, 'animation_data')
            and target.animation_data
            and target.animation_data.action)


def has_driver(target):
    """ Check if the target datablock is driven
    """
    return (hasattr(target, 'animation_data')
            and target.animation_data
            and target.animation_data.drivers)


def dump_driver(driver):
    dumper = Dumper()
    dumper.depth = 6
    data = dumper.dump(driver)

    return data


def load_driver(target_datablock, src_driver):
    loader = Loader()
    drivers = target_datablock.animation_data.drivers
    src_driver_data = src_driver['driver']
    new_driver = drivers.new(src_driver['data_path'], index=src_driver['array_index'])

    # Settings
    new_driver.driver.type = src_driver_data['type']
    new_driver.driver.expression = src_driver_data['expression']
    loader.load(new_driver, src_driver)

    # Variables
    for src_variable in src_driver_data['variables']:
        src_var_data = src_driver_data['variables'][src_variable]
        new_var = new_driver.driver.variables.new()
        new_var.name = src_var_data['name']
        new_var.type = src_var_data['type']

        for src_target in src_var_data['targets']:
            src_target_data = src_var_data['targets'][src_target]
            src_id = src_target_data.get('id')
            if src_id:
                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
            loader.load(new_var.targets[src_target], src_target_data)

    # Fcurve
    new_fcurve = new_driver.keyframe_points
    for p in reversed(new_fcurve):
        new_fcurve.remove(p, fast=True)

    new_fcurve.add(len(src_driver['keyframe_points']))

    for index, src_point in enumerate(src_driver['keyframe_points']):
        new_point = new_fcurve[index]
        loader.load(new_point, src_driver['keyframe_points'][src_point])


def get_datablock_from_uuid(uuid, default, ignore=[]):
    if not uuid:
        return default
@@ -100,133 +38,8 @@ def get_datablock_from_uuid(uuid, default, ignore=[]):
            return item
    return default


class BlDatablock(ReplicatedDatablock):
    """BlDatablock

    bl_id : blender internal storage identifier
    bl_class : blender internal type
    bl_icon : type icon (blender icon name)
    bl_check_common: enable check even in common rights
    bl_reload_parent: reload parent
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        instance = kwargs.get('instance', None)

        self.preferences = utils.get_preferences()

        # TODO: use is_library_indirect
        self.is_library = (instance and hasattr(instance, 'library') and
                           instance.library) or \
            (hasattr(self,'data') and self.data and 'library' in self.data)

        if instance and hasattr(instance, 'uuid'):
            instance.uuid = self.uuid

    def resolve(self, construct = True):
        datablock_root = getattr(bpy.data, self.bl_id)
        datablock_ref = utils.find_from_attr('uuid', self.uuid, datablock_root)

        if not datablock_ref:
            try:
                datablock_ref = datablock_root[self.data['name']]
            except Exception:
                pass

        if construct and not datablock_ref:
            name = self.data.get('name')
            logging.debug(f"Constructing {name}")
            datablock_ref = self._construct(data=self.data)

        if datablock_ref is not None:
            setattr(datablock_ref, 'uuid', self.uuid)
            self.instance = datablock_ref
            return True
        else:
            return False

    def remove_instance(self):
        """
        Remove instance from blender data
        """
        assert(self.instance)

        datablock_root = getattr(bpy.data, self.bl_id)
        datablock_root.remove(self.instance)

    def _dump(self, instance=None):
        dumper = Dumper()
        data = {}
        animation_data = {}
        # Dump animation data
        if has_action(instance):
            animation_data['action'] = instance.animation_data.action.name
        if has_driver(instance):
            animation_data['drivers'] = []
            for driver in instance.animation_data.drivers:
                animation_data['drivers'].append(dump_driver(driver))

        if animation_data:
            data['animation_data'] = animation_data

        if self.is_library:
            data.update(dumper.dump(instance))
        else:
            data.update(self._dump_implementation(data, instance=instance))

        return data

    def _dump_implementation(self, data, target):
        raise NotImplementedError

    def _load(self, data, target):
        # Load animation data
        if 'animation_data' in data.keys():
            if target.animation_data is None:
                target.animation_data_create()

            for d in target.animation_data.drivers:
                target.animation_data.drivers.remove(d)

            if 'drivers' in data['animation_data']:
                for driver in data['animation_data']['drivers']:
                    load_driver(target, driver)

            if 'action' in data['animation_data']:
                target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
            elif target.animation_data.action:
                target.animation_data.action = None

        # Remove existing animation data if there is no more to load
        elif hasattr(target, 'animation_data') and target.animation_data:
            target.animation_data_clear()

        if self.is_library:
            return
        else:
            self._load_implementation(data, target)

    def _load_implementation(self, data, target):
        raise NotImplementedError

    def resolve_deps(self):
        dependencies = []

        if has_action(self.instance):
            dependencies.append(self.instance.animation_data.action)

        if not self.is_library:
            dependencies.extend(self._resolve_deps_implementation())

        logging.debug(f"{self.instance} dependencies: {dependencies}")
        return dependencies

    def _resolve_deps_implementation(self):
        return []

    def is_valid(self):
        return getattr(bpy.data, self.bl_id).get(self.data['name'])

def resolve_datablock_from_uuid(uuid, bpy_collection):
    for item in bpy_collection:
        if getattr(item, 'uuid', None) == uuid:
            return item
    return None
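Note: resolve_datablock_from_uuid only works because the add-on attaches a custom uuid property to every ID — its teardown, del bpy.types.ID.uuid, appears in unregister() earlier in this diff. A sketch of the assumed registration, which lives outside this commit:

import bpy

# Assumed counterpart of `del bpy.types.ID.uuid`; the real property
# definition is not shown in this diff.
bpy.types.ID.uuid = bpy.props.StringProperty(default="", options={'HIDDEN'})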

@@ -19,14 +19,15 @@
import logging
import os
import sys
from pathlib import Path
from pathlib import Path, WindowsPath, PosixPath

import bpy
import mathutils
from replication.constants import DIFF_BINARY, UP
from replication.data import ReplicatedDatablock
from replication.protocol import ReplicatedDatablock

from .. import utils
from ..utils import get_preferences
from .dump_anything import Dumper, Loader


@@ -58,33 +59,16 @@ class BlFile(ReplicatedDatablock):
    bl_icon = 'FILE'
    bl_reload_parent = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.instance = kwargs.get('instance', None)
    @staticmethod
    def construct(data: dict) -> object:
        return Path(get_filepath(data['name']))

        if self.instance and not self.instance.exists():
            raise FileNotFoundError(str(self.instance))
    @staticmethod
    def resolve(data: dict) -> object:
        return Path(get_filepath(data['name']))

        self.preferences = utils.get_preferences()

    def resolve(self, construct = True):
        self.instance = Path(get_filepath(self.data['name']))

        file_exists = self.instance.exists()
        if not file_exists:
            logging.debug("File doesn't exist, loading it.")
            self._load(self.data, self.instance)

        return file_exists


    def push(self, socket, identity=None, check_data=False):
        super().push(socket, identity=None, check_data=False)

        if self.preferences.clear_memory_filecache:
            del self.data['file']

    def _dump(self, instance=None):
    @staticmethod
    def dump(datablock: object) -> dict:
        """
        Read the file and return a dict as:
        {
@@ -96,46 +80,62 @@ class BlFile(ReplicatedDatablock):
        logging.info(f"Extracting file metadata")

        data = {
            'name': self.instance.name,
            'name': datablock.name,
        }

        logging.info(
            f"Reading {self.instance.name} content: {self.instance.stat().st_size} bytes")
        logging.info(f"Reading {datablock.name} content: {datablock.stat().st_size} bytes")

        try:
            file = open(self.instance, "rb")
            file = open(datablock, "rb")
            data['file'] = file.read()

            file.close()
        except IOError:
            logging.warning(f"{self.instance} doesn't exist, skipping")
            logging.warning(f"{datablock} doesn't exist, skipping")
        else:
            file.close()

        return data

    def _load(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        """
        Writing the file
        """

        try:
            file = open(target, "wb")
            file = open(datablock, "wb")
            file.write(data['file'])

            if self.preferences.clear_memory_filecache:
                del self.data['file']
            if get_preferences().clear_memory_filecache:
                del data['file']
        except IOError:
            logging.warning(f"{target} doesn't exist, skipping")
            logging.warning(f"{datablock} doesn't exist, skipping")
        else:
            file.close()

    def diff(self):
        if self.preferences.clear_memory_filecache:
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return []

    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        if get_preferences().clear_memory_filecache:
            return False
        else:
            if not self.instance:
            if not datablock:
                return None

            if not data:
                return True

            memory_size = sys.getsizeof(data['file'])-33
            disk_size = datablock.stat().st_size

            if memory_size != disk_size:
                return True
            else:
                return False
            memory_size = sys.getsizeof(self.data['file'])-33
            disk_size = self.instance.stat().st_size
            return memory_size != disk_size

    _type = [WindowsPath, PosixPath]
    _class = BlFile
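Note: the -33 in needs_update compensates for the object header that sys.getsizeof counts on top of a bytes payload, letting the in-memory dump be compared against the on-disk size. A quick check of that constant (CPython 3.x on 64-bit; the offset is an implementation detail, not a documented API):

import sys

payload = b'x' * 1024
# getsizeof(bytes) = payload length + fixed header (33 bytes here)
assert sys.getsizeof(payload) - 33 == len(payload)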

@@ -22,19 +22,20 @@ from pathlib import Path

import bpy

from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_file import get_filepath, ensure_unpacked
from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid


class BlFont(BlDatablock):
class BlFont(ReplicatedDatablock):
    bl_id = "fonts"
    bl_class = bpy.types.VectorFont
    bl_check_common = False
    bl_icon = 'FILE_FONT'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        filename = data.get('filename')

        if filename == '<builtin>':
@@ -42,31 +43,43 @@ class BlFont(BlDatablock):
        else:
            return bpy.data.fonts.load(get_filepath(filename))

    def _load(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        pass

    def _dump(self, instance=None):
        if instance.filepath == '<builtin>':
    @staticmethod
    def dump(datablock: object) -> dict:
        if datablock.filepath == '<builtin>':
            filename = '<builtin>'
        else:
            filename = Path(instance.filepath).name
            filename = Path(datablock.filepath).name

        if not filename:
            raise FileExistsError(instance.filepath)
            raise FileExistsError(datablock.filepath)

        return {
            'filename': filename,
            'name': instance.name
            'name': datablock.name
        }

    def diff(self):
        return False
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.fonts)

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []
        if self.instance.filepath and self.instance.filepath != '<builtin>':
            ensure_unpacked(self.instance)
        if datablock.filepath and datablock.filepath != '<builtin>':
            ensure_unpacked(datablock)

            deps.append(Path(bpy.path.abspath(self.instance.filepath)))
            deps.append(Path(bpy.path.abspath(datablock.filepath)))

        return deps

    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        return False

    _type = bpy.types.VectorFont
    _class = BlFont

@@ -24,10 +24,11 @@ from .dump_anything import (Dumper,
                            Loader,
                            np_dump_collection,
                            np_load_collection)
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from ..utils import get_preferences

# GPencil data api is structured as follows:
# GP-Object --> GP-Layers --> GP-Frames --> GP-Strokes --> GP-Stroke-Points

STROKE_POINT = [
    'co',
@@ -113,6 +114,7 @@ def load_stroke(stroke_data, stroke):
    # fix fill issues
    stroke.uv_scale = stroke_data["uv_scale"]


def dump_frame(frame):
    """ Dump a grease pencil frame to a dict

@@ -151,6 +153,7 @@ def load_frame(frame_data, frame):

    np_load_collection(frame_data['strokes'], frame.strokes, STROKE)


def dump_layer(layer):
    """ Dump a grease pencil layer

@@ -228,47 +231,58 @@ def load_layer(layer_data, layer):
        load_frame(frame_data, target_frame)


class BlGpencil(BlDatablock):
def layer_changed(datablock: object, data: dict) -> bool:
    if datablock.layers.active and \
            datablock.layers.active.info != data["active_layers"]:
        return True
    else:
        return False


def frame_changed(data: dict) -> bool:
    return bpy.context.scene.frame_current != data["eval_frame"]

class BlGpencil(ReplicatedDatablock):
    bl_id = "grease_pencils"
    bl_class = bpy.types.GreasePencil
    bl_check_common = False
    bl_icon = 'GREASEPENCIL'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.grease_pencils.new(data["name"])

    def _load_implementation(self, data, target):
        target.materials.clear()
    @staticmethod
    def load(data: dict, datablock: object):
        datablock.materials.clear()
        if "materials" in data.keys():
            for mat in data['materials']:
                target.materials.append(bpy.data.materials[mat])
                datablock.materials.append(bpy.data.materials[mat])

        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

        # TODO: reuse existing layer
        for layer in target.layers:
            target.layers.remove(layer)
        for layer in datablock.layers:
            datablock.layers.remove(layer)

        if "layers" in data.keys():
            for layer in data["layers"]:
                layer_data = data["layers"].get(layer)

                # if layer not in target.layers.keys():
                target_layer = target.layers.new(data["layers"][layer]["info"])
                # if layer not in datablock.layers.keys():
                target_layer = datablock.layers.new(data["layers"][layer]["info"])
                # else:
                #     target_layer = target.layers[layer]
                #     target_layer.clear()

                load_layer(layer_data, target_layer)

        target.layers.update()
        datablock.layers.update()



    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 2
        dumper.include_filter = [
@@ -279,36 +293,37 @@ class BlGpencil(BlDatablock):
            'pixel_factor',
            'stroke_depth_order'
        ]
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        data['layers'] = {}

        for layer in instance.layers:
        for layer in datablock.layers:
            data['layers'][layer.info] = dump_layer(layer)

        data["active_layers"] = instance.layers.active.info
        data["active_layers"] = datablock.layers.active.info if datablock.layers.active else "None"
        data["eval_frame"] = bpy.context.scene.frame_current
        return data

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.grease_pencils)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        for material in self.instance.materials:
        for material in datablock.materials:
            deps.append(material)

        return deps

    def layer_changed(self):
        return self.instance.layers.active.info != self.data["active_layers"]
    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        return bpy.context.mode == 'OBJECT' \
            or layer_changed(datablock, data) \
            or frame_changed(data) \
            or get_preferences().sync_flags.sync_during_editmode

    def frame_changed(self):
        return bpy.context.scene.frame_current != self.data["eval_frame"]

    def diff(self):
        if self.layer_changed() \
                or self.frame_changed() \
                or bpy.context.mode == 'OBJECT' \
                or self.preferences.sync_flags.sync_during_editmode:
            return super().diff()
        else:
            return False
    _type = bpy.types.GreasePencil
    _class = BlGpencil

@@ -24,9 +24,12 @@ import bpy
import mathutils

from .. import utils
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


format_to_ext = {
    'BMP': 'bmp',
@@ -48,35 +51,36 @@ format_to_ext = {
}


class BlImage(BlDatablock):
class BlImage(ReplicatedDatablock):
    bl_id = "images"
    bl_class = bpy.types.Image
    bl_check_common = False
    bl_icon = 'IMAGE_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.images.new(
            name=data['name'],
            width=data['size'][0],
            height=data['size'][1]
        )

    def _load(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(data, target)
        loader.load(data, datablock)

        target.source = 'FILE'
        target.filepath_raw = get_filepath(data['filename'])
        datablock.source = 'FILE'
        datablock.filepath_raw = get_filepath(data['filename'])
        color_space_name = data["colorspace_settings"]["name"]

        if color_space_name:
            target.colorspace_settings.name = color_space_name
            datablock.colorspace_settings.name = color_space_name

    def _dump(self, instance=None):
        assert(instance)

        filename = Path(instance.filepath).name
    @staticmethod
    def dump(datablock: object) -> dict:
        filename = Path(datablock.filepath).name

        data = {
            "filename": filename
@@ -93,35 +97,45 @@ class BlImage(BlDatablock):
            'float_buffer',
            'alpha_mode',
            'colorspace_settings']
        data.update(dumper.dump(instance))
        data.update(dumper.dump(datablock))
        return data

    def diff(self):
        if self.instance.is_dirty:
            self.instance.save()
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.images)

        if self.instance and (self.instance.name != self.data['name']):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        if datablock.packed_file:
            filename = Path(bpy.path.abspath(datablock.filepath)).name
            datablock.filepath_raw = get_filepath(filename)
            datablock.save()
            # An image can't be unpacked to the modified path
            # TODO: make a bug report
            datablock.unpack(method="REMOVE")

        elif datablock.source == "GENERATED":
            filename = f"{datablock.name}.png"
            datablock.filepath = get_filepath(filename)
            datablock.save()

        if datablock.filepath:
            deps.append(Path(bpy.path.abspath(datablock.filepath)))

        return deps

    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        if datablock.is_dirty:
            datablock.save()

        if not data or (datablock and (datablock.name != data.get('name'))):
            return True
        else:
            return False

    def _resolve_deps_implementation(self):
        deps = []

        if self.instance.packed_file:
            filename = Path(bpy.path.abspath(self.instance.filepath)).name
            self.instance.filepath_raw = get_filepath(filename)
            self.instance.save()
            # An image can't be unpacked to the modified path
            # TODO: make a bug report
            self.instance.unpack(method="REMOVE")

        elif self.instance.source == "GENERATED":
            filename = f"{self.instance.name}.png"
            self.instance.filepath = get_filepath(filename)
            self.instance.save()

        if self.instance.filepath:
            deps.append(Path(bpy.path.abspath(self.instance.filepath)))

        return deps
    _type = bpy.types.Image
    _class = BlImage

@@ -20,33 +20,39 @@ import bpy
import mathutils

from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from replication.exception import ContextError
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

POINT = ['co', 'weight_softbody', 'co_deform']


class BlLattice(BlDatablock):
class BlLattice(ReplicatedDatablock):
    bl_id = "lattices"
    bl_class = bpy.types.Lattice
    bl_check_common = False
    bl_icon = 'LATTICE_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.lattices.new(data["name"])

    def _load_implementation(self, data, target):
        if target.is_editmode:
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        if datablock.is_editmode:
            raise ContextError("lattice is in edit mode")

        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

        np_load_collection(data['points'], target.points, POINT)
        np_load_collection(data['points'], datablock.points, POINT)

    def _dump_implementation(self, data, instance=None):
        if instance.is_editmode:
    @staticmethod
    def dump(datablock: object) -> dict:
        if datablock.is_editmode:
            raise ContextError("lattice is in edit mode")

        dumper = Dumper()
@@ -62,9 +68,20 @@ class BlLattice(BlDatablock):
            'interpolation_type_w',
            'use_outside'
        ]
        data = dumper.dump(instance)

        data['points'] = np_dump_collection(instance.points, POINT)
        data = dumper.dump(datablock)

        data['points'] = np_dump_collection(datablock.points, POINT)
        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.lattices)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return resolve_animation_dependencies(datablock)

    _type = bpy.types.Lattice
    _class = BlLattice

@@ -1,45 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock


class BlLibrary(BlDatablock):
    bl_id = "libraries"
    bl_class = bpy.types.Library
    bl_check_common = False
    bl_icon = 'LIBRARY_DATA_DIRECT'
    bl_reload_parent = False

    def _construct(self, data):
        with bpy.data.libraries.load(filepath=data["filepath"], link=True) as (sourceData, targetData):
            targetData = sourceData
        return sourceData

    def _load(self, data, target):
        pass

    def _dump(self, instance=None):
        assert(instance)
        dumper = Dumper()
        return dumper.dump(instance)

@@ -20,25 +20,32 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


class BlLight(BlDatablock):
class BlLight(ReplicatedDatablock):
    bl_id = "lights"
    bl_class = bpy.types.Light
    bl_check_common = False
    bl_icon = 'LIGHT_DATA'
    bl_reload_parent = False

    def _construct(self, data):
        return bpy.data.lights.new(data["name"], data["type"])
    @staticmethod
    def construct(data: dict) -> object:
        instance = bpy.data.lights.new(data["name"], data["type"])
        instance.uuid = data.get("uuid")
        return instance

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)
        load_animation_data(data.get('animation_data'), datablock)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 3
        dumper.include_filter = [
@@ -67,9 +74,23 @@ class BlLight(BlDatablock):
            'spot_size',
            'spot_blend'
        ]
        data = dumper.dump(instance)
        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)
        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.lights)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        deps.extend(resolve_animation_dependencies(datablock))

        return deps


    _type = [bpy.types.SpotLight, bpy.types.PointLight, bpy.types.AreaLight, bpy.types.SunLight]
    _class = BlLight

@@ -21,17 +21,18 @@ import mathutils
import logging

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid


class BlLightprobe(BlDatablock):
class BlLightprobe(ReplicatedDatablock):
    bl_id = "lightprobes"
    bl_class = bpy.types.LightProbe
    bl_check_common = False
    bl_icon = 'LIGHTPROBE_GRID'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        type = 'CUBE' if data['type'] == 'CUBEMAP' else data['type']
        # See https://developer.blender.org/D6396
        if bpy.app.version[1] >= 83:
@@ -39,12 +40,13 @@ class BlLightprobe(BlDatablock):
        else:
            logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        if bpy.app.version[1] < 83:
            logging.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")

@@ -71,7 +73,16 @@ class BlLightprobe(BlDatablock):
            'visibility_blur'
        ]

        return dumper.dump(instance)
        return dumper.dump(datablock)

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.lightprobes)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        return []

    _type = bpy.types.LightProbe
    _class = BlLightprobe
@ -24,7 +24,10 @@ import re
from uuid import uuid4

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock

from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']
@ -45,7 +48,11 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
    node_tree_uuid = node_data.get('node_tree_uuid', None)

    if image_uuid and not target_node.image:
        target_node.image = get_datablock_from_uuid(image_uuid, None)
        image = resolve_datablock_from_uuid(image_uuid, bpy.data.images)
        if image is None:
            logging.error(f"Failed to find material image from uuid {image_uuid}")
        else:
            target_node.image = image

    if node_tree_uuid:
        target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)
@ -389,36 +396,40 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_
            dst_materials.append(mat_ref)


class BlMaterial(BlDatablock):
class BlMaterial(ReplicatedDatablock):
    bl_id = "materials"
    bl_class = bpy.types.Material
    bl_check_common = False
    bl_icon = 'MATERIAL_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.materials.new(data["name"])

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)

        loader = Loader()

        is_grease_pencil = data.get('is_grease_pencil')
        use_nodes = data.get('use_nodes')

        loader.load(target, data)
        loader.load(datablock, data)

        if is_grease_pencil:
            if not target.is_grease_pencil:
                bpy.data.materials.create_gpencil_data(target)
            loader.load(target.grease_pencil, data['grease_pencil'])
            if not datablock.is_grease_pencil:
                bpy.data.materials.create_gpencil_data(datablock)
            loader.load(datablock.grease_pencil, data['grease_pencil'])
        elif use_nodes:
            if target.node_tree is None:
                target.use_nodes = True
            if datablock.node_tree is None:
                datablock.use_nodes = True

            load_node_tree(data['node_tree'], target.node_tree)
            load_node_tree(data['node_tree'], datablock.node_tree)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        mat_dumper = Dumper()
        mat_dumper.depth = 2
        mat_dumper.include_filter = [
@ -444,9 +455,9 @@ class BlMaterial(BlDatablock):
            'line_priority',
            'is_grease_pencil'
        ]
        data = mat_dumper.dump(instance)
        data = mat_dumper.dump(datablock)

        if instance.is_grease_pencil:
        if datablock.is_grease_pencil:
            gp_mat_dumper = Dumper()
            gp_mat_dumper.depth = 3

@ -480,19 +491,28 @@ class BlMaterial(BlDatablock):
                'use_overlap_strokes',
                'use_fill_holdout',
            ]
            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
        elif instance.use_nodes:
            data['node_tree'] = dump_node_tree(instance.node_tree)
            data['grease_pencil'] = gp_mat_dumper.dump(datablock.grease_pencil)
        elif datablock.use_nodes:
            data['node_tree'] = dump_node_tree(datablock.node_tree)

        data['animation_data'] = dump_animation_data(datablock)
        return data

    def _resolve_deps_implementation(self):
        # TODO: resolve node group deps
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.materials)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        if self.instance.use_nodes:
            deps.extend(get_node_tree_dependencies(self.instance.node_tree))
        if self.is_library:
            deps.append(self.instance.library)
        if datablock.use_nodes:
            deps.extend(get_node_tree_dependencies(datablock.node_tree))

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

_type = bpy.types.Material
_class = BlMaterial
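As with the other types, `resolve()` now matches a dumped state back to a local datablock through the `uuid` custom property instead of holding an instance reference. A sketch of the lookup (import path and the literal uuid are placeholders):

import bpy
from multi_user.bl_types.bl_material import BlMaterial  # path assumed

mat = bpy.data.materials.new("demo")
mat.uuid = "00000000-0000-0000-0000-000000000001"  # normally set by the session
found = BlMaterial.resolve({'uuid': mat.uuid, 'name': mat.name})
assert found is mat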
@ -25,8 +25,13 @@ import numpy as np
from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dump_collection_primitive, np_load_collection, np_dump_collection
from replication.constants import DIFF_BINARY
from replication.exception import ContextError
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock

from .bl_datablock import get_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from ..utils import get_preferences
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

VERTICE = ['co']

@ -49,76 +54,77 @@ POLYGON = [
    'material_index',
]

class BlMesh(BlDatablock):
class BlMesh(ReplicatedDatablock):
    bl_id = "meshes"
    bl_class = bpy.types.Mesh
    bl_check_common = False
    bl_icon = 'MESH_DATA'
    bl_reload_parent = True

    def _construct(self, data):
        instance = bpy.data.meshes.new(data["name"])
        instance.uuid = self.uuid
        return instance
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.meshes.new(data.get("name"))

    def _load_implementation(self, data, target):
        if not target or target.is_editmode:
    @staticmethod
    def load(data: dict, datablock: object):
        if not datablock or datablock.is_editmode:
            raise ContextError
        else:
            load_animation_data(data.get('animation_data'), datablock)

            loader = Loader()
            loader.load(target, data)
            loader.load(datablock, data)

            # MATERIAL SLOTS
            src_materials = data.get('materials', None)
            if src_materials:
                load_materials_slots(src_materials, target.materials)
                load_materials_slots(src_materials, datablock.materials)

            # CLEAR GEOMETRY
            if target.vertices:
                target.clear_geometry()
            if datablock.vertices:
                datablock.clear_geometry()

            target.vertices.add(data["vertex_count"])
            target.edges.add(data["egdes_count"])
            target.loops.add(data["loop_count"])
            target.polygons.add(data["poly_count"])
            datablock.vertices.add(data["vertex_count"])
            datablock.edges.add(data["egdes_count"])
            datablock.loops.add(data["loop_count"])
            datablock.polygons.add(data["poly_count"])

            # LOADING
            np_load_collection(data['vertices'], target.vertices, VERTICE)
            np_load_collection(data['edges'], target.edges, EDGE)
            np_load_collection(data['loops'], target.loops, LOOP)
            np_load_collection(data["polygons"], target.polygons, POLYGON)
            np_load_collection(data['vertices'], datablock.vertices, VERTICE)
            np_load_collection(data['edges'], datablock.edges, EDGE)
            np_load_collection(data['loops'], datablock.loops, LOOP)
            np_load_collection(data["polygons"], datablock.polygons, POLYGON)

            # UV Layers
            if 'uv_layers' in data.keys():
                for layer in data['uv_layers']:
                    if layer not in target.uv_layers:
                        target.uv_layers.new(name=layer)
                    if layer not in datablock.uv_layers:
                        datablock.uv_layers.new(name=layer)

                    np_load_collection_primitives(
                        target.uv_layers[layer].data,
                        datablock.uv_layers[layer].data,
                        'uv',
                        data["uv_layers"][layer]['data'])

            # Vertex color
            if 'vertex_colors' in data.keys():
                for color_layer in data['vertex_colors']:
                    if color_layer not in target.vertex_colors:
                        target.vertex_colors.new(name=color_layer)
                    if color_layer not in datablock.vertex_colors:
                        datablock.vertex_colors.new(name=color_layer)

                    np_load_collection_primitives(
                        target.vertex_colors[color_layer].data,
                        datablock.vertex_colors[color_layer].data,
                        'color',
                        data["vertex_colors"][color_layer]['data'])

            target.validate()
            target.update()
            datablock.validate()
            datablock.update()

    def _dump_implementation(self, data, instance=None):
        assert(instance)

        if (instance.is_editmode or bpy.context.mode == "SCULPT") and not self.preferences.sync_flags.sync_during_editmode:
    @staticmethod
    def dump(datablock: object) -> dict:
        if (datablock.is_editmode or bpy.context.mode == "SCULPT") and not get_preferences().sync_flags.sync_during_editmode:
            raise ContextError("Mesh is in edit mode")
        mesh = instance
        mesh = datablock

        dumper = Dumper()
        dumper.depth = 1
@ -132,6 +138,8 @@ class BlMesh(BlDatablock):

        data = dumper.dump(mesh)

        data['animation_data'] = dump_animation_data(datablock)

        # VERTICES
        data["vertex_count"] = len(mesh.vertices)
        data["vertices"] = np_dump_collection(mesh.vertices, VERTICE)
@ -163,21 +171,30 @@ class BlMesh(BlDatablock):
            data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')

        # Materials
        data['materials'] = dump_materials_slots(instance.materials)
        data['materials'] = dump_materials_slots(datablock.materials)
        return data

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        for material in self.instance.materials:
        for material in datablock.materials:
            if material:
                deps.append(material)

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

    def diff(self):
        if 'EDIT' in bpy.context.mode \
                and not self.preferences.sync_flags.sync_during_editmode:
            return False
        else:
            return super().diff()
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.meshes)

    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        return ('EDIT' not in bpy.context.mode and bpy.context.mode != 'SCULPT') \
            or get_preferences().sync_flags.sync_during_editmode

_type = bpy.types.Mesh
_class = BlMesh
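The old `diff()` override that skipped meshes in edit mode becomes a `needs_update` predicate the replication layer queries before dumping. Roughly, assuming the preferences flag shown in the diff and an illustrative import path:

import bpy
from multi_user.bl_types.bl_mesh import BlMesh  # path assumed

mesh = bpy.data.meshes.new("demo")
if BlMesh.needs_update(mesh, {}):   # False during edit/sculpt unless opted in
    state = BlMesh.dump(mesh)       # only dump when the mesh is safe to read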
@ -23,7 +23,9 @@ from .dump_anything import (
    Dumper, Loader, np_dump_collection_primitive, np_load_collection_primitives,
    np_dump_collection, np_load_collection)

from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


ELEMENT = [
@ -62,29 +64,33 @@ def load_metaball_elements(elements_data, elements):
    np_load_collection(elements_data, elements, ELEMENT)


class BlMetaball(BlDatablock):
class BlMetaball(ReplicatedDatablock):
    bl_id = "metaballs"
    bl_class = bpy.types.MetaBall
    bl_check_common = False
    bl_icon = 'META_BALL'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.metaballs.new(data["name"])

    def _load_implementation(self, data, target):
        loader = Loader()
        loader.load(target, data)
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)

        target.elements.clear()
        loader = Loader()
        loader.load(datablock, data)

        datablock.elements.clear()

        for mtype in data["elements"]['type']:
            new_element = target.elements.new()
            new_element = datablock.elements.new()

        load_metaball_elements(data['elements'], target.elements)
        load_metaball_elements(data['elements'], datablock.elements)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = Dumper()
        dumper.depth = 1
        dumper.include_filter = [
@ -98,7 +104,24 @@ class BlMetaball(BlDatablock):
            'texspace_size'
        ]

        data = dumper.dump(instance)
        data['elements'] = dump_metaball_elements(instance.elements)
        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)
        data['elements'] = dump_metaball_elements(datablock.elements)

        return data

    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.metaballs)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        deps.extend(resolve_animation_dependencies(datablock))

        return deps

_type = bpy.types.MetaBall
_class = BlMetaball
@ -20,26 +20,43 @@ import bpy
import mathutils

from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (dump_node_tree,
                          load_node_tree,
                          get_node_tree_dependencies)
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

class BlNodeGroup(BlDatablock):
class BlNodeGroup(ReplicatedDatablock):
    bl_id = "node_groups"
    bl_class = bpy.types.NodeTree
    bl_check_common = False
    bl_icon = 'NODETREE'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.node_groups.new(data["name"], data["type"])

    def _load_implementation(self, data, target):
        load_node_tree(data, target)
    @staticmethod
    def load(data: dict, datablock: object):
        load_node_tree(data, datablock)

    def _dump_implementation(self, data, instance=None):
        return dump_node_tree(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        return dump_node_tree(datablock)

    def _resolve_deps_implementation(self):
        return get_node_tree_dependencies(self.instance)
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.node_groups)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []
        deps.extend(get_node_tree_dependencies(datablock))
        deps.extend(resolve_animation_dependencies(datablock))
        return deps

_type = [bpy.types.ShaderNodeTree, bpy.types.GeometryNodeTree]
_class = BlNodeGroup
@ -22,8 +22,10 @@ import bpy
import mathutils
from replication.exception import ContextError

from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from ..utils import get_preferences
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
from .dump_anything import (
    Dumper,
@ -44,6 +46,8 @@ SHAPEKEY_BLOCK_ATTR = [
    'slider_min',
    'slider_max',
]


if bpy.app.version[1] >= 93:
    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
@ -51,6 +55,7 @@ else:
    logging.warning("Geometry node Float parameter not supported in \
blender 2.92.")


def get_node_group_inputs(node_group):
    inputs = []
    for inpt in node_group.inputs:
@ -89,6 +94,7 @@ def dump_physics(target: bpy.types.Object)->dict:

    return physics_data


def load_physics(dumped_settings: dict, target: bpy.types.Object):
    """ Load all physics settings from a given object excluding modifier
        related physics settings (such as softbody, cloth, dynapaint and fluid)
@ -115,6 +121,7 @@ def load_physics(dumped_settings: dict, target: bpy.types.Object):
    elif target.rigid_body_constraint:
        bpy.ops.rigidbody.constraint_remove({"object": target})


def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
    """ Dump geometry node modifier input properties

@ -295,6 +302,7 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje
        for index, weight in vg['vertices']:
            vertex_group.add([index], weight, 'REPLACE')


def dump_shape_keys(target_key: bpy.types.Key)->dict:
    """ Dump the target shape_keys datablock to a dict using numpy

@ -370,12 +378,12 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
    :type modifiers: bpy.types.bpy_prop_collection
    :return: dict
    """
    dumped_modifiers = {}
    dumped_modifiers = []
    dumper = Dumper()
    dumper.depth = 1
    dumper.exclude_filter = ['is_active']

    for index, modifier in enumerate(modifiers):
    for modifier in modifiers:
        dumped_modifier = dumper.dump(modifier)
        # hack to dump geometry nodes inputs
        if modifier.type == 'NODES':
@ -397,9 +405,78 @@ def dump_modifiers(modifiers: bpy.types.bpy_prop_collection)->dict:
        elif modifier.type == 'UV_PROJECT':
            dumped_modifier['projectors'] = [p.object.name for p in modifier.projectors if p and p.object]

        dumped_modifiers[modifier.name] = dumped_modifier
        dumped_modifiers.append(dumped_modifier)
    return dumped_modifiers

def dump_constraints(constraints: bpy.types.bpy_prop_collection)->list:
    """Dump all constraints to a list

    :param constraints: constraints
    :type constraints: bpy.types.bpy_prop_collection
    :return: dict
    """
    dumper = Dumper()
    dumper.depth = 2
    dumper.include_filter = None
    dumped_constraints = []
    for constraint in constraints:
        dumped_constraints.append(dumper.dump(constraint))
    return dumped_constraints

def load_constraints(dumped_constraints: list, constraints: bpy.types.bpy_prop_collection):
    """ Load dumped constraints

    :param dumped_constraints: list of constraints to load
    :type dumped_constraints: list
    :param constraints: constraints
    :type constraints: bpy.types.bpy_prop_collection
    """
    loader = Loader()
    constraints.clear()
    for dumped_constraint in dumped_constraints:
        constraint_type = dumped_constraint.get('type')
        new_constraint = constraints.new(constraint_type)
        loader.load(new_constraint, dumped_constraint)

def load_modifiers(dumped_modifiers: list, modifiers: bpy.types.bpy_prop_collection):
    """ Load all dumped modifiers into a modifier collection

    :param dumped_modifiers: list of modifiers to load
    :type dumped_modifiers: list
    :param modifiers: modifiers
    :type modifiers: bpy.types.bpy_prop_collection
    """
    loader = Loader()
    modifiers.clear()
    for dumped_modifier in dumped_modifiers:
        name = dumped_modifier.get('name')
        mtype = dumped_modifier.get('type')
        loaded_modifier = modifiers.new(name, mtype)
        loader.load(loaded_modifier, dumped_modifier)

        if loaded_modifier.type == 'NODES':
            load_modifier_geometry_node_inputs(dumped_modifier, loaded_modifier)
        elif loaded_modifier.type == 'PARTICLE_SYSTEM':
            default = loaded_modifier.particle_system.settings
            dumped_particles = dumped_modifier['particle_system']
            loader.load(loaded_modifier.particle_system, dumped_particles)

            settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
            if settings:
                loaded_modifier.particle_system.settings = settings
                # Hack to remove the default generated particle settings
                if not default.uuid:
                    bpy.data.particles.remove(default)
        elif loaded_modifier.type in ['SOFT_BODY', 'CLOTH']:
            loader.load(loaded_modifier.settings, dumped_modifier['settings'])
        elif loaded_modifier.type == 'UV_PROJECT':
            for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
                target_object = bpy.data.objects.get(projector_object)
                if target_object:
                    loaded_modifier.projectors[projector_index].object = target_object
                else:
                    logging.error(f"Couldn't load projector target object {projector_object}")


def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_prop_collection):
    """ Load modifiers custom data not managed by the dump_anything loader
@ -413,48 +490,19 @@ def load_modifiers_custom_data(dumped_modifiers: dict, modifiers: bpy.types.bpy_

    for modifier in modifiers:
        dumped_modifier = dumped_modifiers.get(modifier.name)
        if modifier.type == 'NODES':
            load_modifier_geometry_node_inputs(dumped_modifier, modifier)
        elif modifier.type == 'PARTICLE_SYSTEM':
            default = modifier.particle_system.settings
            dumped_particles = dumped_modifier['particle_system']
            loader.load(modifier.particle_system, dumped_particles)

            settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
            if settings:
                modifier.particle_system.settings = settings
                # Hack to remove the default generated particle settings
                if not default.uuid:
                    bpy.data.particles.remove(default)
        elif modifier.type in ['SOFT_BODY', 'CLOTH']:
            loader.load(modifier.settings, dumped_modifier['settings'])
        elif modifier.type == 'UV_PROJECT':
            for projector_index, projector_object in enumerate(dumped_modifier['projectors']):
                target_object = bpy.data.objects.get(projector_object)
                if target_object:
                    modifier.projectors[projector_index].object = target_object
                else:
                    logging.error("Could't load projector target object {projector_object}")

class BlObject(BlDatablock):
class BlObject(ReplicatedDatablock):
    bl_id = "objects"
    bl_class = bpy.types.Object
    bl_check_common = False
    bl_icon = 'OBJECT_DATA'
    bl_reload_parent = False

    def _construct(self, data):
    @staticmethod
    def construct(data: dict) -> object:
        instance = None

        if self.is_library:
            with bpy.data.libraries.load(filepath=bpy.data.libraries[self.data['library']].filepath, link=True) as (sourceData, targetData):
                targetData.objects = [
                    name for name in sourceData.objects if name == self.data['name']]

            instance = bpy.data.objects[self.data['name']]
            instance.uuid = self.uuid
            return instance

        # TODO: refactoring
        object_name = data.get("name")
        data_uuid = data.get("data_uuid")
@ -467,70 +515,68 @@ class BlObject(BlDatablock):
            ignore=['images'])  # TODO: use resolve_from_id

        if data_type != 'EMPTY' and object_data is None:
            raise Exception(f"Fail to load object {data['name']}({self.uuid})")
            raise Exception(f"Fail to load object {data['name']}")

        instance = bpy.data.objects.new(object_name, object_data)
        instance.uuid = self.uuid
        return bpy.data.objects.new(object_name, object_data)

        return instance

    def _load_implementation(self, data, target):
    @staticmethod
    def load(data: dict, datablock: object):
        loader = Loader()

        load_animation_data(data.get('animation_data'), datablock)
        data_uuid = data.get("data_uuid")
        data_id = data.get("data")

        if target.data and (target.data.name != data_id):
            target.data = get_datablock_from_uuid(
        if datablock.data and (datablock.data.name != data_id):
            datablock.data = get_datablock_from_uuid(
                data_uuid, find_data_from_name(data_id), ignore=['images'])

        # vertex groups
        vertex_groups = data.get('vertex_groups', None)
        if vertex_groups:
            load_vertex_groups(vertex_groups, target)
            load_vertex_groups(vertex_groups, datablock)

        object_data = target.data
        object_data = datablock.data

        # SHAPE KEYS
        shape_keys = data.get('shape_keys')
        if shape_keys:
            load_shape_keys(shape_keys, target)
            load_shape_keys(shape_keys, datablock)

        # Load transformation data
        loader.load(target, data)
        loader.load(datablock, data)

        # Object display fields
        if 'display' in data:
            loader.load(target.display, data['display'])
            loader.load(datablock.display, data['display'])

        # Parenting
        parent_id = data.get('parent_uid')
        if parent_id:
            parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
            # Avoid reloading
            if target.parent != parent and parent is not None:
                target.parent = parent
        elif target.parent:
            target.parent = None
            if datablock.parent != parent and parent is not None:
                datablock.parent = parent
        elif datablock.parent:
            datablock.parent = None

        # Pose
        if 'pose' in data:
            if not target.pose:
            if not datablock.pose:
                raise Exception('No pose data yet (Fixed in a near future)')
            # Bone groups
            for bg_name in data['pose']['bone_groups']:
                bg_data = data['pose']['bone_groups'].get(bg_name)
                bg_target = target.pose.bone_groups.get(bg_name)
                bg_target = datablock.pose.bone_groups.get(bg_name)

                if not bg_target:
                    bg_target = target.pose.bone_groups.new(name=bg_name)
                    bg_target = datablock.pose.bone_groups.new(name=bg_name)

                loader.load(bg_target, bg_data)
                # target.pose.bone_groups.get
                # datablock.pose.bone_groups.get

            # Bones
            for bone in data['pose']['bones']:
                target_bone = target.pose.bones.get(bone)
                target_bone = datablock.pose.bones.get(bone)
                bone_data = data['pose']['bones'].get(bone)

                if 'constraints' in bone_data.keys():
@ -539,13 +585,13 @@ class BlObject(BlDatablock):
                load_pose(target_bone, bone_data)

                if 'bone_index' in bone_data.keys():
                    target_bone.bone_group = target.pose.bone_group[bone_data['bone_group_index']]
                    target_bone.bone_group = datablock.pose.bone_group[bone_data['bone_group_index']]

        # TODO: find another way...
        if target.empty_display_type == "IMAGE":
        if datablock.empty_display_type == "IMAGE":
            img_uuid = data.get('data_uuid')
            if target.data is None and img_uuid:
                target.data = get_datablock_from_uuid(img_uuid, None)
            if datablock.data is None and img_uuid:
                datablock.data = get_datablock_from_uuid(img_uuid, None)

        if hasattr(object_data, 'skin_vertices') \
                and object_data.skin_vertices\
@ -556,30 +602,33 @@ class BlObject(BlDatablock):
                skin_data.data,
                SKIN_DATA)

        if hasattr(target, 'cycles_visibility') \
        if hasattr(datablock, 'cycles_visibility') \
                and 'cycles_visibility' in data:
            loader.load(target.cycles_visibility, data['cycles_visibility'])
            loader.load(datablock.cycles_visibility, data['cycles_visibility'])

        if hasattr(target, 'modifiers'):
            load_modifiers_custom_data(data['modifiers'], target.modifiers)
        if hasattr(datablock, 'modifiers'):
            load_modifiers(data['modifiers'], datablock.modifiers)

        constraints = data.get('constraints')
        if constraints:
            load_constraints(constraints, datablock.constraints)

        # PHYSICS
        load_physics(data, target)
        load_physics(data, datablock)

        transform = data.get('transforms', None)
        if transform:
            target.matrix_parent_inverse = mathutils.Matrix(
            datablock.matrix_parent_inverse = mathutils.Matrix(
                transform['matrix_parent_inverse'])
            target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
            target.matrix_local = mathutils.Matrix(transform['matrix_local'])
            datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
            datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])


    def _dump_implementation(self, data, instance=None):
        assert(instance)

        if _is_editmode(instance):
            if self.preferences.sync_flags.sync_during_editmode:
                instance.update_from_editmode()
    @staticmethod
    def dump(datablock: object) -> dict:
        if _is_editmode(datablock):
            if get_preferences().sync_flags.sync_during_editmode:
                datablock.update_from_editmode()
            else:
                raise ContextError("Object is in edit-mode.")

@ -618,32 +667,30 @@ class BlObject(BlDatablock):
            'type'
        ]

        data = dumper.dump(instance)

        data = dumper.dump(datablock)
        data['animation_data'] = dump_animation_data(datablock)
        dumper.include_filter = [
            'matrix_parent_inverse',
            'matrix_local',
            'matrix_basis']
        data['transforms'] = dumper.dump(instance)
        data['transforms'] = dumper.dump(datablock)
        dumper.include_filter = [
            'show_shadows',
        ]
        data['display'] = dumper.dump(instance.display)
        data['display'] = dumper.dump(datablock.display)

        data['data_uuid'] = getattr(instance.data, 'uuid', None)
        if self.is_library:
            return data
        data['data_uuid'] = getattr(datablock.data, 'uuid', None)

        # PARENTING
        if instance.parent:
            data['parent_uid'] = (instance.parent.uuid, instance.parent.name)
        if datablock.parent:
            data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)

        # MODIFIERS
        modifiers = getattr(instance, 'modifiers', None)
        if hasattr(instance, 'modifiers'):
        modifiers = getattr(datablock, 'modifiers', None)
        if hasattr(datablock, 'modifiers'):
            data['modifiers'] = dump_modifiers(modifiers)

        gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)
        gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)

        if gp_modifiers:
            dumper.include_filter = None
@ -666,16 +713,14 @@ class BlObject(BlDatablock):


        # CONSTRAINTS
        if hasattr(instance, 'constraints'):
            dumper.include_filter = None
            dumper.depth = 3
            data["constraints"] = dumper.dump(instance.constraints)
        if hasattr(datablock, 'constraints'):
            data["constraints"] = dump_constraints(datablock.constraints)

        # POSE
        if hasattr(instance, 'pose') and instance.pose:
        if hasattr(datablock, 'pose') and datablock.pose:
            # BONES
            bones = {}
            for bone in instance.pose.bones:
            for bone in datablock.pose.bones:
                bones[bone.name] = {}
                dumper.depth = 1
                rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
@ -700,7 +745,7 @@ class BlObject(BlDatablock):

            # GROUPS
            bone_groups = {}
            for group in instance.pose.bone_groups:
            for group in datablock.pose.bone_groups:
                dumper.depth = 3
                dumper.include_filter = [
                    'name',
@ -710,11 +755,11 @@ class BlObject(BlDatablock):
            data['pose']['bone_groups'] = bone_groups

        # VERTEX GROUPS
        if len(instance.vertex_groups) > 0:
            data['vertex_groups'] = dump_vertex_groups(instance)
        if len(datablock.vertex_groups) > 0:
            data['vertex_groups'] = dump_vertex_groups(datablock)

        # SHAPE KEYS
        object_data = instance.data
        object_data = datablock.data
        if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
            data['shape_keys'] = dump_shape_keys(object_data.shape_keys)

@ -727,7 +772,7 @@ class BlObject(BlDatablock):
            data['skin_vertices'] = skin_vertices

        # CYCLE SETTINGS
        if hasattr(instance, 'cycles_visibility'):
        if hasattr(datablock, 'cycles_visibility'):
            dumper.include_filter = [
                'camera',
                'diffuse',
@ -736,38 +781,48 @@ class BlObject(BlDatablock):
                'scatter',
                'shadow',
            ]
            data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)
            data['cycles_visibility'] = dumper.dump(datablock.cycles_visibility)

        # PHYSICS
        data.update(dump_physics(instance))
        data.update(dump_physics(datablock))

        return data

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        # Avoid Empty case
        if self.instance.data:
            deps.append(self.instance.data)
        if datablock.data:
            deps.append(datablock.data)

        # Particle systems
        for particle_slot in self.instance.particle_systems:
        for particle_slot in datablock.particle_systems:
            deps.append(particle_slot.settings)

        if self.is_library:
            deps.append(self.instance.library)
        if datablock.parent:
            deps.append(datablock.parent)

        if self.instance.parent:
            deps.append(self.instance.parent)

        if self.instance.instance_type == 'COLLECTION':
        if datablock.instance_type == 'COLLECTION':
            # TODO: uuid based
            deps.append(self.instance.instance_collection)
            deps.append(datablock.instance_collection)

        if self.instance.modifiers:
            deps.extend(find_textures_dependencies(self.instance.modifiers))
            deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))
        if datablock.modifiers:
            deps.extend(find_textures_dependencies(datablock.modifiers))
            deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))

        if hasattr(datablock.data, 'shape_keys') and datablock.data.shape_keys:
            deps.extend(resolve_animation_dependencies(datablock.data.shape_keys))

        deps.extend(resolve_animation_dependencies(datablock))

        if hasattr(self.instance.data, 'shape_keys') and self.instance.data.shape_keys:
            deps.extend(resolve_animation_dependencies(self.instance.data.shape_keys))
        return deps


    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.objects)

_type = bpy.types.Object
_class = BlObject
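The raw `Dumper` pass over `object.constraints` is replaced by the dedicated `dump_constraints`/`load_constraints` helpers above; a minimal round trip (import path assumed for illustration):

import bpy
from multi_user.bl_types.bl_object import dump_constraints, load_constraints

src = bpy.data.objects.new("src", None)
dst = bpy.data.objects.new("dst", None)
src.constraints.new('COPY_LOCATION')

dumped = dump_constraints(src.constraints)   # one dict per constraint
load_constraints(dumped, dst.constraints)    # clears dst, then recreates each
assert dst.constraints[0].type == 'COPY_LOCATION'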
@ -2,7 +2,10 @@ import bpy
import mathutils

from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
@ -37,54 +40,65 @@ IGNORED_ATTR = [
    "users"
]

class BlParticle(BlDatablock):
class BlParticle(ReplicatedDatablock):
    bl_id = "particles"
    bl_class = bpy.types.ParticleSettings
    bl_icon = "PARTICLES"
    bl_check_common = False
    bl_reload_parent = False

    def _construct(self, data):
        instance = bpy.data.particles.new(data["name"])
        instance.uuid = self.uuid
        return instance
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.particles.new(data["name"])

    def _load_implementation(self, data, target):
        dump_anything.load(target, data)
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)
        dump_anything.load(datablock, data)

        dump_anything.load(target.effector_weights, data["effector_weights"])
        dump_anything.load(datablock.effector_weights, data["effector_weights"])

        # Force field
        force_field_1 = data.get("force_field_1", None)
        if force_field_1:
            dump_anything.load(target.force_field_1, force_field_1)
            dump_anything.load(datablock.force_field_1, force_field_1)

        force_field_2 = data.get("force_field_2", None)
        if force_field_2:
            dump_anything.load(target.force_field_2, force_field_2)
            dump_anything.load(datablock.force_field_2, force_field_2)

        # Texture slots
        load_texture_slots(data["texture_slots"], target.texture_slots)

    def _dump_implementation(self, data, instance=None):
        assert instance
        load_texture_slots(data["texture_slots"], datablock.texture_slots)

    @staticmethod
    def dump(datablock: object) -> dict:
        dumper = dump_anything.Dumper()
        dumper.depth = 1
        dumper.exclude_filter = IGNORED_ATTR
        data = dumper.dump(instance)
        data = dumper.dump(datablock)

        # Particle effectors
        data["effector_weights"] = dumper.dump(instance.effector_weights)
        if instance.force_field_1:
            data["force_field_1"] = dumper.dump(instance.force_field_1)
        if instance.force_field_2:
            data["force_field_2"] = dumper.dump(instance.force_field_2)
        data["effector_weights"] = dumper.dump(datablock.effector_weights)
        if datablock.force_field_1:
            data["force_field_1"] = dumper.dump(datablock.force_field_1)
        if datablock.force_field_2:
            data["force_field_2"] = dumper.dump(datablock.force_field_2)

        # Texture slots
        data["texture_slots"] = dump_textures_slots(instance.texture_slots)

        data["texture_slots"] = dump_textures_slots(datablock.texture_slots)
        data['animation_data'] = dump_animation_data(datablock)
        return data

    def _resolve_deps_implementation(self):
        return [t.texture for t in self.instance.texture_slots if t and t.texture]
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        return resolve_datablock_from_uuid(uuid, bpy.data.particles)

    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = [t.texture for t in datablock.texture_slots if t and t.texture]
        deps.extend(resolve_animation_dependencies(datablock))
        return deps

_type = bpy.types.ParticleSettings
_class = BlParticle
@ -18,17 +18,21 @@

import logging
from pathlib import Path
from uuid import uuid4

import bpy
import mathutils
from deepdiff import DeepDiff
from deepdiff import DeepDiff, Delta
from replication.constants import DIFF_JSON, MODIFIED
from replication.protocol import ReplicatedDatablock

from ..utils import flush_history
from ..utils import flush_history, get_preferences
from .bl_action import (dump_animation_data, load_animation_data,
                        resolve_animation_dependencies)
from .bl_collection import (dump_collection_children, dump_collection_objects,
                            load_collection_childrens, load_collection_objects,
                            resolve_collection_dependencies)
from .bl_datablock import BlDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_file import get_filepath
from .dump_anything import Dumper, Loader

@ -286,12 +290,10 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
    dumper.depth = 1
    data = dumper.dump(sequence)

    # TODO: Support multiple images
    if sequence.type == 'IMAGE':
        data['filenames'] = [e.filename for e in sequence.elements]

    # Effect strip inputs
    input_count = getattr(sequence, 'input_count', None)
    if input_count:
@ -302,7 +304,8 @@ def dump_sequence(sequence: bpy.types.Sequence) -> dict:
    return data


def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor):
def load_sequence(sequence_data: dict,
                  sequence_editor: bpy.types.SequenceEditor):
    """ Load sequence from dumped data

    :arg sequence_data: sequence to dump
@ -321,54 +324,56 @@ def load_sequence(sequence_data: dict, sequence_editor: bpy.types.SequenceEditor
    if strip_type == 'SCENE':
        strip_scene = bpy.data.scenes.get(sequence_data.get('scene'))
        sequence = sequence_editor.sequences.new_scene(strip_name,
                                                       strip_scene,
                                                       strip_channel,
                                                       strip_frame_start)
    elif strip_type == 'MOVIE':
        filepath = get_filepath(Path(sequence_data['filepath']).name)
        sequence = sequence_editor.sequences.new_movie(strip_name,
                                                       filepath,
                                                       strip_channel,
                                                       strip_frame_start)
    elif strip_type == 'SOUND':
        filepath = bpy.data.sounds[sequence_data['sound']].filepath
        sequence = sequence_editor.sequences.new_sound(strip_name,
                                                       filepath,
                                                       strip_channel,
                                                       strip_frame_start)
    elif strip_type == 'IMAGE':
        images_name = sequence_data.get('filenames')
        filepath = get_filepath(images_name[0])
        sequence = sequence_editor.sequences.new_image(strip_name,
                                                       filepath,
                                                       strip_channel,
                                                       strip_frame_start)
        # load other images
        if len(images_name) > 1:
            for img_idx in range(1, len(images_name)):
                sequence.elements.append((images_name[img_idx]))
    else:
        seq = {}

        for i in range(sequence_data['input_count']):
            seq[f"seq{i+1}"] = sequence_editor.sequences_all.get(
                sequence_data.get(f"input_{i+1}", None))

        sequence = sequence_editor.sequences.new_effect(name=strip_name,
                                                        type=strip_type,
                                                        channel=strip_channel,
                                                        frame_start=strip_frame_start,
                                                        frame_end=sequence_data['frame_final_end'],
                                                        **seq)

    loader = Loader()
    # TODO: Support filepath updates
    loader.exclure_filter = ['filepath', 'sound', 'filenames', 'fps']
    loader.load(sequence, sequence_data)
    sequence.select = False


class BlScene(BlDatablock):
class BlScene(ReplicatedDatablock):
    is_root = True
    use_delta = True

    bl_id = "scenes"
    bl_class = bpy.types.Scene
@ -376,76 +381,78 @@ class BlScene(BlDatablock):
    bl_icon = 'SCENE_DATA'
    bl_reload_parent = False

    def _construct(self, data):
        instance = bpy.data.scenes.new(data["name"])
        instance.uuid = self.uuid
    @staticmethod
    def construct(data: dict) -> object:
        return bpy.data.scenes.new(data["name"])

        return instance
    @staticmethod
    def load(data: dict, datablock: object):
        load_animation_data(data.get('animation_data'), datablock)

    def _load_implementation(self, data, target):
        # Load other meshes metadata
        loader = Loader()
        loader.load(target, data)
        loader.load(datablock, data)

        # Load master collection
        load_collection_objects(
            data['collection']['objects'], target.collection)
            data['collection']['objects'], datablock.collection)
        load_collection_childrens(
            data['collection']['children'], target.collection)
            data['collection']['children'], datablock.collection)

        if 'world' in data.keys():
            target.world = bpy.data.worlds[data['world']]
            datablock.world = bpy.data.worlds[data['world']]

        # Annotation
        if 'grease_pencil' in data.keys():
            target.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]
            datablock.grease_pencil = bpy.data.grease_pencils[data['grease_pencil']]

        if self.preferences.sync_flags.sync_render_settings:
        if get_preferences().sync_flags.sync_render_settings:
            if 'eevee' in data.keys():
                loader.load(target.eevee, data['eevee'])
                loader.load(datablock.eevee, data['eevee'])

            if 'cycles' in data.keys():
                loader.load(target.cycles, data['cycles'])
                loader.load(datablock.cycles, data['cycles'])

            if 'render' in data.keys():
                loader.load(target.render, data['render'])
                loader.load(datablock.render, data['render'])

            if 'view_settings' in data.keys():
                loader.load(target.view_settings, data['view_settings'])
                if target.view_settings.use_curve_mapping and \
                        'curve_mapping' in data['view_settings']:
            view_settings = data.get('view_settings')
            if view_settings:
                loader.load(datablock.view_settings, view_settings)
                if datablock.view_settings.use_curve_mapping and \
                        'curve_mapping' in view_settings:
                    # TODO: change this ugly fix
                    target.view_settings.curve_mapping.white_level = data[
                        'view_settings']['curve_mapping']['white_level']
                    target.view_settings.curve_mapping.black_level = data[
                        'view_settings']['curve_mapping']['black_level']
                    target.view_settings.curve_mapping.update()
                    datablock.view_settings.curve_mapping.white_level = view_settings['curve_mapping']['white_level']
                    datablock.view_settings.curve_mapping.black_level = view_settings['curve_mapping']['black_level']
                    datablock.view_settings.curve_mapping.update()

        # Sequencer
        sequences = data.get('sequences')

        if sequences:
            # Create sequencer data
            target.sequence_editor_create()
            vse = target.sequence_editor
            datablock.sequence_editor_create()
            vse = datablock.sequence_editor

            # Clear removed sequences
            for seq in vse.sequences_all:
                if seq.name not in sequences:
                    vse.sequences.remove(seq)
            # Load existing sequences
            for seq_name, seq_data in sequences.items():
            for seq_data in sequences.values():
                load_sequence(seq_data, vse)
        # If the sequence is no longer used, clear it
        elif target.sequence_editor and not sequences:
            target.sequence_editor_clear()
        elif datablock.sequence_editor and not sequences:
            datablock.sequence_editor_clear()

        # FIXME: Find a better way after the replication big refactoring
        # Keep other users from deleting collection objects by flushing their history
        flush_history()

    def _dump_implementation(self, data, instance=None):
        assert(instance)
    @staticmethod
    def dump(datablock: object) -> dict:
        data = {}
        data['animation_data'] = dump_animation_data(datablock)

        # Metadata
        scene_dumper = Dumper()
@ -459,40 +466,40 @@ class BlScene(BlDatablock):
            'frame_end',
            'frame_step',
        ]
        if self.preferences.sync_flags.sync_active_camera:
        if get_preferences().sync_flags.sync_active_camera:
            scene_dumper.include_filter.append('camera')

        data.update(scene_dumper.dump(instance))
        data.update(scene_dumper.dump(datablock))

        # Master collection
        data['collection'] = {}
        data['collection']['children'] = dump_collection_children(
            instance.collection)
            datablock.collection)
        data['collection']['objects'] = dump_collection_objects(
            instance.collection)
            datablock.collection)

        scene_dumper.depth = 1
        scene_dumper.include_filter = None

        # Render settings
        if self.preferences.sync_flags.sync_render_settings:
        if get_preferences().sync_flags.sync_render_settings:
            scene_dumper.include_filter = RENDER_SETTINGS

            data['render'] = scene_dumper.dump(instance.render)
            data['render'] = scene_dumper.dump(datablock.render)

            if instance.render.engine == 'BLENDER_EEVEE':
            if datablock.render.engine == 'BLENDER_EEVEE':
                scene_dumper.include_filter = EVEE_SETTINGS
                data['eevee'] = scene_dumper.dump(instance.eevee)
            elif instance.render.engine == 'CYCLES':
                data['eevee'] = scene_dumper.dump(datablock.eevee)
            elif datablock.render.engine == 'CYCLES':
                scene_dumper.include_filter = CYCLES_SETTINGS
                data['cycles'] = scene_dumper.dump(instance.cycles)
                data['cycles'] = scene_dumper.dump(datablock.cycles)

            scene_dumper.include_filter = VIEW_SETTINGS
            data['view_settings'] = scene_dumper.dump(instance.view_settings)
            data['view_settings'] = scene_dumper.dump(datablock.view_settings)

            if instance.view_settings.use_curve_mapping:
            if datablock.view_settings.use_curve_mapping:
                data['view_settings']['curve_mapping'] = scene_dumper.dump(
                    instance.view_settings.curve_mapping)
                    datablock.view_settings.curve_mapping)
                scene_dumper.depth = 5
                scene_dumper.include_filter = [
                    'curves',
@ -500,35 +507,37 @@ class BlScene(BlDatablock):
                    'location',
                ]
                data['view_settings']['curve_mapping']['curves'] = scene_dumper.dump(
                    instance.view_settings.curve_mapping.curves)
                    datablock.view_settings.curve_mapping.curves)

        # Sequence
        vse = instance.sequence_editor
        vse = datablock.sequence_editor
        if vse:
            dumped_sequences = {}
            for seq in vse.sequences_all:
                dumped_sequences[seq.name] = dump_sequence(seq)
            data['sequences'] = dumped_sequences


        return data

    def _resolve_deps_implementation(self):
    @staticmethod
    def resolve_deps(datablock: object) -> [object]:
        deps = []

        # Master Collection
        deps.extend(resolve_collection_dependencies(self.instance.collection))
        deps.extend(resolve_collection_dependencies(datablock.collection))

        # world
        if self.instance.world:
            deps.append(self.instance.world)
        if datablock.world:
            deps.append(datablock.world)

        # annotations
        if self.instance.grease_pencil:
            deps.append(self.instance.grease_pencil)
        if datablock.grease_pencil:
            deps.append(datablock.grease_pencil)

        deps.extend(resolve_animation_dependencies(datablock))

        # Sequences
        vse = self.instance.sequence_editor
        vse = datablock.sequence_editor
        if vse:
            for sequence in vse.sequences_all:
                if sequence.type == 'MOVIE' and sequence.filepath:
@ -543,16 +552,45 @@ class BlScene(BlDatablock):

        return deps

    def diff(self):
    @staticmethod
    def resolve(data: dict) -> object:
        uuid = data.get('uuid')
        name = data.get('name')
        datablock = resolve_datablock_from_uuid(uuid, bpy.data.scenes)
        if datablock is None:
            datablock = bpy.data.scenes.get(name)

        return datablock

    @staticmethod
    def compute_delta(last_data: dict, current_data: dict) -> Delta:
        exclude_path = []

        if not self.preferences.sync_flags.sync_render_settings:
        if not get_preferences().sync_flags.sync_render_settings:
            exclude_path.append("root['eevee']")
            exclude_path.append("root['cycles']")
            exclude_path.append("root['view_settings']")
            exclude_path.append("root['render']")

        if not self.preferences.sync_flags.sync_active_camera:
        if not get_preferences().sync_flags.sync_active_camera:
            exclude_path.append("root['camera']")

        return DeepDiff(self.data, self._dump(instance=self.instance), exclude_paths=exclude_path)
        diff_params = {
            'exclude_paths': exclude_path,
            'ignore_order': True,
            'report_repetition': True
        }
        delta_params = {
            # 'mutate': True
        }

        return Delta(
            DeepDiff(last_data,
                     current_data,
                     cache_size=5000,
                     **diff_params),
            **delta_params)


_type = bpy.types.Scene
_class = BlScene
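`compute_delta` replaces the scene's `diff()` with deepdiff `Delta` objects, so only the change set travels over the wire instead of a full re-dump. The mechanism in isolation, using placeholder data:

from deepdiff import DeepDiff, Delta

last = {'frame_start': 1, 'frame_end': 250}
current = {'frame_start': 1, 'frame_end': 300}

delta = Delta(DeepDiff(last, current, ignore_order=True, report_repetition=True))
assert last + delta == current   # applying the delta reproduces the new state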
@ -23,45 +23,59 @@ from pathlib import Path
import bpy

from .bl_file import get_filepath, ensure_unpacked
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .dump_anything import Dumper, Loader
from .bl_datablock import resolve_datablock_from_uuid


class BlSound(BlDatablock):
class BlSound(ReplicatedDatablock):
bl_id = "sounds"
bl_class = bpy.types.Sound
bl_check_common = False
bl_icon = 'SOUND'
bl_reload_parent = False

def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
filename = data.get('filename')

return bpy.data.sounds.load(get_filepath(filename))

def _load(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)

def diff(self):
return False

def _dump(self, instance=None):
filename = Path(instance.filepath).name
@staticmethod
def dump(datablock: object) -> dict:
filename = Path(datablock.filepath).name

if not filename:
raise FileExistsError(instance.filepath)
raise FileExistsError(datablock.filepath)

return {
'filename': filename,
'name': instance.name
'name': datablock.name
}

def _resolve_deps_implementation(self):
@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []
if self.instance.filepath and self.instance.filepath != '<builtin>':
ensure_unpacked(self.instance)
if datablock.filepath and datablock.filepath != '<builtin>':
ensure_unpacked(datablock)

deps.append(Path(bpy.path.abspath(self.instance.filepath)))
deps.append(Path(bpy.path.abspath(datablock.filepath)))

return deps

@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.sounds)

@staticmethod
def needs_update(datablock: object, data:dict)-> bool:
return False

_type = bpy.types.Sound
_class = BlSound
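
Note: the BlSound rewrite shows the new ReplicatedDatablock contract: stateless staticmethods plus module-level _type/_class registration, replacing the old _construct/_load/_dump instance methods. A self-contained sketch of that contract, using a hypothetical InMemoryNote type in place of a Blender datablock:

class InMemoryNote:
    def __init__(self, name):
        self.name = name
        self.body = ""

class NoteImplementation:
    bl_id = "notes"            # collection name, mirroring bl_id above
    bl_check_common = False
    bl_icon = 'TEXT'
    bl_reload_parent = False

    @staticmethod
    def construct(data: dict) -> object:
        return InMemoryNote(data["name"])

    @staticmethod
    def load(data: dict, datablock: object):
        datablock.body = data.get("body", "")

    @staticmethod
    def dump(datablock: object) -> dict:
        return {"name": datablock.name, "body": datablock.body}

    @staticmethod
    def resolve(data: dict) -> object:
        return None  # a real implementation looks the datablock up by uuid

    @staticmethod
    def needs_update(datablock: object, data: dict) -> bool:
        return datablock.body != data.get("body")

_type = InMemoryNote
_class = NoteImplementation
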
@ -20,26 +20,29 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies


class BlSpeaker(BlDatablock):
class BlSpeaker(ReplicatedDatablock):
bl_id = "speakers"
bl_class = bpy.types.Speaker
bl_check_common = False
bl_icon = 'SPEAKER'
bl_reload_parent = False

def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)

def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.speakers.new(data["name"])

def _dump_implementation(self, data, instance=None):
assert(instance)

@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
@ -58,17 +61,27 @@ class BlSpeaker(BlDatablock):
'cone_volume_outer'
]

return dumper.dump(instance)
data = dumper.dump(datablock)
data['animation_data'] = dump_animation_data(datablock)
return data

def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.speakers)

@staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []

sound = self.instance.sound
sound = datablock.sound

if sound:
deps.append(sound)

deps.extend(resolve_animation_dependencies(datablock))
return deps


_type = bpy.types.Speaker
_class = BlSpeaker
@ -20,25 +20,30 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies
import bpy.types as T


class BlTexture(BlDatablock):
class BlTexture(ReplicatedDatablock):
bl_id = "textures"
bl_class = bpy.types.Texture
bl_check_common = False
bl_icon = 'TEXTURE'
bl_reload_parent = False

def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)
load_animation_data(data.get('animation_data'), datablock)

def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.textures.new(data["name"], data["type"])

def _dump_implementation(self, data, instance=None):
assert(instance)
@staticmethod
def dump(datablock: object) -> dict:

dumper = Dumper()
dumper.depth = 1
@ -52,24 +57,39 @@ class BlTexture(BlDatablock):
'name_full'
]

data = dumper.dump(instance)
color_ramp = getattr(instance, 'color_ramp', None)
data = dumper.dump(datablock)

color_ramp = getattr(datablock, 'color_ramp', None)

if color_ramp:
dumper.depth = 4
data['color_ramp'] = dumper.dump(color_ramp)

data['animation_data'] = dump_animation_data(datablock)
return data

def _resolve_deps_implementation(self):
# TODO: resolve material
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.textures)

@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []

image = getattr(self.instance,"image", None)
image = getattr(datablock,"image", None)

if image:
deps.append(image)

deps.extend(resolve_animation_dependencies(datablock))

return deps


_type = [T.WoodTexture, T.VoronoiTexture,
T.StucciTexture, T.NoiseTexture,
T.MusgraveTexture, T.MarbleTexture,
T.MagicTexture, T.ImageTexture,
T.DistortedNoiseTexture, T.CloudsTexture,
T.BlendTexture]
_class = BlTexture
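
Note: unlike the other implementations, BlTexture registers a list of bpy types against a single class. A registry sketch of how one implementation can serve several concrete types (plain stand-in classes, not the replication API):

class WoodTexture: pass
class ImageTexture: pass

class TextureImpl: pass

implementations = {}

def register_implementation(types, impl):
    # Accept a single type or a list, exactly like BlTexture's _type list
    if not isinstance(types, (list, tuple)):
        types = [types]
    for t in types:
        implementations[t.__name__] = impl

register_implementation([WoodTexture, ImageTexture], TextureImpl)
assert implementations['WoodTexture'] is implementations['ImageTexture']
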
@ -21,32 +21,24 @@ import mathutils
from pathlib import Path

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, resolve_datablock_from_uuid
from .bl_material import dump_materials_slots, load_materials_slots
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

class BlVolume(BlDatablock):
class BlVolume(ReplicatedDatablock):
bl_id = "volumes"
bl_class = bpy.types.Volume
bl_check_common = False
bl_icon = 'VOLUME_DATA'
bl_reload_parent = False

def _load_implementation(self, data, target):
loader = Loader()
loader.load(target, data)
loader.load(target.display, data['display'])

# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, target.materials)

def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.volumes.new(data["name"])

def _dump_implementation(self, data, instance=None):
assert(instance)

@staticmethod
def dump(datablock: object) -> dict:
dumper = Dumper()
dumper.depth = 1
dumper.exclude_filter = [
@ -60,27 +52,48 @@ class BlVolume(BlDatablock):
'use_fake_user'
]

data = dumper.dump(instance)
data = dumper.dump(datablock)

data['display'] = dumper.dump(instance.display)
data['display'] = dumper.dump(datablock.display)

# Fix material index
data['materials'] = dump_materials_slots(instance.materials)

data['materials'] = dump_materials_slots(datablock.materials)
data['animation_data'] = dump_animation_data(datablock)
return data

def _resolve_deps_implementation(self):
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(datablock, data)
loader.load(datablock.display, data['display'])

# MATERIAL SLOTS
src_materials = data.get('materials', None)
if src_materials:
load_materials_slots(src_materials, datablock.materials)

@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.volumes)

@staticmethod
def resolve_deps(datablock: object) -> [object]:
# TODO: resolve material
deps = []

external_vdb = Path(bpy.path.abspath(self.instance.filepath))
external_vdb = Path(bpy.path.abspath(datablock.filepath))
if external_vdb.exists() and not external_vdb.is_dir():
deps.append(external_vdb)

for material in self.instance.materials:
for material in datablock.materials:
if material:
deps.append(material)

deps.extend(resolve_animation_dependencies(datablock))

return deps


_type = bpy.types.Volume
_class = BlVolume
@ -20,35 +20,40 @@ import bpy
import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from replication.protocol import ReplicatedDatablock
from .bl_material import (load_node_tree,
dump_node_tree,
get_node_tree_dependencies)

from .bl_datablock import resolve_datablock_from_uuid
from .bl_action import dump_animation_data, load_animation_data, resolve_animation_dependencies

class BlWorld(BlDatablock):

class BlWorld(ReplicatedDatablock):
bl_id = "worlds"
bl_class = bpy.types.World
bl_check_common = True
bl_icon = 'WORLD_DATA'
bl_reload_parent = False

def _construct(self, data):
@staticmethod
def construct(data: dict) -> object:
return bpy.data.worlds.new(data["name"])

def _load_implementation(self, data, target):
@staticmethod
def load(data: dict, datablock: object):
load_animation_data(data.get('animation_data'), datablock)
loader = Loader()
loader.load(target, data)
loader.load(datablock, data)

if data["use_nodes"]:
if target.node_tree is None:
target.use_nodes = True
if datablock.node_tree is None:
datablock.use_nodes = True

load_node_tree(data['node_tree'], target.node_tree)

def _dump_implementation(self, data, instance=None):
assert(instance)
load_node_tree(data['node_tree'], datablock.node_tree)

@staticmethod
def dump(datablock: object) -> dict:
world_dumper = Dumper()
world_dumper.depth = 1
world_dumper.include_filter = [
@ -56,17 +61,27 @@ class BlWorld(BlDatablock):
"name",
"color"
]
data = world_dumper.dump(instance)
if instance.use_nodes:
data['node_tree'] = dump_node_tree(instance.node_tree)
data = world_dumper.dump(datablock)
if datablock.use_nodes:
data['node_tree'] = dump_node_tree(datablock.node_tree)

data['animation_data'] = dump_animation_data(datablock)
return data

def _resolve_deps_implementation(self):
@staticmethod
def resolve(data: dict) -> object:
uuid = data.get('uuid')
return resolve_datablock_from_uuid(uuid, bpy.data.worlds)

@staticmethod
def resolve_deps(datablock: object) -> [object]:
deps = []

if self.instance.use_nodes:
deps.extend(get_node_tree_dependencies(self.instance.node_tree))
if self.is_library:
deps.append(self.instance.library)
if datablock.use_nodes:
deps.extend(get_node_tree_dependencies(datablock.node_tree))

deps.extend(resolve_animation_dependencies(datablock))
return deps

_type = bpy.types.World
_class = BlWorld
@ -507,16 +507,12 @@ class Loader:
_constructors = {
T.ColorRampElement: (CONSTRUCTOR_NEW, ["position"]),
T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.GpencilModifier: (CONSTRUCTOR_NEW, ["name", "type"]),
T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
}

destructors = {
T.ColorRampElement: DESTRUCTOR_REMOVE,
T.Modifier: DESTRUCTOR_CLEAR,
T.GpencilModifier: DESTRUCTOR_CLEAR,
T.Constraint: DESTRUCTOR_REMOVE,
}
element_type = element.bl_rna_property.fixed_type
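
Note: the _constructors/destructors tables map bpy property types to the call used to rebuild or clear collection elements. A small sketch of this table-driven dispatch with stand-in classes (CONSTRUCTOR_NEW here is an assumed string constant, not the Loader's actual value):

CONSTRUCTOR_NEW = "new"

class Modifier:
    def __init__(self, name, type):
        self.name, self.type = name, type

class ModifierCollection:
    def new(self, name, type):
        return Modifier(name, type)

# type -> (collection method, attributes read from the dumped dict)
constructors = {
    Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
}

def construct_element(collection, element_type, dumped: dict):
    method, keys = constructors[element_type]
    return getattr(collection, method)(*[dumped[k] for k in keys])

mod = construct_element(ModifierCollection(), Modifier,
                        {"name": "Subsurf", "type": "SUBSURF"})
assert mod.name == "Subsurf"
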
@ -24,20 +24,25 @@ import sys
from pathlib import Path
import socket
import re
import bpy

VERSION_EXPR = re.compile('\d+.\d+.\d+')

THIRD_PARTY = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
DEFAULT_CACHE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "cache")
REPLICATION_DEPENDENCIES = {
"zmq",
"deepdiff"
}
LIBS = os.path.join(os.path.dirname(os.path.abspath(__file__)), "libs")
REPLICATION = os.path.join(LIBS,"replication")

PYTHON_PATH = None
SUBPROCESS_DIR = None


rtypes = []


def module_can_be_imported(name):
def module_can_be_imported(name: str) -> bool:
try:
__import__(name)
return True
@ -50,7 +55,7 @@ def install_pip():
subprocess.run([str(PYTHON_PATH), "-m", "ensurepip"])


def install_package(name, version):
def install_package(name: str, install_dir: str):
logging.info(f"Installing {name}...")
env = os.environ
if "PIP_REQUIRE_VIRTUALENV" in env:
@ -60,12 +65,13 @@ def install_package(name, version):
# env var for the subprocess.
env = os.environ.copy()
del env["PIP_REQUIRE_VIRTUALENV"]
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}=={version}"], env=env)
subprocess.run([str(PYTHON_PATH), "-m", "pip", "install", f"{name}", "-t", install_dir], env=env)

if name in sys.modules:
del sys.modules[name]

def check_package_version(name, required_version):

def check_package_version(name: str, required_version: str):
logging.info(f"Checking {name} version...")
out = subprocess.run([str(PYTHON_PATH), "-m", "pip", "show", name], capture_output=True)

@ -77,6 +83,7 @@ def check_package_version(name, required_version):
logging.info(f"{name} needs an update")
return False


def get_ip():
"""
Retrieve the main network interface IP.
@ -94,7 +101,25 @@ def check_dir(dir):
os.makedirs(dir)


def setup(dependencies, python_path):
def setup_paths(paths: list):
""" Add missing path to sys.path
"""
for path in paths:
if path not in sys.path:
logging.debug(f"Adding {path} dir to the path.")
sys.path.insert(0, path)


def remove_paths(paths: list):
""" Remove list of path from sys.path
"""
for path in paths:
if path in sys.path:
logging.debug(f"Removing {path} dir from the path.")
sys.path.remove(path)


def install_modules(dependencies: list, python_path: str, install_dir: str):
global PYTHON_PATH, SUBPROCESS_DIR

PYTHON_PATH = Path(python_path)
@ -103,9 +128,23 @@ def setup(dependencies, python_path):
if not module_can_be_imported("pip"):
install_pip()

for package_name, package_version in dependencies:
for package_name in dependencies:
if not module_can_be_imported(package_name):
install_package(package_name, package_version)
install_package(package_name, install_dir=install_dir)
module_can_be_imported(package_name)
elif not check_package_version(package_name, package_version):
install_package(package_name, package_version)

def register():
if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python

for module_name in list(sys.modules.keys()):
if 'replication' in module_name:
del sys.modules[module_name]

setup_paths([LIBS, REPLICATION])
install_modules(REPLICATION_DEPENDENCIES, python_binary_path, install_dir=LIBS)

def unregister():
remove_paths([REPLICATION, LIBS])
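
Note: register() now prepends the vendored libs/ directory and the replication submodule to sys.path, and pip-installs only the pure dependencies (zmq, deepdiff) into libs/ via pip's -t flag. A standalone sketch of the same vendoring pattern (the directory layout and package name are assumptions, not the addon's exact code):

import os
import subprocess
import sys

ADDON_DIR = os.path.dirname(os.path.abspath(__file__))
LIBS = os.path.join(ADDON_DIR, "libs")

def ensure_vendored(package: str):
    # Make the addon-local libs directory importable, then install the
    # package there if missing, without touching Blender's site-packages.
    if LIBS not in sys.path:
        sys.path.insert(0, LIBS)
    try:
        __import__(package)
    except ImportError:
        subprocess.run([sys.executable, "-m", "pip",
                        "install", package, "-t", LIBS], check=True)

ensure_vendored("deepdiff")
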
@ -1 +1 @@
Subproject commit b2bd39a6e140f60fc2422d710bce83586cc93af1
Subproject commit f0745647e08406e46074d555e12a612e95c87aa1
@ -47,11 +47,12 @@ from bpy.app.handlers import persistent
from bpy_extras.io_utils import ExportHelper, ImportHelper
from replication.constants import (COMMITED, FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_INITIAL, STATE_SYNCING, UP)
from replication.data import DataTranslationProtocol
from replication.protocol import DataTranslationProtocol
from replication.exception import ContextError, NonAuthorizedOperationError
from replication.interface import session
from replication.porcelain import add, apply
from replication import porcelain
from replication.repository import Repository
from replication.objects import Node

from . import bl_types, environment, timers, ui, utils
from .presence import SessionStatusWidget, renderer, view3d_find
@ -79,35 +80,33 @@ def session_callback(name):
def initialize_session():
"""Session connection init handler
"""
logging.info("Initializing the scene")
settings = utils.get_preferences()
runtime_settings = bpy.context.window_manager.session

# Step 1: Construct nodes
logging.info("Constructing nodes")
for node in session.repository.list_ordered():
node_ref = session.repository.get_node(node)
if node_ref is None:
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
node_ref.resolve()
if not runtime_settings.is_host:
logging.info("Initializing the scene")
# Step 1: Construct nodes
logging.info("Instantiating nodes")
for node in session.repository.index_sorted:
node_ref = session.repository.graph.get(node)
if node_ref is None:
logging.error(f"Can't construct node {node}")
elif node_ref.state == FETCHED:
node_ref.instance = session.repository.rdp.resolve(node_ref.data)
if node_ref.instance is None:
node_ref.instance = session.repository.rdp.construct(node_ref.data)
node_ref.instance.uuid = node_ref.uuid

# Step 2: Load nodes
logging.info("Loading nodes")
for node in session.repository.list_ordered():
node_ref = session.repository.get_node(node)

if node_ref is None:
logging.error(f"Can't load node {node}")
elif node_ref.state == FETCHED:
node_ref.apply()
# Step 2: Load nodes
logging.info("Applying nodes")
for node in session.repository.index_sorted:
porcelain.apply(session.repository, node)

logging.info("Registering timers")
# Step 4: Register blender timers
for d in deleyables:
d.register()

bpy.ops.session.apply_armature_operator('INVOKE_DEFAULT')

# Step 5: Clearing history
utils.flush_history()
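
Note: the new initialization walks the repository in dependency order and, for each fetched node, tries resolve() before falling back to construct(). A condensed sketch of that resolve-or-construct pattern (index_sorted, graph and rdp follow the shapes used in the diff):

def instantiate(repository):
    for uuid in repository.index_sorted:      # uuids in dependency order
        node = repository.graph.get(uuid)
        if node is None:
            continue
        # Reuse a matching local datablock when one exists...
        node.instance = repository.rdp.resolve(node.data)
        if node.instance is None:
            # ...otherwise build a fresh one and tag it with the node uuid
            node.instance = repository.rdp.construct(node.data)
            node.instance.uuid = node.uuid
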
@ -191,36 +190,25 @@ class SessionStartOperator(bpy.types.Operator):

handler.setFormatter(formatter)

bpy_protocol = DataTranslationProtocol()
supported_bl_types = []
bpy_protocol = bl_types.get_data_translation_protocol()

# init the factory with supported types
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)

supported_bl_types.append(type_module_class.bl_id)

if type_impl_name not in settings.supported_datablocks:
logging.info(f"{type_impl_name} not found, \
# Check if supported_datablocks are up to date before starting the
# the session
for dcc_type_id in bpy_protocol.implementations.keys():
if dcc_type_id not in settings.supported_datablocks:
logging.info(f"{dcc_type_id} not found, \
regenerate type settings...")
settings.generate_supported_types()

type_local_config = settings.supported_datablocks[type_impl_name]

bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class,
check_common=type_module_class.bl_check_common)

if bpy.app.version[1] >= 91:
python_binary_path = sys.executable
else:
python_binary_path = bpy.app.binary_path_python

repo = Repository(data_protocol=bpy_protocol)
repo = Repository(
rdp=bpy_protocol,
username=settings.username)

# Host a session
if self.host:
@ -233,12 +221,17 @@ class SessionStartOperator(bpy.types.Operator):
try:
# Init repository
for scene in bpy.data.scenes:
add(repo, scene)
porcelain.add(repo, scene)

porcelain.remote_add(
repo,
'origin',
'127.0.0.1',
settings.port,
admin_password=admin_pass)
session.host(
repository= repo,
id=settings.username,
port=settings.port,
remote='origin',
timeout=settings.connection_timeout,
password=admin_pass,
cache_directory=settings.cache_directory,
@ -257,11 +250,14 @@ class SessionStartOperator(bpy.types.Operator):
admin_pass = None

try:
porcelain.remote_add(
repo,
'origin',
settings.ip,
settings.port,
admin_password=admin_pass)
session.connect(
repository= repo,
id=settings.username,
address=settings.ip,
port=settings.port,
timeout=settings.connection_timeout,
password=admin_pass
)
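
Note: hosting and joining now go through an explicit named remote: porcelain.remote_add registers the endpoint on the repository and session.host/session.connect refer to it by name. A hedged sketch of both call sequences (repo is the Repository built above; addresses, port and credentials are example values):

from replication import porcelain
from replication.interface import session

PORT = 5555
TIMEOUT = 5000

# Host: serve the repository through a loopback remote called 'origin'
porcelain.remote_add(repo, 'origin', '127.0.0.1', PORT, admin_password='admin')
session.host(repository=repo, id='alice', port=PORT, remote='origin',
             timeout=TIMEOUT, password='admin')

# Client: the same remote name, pointed at the host machine instead
porcelain.remote_add(repo, 'origin', '192.168.1.42', PORT, admin_password=None)
session.connect(repository=repo, id='bob', address='192.168.1.42',
                port=PORT, timeout=TIMEOUT, password=None)
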
@ -273,10 +269,7 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(timers.ClientUpdate())
deleyables.append(timers.DynamicRightSelectTimer())
deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
# deleyables.append(timers.PushTimer(
# queue=stagging,
# timeout=settings.depsgraph_update_rate
# ))

session_update = timers.SessionStatusUpdate()
session_user_sync = timers.SessionUserSync()
session_background_executor = timers.MainThreadExecutor(
@ -293,10 +286,6 @@ class SessionStartOperator(bpy.types.Operator):
deleyables.append(session_user_sync)
deleyables.append(session_listen)


self.report(
{'INFO'},
f"connecting to tcp://{settings.ip}:{settings.port}")
return {"FINISHED"}


@ -332,9 +321,10 @@ class SessionInitOperator(bpy.types.Operator):
utils.clean_scene()

for scene in bpy.data.scenes:
add(session.repository, scene)
porcelain.add(session.repository, scene)

session.init()
context.window_manager.session.is_host = True

return {"FINISHED"}

@ -381,7 +371,7 @@ class SessionKickOperator(bpy.types.Operator):
assert(session)

try:
session.kick(self.user)
porcelain.kick(session.repository, self.user)
except Exception as e:
self.report({'ERROR'}, repr(e))

@ -410,7 +400,7 @@ class SessionPropertyRemoveOperator(bpy.types.Operator):

def execute(self, context):
try:
session.remove(self.property_path)
porcelain.rm(session.repository, self.property_path)

return {"FINISHED"}
except: # NonAuthorizedOperationError:
@ -452,10 +442,17 @@ class SessionPropertyRightOperator(bpy.types.Operator):
runtime_settings = context.window_manager.session

if session:
session.change_owner(self.key,
runtime_settings.clients,
if runtime_settings.clients == RP_COMMON:
porcelain.unlock(session.repository,
self.key,
ignore_warnings=True,
affect_dependencies=self.recursive)
else:
porcelain.lock(session.repository,
self.key,
runtime_settings.clients,
ignore_warnings=True,
affect_dependencies=self.recursive)

return {"FINISHED"}
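
Note: ownership changes are now expressed as lock/unlock on the repository: releasing to RP_COMMON unlocks the node for everyone, while any other target locks it to a user. A sketch of the dispatch, reusing the porcelain calls shown above:

from replication import porcelain
from replication.constants import RP_COMMON

def set_owner(repository, key, new_owner, recursive=True):
    # Releasing to the common pool unlocks; any named owner locks.
    if new_owner == RP_COMMON:
        porcelain.unlock(repository, key,
                         ignore_warnings=True,
                         affect_dependencies=recursive)
    else:
        porcelain.lock(repository, key, new_owner,
                       ignore_warnings=True,
                       affect_dependencies=recursive)
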
@ -570,7 +567,7 @@ class SessionSnapTimeOperator(bpy.types.Operator):

def modal(self, context, event):
is_running = context.window_manager.session.user_snap_running
if event.type in {'RIGHTMOUSE', 'ESC'} or not is_running:
if not is_running:
self.cancel(context)
return {'CANCELLED'}

@ -603,18 +600,19 @@ class SessionApply(bpy.types.Operator):
def execute(self, context):
logging.debug(f"Running apply on {self.target}")
try:
node_ref = session.repository.get_node(self.target)
apply(session.repository,
self.target,
force=True,
force_dependencies=self.reset_dependencies)
if node_ref.bl_reload_parent:
for parent in session.repository.get_parents(self.target):
node_ref = session.repository.graph.get(self.target)
porcelain.apply(session.repository,
self.target,
force=True,
force_dependencies=self.reset_dependencies)
impl = session.repository.rdp.get_implementation(node_ref.instance)
if impl.bl_reload_parent:
for parent in session.repository.graph.get_parents(self.target):
logging.debug(f"Refresh parent {parent}")

apply(session.repository,
parent.uuid,
force=True)
porcelain.apply(session.repository,
parent.uuid,
force=True)
except Exception as e:
self.report({'ERROR'}, repr(e))
traceback.print_exc()
@ -637,54 +635,12 @@ class SessionCommit(bpy.types.Operator):

def execute(self, context):
try:
session.commit(uuid=self.target)
session.push(self.target)
porcelain.commit(session.repository, self.target)
porcelain.push(session.repository, 'origin', self.target)
return {"FINISHED"}
except Exception as e:
self.report({'ERROR'}, repr(e))
return {"CANCELED"}

class ApplyArmatureOperator(bpy.types.Operator):
"""Operator which runs its self from a timer"""
bl_idname = "session.apply_armature_operator"
bl_label = "Modal Executor Operator"

_timer = None

def modal(self, context, event):
global stop_modal_executor, modal_executor_queue
if stop_modal_executor:
self.cancel(context)
return {'CANCELLED'}

if event.type == 'TIMER':
if session and session.state == STATE_ACTIVE:
nodes = session.list(filter=bl_types.bl_armature.BlArmature)

for node in nodes:
node_ref = session.repository.get_node(node)

if node_ref.state == FETCHED:
try:
apply(session.repository, node)
except Exception as e:
logging.error("Fail to apply armature: {e}")

return {'PASS_THROUGH'}

def execute(self, context):
wm = context.window_manager
self._timer = wm.event_timer_add(2, window=context.window)
wm.modal_handler_add(self)
return {'RUNNING_MODAL'}

def cancel(self, context):
global stop_modal_executor

wm = context.window_manager
wm.event_timer_remove(self._timer)

stop_modal_executor = False
return {"CANCELLED"}


class SessionClearCache(bpy.types.Operator):
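
Note: publishing a node now follows a git-like two-step on the repository: commit records the node's current dump, push sends it to a named remote. A minimal helper built from the calls shown above (assumes an active session):

from replication import porcelain

def publish(repository, uuid):
    porcelain.commit(repository, uuid)          # record the local state
    porcelain.push(repository, 'origin', uuid)  # send it to the remote
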
@ -715,6 +671,7 @@ class SessionClearCache(bpy.types.Operator):
row = self.layout
row.label(text=f" Do you really want to remove local cache ? ")


class SessionPurgeOperator(bpy.types.Operator):
"Remove node with lost references"
bl_idname = "session.purge"
@ -797,7 +754,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
recorder.register()
deleyables.append(recorder)
else:
session.save(self.filepath)
session.repository.dumps(self.filepath)

return {'FINISHED'}

@ -805,6 +762,7 @@ class SessionSaveBackupOperator(bpy.types.Operator, ExportHelper):
def poll(cls, context):
return session.state == STATE_ACTIVE


class SessionStopAutoSaveOperator(bpy.types.Operator):
bl_idname = "session.cancel_autosave"
bl_label = "Cancel auto-save"
@ -839,63 +797,24 @@ class SessionLoadSaveOperator(bpy.types.Operator, ImportHelper):
def execute(self, context):
from replication.repository import Repository

# TODO: add filechecks
# init the factory with supported types
bpy_protocol = bl_types.get_data_translation_protocol()
repo = Repository(bpy_protocol)
repo.loads(self.filepath)
utils.clean_scene()

try:
f = gzip.open(self.filepath, "rb")
db = pickle.load(f)
except OSError as e:
f = open(self.filepath, "rb")
db = pickle.load(f)
nodes = [repo.graph.get(n) for n in repo.index_sorted]

if db:
logging.info(f"Reading {self.filepath}")
nodes = db.get("nodes")
# Step 1: Construct nodes
for node in nodes:
node.instance = bpy_protocol.resolve(node.data)
if node.instance is None:
node.instance = bpy_protocol.construct(node.data)
node.instance.uuid = node.uuid

logging.info(f"{len(nodes)} Nodes to load")



# init the factory with supported types
bpy_protocol = DataTranslationProtocol()
for type in bl_types.types_to_register():
type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)


bpy_protocol.register_type(
type_module_class.bl_class,
type_module_class)

graph = Repository()

for node, node_data in nodes:
node_type = node_data.get('str_type')

impl = bpy_protocol.get_implementation_from_net(node_type)

if impl:
logging.info(f"Loading {node}")
instance = impl(owner=node_data['owner'],
uuid=node,
dependencies=node_data['dependencies'],
data=node_data['data'])
graph.do_commit(instance)
instance.state = FETCHED

logging.info("Graph succefully loaded")

utils.clean_scene()

# Step 1: Construct nodes
for node in graph.list_ordered():
graph[node].resolve()

# Step 2: Load nodes
for node in graph.list_ordered():
graph[node].apply()
# Step 2: Load nodes
for node in nodes:
porcelain.apply(repo, node.uuid)


return {'FINISHED'}
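
Note: loading a backup is reduced to Repository.loads plus the same resolve-or-construct and apply walk used at session initialization, replacing the old gzip/pickle path. A condensed sketch of the new flow (attribute names follow the diff):

from replication import porcelain
from replication.repository import Repository

def load_snapshot(filepath, bpy_protocol):
    repo = Repository(bpy_protocol)
    repo.loads(filepath)                       # read the serialized graph

    # Rebuild datablocks in dependency order, then apply their data
    nodes = [repo.graph.get(n) for n in repo.index_sorted]
    for node in nodes:
        node.instance = bpy_protocol.resolve(node.data)
        if node.instance is None:
            node.instance = bpy_protocol.construct(node.data)
            node.instance.uuid = node.uuid
    for node in nodes:
        porcelain.apply(repo, node.uuid)
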
@ -987,7 +906,6 @@ classes = (
SessionPropertyRightOperator,
SessionApply,
SessionCommit,
ApplyArmatureOperator,
SessionKickOperator,
SessionInitOperator,
SessionClearCache,
@ -1000,14 +918,15 @@ classes = (
SessionPresetServerRemove,
)


def update_external_dependencies():
nodes_ids = session.list(filter=bl_types.bl_file.BlFile)
nodes_ids = [n.uuid for n in session.repository.graph.values() if n.data['type_id'] in ['WindowsPath', 'PosixPath']]
for node_id in nodes_ids:
node = session.repository.get_node(node_id)
if node and node.owner in [session.id, RP_COMMON] \
and node.has_changed():
session.commit(node_id)
session.push(node_id, check_data=False)
node = session.repository.graph.get(node_id)
if node and node.owner in [session.repository.username, RP_COMMON]:
porcelain.commit(session.repository, node_id)
porcelain.push(session.repository,'origin', node_id)


def sanitize_deps_graph(remove_nodes: bool = False):
""" Cleanup the replication graph
@ -1015,18 +934,20 @@ def sanitize_deps_graph(remove_nodes: bool = False):
if session and session.state == STATE_ACTIVE:
start = utils.current_milli_time()
rm_cpt = 0
for node_key in session.list():
node = session.repository.get_node(node_key)
for node in session.repository.graph.values():
node.instance = session.repository.rdp.resolve(node.data)
if node is None \
or (node.state == UP and not node.resolve(construct=False)):
or (node.state == UP and not node.instance):
if remove_nodes:
try:
porcelain.rm(session.repository,
node.uuid,
remove_dependencies=False)
logging.info(f"Removing {node.uuid}")
rm_cpt += 1
except NonAuthorizedOperationError:
continue
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms")
logging.info(f"Sanitize took { utils.current_milli_time()-start} ms, removed {rm_cpt} nodes")


@persistent
@ -1040,6 +961,7 @@ def resolve_deps_graph(dummy):
if session and session.state == STATE_ACTIVE:
sanitize_deps_graph(remove_nodes=True)


@persistent
def load_pre_handler(dummy):
if session and session.state in [STATE_ACTIVE, STATE_SYNCING]:
@ -1049,7 +971,7 @@ def load_pre_handler(dummy):
@persistent
def update_client_frame(scene):
if session and session.state == STATE_ACTIVE:
session.update_user_metadata({
porcelain.update_user_metadata(session.repository, {
'frame_current': scene.frame_current
})

@ -1064,27 +986,28 @@ def depsgraph_evaluation(scene):

update_external_dependencies()

is_internal = [u for u in dependency_updates if u.is_updated_geometry or u.is_updated_shading or u.is_updated_transform]

# NOTE: maybe we don't need to check each update but only the first
if not is_internal:
return
for update in reversed(dependency_updates):
# Is the object tracked?
if update.id.uuid:
# Retrieve local version
node = session.repository.get_node(update.id.uuid)

node = session.repository.graph.get(update.id.uuid)
check_common = session.repository.rdp.get_implementation(update.id).bl_check_common
# Check our right on this update:
# - if it's ours or (under common and diff), launch the
# update process
# - if it's to someone else, ignore the update
if node and (node.owner == session.id or node.bl_check_common):
if node and (node.owner == session.repository.username or check_common):
if node.state == UP:
try:
if node.has_changed():
session.commit(node.uuid)
session.push(node.uuid, check_data=False)
porcelain.commit(session.repository, node.uuid)
porcelain.push(session.repository, 'origin', node.uuid)
except ReferenceError:
logging.debug(f"Reference error {node.uuid}")
if not node.is_valid():
session.remove(node.uuid)
except ContextError as e:
logging.debug(e)
except Exception as e:
@ -1095,11 +1018,11 @@ def depsgraph_evaluation(scene):
elif isinstance(update.id, bpy.types.Scene):
ref = session.repository.get_node_by_datablock(update.id)
if ref:
ref.resolve()
pass
else:
scn_uuid = add(session.repository, update.id)
session.commit(scn_uuid)
session.push(scn_uuid, check_data=False)
scn_uuid = porcelain.add(session.repository, update.id)
porcelain.commit(session.repository, scn_uuid)
porcelain.push(session.repository,'origin', scn_uuid)
def register():
from bpy.utils import register_class

@ -457,18 +457,18 @@ class SessionPrefs(bpy.types.AddonPreferences):
def generate_supported_types(self):
self.supported_datablocks.clear()

for type in bl_types.types_to_register():
bpy_protocol = bl_types.get_data_translation_protocol()

# init the factory with supported types
for dcc_type_id, impl in bpy_protocol.implementations.items():
new_db = self.supported_datablocks.add()

type_module = getattr(bl_types, type)
name = [e.capitalize() for e in type.split('_')[1:]]
type_impl_name = 'Bl'+''.join(name)
type_module_class = getattr(type_module, type_impl_name)
new_db.name = type_impl_name
new_db.type_name = type_impl_name
new_db.name = dcc_type_id
new_db.type_name = dcc_type_id
new_db.use_as_filter = True
new_db.icon = type_module_class.bl_icon
new_db.bl_name = type_module_class.bl_id
new_db.icon = impl.bl_icon
new_db.bl_name = impl.bl_id


# custom at launch server preset
def generate_default_presets(self):
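
Note: preference entries are now generated straight from the protocol's implementations mapping, so the datablock filter list always matches what the protocol can replicate. A sketch of the pattern with a plain list standing in for Blender's property collection (the stand-in classes are hypothetical):

implementations = {
    'Scene': type('BlSceneLike', (), {'bl_icon': 'SCENE_DATA', 'bl_id': 'scenes'}),
    'Sound': type('BlSoundLike', (), {'bl_icon': 'SOUND', 'bl_id': 'sounds'}),
}

supported_datablocks = []
for dcc_type_id, impl in implementations.items():
    supported_datablocks.append({
        'name': dcc_type_id,
        'type_name': dcc_type_id,
        'icon': impl.bl_icon,
        'bl_name': impl.bl_id,
        'use_as_filter': True,
    })
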
@ -553,6 +553,11 @@ class SessionProps(bpy.types.PropertyGroup):
description='Show only owned datablocks',
default=True
)
filter_name: bpy.props.StringProperty(
name="filter_name",
default="",
description='Node name filter',
)
admin: bpy.props.BoolProperty(
name="admin",
description='Connect as admin',
@ -302,9 +302,10 @@ class UserSelectionWidget(Widget):
return

vertex_pos = bbox_from_obj(ob, 1.0)
vertex_indices = ((0, 1), (0, 2), (1, 3), (2, 3),
(4, 5), (4, 6), (5, 7), (6, 7),
(0, 4), (1, 5), (2, 6), (3, 7))
vertex_indices = (
(0, 1), (1, 2), (2, 3), (0, 3),
(4, 5), (5, 6), (6, 7), (4, 7),
(0, 4), (1, 5), (2, 6), (3, 7))

if ob.instance_collection:
for obj in ob.instance_collection.objects:
@ -24,7 +24,7 @@ from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
STATE_SRV_SYNC, STATE_SYNCING, UP)
from replication.exception import NonAuthorizedOperationError, ContextError
from replication.interface import session
from replication.porcelain import apply, add
from replication import porcelain

from . import operators, utils
from .presence import (UserFrustumWidget, UserNameWidget, UserSelectionWidget,
@ -72,6 +72,7 @@ class Timer(object):
except Exception as e:
logging.error(e)
self.unregister()
traceback.print_exc()
session.disconnect(reason=f"Error during timer {self.id} execution")
else:
if self.is_running:
@ -99,7 +100,7 @@ class SessionBackupTimer(Timer):


def execute(self):
session.save(self._filepath)
session.repository.dumps(self._filepath)

class SessionListenTimer(Timer):
def execute(self):
@ -108,22 +109,21 @@ class SessionListenTimer(Timer):
class ApplyTimer(Timer):
def execute(self):
if session and session.state == STATE_ACTIVE:
nodes = session.list()

for node in nodes:
node_ref = session.repository.get_node(node)
for node in session.repository.graph.keys():
node_ref = session.repository.graph.get(node)

if node_ref.state == FETCHED:
try:
apply(session.repository, node)
porcelain.apply(session.repository, node)
except Exception as e:
logging.error(f"Failed to apply {node_ref.uuid}")
traceback.print_exc()
else:
if node_ref.bl_reload_parent:
for parent in session.repository.get_parents(node):
impl = session.repository.rdp.get_implementation(node_ref.instance)
if impl.bl_reload_parent:
for parent in session.repository.graph.get_parents(node):
logging.debug(f"Refresh parent {node}")
apply(session.repository,
porcelain.apply(session.repository,
parent.uuid,
force=True)

@ -152,31 +152,28 @@ class DynamicRightSelectTimer(Timer):

# if an annotation exists and is tracked
if annotation_gp and annotation_gp.uuid:
registered_gp = session.repository.get_node(annotation_gp.uuid)
registered_gp = session.repository.graph.get(annotation_gp.uuid)
if is_annotating(bpy.context):
# try to get the right on it
if registered_gp.owner == RP_COMMON:
self._annotating = True
logging.debug(
"Getting the right on the annotation GP")
session.change_owner(
registered_gp.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=False)
porcelain.lock(session.repository,
registered_gp.uuid,
ignore_warnings=True,
affect_dependencies=False)

if registered_gp.owner == settings.username:
gp_node = session.repository.get_node(annotation_gp.uuid)
if gp_node.has_changed():
session.commit(gp_node.uuid)
session.push(gp_node.uuid, check_data=False)
gp_node = session.repository.graph.get(annotation_gp.uuid)
porcelain.commit(session.repository, gp_node.uuid)
porcelain.push(session.repository, 'origin', gp_node.uuid)

elif self._annotating:
session.change_owner(
registered_gp.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=False)
porcelain.unlock(session.repository,
registered_gp.uuid,
ignore_warnings=True,
affect_dependencies=False)

current_selection = utils.get_selected_objects(
bpy.context.scene,
@ -190,25 +187,24 @@ class DynamicRightSelectTimer(Timer):

# change old selection right to common
for obj in obj_common:
node = session.repository.get_node(obj)
node = session.repository.graph.get(obj)

if node and (node.owner == settings.username or node.owner == RP_COMMON):
recursive = True
if node.data and 'instance_type' in node.data.keys():
recursive = node.data['instance_type'] != 'COLLECTION'
try:
session.change_owner(
node.uuid,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
porcelain.unlock(session.repository,
node.uuid,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")

# change new selection to ours
for obj in obj_ours:
node = session.repository.get_node(obj)
node = session.repository.graph.get(obj)

if node and node.owner == RP_COMMON:
recursive = True
@ -216,11 +212,10 @@ class DynamicRightSelectTimer(Timer):
recursive = node.data['instance_type'] != 'COLLECTION'

try:
session.change_owner(
node.uuid,
settings.username,
ignore_warnings=True,
affect_dependencies=recursive)
porcelain.lock(session.repository,
node.uuid,
ignore_warnings=True,
affect_dependencies=recursive)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {node} owner")
@ -233,21 +228,19 @@ class DynamicRightSelectTimer(Timer):
'selected_objects': current_selection
}

session.update_user_metadata(user_metadata)
porcelain.update_user_metadata(session.repository, user_metadata)
logging.debug("Update selection")

# Fix deselection until rights management refactoring (with Roles concepts)
if len(current_selection) == 0 :
owned_keys = session.list(
filter_owner=settings.username)
owned_keys = [k for k, v in session.repository.graph.items() if v.owner==settings.username]
for key in owned_keys:
node = session.repository.get_node(key)
node = session.repository.graph.get(key)
try:
session.change_owner(
key,
RP_COMMON,
ignore_warnings=True,
affect_dependencies=recursive)
porcelain.unlock(session.repository,
key,
ignore_warnings=True,
affect_dependencies=True)
except NonAuthorizedOperationError:
logging.warning(
f"Not authorized to change {key} owner")
@ -255,7 +248,7 @@ class DynamicRightSelectTimer(Timer):
for obj in bpy.data.objects:
object_uuid = getattr(obj, 'uuid', None)
if object_uuid:
is_selectable = not session.is_readonly(object_uuid)
is_selectable = not session.repository.is_node_readonly(object_uuid)
if obj.hide_select != is_selectable:
obj.hide_select = is_selectable

@ -309,18 +302,18 @@ class ClientUpdate(Timer):
'frame_current': bpy.context.scene.frame_current,
'scene_current': scene_current
}
session.update_user_metadata(metadata)
porcelain.update_user_metadata(session.repository, metadata)

# Update client representation
# Update client current scene
elif scene_current != local_user_metadata['scene_current']:
local_user_metadata['scene_current'] = scene_current
session.update_user_metadata(local_user_metadata)
porcelain.update_user_metadata(session.repository, local_user_metadata)
elif 'view_corners' in local_user_metadata and current_view_corners != local_user_metadata['view_corners']:
local_user_metadata['view_corners'] = current_view_corners
local_user_metadata['view_matrix'] = get_view_matrix(
)
session.update_user_metadata(local_user_metadata)
porcelain.update_user_metadata(session.repository, local_user_metadata)


class SessionStatusUpdate(Timer):
@ -443,8 +443,8 @@ class SESSION_PT_presence(bpy.types.Panel):
def draw_property(context, parent, property_uuid, level=0):
settings = get_preferences()
runtime_settings = context.window_manager.session
item = session.repository.get_node(property_uuid)

item = session.repository.graph.get(property_uuid)
type_id = item.data.get('type_id')
area_msg = parent.row(align=True)

if item.state == ERROR:
@ -455,11 +455,10 @@ def draw_property(context, parent, property_uuid, level=0):
line = area_msg.box()

name = item.data['name'] if item.data else item.uuid

icon = settings.supported_datablocks[type_id].icon if type_id else 'ERROR'
detail_item_box = line.row(align=True)

detail_item_box.label(text="",
icon=settings.supported_datablocks[item.str_type].icon)
detail_item_box.label(text="", icon=icon)
detail_item_box.label(text=f"{name}")

# Operations
@ -546,40 +545,32 @@ class SESSION_PT_repository(bpy.types.Panel):
else:
row.operator('session.save', icon="FILE_TICK")

flow = layout.grid_flow(
row_major=True,
columns=0,
even_columns=True,
even_rows=False,
align=True)

for item in settings.supported_datablocks:
col = flow.column(align=True)
col.prop(item, "use_as_filter", text="", icon=item.icon)

row = layout.row(align=True)
row.prop(runtime_settings, "filter_owned", text="Show only owned")

row = layout.row(align=True)
box = layout.box()
row = box.row()
row.prop(runtime_settings, "filter_owned", text="Show only owned Nodes", icon_only=True, icon="DECORATE_UNLOCKED")
row = box.row()
row.prop(runtime_settings, "filter_name", text="Filter")
row = box.row()

# Properties
types_filter = [t.type_name for t in settings.supported_datablocks
if t.use_as_filter]
owned_nodes = [k for k, v in session.repository.graph.items() if v.owner==settings.username]

key_to_filter = session.list(
filter_owner=settings.username) if runtime_settings.filter_owned else session.list()
filtered_node = owned_nodes if runtime_settings.filter_owned else session.repository.graph.keys()

client_keys = [key for key in key_to_filter
if session.repository.get_node(key).str_type
in types_filter]
if runtime_settings.filter_name:
for node_id in filtered_node:
node_instance = session.repository.graph.get(node_id)
name = node_instance.data.get('name')
if runtime_settings.filter_name not in name:
filtered_node.remove(node_id)

if client_keys:
if filtered_node:
col = layout.column(align=True)
for key in client_keys:
for key in filtered_node:
draw_property(context, col, key)

else:
row.label(text="Empty")
layout.row().label(text="Empty")

elif session.state == STATE_LOBBY and usr and usr['admin']:
row.operator("session.init", icon='TOOL_SETTINGS', text="Init")
@ -1,4 +1,4 @@
import re

init_py = open("multi_user/__init__.py").read()
init_py = open("multi_user/libs/replication/replication/__init__.py").read()
print(re.search("\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+", init_py).group(0))
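
Note: the version script now reads the version string out of the vendored replication submodule instead of the addon package. A quick illustration of what the regex matches (the version literals are examples):

import re

VERSION_RE = re.compile(r"\d+\.\d+\.\d+\w\d+|\d+\.\d+\.\d+")

assert VERSION_RE.search("version = '0.1.36'").group(0) == "0.1.36"
assert VERSION_RE.search("version = '0.9.5a1'").group(0) == "0.9.5a1"
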
@ -32,11 +32,11 @@ def test_action(clear_blend):
|
||||
|
||||
# Test
|
||||
implementation = BlAction()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.actions.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -12,11 +12,11 @@ def test_armature(clear_blend):
|
||||
datablock = bpy.data.armatures[0]
|
||||
|
||||
implementation = BlArmature()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.armatures.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -15,11 +15,11 @@ def test_camera(clear_blend, camera_type):
|
||||
datablock.type = camera_type
|
||||
|
||||
camera_dumper = BlCamera()
|
||||
expected = camera_dumper._dump(datablock)
|
||||
expected = camera_dumper.dump(datablock)
|
||||
bpy.data.cameras.remove(datablock)
|
||||
|
||||
test = camera_dumper._construct(expected)
|
||||
camera_dumper._load(expected, test)
|
||||
result = camera_dumper._dump(test)
|
||||
test = camera_dumper.construct(expected)
|
||||
camera_dumper.load(expected, test)
|
||||
result = camera_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -23,11 +23,11 @@ def test_collection(clear_blend):
|
||||
|
||||
# Test
|
||||
implementation = BlCollection()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.collections.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -19,11 +19,11 @@ def test_curve(clear_blend, curve_type):
|
||||
datablock = bpy.data.curves[0]
|
||||
|
||||
implementation = BlCurve()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.curves.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -13,11 +13,11 @@ def test_gpencil(clear_blend):
|
||||
datablock = bpy.data.grease_pencils[0]
|
||||
|
||||
implementation = BlGpencil()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.grease_pencils.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -13,11 +13,11 @@ def test_lattice(clear_blend):
|
||||
datablock = bpy.data.lattices[0]
|
||||
|
||||
implementation = BlLattice()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.lattices.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -14,11 +14,11 @@ def test_lightprobes(clear_blend, lightprobe_type):
|
||||
|
||||
blender_light = bpy.data.lightprobes[0]
|
||||
lightprobe_dumper = BlLightprobe()
|
||||
expected = lightprobe_dumper._dump(blender_light)
|
||||
expected = lightprobe_dumper.dump(blender_light)
|
||||
bpy.data.lightprobes.remove(blender_light)
|
||||
|
||||
test = lightprobe_dumper._construct(expected)
|
||||
lightprobe_dumper._load(expected, test)
|
||||
result = lightprobe_dumper._dump(test)
|
||||
test = lightprobe_dumper.construct(expected)
|
||||
lightprobe_dumper.load(expected, test)
|
||||
result = lightprobe_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -13,11 +13,11 @@ def test_light(clear_blend, light_type):
|
||||
|
||||
blender_light = bpy.data.lights[0]
|
||||
light_dumper = BlLight()
|
||||
expected = light_dumper._dump(blender_light)
|
||||
expected = light_dumper.dump(blender_light)
|
||||
bpy.data.lights.remove(blender_light)
|
||||
|
||||
test = light_dumper._construct(expected)
|
||||
light_dumper._load(expected, test)
|
||||
result = light_dumper._dump(test)
|
||||
test = light_dumper.construct(expected)
|
||||
light_dumper.load(expected, test)
|
||||
result = light_dumper.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -17,12 +17,12 @@ def test_material_nodes(clear_blend):
|
||||
datablock.node_tree.nodes.new(ntype)
|
||||
|
||||
implementation = BlMaterial()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.materials.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
||||
@ -32,11 +32,11 @@ def test_material_gpencil(clear_blend):
|
||||
bpy.data.materials.create_gpencil_data(datablock)
|
||||
|
||||
implementation = BlMaterial()
|
||||
expected = implementation._dump(datablock)
|
||||
expected = implementation.dump(datablock)
|
||||
bpy.data.materials.remove(datablock)
|
||||
|
||||
test = implementation._construct(expected)
|
||||
implementation._load(expected, test)
|
||||
result = implementation._dump(test)
|
||||
test = implementation.construct(expected)
|
||||
implementation.load(expected, test)
|
||||
result = implementation.dump(test)
|
||||
|
||||
assert not DeepDiff(expected, result)
|
||||
|
@ -18,11 +18,11 @@ def test_mesh(clear_blend, mesh_type):

# Test
implementation = BlMesh()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.meshes.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)

@ -13,11 +13,11 @@ def test_metaball(clear_blend, metaballs_type):

datablock = bpy.data.metaballs[0]
dumper = BlMetaball()
expected = dumper._dump(datablock)
expected = dumper.dump(datablock)
bpy.data.metaballs.remove(datablock)

test = dumper._construct(expected)
dumper._load(expected, test)
result = dumper._dump(test)
test = dumper.construct(expected)
dumper.load(expected, test)
result = dumper.dump(test)

assert not DeepDiff(expected, result)

@ -65,11 +65,11 @@ def test_object(clear_blend):
datablock.shape_key_add(name='shape2')

implementation = BlObject()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.objects.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)
print(DeepDiff(expected, result))
assert not DeepDiff(expected, result)

@ -15,11 +15,11 @@ def test_scene(clear_blend):
datablock.view_settings.use_curve_mapping = True
# Test
implementation = BlScene()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.scenes.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)

@ -12,11 +12,11 @@ def test_speaker(clear_blend):
datablock = bpy.data.speakers[0]

implementation = BlSpeaker()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.speakers.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)

@ -14,11 +14,11 @@ def test_texture(clear_blend, texture_type):
datablock = bpy.data.textures.new('test', texture_type)

implementation = BlTexture()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.textures.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)

@ -11,11 +11,11 @@ def test_volume(clear_blend):
datablock = bpy.data.volumes.new("Test")

implementation = BlVolume()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.volumes.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)

@ -12,11 +12,11 @@ def test_world(clear_blend):
datablock.use_nodes = True

implementation = BlWorld()
expected = implementation._dump(datablock)
expected = implementation.dump(datablock)
bpy.data.worlds.remove(datablock)

test = implementation._construct(expected)
implementation._load(expected, test)
result = implementation._dump(test)
test = implementation.construct(expected)
implementation.load(expected, test)
result = implementation.dump(test)

assert not DeepDiff(expected, result)