Merge branch 'develop' into remove-services

This commit is contained in:
Swann 2021-04-23 11:21:16 +02:00
commit a7ad9d30c3
No known key found for this signature in database
GPG Key ID: E1D3641A7C43AACB
19 changed files with 427 additions and 113 deletions

@@ -157,4 +157,33 @@ All notable changes to this project will be documented in this file.

- Empty and Light object selection highlights
- Material renaming
- Default material nodes input parameters
- Blender 2.91 Python API compatibility

## [0.3.0] - 2021-04-14
### Added
- Curve material support
- Cycles visibility settings
- Session save/load operator
- New scene support
- Initial physics support
- Initial geometry node support
- Blender 2.93 compatibility
### Changed
- Host documentation on GitLab Pages
- Event-driven updates (from the Blender dependency graph)
### Fixed
- Vertex group assignment
- Parent relation could not be removed
- Separate object
- Delete animation
- Sync missing holdout option for grease pencil materials
- Sync missing `skin_vertices`
- Exception access violation during Undo/Redo
- Sync missing armature bone roll
- Sync missing driver `data_path`
- Constraint replication

@@ -29,35 +29,35 @@ See the [troubleshooting guide](https://slumber.gitlab.io/multi-user/getting_sta

Currently, not all data-blocks are supported for replication over the wire. The following list summarizes the status of each one.
| Name           | Status | Comment                                                       |
| -------------- | :----: | :-----------------------------------------------------------: |
| action         | ✔️     |                                                               |
| armature       | ❗     | Not stable                                                    |
| camera         | ✔️     |                                                               |
| collection     | ✔️     |                                                               |
| curve          | ❗     | Nurbs surfaces not supported                                  |
| gpencil        | ✔️     |                                                               |
| image          | ✔️     |                                                               |
| mesh           | ✔️     |                                                               |
| material       | ✔️     |                                                               |
| node_groups    | ❗     | Material & Geometry only                                      |
| geometry nodes | ✔️     |                                                               |
| metaball       | ✔️     |                                                               |
| object         | ✔️     |                                                               |
| textures       | ❗     | Supported for modifiers/materials/geo nodes only              |
| texts          | ✔️     |                                                               |
| scene          | ✔️     |                                                               |
| world          | ✔️     |                                                               |
| lightprobes    | ✔️     |                                                               |
| compositing    | ❌     | [Planned](https://gitlab.com/slumber/multi-user/-/issues/46)  |
| texts          | ❌     | [Planned](https://gitlab.com/slumber/multi-user/-/issues/81)  |
| nla            | ❌     |                                                               |
| volumes        | ✔️     |                                                               |
| particles      | ❗     | The cache isn't syncing.                                      |
| speakers       | ❗     | [Partial](https://gitlab.com/slumber/multi-user/-/issues/65)  |
| vse            | ❗     | Mask and Clip not supported yet                               |
| physics        | ❌     | [Planned](https://gitlab.com/slumber/multi-user/-/issues/45)  |
| libraries      | ❗     | Partial                                                       |

@@ -70,7 +70,7 @@ I'm working on it.

| Dependencies | Version | Needed |
| ------------ | :-----: | -----: |
| Replication  | latest  |    yes |

@@ -122,13 +122,13 @@ class addon_updater_install_popup(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,
        options={'HIDDEN'}
    )
    ignore_enum: bpy.props.EnumProperty(
        name="Process update",
        description="Decide to install, ignore, or defer new addon update",
        items=[

@@ -264,7 +264,7 @@ class addon_updater_update_now(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,

@@ -332,7 +332,7 @@ class addon_updater_update_target(bpy.types.Operator):

            i+=1
        return ret

    target: bpy.props.EnumProperty(
        name="Target version to install",
        description="Select the version to install",
        items=target_version

@@ -341,7 +341,7 @@ class addon_updater_update_target(bpy.types.Operator):

    # if true, run clean install - ie remove all files before adding new
    # equivalent to deleting the addon and reinstalling, except the
    # updater folder/backup folder remains
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="If enabled, completely clear the addon's folder before installing new update, creating a fresh install",
        default=False,

@@ -399,7 +399,7 @@ class addon_updater_install_manually(bpy.types.Operator):

    bl_description = "Proceed to manually install update"
    bl_options = {'REGISTER', 'INTERNAL'}

    error: bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}

@@ -461,7 +461,7 @@ class addon_updater_updated_successful(bpy.types.Operator):

    bl_description = "Update installation response"
    bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}

    error: bpy.props.StringProperty(
        name="Error Occurred",
        default="",
        options={'HIDDEN'}
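These hunks migrate the updater operators from assignment-style property definitions to class annotations, the form Blender 2.80+ expects for operator properties (plain assignments only raise deprecation warnings and may not be registered). A minimal sketch of the annotation pattern on a hypothetical operator, not part of the addon itself:

```python
import bpy


class WM_OT_annotation_example(bpy.types.Operator):
    """Hypothetical operator showing annotation-style property declarations."""
    bl_idname = "wm.annotation_example"
    bl_label = "Annotation example"

    # Declared as a class annotation (name: definition), not an assignment,
    # so Blender registers it as an operator property.
    clean_install: bpy.props.BoolProperty(
        name="Clean install",
        description="Clear the addon folder before installing the update",
        default=False,
        options={'HIDDEN'}
    )

    def execute(self, context):
        self.report({'INFO'}, f"clean_install={self.clean_install}")
        return {'FINISHED'}


bpy.utils.register_class(WM_OT_annotation_example)
```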

@@ -42,6 +42,7 @@ __all__ = [

    # 'bl_sequencer',
    'bl_node_group',
    'bl_texture',
    "bl_particle",
]  # Order here defines execution order

if bpy.app.version[1] >= 91:

@@ -61,7 +61,6 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:

        points = fcurve.keyframe_points
        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
        fcurve_data['keyframe_points'] = np_dump_collection(points, KEYFRAME)
    else:  # Legacy method
        dumper = Dumper()
        fcurve_data["keyframe_points"] = []

@@ -71,6 +70,18 @@ def dump_fcurve(fcurve: bpy.types.FCurve, use_numpy: bool = True) -> dict:

                dumper.dump(k)
            )

    if fcurve.modifiers:
        dumper = Dumper()
        dumper.exclude_filter = [
            'is_valid',
            'active'
        ]
        dumped_modifiers = []
        for modifier in fcurve.modifiers:
            dumped_modifiers.append(dumper.dump(modifier))

        fcurve_data['modifiers'] = dumped_modifiers

    return fcurve_data
@@ -83,7 +94,7 @@ def load_fcurve(fcurve_data, fcurve):

    :type fcurve: bpy.types.FCurve
    """
    use_numpy = fcurve_data.get('use_numpy')
    loader = Loader()

    keyframe_points = fcurve.keyframe_points

    # Remove all keyframe points

@@ -128,6 +139,21 @@ def load_fcurve(fcurve_data, fcurve):

    fcurve.update()

    dumped_fcurve_modifiers = fcurve_data.get('modifiers', None)

    if dumped_fcurve_modifiers:
        # Clear existing modifiers
        for fmod in fcurve.modifiers:
            fcurve.modifiers.remove(fmod)

        # Load each modifier in order
        for modifier_data in dumped_fcurve_modifiers:
            modifier = fcurve.modifiers.new(modifier_data['type'])
            loader.load(modifier, modifier_data)
    elif fcurve.modifiers:
        for fmod in fcurve.modifiers:
            fcurve.modifiers.remove(fmod)
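A minimal sketch of the new F-Curve modifier round-trip, assuming `dump_fcurve` and `load_fcurve` are importable from `multi_user.bl_types.bl_action` and Blender's Python API is available:

```python
import bpy
from multi_user.bl_types.bl_action import dump_fcurve, load_fcurve

# Build a source F-Curve with one modifier that should survive replication.
src_action = bpy.data.actions.new("src")
fcurve = src_action.fcurves.new("location", index=0)
fcurve.keyframe_points.insert(1, 0.0)
fcurve.keyframe_points.insert(10, 1.0)
fcurve.modifiers.new('NOISE')

dumped = dump_fcurve(fcurve, use_numpy=True)
assert dumped['modifiers'][0]['type'] == 'NOISE'

# Replay the dump onto a fresh F-Curve.
dst_action = bpy.data.actions.new("dst")
dst_fcurve = dst_action.fcurves.new("location", index=0)
load_fcurve(dumped, dst_fcurve)
assert dst_fcurve.modifiers[0].type == 'NOISE'
```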
class BlAction(BlDatablock):
    bl_id = "actions"

@@ -56,6 +56,11 @@ class BlCamera(BlDatablock):

            target_img.image = bpy.data.images[img_id]
            loader.load(target_img, img_data)

            img_user = img_data.get('image_user')
            if img_user:
                loader.load(target_img.image_user, img_user)

    def _dump_implementation(self, data, instance=None):
        assert(instance)

@@ -101,10 +106,19 @@ class BlCamera(BlDatablock):

            'scale',
            'use_flip_x',
            'use_flip_y',
            'image_user',
            'image',
            'frame_duration',
            'frame_start',
            'frame_offset',
            'use_cyclic',
            'use_auto_refresh'
        ]
        data = dumper.dump(instance)

        for index, image in enumerate(instance.background_images):
            if image.image_user:
                data['background_images'][index]['image_user'] = dumper.dump(image.image_user)

        return data

    def _resolve_deps_implementation(self):
        deps = []
        for background in self.instance.background_images:

@@ -72,10 +72,10 @@ def load_driver(target_datablock, src_driver):

        for src_target in src_var_data['targets']:
            src_target_data = src_var_data['targets'][src_target]
            src_id = src_target_data.get('id')
            if src_id:
                new_var.targets[src_target].id = utils.resolve_from_id(src_target_data['id'], src_target_data['id_type'])
            loader.load(new_var.targets[src_target], src_target_data)

    # Fcurve
    new_fcurve = new_driver.keyframe_points

@@ -161,19 +161,17 @@ class BlDatablock(ReplicatedDatablock):

    def _dump(self, instance=None):
        dumper = Dumper()
        data = {}
        animation_data = {}

        # Dump animation data
        if has_action(instance):
            animation_data['action'] = instance.animation_data.action.name
        if has_driver(instance):
            animation_data['drivers'] = []
            for driver in instance.animation_data.drivers:
                animation_data['drivers'].append(dump_driver(driver))

        if animation_data:
            data['animation_data'] = animation_data

        if self.is_library:
            data.update(dumper.dump(instance))

@@ -200,6 +198,9 @@ class BlDatablock(ReplicatedDatablock):

            if 'action' in data['animation_data']:
                target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
            elif target.animation_data.action:
                target.animation_data.action = None

        # Remove existing animation data if there is nothing more to load
        elif hasattr(target, 'animation_data') and target.animation_data:
            target.animation_data_clear()
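For reference, a rough sketch of the structure the reworked `_dump` above produces for a datablock that has both an action and drivers (the action name is hypothetical, illustrative only):

```python
dumped = {
    'animation_data': {
        'action': 'CubeAction',  # hypothetical action name
        'drivers': []            # one dict per driver, as returned by dump_driver()
    }
}
```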

@@ -66,9 +66,12 @@ class BlImage(BlDatablock):

        loader = Loader()
        loader.load(data, target)

        target.source = data['source']
        target.filepath_raw = get_filepath(data['filename'])
        color_space_name = data["colorspace_settings"]["name"]

        if color_space_name:
            target.colorspace_settings.name = color_space_name

    def _dump(self, instance=None):
        assert(instance)

@@ -83,6 +86,7 @@ class BlImage(BlDatablock):

        dumper.depth = 2
        dumper.include_filter = [
            "name",
            'source',
            'size',
            'height',
            'alpha',

@@ -27,7 +27,7 @@ from .dump_anything import Loader, Dumper

from .bl_datablock import BlDatablock, get_datablock_from_uuid

NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
IGNORED_SOCKETS = ['GEOMETRY', 'SHADER', 'CUSTOM']

def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
    """ Load a node into a node_tree from a dict

@@ -54,8 +54,8 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):

    if inputs_data:
        inputs = [i for i in target_node.inputs if i.type not in IGNORED_SOCKETS]
        for idx, inpt in enumerate(inputs):
            if idx < len(inputs_data) and hasattr(inpt, "default_value"):
                loaded_input = inputs_data[idx]
                try:
                    if inpt.type in ['OBJECT', 'COLLECTION']:
                        inpt.default_value = get_datablock_from_uuid(loaded_input, None)

@@ -69,13 +69,17 @@ def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):

    outputs_data = node_data.get('outputs')
    if outputs_data:
        outputs = [o for o in target_node.outputs if o.type not in IGNORED_SOCKETS]
        for idx, output in enumerate(outputs):
            if idx < len(outputs_data) and hasattr(output, "default_value"):
                loaded_output = outputs_data[idx]
                try:
                    if output.type in ['OBJECT', 'COLLECTION']:
                        output.default_value = get_datablock_from_uuid(loaded_output, None)
                    else:
                        output.default_value = loaded_output
                except Exception as e:
                    logging.warning(
                        f"Node {target_node.name} output {output.name} parameter not supported, skipping ({e})")
            else:
                logging.warning(
                    f"Node {target_node.name} output length mismatch.")

@@ -119,6 +123,9 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:

    dumped_node = node_dumper.dump(node)

    if node.parent:
        dumped_node['parent'] = node.parent.name

    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])

    if dump_io_needed:

@@ -155,6 +162,7 @@ def dump_node(node: bpy.types.ShaderNode) -> dict:

            'color',
            'position',
            'interpolation',
            'hue_interpolation',
            'color_mode'
        ]
        dumped_node['color_ramp'] = ramp_dumper.dump(node.color_ramp)

@@ -313,6 +321,14 @@ def load_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeT

    for node in node_tree_data["nodes"]:
        load_node(node_tree_data["nodes"][node], target_node_tree)

    for node_id, node_data in node_tree_data["nodes"].items():
        target_node = target_node_tree.nodes.get(node_id, None)
        if target_node is None:
            continue
        elif 'parent' in node_data:
            target_node.parent = target_node_tree.nodes[node_data['parent']]
        else:
            target_node.parent = None

    # TODO: load only required nodes links
    # Load nodes links
    target_node_tree.links.clear()

@@ -327,6 +343,8 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:

    def has_node_group(node): return (
        hasattr(node, 'node_tree') and node.node_tree)

    def has_texture(node): return (
        node.type in ['ATTRIBUTE_SAMPLE_TEXTURE','TEXTURE'] and node.texture)

    deps = []

    for node in node_tree.nodes:

@@ -334,6 +352,8 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:

            deps.append(node.image)
        elif has_node_group(node):
            deps.append(node.node_tree)
        elif has_texture(node):
            deps.append(node.texture)

    return deps

@@ -364,10 +384,7 @@ def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_

        if mat_uuid is not None:
            mat_ref = get_datablock_from_uuid(mat_uuid, None)
        else:
            mat_ref = bpy.data.materials[mat_name]

        dst_materials.append(mat_ref)

@@ -23,6 +23,7 @@ import mathutils

from replication.exception import ContextError

from .bl_datablock import BlDatablock, get_datablock_from_uuid
from .bl_material import IGNORED_SOCKETS
from .dump_anything import (
    Dumper,
    Loader,

@@ -30,32 +31,97 @@ from .dump_anything import (

    np_dump_collection)
SKIN_DATA = [
    'radius',
    'use_loose',
    'use_root'
]

if bpy.app.version[1] >= 93:
    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
    logging.warning("Geometry node Float parameter not supported in \
                    blender 2.92.")


def get_node_group_inputs(node_group):
    inputs = []
    for inpt in node_group.inputs:
        if inpt.type in IGNORED_SOCKETS:
            continue
        else:
            inputs.append(inpt)
    return inputs
    # return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]


def dump_physics(target: bpy.types.Object) -> dict:
    """
    Dump all physics settings from a given object excluding modifier
    related physics settings (such as softbody, cloth, dynapaint and fluid)
    """
    dumper = Dumper()
    dumper.depth = 1
    physics_data = {}

    # Collisions (collision)
    if target.collision and target.collision.use:
        physics_data['collision'] = dumper.dump(target.collision)

    # Field (field)
    if target.field and target.field.type != "NONE":
        physics_data['field'] = dumper.dump(target.field)

    # Rigid Body (rigid_body)
    if target.rigid_body:
        physics_data['rigid_body'] = dumper.dump(target.rigid_body)

    # Rigid Body constraint (rigid_body_constraint)
    if target.rigid_body_constraint:
        physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)

    return physics_data


def load_physics(dumped_settings: dict, target: bpy.types.Object):
    """ Load all physics settings from a given object excluding modifier
    related physics settings (such as softbody, cloth, dynapaint and fluid)
    """
    loader = Loader()

    if 'collision' in dumped_settings:
        loader.load(target.collision, dumped_settings['collision'])

    if 'field' in dumped_settings:
        loader.load(target.field, dumped_settings['field'])

    if 'rigid_body' in dumped_settings:
        if not target.rigid_body:
            bpy.ops.rigidbody.object_add({"object": target})
        loader.load(target.rigid_body, dumped_settings['rigid_body'])
    elif target.rigid_body:
        bpy.ops.rigidbody.object_remove({"object": target})

    if 'rigid_body_constraint' in dumped_settings:
        if not target.rigid_body_constraint:
            bpy.ops.rigidbody.constraint_add({"object": target})
        loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
    elif target.rigid_body_constraint:
        bpy.ops.rigidbody.constraint_remove({"object": target})
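A rough usage sketch of the two physics helpers above, assuming they are in scope and that objects named "Cube" and "Cube.001" exist in the file (the names are illustrative):

```python
import bpy

src = bpy.data.objects.get("Cube")      # hypothetical source object
dst = bpy.data.objects.get("Cube.001")  # hypothetical target object

if src and dst:
    # Copy collision, force field and rigid body settings from src to dst.
    load_physics(dump_physics(src), dst)
```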
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
    """ Dump geometry node modifier input properties

    :arg modifier: geometry node modifier to dump
    :type modifier: bpy.types.Modifier
    """
    dumped_inputs = []
    for inpt in get_node_group_inputs(modifier.node_group):
        input_value = modifier[inpt.identifier]

        dumped_input = None
        if isinstance(input_value, bpy.types.ID):
            dumped_input = input_value.uuid
        elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
            dumped_input = input_value
        elif hasattr(input_value, 'to_list'):
            dumped_input = input_value.to_list()

@@ -73,18 +139,16 @@ def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: b

    :type target_modifier: bpy.types.Modifier
    """
    for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
        dumped_value = dumped_modifier['inputs'][input_index]
        input_value = target_modifier[inpt.identifier]
        if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
            target_modifier[inpt.identifier] = dumped_value
        elif hasattr(input_value, 'to_list'):
            for index in range(len(input_value)):
                input_value[index] = dumped_value[index]
        elif inpt.type in ['COLLECTION', 'OBJECT']:
            target_modifier[inpt.identifier] = get_datablock_from_uuid(
                dumped_value, None)
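A minimal round-trip sketch for the geometry node input helpers above, assuming they are in scope and that "Plane" is an object carrying a geometry nodes modifier (the name is illustrative):

```python
import bpy

obj = bpy.data.objects.get("Plane")  # hypothetical object with a NODES modifier
if obj is not None:
    geo_mods = [m for m in obj.modifiers if m.type == 'NODES' and m.node_group]
    if geo_mods:
        mod = geo_mods[0]
        dumped = dump_modifier_geometry_node_inputs(mod)
        # dumped is a flat list ordered like the group's exposed inputs:
        # plain values for value sockets, uuids for ID sockets, lists for vectors.
        load_modifier_geometry_node_inputs({'inputs': dumped}, mod)
```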
@@ -161,19 +225,24 @@ def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy

    return textures


def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
    """ Find geometry nodes dependencies from a modifier stack

    :arg modifiers: modifiers collection
    :type modifiers: bpy.types.bpy_prop_collection
    :return: list of bpy.types.NodeTree pointers
    """
    dependencies = []
    for mod in modifiers:
        if mod.type == 'NODES' and mod.node_group:
            dependencies.append(mod.node_group)
            # for inpt in get_node_group_inputs(mod.node_group):
            #     parameter = mod.get(inpt.identifier)
            #     if parameter and isinstance(parameter, bpy.types.ID):
            #         dependencies.append(parameter)

    return dependencies
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
    """ Dump object's vertex groups

@@ -219,6 +288,7 @@ def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Obje

        for index, weight in vg['vertices']:
            vertex_group.add([index], weight, 'REPLACE')


class BlObject(BlDatablock):
    bl_id = "objects"
    bl_class = bpy.types.Object
@@ -301,9 +371,9 @@ class BlObject(BlDatablock):

            loader.load(target.display, data['display'])

        # Parenting
        parent_id = data.get('parent_uid')
        if parent_id:
            parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
            # Avoid reloading
            if target.parent != parent and parent is not None:
                target.parent = parent
@@ -354,21 +424,49 @@ class BlObject(BlDatablock):

                                   SKIN_DATA)

        if hasattr(target, 'cycles_visibility') \
                and 'cycles_visibility' in data:
            loader.load(target.cycles_visibility, data['cycles_visibility'])

        # TODO: handle geometry nodes input from dump_anything
        if hasattr(target, 'modifiers'):
            nodes_modifiers = [
                mod for mod in target.modifiers if mod.type == 'NODES']
            for modifier in nodes_modifiers:
                load_modifier_geometry_node_inputs(
                    data['modifiers'][modifier.name], modifier)

            particles_modifiers = [
                mod for mod in target.modifiers if mod.type == 'PARTICLE_SYSTEM']

            for mod in particles_modifiers:
                default = mod.particle_system.settings
                dumped_particles = data['modifiers'][mod.name]['particle_system']
                loader.load(mod.particle_system, dumped_particles)

                settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
                if settings:
                    mod.particle_system.settings = settings
                    # Hack to remove the default generated particle settings
                    if not default.uuid:
                        bpy.data.particles.remove(default)

            phys_modifiers = [
                mod for mod in target.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]

            for mod in phys_modifiers:
                loader.load(mod.settings, data['modifiers'][mod.name]['settings'])

        # PHYSICS
        load_physics(data, target)

        transform = data.get('transforms', None)

        if transform:
            target.matrix_parent_inverse = mathutils.Matrix(
                transform['matrix_parent_inverse'])
            target.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
            target.matrix_local = mathutils.Matrix(transform['matrix_local'])

    def _dump_implementation(self, data, instance=None):
        assert(instance)
@@ -431,7 +529,7 @@ class BlObject(BlDatablock):

        # PARENTING
        if instance.parent:
            data['parent_uid'] = (instance.parent.uuid, instance.parent.name)

        # MODIFIERS
        if hasattr(instance, 'modifiers'):
@@ -440,12 +538,29 @@ class BlObject(BlDatablock):

            if modifiers:
                dumper.include_filter = None
                dumper.depth = 1
                dumper.exclude_filter = ['is_active']
                for index, modifier in enumerate(modifiers):
                    dumped_modifier = dumper.dump(modifier)
                    # hack to dump geometry nodes inputs
                    if modifier.type == 'NODES':
                        dumped_inputs = dump_modifier_geometry_node_inputs(
                            modifier)
                        dumped_modifier['inputs'] = dumped_inputs
                    elif modifier.type == 'PARTICLE_SYSTEM':
                        dumper.exclude_filter = [
                            "is_edited",
                            "is_editable",
                            "is_global_hair"
                        ]
                        dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
                        dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
                    elif modifier.type in ['SOFT_BODY', 'CLOTH']:
                        dumped_modifier['settings'] = dumper.dump(modifier.settings)

                    data["modifiers"][modifier.name] = dumped_modifier

        gp_modifiers = getattr(instance, 'grease_pencil_modifiers', None)

        if gp_modifiers:

@@ -467,6 +582,7 @@ class BlObject(BlDatablock):
                        'location']
                    gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)

        # CONSTRAINTS
        if hasattr(instance, 'constraints'):
            dumper.include_filter = None
@@ -511,7 +627,6 @@ class BlObject(BlDatablock):

                bone_groups[group.name] = dumper.dump(group)
            data['pose']['bone_groups'] = bone_groups

        # VERTEX GROUP
        if len(instance.vertex_groups) > 0:
            data['vertex_groups'] = dump_vertex_groups(instance)
@@ -548,7 +663,8 @@ class BlObject(BlDatablock):

        if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
            skin_vertices = list()
            for skin_data in object_data.skin_vertices:
                skin_vertices.append(
                    np_dump_collection(skin_data.data, SKIN_DATA))
            data['skin_vertices'] = skin_vertices

        # CYCLE SETTINGS

@@ -563,6 +679,9 @@ class BlObject(BlDatablock):

            ]
            data['cycles_visibility'] = dumper.dump(instance.cycles_visibility)

        # PHYSICS
        data.update(dump_physics(instance))

        return data
    def _resolve_deps_implementation(self):

@@ -572,10 +691,14 @@ class BlObject(BlDatablock):

        if self.instance.data:
            deps.append(self.instance.data)

        # Particle systems
        for particle_slot in self.instance.particle_systems:
            deps.append(particle_slot.settings)

        if self.is_library:
            deps.append(self.instance.library)

        if self.instance.parent:
            deps.append(self.instance.parent)

        if self.instance.instance_type == 'COLLECTION':

@@ -584,6 +707,6 @@ class BlObject(BlDatablock):

        if self.instance.modifiers:
            deps.extend(find_textures_dependencies(self.instance.modifiers))
            deps.extend(find_geometry_nodes_dependencies(self.instance.modifiers))

        return deps

@@ -0,0 +1,90 @@

import bpy
import mathutils

from . import dump_anything
from .bl_datablock import BlDatablock, get_datablock_from_uuid


def dump_textures_slots(texture_slots: bpy.types.bpy_prop_collection) -> list:
    """ Dump every texture slot collection as the form:
        [(index, slot_texture_uuid, slot_texture_name), (), ...]
    """
    dumped_slots = []
    for index, slot in enumerate(texture_slots):
        if slot and slot.texture:
            dumped_slots.append((index, slot.texture.uuid, slot.texture.name))

    return dumped_slots


def load_texture_slots(dumped_slots: list, target_slots: bpy.types.bpy_prop_collection):
    """ Load texture slots from a dumped list of (index, texture_uuid, texture_name) tuples.
    """
    for index, slot in enumerate(target_slots):
        if slot:
            target_slots.clear(index)

    for index, slot_uuid, slot_name in dumped_slots:
        target_slots.create(index).texture = get_datablock_from_uuid(
            slot_uuid, slot_name
        )


IGNORED_ATTR = [
    "is_embedded_data",
    "is_evaluated",
    "is_fluid",
    "is_library_indirect",
    "users"
]


class BlParticle(BlDatablock):
    bl_id = "particles"
    bl_class = bpy.types.ParticleSettings
    bl_icon = "PARTICLES"
    bl_check_common = False
    bl_reload_parent = False

    def _construct(self, data):
        instance = bpy.data.particles.new(data["name"])
        instance.uuid = self.uuid
        return instance

    def _load_implementation(self, data, target):
        dump_anything.load(target, data)
        dump_anything.load(target.effector_weights, data["effector_weights"])

        # Force field
        force_field_1 = data.get("force_field_1", None)
        if force_field_1:
            dump_anything.load(target.force_field_1, force_field_1)

        force_field_2 = data.get("force_field_2", None)
        if force_field_2:
            dump_anything.load(target.force_field_2, force_field_2)

        # Texture slots
        load_texture_slots(data["texture_slots"], target.texture_slots)

    def _dump_implementation(self, data, instance=None):
        assert instance

        dumper = dump_anything.Dumper()
        dumper.depth = 1
        dumper.exclude_filter = IGNORED_ATTR
        data = dumper.dump(instance)

        # Particle effectors
        data["effector_weights"] = dumper.dump(instance.effector_weights)
        if instance.force_field_1:
            data["force_field_1"] = dumper.dump(instance.force_field_1)
        if instance.force_field_2:
            data["force_field_2"] = dumper.dump(instance.force_field_2)

        # Texture slots
        data["texture_slots"] = dump_textures_slots(instance.texture_slots)

        return data

    def _resolve_deps_implementation(self):
        return [t.texture for t in self.instance.texture_slots if t and t.texture]

@@ -610,6 +610,8 @@ class Loader:

            instance.write(bpy.data.fonts.get(dump))
        elif isinstance(rna_property_type, T.Sound):
            instance.write(bpy.data.sounds.get(dump))
        # elif isinstance(rna_property_type, T.ParticleSettings):
        #     instance.write(bpy.data.particles.get(dump))

    def _load_matrix(self, matrix, dump):
        matrix.write(mathutils.Matrix(dump))

@@ -0,0 +1 @@

Subproject commit 001fbdc60da58a5e3b7006f1d782d6f472c12809

@@ -213,8 +213,6 @@ class SessionStartOperator(bpy.types.Operator):

                    type_module_class,
                    check_common=type_module_class.bl_check_common)

        if bpy.app.version[1] >= 91:
            python_binary_path = sys.executable
        else:

@@ -272,6 +270,11 @@ class SessionStartOperator(bpy.types.Operator):

        # Background client updates service
        deleyables.append(timers.ClientUpdate())
        deleyables.append(timers.DynamicRightSelectTimer())
        deleyables.append(timers.ApplyTimer(timeout=settings.depsgraph_update_rate))
        # deleyables.append(timers.PushTimer(
        #     queue=stagging,
        #     timeout=settings.depsgraph_update_rate
        # ))
        session_update = timers.SessionStatusUpdate()
        session_user_sync = timers.SessionUserSync()
        session_background_executor = timers.MainThreadExecutor(

@@ -181,7 +181,7 @@ class SessionPrefs(bpy.types.AddonPreferences):

    connection_timeout: bpy.props.IntProperty(
        name='connection timeout',
        description='connection timeout before disconnection',
        default=5000
    )
    # Replication update settings
    depsgraph_update_rate: bpy.props.FloatProperty(

@@ -18,7 +18,6 @@

import logging
import sys
import traceback

import bpy

from replication.constants import (FETCHED, RP_COMMON, STATE_ACTIVE,
                                   STATE_INITIAL, STATE_LOBBY, STATE_QUITTING,

@@ -118,6 +117,7 @@ class ApplyTimer(Timer):

            try:
                apply(session.repository, node)
            except Exception as e:
                logging.error(f"Fail to apply {node_ref.uuid}")
                traceback.print_exc()
            else:
                if node_ref.bl_reload_parent:

@@ -13,7 +13,7 @@ def main():

    if len(sys.argv) > 2:
        blender_rev = sys.argv[2]
    else:
        blender_rev = "2.92.0"

    try:
        exit_val = BAT.test_blender_addon(addon_path=addon, blender_revision=blender_rev)

@@ -8,6 +8,7 @@ import random

from multi_user.bl_types.bl_action import BlAction

INTERPOLATION = ['CONSTANT', 'LINEAR', 'BEZIER', 'SINE', 'QUAD', 'CUBIC', 'QUART', 'QUINT', 'EXPO', 'CIRC', 'BACK', 'BOUNCE', 'ELASTIC']
FMODIFIERS = ['GENERATOR', 'FNGENERATOR', 'ENVELOPE', 'CYCLES', 'NOISE', 'LIMITS', 'STEPPED']


# @pytest.mark.parametrize('blendname', ['test_action.blend'])
def test_action(clear_blend):

@@ -22,6 +23,9 @@ def test_action(clear_blend):

        point.co[1] = random.randint(-10, 10)
        point.interpolation = INTERPOLATION[random.randint(0, len(INTERPOLATION)-1)]

    for mod_type in FMODIFIERS:
        fcurve_sample.modifiers.new(mod_type)

    bpy.ops.mesh.primitive_plane_add()
    bpy.data.objects[0].animation_data_create()
    bpy.data.objects[0].animation_data.action = datablock

@@ -7,7 +7,7 @@ import bpy

import random

from multi_user.bl_types.bl_object import BlObject

# Removed 'BUILD' and 'SOFT_BODY' modifiers because the seed doesn't seem to be
# correctly initialized (#TODO: report the bug)
MOFIFIERS_TYPES = [
    'DATA_TRANSFER', 'MESH_CACHE', 'MESH_SEQUENCE_CACHE',

@@ -22,8 +22,7 @@ MOFIFIERS_TYPES = [

    'MESH_DEFORM', 'SHRINKWRAP', 'SIMPLE_DEFORM', 'SMOOTH',
    'CORRECTIVE_SMOOTH', 'LAPLACIANSMOOTH', 'SURFACE_DEFORM',
    'WARP', 'WAVE', 'CLOTH', 'COLLISION', 'DYNAMIC_PAINT',
    'EXPLODE', 'FLUID', 'OCEAN', 'PARTICLE_INSTANCE', 'SURFACE']

GP_MODIFIERS_TYPE = [
    'GP_ARRAY', 'GP_BUILD', 'GP_MIRROR', 'GP_MULTIPLY',

@@ -72,5 +71,5 @@ def test_object(clear_blend):

    test = implementation._construct(expected)
    implementation._load(expected, test)
    result = implementation._dump(test)
    print(DeepDiff(expected, result))
    assert not DeepDiff(expected, result)