# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import re
import bpy
import mathutils
from replication.exception import ContextError
from replication.objects import Node
from replication.protocol import ReplicatedDatablock
from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from .bl_action import (load_animation_data,
dump_animation_data,
resolve_animation_dependencies)
from ..preferences import get_preferences
from .bl_material import IGNORED_SOCKETS
from .dump_anything import (
Dumper,
Loader,
np_load_collection,
np_dump_collection)
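

# Skin vertex attributes serialized through np_dump_collection / np_load_collection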
SKIN_DATA = [
'radius',
'use_loose',
'use_root'
]
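

# Geometry node modifier input types that are dumped as plain values.
# Float inputs require Blender 2.93 or newer (see the warning below).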
if bpy.app.version >= (2, 93, 0):
    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
    SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
    logging.warning("Geometry node Float parameter not supported in "
                    "Blender 2.92.")


def get_node_group_inputs(node_group):
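    """ Return the node group inputs whose socket type is not in IGNORED_SOCKETS. """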
inputs = []
    for inpt in node_group.inputs:
        if inpt.type not in IGNORED_SOCKETS:
            inputs.append(inpt)
return inputs
# return [inpt.identifier for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]


def dump_physics(target: bpy.types.Object) -> dict:
"""
Dump all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
dumper = Dumper()
dumper.depth = 1
physics_data = {}
# Collisions (collision)
if target.collision and target.collision.use:
physics_data['collision'] = dumper.dump(target.collision)
# Field (field)
if target.field and target.field.type != "NONE":
physics_data['field'] = dumper.dump(target.field)
# Rigid Body (rigid_body)
if target.rigid_body:
physics_data['rigid_body'] = dumper.dump(target.rigid_body)
# Rigid Body constraint (rigid_body_constraint)
if target.rigid_body_constraint:
physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
return physics_data


def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
loader = Loader()
if 'collision' in dumped_settings:
loader.load(target.collision, dumped_settings['collision'])
if 'field' in dumped_settings:
loader.load(target.field, dumped_settings['field'])
if 'rigid_body' in dumped_settings:
if not target.rigid_body:
bpy.ops.rigidbody.object_add({"object": target})
loader.load(target.rigid_body, dumped_settings['rigid_body'])
elif target.rigid_body:
bpy.ops.rigidbody.object_remove({"object": target})
if 'rigid_body_constraint' in dumped_settings:
if not target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_add({"object": target})
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})


def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
    """ Dump geometry node modifier input properties

    :arg modifier: geometry node modifier to dump
    :type modifier: bpy.types.Modifier
    :return: list of dumped input values (ID inputs are dumped as uuids)
    """
dumped_inputs = []
for inpt in get_node_group_inputs(modifier.node_group):
input_value = modifier[inpt.identifier]
dumped_input = None
if isinstance(input_value, bpy.types.ID):
dumped_input = input_value.uuid
elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
dumped_input = input_value
elif hasattr(input_value, 'to_list'):
dumped_input = input_value.to_list()
dumped_inputs.append(dumped_input)
return dumped_inputs


def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
    """ Load geometry node modifier inputs

    :arg dumped_modifier: source dumped modifier to load
    :type dumped_modifier: dict
    :arg target_modifier: target geometry node modifier
    :type target_modifier: bpy.types.Modifier
    """
for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
dumped_value = dumped_modifier['inputs'][input_index]
input_value = target_modifier[inpt.identifier]
if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
target_modifier[inpt.identifier] = dumped_value
elif hasattr(input_value, 'to_list'):
for index in range(len(input_value)):
input_value[index] = dumped_value[index]
elif inpt.type in ['COLLECTION', 'OBJECT']:
target_modifier[inpt.identifier] = get_datablock_from_uuid(
dumped_value, None)


def load_pose(target_bone, data):
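    """ Apply dumped pose-bone settings, restoring the rotation mode first. """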
target_bone.rotation_mode = data['rotation_mode']
loader = Loader()
loader.load(target_bone, data)


def find_data_from_name(name=None):
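    """ Resolve an object-data datablock by name across the supported
        bpy.data collections (meshes, lights, cameras, curves, ...).
    """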
datablock = None
if not name:
pass
elif name in bpy.data.meshes.keys():
datablock = bpy.data.meshes[name]
elif name in bpy.data.lights.keys():
datablock = bpy.data.lights[name]
elif name in bpy.data.cameras.keys():
datablock = bpy.data.cameras[name]
elif name in bpy.data.curves.keys():
datablock = bpy.data.curves[name]
elif name in bpy.data.metaballs.keys():
datablock = bpy.data.metaballs[name]
elif name in bpy.data.armatures.keys():
datablock = bpy.data.armatures[name]
elif name in bpy.data.grease_pencils.keys():
datablock = bpy.data.grease_pencils[name]
elif name in bpy.data.lattices.keys():
datablock = bpy.data.lattices[name]
elif name in bpy.data.speakers.keys():
datablock = bpy.data.speakers[name]
elif name in bpy.data.lightprobes.keys():
# Only supported since 2.83
        if bpy.app.version >= (2, 83, 0):
datablock = bpy.data.lightprobes[name]
else:
logging.warning(
"Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
    elif bpy.app.version >= (2, 91, 0) and name in bpy.data.volumes.keys():
# Only supported since 2.91
datablock = bpy.data.volumes[name]
return datablock


def _is_editmode(object: bpy.types.Object) -> bool:
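    """ Return True if the object's data is currently in edit-mode. """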
child_data = getattr(object, 'data', None)
return (child_data and
hasattr(child_data, 'is_editmode') and
child_data.is_editmode)


def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.Texture]:
""" Find textures lying in a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.Texture pointers
"""
textures = []
for mod in modifiers:
modifier_attributes = [getattr(mod, attr_name)
for attr_name in mod.bl_rna.properties.keys()]
for attr in modifier_attributes:
            if attr is not None and issubclass(type(attr), bpy.types.Texture):
textures.append(attr)
return textures


def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
""" Find geometry nodes dependencies from a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.NodeTree pointers
"""
dependencies = []
for mod in modifiers:
if mod.type == 'NODES' and mod.node_group:
dependencies.append(mod.node_group)
# for inpt in get_node_group_inputs(mod.node_group):
# parameter = mod.get(inpt.identifier)
# if parameter and isinstance(parameter, bpy.types.ID):
# dependencies.append(parameter)
return dependencies


def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
    """ Dump object's vertex groups

    :param src_object: dump vertex groups of this object
    :type src_object: bpy.types.Object
    """
if isinstance(src_object.data, bpy.types.GreasePencil):
logging.warning(
"Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161")
else:
points_attr = 'vertices' if isinstance(
src_object.data, bpy.types.Mesh) else 'points'
dumped_vertex_groups = {}
# Vertex group metadata
for vg in src_object.vertex_groups:
dumped_vertex_groups[vg.index] = {
'name': vg.name,
'vertices': []
}
        # Vertex group assignment
for vert in getattr(src_object.data, points_attr):
for vg in vert.groups:
vertices = dumped_vertex_groups.get(vg.group)['vertices']
vertices.append((vert.index, vg.weight))
return dumped_vertex_groups


def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Object):
""" Load object vertex groups
:param dumped_vertex_groups: vertex_groups to load
:type dumped_vertex_groups: dict
:param target_object: object to load the vertex groups into
:type target_object: bpy.types.Object
"""
target_object.vertex_groups.clear()
for vg in dumped_vertex_groups.values():
vertex_group = target_object.vertex_groups.new(name=vg['name'])
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
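

# Replication implementation for bpy.types.Object: dumps/loads transforms,
# modifiers, constraints, pose, vertex groups, shape keys and physics.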
class BlObject(ReplicatedDatablock):
bl_id = "objects"
bl_check_common = False
bl_icon = 'OBJECT_DATA'
bl_reload_parent = False
is_root = False

    @staticmethod
def construct(data: dict) -> bpy.types.Object:
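        """ Build a new bpy.data.objects datablock from the dumped state,
            resolving its object data by uuid first and by name as a fallback.
        """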
datablock = None
# TODO: refactoring
object_name = data.get("name")
data_uuid = data.get("data_uuid")
data_id = data.get("data")
object_uuid = data.get('uuid')
object_data = get_datablock_from_uuid(
data_uuid,
find_data_from_name(data_id),
ignore=['images']) # TODO: use resolve_from_id
if object_data is None and data_uuid:
            raise Exception(f"Failed to load object {data['name']} ({object_uuid})")
datablock = bpy.data.objects.new(object_name, object_data)
datablock.uuid = object_uuid
return datablock

    @staticmethod
def load(data: dict, datablock: bpy.types.Object):
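        """ Load dumped settings (object data link, vertex groups, shape keys,
            transforms, pose, modifiers and physics) onto an existing object.
        """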
load_animation_data(data, datablock)
loader = Loader()
data_uuid = data.get("data_uuid")
data_id = data.get("data")
if datablock.data and (datablock.data.name != data_id):
datablock.data = get_datablock_from_uuid(
data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
vertex_groups = data.get('vertex_groups', None)
if vertex_groups:
load_vertex_groups(vertex_groups, datablock)
object_data = datablock.data
# SHAPE KEYS
if 'shape_keys' in data:
datablock.shape_key_clear()
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
datablock.shape_key_add(name=key_block)
loader.load(
datablock.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
datablock.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
datablock.data.shape_keys.key_blocks[key_block].relative_key = datablock.data.shape_keys.key_blocks[reference]
# Load transformation data
loader.load(datablock, data)
# Object display fields
if 'display' in data:
loader.load(datablock.display, data['display'])
# Parenting
parent_id = data.get('parent_uid')
if parent_id:
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading
if datablock.parent != parent and parent is not None:
datablock.parent = parent
elif datablock.parent:
datablock.parent = None
# Pose
if 'pose' in data:
if not datablock.pose:
                raise Exception('No pose data yet (fixed in the near future)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_datablock = datablock.pose.bone_groups.get(bg_name)
if not bg_datablock:
bg_datablock = datablock.pose.bone_groups.new(name=bg_name)
loader.load(bg_datablock, bg_data)
# datablock.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
datablock_bone = datablock.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
loader.load(datablock_bone, bone_data['constraints'])
load_pose(datablock_bone, bone_data)
                if 'bone_group_index' in bone_data.keys():
                    datablock_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]
# TODO: find another way...
if datablock.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid')
if datablock.data is None and img_uuid:
datablock.data = get_datablock_from_uuid(img_uuid, None)
if hasattr(object_data, 'skin_vertices') \
and object_data.skin_vertices\
and 'skin_vertices' in data:
for index, skin_data in enumerate(object_data.skin_vertices):
np_load_collection(
data['skin_vertices'][index],
skin_data.data,
SKIN_DATA)
if hasattr(datablock, 'cycles_visibility') \
and 'cycles_visibility' in data:
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
# TODO: handle geometry nodes input from dump_anything
if hasattr(datablock, 'modifiers'):
nodes_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'NODES']
for modifier in nodes_modifiers:
load_modifier_geometry_node_inputs(
data['modifiers'][modifier.name], modifier)
particles_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'PARTICLE_SYSTEM']
for mod in particles_modifiers:
default = mod.particle_system.settings
dumped_particles = data['modifiers'][mod.name]['particle_system']
loader.load(mod.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
mod.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
phys_modifiers = [
mod for mod in datablock.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]
for mod in phys_modifiers:
loader.load(mod.settings, data['modifiers'][mod.name]['settings'])
# PHYSICS
load_physics(data, datablock)
transform = data.get('transforms', None)
if transform:
datablock.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])

    @staticmethod
def dump(datablock: object) -> dict:
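        """ Dump object settings, transforms, modifiers, constraints, pose,
            vertex groups, shape keys, skin vertices, cycles visibility and
            physics into a dict.
        """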
assert(datablock)
if _is_editmode(datablock):
            if get_preferences().sync_flags.sync_during_editmode:
datablock.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
dumper = Dumper()
dumper.depth = 1
dumper.include_filter = [
"uuid",
"name",
"rotation_mode",
"data",
"library",
"empty_display_type",
"empty_display_size",
"empty_image_offset",
"empty_image_depth",
"empty_image_side",
"show_empty_image_orthographic",
"show_empty_image_perspective",
"show_empty_image_only_axis_aligned",
"use_empty_image_alpha",
"color",
"instance_collection",
"instance_type",
'lock_location',
'lock_rotation',
'lock_scale',
'hide_render',
'display_type',
'display_bounds_type',
'show_bounds',
'show_name',
'show_axis',
'show_wire',
'show_all_edges',
'show_texture_space',
'show_in_front',
'type'
]
data = dumper.dump(datablock)
dumper.include_filter = [
'matrix_parent_inverse',
'matrix_local',
'matrix_basis']
data['transforms'] = dumper.dump(datablock)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(datablock.display)
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
# PARENTING
if datablock.parent:
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
# MODIFIERS
if hasattr(datablock, 'modifiers'):
data["modifiers"] = {}
modifiers = getattr(datablock, 'modifiers', None)
if modifiers:
dumper.include_filter = None
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
data["modifiers"][modifier.name] = dumped_modifier
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
if gp_modifiers:
dumper.include_filter = None
dumper.depth = 1
gp_modifiers_data = data["grease_pencil_modifiers"] = {}
for index, modifier in enumerate(gp_modifiers):
gp_mod_data = gp_modifiers_data[modifier.name] = dict()
gp_mod_data.update(dumper.dump(modifier))
if hasattr(modifier, 'use_custom_curve') \
and modifier.use_custom_curve:
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location']
gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)
# CONSTRAINTS
if hasattr(datablock, 'constraints'):
dumper.include_filter = None
dumper.depth = 3
data["constraints"] = dumper.dump(datablock.constraints)
# POSE
if hasattr(datablock, 'pose') and datablock.pose:
# BONES
bones = {}
for bone in datablock.pose.bones:
bones[bone.name] = {}
dumper.depth = 1
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
group_index = 'bone_group_index' if bone.bone_group else None
dumper.include_filter = [
'rotation_mode',
'location',
'scale',
'custom_shape',
'use_custom_shape_bone_size',
'custom_shape_scale',
group_index,
rotation
]
bones[bone.name] = dumper.dump(bone)
dumper.include_filter = []
dumper.depth = 3
bones[bone.name]["constraints"] = dumper.dump(bone.constraints)
data['pose'] = {'bones': bones}
# GROUPS
bone_groups = {}
for group in datablock.pose.bone_groups:
dumper.depth = 3
dumper.include_filter = [
'name',
'color_set'
]
bone_groups[group.name] = dumper.dump(group)
data['pose']['bone_groups'] = bone_groups
        # VERTEX GROUPS
if len(datablock.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(datablock)
# SHAPE KEYS
object_data = datablock.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
'reference_key',
'use_relative'
]
data['shape_keys'] = dumper.dump(object_data.shape_keys)
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
key_blocks = {}
for key in object_data.shape_keys.key_blocks:
dumper.depth = 3
dumper.include_filter = [
'name',
'data',
'mute',
'value',
'slider_min',
'slider_max',
'data',
'co'
]
key_blocks[key.name] = dumper.dump(key)
key_blocks[key.name]['relative_key'] = key.relative_key.name
data['shape_keys']['key_blocks'] = key_blocks
# SKIN VERTICES
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
skin_vertices = list()
for skin_data in object_data.skin_vertices:
skin_vertices.append(
np_dump_collection(skin_data.data, SKIN_DATA))
data['skin_vertices'] = skin_vertices
        # CYCLES SETTINGS
if hasattr(datablock, 'cycles_visibility'):
dumper.include_filter = [
'camera',
'diffuse',
'glossy',
'transmission',
'scatter',
'shadow',
]
data['cycles_visibility'] = dumper.dump(
datablock.cycles_visibility)
# PHYSICS
        data.update(dump_physics(datablock))
return data

    @staticmethod
def resolve_deps(datablock: bpy.types.Object) -> list:
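        """ List the datablocks this object depends on: object data, particle
            settings, parent, instance collection, animation data, modifier
            textures and geometry node groups.
        """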
deps = []
# Avoid Empty case
if datablock.data:
deps.append(datablock.data)
# Particle systems
for particle_slot in datablock.particle_systems:
deps.append(particle_slot.settings)
if datablock.parent:
deps.append(datablock.parent)
if datablock.instance_type == 'COLLECTION':
# TODO: uuid based
deps.append(datablock.instance_collection)
deps.extend(resolve_animation_dependencies(datablock))
if datablock.modifiers:
deps.extend(find_textures_dependencies(datablock.modifiers))
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
return deps
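

# Module-level registration, presumably consumed by the replication protocol
# factory to map bpy.types.Object to the BlObject implementation.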
_type = bpy.types.Object
_class = BlObject