# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
import logging
import re

import bpy
import mathutils
from replication.exception import ContextError
from replication.objects import Node
from replication.protocol import ReplicatedDatablock

from .bl_datablock import get_datablock_from_uuid, stamp_uuid
from .bl_action import (load_animation_data,
                        dump_animation_data,
                        resolve_animation_dependencies)
from ..preferences import get_preferences
from .bl_material import IGNORED_SOCKETS
from .dump_anything import (
    Dumper,
    Loader,
    np_load_collection,
    np_dump_collection)

SKIN_DATA = [
'radius',
'use_loose',
'use_root'
]
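
# Geometry node float inputs can only be synchronized from Blender 2.93
# onward; earlier versions are limited to int and str parameters.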
if bpy.app.version[1] >= 93:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str, float)
else:
SUPPORTED_GEOMETRY_NODE_PARAMETERS = (int, str)
    logging.warning("Geometry node float parameters are not supported "
                    "in Blender 2.92.")
def get_node_group_inputs(node_group):
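    """ Return the exposable inputs of a node group, skipping the socket
    types listed in IGNORED_SOCKETS.
    """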
inputs = []
for inpt in node_group.inputs:
if inpt.type in IGNORED_SOCKETS:
continue
else:
inputs.append(inpt)
return inputs
# return [inpt.identifer for inpt in node_group.inputs if inpt.type not in IGNORED_SOCKETS]
def dump_physics(target: bpy.types.Object) -> dict:
"""
Dump all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
dumper = Dumper()
dumper.depth = 1
physics_data = {}
# Collisions (collision)
if target.collision and target.collision.use:
physics_data['collision'] = dumper.dump(target.collision)
# Field (field)
if target.field and target.field.type != "NONE":
physics_data['field'] = dumper.dump(target.field)
# Rigid Body (rigid_body)
if target.rigid_body:
physics_data['rigid_body'] = dumper.dump(target.rigid_body)
# Rigid Body constraint (rigid_body_constraint)
if target.rigid_body_constraint:
physics_data['rigid_body_constraint'] = dumper.dump(target.rigid_body_constraint)
return physics_data
def load_physics(dumped_settings: dict, target: bpy.types.Object):
""" Load all physics settings from a given object excluding modifier
related physics settings (such as softbody, cloth, dynapaint and fluid)
"""
loader = Loader()
if 'collision' in dumped_settings:
loader.load(target.collision, dumped_settings['collision'])
if 'field' in dumped_settings:
loader.load(target.field, dumped_settings['field'])
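
    # Rigid body data blocks cannot be created by assignment, so the dedicated
    # operators add/remove them on the object before the settings are loaded.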
if 'rigid_body' in dumped_settings:
if not target.rigid_body:
bpy.ops.rigidbody.object_add({"object": target})
loader.load(target.rigid_body, dumped_settings['rigid_body'])
elif target.rigid_body:
bpy.ops.rigidbody.object_remove({"object": target})
if 'rigid_body_constraint' in dumped_settings:
if not target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_add({"object": target})
loader.load(target.rigid_body_constraint, dumped_settings['rigid_body_constraint'])
elif target.rigid_body_constraint:
bpy.ops.rigidbody.constraint_remove({"object": target})
def dump_modifier_geometry_node_inputs(modifier: bpy.types.Modifier) -> list:
""" Dump geometry node modifier input properties
:arg modifier: geometry node modifier to dump
:type modifier: bpy.type.Modifier
"""
dumped_inputs = []
for inpt in get_node_group_inputs(modifier.node_group):
input_value = modifier[inpt.identifier]
dumped_input = None
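        # ID inputs (objects, collections, ...) are serialized by uuid,
        # supported scalar values directly, and vector-like values as lists.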
if isinstance(input_value, bpy.types.ID):
dumped_input = input_value.uuid
elif isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
dumped_input = input_value
elif hasattr(input_value, 'to_list'):
dumped_input = input_value.to_list()
dumped_inputs.append(dumped_input)
return dumped_inputs
def load_modifier_geometry_node_inputs(dumped_modifier: dict, target_modifier: bpy.types.Modifier):
""" Load geometry node modifier inputs
:arg dumped_modifier: source dumped modifier to load
:type dumped_modifier: dict
:arg target_modifier: target geometry node modifier
:type target_modifier: bpy.type.Modifier
"""
for input_index, inpt in enumerate(get_node_group_inputs(target_modifier.node_group)):
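        # Dumped inputs are stored as a flat list, matched back to the node
        # group inputs by index.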
dumped_value = dumped_modifier['inputs'][input_index]
input_value = target_modifier[inpt.identifier]
if isinstance(input_value, SUPPORTED_GEOMETRY_NODE_PARAMETERS):
target_modifier[inpt.identifier] = dumped_value
elif hasattr(input_value, 'to_list'):
for index in range(len(input_value)):
input_value[index] = dumped_value[index]
elif inpt.type in ['COLLECTION', 'OBJECT']:
target_modifier[inpt.identifier] = get_datablock_from_uuid(
dumped_value, None)
def load_pose(target_bone, data):
target_bone.rotation_mode = data['rotation_mode']
loader = Loader()
loader.load(target_bone, data)
def find_data_from_name(name=None):
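    """ Resolve an object data-block by name, trying each supported bpy.data
    collection in turn. Used as a fallback when the uuid lookup fails.
    """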
    datablock = None

    if not name:
        pass
    elif name in bpy.data.meshes.keys():
        datablock = bpy.data.meshes[name]
    elif name in bpy.data.lights.keys():
        datablock = bpy.data.lights[name]
    elif name in bpy.data.cameras.keys():
        datablock = bpy.data.cameras[name]
    elif name in bpy.data.curves.keys():
        datablock = bpy.data.curves[name]
    elif name in bpy.data.metaballs.keys():
        datablock = bpy.data.metaballs[name]
    elif name in bpy.data.armatures.keys():
        datablock = bpy.data.armatures[name]
    elif name in bpy.data.grease_pencils.keys():
        datablock = bpy.data.grease_pencils[name]
    elif name in bpy.data.lattices.keys():
        datablock = bpy.data.lattices[name]
    elif name in bpy.data.speakers.keys():
        datablock = bpy.data.speakers[name]
    elif name in bpy.data.lightprobes.keys():
        # Only supported since 2.83
        if bpy.app.version[1] >= 83:
            datablock = bpy.data.lightprobes[name]
        else:
            logging.warning(
                "Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
    elif bpy.app.version[1] >= 91 and name in bpy.data.volumes.keys():
        # Only supported since 2.91
        datablock = bpy.data.volumes[name]

    return datablock
def _is_editmode(object: bpy.types.Object) -> bool:
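    """ Check whether the object's data is currently opened in edit-mode. """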
child_data = getattr(object, 'data', None)
return (child_data and
hasattr(child_data, 'is_editmode') and
child_data.is_editmode)
def find_textures_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.Texture]:
""" Find textures lying in a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.Texture pointers
"""
textures = []
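    # Every RNA property of each modifier is inspected, since texture pointers
    # are exposed under different attribute names depending on the modifier type.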
for mod in modifiers:
modifier_attributes = [getattr(mod, attr_name)
for attr_name in mod.bl_rna.properties.keys()]
for attr in modifier_attributes:
if issubclass(type(attr), bpy.types.Texture) and attr is not None:
textures.append(attr)
return textures
def find_geometry_nodes_dependencies(modifiers: bpy.types.bpy_prop_collection) -> [bpy.types.NodeTree]:
""" Find geometry nodes dependencies from a modifier stack
:arg modifiers: modifiers collection
:type modifiers: bpy.types.bpy_prop_collection
:return: list of bpy.types.NodeTree pointers
"""
dependencies = []
for mod in modifiers:
if mod.type == 'NODES' and mod.node_group:
dependencies.append(mod.node_group)
# for inpt in get_node_group_inputs(mod.node_group):
# parameter = mod.get(inpt.identifier)
# if parameter and isinstance(parameter, bpy.types.ID):
# dependencies.append(parameter)
return dependencies
def dump_vertex_groups(src_object: bpy.types.Object) -> dict:
    """ Dump an object's vertex groups

    :param src_object: dump vertex groups of this object
    :type src_object: bpy.types.Object
    """
    dumped_vertex_groups = {}
    if isinstance(src_object.data, bpy.types.GreasePencil):
        logging.warning(
            "Grease pencil vertex groups are not supported yet. More info: https://gitlab.com/slumber/multi-user/-/issues/161")
    else:
        points_attr = 'vertices' if isinstance(
            src_object.data, bpy.types.Mesh) else 'points'

        # Vertex group metadata
        for vg in src_object.vertex_groups:
            dumped_vertex_groups[vg.index] = {
                'name': vg.name,
                'vertices': []
            }

        # Vertex group assignation
        for vert in getattr(src_object.data, points_attr):
            for vg in vert.groups:
                vertices = dumped_vertex_groups.get(vg.group)['vertices']
                vertices.append((vert.index, vg.weight))

    return dumped_vertex_groups
def load_vertex_groups(dumped_vertex_groups: dict, target_object: bpy.types.Object):
""" Load object vertex groups
:param dumped_vertex_groups: vertex_groups to load
:type dumped_vertex_groups: dict
:param target_object: object to load the vertex groups into
:type target_object: bpy.types.Object
"""
target_object.vertex_groups.clear()
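    # Groups were cleared above: recreate each one and reapply its dumped
    # (vertex index, weight) pairs with 'REPLACE'.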
for vg in dumped_vertex_groups.values():
vertex_group = target_object.vertex_groups.new(name=vg['name'])
for index, weight in vg['vertices']:
vertex_group.add([index], weight, 'REPLACE')
class BlObject(ReplicatedDatablock):
bl_id = "objects"
bl_check_common = False
bl_icon = 'OBJECT_DATA'
bl_reload_parent = False
is_root = False
@staticmethod
def construct(data: dict) -> bpy.types.Object:
        datablock = None

        # TODO: refactoring
        object_name = data.get("name")
        data_uuid = data.get("data_uuid")
        data_id = data.get("data")
        object_uuid = data.get('uuid')
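
        # Resolve the object data first by uuid, falling back to a name-based
        # lookup when the uuid is not registered yet.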
        object_data = get_datablock_from_uuid(
            data_uuid,
            find_data_from_name(data_id),
            ignore=['images'])  # TODO: use resolve_from_id

        if object_data is None and data_uuid:
            raise Exception(f"Failed to load object {data['name']} ({object_uuid})")

        datablock = bpy.data.objects.new(object_name, object_data)
        datablock.uuid = object_uuid
return datablock
@staticmethod
def load(data: dict, datablock: bpy.types.Object):
        load_animation_data(data, datablock)

        loader = Loader()

        data_uuid = data.get("data_uuid")
        data_id = data.get("data")

        if datablock.data and (datablock.data.name != data_id):
            datablock.data = get_datablock_from_uuid(
                data_uuid, find_data_from_name(data_id), ignore=['images'])
# vertex groups
vertex_groups = data.get('vertex_groups', None)
if vertex_groups:
load_vertex_groups(vertex_groups, datablock)
object_data = datablock.data
# SHAPE KEYS
if 'shape_keys' in data:
datablock.shape_key_clear()
# Create keys and load vertices coords
for key_block in data['shape_keys']['key_blocks']:
key_data = data['shape_keys']['key_blocks'][key_block]
datablock.shape_key_add(name=key_block)
loader.load(
datablock.data.shape_keys.key_blocks[key_block], key_data)
for vert in key_data['data']:
datablock.data.shape_keys.key_blocks[key_block].data[vert].co = key_data['data'][vert]['co']
# Load relative key after all
for key_block in data['shape_keys']['key_blocks']:
reference = data['shape_keys']['key_blocks'][key_block]['relative_key']
datablock.data.shape_keys.key_blocks[key_block].relative_key = datablock.data.shape_keys.key_blocks[reference]
# Load transformation data
loader.load(datablock, data)
# Object display fields
if 'display' in data:
loader.load(datablock.display, data['display'])
# Parenting
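        # 'parent_uid' stores (uuid, name): resolve the parent by uuid first,
        # falling back to the object name.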
parent_id = data.get('parent_uid')
if parent_id:
parent = get_datablock_from_uuid(parent_id[0], bpy.data.objects[parent_id[1]])
# Avoid reloading
if datablock.parent != parent and parent is not None:
datablock.parent = parent
elif datablock.parent:
datablock.parent = None
# Pose
if 'pose' in data:
if not datablock.pose:
                raise Exception('No pose data yet (fixed in the near future)')
# Bone groups
for bg_name in data['pose']['bone_groups']:
bg_data = data['pose']['bone_groups'].get(bg_name)
bg_datablock = datablock.pose.bone_groups.get(bg_name)
if not bg_datablock:
bg_datablock = datablock.pose.bone_groups.new(name=bg_name)
loader.load(bg_datablock, bg_data)
# datablock.pose.bone_groups.get
# Bones
for bone in data['pose']['bones']:
datablock_bone = datablock.pose.bones.get(bone)
bone_data = data['pose']['bones'].get(bone)
if 'constraints' in bone_data.keys():
loader.load(datablock_bone, bone_data['constraints'])
load_pose(datablock_bone, bone_data)
                if 'bone_group_index' in bone_data.keys():
                    datablock_bone.bone_group = datablock.pose.bone_groups[bone_data['bone_group_index']]
# TODO: find another way...
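        # Image empties reference an image as their object data, which is not
        # covered by the generic data reload above.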
if datablock.empty_display_type == "IMAGE":
img_uuid = data.get('data_uuid')
if datablock.data is None and img_uuid:
datablock.data = get_datablock_from_uuid(img_uuid, None)
if hasattr(object_data, 'skin_vertices') \
and object_data.skin_vertices\
and 'skin_vertices' in data:
for index, skin_data in enumerate(object_data.skin_vertices):
np_load_collection(
data['skin_vertices'][index],
skin_data.data,
SKIN_DATA)
if hasattr(datablock, 'cycles_visibility') \
and 'cycles_visibility' in data:
loader.load(datablock.cycles_visibility, data['cycles_visibility'])
# TODO: handle geometry nodes input from dump_anything
if hasattr(datablock, 'modifiers'):
nodes_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'NODES']
for modifier in nodes_modifiers:
load_modifier_geometry_node_inputs(
data['modifiers'][modifier.name], modifier)
particles_modifiers = [
mod for mod in datablock.modifiers if mod.type == 'PARTICLE_SYSTEM']
for mod in particles_modifiers:
default = mod.particle_system.settings
dumped_particles = data['modifiers'][mod.name]['particle_system']
loader.load(mod.particle_system, dumped_particles)
settings = get_datablock_from_uuid(dumped_particles['settings_uuid'], None)
if settings:
mod.particle_system.settings = settings
# Hack to remove the default generated particle settings
if not default.uuid:
bpy.data.particles.remove(default)
phys_modifiers = [
mod for mod in datablock.modifiers if mod.type in ['SOFT_BODY', 'CLOTH']]
for mod in phys_modifiers:
loader.load(mod.settings, data['modifiers'][mod.name]['settings'])
# PHYSICS
load_physics(data, datablock)
transform = data.get('transforms', None)
if transform:
datablock.matrix_parent_inverse = mathutils.Matrix(
transform['matrix_parent_inverse'])
datablock.matrix_basis = mathutils.Matrix(transform['matrix_basis'])
datablock.matrix_local = mathutils.Matrix(transform['matrix_local'])
@staticmethod
def dump(datablock: object) -> dict:
assert(datablock)
if _is_editmode(datablock):
            if get_preferences().sync_flags.sync_during_editmode:
datablock.update_from_editmode()
else:
raise ContextError("Object is in edit-mode.")
dumper = Dumper()
dumper.depth = 1
        dumper.include_filter = [
            "uuid",
            "name",
            "rotation_mode",
            "data",
            "library",
            "empty_display_type",
            "empty_display_size",
            "empty_image_offset",
            "empty_image_depth",
            "empty_image_side",
            "show_empty_image_orthographic",
            "show_empty_image_perspective",
            "show_empty_image_only_axis_aligned",
            "use_empty_image_alpha",
            "color",
            "instance_collection",
            "instance_type",
            'lock_location',
            'lock_rotation',
            'lock_scale',
            'hide_render',
            'display_type',
            'display_bounds_type',
            'show_bounds',
            'show_name',
            'show_axis',
            'show_wire',
            'show_all_edges',
            'show_texture_space',
            'show_in_front',
            'type'
        ]
data = dumper.dump(datablock)
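
        # Transform matrices are dumped into a dedicated sub-dict so load()
        # can rebuild them as mathutils.Matrix values.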
dumper.include_filter = [
'matrix_parent_inverse',
'matrix_local',
'matrix_basis']
data['transforms'] = dumper.dump(datablock)
dumper.include_filter = [
'show_shadows',
]
data['display'] = dumper.dump(datablock.display)
data['data_uuid'] = getattr(datablock.data, 'uuid', None)
# PARENTING
if datablock.parent:
data['parent_uid'] = (datablock.parent.uuid, datablock.parent.name)
# MODIFIERS
if hasattr(datablock, 'modifiers'):
data["modifiers"] = {}
modifiers = getattr(datablock, 'modifiers', None)
if modifiers:
dumper.include_filter = None
dumper.depth = 1
dumper.exclude_filter = ['is_active']
for index, modifier in enumerate(modifiers):
dumped_modifier = dumper.dump(modifier)
# hack to dump geometry nodes inputs
if modifier.type == 'NODES':
dumped_inputs = dump_modifier_geometry_node_inputs(
modifier)
dumped_modifier['inputs'] = dumped_inputs
elif modifier.type == 'PARTICLE_SYSTEM':
dumper.exclude_filter = [
"is_edited",
"is_editable",
"is_global_hair"
]
dumped_modifier['particle_system'] = dumper.dump(modifier.particle_system)
dumped_modifier['particle_system']['settings_uuid'] = modifier.particle_system.settings.uuid
elif modifier.type in ['SOFT_BODY', 'CLOTH']:
dumped_modifier['settings'] = dumper.dump(modifier.settings)
data["modifiers"][modifier.name] = dumped_modifier
gp_modifiers = getattr(datablock, 'grease_pencil_modifiers', None)
if gp_modifiers:
dumper.include_filter = None
dumper.depth = 1
gp_modifiers_data = data["grease_pencil_modifiers"] = {}
for index, modifier in enumerate(gp_modifiers):
gp_mod_data = gp_modifiers_data[modifier.name] = dict()
gp_mod_data.update(dumper.dump(modifier))
if hasattr(modifier, 'use_custom_curve') \
and modifier.use_custom_curve:
curve_dumper = Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
'points',
'location']
gp_mod_data['curve'] = curve_dumper.dump(modifier.curve)
# CONSTRAINTS
if hasattr(datablock, 'constraints'):
dumper.include_filter = None
dumper.depth = 3
data["constraints"] = dumper.dump(datablock.constraints)
# POSE
if hasattr(datablock, 'pose') and datablock.pose:
# BONES
bones = {}
for bone in datablock.pose.bones:
bones[bone.name] = {}
dumper.depth = 1
rotation = 'rotation_quaternion' if bone.rotation_mode == 'QUATERNION' else 'rotation_euler'
group_index = 'bone_group_index' if bone.bone_group else None
dumper.include_filter = [
'rotation_mode',
'location',
'scale',
'custom_shape',
'use_custom_shape_bone_size',
'custom_shape_scale',
group_index,
rotation
]
bones[bone.name] = dumper.dump(bone)
dumper.include_filter = []
dumper.depth = 3
bones[bone.name]["constraints"] = dumper.dump(bone.constraints)
data['pose'] = {'bones': bones}
# GROUPS
bone_groups = {}
for group in datablock.pose.bone_groups:
dumper.depth = 3
dumper.include_filter = [
'name',
'color_set'
]
bone_groups[group.name] = dumper.dump(group)
data['pose']['bone_groups'] = bone_groups
        # VERTEX GROUPS
if len(datablock.vertex_groups) > 0:
data['vertex_groups'] = dump_vertex_groups(datablock)
# SHAPE KEYS
object_data = datablock.data
if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
dumper = Dumper()
dumper.depth = 2
dumper.include_filter = [
'reference_key',
'use_relative'
]
data['shape_keys'] = dumper.dump(object_data.shape_keys)
data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
key_blocks = {}
for key in object_data.shape_keys.key_blocks:
dumper.depth = 3
                dumper.include_filter = [
                    'name',
                    'data',
                    'mute',
                    'value',
                    'slider_min',
                    'slider_max',
                    'co'
                ]
key_blocks[key.name] = dumper.dump(key)
key_blocks[key.name]['relative_key'] = key.relative_key.name
data['shape_keys']['key_blocks'] = key_blocks
# SKIN VERTICES
if hasattr(object_data, 'skin_vertices') and object_data.skin_vertices:
skin_vertices = list()
for skin_data in object_data.skin_vertices:
skin_vertices.append(
np_dump_collection(skin_data.data, SKIN_DATA))
data['skin_vertices'] = skin_vertices
# CYCLE SETTINGS
if hasattr(datablock, 'cycles_visibility'):
dumper.include_filter = [
'camera',
'diffuse',
'glossy',
'transmission',
'scatter',
'shadow',
]
data['cycles_visibility'] = dumper.dump(
datablock.cycles_visibility)
# PHYSICS
        data.update(dump_physics(datablock))
return data
@staticmethod
def resolve_deps(datablock: bpy.types.Object) -> list:
deps = []
# Avoid Empty case
if datablock.data:
deps.append(datablock.data)
# Particle systems
for particle_slot in datablock.particle_systems:
deps.append(particle_slot.settings)
if datablock.parent:
deps.append(datablock.parent)
if datablock.instance_type == 'COLLECTION':
# TODO: uuid based
deps.append(datablock.instance_collection)
deps.extend(resolve_animation_dependencies(datablock))
if datablock.modifiers:
deps.extend(find_textures_dependencies(datablock.modifiers))
deps.extend(find_geometry_nodes_dependencies(datablock.modifiers))
return deps
_type = bpy.types.Object
_class = BlObject