feat: start to cleanup datablock io api

Swann Martinez 2020-03-09 15:59:30 +01:00
parent 9df7cd4659
commit a8da01c8ff
21 changed files with 187 additions and 154 deletions

View File

@ -127,6 +127,9 @@ class BlAction(BlDatablock):
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.actions.get(self.data['name'])

View File

@ -127,5 +127,8 @@ class BlArmature(BlDatablock):
item.name for item in container_users if isinstance(item, bpy.types.Scene)]
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.armatures.get(self.data['name'])

View File

@ -13,7 +13,7 @@ class BlCamera(BlDatablock):
bl_automatic_push = True
bl_icon = 'CAMERA_DATA'
def load(self, data, target):
def load_implementation(self, data, target):
utils.dump_anything.load(target, data)
dof_settings = data.get('dof')
@ -52,5 +52,8 @@ class BlCamera(BlDatablock):
]
return dumper.dump(pointer)
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.cameras.get(self.data['name'])

View File

@ -77,7 +77,7 @@ class BlCollection(BlDatablock):
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
deps = []
for child in self.pointer.children:

View File

@ -1,8 +1,11 @@
import bpy
import bpy.types as T
import mathutils
from .. import utils
from .bl_datablock import BlDatablock
from ..libs import dump_anything
class BlCurve(BlDatablock):
bl_id = "curves"
@ -13,51 +16,61 @@ class BlCurve(BlDatablock):
bl_icon = 'CURVE_DATA'
def construct(self, data):
return bpy.data.curves.new(data["name"], 'CURVE')
return bpy.data.curves.new(data["name"], data["type"])
def load(self, data, target):
utils.dump_anything.load(target, data)
def load_implementation(self, data, target):
dump_anything.load(target, data)
target.splines.clear()
# load splines
for spline in data['splines']:
new_spline = target.splines.new(data['splines'][spline]['type'])
utils.dump_anything.load(new_spline, data['splines'][spline])
dump_anything.load(new_spline, data['splines'][spline])
# Load curve geometry data
if new_spline.type == 'BEZIER':
for bezier_point_index in data['splines'][spline]["bezier_points"]:
if bezier_point_index != 0:
new_spline.bezier_points.add(1)
utils.dump_anything.load(
dump_anything.load(
new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index])
# Not really working for now...
# See https://blender.stackexchange.com/questions/7020/create-nurbs-surface-with-python
if new_spline.type == 'NURBS':
new_spline.points.add(len(data['splines'][spline]["points"])-1)
for point_index in data['splines'][spline]["points"]:
new_spline.points.add(1)
utils.dump_anything.load(
dump_anything.load(
new_spline.points[point_index], data['splines'][spline]["points"][point_index])
def dump_implementation(self, data, pointer=None):
assert(pointer)
data = utils.dump_datablock(pointer, 1)
dumper = dump_anything.Dumper()
data = dumper.dump(pointer)
data['splines'] = {}
dumper = utils.dump_anything.Dumper()
dumper.depth = 3
for index, spline in enumerate(pointer.splines):
spline_data = {}
spline_data = dump_anything.dump(spline)
spline_data['points'] = dumper.dump(spline.points)
spline_data['bezier_points'] = dumper.dump(spline.bezier_points)
spline_data['type'] = dumper.dump(spline.type)
data['splines'][index] = spline_data
if isinstance(pointer,'TextCurve'):
data['type'] = 'TEXT'
if isinstance(pointer,'SurfaceCurve'):
if isinstance(pointer, T.SurfaceCurve):
data['type'] = 'SURFACE'
if isinstance(pointer,'TextCurve'):
elif isinstance(pointer, T.TextCurve):
data['type'] = 'FONT'
elif isinstance(pointer, T.Curve):
data['type'] = 'CURVE'
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.curves.get(self.data['name'])
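
Note: the BlCurve hunk above now records the concrete curve subtype at dump time and feeds it back into construct(), instead of hard-coding 'CURVE'. A minimal sketch of that round-trip, assuming the bpy API used above; it is only meaningful inside Blender, and the helper names are illustrative:

# Sketch of the curve subtype round-trip shown above; only runs inside
# Blender, where bpy is available.
import bpy
import bpy.types as T

def dump_curve_type(pointer):
    # SurfaceCurve and TextCurve are Curve subclasses, so the generic
    # Curve check has to come last.
    if isinstance(pointer, T.SurfaceCurve):
        return 'SURFACE'
    elif isinstance(pointer, T.TextCurve):
        return 'FONT'
    elif isinstance(pointer, T.Curve):
        return 'CURVE'

def construct_curve(data):
    # The recorded subtype picks the right kind of curve datablock
    # instead of the previously hard-coded 'CURVE'.
    return bpy.data.curves.new(data["name"], data["type"])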

View File

@ -3,11 +3,11 @@ import mathutils
from .. import utils
from ..libs.replication.replication.data import ReplicatedDatablock
from ..libs.replication.replication.constants import UP
from ..libs.replication.replication.constants import DIFF_BINARY
from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
from ..libs import dump_anything
def dump_driver(driver):
dumper = utils.dump_anything.Dumper()
dumper = dump_anything.Dumper()
dumper.depth = 6
data = dumper.dump(driver)
@ -22,7 +22,7 @@ def load_driver(target_datablock, src_driver):
# Settings
new_driver.driver.type = src_driver_data['type']
new_driver.driver.expression = src_driver_data['expression']
utils.dump_anything.load(new_driver, src_driver)
dump_anything.load(new_driver, src_driver)
# Variables
for src_variable in src_driver_data['variables']:
@ -35,7 +35,7 @@ def load_driver(target_datablock, src_driver):
src_target_data = src_var_data['targets'][src_target]
new_var.targets[src_target].id = utils.resolve_from_id(
src_target_data['id'], src_target_data['id_type'])
utils.dump_anything.load(
dump_anything.load(
new_var.targets[src_target], src_target_data)
# Fcurve
@ -47,7 +47,7 @@ def load_driver(target_datablock, src_driver):
for index, src_point in enumerate(src_driver['keyframe_points']):
new_point = new_fcurve[index]
utils.dump_anything.load(
dump_anything.load(
new_point, src_driver['keyframe_points'][src_point])
@ -72,12 +72,6 @@ class BlDatablock(ReplicatedDatablock):
pointer.library) or \
(self.data and 'library' in self.data)
if self.is_library:
self.load = self.load_library
self.dump = self.dump_library
self.diff = self.diff_library
self.resolve_dependencies = self.resolve_dependencies_library
if self.pointer and hasattr(self.pointer, 'uuid'):
self.pointer.uuid = self.uuid
@ -93,19 +87,10 @@ class BlDatablock(ReplicatedDatablock):
"""Generic datablock diff"""
return self.pointer.name != self.data['name']
def construct_library(self, data):
return None
def load_library(self, data, target):
pass
def dump_library(self, pointer=None):
return utils.dump_datablock(pointer, 1)
def diff_library(self):
return False
def resolve_dependencies_library(self):
def resolve_deps_library(self):
return [self.pointer.library]
def resolve(self):
@ -116,8 +101,7 @@ class BlDatablock(ReplicatedDatablock):
# In case of lost uuid (ex: undo), resolve by name and reassign it
# TODO: avoid reference storing
if not datablock_ref:
datablock_ref = getattr(
bpy.data, self.bl_id).get(self.data['name'])
datablock_ref = getattr(bpy.data, self.bl_id).get(self.data['name'])
if datablock_ref:
setattr(datablock_ref, 'uuid', self.uuid)
@ -126,6 +110,7 @@ class BlDatablock(ReplicatedDatablock):
def dump(self, pointer=None):
data = {}
# Dump animation data
if utils.has_action(pointer):
dumper = utils.dump_anything.Dumper()
dumper.include_filter = ['action']
@ -138,6 +123,10 @@ class BlDatablock(ReplicatedDatablock):
dump_driver(driver))
data.update(dumped_drivers)
if self.is_library:
data.update(dump_anything.dump(pointer))
else:
data.update(self.dump_implementation(data, pointer=pointer))
return data
@ -161,18 +150,27 @@ class BlDatablock(ReplicatedDatablock):
if 'action' in data['animation_data']:
target.animation_data.action = bpy.data.actions[data['animation_data']['action']]
if self.is_library:
return
else:
self.load_implementation(data, target)
def load_implementation(self, data, target):
raise NotImplementedError
def resolve_dependencies(self):
def resolve_deps(self):
dependencies = []
if utils.has_action(self.pointer):
dependencies.append(self.pointer.animation_data.action)
if not self.is_library:
dependencies.extend(self.resolve_deps_implementation())
print(dependencies)
return dependencies
def resolve_deps_implementation(self):
raise NotImplementedError
def is_valid(self):
raise NotImplementedError
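
Note: the hunks above turn BlDatablock into a thin dispatcher: the public dump/load/resolve_deps entry points keep the generic animation-data, driver and library handling and hand everything type-specific to *_implementation hooks, with is_valid left abstract. A minimal sketch of that dispatch pattern; the *Sketch class name and simplified bodies are illustrative, not the addon's actual code:

# Minimal sketch of the dispatch pattern introduced above. The real code
# keeps the animation-data and driver handling in the public methods.
class BlDatablockSketch:
    def __init__(self, pointer=None, data=None):
        self.pointer = pointer          # the live bpy datablock
        self.data = data or {}          # its serialized form
        self.is_library = False         # linked datablocks skip the hooks

    # Public entry points stay generic and delegate to per-type hooks.
    def dump(self, pointer=None):
        data = {}
        if self.is_library:
            return data                 # generic handling only
        data.update(self.dump_implementation(data, pointer=pointer))
        return data

    def load(self, data, target):
        if self.is_library:
            return
        self.load_implementation(data, target)

    def resolve_deps(self):
        deps = []
        if not self.is_library:
            deps.extend(self.resolve_deps_implementation())
        return deps

    # Hooks each subclass (BlCamera, BlCurve, BlGpencil, ...) overrides.
    def dump_implementation(self, data, pointer=None):
        raise NotImplementedError

    def load_implementation(self, data, target):
        raise NotImplementedError

    def resolve_deps_implementation(self):
        raise NotImplementedError

    def is_valid(self):
        raise NotImplementedError

Each bl_* type in this commit then only renames its old load/dump/resolve_dependencies overrides to the matching hook and adds an is_valid() lookup into bpy.data.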

View File

@ -1,13 +1,13 @@
import bpy
import mathutils
from .. import utils
from ..libs import dump_anything
from .bl_datablock import BlDatablock
def load_gpencil_layer(target=None, data=None, create=False):
utils.dump_anything.load(target, data)
dump_anything.load(target, data)
for k,v in target.frames.items():
target.frames.remove(v)
@ -15,13 +15,12 @@ def load_gpencil_layer(target=None, data=None, create=False):
tframe = target.frames.new(data["frames"][frame]['frame_number'])
# utils.dump_anything.load(tframe, data["frames"][frame])
for stroke in data["frames"][frame]["strokes"]:
try:
tstroke = tframe.strokes[stroke]
except:
tstroke = tframe.strokes.new()
utils.dump_anything.load(
dump_anything.load(
tstroke, data["frames"][frame]["strokes"][stroke])
for point in data["frames"][frame]["strokes"][stroke]["points"]:
@ -30,7 +29,7 @@ def load_gpencil_layer(target=None, data=None, create=False):
tstroke.points.add(1)
tpoint = tstroke.points[len(tstroke.points)-1]
utils.dump_anything.load(tpoint, p)
dump_anything.load(tpoint, p)
class BlGpencil(BlDatablock):
@ -44,7 +43,7 @@ class BlGpencil(BlDatablock):
def construct(self, data):
return bpy.data.grease_pencils.new(data["name"])
def load(self, data, target):
def load_implementation(self, data, target):
for layer in target.layers:
target.layers.remove(layer)
@ -57,7 +56,7 @@ class BlGpencil(BlDatablock):
load_gpencil_layer(
target=gp_layer, data=data["layers"][layer], create=True)
utils.dump_anything.load(target, data)
dump_anything.load(target, data)
target.materials.clear()
if "materials" in data.keys():
@ -66,12 +65,12 @@ class BlGpencil(BlDatablock):
def dump_implementation(self, data, pointer=None):
assert(pointer)
data = utils.dump_datablock(pointer, 2)
utils.dump_datablock_attibutes(
pointer, ['layers'], 9, data)
data = dump_anything.dump(pointer, 2)
data['layers'] = dump_anything.dump(pointer.layers, 9)
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
deps = []
for material in self.pointer.materials:

View File

@ -59,7 +59,7 @@ class BlImage(BlDatablock):
image.colorspace_settings.name = data["colorspace_settings"]["name"]
def dump_implementation(self, data, pointer=None):
def dump(self, data, pointer=None):
assert(pointer)
data = {}
data['pixels'] = dump_image(pointer)
@ -81,5 +81,8 @@ class BlImage(BlDatablock):
def diff(self):
return False
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.images.get(self.data['name'])

View File

@ -13,7 +13,7 @@ class BlLattice(BlDatablock):
bl_automatic_push = True
bl_icon = 'LATTICE_DATA'
def load(self, data, target):
def load_implementation(self, data, target):
utils.dump_anything.load(target, data)
for point in data['points']:
@ -21,7 +21,7 @@ class BlLattice(BlDatablock):
def construct(self, data):
return bpy.data.lattices.new(data["name"])
def dump(self, pointer=None):
def dump_implementation(self, data, pointer=None):
assert(pointer)
dumper = utils.dump_anything.Dumper()
@ -45,6 +45,9 @@ class BlLattice(BlDatablock):
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.lattices.get(self.data['name'])

View File

@ -1,7 +1,7 @@
import bpy
import mathutils
from .. import utils
from ..libs import dump_anything
from .bl_datablock import BlDatablock
@ -22,7 +22,10 @@ class BlLibrary(BlDatablock):
def dump(self, pointer=None):
assert(pointer)
return utils.dump_datablock(pointer, 1)
return dump_anything.dump(pointer, 1)
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.libraries.get(self.data['name'])

View File

@ -46,6 +46,9 @@ class BlLight(BlDatablock):
data = dumper.dump(pointer)
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.lights.get(self.data['name'])

View File

@ -15,7 +15,7 @@ class BlLightprobe(BlDatablock):
bl_automatic_push = True
bl_icon = 'LIGHTPROBE_GRID'
def load(self, data, target):
def load_implementation(self, data, target):
utils.dump_anything.load(target, data)
def construct(self, data):
@ -26,10 +26,7 @@ class BlLightprobe(BlDatablock):
else:
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
def dump(self, pointer=None):
def dump_implementation(self, data, pointer=None):
assert(pointer)
if bpy.app.version[1] < 83:
logger.warning("Lightprobe replication only supported since 2.83. See https://developer.blender.org/D6396")
@ -59,5 +56,8 @@ class BlLightprobe(BlDatablock):
return dumper.dump(pointer)
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.lattices.get(self.data['name'])

View File

@ -3,6 +3,7 @@ import mathutils
import logging
from .. import utils
from ..libs import dump_anything
from .bl_datablock import BlDatablock
logger = logging.getLogger(__name__)
@ -43,7 +44,7 @@ def load_node(target_node_tree, source):
clean_color_ramp(target_node.color_ramp)
if source['type'] == 'CURVE_RGB':
load_mapping(target_node.mapping, source['mapping'])
utils.dump_anything.load(
dump_anything.load(
target_node,
source)
@ -84,7 +85,7 @@ class BlMaterial(BlDatablock):
if not target.is_grease_pencil:
bpy.data.materials.create_gpencil_data(target)
utils.dump_anything.load(
dump_anything.load(
target.grease_pencil, data['grease_pencil'])
utils.load_dict(data['grease_pencil'], target.grease_pencil)
@ -95,7 +96,7 @@ class BlMaterial(BlDatablock):
target.node_tree.nodes.clear()
utils.dump_anything.load(target,data)
dump_anything.load(target,data)
# Load nodes
for node in data["node_tree"]["nodes"]:
@ -109,7 +110,7 @@ class BlMaterial(BlDatablock):
def dump_implementation(self, data, pointer=None):
assert(pointer)
mat_dumper = utils.dump_anything.Dumper()
mat_dumper = dump_anything.Dumper()
mat_dumper.depth = 2
mat_dumper.exclude_filter = [
"preview",
@ -120,7 +121,7 @@ class BlMaterial(BlDatablock):
"line_color",
"view_center",
]
node_dumper = utils.dump_anything.Dumper()
node_dumper = dump_anything.Dumper()
node_dumper.depth = 1
node_dumper.exclude_filter = [
"dimensions",
@ -138,10 +139,10 @@ class BlMaterial(BlDatablock):
"outputs",
"width_hidden"
]
input_dumper = utils.dump_anything.Dumper()
input_dumper = dump_anything.Dumper()
input_dumper.depth = 2
input_dumper.include_filter = ["default_value"]
links_dumper = utils.dump_anything.Dumper()
links_dumper = dump_anything.Dumper()
links_dumper.depth = 3
links_dumper.include_filter = [
"name",
@ -165,7 +166,7 @@ class BlMaterial(BlDatablock):
nodes[node.name]['inputs'][i.name] = input_dumper.dump(
i)
if hasattr(node, 'color_ramp'):
ramp_dumper = utils.dump_anything.Dumper()
ramp_dumper = dump_anything.Dumper()
ramp_dumper.depth = 4
ramp_dumper.include_filter = [
'elements',
@ -175,7 +176,7 @@ class BlMaterial(BlDatablock):
]
nodes[node.name]['color_ramp'] = ramp_dumper.dump(node.color_ramp)
if hasattr(node, 'mapping'):
curve_dumper = utils.dump_anything.Dumper()
curve_dumper = dump_anything.Dumper()
curve_dumper.depth = 5
curve_dumper.include_filter = [
'curves',
@ -187,10 +188,10 @@ class BlMaterial(BlDatablock):
data["node_tree"]["links"] = links_dumper.dump(pointer.node_tree.links)
elif pointer.is_grease_pencil:
utils.dump_datablock_attibutes(pointer, ["grease_pencil"], 3, data)
data['grease_pencil'] = dump_anything.dump(pointer.grease_pencil, 3)
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
# TODO: resolve node group deps
deps = []

View File

@ -168,7 +168,7 @@ class BlMesh(BlDatablock):
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
deps = []
for material in self.pointer.materials:

View File

@ -33,5 +33,8 @@ class BlMetaball(BlDatablock):
data = dumper.dump(pointer)
return data
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.metaballs.get(self.data['name'])

View File

@ -328,8 +328,8 @@ class BlObject(BlDatablock):
return data
def resolve_dependencies(self):
deps = super().resolve_dependencies()
def resolve_deps_implementation(self):
deps = []
# Avoid Empty case
if self.pointer.data:

View File

@ -65,7 +65,7 @@ class BlScene(BlDatablock):
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
deps = []
# child collections

View File

@ -13,13 +13,13 @@ class BlSpeaker(BlDatablock):
bl_automatic_push = True
bl_icon = 'SPEAKER'
def load(self, data, target):
def load_implementation(self, data, target):
utils.dump_anything.load(target, data)
def construct(self, data):
return bpy.data.speakers.new(data["name"])
def dump(self, pointer=None):
def dump_implementation(self, data, pointer=None):
assert(pointer)
dumper = utils.dump_anything.Dumper()
@ -41,6 +41,9 @@ class BlSpeaker(BlDatablock):
return dumper.dump(pointer)
def resolve_deps_implementation(self):
return []
def is_valid(self):
return bpy.data.lattices.get(self.data['name'])

View File

@ -90,7 +90,7 @@ class BlWorld(BlDatablock):
pointer.node_tree, ["links"], 3, data['node_tree'])
return data
def resolve_dependencies(self):
def resolve_deps_implementation(self):
deps = []
if self.pointer.use_nodes:

View File

@ -20,9 +20,11 @@ def _is_dictionnary(v):
def _dump_filter_type(t):
return lambda x: isinstance(x, t)
def _dump_filter_type_by_name(t_name):
return lambda x: t_name == x.__class__.__name__
def _dump_filter_array(array):
# only primitive type array
if not isinstance(array, T.bpy_prop_array):
@ -57,9 +59,11 @@ def _load_filter_array(array):
return False
return True
def _load_filter_color(color):
return color.__class__.__name__ == 'Color'
def _load_filter_default(default):
if default.read() is None:
return False
@ -69,7 +73,6 @@ def _load_filter_default(default):
class Dumper:
def __init__(self):
self.verbose = False
self.depth = 1
@ -94,28 +97,41 @@ class Dumper:
self._dump_identity = (lambda x, depth: x, lambda x, depth: x)
self._dump_ref = (lambda x, depth: x.name, self._dump_object_as_branch)
self._dump_ID = (lambda x, depth: x.name, self._dump_default_as_branch)
self._dump_collection = (self._dump_default_as_leaf, self._dump_collection_as_branch)
self._dump_array = (self._dump_default_as_leaf, self._dump_array_as_branch)
self._dump_matrix = (self._dump_matrix_as_leaf, self._dump_matrix_as_leaf)
self._dump_vector = (self._dump_vector_as_leaf, self._dump_vector_as_leaf)
self._dump_quaternion = (self._dump_quaternion_as_leaf, self._dump_quaternion_as_leaf)
self._dump_default = (self._dump_default_as_leaf, self._dump_default_as_branch)
self._dump_collection = (
self._dump_default_as_leaf, self._dump_collection_as_branch)
self._dump_array = (self._dump_default_as_leaf,
self._dump_array_as_branch)
self._dump_matrix = (self._dump_matrix_as_leaf,
self._dump_matrix_as_leaf)
self._dump_vector = (self._dump_vector_as_leaf,
self._dump_vector_as_leaf)
self._dump_quaternion = (
self._dump_quaternion_as_leaf, self._dump_quaternion_as_leaf)
self._dump_default = (self._dump_default_as_leaf,
self._dump_default_as_branch)
self._dump_color = (self._dump_color_as_leaf, self._dump_color_as_leaf)
def _build_match_elements(self):
self._match_type_bool = (_dump_filter_type(bool), self._dump_identity)
self._match_type_int = (_dump_filter_type(int), self._dump_identity)
self._match_type_float = (_dump_filter_type(float), self._dump_identity)
self._match_type_float = (
_dump_filter_type(float), self._dump_identity)
self._match_type_string = (_dump_filter_type(str), self._dump_identity)
self._match_type_ref = (_dump_filter_type(T.Object), self._dump_ref)
self._match_type_ID = (_dump_filter_type(T.ID), self._dump_ID)
self._match_type_bpy_prop_collection = (_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
self._match_type_bpy_prop_collection = (
_dump_filter_type(T.bpy_prop_collection), self._dump_collection)
self._match_type_array = (_dump_filter_array, self._dump_array)
self._match_type_matrix = (_dump_filter_type(mathutils.Matrix), self._dump_matrix)
self._match_type_vector = (_dump_filter_type(mathutils.Vector), self._dump_vector)
self._match_type_quaternion = (_dump_filter_type(mathutils.Quaternion), self._dump_quaternion)
self._match_type_euler = (_dump_filter_type(mathutils.Euler), self._dump_quaternion)
self._match_type_color = (_dump_filter_type_by_name("Color"), self._dump_color)
self._match_type_matrix = (_dump_filter_type(
mathutils.Matrix), self._dump_matrix)
self._match_type_vector = (_dump_filter_type(
mathutils.Vector), self._dump_vector)
self._match_type_quaternion = (_dump_filter_type(
mathutils.Quaternion), self._dump_quaternion)
self._match_type_euler = (_dump_filter_type(
mathutils.Euler), self._dump_quaternion)
self._match_type_color = (
_dump_filter_type_by_name("Color"), self._dump_color)
self._match_default = (_dump_filter_default, self._dump_default)
def _dump_collection_as_branch(self, collection, depth):
@ -169,7 +185,8 @@ class Dumper:
return False
return True
all_property_names = [p for p in dir(default) if is_valid_property(p) and p != '' and p not in self.exclude_filter]
all_property_names = [p for p in dir(default) if is_valid_property(
p) and p != '' and p not in self.exclude_filter]
dump = {}
for p in all_property_names:
if (self.exclude_filter and p in self.exclude_filter) or\
@ -250,7 +267,8 @@ class Loader:
def load(self, dst_data, src_dumped_data):
self._load_any(
BlenderAPIElement(dst_data, occlude_read_only=self.occlude_read_only),
BlenderAPIElement(
dst_data, occlude_read_only=self.occlude_read_only),
src_dumped_data
)
@ -260,7 +278,6 @@ class Loader:
load_function(any, dump)
return
def _load_identity(self, element, dump):
element.write(dump)
@ -290,13 +307,16 @@ class Loader:
return
for dumped_element in dump.values():
try:
constructor_parameters = [dumped_element[name] for name in constructor[1]]
constructor_parameters = [dumped_element[name]
for name in constructor[1]]
except KeyError:
print("Collection load error, missing parameters.")
continue # TODO handle error
new_element = getattr(element.read(), constructor[0])(*constructor_parameters)
new_element = getattr(element.read(), constructor[0])(
*constructor_parameters)
self._load_any(
BlenderAPIElement(new_element, occlude_read_only=self.occlude_read_only),
BlenderAPIElement(
new_element, occlude_read_only=self.occlude_read_only),
dumped_element
)
@ -356,8 +376,10 @@ class Loader:
return [
(_load_filter_type(T.BoolProperty), self._load_identity),
(_load_filter_type(T.IntProperty), self._load_identity),
(_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix), # before float because bl_rna type of matrix if FloatProperty
(_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector), # before float because bl_rna type of vector if FloatProperty
# before float because bl_rna type of matrix if FloatProperty
(_load_filter_type(mathutils.Matrix, use_bl_rna=False), self._load_matrix),
# before float because bl_rna type of vector if FloatProperty
(_load_filter_type(mathutils.Vector, use_bl_rna=False), self._load_vector),
(_load_filter_type(mathutils.Quaternion, use_bl_rna=False), self._load_quaternion),
(_load_filter_type(mathutils.Euler, use_bl_rna=False), self._load_euler),
(_load_filter_type(T.FloatProperty), self._load_identity),
@ -372,26 +394,12 @@ class Loader:
# Utility functions
def dump(any, depth=1):
dumper = Dumper()
dumper.depath = depth
dumper.depth = depth
return dumper.dump(any)
def dump_datablock(datablock, depth):
if datablock:
dumper = Dumper()
dumper.type_subset = dumper.match_subset_all
dumper.depth = depth
datablock_type = datablock.bl_rna.name
key = "{}/{}".format(datablock_type, datablock.name)
data = dumper.dump(datablock)
return data
def load(dst, src):
loader = Loader()
# loader.match_subset_all = loader.match_subset_all
loader.load(dst, src)
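
Note: with dump_datablock removed, callers go through the module-level dump()/load() helpers or a configured Dumper, as the dump_implementation() methods above do. A small usage sketch; the multi_user.libs import path and the filtered property names are assumptions, and it only runs inside Blender:

# Usage sketch; the import path and the property names in the filter are
# assumptions, and this only runs inside Blender.
import bpy
from multi_user.libs import dump_anything

camera = bpy.data.cameras.new("cam_example")

# One-shot helper at a chosen depth.
flat = dump_anything.dump(camera, 2)

# Or a configured Dumper, as the dump_implementation() methods do.
dumper = dump_anything.Dumper()
dumper.depth = 1
dumper.include_filter = ['name', 'lens', 'sensor_width']
data = dumper.dump(camera)

# load() writes the dumped values back onto a datablock.
dump_anything.load(camera, data)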

View File

@ -126,19 +126,6 @@ def load_dict(src_dict, target):
pass
def dump_datablock(datablock, depth):
if datablock:
dumper = dump_anything.Dumper()
dumper.type_subset = dumper.match_subset_all
dumper.depth = depth
datablock_type = datablock.bl_rna.name
key = "{}/{}".format(datablock_type, datablock.name)
data = dumper.dump(datablock)
return data
def dump_datablock_attibutes(datablock=None, attributes=[], depth=1, dickt=None):
if datablock:
dumper = dump_anything.Dumper()