feat: verbose errors

Swann 2020-03-23 11:04:06 +01:00
parent 8ce53b8413
commit fb0760928e
No known key found for this signature in database
GPG Key ID: B880407E0F5F413E
2 changed files with 23 additions and 20 deletions


@@ -261,18 +261,18 @@ class BlObject(BlDatablock):
             data['vertex_groups'] = vg_data
 
         # SHAPE KEYS
-        pointer_data = pointer.data
-        if hasattr(pointer_data, 'shape_keys') and pointer_data.shape_keys:
+        object_data = pointer.data
+        if hasattr(object_data, 'shape_keys') and object_data.shape_keys:
             dumper = utils.dump_anything.Dumper()
             dumper.depth = 2
             dumper.include_filter = [
                 'reference_key',
                 'use_relative'
             ]
-            data['shape_keys'] = dumper.dump(pointer_data.shape_keys)
-            data['shape_keys']['reference_key'] = pointer_data.shape_keys.reference_key.name
+            data['shape_keys'] = dumper.dump(object_data.shape_keys)
+            data['shape_keys']['reference_key'] = object_data.shape_keys.reference_key.name
             key_blocks = {}
-            for key in pointer_data.shape_keys.key_blocks:
+            for key in object_data.shape_keys.key_blocks:
                 dumper.depth = 3
                 dumper.include_filter = [
                     'name',
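
The first hunk only renames pointer_data to object_data; the shape-key dump itself is unchanged. For readers unfamiliar with the Dumper used here, a minimal standalone sketch of the include_filter idea (MiniDumper and FakeShapeKeys are illustrative stand-ins, not the addon's actual utils.dump_anything API):

class MiniDumper:
    """Illustrative stand-in for utils.dump_anything.Dumper (whitelist only)."""

    def __init__(self):
        self.depth = 1
        self.include_filter = []

    def dump(self, obj):
        # keep only non-dunder attributes, restricted to include_filter if set
        return {
            name: getattr(obj, name)
            for name in dir(obj)
            if not name.startswith("__")
            and (not self.include_filter or name in self.include_filter)
        }


class FakeShapeKeys:
    """Stand-in for object_data.shape_keys."""
    reference_key = "Basis"
    use_relative = True


dumper = MiniDumper()
dumper.depth = 2
dumper.include_filter = ['reference_key', 'use_relative']
print(dumper.dump(FakeShapeKeys()))
# {'reference_key': 'Basis', 'use_relative': True}
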


@@ -93,8 +93,9 @@ def _load_filter_default(default):
 class Dumper:
+    # TODO: support occlude readonly
     def __init__(self):
-        self.verbose = False
+        self.verbose = True
         self.depth = 1
         self.keep_compounds_as_leaves = False
         self.accept_read_only = True
@@ -103,7 +104,6 @@ class Dumper:
         self.type_subset = self.match_subset_all
         self.include_filter = []
         self.exclude_filter = []
-        # self._atomic_types = [] # TODO future option?
 
     def dump(self, any):
         return self._dump_any(any, 0)
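
The default for self.verbose flips to True here; the hunk does not show where the flag is consumed, but a typical use is gating per-attribute diagnostics during a dump. A hedged sketch of that idea only (hypothetical method and field names, not the addon's actual _dump_any):

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)


class VerboseDumperSketch:
    def __init__(self):
        self.verbose = True  # mirrors the new default above

    def dump_attribute(self, obj, name):
        # hypothetical: report every attribute visited when verbose is on
        if self.verbose:
            logger.debug("dumping %s.%s", type(obj).__name__, name)
        return getattr(obj, name, None)


sketch = VerboseDumperSketch()
sketch.dump_attribute(object(), "__class__")
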
@@ -195,7 +195,8 @@ class Dumper:
             if (self.include_filter and p not in self.include_filter):
                 return False
             getattr(default, p)
-        except AttributeError:
+        except AttributeError as err:
+            logger.error(err)
             return False
         if p.startswith("__"):
             return False
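
Previously a failing getattr silently excluded the property; now the AttributeError is logged first. A self-contained sketch of that filter predicate (simplified signature, assuming a module-level logger as in the file above):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


def should_dump(default, p, include_filter=()):
    """Return True if attribute p of default should be dumped."""
    try:
        if include_filter and p not in include_filter:
            return False
        getattr(default, p)              # probe the attribute
    except AttributeError as err:
        logger.error(err)                # new behaviour: surface the failure
        return False
    if p.startswith("__"):
        return False
    return True


class Thing:
    name = "cube"


print(should_dump(Thing(), "name"))       # True
print(should_dump(Thing(), "missing"))    # False, and the error is logged
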
@@ -258,14 +259,15 @@ class BlenderAPIElement:
     def write(self, value):
         # take precaution if property is read-only
-        try:
-            if self.sub_element_name:
-                setattr(self.api_element, self.sub_element_name, value)
-            else:
-                self.api_element = value
-        except AttributeError as err:
-            if not self.occlude_read_only:
-                raise err
+        if self.api_element.is_property_readonly(self.sub_element_name) and \
+                self.occlude_read_only:
+            logger.error(f"Skipping {self.sub_element_name}")
+            return
+        if self.sub_element_name:
+            setattr(self.api_element, self.sub_element_name, value)
+        else:
+            self.api_element = value
 
     def extend(self, element_name):
         return BlenderAPIElement(self.read(), element_name)
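
write() no longer relies on catching AttributeError around setattr; it asks the RNA struct up front whether the property is read-only (bpy_struct.is_property_readonly, as called in the hunk) and, when occlude_read_only is set, logs and skips the write. The sketch below reproduces the guard with a plain stand-in object so it runs outside Blender (FakeRNAStruct and the free write function are illustrative, not the addon's class):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


class FakeRNAStruct:
    """Stand-in for a bpy struct: 'type' is read-only, 'location' is writable."""
    location = (0.0, 0.0, 0.0)
    type = 'MESH'

    def is_property_readonly(self, name):
        return name == 'type'


def write(api_element, sub_element_name, value, occlude_read_only=False):
    # skip read-only properties up front instead of catching AttributeError later
    if api_element.is_property_readonly(sub_element_name) and occlude_read_only:
        logger.error("Skipping %s", sub_element_name)
        return
    setattr(api_element, sub_element_name, value)


obj = FakeRNAStruct()
write(obj, 'location', (1.0, 2.0, 3.0))               # succeeds
write(obj, 'type', 'CURVE', occlude_read_only=True)   # logged and skipped
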
@@ -282,7 +284,7 @@ class BlenderAPIElement:
 class Loader:
     def __init__(self):
         self.type_subset = self.match_subset_all
-        self.occlude_read_only = True
+        self.occlude_read_only = False
         self.order = ['*']
 
     def load(self, dst_data, src_dumped_data):
@@ -307,6 +309,7 @@ class Loader:
             for i in range(len(dump)):
                 element.read()[i] = dump[i]
         except AttributeError as err:
+            logger.error(err)
             if not self.occlude_read_only:
                 raise err
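
Two related changes interact here: occlude_read_only now defaults to False, and the array-load path logs the AttributeError before deciding whether to re-raise, so with the new default failures are both logged and propagated. A compact standalone sketch of that pattern (list-like stand-in instead of element.read(); the caught exceptions are broadened here only so the demo can be triggered outside Blender):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


def load_array(target, dump, occlude_read_only=False):
    """Copy dumped values into an existing array-like property."""
    try:
        for i in range(len(dump)):
            target[i] = dump[i]
    except (AttributeError, IndexError, TypeError) as err:
        logger.error(err)             # always report the failure
        if not occlude_read_only:     # new default: re-raise so callers see it
            raise


values = [0.0, 0.0, 0.0]
load_array(values, [1.0, 2.0, 3.0])
print(values)                                                # [1.0, 2.0, 3.0]
load_array((0.0, 0.0, 0.0), [1.0], occlude_read_only=True)   # logged, swallowed
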
@@ -397,11 +400,11 @@ class Loader:
         for k in self._ordered_keys(dump.keys()):
             v = dump[k]
             if not hasattr(default.read(), k):
-                continue  # TODO error handling
+                logger.error(f"Load default, skipping {default} : {k}")
             try:
                 self._load_any(default.extend(k), v)
-            except Exception as e:
-                logger.error(e)
+            except Exception as err:
+                logger.error(f"Cannot load {k}: {err}")
 
     @property
     def match_subset_all(self):
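
Finally, the default-loading loop no longer skips missing attributes silently, and the per-key exception message now names the key that failed. Note that replacing continue with a log call means the loop falls through to the try block even when the attribute is absent. The sketch below mirrors that shape with a plain setattr target (illustrative names, not the addon's actual load method):

import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


def load_mapping(target, dump):
    """Assign each dumped key onto target, reporting failures per key."""
    for k, v in dump.items():
        if not hasattr(target, k):
            # new: report the missing attribute instead of skipping silently
            logger.error(f"Load default, skipping {target} : {k}")
        try:
            setattr(target, k, v)
        except Exception as err:
            # new: the message names the offending key
            logger.error(f"Cannot load {k}: {err}")


class Slotted:
    __slots__ = ("name",)

    def __init__(self):
        self.name = ""


obj = Slotted()
load_mapping(obj, {"name": "cube", "extra": 1})
print(obj.name)   # 'cube'; 'extra' was reported twice (missing, then failed setattr)
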