feat: attribute dump function

refactor: cleanup utils
Swann 2020-03-30 14:31:35 +02:00
parent 922538dc3a
commit 79ba63ce85
No known key found for this signature in database
GPG Key ID: B880407E0F5F413E
8 changed files with 210 additions and 290 deletions

View File

@@ -23,6 +23,8 @@ import numpy as np
 from enum import Enum
 from .. import utils
+from ..libs.dump_anything import (
+    Dumper, Loader, dump_collection_attr, load_collection_attr)
 from .bl_datablock import BlDatablock
@@ -65,7 +67,7 @@ ENUM_KEY_TYPE = [
     'JITTER']

-#TODO: Automatic enum and numpy dump and loading
+# TODO: Automatic enum and numpy dump and loading
 def dump_fcurve(fcurve, use_numpy=True):
@@ -84,32 +86,27 @@ def dump_fcurve(fcurve, use_numpy=True):
     }

     if use_numpy:
-        keyframes_count = len(fcurve.keyframe_points)
-
-        k_amplitude = np.empty(keyframes_count, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('amplitude', k_amplitude)
-        k_co = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('co', k_co)
-        k_back = np.empty(keyframes_count, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('back', k_back)
-        k_handle_left = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('handle_left', k_handle_left)
-        k_handle_right = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('handle_right', k_handle_right)
-
-        fcurve_data['amplitude'] = k_amplitude.tobytes()
-        fcurve_data['co'] = k_co.tobytes()
-        fcurve_data['back'] = k_back.tobytes()
-        fcurve_data['handle_left'] = k_handle_left.tobytes()
-        fcurve_data['handle_right'] = k_handle_right.tobytes()
-
-        fcurve_data['easing'] = [ENUM_EASING_TYPE.index(p.easing) for p in fcurve.keyframe_points]
-        fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(p.handle_left_type) for p in fcurve.keyframe_points]
-        fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(p.handle_right_type) for p in fcurve.keyframe_points]
-        fcurve_data['type'] = [ENUM_KEY_TYPE.index(p.type) for p in fcurve.keyframe_points]
-        fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(p.interpolation) for p in fcurve.keyframe_points]
+        points = fcurve.keyframe_points
+        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
+        fcurve_data['amplitude'] = dump_collection_attr(points, 'amplitude')
+        fcurve_data['co'] = dump_collection_attr(points, 'co')
+        fcurve_data['back'] = dump_collection_attr(points, 'back')
+        fcurve_data['handle_left'] = dump_collection_attr(points, 'handle_left')
+        fcurve_data['handle_right'] = dump_collection_attr(points, 'handle_right')
+
+        fcurve_data['easing'] = [ENUM_EASING_TYPE.index(
+            p.easing) for p in fcurve.keyframe_points]
+        fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(
+            p.handle_left_type) for p in fcurve.keyframe_points]
+        fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(
+            p.handle_right_type) for p in fcurve.keyframe_points]
+        fcurve_data['type'] = [ENUM_KEY_TYPE.index(
+            p.type) for p in fcurve.keyframe_points]
+        fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(
+            p.interpolation) for p in fcurve.keyframe_points]
     else:  # Legacy method
         dumper = utils.dump_anything.Dumper()
         fcurve_data["keyframe_points"] = []
@@ -120,6 +117,7 @@ def dump_fcurve(fcurve, use_numpy=True):
     return fcurve_data

+
 def load_fcurve(fcurve_data, fcurve):
     """ Load a dumped fcurve
@@ -129,31 +127,21 @@ def load_fcurve(fcurve_data, fcurve):
     :type fcurve: bpy.types.FCurve
     """
     use_numpy = fcurve_data.get('use_numpy')

     keyframe_points = fcurve.keyframe_points

     # Remove all keyframe points
     for i in range(len(keyframe_points)):
         keyframe_points.remove(keyframe_points[0], fast=True)

     if use_numpy:
-        k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64)
-        keyframe_count = len(k_amplitude)
-
-        k_co = np.frombuffer(fcurve_data['co'], dtype=np.float64)
-        k_back = np.frombuffer(fcurve_data['back'], dtype=np.float64)
-        k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64)
-        k_handle_left = np.frombuffer(fcurve_data['handle_left'], dtype=np.float64)
-        k_handle_right = np.frombuffer(fcurve_data['handle_right'], dtype=np.float64)
-
-        keyframe_points.add(keyframe_count)
-
-        keyframe_points.foreach_set('co', k_co)
-        keyframe_points.foreach_set('back', k_back)
-        keyframe_points.foreach_set('amplitude', k_amplitude)
-        keyframe_points.foreach_set('handle_left', k_handle_left)
-        keyframe_points.foreach_set('handle_right', k_handle_right)
+        keyframe_points.add(fcurve_data['keyframes_count'])
+        load_collection_attr(keyframe_points, 'co', fcurve_data['co'])
+        load_collection_attr(keyframe_points, 'back', fcurve_data['back'])
+        load_collection_attr(keyframe_points, 'amplitude', fcurve_data['amplitude'])
+        load_collection_attr(keyframe_points, 'handle_left', fcurve_data['handle_left'])
+        load_collection_attr(keyframe_points, 'handle_right', fcurve_data['handle_right'])

         for index, point in enumerate(keyframe_points):
             point.type = ENUM_KEY_TYPE[fcurve_data['type'][index]]
@@ -162,7 +150,7 @@ def load_fcurve(fcurve_data, fcurve):
             point.handle_right_type = ENUM_HANDLE_TYPE[fcurve_data['handle_right_type'][index]]
             point.interpolation = ENUM_INTERPOLATION_TYPE[fcurve_data['interpolation'][index]]
     else:
         # paste dumped keyframes
         for dumped_keyframe_point in fcurve_data["keyframe_points"]:
             if dumped_keyframe_point['type'] == '':
@@ -193,7 +181,6 @@ def load_fcurve(fcurve_data, fcurve):
     ]

     fcurve.update()
-

 class BlAction(BlDatablock):
@@ -213,9 +200,11 @@ class BlAction(BlDatablock):
             dumped_array_index = dumped_fcurve["dumped_array_index"]

             # create fcurve if needed
-            fcurve = target.fcurves.find(dumped_data_path, index=dumped_array_index)
+            fcurve = target.fcurves.find(
+                dumped_data_path, index=dumped_array_index)
             if fcurve is None:
-                fcurve = target.fcurves.new(dumped_data_path, index=dumped_array_index)
+                fcurve = target.fcurves.new(
+                    dumped_data_path, index=dumped_array_index)

             load_fcurve(dumped_fcurve, fcurve)

         target.id_root = data['id_root']
@@ -243,6 +232,4 @@ class BlAction(BlDatablock):
         for fcurve in self.pointer.fcurves:
             data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))

         return data
-
-
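The fcurve changes above collapse the per-attribute foreach_get/np.empty/tobytes boilerplate into one dump_collection_attr call per attribute. A minimal numpy-only sketch of the underlying byte round trip (runnable outside Blender; the array contents are illustrative, not taken from the commit):

    import numpy as np

    # Dump: flatten keyframe coordinates (x, y per key) into raw bytes,
    # the same representation stored in fcurve_data['co'] above.
    co = np.array([1.0, 0.5, 2.0, 0.75, 3.0, 1.0], dtype=np.float64)
    co_bytes = co.tobytes()

    # Load: rebuild the flat float64 sequence from the byte buffer,
    # which is what load_collection_attr feeds to foreach_set.
    restored = np.frombuffer(co_bytes, dtype=np.float64)
    assert np.array_equal(restored, co)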

View File

@@ -22,7 +22,24 @@ import mathutils
 from .. import utils
 from ..libs.replication.replication.data import ReplicatedDatablock
 from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
 from ..libs import dump_anything
+
+
+def has_action(target):
+    """ Check if the target datablock has actions
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.action)
+
+
+def has_driver(target):
+    """ Check if the target datablock is driven
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.drivers)
+

 def dump_driver(driver):
     dumper = dump_anything.Dumper()
@@ -112,12 +129,12 @@ class BlDatablock(ReplicatedDatablock):
     def _dump(self, pointer=None):
         data = {}
         # Dump animation data
-        if utils.has_action(pointer):
+        if has_action(pointer):
             dumper = utils.dump_anything.Dumper()
             dumper.include_filter = ['action']
             data['animation_data'] = dumper.dump(pointer.animation_data)

-        if utils.has_driver(pointer):
+        if has_driver(pointer):
             dumped_drivers = {'animation_data': {'drivers': []}}
             for driver in pointer.animation_data.drivers:
                 dumped_drivers['animation_data']['drivers'].append(
@@ -162,7 +179,7 @@ class BlDatablock(ReplicatedDatablock):
     def resolve_deps(self):
         dependencies = []

-        if utils.has_action(self.pointer):
+        if has_action(self.pointer):
             dependencies.append(self.pointer.animation_data.action)

         if not self.is_library:

View File

@@ -20,7 +20,10 @@ import bpy
 import mathutils
 import numpy as np

-from ..libs import dump_anything
+from ..libs.dump_anything import (Dumper,
+                                  Loader,
+                                  dump_collection_attr,
+                                  load_collection_attr)
 from .bl_datablock import BlDatablock

 # GPencil data api is structured as it follow:
@@ -36,7 +39,7 @@ def dump_stroke(stroke):
     assert(stroke)

-    dumper = dump_anything.Dumper()
+    dumper = Dumper()
     dumper.include_filter = [
         "aspect",
         "display_mode",
@@ -56,23 +59,12 @@ def dump_stroke(stroke):
     # Stoke points
     p_count = len(stroke.points)
     dumped_stroke['p_count'] = p_count
-
-    p_co = np.empty(p_count*3, dtype=np.float64)
-    stroke.points.foreach_get('co', p_co)
-    dumped_stroke['p_co'] = p_co.tobytes()
-
-    p_pressure = np.empty(p_count, dtype=np.float64)
-    stroke.points.foreach_get('pressure', p_pressure)
-    dumped_stroke['p_pressure'] = p_pressure.tobytes()
-
-    p_strength = np.empty(p_count, dtype=np.float64)
-    stroke.points.foreach_get('strength', p_strength)
-    dumped_stroke['p_strength'] = p_strength.tobytes()
+    dumped_stroke['p_co'] = dump_collection_attr(stroke.points, 'co')
+    dumped_stroke['p_pressure'] = dump_collection_attr(stroke.points, 'pressure')
+    dumped_stroke['p_strength'] = dump_collection_attr(stroke.points, 'strength')

     if bpy.app.version[1] >= 83:  # new in blender 2.83
-        p_vertex_color = np.empty(p_count*4, dtype=np.float64)
-        stroke.points.foreach_get('vertex_color', p_vertex_color)
-        dumped_stroke['p_vertex_color'] = p_vertex_color.tobytes()
+        dumped_stroke['p_vertex_color'] = dump_collection_attr(stroke.points, 'vertex_color')

     # TODO: uv_factor, uv_rotation
@@ -89,21 +81,17 @@ def load_stroke(stroke_data, stroke):
     """
     assert(stroke and stroke_data)

-    dump_anything.load(stroke, stroke_data)
-
-    p_co = np.frombuffer(stroke_data["p_co"], dtype=np.float64)
-    p_pressure = np.frombuffer(stroke_data["p_pressure"], dtype=np.float64)
-    p_strength = np.frombuffer(stroke_data["p_strength"], dtype=np.float64)
+    loader = Loader()
+    loader.load(stroke, stroke_data)

     stroke.points.add(stroke_data["p_count"])
-    stroke.points.foreach_set('co', p_co)
-    stroke.points.foreach_set('pressure', p_pressure)
-    stroke.points.foreach_set('strength', p_strength)
+    load_collection_attr(stroke.points, 'co', stroke_data["p_co"])
+    load_collection_attr(stroke.points, 'pressure', stroke_data["p_pressure"])
+    load_collection_attr(stroke.points, 'strength', stroke_data["p_strength"])

     if "p_vertex_color" in stroke_data:
-        p_vertex_color = np.frombuffer(stroke_data["p_vertex_color"], dtype=np.float64)
-        stroke.points.foreach_set('vertex_color', p_vertex_color)
+        load_collection_attr(stroke.points, 'vertex_color', stroke_data["p_vertex_color"])


 def dump_frame(frame):
@@ -156,7 +144,7 @@ def dump_layer(layer):
     assert(layer)

-    dumper = dump_anything.Dumper()
+    dumper = Dumper()
     dumper.include_filter = [
         'info',
@@ -212,7 +200,8 @@ def load_layer(layer_data, layer):
     :type layer: bpy.types.GPencilFrame
     """
     # TODO: take existing data in account
-    dump_anything.load(layer, layer_data)
+    loader = Loader()
+    loader.load(layer, layer_data)

     for frame_data in layer_data["frames"]:
         target_frame = layer.frames.new(frame_data['frame_number'])
@@ -253,14 +242,17 @@ class BlGpencil(BlDatablock):
             # target_layer.clear()

             load_layer(layer_data, target_layer)

-        dump_anything.load(target, data)
+        loader = Loader()
+        loader.load(target, data)

     def dump_implementation(self, data, pointer=None):
         assert(pointer)
-        data = dump_anything.dump(pointer, 2)
+        dumper = Dumper()
+        dumper.depth = 2
+        data = dumper.dump(pointer)

         data['layers'] = {}

View File

@@ -102,7 +102,6 @@ class BlMaterial(BlDatablock):
             dump_anything.load(
                 target.grease_pencil, data['grease_pencil'])
-            utils.load_dict(data['grease_pencil'], target.grease_pencil)

         elif data["use_nodes"]:
             if target.node_tree is None:

View File

@@ -22,7 +22,7 @@ import mathutils
 import logging
 import numpy as np

-from .. import utils
+from ..libs.dump_anything import Dumper, Loader, load_collection_attr, dump_collection_attr
 from ..libs.replication.replication.constants import DIFF_BINARY
 from .bl_datablock import BlDatablock
@@ -44,8 +44,9 @@ class BlMesh(BlDatablock):
     def load_implementation(self, data, target):
         if not target or not target.is_editmode:
-            utils.dump_anything.load(target, data)
+            loader = Loader()
+            loader.load(target, data)

             # MATERIAL SLOTS
             target.materials.clear()
@@ -56,60 +57,34 @@ class BlMesh(BlDatablock):
             if target.vertices:
                 target.clear_geometry()

-            # VERTS
-            vertices = np.frombuffer(data["verts_co"], dtype=np.float64)
-            vert_count = int(len(vertices)/3)
-            target.vertices.add(vert_count)
-
-            # EDGES
-            egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int)
-            edge_count = data["egdes_count"]
-            target.edges.add(edge_count)
-
-            # LOOPS
-            loops_count = data["loop_count"]
-            target.loops.add(loops_count)
-            loop_vertex_index = np.frombuffer(
-                data['loop_vertex_index'], dtype=np.int)
-            loop_normal = np.frombuffer(data['loop_normal'], dtype=np.float64)
-
-            # POLY
-            poly_count = data["poly_count"]
-            target.polygons.add(poly_count)
-            poly_loop_start = np.frombuffer(
-                data["poly_loop_start"], dtype=np.int)
-            poly_loop_total = np.frombuffer(
-                data["poly_loop_total"], dtype=np.int)
-            poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.bool)
-            poly_mat = np.frombuffer(data["poly_mat"], dtype=np.int)
-
-            # LOADING
-            target.vertices.foreach_set('co', vertices)
-            target.edges.foreach_set("vertices", egdes_vert)
+            target.vertices.add(data["vertex_count"])
+            target.edges.add(data["egdes_count"])
+            target.loops.add(data["loop_count"])
+            target.polygons.add(data["poly_count"])
+
+            # LOADING
+            load_collection_attr(target.vertices, 'co', data["verts_co"])
+            load_collection_attr(target.edges, "vertices", data["egdes_vert"])

             if data['use_customdata_edge_crease']:
-                edges_crease = np.frombuffer(data["edges_crease"], dtype=np.float64)
-                target.edges.foreach_set("crease", edges_crease)
+                load_collection_attr(
+                    target.edges, "crease", data["edges_crease"])

             if data['use_customdata_edge_bevel']:
-                edges_bevel = np.frombuffer(data["edges_bevel"], dtype=np.float64)
-                target.edges.foreach_set("bevel_weight", edges_bevel)
-
-            target.loops.foreach_set("vertex_index", loop_vertex_index)
-            target.loops.foreach_set("normal", loop_normal)
-            target.polygons.foreach_set("loop_total", poly_loop_total)
-            target.polygons.foreach_set("loop_start", poly_loop_start)
-            target.polygons.foreach_set("use_smooth", poly_smooth)
-            target.polygons.foreach_set("material_index", poly_mat)
+                load_collection_attr(
+                    target.edges, "bevel_weight", data["edges_bevel"])
+
+            load_collection_attr(
+                target.loops, 'vertex_index', data["loop_vertex_index"])
+            load_collection_attr(target.loops, 'normal', data["loop_normal"])
+            load_collection_attr(
+                target.polygons, 'loop_total', data["poly_loop_total"])
+            load_collection_attr(
+                target.polygons, 'loop_start', data["poly_loop_start"])
+            load_collection_attr(
+                target.polygons, 'use_smooth', data["poly_smooth"])
+            load_collection_attr(
+                target.polygons, 'material_index', data["poly_mat"])

             # UV Layers
             for layer in data['uv_layers']:
                 if layer not in target.uv_layers:
@@ -121,14 +96,13 @@ class BlMesh(BlDatablock):
         target.validate()
         target.update()
-

     def dump_implementation(self, data, pointer=None):
         assert(pointer)

         mesh = pointer

-        dumper = utils.dump_anything.Dumper()
+        dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
             'name',
@@ -140,73 +114,43 @@ class BlMesh(BlDatablock):
         data = dumper.dump(mesh)

+        # TODO: selective dump
         # VERTICES
-        vert_count = len(mesh.vertices)
-
-        verts_co = np.empty(vert_count*3, dtype=np.float64)
-        mesh.vertices.foreach_get('co', verts_co)
-        data["verts_co"] = verts_co.tobytes()
+        data["vertex_count"] = len(mesh.vertices)
+        data["verts_co"] = dump_collection_attr(mesh.vertices, 'co')

         # EDGES
-        edge_count = len(mesh.edges)
-
-        edges_vert = np.empty(edge_count*2, dtype=np.int)
-        mesh.edges.foreach_get('vertices', edges_vert)
-        data["egdes_vert"] = edges_vert.tobytes()
         data["egdes_count"] = len(mesh.edges)
+        data["egdes_vert"] = dump_collection_attr(mesh.edges, 'vertices')

         if mesh.use_customdata_edge_crease:
-            edges_crease = np.empty(edge_count, dtype=np.float64)
-            mesh.edges.foreach_get('crease', edges_crease)
-            data["edges_crease"] = edges_crease.tobytes()
+            data["edges_crease"] = dump_collection_attr(mesh.edges, 'crease')

         if mesh.use_customdata_edge_bevel:
-            edges_bevel = np.empty(edge_count, dtype=np.float64)
-            mesh.edges.foreach_get('bevel_weight', edges_bevel)
-            data["edges_bevel"] = edges_bevel.tobytes()
+            data["edges_bevel"] = dump_collection_attr(
+                mesh.edges, 'bevel_weight')

         # POLYGONS
-        poly_count = len(mesh.polygons)
-        data["poly_count"] = poly_count
-
-        poly_mat = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("material_index", poly_mat)
-        data["poly_mat"] = poly_mat.tobytes()
-
-        poly_loop_start = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("loop_start", poly_loop_start)
-        data["poly_loop_start"] = poly_loop_start.tobytes()
-
-        poly_loop_total = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("loop_total", poly_loop_total)
-        data["poly_loop_total"] = poly_loop_total.tobytes()
-
-        poly_smooth = np.empty(poly_count, dtype=np.bool)
-        mesh.polygons.foreach_get("use_smooth", poly_smooth)
-        data["poly_smooth"] = poly_smooth.tobytes()
+        data["poly_count"] = len(mesh.polygons)
+        data["poly_mat"] = dump_collection_attr(
+            mesh.polygons, 'material_index')
+        data["poly_loop_start"] = dump_collection_attr(
+            mesh.polygons, 'loop_start')
+        data["poly_loop_total"] = dump_collection_attr(
+            mesh.polygons, 'loop_total')
+        data["poly_smooth"] = dump_collection_attr(mesh.polygons, 'use_smooth')

         # LOOPS
-        loop_count = len(mesh.loops)
-        data["loop_count"] = loop_count
-
-        loop_normal = np.empty(loop_count*3, dtype=np.float64)
-        mesh.loops.foreach_get("normal", loop_normal)
-        data["loop_normal"] = loop_normal.tobytes()
-
-        loop_vertex_index = np.empty(loop_count, dtype=np.int)
-        mesh.loops.foreach_get("vertex_index", loop_vertex_index)
-        data["loop_vertex_index"] = loop_vertex_index.tobytes()
+        data["loop_count"] = len(mesh.loops)
+        data["loop_normal"] = dump_collection_attr(mesh.loops, 'normal')
+        data["loop_vertex_index"] = dump_collection_attr(
+            mesh.loops, 'vertex_index')

         # UV Layers
         data['uv_layers'] = {}
         for layer in mesh.uv_layers:
             data['uv_layers'][layer.name] = {}
-
-            uv_layer = np.empty(len(layer.data)*2, dtype=np.float64)
-            layer.data.foreach_get("uv", uv_layer)
-            data['uv_layers'][layer.name]['data'] = uv_layer.tobytes()
+            data['uv_layers'][layer.name]['data'] = dump_collection_attr(
+                layer.data, 'uv')

         # Fix material index
         m_list = []
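The polygon path in the hunks above encodes topology as flat index arrays: loop_vertex_index lists every corner's vertex, while poly_loop_start/poly_loop_total say where each polygon's corners begin and how many it has. A minimal numpy-only sketch of decoding one triangle plus one quad from that layout (values are illustrative, not from the commit):

    import numpy as np

    # Corner -> vertex indices for a triangle followed by a quad.
    loop_vertex_index = np.array([0, 1, 2, 2, 1, 3, 4], dtype=np.int64)
    poly_loop_start = np.array([0, 3], dtype=np.int64)  # first corner of each poly
    poly_loop_total = np.array([3, 4], dtype=np.int64)  # corners per poly

    # Rebuild per-polygon vertex lists from the flat arrays.
    polys = [loop_vertex_index[s:s + t].tolist()
             for s, t in zip(poly_loop_start, poly_loop_total)]
    assert polys == [[0, 1, 2], [2, 1, 3, 4]]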

View File

@@ -20,9 +20,68 @@ import logging
 import bpy
 import bpy.types as T
 import mathutils
+import numpy as np

 logger = logging.getLogger(__name__)

+BPY_TO_NUMPY_TYPES = {
+    'FLOAT': np.float,
+    'INT': np.int,
+    'BOOLEAN': np.bool
+}
+
+
+def dump_collection_attr(collection, attribute):
+    """ Dump a collection attribute as a bytes sequence
+
+    !!! warning
+        Only works with int, float and bool attributes
+
+    :arg collection: target collection
+    :type collection: bpy.types.CollectionProperty
+    :arg attribute: target attribute
+    :type attribute: str
+    :return: numpy byte buffer
+    """
+    attr_infos = collection[0].bl_rna.properties.get(attribute)
+
+    assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
+
+    size = sum(attr_infos.array_dimensions) if attr_infos.is_array else 1
+    dumped_sequence = np.zeros(
+        len(collection)*size,
+        dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type))
+
+    collection.foreach_get(attribute, dumped_sequence)
+
+    return dumped_sequence.tobytes()
+
+
+def load_collection_attr(collection, attribute, sequence):
+    """ Load a collection attribute from a bytes sequence
+
+    !!! warning
+        Only works with int, float and bool attributes
+
+    :arg collection: target collection
+    :type collection: bpy.types.CollectionProperty
+    :arg attribute: target attribute
+    :type attribute: str
+    :arg sequence: raw bytes, as produced by dump_collection_attr
+    :type sequence: bytes
+    """
+    attr_infos = collection[0].bl_rna.properties.get(attribute)
+
+    assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
+
+    # TODO: check types match
+    collection.foreach_set(
+        attribute,
+        np.frombuffer(sequence, dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type)))
+
+
 def remove_items_from_dict(d, keys, recursive=False):
     copy = dict(d)
     for k in keys:
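A short usage sketch for the two helpers added above. It assumes execution inside Blender, that a mesh named 'Cube' exists, and that the addon package is importable as multi_user (an assumption; the diff only shows relative imports):

    import bpy
    # Assumption: the addon package is on the path as `multi_user`.
    from multi_user.libs.dump_anything import (dump_collection_attr,
                                               load_collection_attr)

    source = bpy.data.meshes['Cube']                        # assumed to exist
    co_bytes = dump_collection_attr(source.vertices, 'co')  # raw bytes

    # foreach_set fills existing elements, so allocate before loading.
    target = bpy.data.meshes.new('CubeCopy')
    target.vertices.add(len(source.vertices))
    load_collection_attr(target.vertices, 'co', co_bytes)
    target.update()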
@@ -32,7 +91,7 @@ def remove_items_from_dict(d, keys, recursive=False):
         copy[k] = remove_items_from_dict(copy[k], keys, recursive)
     return copy

 def _is_dictionnary(v):
     return hasattr(v, "items") and callable(v.items)
@@ -94,6 +153,7 @@ def _load_filter_default(default):
 class Dumper:
     # TODO: support occlude readonly
+    # TODO: use foreach_set/get on collection compatible properties
     def __init__(self):
         self.verbose = True
         self.depth = 1
@@ -325,7 +385,6 @@ class Loader:
         T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
         T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
         T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
-        # T.VertexGroup: (CONSTRUCTOR_NEW, ["name"], True),
     }

     destructors = {

View File

@@ -15,14 +15,28 @@
 #
 # ##### END GPL LICENSE BLOCK #####

+import random
 import logging
 import bpy
+import string

 from . import utils, bl_types, environment, addon_updater_ops, presence

 logger = logging.getLogger(__name__)

+
+def randomColor():
+    """Generate a random color """
+    r = random.random()
+    v = random.random()
+    b = random.random()
+    return [r, v, b]
+
+
+def random_string_digits(stringLength=6):
+    """Generate a random string of letters and digits """
+    lettersAndDigits = string.ascii_letters + string.digits
+    return ''.join(random.choices(lettersAndDigits, k=stringLength))
+

 class ReplicatedDatablock(bpy.types.PropertyGroup):
     type_name: bpy.props.StringProperty()
@@ -43,12 +57,12 @@ class SessionPrefs(bpy.types.AddonPreferences):
         default="127.0.0.1")
     username: bpy.props.StringProperty(
         name="Username",
-        default="user_{}".format(utils.random_string_digits())
+        default=f"user_{random_string_digits()}"
     )
     client_color: bpy.props.FloatVectorProperty(
         name="client_instance_color",
         subtype='COLOR',
-        default=utils.randomColor())
+        default=randomColor())
     port: bpy.props.IntProperty(
         name="port",
         description='Distant host port',

View File

@@ -19,8 +19,6 @@
 import json
 import logging
 import os
-import random
-import string
 import sys
 import time
 from uuid import uuid4
@@ -35,17 +33,6 @@ from .libs import dump_anything
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.WARNING)

-
-def has_action(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.action)
-
-
-def has_driver(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.drivers)

 def find_from_attr(attr_name, attr_value, list):
     for item in list:
@@ -73,19 +60,6 @@ def get_datablock_users(datablock):
     return users

-
-def random_string_digits(stringLength=6):
-    """Generate a random string of letters and digits """
-    lettersAndDigits = string.ascii_letters + string.digits
-    return ''.join(random.choices(lettersAndDigits, k=stringLength))
-
-
-def randomColor():
-    r = random.random()
-    v = random.random()
-    b = random.random()
-    return [r, v, b]

 def clean_scene():
     for type_name in dir(bpy.data):
@@ -96,76 +70,10 @@ def clean_scene():
             continue

-
-def revers(d):
-    l = []
-    for i in d:
-        l.append(i)
-
-    return l[::-1]
-
-
-def get_armature_edition_context(armature):
-
-    override = {}
-    # Set correct area
-    for area in bpy.data.window_managers[0].windows[0].screen.areas:
-        if area.type == 'VIEW_3D':
-            override = bpy.context.copy()
-            override['area'] = area
-            break
-
-    # Set correct armature settings
-    override['window'] = bpy.data.window_managers[0].windows[0]
-    override['screen'] = bpy.data.window_managers[0].windows[0].screen
-    override['mode'] = 'EDIT_ARMATURE'
-    override['active_object'] = armature
-    override['selected_objects'] = [armature]
-
-    for o in bpy.data.objects:
-        if o.data == armature:
-            override['edit_object'] = o
-            break
-
-    return override
-

 def get_selected_objects(scene, active_view_layer):
     return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]

-
-def load_dict(src_dict, target):
-    try:
-        for item in src_dict:
-            # attr =
-            setattr(target, item, src_dict[item])
-    except Exception as e:
-        logger.error(e)
-        pass
-
-
-def dump_datablock_attibutes(datablock=None, attributes=[], depth=1, dickt=None):
-    if datablock:
-        dumper = dump_anything.Dumper()
-        dumper.type_subset = dumper.match_subset_all
-        dumper.depth = depth
-
-        datablock_type = datablock.bl_rna.name
-
-        data = {}
-        if dickt:
-            data = dickt
-        for attr in attributes:
-            try:
-                data[attr] = dumper.dump(getattr(datablock, attr))
-            except:
-                pass
-
-        return data
-

 def resolve_from_id(id, optionnal_type=None):
     for category in dir(bpy.data):
         root = getattr(bpy.data, category)