feat: attribute dump function
refactor: cleanup utils
parent 922538dc3a
commit 79ba63ce85
@@ -23,6 +23,8 @@ import numpy as np
 from enum import Enum
 
 from .. import utils
+from ..libs.dump_anything import (
+    Dumper, Loader, dump_collection_attr, load_collection_attr)
 from .bl_datablock import BlDatablock
 
 
@@ -65,7 +67,7 @@ ENUM_KEY_TYPE = [
     'JITTER']
 
 
-#TODO: Automatic enum and numpy dump and loading
+# TODO: Automatic enum and numpy dump and loading
 
 
 def dump_fcurve(fcurve, use_numpy=True):
@@ -84,32 +86,27 @@ def dump_fcurve(fcurve, use_numpy=True):
     }
 
     if use_numpy:
-        keyframes_count = len(fcurve.keyframe_points)
+        points = fcurve.keyframe_points
+        fcurve_data['keyframes_count'] = len(fcurve.keyframe_points)
 
-        k_amplitude = np.empty(keyframes_count, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('amplitude', k_amplitude)
-        k_co = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('co', k_co)
-        k_back = np.empty(keyframes_count, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('back', k_back)
-        k_handle_left = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('handle_left', k_handle_left)
-        k_handle_right = np.empty(keyframes_count*2, dtype=np.float64)
-        fcurve.keyframe_points.foreach_get('handle_right', k_handle_right)
-
-        fcurve_data['amplitude'] = k_amplitude.tobytes()
-        fcurve_data['co'] = k_co.tobytes()
-        fcurve_data['back'] = k_back.tobytes()
-        fcurve_data['handle_left'] = k_handle_left.tobytes()
-        fcurve_data['handle_right'] = k_handle_right.tobytes()
-        fcurve_data['easing'] = [ENUM_EASING_TYPE.index(
-            p.easing) for p in fcurve.keyframe_points]
-        fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(
-            p.handle_left_type) for p in fcurve.keyframe_points]
-        fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(
-            p.handle_right_type) for p in fcurve.keyframe_points]
-        fcurve_data['type'] = [ENUM_KEY_TYPE.index(
-            p.type) for p in fcurve.keyframe_points]
-        fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(
-            p.interpolation) for p in fcurve.keyframe_points]
+        fcurve_data['amplitude'] = dump_collection_attr(points, 'amplitude')
+        fcurve_data['co'] = dump_collection_attr(points, 'co')
+        fcurve_data['back'] = dump_collection_attr(points, 'back')
+        fcurve_data['handle_left'] = dump_collection_attr(points, 'handle_left')
+        fcurve_data['handle_right'] = dump_collection_attr(points, 'handle_right')
+
+        fcurve_data['easing'] = [ENUM_EASING_TYPE.index(p.easing) for p in fcurve.keyframe_points]
+        fcurve_data['handle_left_type'] = [ENUM_HANDLE_TYPE.index(p.handle_left_type) for p in fcurve.keyframe_points]
+        fcurve_data['handle_right_type'] = [ENUM_HANDLE_TYPE.index(p.handle_right_type) for p in fcurve.keyframe_points]
+        fcurve_data['type'] = [ENUM_KEY_TYPE.index(p.type) for p in fcurve.keyframe_points]
+        fcurve_data['interpolation'] = [ENUM_INTERPOLATION_TYPE.index(p.interpolation) for p in fcurve.keyframe_points]
 
-    else: # Legacy method
+    else:  # Legacy method
         dumper = utils.dump_anything.Dumper()
         fcurve_data["keyframe_points"] = []
 
@@ -120,6 +117,7 @@ def dump_fcurve(fcurve, use_numpy=True):
 
     return fcurve_data
 
+
 def load_fcurve(fcurve_data, fcurve):
     """ Load a dumped fcurve
 
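The hunk above swaps five blocks of hand-rolled NumPy buffering for dump_collection_attr. A minimal standalone sketch of the pattern being factored out — counts and values are invented for illustration:

    import numpy as np

    keyframes_count = 4
    # 'co' is a 2-vector (frame, value) per keyframe, so the flat buffer
    # holds keyframes_count * 2 floats.
    k_co = np.empty(keyframes_count * 2, dtype=np.float64)
    # Inside Blender this line would be:
    #   fcurve.keyframe_points.foreach_get('co', k_co)
    k_co[:] = [0, 1, 10, 2, 20, 3, 30, 4]       # stand-in frame/value pairs
    payload = k_co.tobytes()                     # what lands in fcurve_data['co']
    restored = np.frombuffer(payload, dtype=np.float64)
    assert (restored == k_co).all()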
@@ -137,23 +135,13 @@ def load_fcurve(fcurve_data, fcurve):
         keyframe_points.remove(keyframe_points[0], fast=True)
 
     if use_numpy:
-        k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64)
-
-        keyframe_count = len(k_amplitude)
-
-        k_co = np.frombuffer(fcurve_data['co'], dtype=np.float64)
-        k_back = np.frombuffer(fcurve_data['back'], dtype=np.float64)
-        k_amplitude = np.frombuffer(fcurve_data['amplitude'], dtype=np.float64)
-        k_handle_left= np.frombuffer(fcurve_data['handle_left'], dtype=np.float64)
-        k_handle_right= np.frombuffer(fcurve_data['handle_right'], dtype=np.float64)
-
-        keyframe_points.add(keyframe_count)
-
-        keyframe_points.foreach_set('co', k_co)
-        keyframe_points.foreach_set('back', k_back)
-        keyframe_points.foreach_set('amplitude', k_amplitude)
-        keyframe_points.foreach_set('handle_left', k_handle_left)
-        keyframe_points.foreach_set('handle_right', k_handle_right)
+        keyframe_points.add(fcurve_data['keyframes_count'])
+
+        load_collection_attr(keyframe_points, 'co', fcurve_data['co'])
+        load_collection_attr(keyframe_points, 'back', fcurve_data['back'])
+        load_collection_attr(keyframe_points, 'amplitude', fcurve_data['amplitude'])
+        load_collection_attr(keyframe_points, 'handle_left', fcurve_data['handle_left'])
+        load_collection_attr(keyframe_points, 'handle_right', fcurve_data['handle_right'])
 
         for index, point in enumerate(keyframe_points):
             point.type = ENUM_KEY_TYPE[fcurve_data['type'][index]]
@@ -195,7 +183,6 @@ def load_fcurve(fcurve_data, fcurve):
     fcurve.update()
 
 
-
 class BlAction(BlDatablock):
     bl_id = "actions"
     bl_class = bpy.types.Action
@@ -213,9 +200,11 @@ class BlAction(BlDatablock):
             dumped_array_index = dumped_fcurve["dumped_array_index"]
 
             # create fcurve if needed
-            fcurve = target.fcurves.find(dumped_data_path, index=dumped_array_index)
+            fcurve = target.fcurves.find(
+                dumped_data_path, index=dumped_array_index)
             if fcurve is None:
-                fcurve = target.fcurves.new(dumped_data_path, index=dumped_array_index)
+                fcurve = target.fcurves.new(
+                    dumped_data_path, index=dumped_array_index)
 
             load_fcurve(dumped_fcurve, fcurve)
         target.id_root = data['id_root']
@@ -243,6 +232,4 @@ class BlAction(BlDatablock):
         for fcurve in self.pointer.fcurves:
            data["fcurves"].append(dump_fcurve(fcurve, use_numpy=True))
 
-
        return data
-
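Taken together, dump_fcurve and load_fcurve now form a symmetric pair. A hypothetical round trip inside Blender, assuming an action with at least one keyframed F-Curve exists (the names below are illustrative, not part of the commit):

    import bpy

    action = bpy.data.actions[0]
    src = action.fcurves[0]
    dumped = dump_fcurve(src, use_numpy=True)   # byte buffers plus enum index lists

    copy = bpy.data.actions.new("copy")
    dst = copy.fcurves.new(src.data_path, index=src.array_index)
    load_fcurve(dumped, dst)                    # re-adds points, bulk-loads attributes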
@@ -24,6 +24,23 @@ from ..libs.replication.replication.data import ReplicatedDatablock
 from ..libs.replication.replication.constants import (UP, DIFF_BINARY)
 from ..libs import dump_anything
 
 
+def has_action(target):
+    """ Check if the target datablock has actions
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.action)
+
+
+def has_driver(target):
+    """ Check if the target datablock is driven
+    """
+    return (hasattr(target, 'animation_data')
+            and target.animation_data
+            and target.animation_data.drivers)
+
+
 def dump_driver(driver):
     dumper = dump_anything.Dumper()
     dumper.depth = 6
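Note that has_action and has_driver return the last operand of the `and` chain rather than a strict bool, which is fine for truth testing. A plain-Python sketch of the guard behaviour, using a stub object instead of a real datablock:

    class _Stub:
        animation_data = None    # datablock with no animation assigned

    print(bool(has_action(_Stub())))   # False: short-circuits on the None
    print(bool(has_action(object())))  # False: the hasattr() check fails first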
@@ -112,12 +129,12 @@ class BlDatablock(ReplicatedDatablock):
     def _dump(self, pointer=None):
         data = {}
         # Dump animation data
-        if utils.has_action(pointer):
+        if has_action(pointer):
             dumper = utils.dump_anything.Dumper()
             dumper.include_filter = ['action']
             data['animation_data'] = dumper.dump(pointer.animation_data)
 
-        if utils.has_driver(pointer):
+        if has_driver(pointer):
             dumped_drivers = {'animation_data': {'drivers': []}}
             for driver in pointer.animation_data.drivers:
                 dumped_drivers['animation_data']['drivers'].append(
@@ -162,7 +179,7 @@ class BlDatablock(ReplicatedDatablock):
     def resolve_deps(self):
         dependencies = []
 
-        if utils.has_action(self.pointer):
+        if has_action(self.pointer):
             dependencies.append(self.pointer.animation_data.action)
 
         if not self.is_library:
@@ -20,7 +20,10 @@ import bpy
 import mathutils
 import numpy as np
 
-from ..libs import dump_anything
+from ..libs.dump_anything import (Dumper,
+                                  Loader,
+                                  dump_collection_attr,
+                                  load_collection_attr)
 from .bl_datablock import BlDatablock
 
 # GPencil data api is structured as it follows:
@@ -36,7 +39,7 @@ def dump_stroke(stroke):
 
     assert(stroke)
 
-    dumper = dump_anything.Dumper()
+    dumper = Dumper()
     dumper.include_filter = [
         "aspect",
         "display_mode",
@@ -56,23 +59,12 @@ def dump_stroke(stroke):
     # Stroke points
     p_count = len(stroke.points)
     dumped_stroke['p_count'] = p_count
 
-    p_co = np.empty(p_count*3, dtype=np.float64)
-    stroke.points.foreach_get('co', p_co)
-    dumped_stroke['p_co'] = p_co.tobytes()
-
-    p_pressure = np.empty(p_count, dtype=np.float64)
-    stroke.points.foreach_get('pressure', p_pressure)
-    dumped_stroke['p_pressure'] = p_pressure.tobytes()
-
-    p_strength = np.empty(p_count, dtype=np.float64)
-    stroke.points.foreach_get('strength', p_strength)
-    dumped_stroke['p_strength'] = p_strength.tobytes()
+    dumped_stroke['p_co'] = dump_collection_attr(stroke.points, 'co')
+    dumped_stroke['p_pressure'] = dump_collection_attr(stroke.points, 'pressure')
+    dumped_stroke['p_strength'] = dump_collection_attr(stroke.points, 'strength')
 
     if bpy.app.version[1] >= 83:  # new in blender 2.83
-        p_vertex_color = np.empty(p_count*4, dtype=np.float64)
-        stroke.points.foreach_get('vertex_color', p_vertex_color)
-        dumped_stroke['p_vertex_color'] = p_vertex_color.tobytes()
+        dumped_stroke['p_vertex_color'] = dump_collection_attr(stroke.points, 'vertex_color')
 
     # TODO: uv_factor, uv_rotation
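One caveat with the gate above: bpy.app.version[1] >= 83 inspects only the minor version, so it would misfire once the major version moves past 2 (Blender 3.0 has a minor version of 0). A sturdier check, offered as a suggestion rather than what the commit does, compares the whole version tuple:

    import bpy

    if bpy.app.version >= (2, 83, 0):
        # vertex_color exists on grease pencil stroke points from 2.83 on
        pass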
@@ -89,21 +81,17 @@ def load_stroke(stroke_data, stroke):
     """
     assert(stroke and stroke_data)
 
-    dump_anything.load(stroke, stroke_data)
-
-    p_co = np.frombuffer(stroke_data["p_co"], dtype=np.float64)
-    p_pressure = np.frombuffer(stroke_data["p_pressure"], dtype=np.float64)
-    p_strength = np.frombuffer(stroke_data["p_strength"], dtype=np.float64)
+    loader = Loader()
+    loader.load(stroke, stroke_data)
 
     stroke.points.add(stroke_data["p_count"])
 
-    stroke.points.foreach_set('co', p_co)
-    stroke.points.foreach_set('pressure', p_pressure)
-    stroke.points.foreach_set('strength', p_strength)
+    load_collection_attr(stroke.points, 'co', stroke_data["p_co"])
+    load_collection_attr(stroke.points, 'pressure', stroke_data["p_pressure"])
+    load_collection_attr(stroke.points, 'strength', stroke_data["p_strength"])
 
     if "p_vertex_color" in stroke_data:
-        p_vertex_color = np.frombuffer(stroke_data["p_vertex_color"], dtype=np.float64)
-        stroke.points.foreach_set('vertex_color', p_vertex_color)
+        load_collection_attr(stroke.points, 'vertex_color', stroke_data["p_vertex_color"])
 
 
 def dump_frame(frame):
@@ -156,7 +144,7 @@ def dump_layer(layer):
 
     assert(layer)
 
-    dumper = dump_anything.Dumper()
+    dumper = Dumper()
 
     dumper.include_filter = [
         'info',
@@ -212,7 +200,8 @@ def load_layer(layer_data, layer):
     :type layer: bpy.types.GPencilFrame
     """
     # TODO: take existing data in account
-    dump_anything.load(layer, layer_data)
+    loader = Loader()
+    loader.load(layer, layer_data)
 
     for frame_data in layer_data["frames"]:
         target_frame = layer.frames.new(frame_data['frame_number'])
@@ -254,13 +243,16 @@ class BlGpencil(BlDatablock):
 
             load_layer(layer_data, target_layer)
 
-        dump_anything.load(target, data)
+        loader = Loader()
+        loader.load(target, data)
 
 
 
     def dump_implementation(self, data, pointer=None):
         assert(pointer)
-        data = dump_anything.dump(pointer, 2)
+        dumper = Dumper()
+        dumper.depth = 2
+        data = dumper.dump(pointer)
 
         data['layers'] = {}
 
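The stroke pair is symmetric in the same way as the F-Curve one. A hypothetical round trip, assuming a grease pencil frame `frame` with at least one stroke; note that load_stroke() adds the points itself from p_count:

    src = frame.strokes[0]      # `frame` is a bpy.types.GPencilFrame (assumed)
    data = dump_stroke(src)

    dst = frame.strokes.new()
    load_stroke(data, dst)      # Loader for simple props, bulk load for co/pressure/strength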
@@ -102,7 +102,6 @@ class BlMaterial(BlDatablock):
-            dump_anything.load(
-                target.grease_pencil, data['grease_pencil'])
+            utils.load_dict(data['grease_pencil'], target.grease_pencil)
 
         elif data["use_nodes"]:
             if target.node_tree is None:
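utils.load_dict (kept in utils, shown in the last file of this commit) is essentially a guarded setattr loop, so the material's grease pencil settings are copied key by key. A standalone sketch of the behaviour, with invented keys:

    class _Settings:
        pass

    settings = _Settings()
    src = {'color': (0.0, 0.0, 0.0), 'show_stroke_direction': False}  # hypothetical keys
    for key in src:
        setattr(settings, key, src[key])   # what load_dict does, minus the try/except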
@@ -22,7 +22,7 @@ import mathutils
 import logging
 import numpy as np
 
-from .. import utils
+from ..libs.dump_anything import Dumper, Loader, load_collection_attr, dump_collection_attr
 from ..libs.replication.replication.constants import DIFF_BINARY
 from .bl_datablock import BlDatablock
 
@@ -44,7 +44,8 @@ class BlMesh(BlDatablock):
 
     def load_implementation(self, data, target):
         if not target or not target.is_editmode:
-            utils.dump_anything.load(target, data)
+            loader = Loader()
+            loader.load(target, data)
 
             # MATERIAL SLOTS
             target.materials.clear()
@@ -56,59 +57,33 @@ class BlMesh(BlDatablock):
             if target.vertices:
                 target.clear_geometry()
 
-            # VERTS
-            vertices = np.frombuffer(data["verts_co"], dtype=np.float64)
-            vert_count = int(len(vertices)/3)
-            target.vertices.add(vert_count)
-
-            # EDGES
-            egdes_vert = np.frombuffer(data["egdes_vert"], dtype=np.int)
-            edge_count = data["egdes_count"]
-            target.edges.add(edge_count)
-
-            # LOOPS
-            loops_count = data["loop_count"]
-            target.loops.add(loops_count)
-
-            loop_vertex_index = np.frombuffer(
-                data['loop_vertex_index'], dtype=np.int)
-            loop_normal = np.frombuffer(data['loop_normal'], dtype=np.float64)
-
-            # POLY
-            poly_count = data["poly_count"]
-            target.polygons.add(poly_count)
-
-            poly_loop_start = np.frombuffer(
-                data["poly_loop_start"], dtype=np.int)
-            poly_loop_total = np.frombuffer(
-                data["poly_loop_total"], dtype=np.int)
-            poly_smooth = np.frombuffer(data["poly_smooth"], dtype=np.bool)
-            poly_mat = np.frombuffer(data["poly_mat"], dtype=np.int)
+            target.vertices.add(data["vertex_count"])
+            target.edges.add(data["egdes_count"])
+            target.loops.add(data["loop_count"])
+            target.polygons.add(data["poly_count"])
 
             # LOADING
-            target.vertices.foreach_set('co', vertices)
-            target.edges.foreach_set("vertices", egdes_vert)
+            load_collection_attr(target.vertices, 'co', data["verts_co"])
+            load_collection_attr(target.edges, "vertices", data["egdes_vert"])
 
             if data['use_customdata_edge_crease']:
-                edges_crease = np.frombuffer(data["edges_crease"], dtype=np.float64)
-                target.edges.foreach_set("crease", edges_crease)
+                load_collection_attr(
+                    target.edges, "crease", data["edges_crease"])
 
             if data['use_customdata_edge_bevel']:
-                edges_bevel = np.frombuffer(data["edges_bevel"], dtype=np.float64)
-                target.edges.foreach_set("bevel_weight", edges_bevel)
+                load_collection_attr(
+                    target.edges, "bevel_weight", data["edges_bevel"])
 
-            target.loops.foreach_set("vertex_index", loop_vertex_index)
-            target.loops.foreach_set("normal", loop_normal)
-            target.polygons.foreach_set("loop_total", poly_loop_total)
-            target.polygons.foreach_set("loop_start", poly_loop_start)
-            target.polygons.foreach_set("use_smooth", poly_smooth)
-            target.polygons.foreach_set("material_index", poly_mat)
+            load_collection_attr(
+                target.loops, 'vertex_index', data["loop_vertex_index"])
+            load_collection_attr(target.loops, 'normal', data["loop_normal"])
+            load_collection_attr(
+                target.polygons, 'loop_total', data["poly_loop_total"])
+            load_collection_attr(
+                target.polygons, 'loop_start', data["poly_loop_start"])
+            load_collection_attr(
+                target.polygons, 'use_smooth', data["poly_smooth"])
+            load_collection_attr(
+                target.polygons, 'material_index', data["poly_mat"])
 
             # UV Layers
             for layer in data['uv_layers']:
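The load side trusts that every buffer was written with the dtype the dump side chose; np.frombuffer does no validation and will happily reinterpret bits (which is what the `# TODO: check types match` in load_collection_attr, further down, is about). A pure NumPy sketch of what a mismatch looks like:

    import numpy as np

    buf = np.arange(1, 5, dtype=np.int64).tobytes()
    print(np.frombuffer(buf, dtype=np.int64))    # [1 2 3 4]
    print(np.frombuffer(buf, dtype=np.float64))  # same bytes reread as float64: denormal garbage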
@@ -122,13 +97,12 @@ class BlMesh(BlDatablock):
         target.validate()
         target.update()
 
-
     def dump_implementation(self, data, pointer=None):
         assert(pointer)
 
         mesh = pointer
 
-        dumper = utils.dump_anything.Dumper()
+        dumper = Dumper()
         dumper.depth = 1
         dumper.include_filter = [
             'name',
@@ -140,73 +114,43 @@ class BlMesh(BlDatablock):
 
         data = dumper.dump(mesh)
 
         # TODO: selective dump
         # VERTICES
-        vert_count = len(mesh.vertices)
-
-        verts_co = np.empty(vert_count*3, dtype=np.float64)
-        mesh.vertices.foreach_get('co', verts_co)
-        data["verts_co"] = verts_co.tobytes()
+        data["vertex_count"] = len(mesh.vertices)
+        data["verts_co"] = dump_collection_attr(mesh.vertices, 'co')
 
         # EDGES
-        edge_count = len(mesh.edges)
-
-        edges_vert = np.empty(edge_count*2, dtype=np.int)
-        mesh.edges.foreach_get('vertices', edges_vert)
-        data["egdes_vert"] = edges_vert.tobytes()
+        data["egdes_count"] = len(mesh.edges)
+        data["egdes_vert"] = dump_collection_attr(mesh.edges, 'vertices')
 
         if mesh.use_customdata_edge_crease:
-            edges_crease = np.empty(edge_count, dtype=np.float64)
-            mesh.edges.foreach_get('crease', edges_crease)
-            data["edges_crease"] = edges_crease.tobytes()
+            data["edges_crease"] = dump_collection_attr(mesh.edges, 'crease')
 
         if mesh.use_customdata_edge_bevel:
-            edges_bevel = np.empty(edge_count, dtype=np.float64)
-            mesh.edges.foreach_get('bevel_weight', edges_bevel)
-            data["edges_bevel"] = edges_bevel.tobytes()
+            data["edges_bevel"] = dump_collection_attr(
+                mesh.edges, 'edges_bevel')
 
         # POLYGONS
-        poly_count = len(mesh.polygons)
-        data["poly_count"] = poly_count
-
-        poly_mat = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("material_index", poly_mat)
-        data["poly_mat"] = poly_mat.tobytes()
-
-        poly_loop_start = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("loop_start", poly_loop_start)
-        data["poly_loop_start"] = poly_loop_start.tobytes()
-
-        poly_loop_total = np.empty(poly_count, dtype=np.int)
-        mesh.polygons.foreach_get("loop_total", poly_loop_total)
-        data["poly_loop_total"] = poly_loop_total.tobytes()
-
-        poly_smooth = np.empty(poly_count, dtype=np.bool)
-        mesh.polygons.foreach_get("use_smooth", poly_smooth)
-        data["poly_smooth"] = poly_smooth.tobytes()
+        data["poly_count"] = len(mesh.polygons)
+        data["poly_mat"] = dump_collection_attr(
+            mesh.polygons, 'material_index')
+        data["poly_loop_start"] = dump_collection_attr(
+            mesh.polygons, 'loop_start')
+        data["poly_loop_total"] = dump_collection_attr(
+            mesh.polygons, 'loop_total')
+        data["poly_smooth"] = dump_collection_attr(mesh.polygons, 'use_smooth')
 
         # LOOPS
-        loop_count = len(mesh.loops)
-        data["loop_count"] = loop_count
-
-        loop_normal = np.empty(loop_count*3, dtype=np.float64)
-        mesh.loops.foreach_get("normal", loop_normal)
-        data["loop_normal"] = loop_normal.tobytes()
-
-        loop_vertex_index = np.empty(loop_count, dtype=np.int)
-        mesh.loops.foreach_get("vertex_index", loop_vertex_index)
-        data["loop_vertex_index"] = loop_vertex_index.tobytes()
+        data["loop_count"] = len(mesh.loops)
+        data["loop_normal"] = dump_collection_attr(mesh.loops, 'normal')
+        data["loop_vertex_index"] = dump_collection_attr(
+            mesh.loops, 'vertex_index')
 
         # UV Layers
         data['uv_layers'] = {}
         for layer in mesh.uv_layers:
             data['uv_layers'][layer.name] = {}
 
-            uv_layer = np.empty(len(layer.data)*2, dtype=np.float64)
-            layer.data.foreach_get("uv", uv_layer)
-
-            data['uv_layers'][layer.name]['data'] = uv_layer.tobytes()
+            data['uv_layers'][layer.name]['data'] = dump_collection_attr(
+                layer.data, 'uv')
 
         # Fix material index
         m_list = []
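One thing to flag in the bevel branch above: dump_collection_attr(mesh.edges, 'edges_bevel') passes the dictionary key where the RNA attribute name belongs. Mesh edges expose the property as bevel_weight (it is spelled that way in the removed code and on the load side), so the intended call was presumably:

    data["edges_bevel"] = dump_collection_attr(mesh.edges, 'bevel_weight')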
@@ -20,9 +20,68 @@ import logging
 import bpy
 import bpy.types as T
 import mathutils
+import numpy as np
 
 logger = logging.getLogger(__name__)
 
+BPY_TO_NUMPY_TYPES = {
+    'FLOAT': np.float,
+    'INT': np.int,
+    'BOOL': np.bool
+}
+
+
+def dump_collection_attr(collection, attribute):
+    """ Dump a collection attribute as a sequence
+
+    !!! warning
+        Only works with int, float and bool attributes
+
+    :arg collection: target collection
+    :type collection: bpy.types.CollectionProperty
+    :arg attribute: target attribute
+    :type attribute: str
+    :return: numpy byte buffer
+    """
+    attr_infos = collection[0].bl_rna.properties.get(attribute)
+
+    assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
+
+    size = sum(attr_infos.array_dimensions) if attr_infos.is_array else 1
+
+    dumped_sequence = np.zeros(
+        len(collection)*size,
+        dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type))
+
+    collection.foreach_get(attribute, dumped_sequence)
+
+    return dumped_sequence.tobytes()
+
+
+def load_collection_attr(collection, attribute, sequence):
+    """ Load a collection attribute from a bytes sequence
+
+    !!! warning
+        Only works with int, float and bool attributes
+
+    :arg collection: target collection
+    :type collection: bpy.types.CollectionProperty
+    :arg attribute: target attribute
+    :type attribute: str
+    """
+    attr_infos = collection[0].bl_rna.properties.get(attribute)
+
+    assert(attr_infos.type in ['FLOAT', 'INT', 'BOOLEAN'])
+
+    # TODO: check types match
+    collection.foreach_set(
+        attribute,
+        np.frombuffer(sequence, dtype=BPY_TO_NUMPY_TYPES.get(attr_infos.type)))
+
+
 def remove_items_from_dict(d, keys, recursive=False):
     copy = dict(d)
     for k in keys:
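These two helpers are the heart of the commit. The element count comes from array_dimensions: for a 1-D array property like vertex 'co' that tuple is (3, 0, 0), so sum() gives 3; for a genuinely multi-dimensional property (say a 4x4 matrix, (4, 4, 0)) the sum (8) would undershoot the element count (16), so the helper only holds for scalars and flat vectors. A hypothetical round trip inside Blender, assuming at least one mesh exists:

    import bpy

    mesh = bpy.data.meshes[0]
    buf = dump_collection_attr(mesh.vertices, 'co')   # len(vertices) * 3 float64s as bytes

    clone = mesh.copy()
    load_collection_attr(clone.vertices, 'co', buf)   # bulk foreach_set from the buffer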
@@ -94,6 +153,7 @@ def _load_filter_default(default):
 
 class Dumper:
     # TODO: support occlude readonly
+    # TODO: use foreach_set/get on collection compatible properties
     def __init__(self):
         self.verbose = True
         self.depth = 1
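A forward-compatibility note on the mapping this module introduces: np.float, np.int and np.bool are deprecated aliases of the Python built-ins (NumPy 1.20 starts warning, 1.24 removes them). An equivalent mapping that survives newer NumPy, offered as a suggestion only:

    BPY_TO_NUMPY_TYPES = {
        'FLOAT': float,   # NumPy promotes this to float64, same as np.float did
        'INT': int,
        'BOOL': bool
    }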
@@ -325,7 +385,6 @@ class Loader:
             T.ParticleSettingsTextureSlot: (CONSTRUCTOR_ADD, []),
             T.Modifier: (CONSTRUCTOR_NEW, ["name", "type"]),
             T.Constraint: (CONSTRUCTOR_NEW, ["type"]),
-            # T.VertexGroup: (CONSTRUCTOR_NEW, ["name"], True),
         }
 
         destructors = {
@@ -15,14 +15,28 @@
 #
 # ##### END GPL LICENSE BLOCK #####
 
 
+import random
 import logging
 import bpy
+import string
 
 from . import utils, bl_types, environment, addon_updater_ops, presence
 
 logger = logging.getLogger(__name__)
 
+
+def randomColor():
+    """Generate a random color """
+    r = random.random()
+    v = random.random()
+    b = random.random()
+    return [r, v, b]
+
+
+def random_string_digits(stringLength=6):
+    """Generate a random string of letters and digits """
+    lettersAndDigits = string.ascii_letters + string.digits
+    return ''.join(random.choices(lettersAndDigits, k=stringLength))
+
+
 class ReplicatedDatablock(bpy.types.PropertyGroup):
     type_name: bpy.props.StringProperty()
@@ -43,12 +57,12 @@ class SessionPrefs(bpy.types.AddonPreferences):
         default="127.0.0.1")
     username: bpy.props.StringProperty(
         name="Username",
-        default="user_{}".format(utils.random_string_digits())
+        default=f"user_{random_string_digits()}"
     )
     client_color: bpy.props.FloatVectorProperty(
         name="client_instance_color",
         subtype='COLOR',
-        default=utils.randomColor())
+        default=randomColor())
     port: bpy.props.IntProperty(
         name="port",
         description='Distant host port',
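Worth knowing about both defaults above (true before and after this commit): bpy.props defaults are evaluated once, when the class body executes at registration, not per instance. So randomColor() and random_string_digits() each run a single time, and every preferences instance shares that one value until the user edits it. The effect in plain Python:

    import random

    def randomColor():
        return [random.random(), random.random(), random.random()]

    DEFAULT_COLOR = randomColor()   # runs once at definition time, like a property default
    # every later "instance" sees the same DEFAULT_COLOR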
@@ -19,8 +19,6 @@
 import json
 import logging
 import os
-import random
-import string
 import sys
 import time
 from uuid import uuid4
@@ -35,17 +33,6 @@ from .libs import dump_anything
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.WARNING)
 
-def has_action(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.action)
-
-
-def has_driver(target):
-    return (hasattr(target, 'animation_data')
-            and target.animation_data
-            and target.animation_data.drivers)
-
-
 def find_from_attr(attr_name, attr_value, list):
     for item in list:
@@ -73,19 +60,6 @@ def get_datablock_users(datablock):
     return users
 
 
-def random_string_digits(stringLength=6):
-    """Generate a random string of letters and digits """
-    lettersAndDigits = string.ascii_letters + string.digits
-    return ''.join(random.choices(lettersAndDigits, k=stringLength))
-
-
-def randomColor():
-    r = random.random()
-    v = random.random()
-    b = random.random()
-    return [r, v, b]
-
-
 def clean_scene():
     for type_name in dir(bpy.data):
         try:
@@ -96,76 +70,10 @@ def clean_scene():
             continue
 
 
-def revers(d):
-    l = []
-    for i in d:
-        l.append(i)
-
-    return l[::-1]
-
-
-def get_armature_edition_context(armature):
-
-    override = {}
-    # Set correct area
-    for area in bpy.data.window_managers[0].windows[0].screen.areas:
-        if area.type == 'VIEW_3D':
-            override = bpy.context.copy()
-            override['area'] = area
-            break
-
-    # Set correct armature settings
-    override['window'] = bpy.data.window_managers[0].windows[0]
-    override['screen'] = bpy.data.window_managers[0].windows[0].screen
-    override['mode'] = 'EDIT_ARMATURE'
-    override['active_object'] = armature
-    override['selected_objects'] = [armature]
-
-    for o in bpy.data.objects:
-        if o.data == armature:
-            override['edit_object'] = o
-            break
-
-    return override
-
-
 def get_selected_objects(scene, active_view_layer):
     return [obj.uuid for obj in scene.objects if obj.select_get(view_layer=active_view_layer)]
 
 
 def load_dict(src_dict, target):
     try:
         for item in src_dict:
             # attr =
             setattr(target, item, src_dict[item])
 
     except Exception as e:
         logger.error(e)
         pass
 
 
-def dump_datablock_attibutes(datablock=None, attributes=[], depth=1, dickt=None):
-    if datablock:
-        dumper = dump_anything.Dumper()
-        dumper.type_subset = dumper.match_subset_all
-        dumper.depth = depth
-
-        datablock_type = datablock.bl_rna.name
-
-        data = {}
-
-        if dickt:
-            data = dickt
-        for attr in attributes:
-            try:
-                data[attr] = dumper.dump(getattr(datablock, attr))
-            except:
-                pass
-
-        return data
-
-
 def resolve_from_id(id, optionnal_type=None):
     for category in dir(bpy.data):
         root = getattr(bpy.data, category)