clean: materials dump
parent 8262fb9d4e
commit 1f0f44fdbf
bl_curve.py

@@ -27,6 +27,7 @@ from .dump_anything import (Dumper, Loader,
                             np_load_collection,
                             np_dump_collection)
 from .bl_datablock import get_datablock_from_uuid
+from .bl_material import dump_materials_slots, load_materials_slots
 
 SPLINE_BEZIER_POINT = [
     # "handle_left_type",
@@ -173,18 +174,9 @@ class BlCurve(BlDatablock):
             loader.load(new_spline, spline)
 
         # MATERIAL SLOTS
-        target.materials.clear()
-        for mat_uuid, mat_name in data["material_list"]:
-            mat_ref = None
-            if mat_uuid is not None:
-                mat_ref = get_datablock_from_uuid(mat_uuid, None)
-            else:
-                mat_ref = bpy.data.materials.get(mat_name, None)
-
-            if mat_ref is None:
-                raise Exception("Material doesn't exist")
-
-            target.materials.append(mat_ref)
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, target.materials)
 
     def _dump_implementation(self, data, instance=None):
         assert(instance)
@@ -229,8 +221,7 @@ class BlCurve(BlDatablock):
         elif isinstance(instance, T.Curve):
             data['type'] = 'CURVE'
 
-        data['material_list'] = [(m.uuid, m.name)
-                                 for m in instance.materials if m]
+        data['materials'] = dump_materials_slots(instance.materials)
 
         return data
 
bl_material.py

@@ -29,7 +29,7 @@ from .bl_datablock import BlDatablock, get_datablock_from_uuid
 NODE_SOCKET_INDEX = re.compile('\[(\d*)\]')
 
 
-def load_node(node_data, node_tree):
+def load_node(node_data: dict, node_tree: bpy.types.ShaderNodeTree):
     """ Load a node into a node_tree from a dict
 
     :arg node_data: dumped node data
@@ -70,9 +70,11 @@ def load_node(node_data, node_tree):
             try:
                 outputs[idx].default_value = output
             except:
-                logging.warning(f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})")
+                logging.warning(
+                    f"Node {target_node.name} output {outputs[idx].name} parameter not supported, skipping ({e})")
         else:
-            logging.warning(f"Node {target_node.name} output length mismatch.")
+            logging.warning(
+                f"Node {target_node.name} output length mismatch.")
 
 
 def load_links(links_data, node_tree):
@@ -117,7 +119,7 @@ def dump_links(links):
     return links_data
 
 
-def dump_node(node):
+def dump_node(node: bpy.types.ShaderNode) -> dict:
     """ Dump a single node to a dict
 
     :arg node: target node
@@ -155,7 +157,7 @@ def dump_node(node):
 
     dumped_node = node_dumper.dump(node)
 
-    dump_io_needed = (node.type not in ['REROUTE','OUTPUT_MATERIAL'])
+    dump_io_needed = (node.type not in ['REROUTE', 'OUTPUT_MATERIAL'])
 
     if dump_io_needed:
         io_dumper = Dumper()
@@ -166,13 +168,15 @@ def dump_node(node):
         dumped_node['inputs'] = []
         for idx, inpt in enumerate(node.inputs):
             if hasattr(inpt, 'default_value'):
-                dumped_node['inputs'].append(io_dumper.dump(inpt.default_value))
+                dumped_node['inputs'].append(
+                    io_dumper.dump(inpt.default_value))
 
     if hasattr(node, 'outputs'):
         dumped_node['outputs'] = []
         for idx, output in enumerate(node.outputs):
             if hasattr(output, 'default_value'):
-                dumped_node['outputs'].append(io_dumper.dump(output.default_value))
+                dumped_node['outputs'].append(
+                    io_dumper.dump(output.default_value))
 
     if hasattr(node, 'color_ramp'):
         ramp_dumper = Dumper()
@@ -223,7 +227,7 @@ def dump_shader_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
     return node_tree_data
 
 
-def dump_node_tree_sockets(sockets: bpy.types.Collection)->dict:
+def dump_node_tree_sockets(sockets: bpy.types.Collection) -> dict:
     """ dump sockets of a shader_node_tree
 
     :arg target_node_tree: target node_tree
@@ -244,6 +248,7 @@ def dump_node_tree_sockets(sockets: bpy.types.Collection)->dict:
 
     return sockets_data
 
+
 def load_node_tree_sockets(sockets: bpy.types.Collection,
                            sockets_data: dict):
     """ load sockets of a shader_node_tree
@@ -263,7 +268,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
     # Check for new sockets
     for idx, socket_data in enumerate(sockets_data):
         try:
             checked_socket = sockets[idx]
             if checked_socket.name != socket_data[0]:
                 checked_socket.name = socket_data[0]
         except Exception:
@@ -271,7 +276,7 @@ def load_node_tree_sockets(sockets: bpy.types.Collection,
             s['uuid'] = socket_data[2]
 
 
-def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.ShaderNodeTree)->dict:
+def load_shader_node_tree(node_tree_data: dict, target_node_tree: bpy.types.ShaderNodeTree) -> dict:
     """Load a shader node_tree from dumped data
 
     :arg node_tree_data: dumped node data
@@ -291,7 +296,7 @@ def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.Shader
 
     if 'outputs' in node_tree_data:
         socket_collection = getattr(target_node_tree, 'outputs')
-        load_node_tree_sockets(socket_collection,node_tree_data['outputs'])
+        load_node_tree_sockets(socket_collection, node_tree_data['outputs'])
 
     # Load nodes
     for node in node_tree_data["nodes"]:
@@ -305,8 +310,11 @@ def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.Shader
 
 
 def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
-    has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
-    has_node_group = lambda node : (hasattr(node,'node_tree') and node.node_tree)
+    def has_image(node): return (
+        node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
+
+    def has_node_group(node): return (
+        hasattr(node, 'node_tree') and node.node_tree)
 
     deps = []
 
@@ -319,6 +327,40 @@ def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
     return deps
 
 
+def dump_materials_slots(materials: bpy.types.bpy_prop_collection) -> list:
+    """ Dump material slots collection
+
+    :arg materials: material slots collection to dump
+    :type materials: bpy.types.bpy_prop_collection
+    :return: list of tuples (mat_uuid, mat_name)
+    """
+    return [(m.uuid, m.name) for m in materials if m]
+
+
+def load_materials_slots(src_materials: list, dst_materials: bpy.types.bpy_prop_collection):
+    """ Load material slots
+
+    :arg src_materials: dumped material collection (ex: object.materials)
+    :type src_materials: list of tuples (uuid, name)
+    :arg dst_materials: target material collection pointer
+    :type dst_materials: bpy.types.bpy_prop_collection
+    """
+    # MATERIAL SLOTS
+    dst_materials.clear()
+
+    for mat_uuid, mat_name in src_materials:
+        mat_ref = None
+        if mat_uuid is not None:
+            mat_ref = get_datablock_from_uuid(mat_uuid, None)
+        else:
+            mat_ref = bpy.data.materials.get(mat_name, None)
+
+        if mat_ref is None:
+            raise Exception(f"Material {mat_name} doesn't exist")
+
+        dst_materials.append(mat_ref)
+
+
 class BlMaterial(BlDatablock):
     bl_id = "materials"
     bl_class = bpy.types.Material
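For orientation, a minimal usage sketch of the two helpers added above. It assumes Blender's bpy module and the uuid custom property that this addon's replication layer attaches to material datablocks; the mesh name is illustrative and the flat import is hypothetical (inside the addon they are imported as "from .bl_material import dump_materials_slots, load_materials_slots").

import bpy

from bl_material import dump_materials_slots, load_materials_slots  # hypothetical flat import

mesh = bpy.data.meshes['Cube']  # illustrative datablock name

# Dump: a list of (uuid, name) tuples, with empty slots skipped.
dumped = dump_materials_slots(mesh.materials)

# Load: clears the slot collection, then re-appends each material, resolving
# by uuid first and falling back to a name lookup in bpy.data.materials;
# raises if neither lookup finds the material.
load_materials_slots(dumped, mesh.materials)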
bl_mesh.py

@@ -26,6 +26,7 @@ from .dump_anything import Dumper, Loader, np_load_collection_primitives, np_dum
 from replication.constants import DIFF_BINARY
 from replication.exception import ContextError
 from .bl_datablock import BlDatablock, get_datablock_from_uuid
+from .bl_material import dump_materials_slots, load_materials_slots
 
 VERTICE = ['co']
 
@@ -69,19 +70,9 @@ class BlMesh(BlDatablock):
         loader.load(target, data)
 
         # MATERIAL SLOTS
-        target.materials.clear()
-
-        for mat_uuid, mat_name in data["material_list"]:
-            mat_ref = None
-            if mat_uuid is not None:
-                mat_ref = get_datablock_from_uuid(mat_uuid, None)
-            else:
-                mat_ref = bpy.data.materials.get(mat_name, None)
-
-            if mat_ref is None:
-                raise Exception("Material doesn't exist")
-
-            target.materials.append(mat_ref)
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, target.materials)
 
         # CLEAR GEOMETRY
         if target.vertices:
@@ -172,9 +163,8 @@ class BlMesh(BlDatablock):
                 data['vertex_colors'][color_map.name] = {}
                 data['vertex_colors'][color_map.name]['data'] = np_dump_collection_primitive(color_map.data, 'color')
 
-        # Fix material index
-        data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
-
+        # Materials
+        data['materials'] = dump_materials_slots(instance.materials)
         return data
 
     def _resolve_deps_implementation(self):
bl_volume.py

@@ -22,7 +22,7 @@ from pathlib import Path
 
 from .dump_anything import Loader, Dumper
 from .bl_datablock import BlDatablock, get_datablock_from_uuid
-
+from .bl_material import dump_materials_slots, load_materials_slots
 
 class BlVolume(BlDatablock):
     bl_id = "volumes"
@@ -40,19 +40,9 @@ class BlVolume(BlDatablock):
         loader.load(target.display, data['display'])
 
         # MATERIAL SLOTS
-        target.materials.clear()
-
-        for mat_uuid, mat_name in data["material_list"]:
-            mat_ref = None
-            if mat_uuid is not None:
-                mat_ref = get_datablock_from_uuid(mat_uuid, None)
-            else:
-                mat_ref = bpy.data.materials.get(mat_name, None)
-
-            if mat_ref is None:
-                raise Exception("Material doesn't exist")
-
-            target.materials.append(mat_ref)
+        src_materials = data.get('materials', None)
+        if src_materials:
+            load_materials_slots(src_materials, target.materials)
 
     def _construct(self, data):
         return bpy.data.volumes.new(data["name"])
@@ -78,7 +68,7 @@ class BlVolume(BlDatablock):
         data['display'] = dumper.dump(instance.display)
 
         # Fix material index
-        data['material_list'] = [(m.uuid, m.name) for m in instance.materials if m]
+        data['materials'] = dump_materials_slots(instance.materials)
 
         return data
 