Merge branch 'develop' into 45-vse-support

commit f7e98abb59
@@ -36,6 +36,7 @@ Currently, not all data-block are supported for replication over the wire. The f
| image | ✔️ | |
| mesh | ✔️ | |
| material | ✔️ | |
| node_groups | ❗ | Material only |
| metaball | ✔️ | |
| object | ✔️ | |
| texts | ✔️ | |
@@ -44,7 +44,7 @@ from . import environment


DEPENDENCIES = {
    ("replication", '0.1.8'),
    ("replication", '0.1.9'),
}
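The only change in this hunk is the pin bump from replication 0.1.8 to 0.1.9. As a minimal sketch (the addon's actual environment module is not shown in this diff, so the helper below is an assumption, not its real code), such a pinned pair can be checked with importlib.metadata before a session starts:

from importlib import metadata

DEPENDENCIES = {("replication", '0.1.9')}

def missing_dependencies(deps):
    # Return the (name, version) pairs that are absent or installed at another version.
    missing = []
    for name, wanted in deps:
        try:
            if metadata.version(name) != wanted:
                missing.append((name, wanted))
        except metadata.PackageNotFoundError:
            missing.append((name, wanted))
    return missing

# missing_dependencies(DEPENDENCIES) -> [] once replication 0.1.9 is importable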
@@ -38,7 +38,8 @@ __all__ = [
    'bl_font',
    'bl_sound',
    'bl_file',
    'bl_sequencer'
    'bl_sequencer',
    'bl_node_group'
] # Order here defines execution order

from . import *
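The comma added after 'bl_sequencer' is not cosmetic: with 'bl_node_group' appended on the next line, the missing comma would make Python concatenate the two adjacent string literals into the single bogus name 'bl_sequencerbl_node_group', so neither module would be listed correctly. A two-line check of that language rule:

assert ['bl_sequencer' 'bl_node_group'] == ['bl_sequencerbl_node_group']
assert ['bl_sequencer', 'bl_node_group'] == ['bl_sequencer', 'bl_node_group']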
@@ -21,6 +21,8 @@ import mathutils
import logging
import re

from uuid import uuid4

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock, get_datablock_from_uuid
@@ -40,25 +42,31 @@ def load_node(node_data, node_tree):
    target_node.select = False
    loader.load(target_node, node_data)
    image_uuid = node_data.get('image_uuid', None)
    node_tree_uuid = node_data.get('node_tree_uuid', None)

    if image_uuid and not target_node.image:
        target_node.image = get_datablock_from_uuid(image_uuid, None)

    for idx, inpt in enumerate(node_data["inputs"]):
        if hasattr(target_node.inputs[idx], "default_value"):
            try:
                target_node.inputs[idx].default_value = inpt["default_value"]
            except:
                logging.error(
                    f"Material {inpt.keys()} parameter not supported, skipping")
    if node_tree_uuid:
        target_node.node_tree = get_datablock_from_uuid(node_tree_uuid, None)

    for idx, output in enumerate(node_data["outputs"]):
        if hasattr(target_node.outputs[idx], "default_value"):
            try:
                target_node.outputs[idx].default_value = output["default_value"]
            except:
                logging.error(
                    f"Material {output.keys()} parameter not supported, skipping")
    inputs = node_data.get('inputs')
    if inputs:
        for idx, inpt in enumerate(inputs):
            if hasattr(target_node.inputs[idx], "default_value"):
                try:
                    target_node.inputs[idx].default_value = inpt["default_value"]
                except:
                    logging.error(f"Material input {inpt.keys()} parameter not supported, skipping")

    outputs = node_data.get('outputs')
    if outputs:
        for idx, output in enumerate(outputs):
            if hasattr(target_node.outputs[idx], "default_value"):
                try:
                    target_node.outputs[idx].default_value = output["default_value"]
                except:
                    logging.error(f"Material output {output.keys()} parameter not supported, skipping")


def load_links(links_data, node_tree):
@@ -178,13 +186,126 @@ def dump_node(node):
        dumped_node['mapping'] = curve_dumper.dump(node.mapping)
    if hasattr(node, 'image') and getattr(node, 'image'):
        dumped_node['image_uuid'] = node.image.uuid
    if hasattr(node, 'node_tree') and getattr(node, 'node_tree'):
        dumped_node['node_tree_uuid'] = node.node_tree.uuid
    return dumped_node


def dump_shader_node_tree(node_tree: bpy.types.ShaderNodeTree) -> dict:
    """ Dump a shader node_tree to a dict including links and nodes

        :arg node_tree: dumped shader node tree
        :type node_tree: bpy.types.ShaderNodeTree
        :return: dict
    """
    node_tree_data = {
        'nodes': {node.name: dump_node(node) for node in node_tree.nodes},
        'links': dump_links(node_tree.links),
        'name': node_tree.name,
        'type': type(node_tree).__name__
    }

    for socket_id in ['inputs', 'outputs']:
        socket_collection = getattr(node_tree, socket_id)
        node_tree_data[socket_id] = dump_node_tree_sockets(socket_collection)

    return node_tree_data


def dump_node_tree_sockets(sockets: bpy.types.Collection)->dict:
    """ dump sockets of a shader_node_tree

        :arg target_node_tree: target node_tree
        :type target_node_tree: bpy.types.NodeTree
        :arg socket_id: socket identifer
        :type socket_id: str
        :return: dict
    """
    sockets_data = []
    for socket in sockets:
        try:
            socket_uuid = socket['uuid']
        except Exception:
            socket_uuid = str(uuid4())
            socket['uuid'] = socket_uuid

        sockets_data.append((socket.name, socket.bl_socket_idname, socket_uuid))

    return sockets_data

def load_node_tree_sockets(sockets: bpy.types.Collection,
                           sockets_data: dict):
    """ load sockets of a shader_node_tree

        :arg target_node_tree: target node_tree
        :type target_node_tree: bpy.types.NodeTree
        :arg socket_id: socket identifer
        :type socket_id: str
        :arg socket_data: dumped socket data
        :type socket_data: dict
    """
    # Check for removed sockets
    for socket in sockets:
        if not [s for s in sockets_data if socket['uuid'] == s[2]]:
            sockets.remove(socket)

    # Check for new sockets
    for idx, socket_data in enumerate(sockets_data):
        try:
            checked_socket = sockets[idx]
            if checked_socket.name != socket_data[0]:
                checked_socket.name = socket_data[0]
        except Exception:
            s = sockets.new(socket_data[1], socket_data[0])
            s['uuid'] = socket_data[2]


def load_shader_node_tree(node_tree_data:dict, target_node_tree:bpy.types.ShaderNodeTree)->dict:
    """Load a shader node_tree from dumped data

        :arg node_tree_data: dumped node data
        :type node_tree_data: dict
        :arg target_node_tree: target node_tree
        :type target_node_tree: bpy.types.NodeTree
    """
    # TODO: load only required nodes
    target_node_tree.nodes.clear()

    if not target_node_tree.is_property_readonly('name'):
        target_node_tree.name = node_tree_data['name']

    if 'inputs' in node_tree_data:
        socket_collection = getattr(target_node_tree, 'inputs')
        load_node_tree_sockets(socket_collection, node_tree_data['inputs'])

    if 'outputs' in node_tree_data:
        socket_collection = getattr(target_node_tree, 'outputs')
        load_node_tree_sockets(socket_collection,node_tree_data['outputs'])

    # Load nodes
    for node in node_tree_data["nodes"]:
        load_node(node_tree_data["nodes"][node], target_node_tree)

    # TODO: load only required nodes links
    # Load nodes links
    target_node_tree.links.clear()

    load_links(node_tree_data["links"], target_node_tree)


def get_node_tree_dependencies(node_tree: bpy.types.NodeTree) -> list:
    has_image = lambda node : (node.type in ['TEX_IMAGE', 'TEX_ENVIRONMENT'] and node.image)
    has_node_group = lambda node : (hasattr(node,'node_tree') and node.node_tree)

    return [node.image for node in node_tree.nodes if has_image(node)]
    deps = []

    for node in node_tree.nodes:
        if has_image(node):
            deps.append(node.image)
        elif has_node_group(node):
            deps.append(node.node_tree)

    return deps

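A hedged usage sketch of the new dump/load pair (not part of the commit; it assumes the addon is installed and enabled, runs in Blender's Python console, and 'Material' is the name of any node-based material in the file): dump one node tree to a plain dict and rebuild it inside a fresh node group, which is the same round trip the new BlNodeGroup type performs later in this diff.

import bpy
from multi_user.bl_types.bl_material import (dump_shader_node_tree,
                                             load_shader_node_tree)

src = bpy.data.materials['Material']            # hypothetical material name
dumped = dump_shader_node_tree(src.node_tree)   # plain dict: nodes, links, name, type, sockets

group = bpy.data.node_groups.new(dumped['name'], dumped['type'])  # type is 'ShaderNodeTree'
load_shader_node_tree(dumped, group)            # recreates sockets, nodes and links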
class BlMaterial(BlDatablock):
@@ -215,16 +336,7 @@ class BlMaterial(BlDatablock):
        if target.node_tree is None:
            target.use_nodes = True

        target.node_tree.nodes.clear()

        # Load nodes
        for node in data["node_tree"]["nodes"]:
            load_node(data["node_tree"]["nodes"][node], target.node_tree)

        # Load nodes links
        target.node_tree.links.clear()

        load_links(data["node_tree"]["links"], target.node_tree)
        load_shader_node_tree(data['node_tree'], target.node_tree)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
@@ -288,13 +400,8 @@ class BlMaterial(BlDatablock):
            ]
            data['grease_pencil'] = gp_mat_dumper.dump(instance.grease_pencil)
        elif instance.use_nodes:
            nodes = {}
            data["node_tree"] = {}
            for node in instance.node_tree.nodes:
                nodes[node.name] = dump_node(node)
            data["node_tree"]['nodes'] = nodes
            data['node_tree'] = dump_shader_node_tree(instance.node_tree)

            data["node_tree"]["links"] = dump_links(instance.node_tree.links)
        return data

    def _resolve_deps_implementation(self):
multi_user/bl_types/bl_node_group.py (new file, 47 lines)
@@ -0,0 +1,47 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####


import bpy
import mathutils

from .dump_anything import Dumper, Loader, np_dump_collection, np_load_collection
from .bl_datablock import BlDatablock
from .bl_material import (dump_shader_node_tree,
                          load_shader_node_tree,
                          get_node_tree_dependencies)

class BlNodeGroup(BlDatablock):
    bl_id = "node_groups"
    bl_class = bpy.types.ShaderNodeTree
    bl_delay_refresh = 1
    bl_delay_apply = 1
    bl_automatic_push = True
    bl_check_common = False
    bl_icon = 'NODETREE'

    def _construct(self, data):
        return bpy.data.node_groups.new(data["name"], data["type"])

    def _load_implementation(self, data, target):
        load_shader_node_tree(data, target)

    def _dump_implementation(self, data, instance=None):
        return dump_shader_node_tree(instance)

    def _resolve_deps_implementation(self):
        return get_node_tree_dependencies(self.instance)
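For illustration only (the values below are made up, not taken from the commit): _dump_implementation returns the plain dict produced by dump_shader_node_tree, and _construct feeds its 'name' and 'type' fields straight back into bpy.data.node_groups.new(), so a dumped node group looks roughly like:

dumped = {
    'name': 'MyGroup',
    'type': 'ShaderNodeTree',   # type(node_tree).__name__, a valid node_groups.new() type
    'nodes': {'Group Input': {...}, 'Group Output': {...}},
    'links': [...],
    'inputs': [('Color', 'NodeSocketColor', '9f9c4d1c-...')],     # (name, bl_socket_idname, uuid)
    'outputs': [('Shader', 'NodeSocketShader', '1b2e57aa-...')],
}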
@@ -21,10 +21,8 @@ import mathutils

from .dump_anything import Loader, Dumper
from .bl_datablock import BlDatablock
from .bl_material import (load_links,
                          load_node,
                          dump_node,
                          dump_links,
from .bl_material import (load_shader_node_tree,
                          dump_shader_node_tree,
                          get_node_tree_dependencies)
@@ -48,15 +46,7 @@ class BlWorld(BlDatablock):
        if target.node_tree is None:
            target.use_nodes = True

        target.node_tree.nodes.clear()

        for node in data["node_tree"]["nodes"]:
            load_node(data["node_tree"]["nodes"][node], target.node_tree)

        # Load nodes links
        target.node_tree.links.clear()

        load_links(data["node_tree"]["links"], target.node_tree)
        load_shader_node_tree(data['node_tree'], target.node_tree)

    def _dump_implementation(self, data, instance=None):
        assert(instance)
@@ -70,15 +60,7 @@ class BlWorld(BlDatablock):
        ]
        data = world_dumper.dump(instance)
        if instance.use_nodes:
            data['node_tree'] = {}
            nodes = {}

            for node in instance.node_tree.nodes:
                nodes[node.name] = dump_node(node)

            data["node_tree"]['nodes'] = nodes

            data["node_tree"]['links'] = dump_links(instance.node_tree.links)
            data['node_tree'] = dump_shader_node_tree(instance.node_tree)

        return data
@@ -166,7 +166,8 @@ class SessionStartOperator(bpy.types.Operator):
        # init the factory with supported types
        for type in bl_types.types_to_register():
            type_module = getattr(bl_types, type)
            type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
            name = [e.capitalize() for e in type.split('_')[1:]]
            type_impl_name = 'Bl'+''.join(name)
            type_module_class = getattr(type_module, type_impl_name)

            supported_bl_types.append(type_module_class.bl_id)
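The replaced f-string capitalized only the first word after the 'bl_' prefix, which breaks the class lookup for multi-word modules such as the new bl_node_group; the list-comprehension version joins every capitalized word. A standalone check of both expressions:

type = 'bl_node_group'

old_name = f"Bl{type.split('_')[1].capitalize()}"                        # 'BlNode' (wrong class name)
new_name = 'Bl' + ''.join(e.capitalize() for e in type.split('_')[1:])   # 'BlNodeGroup'

assert old_name == 'BlNode'
assert new_name == 'BlNodeGroup'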
@@ -29,8 +29,9 @@ from .utils import get_preferences, get_expanded_icon
from replication.constants import RP_COMMON
from replication.interface import session

IP_EXPR = re.compile('\d+\.\d+\.\d+\.\d+')

# From https://stackoverflow.com/a/106223
IP_REGEX = re.compile("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile("^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")

def randomColor():
    """Generate a random color """
@@ -53,10 +54,13 @@ def update_panel_category(self, context):


def update_ip(self, context):
    ip = IP_EXPR.search(self.ip)
    ip = IP_REGEX.search(self.ip)
    dns = HOSTNAME_REGEX.search(self.ip)

    if ip:
        self['ip'] = ip.group()
    elif dns:
        self['ip'] = dns.group()
    else:
        logging.error("Wrong IP format")
        self['ip'] = "127.0.0.1"
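A standalone check of the two new patterns (raw strings are used here only to silence Python's invalid-escape warnings; the expressions are otherwise copied from above). Note that HOSTNAME_REGEX also matches dotted numerals, which is why update_ip tests IP_REGEX first:

import re

IP_REGEX = re.compile(r"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
HOSTNAME_REGEX = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")

assert IP_REGEX.search("192.168.1.42")           # octets are range-checked 0-255
assert not IP_REGEX.search("999.168.1.42")       # rejected, unlike the old \d+ pattern
assert HOSTNAME_REGEX.search("my-server.local")  # hostnames are now accepted too
assert HOSTNAME_REGEX.search("192.168.1.42")     # digits also match, hence the IP check runs first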
@@ -458,9 +462,9 @@ class SessionPrefs(bpy.types.AddonPreferences):
            new_db = self.supported_datablocks.add()

            type_module = getattr(bl_types, type)
            type_impl_name = f"Bl{type.split('_')[1].capitalize()}"
            name = [e.capitalize() for e in type.split('_')[1:]]
            type_impl_name = 'Bl'+''.join(name)
            type_module_class = getattr(type_module, type_impl_name)

            new_db.name = type_impl_name
            new_db.type_name = type_impl_name
            new_db.bl_delay_refresh = type_module_class.bl_delay_refresh