2019-05-02 23:58:37 +08:00
|
|
|
import logging
|
2019-05-02 00:51:51 +08:00
|
|
|
import sys
|
2019-04-19 21:46:54 +08:00
|
|
|
from uuid import uuid4
|
2019-05-03 22:20:40 +08:00
|
|
|
import json
|
2019-08-08 21:00:07 +08:00
|
|
|
import os
|
2019-05-02 23:58:37 +08:00
|
|
|
|
|
|
|
import bpy
|
|
|
|
import mathutils
|
|
|
|
|
2019-08-08 21:00:07 +08:00
|
|
|
from . import draw, environment
|
2019-05-02 23:58:37 +08:00
|
|
|
from .libs import dump_anything
|
2019-04-10 23:01:21 +08:00
|
|
|
|
2019-05-15 20:52:45 +08:00
|
|
|
# TODO: replace hardcoded values...
|
2019-08-08 21:00:07 +08:00
|
|
|
BPY_TYPES = {'Image': 'images', 'Texture': 'textures', 'Material': 'materials', 'GreasePencil': 'grease_pencils', 'Curve': 'curves', 'Collection': 'collections', 'Mesh': 'meshes', 'Object': 'objects',
|
|
|
|
'Scene': 'scenes', 'Light': 'lights', 'SunLight': 'lights', 'SpotLight': 'lights', 'AreaLight': 'lights', 'PointLight': 'lights', 'Camera': 'cameras', 'Action': 'actions', 'Armature': 'armatures'}
|
2019-04-11 20:39:31 +08:00
|
|
|
|
2019-04-19 21:46:54 +08:00
|
|
|
logger = logging.getLogger(__name__)
|
2019-08-08 21:00:07 +08:00
|
|
|
logger.setLevel(logging.DEBUG)
|
|
|
|
|
2019-04-17 21:48:20 +08:00
|
|
|
# UTILITY FUNCTIONS
|
2019-08-08 21:00:07 +08:00
|
|
|
|
|
|
|
|
2019-05-04 00:52:49 +08:00
|
|
|
def revers(d):
    """Return the items of iterable *d* as a list, in reverse order."""
    items = list(d)
    items.reverse()
    return items
|
2019-05-02 23:58:37 +08:00
|
|
|
|
2019-05-08 23:06:52 +08:00
|
|
|
|
2019-04-11 20:39:31 +08:00
|
|
|
def refresh_window():
    """Force a redraw of the Blender window.

    Uses the redraw_timer operator with a single DRAW_WIN_SWAP iteration,
    which is the common workaround to refresh the UI from a timer/handler.
    """
    # NOTE: the redundant function-local `import bpy` was removed; bpy is
    # already imported at module level.
    bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
|
2019-04-11 00:01:55 +08:00
|
|
|
|
2019-05-08 23:06:52 +08:00
|
|
|
|
2019-05-03 20:55:18 +08:00
|
|
|
def get_armature_edition_context(armature):
    """Build a context-override dict for editing *armature* edit bones.

    Returns a copy of the current context patched with a VIEW_3D area,
    the first window/screen, EDIT_ARMATURE mode and the object that uses
    *armature* as its data (as 'edit_object').
    """
    override = {}
    # Find a 3D viewport area; the copied context is based on it.
    # NOTE(review): assumes the first window manager/window holds the UI —
    # confirm this holds in background/multi-window sessions.
    for area in bpy.data.window_managers[0].windows[0].screen.areas:
        if area.type == 'VIEW_3D':
            override = bpy.context.copy()
            override['area'] = area
            break

    # Patch in window/screen/mode and the armature as the active selection.
    override['window'] = bpy.data.window_managers[0].windows[0]
    override['screen'] = bpy.data.window_managers[0].windows[0].screen
    override['mode'] = 'EDIT_ARMATURE'
    override['active_object'] = armature
    override['selected_objects'] = [armature]

    # The object owning the armature datablock becomes the edit object.
    for o in bpy.data.objects:
        if o.data == armature:
            override['edit_object'] = o
            break

    return override
|
2019-04-18 21:05:48 +08:00
|
|
|
|
2019-05-08 23:06:52 +08:00
|
|
|
|
2019-04-17 22:15:21 +08:00
|
|
|
def get_selected_objects(scene):
    """Return the names of all currently selected objects in *scene*."""
    return [obj.name for obj in scene.objects if obj.select_get()]
|
2019-04-18 21:05:48 +08:00
|
|
|
|
2019-08-08 21:00:07 +08:00
|
|
|
|
2019-05-14 16:38:13 +08:00
|
|
|
# LOAD HELPERS
|
2019-08-08 21:00:07 +08:00
|
|
|
|
|
|
|
def load_dict(src_dict, target):
    """Copy every key/value pair of *src_dict* onto *target* as attributes.

    A failure on a single attribute (e.g. a read-only bpy property) is
    logged and skipped, so the remaining attributes are still applied.
    The original aborted the whole loop on the first failure.
    """
    for name, value in src_dict.items():
        try:
            setattr(target, name, value)
        except Exception as e:
            # Best-effort: partially applying the dict beats dropping it.
            logger.error(e)
|
|
|
|
|
|
|
|
|
2019-04-11 00:01:55 +08:00
|
|
|
def load(key, value):
    """Dispatch incoming replicated *value* to the matching datablock loader.

    *key* is a "Type/name" path (e.g. "Mesh/Cube"): the part before the
    slash selects the loader, the full key resolves the existing local
    datablock (None when it does not exist yet, in which case the loader
    creates it).
    """
    target = resolve_bpy_path(key)
    target_type = key.split('/')[0]

    # BUGFIX: debug message was "load{}, {}" — missing separator after "load".
    logger.debug("load {}, {}".format(target_type, key))
    # The sender serialises missing data as the literal string "None".
    if value == "None":
        return

    if target_type == 'Object':
        load_object(target=target, data=value,
                    create=True)
    elif target_type == 'Image':
        load_image(target=target, data=value)
    elif target_type == 'Mesh':
        load_mesh(target=target, data=value,
                  create=True)
    elif target_type == 'Collection':
        load_collection(target=target, data=value,
                        create=True)
    elif target_type == 'Material':
        load_material(target=target, data=value,
                      create=True)
    elif target_type == 'GreasePencil':
        load_gpencil(target=target, data=value,
                     create=True)
    elif target_type == 'Scene':
        load_scene(target=target, data=value,
                   create=True)
    elif 'Light' in target_type:
        # Covers SunLight/SpotLight/AreaLight/PointLight variants.
        load_light(target=target, data=value,
                   create=True)
    elif target_type == 'Camera':
        load_default(target=target, data=value,
                     create=True, type=target_type)
    elif target_type == 'Armature':
        load_armature(target=target, data=value,
                      create=True)
    elif target_type == 'Curve':
        load_curve(target=target, data=value,
                   create=True)
    elif target_type == 'Client':
        # Client keys carry the client name, not a datablock.
        load_client(key.split('/')[1], value)
|
|
|
|
|
2019-04-11 20:39:31 +08:00
|
|
|
|
2019-04-11 00:01:55 +08:00
|
|
|
def resolve_bpy_path(path):
    """Resolve a "Type/name" path to the matching bpy.data datablock.

    Returns None when the type is unknown, the path is malformed, or no
    datablock with that name exists.
    """
    item = None

    try:
        parts = path.split('/')
        item = getattr(bpy.data, BPY_TYPES[parts[0]])[parts[1]]
    except (KeyError, IndexError, AttributeError):
        # Narrowed from a bare `except:` — unknown type, missing name or
        # malformed path all legitimately resolve to None.
        pass

    return item
|
2019-04-10 23:01:21 +08:00
|
|
|
|
2019-04-17 21:48:20 +08:00
|
|
|
|
2019-04-18 21:05:48 +08:00
|
|
|
def load_client(client=None, data=None):
    """Draw the presence overlay (cursor + selection) for a remote client.

    Only draws when presence is enabled in the session settings; *data*
    is passed straight to the presence renderer.
    """
    C = bpy.context
    # NOTE(review): D is assigned but never used in this function.
    D = bpy.data
    net_settings = C.window_manager.session

    if client and data:
        if net_settings.enable_presence:
            draw.renderer.draw_client(data)
            draw.renderer.draw_client_selected_objects(data)
|
2019-04-18 21:05:48 +08:00
|
|
|
|
2019-08-08 21:00:07 +08:00
|
|
|
|
2019-07-02 23:44:59 +08:00
|
|
|
def load_image(target=None, data=None):
    """Load (or create) an image datablock from replicated *data*.

    The raw pixel bytes in data["pixels"] are written to a PNG file in
    the addon cache directory and the image is re-pointed at that file.
    """
    try:
        if not target:
            image = bpy.data.images.new(
                name=data['name'],
                width=data['size'][0],
                height=data['size'][1]
            )
        else:
            image = target

        img_name = "{}.png".format(image.name)

        logger.info("updating {} cache file".format(image.name))
        img_path = os.path.join(environment.CACHE_DIR, img_name)

        # Context manager guarantees the cache file is closed even if the
        # write raises (the original leaked the handle on error).
        with open(img_path, 'wb') as cache_file:
            cache_file.write(data["pixels"])

        image.source = 'FILE'
        image.filepath = img_path
    except Exception as e:
        logger.error(e)
|
|
|
|
|
2019-05-04 00:52:49 +08:00
|
|
|
|
2019-05-03 20:55:18 +08:00
|
|
|
def load_armature(target=None, data=None, create=False):
    """Load an armature datablock, staging bone data in a JSON cache file.

    Creation pass: build the armature and persist *data* to
    ``cache_<name>.json`` (edit bones can only be created in edit mode,
    which requires an object — not yet available here).
    Update pass: read the cache back, enter edit mode on the object using
    this armature, create any missing edit bones, then delete the cache.
    """
    file = "cache_{}.json".format(data['name'])
    context = bpy.context

    if not target:
        target = bpy.data.armatures.new(data['name'])

        dump_anything.load(target, data)

        # Stage the incoming data on disk for the update pass.
        with open(file, 'w') as fp:
            json.dump(data, fp)

        target.id = data['id']
    else:
        file = "cache_{}.json".format(target.name)

        with open(file, 'r') as fp:
            data = json.load(fp)

        if data:
            # Find the object owning this armature so we can activate it
            # and switch into edit mode.
            ob = None
            for o in bpy.data.objects:
                if o.data == target:
                    ob = o
            if ob:
                bpy.context.view_layer.objects.active = ob
                bpy.ops.object.mode_set(mode='EDIT', toggle=False)
                for eb in data['edit_bones']:
                    if eb in target.edit_bones.keys():
                        # TODO: update existing bones as well.
                        pass
                    else:
                        # BUGFIX: original subscripted the method
                        # (`edit_bones.new[eb]`), raising TypeError; it
                        # must be called: `edit_bones.new(eb)`.
                        target_new_b = target.edit_bones.new(eb)
                        dump_anything.load(target_new_b, data['bones'][eb])

                        logger.debug(eb)

                bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

        # Cache is single-use; remove it once consumed.
        os.remove(file)
|
2019-05-03 20:55:18 +08:00
|
|
|
|
2019-04-17 21:48:20 +08:00
|
|
|
|
2019-04-10 23:01:21 +08:00
|
|
|
def load_mesh(target=None, data=None, create=False):
    """Rebuild a mesh datablock from serialised geometry in *data*.

    Geometry (verts/edges/faces, incl. per-loop UVs and material indices)
    is reconstructed in a temporary bmesh and written to *target*; then
    metadata, material slots and the replication id are applied.
    Skipped (with an error) when the mesh is currently in edit mode.
    """
    import bmesh

    if not target or not target.is_editmode:
        # 1 - LOAD GEOMETRY
        mesh_buffer = bmesh.new()

        # Vertices: data["verts"] maps index -> {"co": [x, y, z], ...}.
        for i in data["verts"]:
            v = mesh_buffer.verts.new(data["verts"][i]["co"])

        # Required before indexing mesh_buffer.verts below.
        mesh_buffer.verts.ensure_lookup_table()

        for i in data["edges"]:
            verts = mesh_buffer.verts
            v1 = data["edges"][i]["verts"][0]
            v2 = data["edges"][i]["verts"][1]
            mesh_buffer.edges.new([verts[v1], verts[v2]])

        for p in data["faces"]:
            verts = []
            for v in data["faces"][p]["verts"]:
                verts.append(mesh_buffer.verts[v])

            if len(verts) > 0:
                f = mesh_buffer.faces.new(verts)

                # verify() returns the UV layer, creating it if missing.
                uv_layer = mesh_buffer.loops.layers.uv.verify()

                f.material_index = data["faces"][p]['material_index']

                # Per-loop UV loading (loop order matches serialised order).
                for i, loop in enumerate(f.loops):
                    loop_uv = loop[uv_layer]
                    loop_uv.uv = data["faces"][p]["uv"][i]

        if target is None and create:
            target = bpy.data.meshes.new(data["name"])

        mesh_buffer.to_mesh(target)

        # 2 - LOAD METADATA

        # Recreate named UV layers on the mesh datablock.
        for uv_layer in data['uv_layers']:
            target.uv_layers.new(name=uv_layer)

        # Ensure the bevel-weight / skin vert layers exist on the buffer.
        bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
        skin_layer = mesh_buffer.verts.layers.skin.verify()

        dump_anything.load(target, data)

        # 3 - LOAD MATERIAL SLOTS
        # NOTE(review): material_to_load and i are computed but unused;
        # slots are filled from data["material_list"] below.
        material_to_load = []
        material_to_load = revers(data["materials"])
        target.materials.clear()
        # Slots
        i = 0

        for m in data["material_list"]:
            target.materials.append(bpy.data.materials[m])

        target.id = data['id']
    else:
        logger.error("Mesh can't be loaded")
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_object(target=None, data=None, create=False):
    """Load (or create) an object datablock from replicated *data*.

    On creation the object's data pointer is resolved by looking the
    serialised data name up in each bpy.data collection. Then the world
    matrix, replication id, modifier stack and selectability (based on
    ownership) are applied.
    """
    try:
        if target is None and create:
            pointer = None

            # Object-specific constructor: resolve the datablock this
            # object points at. BUGFIX: the original checked
            # bpy.data.curves twice; the duplicate branch was removed.
            if data["data"] in bpy.data.meshes.keys():
                pointer = bpy.data.meshes[data["data"]]
            elif data["data"] in bpy.data.lights.keys():
                pointer = bpy.data.lights[data["data"]]
            elif data["data"] in bpy.data.cameras.keys():
                pointer = bpy.data.cameras[data["data"]]
            elif data["data"] in bpy.data.curves.keys():
                pointer = bpy.data.curves[data["data"]]
            elif data["data"] in bpy.data.armatures.keys():
                pointer = bpy.data.armatures[data["data"]]
            elif data["data"] in bpy.data.grease_pencils.keys():
                pointer = bpy.data.grease_pencils[data["data"]]

            target = bpy.data.objects.new(data["name"], pointer)

        target.matrix_world = mathutils.Matrix(data["matrix_world"])

        target.id = data['id']

        client = bpy.context.window_manager.session.username

        # Load modifiers: drop local modifiers absent from the incoming
        # data, then create/update the rest.
        if hasattr(target, 'modifiers'):
            # BUGFIX: iterate a snapshot — removing from the live
            # collection while iterating it skips entries.
            for local_modifier in list(target.modifiers):
                if local_modifier.name not in data['modifiers']:
                    target.modifiers.remove(local_modifier)
            for modifier in data['modifiers']:
                target_modifier = target.modifiers.get(modifier)

                if not target_modifier:
                    target_modifier = target.modifiers.new(
                        data['modifiers'][modifier]['name'],
                        data['modifiers'][modifier]['type'])

                dump_anything.load(target_modifier, data['modifiers'][modifier])

        # Only the owner (or the "Common" pool) may select this object.
        if target.id == client or target.id == "Common":
            target.hide_select = False
        else:
            target.hide_select = True

    except Exception as e:
        logger.error("Object {} loading error: {} ".format(data["name"], e))
|
|
|
|
|
|
|
|
|
|
|
|
def load_curve(target=None, data=None, create=False):
    """Load (or create) a curve datablock from replicated *data*.

    All existing splines are cleared and rebuilt from data['splines'],
    including bezier points and regular (NURBS/poly) points.
    """
    try:
        if target is None and create:
            target = bpy.data.curves.new(data["name"], 'CURVE')

        dump_anything.load(target, data)

        # Rebuild splines from scratch (no incremental update yet).
        target.splines.clear()
        # load splines
        for spline in data['splines']:
            new_spline = target.splines.new(data['splines'][spline]['type'])
            dump_anything.load(new_spline, data['splines'][spline])

            # Load curve geometry data.
            # NOTE(review): assumes serialised point keys are consecutive
            # indices starting at 0, so add(1) keeps the collection in
            # step with bezier_point_index — confirm against the dumper.
            for bezier_point_index in data['splines'][spline]["bezier_points"]:
                new_spline.bezier_points.add(1)
                dump_anything.load(
                    new_spline.bezier_points[bezier_point_index], data['splines'][spline]["bezier_points"][bezier_point_index])

            for point_index in data['splines'][spline]["points"]:
                new_spline.points.add(1)
                dump_anything.load(
                    new_spline.points[point_index], data['splines'][spline]["points"][point_index])
        target.id = data['id']
    except Exception as e:
        logger.error("curve loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_collection(target=None, data=None, create=False):
    """Load (or create) a collection datablock from replicated *data*.

    Synchronises the collection's linked objects and child collections
    with the incoming lists, then applies the replication id and
    ownership-based selectability.
    """
    try:
        if target is None and create:
            target = bpy.data.collections.new(data["name"])

        # Link objects present in the data but not yet in the collection.
        for obj_name in data["objects"]:
            if obj_name not in target.objects.keys():
                target.objects.link(bpy.data.objects[obj_name])

        # Unlink local objects no longer present in the data.
        for obj_name in target.objects.keys():
            if obj_name not in data["objects"]:
                target.objects.unlink(bpy.data.objects[obj_name])

        # Link child collections.
        for child_name in data["children"]:
            if child_name not in target.children.keys():
                # BUGFIX: original linked when find() == -1 (i.e. when the
                # collection does NOT exist), which raised KeyError and
                # aborted the whole load via the outer except.
                if bpy.data.collections.find(child_name) != -1:
                    target.children.link(
                        bpy.data.collections[child_name])
                else:
                    logger.debug(target.name)

        # Unlink local children no longer present in the data.
        for child_name in target.children.keys():
            if child_name not in data["children"]:
                # BUGFIX: original called target.collection.children.unlink
                # — collections have no .collection attribute.
                target.children.unlink(
                    bpy.data.collections[child_name])

        target.id = data['id']

        client = bpy.context.window_manager.session.username

        # Only the owner (or the "Common" pool) may select this collection.
        if target.id == client or target.id == "Common":
            target.hide_select = False
        else:
            target.hide_select = True

    except Exception as e:
        logger.error("Collection loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_scene(target=None, data=None, create=False):
    """Load (or create) a scene datablock from replicated *data*.

    Applies generic scene properties, then synchronises the master
    collection's objects and direct child collections with the incoming
    lists, and finally sets the replication id.
    """
    try:
        if target is None and create:
            target = bpy.data.scenes.new(data["name"])

        # Generic scene properties.
        dump_anything.load(target, data)

        # Load master collection: link objects present in the data.
        for object in data["collection"]["objects"]:
            if object not in target.collection.objects.keys():
                target.collection.objects.link(bpy.data.objects[object])

        # Unlink local objects no longer present in the data.
        for object in target.collection.objects.keys():
            if object not in data["collection"]["objects"]:
                target.collection.objects.unlink(bpy.data.objects[object])

        # load collections (direct children only)
        # TODO: Recursive link
        logger.debug("check for scene childs")
        for collection in data["collection"]["children"]:
            logger.debug(collection)
            if collection not in target.collection.children.keys():
                target.collection.children.link(
                    bpy.data.collections[collection])

        logger.debug("check for scene child to remove")
        for collection in target.collection.children.keys():
            if collection not in data["collection"]["children"]:
                target.collection.children.unlink(
                    bpy.data.collections[collection])

        target.id = data['id']
        # Annotation (grease pencil) loading is currently disabled:
        # if data["grease_pencil"]:
        #     target.grease_pencil = bpy.data.grease_pencils[data["grease_pencil"]["name"]]
        # else:
        #     target.grease_pencil = None

    except Exception as e:
        logger.error("Scene loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_material(target=None, data=None, create=False):
    """Load (or create) a material datablock from replicated *data*.

    Grease-pencil materials get their gpencil settings applied; node
    materials get their node tree rebuilt (nodes, TEX_IMAGE images,
    input default values, then links).
    """
    try:
        if target is None:
            target = bpy.data.materials.new(data["name"])

        if data['is_grease_pencil']:
            if not target.is_grease_pencil:
                bpy.data.materials.create_gpencil_data(target)

            dump_anything.load(target.grease_pencil, data['grease_pencil'])

            load_dict(data['grease_pencil'], target.grease_pencil)

        elif data["use_nodes"]:
            if target.node_tree is None:
                target.use_nodes = True

            target.node_tree.nodes.clear()

            for node in data["node_tree"]["nodes"]:
                index = target.node_tree.nodes.find(node)

                # BUGFIX: was `if index is -1` — identity comparison with
                # an int literal (SyntaxWarning on Python 3.8+, and only
                # works because of CPython small-int caching).
                if index == -1:
                    node_type = data["node_tree"]["nodes"][node]["bl_idname"]

                    target.node_tree.nodes.new(type=node_type)
                    # index stays -1 here, so nodes[index] below addresses
                    # the freshly appended node (last in the collection).

                dump_anything.load(
                    target.node_tree.nodes[index], data["node_tree"]["nodes"][node])

                if data["node_tree"]["nodes"][node]['type'] == 'TEX_IMAGE':
                    target.node_tree.nodes[index].image = bpy.data.images[data["node_tree"]
                                                                          ["nodes"][node]['image']['name']]

                for input in data["node_tree"]["nodes"][node]["inputs"]:

                    try:
                        if hasattr(target.node_tree.nodes[index].inputs[input], "default_value"):
                            target.node_tree.nodes[index].inputs[input].default_value = data[
                                "node_tree"]["nodes"][node]["inputs"][input]["default_value"]
                    except Exception as e:
                        logger.error("Fail loading {} node value from {} ({}) ".format(
                            target.name, target.node_tree.nodes[index].inputs[input].default_value, e))
                        continue

            # Load nodes links (rebuilt from scratch).
            target.node_tree.links.clear()

            for link in data["node_tree"]["links"]:
                current_link = data["node_tree"]["links"][link]
                input_socket = target.node_tree.nodes[current_link['to_node']
                                                      ['name']].inputs[current_link['to_socket']['name']]
                output_socket = target.node_tree.nodes[current_link['from_node']
                                                       ['name']].outputs[current_link['from_socket']['name']]

                target.node_tree.links.new(input_socket, output_socket)

        target.id = data['id']

    except Exception as e:
        logger.error("Material loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
2019-04-18 21:05:48 +08:00
|
|
|
def load_gpencil_layer(target=None, data=None, create=False):
    """Load a grease-pencil layer's frames, strokes and points from *data*.

    Frames and strokes are fetched by key and created on KeyError/IndexError
    (EAFP); points are always appended one by one and loaded in order.
    """
    dump_anything.load(target, data)

    for frame in data["frames"]:
        # Reuse the existing frame if present, otherwise create it.
        try:
            tframe = target.frames[frame]
        except:
            tframe = target.frames.new(frame)
        dump_anything.load(tframe, data["frames"][frame])
        for stroke in data["frames"][frame]["strokes"]:
            # Same EAFP pattern for strokes.
            try:
                tstroke = tframe.strokes[stroke]
            except:
                tstroke = tframe.strokes.new()
            dump_anything.load(
                tstroke, data["frames"][frame]["strokes"][stroke])

            for point in data["frames"][frame]["strokes"][stroke]["points"]:
                p = data["frames"][frame]["strokes"][stroke]["points"][point]

                # Append a point and load the newly added (last) one.
                tstroke.points.add(1)
                tpoint = tstroke.points[len(tstroke.points)-1]

                dump_anything.load(tpoint, p)
|
2019-05-10 18:05:32 +08:00
|
|
|
|
2019-05-09 00:18:09 +08:00
|
|
|
|
2019-04-10 23:01:21 +08:00
|
|
|
def load_gpencil(target=None, data=None, create=False):
    """Load (or create) a grease-pencil datablock from replicated *data*.

    All layers are removed and rebuilt from data["layers"], generic
    properties are applied, and material slots are resynchronised.
    """
    try:
        if target is None and create:
            target = bpy.data.grease_pencils.new(data["name"])

        # BUGFIX: removing while iterating the live collection skipped
        # every other layer; drain the collection instead.
        while target.layers:
            target.layers.remove(target.layers[0])

        if "layers" in data.keys():
            for layer in data["layers"]:
                if layer not in target.layers.keys():
                    gp_layer = target.layers.new(data["layers"][layer]["info"])
                else:
                    gp_layer = target.layers[layer]
                load_gpencil_layer(
                    target=gp_layer, data=data["layers"][layer], create=create)

        dump_anything.load(target, data)

        # Resynchronise material slots.
        target.materials.clear()
        if "materials" in data.keys():
            for mat in data['materials']:
                target.materials.append(bpy.data.materials[mat])

        target.id = data['id']
    except Exception as e:
        # BUGFIX: original bare except logged the garbled, uninformative
        # message "default loadi\ng error"; report the real exception.
        logger.error("gpencil loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_light(target=None, data=None, create=False, type=None):
    """Load (or create) a light datablock from replicated *data*.

    *type* is accepted for signature parity with the other loaders; the
    light subtype is taken from data["type"].
    """
    try:
        if target is None and create:
            target = bpy.data.lights.new(data["name"], data["type"])

        dump_anything.load(target, data)

        target.id = data['id']

    except Exception as e:
        # Dead `pass` after the log call was removed.
        logger.error("light loading error: {}".format(e))
|
2019-04-10 23:01:21 +08:00
|
|
|
|
|
|
|
|
|
|
|
def load_default(target=None, data=None, create=False, type=None):
    """Generic loader for datablock types without a dedicated loader.

    *type* selects the bpy.data collection via BPY_TYPES when a new
    datablock must be created.
    """
    try:
        datablock = target
        if datablock is None and create:
            container = getattr(bpy.data, BPY_TYPES[type])
            datablock = container.new(data["name"])

        dump_anything.load(datablock, data)

        datablock.id = data['id']
    except Exception as e:
        logger.error("default loading error {}".format(e))
|
2019-04-11 00:01:55 +08:00
|
|
|
|
|
|
|
# DUMP HELPERS
|
|
|
|
def dump(key):
    """Serialise the datablock addressed by *key* ("Type/name") to a dict.

    Dispatches on the type prefix; each branch picks a dump depth and,
    for complex types, pulls in extra attributes (node trees, layers,
    splines, modifiers, ...). Returns None for unknown types.
    """
    target = resolve_bpy_path(key)
    target_type = key.split('/')[0]
    data = None

    if target_type == 'Image':
        data = {}
        # Raw PNG bytes first, then the image's scalar attributes.
        data['pixels'] = dump_image(target)
        dump_datablock_attibutes(target, [], 2, data)
        data = dump_datablock_attibutes(
            target,
            ["name", 'size', 'height', 'alpha', 'float_buffer', 'filepath', 'source'],
            2,
            data)
    elif target_type == 'Material':
        data = dump_datablock(target, 2)
        # Deep-dump the shader node tree or gpencil settings.
        if target.node_tree:
            dump_datablock_attibutes(
                target.node_tree, ["nodes", "links"], 5, data['node_tree'])
        elif target.grease_pencil:
            dump_datablock_attibutes(target, ["grease_pencil"], 3, data)
    elif target_type == 'GreasePencil':
        data = dump_datablock(target, 2)
        # Layers nest frames/strokes/points, hence the large depth.
        dump_datablock_attibutes(
            target, ['layers'], 9, data)
    elif target_type == 'Camera':
        data = dump_datablock(target, 1)
    elif 'Light' in target_type:
        data = dump_datablock(target, 3)
    elif target_type == 'Mesh':
        data = dump_datablock(target, 2)
        data = dump_mesh(target, data)

        # Fix material index: record slot names so the loader can rebuild
        # the slot order.
        m_list = []
        for m in target.materials:
            m_list.append(m.name)

        data['material_list'] = m_list
    elif target_type == 'Curve':
        data = dump_datablock(target, 1)
        dump_datablock_attibutes(
            target, ['splines'], 5, data)
    elif target_type == 'Object':
        data = dump_datablock(target, 1)

        if hasattr(target,'modifiers'):
            dump_datablock_attibutes(
                target, ['modifiers'], 3, data)
    elif target_type == 'Collection':
        data = dump_datablock(target, 4)
    elif target_type == 'Scene':
        # Scenes dump a whitelist of attributes, then a deeper pass on
        # the master collection.
        data = dump_datablock_attibutes(
            target, ['name', 'collection', 'id', 'camera', 'grease_pencil'], 2)
        dump_datablock_attibutes(
            target, ['collection'], 4, data)

    # Armature dumping is currently disabled (handled via the JSON cache
    # path in load_armature instead).

    return data
|
|
|
|
|
2019-05-14 17:00:38 +08:00
|
|
|
|
2019-04-11 00:01:55 +08:00
|
|
|
def dump_datablock(datablock, depth):
    """Serialize a whole bpy datablock into a plain dict.

    :param datablock: bpy datablock (Mesh, Object, Scene, ...) to serialize
    :param depth: recursion depth passed to the dumper
    :return: dict representation of the datablock, or None when
             *datablock* is falsy
    """
    if datablock:
        dumper = dump_anything.Dumper()
        # Dump every supported attribute type, down to `depth` levels.
        dumper.type_subset = dumper.match_subset_all
        dumper.depth = depth

        # fix: removed dead locals (datablock_type / key) that were computed
        # from bl_rna.name but never used
        return dumper.dump(datablock)
|
|
|
|
|
2019-05-14 17:00:38 +08:00
|
|
|
|
2019-08-08 21:00:07 +08:00
|
|
|
def dump_datablock_attibutes(datablock=None, attributes=None, depth=1, dickt=None):
    """Serialize only the named attributes of a bpy datablock.

    :param datablock: bpy datablock to read attributes from
    :param attributes: iterable of attribute names to dump (default: none)
    :param depth: recursion depth passed to the dumper
    :param dickt: optional existing dict to fill in place; when given it is
                  mutated and returned, letting callers merge several dumps
    :return: dict of dumped attributes, or None when *datablock* is falsy
    """
    if datablock:
        dumper = dump_anything.Dumper()
        dumper.type_subset = dumper.match_subset_all
        dumper.depth = depth

        # fix: `if dickt:` ignored an empty caller-supplied dict because of
        # truthiness — test for None instead so the caller's dict is always
        # filled in place when provided
        data = dickt if dickt is not None else {}

        # fix: `attributes=[]` was a mutable default argument; use None and
        # fall back to an empty iteration here
        for attr in (attributes or []):
            try:
                data[attr] = dumper.dump(getattr(datablock, attr))
            except Exception:
                # Best-effort dump: some attributes are not serializable.
                # fix: was a bare `except: pass` — keep going, but log it.
                logger.debug(
                    "failed to dump attribute %s of %s", attr, datablock)

        return data
|
|
|
|
|
2019-04-17 22:15:21 +08:00
|
|
|
|
2019-07-03 00:05:18 +08:00
|
|
|
def dump_image(image):
    """Return the raw file bytes of an image datablock.

    GENERATED images are first saved as PNG into the add-on cache directory
    so that they can be read back from disk exactly like FILE images.

    :param image: bpy.types.Image to dump
    :return: bytes of the image file, or None for unsupported sources
    """
    pixels = None

    if image.source == "GENERATED":
        img_name = "{}.png".format(image.name)

        image.filepath_raw = os.path.join(environment.CACHE_DIR, img_name)
        image.file_format = "PNG"
        image.save()

        # fix: image.source stays 'GENERATED' after save(), so the old
        # `if source == "FILE" ... else error` pair never read the saved
        # file back and always returned None for generated images
        with open(image.filepath_raw, "rb") as image_file:
            pixels = image_file.read()
        logger.debug("Reading image file {}".format(image.name))
    elif image.source == "FILE":
        # Flush any in-memory edits to disk before reading the file back.
        image.save()
        # fix: the file handle was opened and never closed (leak); use a
        # context manager
        with open(image.filepath_raw, "rb") as image_file:
            pixels = image_file.read()
        logger.debug("Reading image file {}".format(image.name))
    else:
        logger.error("image format not supported")

    return pixels
|
|
|
|
|
|
|
|
|
2019-08-08 21:00:07 +08:00
|
|
|
def dump_mesh(mesh, data=None):
    """Serialize mesh geometry (verts, edges, faces, uv layers) to a dict.

    :param mesh: bpy.types.Mesh to serialize
    :param data: optional existing dict to fill in place; when given it is
                 mutated and returned
    :return: dict with "verts", "edges", "faces" and "uv_layers" keys
    """
    import bmesh

    # fix: `data={}` was a mutable default argument — every call without an
    # explicit dict shared (and clobbered) the same module-level dict
    mesh_data = data if data is not None else {}

    mesh_buffer = bmesh.new()
    mesh_buffer.from_mesh(mesh)

    # .verify() also creates the layer on the bmesh copy when missing
    uv_layer = mesh_buffer.loops.layers.uv.verify()
    bevel_layer = mesh_buffer.verts.layers.bevel_weight.verify()
    skin_layer = mesh_buffer.verts.layers.skin.verify()

    verts = {}
    for vert in mesh_buffer.verts:
        v = {
            "co": list(vert.co),
            # vert metadata
            "bevel": vert[bevel_layer],
            # "skin": list(vert[skin_layer]),
        }
        verts[str(vert.index)] = v

    mesh_data["verts"] = verts

    edges = {}
    for edge in mesh_buffer.edges:
        edges[edge.index] = {
            "verts": [edge.verts[0].index, edge.verts[1].index],
            # Edge metadata
            "smooth": edge.smooth,
        }
    mesh_data["edges"] = edges

    faces = {}
    for face in mesh_buffer.faces:
        f = {
            "verts": [vert.index for vert in face.verts],
            "material_index": face.material_index,
            # Face metadata: one uv pair per loop
            "uv": [list(loop[uv_layer].uv) for loop in face.loops],
        }
        faces[face.index] = f

    mesh_data["faces"] = faces

    mesh_data["uv_layers"] = [layer.name for layer in mesh.uv_layers]

    # fix: the bmesh buffer was never freed, leaking native memory per call
    mesh_buffer.free()

    return mesh_data
|
|
|
|
|
|
|
|
|
2019-04-17 21:48:20 +08:00
|
|
|
def init_client(key=None):
    """Build the initial state dictionary for this session's client.

    :param key: unused; kept for API compatibility
    :return: dict with 'uuid', 'location', 'color' and 'active_objects'
    """
    context = bpy.context
    session = context.window_manager.session
    client_color = session.client_color

    return {
        'uuid': str(uuid4()),
        'location': [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
        'color': [client_color.r, client_color.g, client_color.b, 1],
        'active_objects': get_selected_objects(context.view_layer),
    }
|