# multi-user/client.py

import binascii
import collections
import copy
import json
import logging
import os
import queue
import sys
import threading
import time
from enum import Enum
from random import randint

import zmq

from . import environment, replication, helpers, message
from .libs import dump_anything, umsgpack

# Tuning knobs for the client runtime.
CONNECT_TIMEOUT = 2        # seconds to wait for a server before giving up
WATCH_FREQUENCY = 0.1      # watchdog polling interval (seconds)
WAITING_TIME = 0.001
SERVER_MAX = 1             # at most one server connection per agent
DUMP_AGENTS_NUMBER = 1     # number of serialisation worker threads

lock = threading.Lock()
logger = logging.getLogger(__name__)
# NOTE(review): the original passed the `environment` *module* as `level`,
# which makes logging raise TypeError at import time.  An explicit level is
# used instead — confirm the intended verbosity against the project config.
logging.basicConfig(level=logging.INFO)
instance = None
# Connection lifecycle used by the net agent's state machine:
# INITIAL (no server) -> SYNCING (snapshot in flight) -> ACTIVE (live updates).
# WORKING: purpose unclear from this file — TODO confirm against callers.
State = Enum('State', [
    ('INITIAL', 1),
    ('SYNCING', 2),
    ('ACTIVE', 3),
    ('WORKING', 4),
])
def zpipe(ctx):
    """Create an inproc PAIR-socket pipe for talking between threads.

    Mimics the pipe czmq's zthread_fork sets up: two PAIR sockets joined
    on a random inproc endpoint.  Returns (bound_end, connected_end).
    """
    parent = ctx.socket(zmq.PAIR)
    child = ctx.socket(zmq.PAIR)
    for sock in (parent, child):
        sock.linger = 0  # drop pending messages on close
        sock.hwm = 1     # keep the pipe shallow
    iface = "inproc://%s" % binascii.hexlify(os.urandom(8))
    parent.bind(iface)
    child.connect(iface)
    return parent, child
class Client(object):
    """Public facade of the multi-user client.

    Owns the ZMQ context, the inproc control pipe and the work queues, and
    spawns the background worker threads (net, serialisation, watchdog).
    Every public method is cheap: it either pushes a command down the pipe
    or enqueues work for the serial agents.
    """

    # Class-level defaults; each instance overwrites them in __init__.
    ctx = None           # zmq.Context shared by all sockets
    pipe = None          # our end of the inproc control pipe
    net_agent = None     # network worker thread
    store = None         # local replica of the distributed hash table
    active_tasks = None  # last computed backlog size (see is_busy)

    def __init__(self, executor):
        """Spawn all worker threads.

        :param executor: queue-like object; (callable, args) tasks are put
            on it so the host application can run them on its main thread.
        """
        self.ctx = zmq.Context()
        self.pipe, peer = zpipe(self.ctx)

        self.store = {}
        self.serial_product = queue.Queue()  # serialised values -> net agent
        self.serial_feed = queue.Queue()     # (command, key, value) work items
        self.stop_event = threading.Event()
        self.external_tasks = executor

        # Net agent: talks to the server, applies and forwards updates.
        self.net_agent = threading.Thread(
            target=net_worker,
            args=(self.ctx, self.store, peer, self.serial_product,
                  self.serial_feed, self.stop_event, self.external_tasks),
            name="net-agent")
        self.net_agent.daemon = True
        self.net_agent.start()

        # Local data translation agents: run dump/load off the main thread.
        self.serial_agents = []
        for _ in range(DUMP_AGENTS_NUMBER):
            serial_agent = threading.Thread(
                target=serial_worker,
                args=(self.serial_product, self.serial_feed),
                name="serial-agent")
            serial_agent.daemon = True
            serial_agent.start()
            self.serial_agents.append(serial_agent)

        # Watchdog agent: periodically queues dirty items for re-dump.
        self.watchdog_agent = threading.Thread(
            target=watchdog_worker,
            args=(self.serial_feed, WATCH_FREQUENCY, self.stop_event),
            name="watchdog-agent")
        self.watchdog_agent.daemon = True
        self.watchdog_agent.start()

        # Status
        self.active_tasks = 0

    def connect(self, id, address, port):
        """Ask the net agent to connect; str id/address are encoded to bytes."""
        self.pipe.send_multipart([
            b"CONNECT",
            id.encode() if isinstance(id, str) else id,
            address.encode() if isinstance(address, str) else address,
            b'%d' % port])

    def replicate(self, py_object):
        """Entry point for python object replication
        - Create object replication structure
        - Add it to the distributed hash table
        """
        pass
        # node = Factory(py_object)
        # self.store

    def init(self):
        """
        Scene initialisation
        """
        self.pipe.send_multipart(
            [b"INIT"])

    def disconnect(self):
        """
        Disconnect
        """
        self.pipe.send_multipart(
            [b"DISCONNECT"])

    def set(self, key, value=None, override=False):
        """Set new value in distributed hash table.

        With an explicit value, sends [SET][key][value][override] to the
        agent; without one, queues the key so a serial agent dumps it first.
        """
        if value:
            # Fixed: the original had an `if value else packb('None')`
            # fallback here that could never run inside `if value:`.
            self.pipe.send_multipart([
                b"SET",
                umsgpack.packb(key),
                umsgpack.packb(value),
                umsgpack.packb(override)])
        else:
            self.serial_feed.put(('DUMP', key, None))

    def add(self, key, value=None):
        """Queue *key* to be dumped and published.

        Fixed: the original put the bare key on the feed, but serial_worker
        unpacks (command, key, value) tuples, so every add() killed a serial
        agent with a ValueError.
        """
        self.serial_feed.put(('DUMP', key, value))

    def is_busy(self):
        """Return True while serialisation work is still queued."""
        self.active_tasks = self.serial_feed.qsize() + self.serial_product.qsize()
        return self.active_tasks > 0

    def exit(self):
        """Disconnect (if connected) and ask every worker thread to stop."""
        if self.net_agent.is_alive():
            self.disconnect()
        self.stop_event.set()
        # One STOP sentinel per serial agent so each one wakes up and exits.
        for _ in range(DUMP_AGENTS_NUMBER):
            self.serial_feed.put(('STOP', None, None))

    # READ-ONLY FUNCTIONS

    def get(self, key):
        """Lookup value in distributed hash table
        Sends [GET][key] to the agent and waits for a value response
        If there is no clone available, will eventually return None.
        """
        return [[k, self.store.get(k).body]
                for k in self.store.keys() if key in k]

    def exist(self, key):
        """
        Fast key exist check
        """
        return key in self.store

    def list(self):
        """Return [key, owner-id] pairs for every entry in the store."""
        dump_list = []
        for k, v in self.store.items():
            if 'Client' in k:
                dump_list.append([k, v.id.decode()])
            else:
                # Fixed: narrowed the bare except — entries without a body
                # or without an 'id' field are skipped on purpose.
                try:
                    dump_list.append([k, v.body['id']])
                except (TypeError, KeyError):
                    pass
        return dump_list

    def state(self):
        """Return the connection state as an int mirroring the State enum."""
        if self.net_agent is None or not self.net_agent.is_alive():
            return 1  # State.INITIAL
        elif self.net_agent.is_alive() and self.store.keys():
            return 3  # State.ACTIVE
        else:
            return 2  # State.SYNCING

    # SAVING FUNCTIONS

    def dump(self, filepath):
        """Write every stored body to *filepath*, one JSON document per line.

        Fixed: the original ignored *filepath*, always wrote 'dump.json',
        and concatenated the documents with no separator.
        """
        with open(filepath, "w") as fp:
            for value in self.store.values():
                fp.write(json.dumps(value.body) + "\n")
class Server(object):
    """Client-side handle on a remote server.

    Holds the two sockets to one server: a DEALER for the initial snapshot
    exchange (on *port*) and a SUB for the live update stream (on port+1).
    """

    address = None     # Server address (bytes)
    port = None        # Server snapshot port; port+1 carries updates
    snapshot = None    # Snapshot socket
    subscriber = None  # Incoming updates

    def __init__(self, ctx, address, port, id):
        self.address = address
        self.port = port

        # Snapshot socket.
        # Fixed: the original created this socket twice, the second time via
        # a non-existent `self.context`, raising AttributeError on connect.
        self.snapshot = ctx.socket(zmq.DEALER)
        self.snapshot.setsockopt(zmq.IDENTITY, id)
        self.snapshot.connect("tcp://{}:{}".format(address.decode(), port))

        # Subscription socket: receives every published update (no filter).
        self.subscriber = ctx.socket(zmq.SUB)
        self.subscriber.setsockopt_string(zmq.SUBSCRIBE, '')
        self.subscriber.connect("tcp://{}:{}".format(address.decode(), port+1))
        self.subscriber.linger = 0
        print("connected on tcp://{}:{}".format(address.decode(), port))
class ClientAgent(object):
    """Background-thread agent driven by net_worker.

    Executes control commands received on the inter-thread pipe and owns
    the publisher socket plus the (single) Server connection.
    """

    # Class-level defaults; overwritten per instance in __init__.
    ctx = None
    pipe = None
    property_map = None  # shared store (same dict the Client exposes)
    publisher = None     # PUSH socket carrying our outgoing updates
    id = None            # our identity, as bytes
    state = None         # State enum value
    server = None
    serial = None
    serialisation_agent = None

    def __init__(self, ctx, store, pipe):
        self.ctx = ctx
        self.pipe = pipe
        self.property_map = store
        self.id = b"test"
        self.state = State.INITIAL
        self.admin = False
        self.server = None

        self.publisher = self.ctx.socket(zmq.PUSH)  # push update socket
        self.publisher.setsockopt(zmq.IDENTITY, self.id)
        self.publisher.setsockopt(zmq.SNDHWM, 60)
        self.publisher.linger = 0

    def control_message(self):
        """Handle one command frame arriving on the inter-thread pipe."""
        frames = self.pipe.recv_multipart()
        command = frames.pop(0)

        if command == b"CONNECT":
            self.id = frames.pop(0)
            address = frames.pop(0)
            port = int(frames.pop(0))
            if self.server is None:
                # Fixed: `address` arrives as bytes (Client.connect encodes
                # it), so the original str comparison never matched and a
                # local connection was never detected as admin.
                if address.decode() in ('127.0.0.1', 'localhost'):
                    self.admin = True
                self.server = Server(self.ctx, address, port, self.id)
                self.publisher.connect(
                    "tcp://{}:{}".format(address.decode(), port+2))
            else:
                logger.error("E: too many servers (max. %i)", SERVER_MAX)

        elif command == b"DISCONNECT":
            # Publish a tombstone for our Client entry (admins keep theirs).
            if self.admin is False:
                uid = self.id.decode()
                delete_user = message.Message(
                    key="Client/{}".format(uid), id=self.id, body=None)
                delete_user.send(self.publisher)
            # TODO: Do we need to pass every object rights to the moderator
            # on disconnect?

        elif command == b"SET":
            key = umsgpack.unpackb(frames[0])
            value = umsgpack.unpackb(frames[1])
            override = umsgpack.unpackb(frames[2])
            if key in self.property_map.keys():
                # Only the owner (or an override) may rewrite an entry.
                if self.property_map[key].body['id'] == self.id.decode() or override:
                    if value == 'None':
                        # No payload supplied: dump the live object ourselves.
                        value = helpers.dump(key)
                        value['id'] = self.id.decode()
                    if value:
                        update = message.Message(
                            key=key, id=self.id, body=value)
                        update.store(self.property_map)
                        if override:
                            helpers.load(key, self.property_map[key].body)
                        update.send(self.publisher)
                    else:
                        logger.error("Fail to dump ")
                else:
                    # Not ours: revert local state to the stored version.
                    helpers.load(key, self.property_map[key].body)

        elif command == b"ADD":
            key = umsgpack.unpackb(frames[0])
            value = umsgpack.unpackb(frames[1])
            if value == 'None':
                value = helpers.dump(key)
                value['id'] = self.id.decode()
            if value:
                update = message.Message(
                    key=key, id=self.id, body=value)
                update.store(self.property_map)
                update.send(self.publisher)
            else:
                logger.error("Fail to dump ")

        elif command == b"GET":
            # Substring match over keys; reply with [] packed, or '' if none.
            key = umsgpack.unpackb(frames[0])
            matches = [[k, self.property_map.get(k).body]
                       for k in self.property_map.keys() if key in k]
            self.pipe.send(umsgpack.packb(matches)
                           if matches else umsgpack.packb(''))

        elif command == b"LIST":
            dump_list = []
            for k, v in self.property_map.items():
                if 'Client' in k:
                    dump_list.append([k, v.id.decode()])
                else:
                    # Entries without a body/'id' are skipped on purpose.
                    try:
                        dump_list.append([k, v.body['id']])
                    except (TypeError, KeyError):
                        pass
            self.pipe.send(umsgpack.packb(dump_list)
                           if dump_list else umsgpack.packb(''))

        elif command == b"STATE":
            self.pipe.send(umsgpack.packb(self.state.value))
def net_worker(ctx, store, pipe, serial_product, serial_feed, stop_event, external_executor):
    """Network thread main loop.

    Drives the ClientAgent state machine: polls the control pipe and the
    active server socket, applies the snapshot, stores/forwards live
    updates, and publishes values produced by the serial agents.

    :param external_executor: queue receiving (callable, args) tasks that
        must run on the host application's main thread.
    """
    agent = ClientAgent(ctx, store, pipe)
    server = None
    net_feed = serial_product  # serialised values ready to publish
    net_product = serial_feed  # work items for the serial agents

    while not stop_event.is_set():
        poller = zmq.Poller()
        poller.register(agent.pipe, zmq.POLLIN)

        # Which server socket to watch depends on the sync state.
        server_socket = None
        if agent.state == State.INITIAL:
            server = agent.server
            if agent.server:
                logger.debug("%s: waiting for server at %s:%d...",
                             agent.id.decode(), server.address, server.port)
                server.snapshot.send(b"SNAPSHOT_REQUEST")
                agent.state = State.SYNCING
                server_socket = server.snapshot
        elif agent.state == State.SYNCING:
            server_socket = server.snapshot
        elif agent.state == State.ACTIVE:
            server_socket = server.subscriber

        if server_socket:
            poller.register(server_socket, zmq.POLLIN)

        # Fixed: the original wrapped this in `try/except: raise` — a no-op
        # that also left an unreachable `break` behind it.
        items = dict(poller.poll(1))

        if agent.pipe in items:
            agent.control_message()
        elif server_socket in items:
            msg = message.Message.recv(server_socket)

            if agent.state == State.SYNCING:
                # Client snapshot
                if msg.key == "SNAPSHOT_END":
                    # Snapshot done: announce ourselves to the swarm.
                    client_key = "Client/{}".format(agent.id.decode())
                    client_dict = helpers.init_client(key=client_key)
                    client_dict['id'] = agent.id.decode()
                    client_store = message.Message(
                        key=client_key, id=agent.id, body=client_dict)
                    client_store.store(agent.property_map)
                    client_store.send(agent.publisher)
                    agent.state = State.ACTIVE
                    logger.debug("snapshot complete")
                else:
                    # Queue the load for a serial agent, keep a local copy.
                    net_product.put(('LOAD', msg.key, msg.body))
                    msg.store(agent.property_map)
                    logger.debug("snapshot from {} stored".format(msg.id))

            elif agent.state == State.ACTIVE:
                if msg.id != agent.id:
                    msg.store(agent.property_map)
                    # Loading must happen on the host app's main thread.
                    external_executor.put((helpers.load, [msg.key, msg.body]))
                else:
                    logger.debug("{} nothing to do".format(agent.id))

        # Serialisation thread => Net thread
        if not net_feed.empty():
            key, value = net_feed.get()
            if value:
                value['id'] = agent.id.decode()  # stamp with our id
                update = message.Message(key=key, id=agent.id, body=value)
                update.store(agent.property_map)
                update.send(agent.publisher)
            else:
                logger.error("Fail to dump ")

    logger.info("exit thread")
def serial_worker(serial_product, serial_feed):
    """Serialisation worker loop.

    Consumes (command, key, value) tuples from *serial_feed*:
    'DUMP' serialises the object behind *key* onto *serial_product*,
    'LOAD' applies *value* to the object behind *key*,
    'STOP' terminates the loop.
    """
    logger.info("serial thread launched")

    running = True
    while running:
        command, key, value = serial_feed.get()

        if command == 'STOP':
            running = False
        elif command == 'DUMP':
            try:
                dumped = helpers.dump(key)
                if dumped:
                    serial_product.put((key, dumped))
            except Exception as e:
                logger.error("{}".format(e))
        elif command == 'LOAD':
            if value:
                try:
                    helpers.load(key, value)
                except Exception as e:
                    logger.error("{}".format(e))

    logger.info("serial thread stopped")
def watchdog_worker(serial_feed, interval, stop_event):
    """Watchdog loop: poll Blender data and queue dirty items for re-dump.

    :param serial_feed: queue receiving ('DUMP', key, None) work items.
    :param interval: seconds to sleep between scans.
    :param stop_event: threading.Event that ends the loop when set.
    """
    import bpy  # deferred import: only available when running inside Blender

    logger.info(
        "watchdog thread launched with {} sec of interval".format(interval))

    while not stop_event.is_set():
        for datatype in environment.rtypes:
            for item in getattr(bpy.data, helpers.BPY_TYPES[datatype]):
                key = "{}/{}".format(datatype, item.name)
                try:
                    if item.is_dirty:
                        logger.debug("{} needs update".format(key))
                        serial_feed.put(('DUMP', key, None))
                        item.is_dirty = False
                # Fixed: narrowed from a bare `except:` — only skip items
                # lacking the is_dirty flag or datablocks already freed by
                # Blender; anything else should surface.
                except (AttributeError, ReferenceError):
                    pass

        time.sleep(interval)

    logger.info("watchdog thread stopped")