Skip to content

Commit

Permalink
Fixed crash with extra large returns
Browse files Browse the repository at this point in the history
  • Loading branch information
HughMacdonald committed Apr 16, 2011
1 parent 1459981 commit 8ee0b0d
Show file tree
Hide file tree
Showing 2 changed files with 150 additions and 116 deletions.
10 changes: 10 additions & 0 deletions nukeCommandClient.py
Expand Up @@ -53,6 +53,16 @@ def get(self, item_type, item_id = -1, parameters = None):
data = {'action': item_type, 'id': item_id, 'parameters': parameters} data = {'action': item_type, 'id': item_id, 'parameters': parameters}
returnData = self.send(pickle.dumps(self.encode(data))) returnData = self.send(pickle.dumps(self.encode(data)))
result = pickle.loads(returnData) result = pickle.loads(returnData)
if isinstance(result, dict) and 'type' in result and result['type'] == "NukeTransferPartialObject":
data = result['data']
nextPart = 1
while nextPart < result['part_count']:
returnData = self.send(pickle.dumps({'type': "NukeTransferPartialObjectRequest", 'part': nextPart}))
result = pickle.loads(returnData)
data += result['data']
nextPart += 1

result = pickle.loads(data)
except Exception, e: except Exception, e:
raise e raise e


Expand Down
256 changes: 140 additions & 116 deletions nukeCommandServer.py
Expand Up @@ -8,127 +8,151 @@
# Container types that recode_data traverses recursively rather than proxying.
listTypes = [list, tuple, set, frozenset]
dictTypes = [dict]


# Largest pickled reply sent in one socket send; anything bigger is split into
# numbered NukeTransferPartialObject chunks that the client requests one by one.
MAX_SOCKET_BYTES = 16384

class NukeConnectionError(StandardError):
    """Raised when the command server cannot establish or use its socket."""
    pass


def nuke_command_server():
    """Start the NukeInternal command server on a background daemon thread."""
    server_thread = threading.Thread(None, NukeInternal)
    # Daemon thread so it never blocks Nuke from exiting.
    server_thread.setDaemon(True)
    server_thread.start()
class NukeInternal: class NukeInternal:
def __init__(self): def __init__(self):
self._objects = {} self._objects = {}
self._next_object_id = 0 self._next_object_id = 0


host = '' host = ''
start_port = 54200 start_port = 54200
end_port = 54300 end_port = 54300
backlog = 5 backlog = 5
size = 1024 * 1024 size = 1024 * 1024


bound_port = False bound_port = False
for port in range(start_port, end_port + 1): for port in range(start_port, end_port + 1):
try: try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print "Trying port %d" % port print "Trying port %d" % port
s.bind((host, port)) s.bind((host, port))
bound_port = True bound_port = True
break break
except Exception, e: except Exception, e:
pass pass


if not bound_port: if not bound_port:
raise NukeConnectionError("Cannot find port to bind to") raise NukeConnectionError("Cannot find port to bind to")


s.listen(backlog) s.listen(backlog)


while 1: while 1:
client, address = s.accept() client, address = s.accept()
data = client.recv(size) data = client.recv(size)
if data: if data:
result = self.receive(data) result = self.receive(data)
client.send(result) client.send(result)
client.close() client.close()


def recode_data(self, data, recode_object_func):
    """Recursively walk *data*, applying recode_object_func to every value
    that is not a basic type, rebuilding containers of the same type.

    Exceptions and basic types pass through untouched so they can be
    pickled directly.
    """
    data_type = type(data)
    if data_type in basicTypes or isinstance(data, Exception):
        return data
    if data_type in listTypes:
        # Rebuild the container with each element recoded.
        return data_type([self.recode_data(item, recode_object_func) for item in data])
    if data_type in dictTypes:
        # Recode both keys and values.
        return dict((self.recode_data(key, recode_object_func),
                     self.recode_data(data[key], recode_object_func)) for key in data)
    return recode_object_func(data)

def encode_data(self, data):
    """Recursively encode *data*, replacing non-basic objects with proxy dicts."""
    encoder = self.encode_data_object
    return self.recode_data(data, encoder)

def decode_data(self, data):
    """Recursively decode *data*, resolving proxy dicts back to real objects."""
    decoder = self.decode_data_object
    return self.recode_data(data, decoder)

def encode_data_object(self, data):
    """Register *data* in the server-side object table and return a proxy dict
    carrying the id the client will use to refer back to it."""
    obj_id = self._next_object_id
    self._next_object_id = obj_id + 1
    self._objects[obj_id] = data
    return {'id': obj_id, 'type': "NukeTransferObject"}

def decode_data_object(self, data):
    """Resolve a NukeTransferObject proxy dict to the stored server-side object."""
    return self._objects[data['id']]


def encode(self, data):
    """Encode *data* (proxying non-basic objects) and pickle it for the wire."""
    return pickle.dumps(self.encode_data(data))

def decode(self, data):
    """Unpickle a wire payload and resolve any object proxies it contains."""
    raw = pickle.loads(data)
    return self.decode_data(raw)


def encode_data_object(self, data): def get(self, data):
this_object_id = self._next_object_id obj = self.get_object(data['id'])
self._next_object_id += 1 params = data['parameters']
self._objects[this_object_id] = data result = None
return {'type': "NukeTransferObject", 'id': this_object_id} try:

if data['action'] == "getattr":
def decode_data_object(self, data): result = getattr(obj, params)
object_id = data['id'] elif data['action'] == "setattr":
return self._objects[object_id] setattr(obj, params[0], params[1])
elif data['action'] == "getitem":
# If we're actually getting from globals(), then raise NameError instead of KeyError
if data['id'] == -1 and params not in obj:
raise NameError("name '%s' is not defined" % params)
result = obj[params]
elif data['action'] == "setitem":
obj[params[0]] = params[1]
elif data['action'] == "call":
result = nuke.executeInMainThreadWithResult(obj, args=params['args'], kwargs=params['kwargs'])
elif data['action'] == "len":
result = len(obj)
elif data['action'] == "str":
result = str(obj)
elif data['action'] == "repr":
result = `obj`
elif data['action'] == "import":
result = imp.load_module(params, *imp.find_module(params))
except Exception, e:
result = e

return result

def receive(self, data_string):
data = self.decode(data_string)

if isinstance(data, dict) and 'type' in data and data['type'] == "NukeTransferPartialObjectRequest":
if data['part'] in self.partialObjects:
encoded = self.partialObjects[data['part']]
del self.partialObjects[data['part']]
else:
result = self.get(data)

encoded = self.encode(result)

if len(encoded) > MAX_SOCKET_BYTES:
encodedBits = []
while encoded:
encodedBits.append(encoded[:MAX_SOCKET_BYTES])
encoded = encoded[MAX_SOCKET_BYTES:]

self.partialObjects = {}
for i in range(len(encodedBits)):
self.partialObjects[i] = pickle.dumps({'type': "NukeTransferPartialObject", 'part': i, 'part_count': len(encodedBits), 'data': encodedBits[i]})

encoded = self.partialObjects[0]
del self.partialObjects[0]


def encode(self, data): return encoded
encoded_data = self.encode_data(data)
return pickle.dumps(encoded_data)

def decode(self, data):
return self.decode_data(pickle.loads(data))


def get(self, data_string):
def get_object(self, id):
    """Look up a registered server-side object; id -1 means the globals dict."""
    if id == -1:
        return globals()
    return self._objects[id]
if data['action'] == "getattr":
result = getattr(obj, params)
elif data['action'] == "setattr":
setattr(obj, params[0], params[1])
elif data['action'] == "getitem":
# If we're actually getting from globals(), then raise NameError instead of KeyError
if data['id'] == -1 and params not in obj:
raise NameError("name '%s' is not defined" % params)
result = obj[params]
elif data['action'] == "setitem":
obj[params[0]] = params[1]
elif data['action'] == "call":
result = nuke.executeInMainThreadWithResult(obj, args=params['args'], kwargs=params['kwargs'])
elif data['action'] == "len":
result = len(obj)
elif data['action'] == "str":
result = str(obj)
elif data['action'] == "repr":
result = `obj`
elif data['action'] == "import":
result = imp.load_module(params, *imp.find_module(params))
except Exception, e:
result = e

encoded = self.encode(result)

return encoded

def receive(self, data):
return self.get(data)

def get_object(self, id):
if id == -1:
return globals()
else:
return self._objects[id]

0 comments on commit 8ee0b0d

Please sign in to comment.