
Commit

Merge pull request #278 from arista-eosplus/resource-fix
Resource fix
advornic committed Jan 15, 2015
2 parents 09a88c4 + 8e6f036 commit 8c50c67
Showing 4 changed files with 92 additions and 45 deletions.
2 changes: 2 additions & 0 deletions conf/ztpserver.wsgi
@@ -41,3 +41,5 @@ sys.stdout.write('Starting ZTPServer, ')
sys.stdout.write('using config file %s' % conf)

application = start_wsgiapp(conf)
# For debug output, use:
# application = start_wsgiapp(conf, debug=True)
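
The two new lines in conf/ztpserver.wsgi document the debug variant of the WSGI entry point; with verbose output enabled, the tail of the file would read as below (a sketch only, assuming start_wsgiapp accepts a debug keyword, exactly as the added comment implies).

    # Tail of conf/ztpserver.wsgi with debug turned on (sketch)
    application = start_wsgiapp(conf, debug=True)
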
76 changes: 46 additions & 30 deletions ztpserver/resources.py
@@ -55,62 +55,78 @@ def __init__(self, node_id):
def serialize(self):
data = dict()
for key, value in self.data.items():
data[key] = str(value) if value is not None else None
data[key] = str(value) if value else None
return data

def load(self, pool):
self.data = dict()
filename = os.path.join(self.file_path, pool)
contents = load(filename, CONTENT_TYPE_YAML, self.node_id)
for key, value in contents.items():
self.data[key] = str(value) if value is not None else None
contents = load(filename, CONTENT_TYPE_YAML, self.node_id,
lock=True)
if contents and isinstance(contents, dict):
for key, value in contents.iteritems():
self.data[key] = str(value) if value else None
else:
if not contents:
contents = 'empty pool'

msg = '%s: unable to load resource pool %s: %s' % \
(self.node_id, pool, contents)
log.error(msg)
raise ResourcePoolError(msg)

log.debug('%s: loaded resource pool \'%s\': %s' %
(self.node_id, pool, self.data))

def dump(self, pool):
log.debug('%s: writing resource pool \'%s\': %s' %
(self.node_id, pool, self.data))
file_path = os.path.join(self.file_path, pool)
dump(self, file_path, CONTENT_TYPE_YAML, self.node_id)
dump(self, file_path, CONTENT_TYPE_YAML, self.node_id,
lock=True)

def allocate(self, pool, node):
node_id = node.identifier()
log.debug('%s: allocating resources' % node_id)

match = self.lookup(pool, node)
def allocate(self, pool):
if not self.data:
self.load(pool)

match = self.lookup(pool)
try:
if match:
log.debug('%s: already allocated resource \'%s\':\'%s\'' %
(node_id, pool, match))
(self.node_id, pool, match))
return match

self.load(pool)
key = next(x[0] for x in self.data.items() if x[1] is None)
key = next(x[0] for x in self.data.iteritems() if x[1] is None)
log.debug('%s: allocated \'%s\':\'%s\'' % (self.node_id, pool, key))

log.debug('%s: allocated \'%s\':\'%s\'' %
(node_id, pool, key))

self.data[key] = node_id
self.data[key] = self.node_id
self.dump(pool)
except StopIteration:
log.error('%s: no resource free in \'%s\'' % (node_id, pool))
log.error('%s: no resource free in \'%s\'' % (self.node_id, pool))
raise ResourcePoolError('%s: no resource free in \'%s\'' %
(node_id, pool))
(self.node_id, pool))
except Exception as exc:
log.error('%s: failed to allocate resource from \'%s\'' %
(node_id, pool))
(self.node_id, pool))
raise ResourcePoolError(exc.message)

return str(key)

def lookup(self, pool, node):
def lookup(self, pool):
''' Return an existing allocated resource if one exists '''
node_id = node.identifier()
log.debug('%s: looking up resource in \'%s\'' %
(node_id, pool))
try:

if not self.data:
self.load(pool)
matches = [m[0] for m in self.data.iteritems()
if m[1] == node_id]
key = matches[0] if matches else None

try:
try:
key = next(m[0] for m in self.data.iteritems()
if m[1] == self.node_id)
except StopIteration:
key = None

return key
except Exception as exc:
log.error('%s: failed to lookup resource from \'%s\'' %
(node_id, pool))
log.error('%s: failed to lookup resource from \'%s\' (%s)' %
(self.node_id, pool, exc.message))
raise ResourcePoolError(exc.message)
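
Taken together, the resources.py changes drop the node argument from allocate() and lookup(): the node identity is fixed at construction time, the YAML pool is loaded lazily under a per-file lock, and an existing allocation is returned before a free entry is claimed. A minimal usage sketch follows; the ResourcePool class name, the node identifier, and the 'mgmt_subnet' pool name are assumptions for illustration, since the diff shows only the changed methods.

    # Sketch only: the class name, node id and pool name are illustrative.
    pool_mgr = ResourcePool('001c.7300.1234')   # node_id bound at construction

    # lookup() no longer takes a node argument; it loads the pool on first use.
    existing = pool_mgr.lookup('mgmt_subnet')

    # allocate() returns the existing key if one is already bound to this node,
    # otherwise it claims the first free entry (value None) and writes the pool
    # back to disk under the per-file lock.
    value = pool_mgr.allocate('mgmt_subnet')
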
55 changes: 42 additions & 13 deletions ztpserver/serializers.py
@@ -36,11 +36,12 @@
import logging
import json

import threading
import yaml

READ_WRITE_LOCK = {}
log = logging.getLogger(__name__) #pylint: disable=C0103


class SerializerError(Exception):
''' base error raised by serialization functions '''
pass
@@ -184,30 +184,58 @@ def loads(data, content_type, node_id):
serializer = Serializer(node_id)
return serializer.deserialize(data, content_type)

def load(file_path, content_type, node_id=None):
id_string = '%s: ' % node_id if node_id else ''
def load(file_path, content_type, node_id='N/A', lock=False):
log.debug('%s: reading %s...' % (node_id, file_path))

if lock and file_path not in READ_WRITE_LOCK:
READ_WRITE_LOCK[file_path] = threading.Lock()

try:
data = open(file_path).read()
return loads(data, content_type, node_id)
if lock:
with READ_WRITE_LOCK[file_path]:
with open(file_path) as fhandler:
data = fhandler.read()
else:
with open(file_path) as fhandler:
data = fhandler.read()

result = loads(data, content_type, node_id)
except (OSError, IOError) as err:
log.error('%s: failed to load file from %s (%s)' %
(id_string, file_path, err))
(node_id, file_path, err))
raise SerializerError('%s: failed to load file from %s (%s)' %
(id_string, file_path, err))
(node_id, file_path, err))

# Enable this log if you want to see the contents of the file (verbose)
# log.debug('%s: loaded %s: %s' % (node_id, file_path, result))
return result

def dumps(data, content_type, node_id):
serializer = Serializer(node_id)
if hasattr(data, 'serialize'):
data = data.serialize()
return serializer.serialize(data, content_type)

def dump(data, file_path, content_type, node_id=None):
id_string = '%s: ' % node_id if node_id else ''

def dump(data, file_path, content_type, node_id='N/A', lock=False):
log.debug('%s: writing %s...' % (node_id, file_path))

if lock and file_path not in READ_WRITE_LOCK:
READ_WRITE_LOCK[file_path] = threading.Lock()

try:
with open(file_path, 'w') as fhandler:
fhandler.write(dumps(data, content_type, node_id))
if lock:
with READ_WRITE_LOCK[file_path]:
with open(file_path, 'w') as fhandler:
fhandler.write(dumps(data, content_type, node_id))
else:
with open(file_path, 'w') as fhandler:
fhandler.write(dumps(data, content_type, node_id))
except (OSError, IOError) as err:
log.error('%s: failed to write file to %s (%s)' %
(id_string, file_path, err))
(node_id, file_path, err))
raise SerializerError('%s: failed to write file to %s (%s)' %
(id_string, file_path, err))
(node_id, file_path, err))

# Enable this log if you want to see the contents of the file (verbose)
# log.debug('%s: wrote %s: %s' % (node_id, file_path, data))
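
The serializers.py changes add an optional per-path lock around file reads and writes, so concurrent requests cannot interleave a read-modify-write of the same resource pool file. A stripped-down sketch of the read side of that pattern, with the YAML/JSON deserialization omitted:

    import threading

    READ_WRITE_LOCK = {}  # one lock per file path, created on first use

    def locked_read(file_path, lock=False):
        # Create the per-path lock lazily, as the patch does.
        if lock and file_path not in READ_WRITE_LOCK:
            READ_WRITE_LOCK[file_path] = threading.Lock()

        if lock:
            with READ_WRITE_LOCK[file_path]:
                with open(file_path) as fhandler:
                    return fhandler.read()
        with open(file_path) as fhandler:
            return fhandler.read()

The same guard wraps the write path in dump(); note that the lock dictionary itself is populated without synchronization, which mirrors the patch.
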
4 changes: 2 additions & 2 deletions ztpserver/topology.py
@@ -174,15 +174,15 @@ def resources(attributes, node, node_id):
match = FUNC_RE.match(item)
if match:
method = getattr(_resources, match.group('function'))
_value.append(method(match.group('arg'), node))
_value.append(method(match.group('arg')))
else:
_value.append(item)
value = _value
else:
match = FUNC_RE.match(str(value))
if match:
method = getattr(_resources, match.group('function'))
value = method(match.group('arg'), node)
value = method(match.group('arg'))
_attributes[key] = value
log.debug('%s: resources: %s' % (node_id, _attributes))
return _attributes
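
In topology.py, resource plugin calls now pass only the argument captured by FUNC_RE, since the node identity already lives on the _resources object. A hedged sketch of that dispatch; the regular expression and the resolve() helper are stand-ins, because the real FUNC_RE pattern is defined outside this diff:

    import re

    # Hypothetical stand-in for FUNC_RE; the real pattern is defined elsewhere.
    FUNC_RE = re.compile(r'(?P<function>\w+)\((?P<arg>[^)]*)\)')

    def resolve(value, _resources):
        # Replace 'allocate(pool_name)'-style attribute values with a resource.
        match = FUNC_RE.match(str(value))
        if match:
            method = getattr(_resources, match.group('function'))
            return method(match.group('arg'))   # node argument no longer passed
        return value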
