Improved FS backend logging and typing. Fixed a few bugs
Signed-off-by: Christian Pinto <christian.pinto@ibm.com>
christian-pinto committed Feb 28, 2024
1 parent aee63d8 commit 842ae69
Showing 1 changed file with 104 additions and 78 deletions.
182 changes: 104 additions & 78 deletions sunfish/storage/backend_FS.py
@@ -11,13 +11,15 @@
from sunfish.storage import utils
from sunfish.lib.exceptions import *

logger = logging.getLogger(__name__)


class BackendFS(BackendInterface):

    def __init__(self, conf):
        self.root = conf["backend_conf"]["fs_root"]
        self.redfish_root = conf["redfish_root"]

    def read(self, path: str) -> dict:
        """Loads the content of the index.json corresponding to the requested path.

        Args:
@@ -29,27 +31,28 @@ def read(self, path:str):
        Returns:
            json: data of the resource
        """
        logger.debug("BackendFS: read called")

        resource = path.replace(self.redfish_root, "")
        logger.debug(f"PATH: {path}")
        path = os.path.join(os.getcwd(), self.root, resource, 'index.json')

        logger.debug(f"BackendFS: read called on {path}")
        try:
            json_data = open(path)
            data = json.load(json_data)
            return data
        except FileNotFoundError as e:
            raise ResourceNotFound(resource)
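
    # Illustrative usage (comment added for clarity, not part of the original
    # file): with redfish_root "/redfish/v1/" and fs_root "Resources", a call
    #   backend.read("/redfish/v1/Systems/1234")
    # opens <cwd>/Resources/Systems/1234/index.json and returns the parsed
    # dict, raising ResourceNotFound if the file does not exist.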

    def write(self, payload: dict):
        """Checks if the Collection exists for that resource and stores the resource in the correct position of the file system.
        It creates the directory of the resource, creates the index.json file, and updates the files linked with the new resource (Collection members or Resources list).

        Args:
            payload (dict): json representing the resource that should be stored.

        Raises:
            CollectionNotSupported: the storage of the collections is not supported.
            AlreadyExists: it is not possible to have duplicate resources with the same ID.
@@ -60,53 +63,65 @@ def write(self, payload:json):

        # get ID and collection from payload
        length = len(self.redfish_root)
        id = payload['@odata.id'][length:]  # id without redfish.root (e.g. /redfish/v1/)

        id = id.split('/')
        for index in range(2, len(id[1:])):
            to_check = os.path.join('/'.join(id[:index]), 'index.json')
            to_check = os.path.join(os.getcwd(), self.root, to_check)
            if os.path.exists(to_check) is False:
                raise ActionNotAllowed()
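
            # Summary (comment added for clarity, not part of the original
            # code): each pass of this loop verifies that one ancestor level of
            # the new resource exists (e.g. for
            # id == ['Systems', '1234', 'Processors', 'CPU1'] it checks
            # Systems/1234/index.json), raising ActionNotAllowed otherwise,
            # then confirms the ancestor references the next level, patching
            # the reference in when it is missing.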

            with open(to_check, 'r') as data_json:
                data = json.load(data_json)
                data_json.close()
            if 'Collection' in data["@odata.type"]:
                members = data["Members"]
                for x in members:
                    if x["@odata.id"] == os.path.join(self.redfish_root, '/'.join(id[:index + 1])):
                        present = True
            else:
                if data[id[index]]:
                    element = data[id[index]]
                    if element["@odata.id"] == os.path.join(self.redfish_root, '/'.join(id[:index + 1])):
                        present = True
                    else:
                        element["@odata.id"] = os.path.join(self.redfish_root, '/'.join(id[:index + 1]))
                        with open(to_check, 'w') as data_json:
                            json.dump(data, data_json, indent=4, sort_keys=True)
                            data_json.close()

        last_element = len(id) - 1
        collection_type = id[last_element - 1]
        resource_id = id[last_element]
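
        # Illustrative values (comment added for clarity, not part of the
        # original code): for payload['@odata.id'] == "/redfish/v1/Systems/1234",
        # id == ['Systems', '1234'], so collection_type == 'Systems' and
        # resource_id == '1234'; deeper ids take the subcollection branch below.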
        full_collection = ''
        # create the path of the full collection if it is a subcollection
        if len(id) > 2:
            for i in range(0, last_element - 1):
                full_collection = full_collection + id[i] + '/'

            collection_type = os.path.join(full_collection, collection_type)

        collection_path = os.path.join(os.getcwd(), self.root,
                                       collection_type)  # collection_path .../Resources/[folder], collection_type = [folder]
        parent_path = os.path.dirname(collection_path)  # parent path .../Resources

        # check if the directory of the Collection already exists
        if not os.path.exists(collection_path):
            os.makedirs(collection_path)

            config = utils.generate_collection(collection_type)

            # if the item to be written is managed by an agent, we want the collection containing it to also be marked
            # accordingly. We do this only for collections to be created because we assume that if the collection is
            # there already:
            # a. the collection is a first-level one that is managed by Sunfish
            # b. the collection was previously created during an agent discovery process and is therefore already marked
            # if "Oem" in payload and "Sunfish_RM" in payload["Oem"] and len(id) > 2:
            #     if "Oem" not in config:
            #         config["Oem"] = {}
            #     config["Oem"]["Sunfish_RM"] = payload["Oem"]["Sunfish_RM"]

            ## write file Resources/[folder]/index.json
            with open(os.path.join(collection_path, "index.json"), "w") as fd:
                fd.write(json.dumps(config, indent=4, sort_keys=True))
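
            # For reference, a minimal sketch of the collection document that
            # utils.generate_collection presumably produces here (assumed
            # shape, following the standard Redfish collection format):
            # {
            #     "@odata.id": "/redfish/v1/Systems",
            #     "@odata.type": "#ComputerSystemCollection.ComputerSystemCollection",
            #     "Members": [],
            #     "Members@odata.count": 0
            # }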
@@ -115,57 +130,65 @@ def write(self, payload:json):
            # check if the index.json representing the collection exists. In case it doesn't, it will create index.json with the collection template
            if os.path.exists(os.path.join(parent_path, "index.json")):
                collection_name = collection_type.split('/')[-1]
                utils.update_collections_parent_json(path=os.path.join(parent_path, "index.json"), type=collection_name,
                                                     link=self.redfish_root + collection_type)
            else:
                utils.generate_collection(collection_type)
        else:
            # checks if there is already a resource with the same id
            index_path = os.path.join(collection_path, "index.json")
            if utils.check_unique_id(index_path, payload['@odata.id']) is False:
                raise AlreadyExists(payload['@odata.id'])

        # creates the folder of the element and writes index.json (assuming that the payload is valid, I don't use any kind of template to write index.json)
        folder_id_path = os.path.join(collection_path, resource_id)  # .../Resources/[folder]/[id]

        # creates the folder of the element
        if not os.path.exists(folder_id_path):
            os.mkdir(folder_id_path)
            parent_path = os.path.join(*folder_id_path.split("/")[:-2])
            parent_json = "/" + os.path.join(parent_path, "index.json")
            root_path = os.path.join(os.getcwd(), self.root)
            if not os.path.exists(parent_json) and parent_path != root_path[1:]:
                logger.warning(
                    "You should not be here, this is creating an entire path where multiple folders are not existing")

        with open(os.path.join(folder_id_path, "index.json"), "w") as fd:
            fd.write(json.dumps(payload, indent=4, sort_keys=True))
            fd.close()

        json_collection_path = os.path.join(collection_path, 'index.json')

        # updates the collection with the new element created
        if os.path.exists(json_collection_path):
            utils.update_collections_json(path=json_collection_path, link=payload['@odata.id'])
        else:
            utils.generate_collection(collection_type)
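
        # Presumed effect of the branch above (comment added for clarity;
        # update_collections_json lives in sunfish.storage.utils): the new
        # resource's @odata.id is appended to the collection's Members array,
        # with Members@odata.count bumped to match (assumed, based on the
        # standard Redfish collection layout).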

        # Events have to be handled in a different way.
        # To check whether write() is called by an event subscription (EventDestination format), I check 'Destination'
        # because it is the only required property that other objects don't have.

        logger.info('BackendFS: [POST] success')
        return payload
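
    # Illustrative usage (comment added for clarity, not part of the original
    # file): a call such as
    #   backend.write({"@odata.id": "/redfish/v1/Systems/1234", ...})
    # creates Resources/Systems/1234/index.json holding the payload and
    # registers the new resource in its parent collection's index.json.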

    def replace(self, payload: dict):
        try:
            return self._update_object(payload, True)
        except ResourceNotFound as e:
            raise ResourceNotFound(e.resource_id)

    def patch(self, path: str, payload: dict):
        _object = self.read(path)
        _object.update(payload)
        try:
            return self._update_object(_object, False)
        except ResourceNotFound as e:
            raise ResourceNotFound(e.resource_id)
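
    # Note (comment added for clarity, not part of the original file): patch()
    # is a read-modify-write that merges the partial payload into the stored
    # object via dict.update() before delegating to _update_object(), while
    # replace() forwards the caller's full payload with the replace flag set.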

    def _update_object(self, payload: dict, replace: bool):
        """Writes the updated json file.

        Args:
@@ -186,12 +209,12 @@ def _update_object(self, payload:json, replace:bool):
        id = payload['@odata.id'][length:]
        id = id.split('/')

        last_element = len(id) - 1
        collection_type = id[last_element - 1]
        resource_id = id[last_element]
        full_collection = ''
        if len(id) > 2:
            for i in range(0, last_element - 1):
                full_collection = full_collection + id[i] + '/'

            collection_type = os.path.join(full_collection, collection_type)
@@ -203,7 +226,7 @@ def _update_object(self, payload:json, replace:bool):
            with open(path, 'r') as data_json:
                data = json.load(data_json)
                data_json.close()

            # Update the keys of payload in json file.
            for key, value in payload.items():
                data[key] = value
@@ -213,12 +236,12 @@ def _update_object(self, payload:json, replace:bool):
            with open(path, 'w') as f:
                json.dump(data, f, indent=4, sort_keys=True)
                f.close()

        except FileNotFoundError as e:
            raise ResourceNotFound(resource_id)

        result: dict = self.read(payload["@odata.id"])
        # result:str = payload['@odata.id']

        return result

@@ -241,25 +264,25 @@ def remove(self, path:str):
        length = len(self.redfish_root)
        resource_id = path[length:]

        full_path = os.path.join(os.getcwd(), self.root, resource_id)

        if len(resource_id) == 0:
            raise ActionNotAllowed()

        if os.path.exists(full_path) is False:
            raise ResourceNotFound(resource_id)

        parent_path = os.path.dirname(full_path)
        json_path = os.path.join(parent_path, 'index.json')
        shutil.rmtree(full_path)

        try:
            with open(json_path, "r") as file:
                pdata = json.load(file)
                file.close()

            data = {
                "@odata.id": os.path.join(self.redfish_root, resource_id)
            }
            collection_name = resource_id.split('/')[-1]
            if 'Members' in pdata and data in pdata['Members']:
@@ -268,47 +291,50 @@ def remove(self, path:str):
            elif collection_name in pdata:
                del pdata[collection_name]

            with open(json_path, "w") as file:
                json.dump(pdata, file, indent=4, sort_keys=True)
                file.close()

        except FileNotFoundError as e:
            raise ResourceNotFound(resource_id)
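
        # Illustrative effect (comment added for clarity; resource ids are
        # assumed): after removing /redfish/v1/Fabrics/CXL/Endpoints/E1, the
        # walk below strips {"@odata.id": "/redfish/v1/Fabrics/CXL/Endpoints/E1"}
        # from every Links array that references it, and deletes a Links entry
        # entirely once its list becomes empty.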

        # check links
        to_replace = False
        first = False

        for path, directories, files in os.walk(os.path.join(os.getcwd(), self.root)):
            if 'index.json' in files:
                file_path = os.path.join(path, 'index.json')

                with open(file_path, "r") as file:
                    pdata = json.load(file)
                    file.close()

                if 'Links' in pdata and path != os.path.join(os.getcwd(), self.root):
                    link_list = pdata['Links']
                    to_del = []
                    for link in link_list:
                        for x in link_list[link]:
                            if isinstance(link_list[link], list):
                                to_compare = ""
                                if type(x) is dict and "@odata.id" in x:
                                    to_compare = x['@odata.id']
                                elif type(x) is str:
                                    to_compare = x
                                if to_compare == os.path.join(self.redfish_root, resource_id):
                                    to_replace = True
                                    link_list[link].remove(x)
                                    if len(link_list[link]) == 0:
                                        to_del.append(link)
                            elif isinstance(link_list[link], dict):
                                if x == os.path.join(self.redfish_root, resource_id):
                                    to_del.append(link)
                                    to_replace = True
                    if to_del:
                        for el in to_del:
                            del link_list[el]
                if to_replace:
                    with open(file_path, "w") as file:
                        json.dump(pdata, file, indent=4, sort_keys=True)
                    to_replace = False

        return "DELETE: file removed."
