change uuid to id for hardlinks
jreadey committed Feb 23, 2015
1 parent 6693255 commit 5ebe49b
Showing 36 changed files with 170 additions and 153 deletions.
4 changes: 2 additions & 2 deletions docs/GroupOps/GET_Link.rst
@@ -72,7 +72,7 @@ link["h5domain"]
For external links, the path of the external domain containing the object that is linked.
*Note:* The domain may or may not exist. Use GET / with the domain to verify.

link["uuid"]
link["id"]
^^^^^^^^^^^^
For hard links, the uuid of the object the link points to. For symbolic links this
element is not present
@@ -129,7 +129,7 @@ Sample Response - Hard Link
"title": "g1",
"collection": "groups",
"class": "H5L_TYPE_HARD",
"uuid": "052e001e-9d33-11e4-9a3d-3c15c2da029e"
"id": "052e001e-9d33-11e4-9a3d-3c15c2da029e"
},
"created": "2015-01-16T03:47:22Z",
"lastModified": "2015-01-16T03:47:22Z",
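A minimal client-side sketch of reading the renamed member, assuming the requests library and a local h5serv instance; the endpoint, domain, and group UUID below are placeholders rather than values from this commit:

import requests

# Placeholder values - substitute a real h5serv endpoint, domain, and group UUID.
endpoint = "http://127.0.0.1:5000"
domain = "tall.test.hdfgroup.org"
group_uuid = "00000000-0000-0000-0000-000000000000"

rsp = requests.get(endpoint + "/groups/" + group_uuid + "/links/g1",
                   headers={"host": domain})
rsp.raise_for_status()
link = rsp.json()["link"]

if link["class"] == "H5L_TYPE_HARD":
    # the target UUID is now returned under "id" rather than "uuid"
    print(link["collection"], link["id"])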
12 changes: 6 additions & 6 deletions docs/GroupOps/GET_Links.rst
@@ -102,7 +102,7 @@ Sample Response
"title": "g1.2.1",
"class": "H5L_TYPE_HARD",
"collection": "groups",
"uuid": "0ad38d45-a06f-11e4-a909-3c15c2da029e"
"id": "0ad38d45-a06f-11e4-a909-3c15c2da029e"
},
{
"title": "extlink",
@@ -147,11 +147,11 @@ Sample Response Batch
{
"links": [
{"title": "g0090", "uuid": "76c53485-a06e-11e4-96f3-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0091", "uuid": "76c54d40-a06e-11e4-a342-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0092", "uuid": "76c564f5-a06e-11e4-bccd-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0093", "uuid": "76c57d19-a06e-11e4-a9a8-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0094", "uuid": "76c5941c-a06e-11e4-b641-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}
{"title": "g0090", "id": "76c53485-a06e-11e4-96f3-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0091", "id": "76c54d40-a06e-11e4-a342-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0092", "id": "76c564f5-a06e-11e4-bccd-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0093", "id": "76c57d19-a06e-11e4-a9a8-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"},
{"title": "g0094", "id": "76c5941c-a06e-11e4-b641-3c15c2da029e", "class": "H5L_TYPE_HARD", "collection": "groups"}
],
"hrefs": [
{"href": "http://group1k.test.hdfgroup.org/groups/76bddb1e-a06e-11e4-86d6-3c15c2da029e/links", "rel": "self"},
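A short sketch of consuming a links listing like the batch response above; only hard links carry the "id" member, so the class is checked first (rsp_json is assumed to hold the parsed response body):

# rsp_json is assumed to be the parsed JSON body of a GET .../links response.
hard_link_ids = []
for link in rsp_json["links"]:
    if link["class"] == "H5L_TYPE_HARD":
        # "id" replaces the former "uuid" member for hard links
        hard_link_ids.append((link["title"], link["id"]))
    # soft and external links have no target id and are skipped here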
2 changes: 1 addition & 1 deletion docs/Utilities.rst
@@ -18,7 +18,7 @@ hdf5_file is a file from the h5serv data directory. Output is a list of All UUI
a path to the associated object.

exportjson.py
-----------
-------------

This script makes a series of rest requests to the desired h5serv endpoint and
constructs a JSON file representing the domain's contents.
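A rough sketch of the approach the description implies - walking the domain over REST and writing the result to JSON. This is an illustration only, not the actual exportjson.py; the endpoint, domain, output filename, and the assumption that GET / returns the root group UUID under "root" are hypothetical here:

import json
import requests

# Hypothetical values - not taken from exportjson.py itself.
endpoint = "http://127.0.0.1:5000"
headers = {"host": "tall.test.hdfgroup.org"}

out = {}
root_uuid = requests.get(endpoint + "/", headers=headers).json()["root"]
out["root"] = root_uuid
out["links"] = requests.get(endpoint + "/groups/" + root_uuid + "/links",
                            headers=headers).json()["links"]

with open("domain.json", "w") as f:
    json.dump(out, f, indent=4)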
1 change: 1 addition & 0 deletions server/app.py
@@ -12,6 +12,7 @@
import time
import signal
import logging
import logging.handlers
import os
import os.path as op
import json
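The added import matters because "import logging" alone does not load the logging.handlers submodule; it must be imported explicitly before a handler class from it can be used. A sketch of the kind of setup this enables - the handler choice, filename, and limits here are illustrative, not the server's actual configuration:

import logging
import logging.handlers  # not pulled in by "import logging" alone

log = logging.getLogger("h5serv")
handler = logging.handlers.RotatingFileHandler("h5serv.log",
                                               maxBytes=1024 * 1024,
                                               backupCount=5)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
log.addHandler(handler)
log.setLevel(logging.INFO)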
8 changes: 4 additions & 4 deletions server/hdf5db.py
@@ -1289,16 +1289,16 @@ def getLinkItemByObj(self, parent, link_name):
obj = parent[link_name]
addr = h5py.h5o.get_info(obj.id).addr
item['class'] = 'H5L_TYPE_HARD'
item['uuid'] = self.getUUIDByAddress(addr)
item['id'] = self.getUUIDByAddress(addr)
class_name = obj.__class__.__name__
if class_name == 'Dataset':
item['href'] = 'datasets/' + item['uuid']
item['href'] = 'datasets/' + item['id']
item['collection'] = 'datasets'
elif class_name == 'Group':
item['href'] = 'groups/' + item['uuid']
item['href'] = 'groups/' + item['id']
item['collection'] = 'groups'
elif class_name == 'Datatype':
item['href'] = 'datatypes/' + item['uuid']
item['href'] = 'datatypes/' + item['id']
item['collection'] = 'datatypes'
else:
self.log.warning("unexpected object type: " + item['type'])
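For reference, the shape of the dictionary the updated code builds for a hard link to a dataset - the UUID value is a placeholder, and members set elsewhere in the method (such as the link title) are omitted:

# Illustrative result of getLinkItemByObj for a hard link to a dataset.
item = {
    "class": "H5L_TYPE_HARD",
    "id": "00000000-0000-0000-0000-000000000000",  # placeholder UUID
    "href": "datasets/00000000-0000-0000-0000-000000000000",
    "collection": "datasets",
}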
2 changes: 1 addition & 1 deletion test/integ/helper.py
@@ -63,7 +63,7 @@ def getUUID(domain, parentUuid, name):
if target['class'] != 'H5L_TYPE_HARD':
# soft/external links
return None
tgtUuid = target['uuid']
tgtUuid = target['id']

return tgtUuid
"""
7 changes: 5 additions & 2 deletions test/integ/linktest.py
@@ -36,7 +36,7 @@ def testGetHard(self):
self.assertTrue("lastModified" in rspJson)
self.assertTrue('link' in rspJson)
target = rspJson['link']
self.assertTrue(helper.validateId(target['uuid']))
self.assertTrue(helper.validateId(target['id']))
self.failUnlessEqual(target['class'], 'H5L_TYPE_HARD')
self.failUnlessEqual(target['title'], 'g1')
self.failUnlessEqual(target['collection'], 'groups')
@@ -152,7 +152,7 @@ def testGetBatch(self):
self.failUnlessEqual(len(names), 1000) # should get 1000 unique links


#Fix - This is crazy slow!
#Fix - This needs to be made more efficient - when deleting links, the code now
# searches all objects to see if the linked target needs to be made anonymous or not.
# idea: keep back pointers for all links?
# Tracked as Issue #12 in Github
"""
def testMoveLinks(self):
logging.info("LinkTest.testMoveLinks")
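The new comment above sketches a possible optimization: keep back pointers from each link target to the links that reference it, so deleting a link does not require scanning every object. A purely hypothetical illustration of that idea - nothing like this exists in this commit:

# target id -> set of (parent group id, link title) referencing it
back_pointers = {}

def add_back_pointer(group_id, title, target_id):
    back_pointers.setdefault(target_id, set()).add((group_id, title))

def remove_back_pointer(group_id, title, target_id):
    refs = back_pointers.get(target_id, set())
    refs.discard((group_id, title))
    # True means no hard links remain and the target could be made anonymous
    # without scanning every object
    return len(refs) == 0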
12 changes: 6 additions & 6 deletions test/unit/hdf5dbTest.py
@@ -65,7 +65,7 @@ def testGetUUIDByPath(self):
g1links = db.getLinkItems(g1Uuid)
self.failUnlessEqual(len(g1links), 2)
for item in g1links:
self.failUnlessEqual(len(item['uuid']), config.get('uuidlen'))
self.failUnlessEqual(len(item['id']), config.get('uuidlen'))

# end of with will close file
# open again and verify we can get obj by name
@@ -137,7 +137,7 @@ def testGetItemHardLink(self):
with Hdf5db('tall.h5') as db:
grpUuid = db.getUUIDByPath('/g1/g1.1')
item = db.getLinkItemByUuid(grpUuid, "dset1.1.1")
self.assertTrue('uuid' in item)
self.assertTrue('id' in item)
self.assertEqual(item['title'], 'dset1.1.1')
self.assertEqual(item['class'], 'H5L_TYPE_HARD')
self.assertEqual(item['collection'], 'datasets')
@@ -149,7 +149,7 @@ def testGetItemSoftLink(self):
with Hdf5db('tall.h5') as db:
grpUuid = db.getUUIDByPath('/g1/g1.2/g1.2.1')
item = db.getLinkItemByUuid(grpUuid, "slink")
self.assertTrue('uuid' not in item)
self.assertTrue('id' not in item)
self.assertEqual(item['title'], 'slink')
self.assertEqual(item['class'], 'H5L_TYPE_SOFT')
self.assertEqual(item['h5path'], 'somevalue')
@@ -205,8 +205,8 @@ def testGetLinks(self):
elif item['class'] == 'H5L_TYPE_EXTERNAL':
externalLink = item
self.assertEqual(hardLink['collection'], 'groups')
self.assertTrue('uuid' in hardLink)
self.assertTrue('uuid' not in externalLink)
self.assertTrue('id' in hardLink)
self.assertTrue('id' not in externalLink)
self.assertEqual(externalLink['h5path'], 'somepath')
self.assertEqual(externalLink['file'], 'somefile')

@@ -261,7 +261,7 @@ def testReadOnlyGetUUID(self):
g1links = db.getLinkItems(g1Uuid)
self.failUnlessEqual(len(g1links), 2)
for item in g1links:
self.failUnlessEqual(len(item['uuid']), config.get('uuidlen'))
self.failUnlessEqual(len(item['id']), config.get('uuidlen'))

def testReadDataset(self):
getFile('tall.h5')
4 changes: 2 additions & 2 deletions testjson/array_dset.json
@@ -126,8 +126,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "146c9fb5-7b51-11e4-a549-3c15c2da029e"
"id": "146c9fb5-7b51-11e4-a549-3c15c2da029e",
"title": "DS1"
}
]
}
4 changes: 2 additions & 2 deletions testjson/arraytype.json
@@ -1034,8 +1034,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "array",
"uuid": "9348ad51-7bf7-11e4-a66f-3c15c2da029e"
"id": "9348ad51-7bf7-11e4-a66f-3c15c2da029e",
"title": "array"
}
]
}
8 changes: 4 additions & 4 deletions testjson/committed_type.json
@@ -106,14 +106,14 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "25d6f985-7b51-11e4-a037-3c15c2da029e"
"id": "25d6f985-7b51-11e4-a037-3c15c2da029e",
"title": "DS1"
},
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "Sensor_Type",
"uuid": "25d7088f-7b51-11e4-ac9b-3c15c2da029e"
"id": "25d7088f-7b51-11e4-ac9b-3c15c2da029e",
"title": "Sensor_Type"
}
]
}
4 changes: 2 additions & 2 deletions testjson/compound.json
@@ -589,8 +589,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "dset",
"uuid": "2fd9e630-7b51-11e4-b2b8-3c15c2da029e"
"id": "2fd9e630-7b51-11e4-b2b8-3c15c2da029e",
"title": "dset"
}
]
}
20 changes: 10 additions & 10 deletions testjson/compound_committed.json
@@ -460,14 +460,14 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "dset",
"uuid": "2f4ba161-8270-11e4-9117-3c15c2da029e"
"id": "2f4ba161-8270-11e4-9117-3c15c2da029e",
"title": "dset"
},
{
"class": "H5L_TYPE_HARD",
"collection": "groups",
"title": "types",
"uuid": "2f4bb178-8270-11e4-a4e7-3c15c2da029e"
"id": "2f4bb178-8270-11e4-a4e7-3c15c2da029e",
"title": "types"
}
]
},
@@ -479,20 +479,20 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "date",
"uuid": "2f4bbf2e-8270-11e4-b46d-3c15c2da029e"
"id": "2f4bbf2e-8270-11e4-b46d-3c15c2da029e",
"title": "date"
},
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "temp",
"uuid": "2f4bcbcc-8270-11e4-97b3-3c15c2da029e"
"id": "2f4bcbcc-8270-11e4-97b3-3c15c2da029e",
"title": "temp"
},
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "time",
"uuid": "2f4bd7d4-8270-11e4-a886-3c15c2da029e"
"id": "2f4bd7d4-8270-11e4-a886-3c15c2da029e",
"title": "time"
}
]
}
4 changes: 2 additions & 2 deletions testjson/enum_attr.json
@@ -86,8 +86,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "a615cf0a-7b51-11e4-bc7e-3c15c2da029e"
"id": "a615cf0a-7b51-11e4-bc7e-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/enum_dset.json
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "bce5b4dc-7b51-11e4-999f-3c15c2da029e"
"id": "bce5b4dc-7b51-11e4-999f-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/fillvalue.json
Original file line number Diff line number Diff line change
Expand Up @@ -148,8 +148,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "dset",
"uuid": "f5789a87-7b80-11e4-ba61-3c15c2da029e"
"id": "f5789a87-7b80-11e4-ba61-3c15c2da029e",
"title": "dset"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/fixed_string_attr.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "08baa31e-7b81-11e4-9c29-3c15c2da029e"
"id": "08baa31e-7b81-11e4-9c29-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/fixed_string_dset.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "99d645d1-7bf7-11e4-92bc-3c15c2da029e"
"id": "99d645d1-7bf7-11e4-92bc-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
8 changes: 4 additions & 4 deletions testjson/namedtype.json
Original file line number Diff line number Diff line change
Expand Up @@ -59,14 +59,14 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "dtype_compound",
"uuid": "9eb9cf05-7bf7-11e4-8dda-3c15c2da029e"
"id": "9eb9cf05-7bf7-11e4-8dda-3c15c2da029e",
"title": "dtype_compound"
},
{
"class": "H5L_TYPE_HARD",
"collection": "datatypes",
"title": "dtype_simple",
"uuid": "9eb9ddab-7bf7-11e4-927d-3c15c2da029e"
"id": "9eb9ddab-7bf7-11e4-927d-3c15c2da029e",
"title": "dtype_simple"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/null_objref_dset.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "9faa470f-7bf7-11e4-8eb6-3c15c2da029e"
"id": "9faa470f-7bf7-11e4-8eb6-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
4 changes: 2 additions & 2 deletions testjson/nullspace_dset.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "23d3e919-7b53-11e4-961d-3c15c2da029e"
"id": "23d3e919-7b53-11e4-961d-3c15c2da029e",
"title": "DS1"
}
]
}
Expand Down
12 changes: 6 additions & 6 deletions testjson/objref_attr.json
Original file line number Diff line number Diff line change
Expand Up @@ -55,20 +55,20 @@
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS1",
"uuid": "a09a7f14-7bf7-11e4-8eef-3c15c2da029e"
"id": "a09a7f14-7bf7-11e4-8eef-3c15c2da029e",
"title": "DS1"
},
{
"class": "H5L_TYPE_HARD",
"collection": "datasets",
"title": "DS2",
"uuid": "a09a8efa-7bf7-11e4-9fb6-3c15c2da029e"
"id": "a09a8efa-7bf7-11e4-9fb6-3c15c2da029e",
"title": "DS2"
},
{
"class": "H5L_TYPE_HARD",
"collection": "groups",
"title": "G1",
"uuid": "a09a9b99-7bf7-11e4-aa4b-3c15c2da029e"
"id": "a09a9b99-7bf7-11e4-aa4b-3c15c2da029e",
"title": "G1"
}
]
},
