functions like getNode, create*Array etc. are not supported anymore by current versions of PyTables #8

Open
detosi opened this issue Oct 28, 2016 · 0 comments

detosi commented Oct 28, 2016

PyTables 3.0 renamed its camelCase methods (getNode, createArray, openFile, …) to PEP 8 snake_case names (get_node, create_array, open_file, …), and the old spellings are not supported anymore. This is the diff against my local copy with the changes that should be applied; see also the compatibility sketch after the patch.

Index: /home/detosi/IceCube/sandbox/dashi/dashi/storage.py
===================================================================
--- /home/detosi/IceCube/sandbox/dashi/dashi/storage.py (revision 46)
+++ /home/detosi/IceCube/sandbox/dashi/dashi/storage.py (working copy)
@@ -28,16 +28,16 @@
     """
     import tables
     with maybe_open_file(path, 'a') as file:
-        parentgroup = file.getNode(where)
+        parentgroup = file.get_node(where)

         if name in parentgroup._v_children:
             if not overwrite:
                 raise ValueError("there exists already a histogram with name %s" % name)
             else:
-                file.removeNode(parentgroup, name, recursive=True)
+                file.remove_node(parentgroup, name, recursive=True)

         # create a new group and store all necessary arrays into it
-        group = file.createGroup(where, name)
+        group = file.create_group(where, name)
         attr = group._v_attrs

         attr["ndim"]  = histo.ndim
@@ -49,16 +49,16 @@

         def save(arr, where):
             filters = tables.Filters(complib=complib, complevel=9)
-            ca = file.createCArray(group, where, tables.Atom.from_dtype(arr.dtype), arr.shape, filters=filters)
+            ca = file.create_carray(group, where, tables.Atom.from_dtype(arr.dtype), arr.shape, filters=filters)
             ca[:] = arr

         save(histo._h_bincontent, "_h_bincontent")
         save(histo._h_squaredweights, "_h_squaredweights")

-        # file.createArray(group, "_h_bincontent", histo._h_bincontent)
-        # file.createArray(group, "_h_squaredweights", histo._h_squaredweights)
+        # file.create_array(group, "_h_bincontent", histo._h_bincontent)
+        # file.create_array(group, "_h_squaredweights", histo._h_squaredweights)
         for dim in range(histo.ndim):
-            file.createArray(group, "_h_binedges_%d" % dim, histo._h_binedges[dim])
+            file.create_array(group, "_h_binedges_%d" % dim, histo._h_binedges[dim])
             attr["label_%d" % dim] = histo.labels[dim]


@@ -70,11 +70,11 @@
                     the latter, the file will be opened in read-only mode and
                     closed before the function returns
         histgroup : the group containing the histogram (the one created by histsave)
-                    can be anything what file.getNode accepts, eg. a string or a Group 
+                    can be anything that file.get_node accepts, e.g. a string or a Group 
                     object
     """
     with maybe_open_file(path) as file:
-        group = file.getNode(histgroup)
+        group = file.get_node(histgroup)
         attr = group._v_attrs
         histo = None

Index: /home/detosi/IceCube/sandbox/dashi/dashi/tests/datasets_test.py
===================================================================
--- /home/detosi/IceCube/sandbox/dashi/dashi/tests/datasets_test.py (revision 46)
+++ /home/detosi/IceCube/sandbox/dashi/dashi/tests/datasets_test.py (working copy)
@@ -24,8 +24,8 @@
         arr["y"] = n.arange(100, 0, -1, dtype=int)

         fname = "%s_%d.h5" % (name,j)
-        f = tables.openFile(fname, "a")
-        f.createTable("/", "test", arr)
+        f = tables.open_file(fname, "a")
+        f.create_table("/", "test", arr)
         f.close()

         datafiles[name].append(fname)
Index: /home/detosi/IceCube/sandbox/dashi/dashi/objbundleutils.py
===================================================================
--- /home/detosi/IceCube/sandbox/dashi/dashi/objbundleutils.py  (revision 46)
+++ /home/detosi/IceCube/sandbox/dashi/dashi/objbundleutils.py  (working copy)
@@ -95,9 +95,9 @@
         if isinstance(cfg, str):
             if ":" in cfg:
                 path, column = cfg.split(":")
-                arrays[varname] = h5file.getNode(path).col(column)
+                arrays[varname] = h5file.get_node(path).col(column)
             else:
-                arrays[varname] = h5file.getNode(cfg).read()
+                arrays[varname] = h5file.get_node(cfg).read()
         elif callable(cfg):
             args = inspect.getargspec(cfg).args
             if args == ["file"]:
@@ -112,7 +112,7 @@
     import tables
     assert isinstance(h5group, str)
     for key,array in bundle:
-        earr = file.createEArray(h5group, key,  
+        earr = file.create_earray(h5group, key,  
                                  tables.Atom.from_dtype(array.dtype), 
                                  (0,), filters=tables.Filters(complevel=6, complib="zlib"), createparents=True)
         earr.append(array)
@@ -123,7 +123,7 @@
     assert isinstance(h5group, str)
     arrays = dict()

-    for key in file.getNode(h5group)._v_children.keys():
-        arrays[key] = file.getNode(h5group+"/"+key).read()
+    for key in file.get_node(h5group)._v_children.keys():
+        arrays[key] = file.get_node(h5group+"/"+key).read()

     return bundle(**arrays)
Index: /home/detosi/IceCube/sandbox/dashi/dashi/datasets/hdf_datasets.py
===================================================================
--- /home/detosi/IceCube/sandbox/dashi/dashi/datasets/hdf_datasets.py   (revision 46)
+++ /home/detosi/IceCube/sandbox/dashi/dashi/datasets/hdf_datasets.py   (working copy)
@@ -25,9 +25,9 @@

     if ":" in path:
         path_, column = path.split(":")
-        return h5file.getNode(path_).col(column)
+        return h5file.get_node(path_).col(column)
     else:
-        return h5file.getNode(path).read()
+        return h5file.get_node(path).read()

 ################################################################################

@@ -41,12 +41,12 @@

     """
     toc = []
-    for node in h5file.walkNodes(classname="Table"):
+    for node in h5file.walk_nodes(classname="Table"):
         for varname in node.description._v_names:
             toc.append( "%s:%s" % (node._v_pathname, varname) )

     for arraytype in ["Array", "EArray"]:
-        for node in h5file.walkNodes(classname=arraytype):
+        for node in h5file.walk_nodes(classname=arraytype):
             toc.append( node._v_pathname ) 

     return sorted(toc)
@@ -76,14 +76,14 @@

 ################################################################################

 class HDFDataset(Dataset, tables.File):
     """
         wrapper around hdf files that can be attached to a hub
     """
     def __init__(self, filename, mode="r", expectedrows=1e6, filters=tables.Filters(complevel=6, complib="zlib"),
                  **kwargs):
         Dataset.__init__(self, os.path.basename(filename))
         tables.File.__init__(self, filename, mode, **kwargs)

         self._ds_expectedrows = expectedrows
         self._ds_filters = filters
@@ -101,12 +101,12 @@
             if not self.isopen:
                 return toc

-            for node in self.walkNodes(classname="Table"):
+            for node in self.walk_nodes(classname="Table"):
                 toc.append( node._v_pathname )
                 for varname in node.description._v_names:
                     toc.append( "%s:%s" % (node._v_pathname, varname) )

-            for node in self.walkNodes(classname="Array"):
+            for node in self.walk_nodes(classname="Array"):
                 toc.append( node._v_pathname ) 

             self._ds_toc_cache = sorted(toc)
@@ -127,12 +127,12 @@
             earr.append(array)
             earr.flush()
         else:
-            tab = self.createTable(parent, arrname, array, createparents=True, filters=self.filters)
+            tab = self.create_table(parent, arrname, array, createparents=True, filters=self.filters)
             tab.flush()

     def _ds_remove_variable(self, path):
         try:
-            self.removeNode(path)
+            self.remove_node(path)
             self.flush()
         except tables.NoSuchNodeError as exc:
             raise ValueError("removing path %s raised a NoSuchNodeError" % path)
@@ -171,4 +171,4 @@
         def _ds_read_variable(self, path):
             return read_variable(self, path)
 except ImportError:
-    pass
\ No newline at end of file
+    pass
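
For reference, all of these are mechanical renames: PyTables 3.0 switched its API from camelCase to PEP 8 snake_case (getNode → get_node, createArray → create_array, openFile → open_file, and so on), and current releases no longer accept the old spellings. Note that `tables.File` is still the class name in 3.x (`tables.file` is the module), so that line needs no change. PyTables also ships a pt2to3 script that is meant to apply these renames to a source tree automatically, which should reproduce essentially this patch. If dashi had to keep running against both API generations for a while, a small fallback shim would do. The sketch below is only an illustration, not part of the patch, and the helper name compat_get_node is made up:

import numpy as np
import tables

def compat_get_node(h5file, where):
    """Fetch a node via the 3.x API, falling back to the 2.x name.

    Hypothetical helper: getattr picks get_node when it exists
    (PyTables >= 3.0) and falls back to the camelCase getNode
    on PyTables 2.x.
    """
    getter = getattr(h5file, "get_node", None) or h5file.getNode
    return getter(where)

# Standalone check that the renamed 3.x calls used above behave as
# expected: open_file/create_table replace openFile/createTable, and
# passing a structured array both defines and fills the table.
arr = np.zeros(10, dtype=[("x", float), ("y", int)])
with tables.open_file("example.h5", "w") as f:
    f.create_table("/", "test", arr)
    print(compat_get_node(f, "/test").nrows)  # -> 10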
