|
| 1 | +import shutil |
| 2 | +from io import BytesIO |
| 3 | +from pathlib import Path, PurePath |
| 4 | + |
| 5 | +from fsspec import AbstractFileSystem |
| 6 | + |
| 7 | +from pins.errors import PinsError |
| 8 | + |
| 9 | + |
class DatabricksFs(AbstractFileSystem):
    """fsspec-compatible filesystem over Databricks volume files.

    Implements only the operations pins needs; unsupported argument
    combinations raise NotImplementedError. ``databricks-sdk`` is imported
    lazily inside each method so it remains an optional dependency.
    """

    protocol = "dbc"

    def ls(self, path, detail=False, **kwargs):
        """List ``path``; returns names, or detail dicts when ``detail`` is true."""
        return self._databricks_ls(path, detail)

    def exists(self, path: str, **kwargs):
        """Return True if a file or directory exists at ``path``."""
        return self._databricks_exists(path)

    def open(self, path: str, mode: str = "rb", *args, **kwargs):
        """Open a remote file for reading; only binary read mode is supported."""
        if mode != "rb":
            raise NotImplementedError
        return self._databricks_open(path)

    def get(self, rpath, lpath, recursive=False, **kwargs):
        """Copy the remote file or directory ``rpath`` to local path ``lpath``."""
        self._databricks_get(rpath, lpath, recursive=recursive, **kwargs)

    def mkdir(self, path, create_parents=True, **kwargs):
        """Create a directory at ``path`` (parents are always created)."""
        if not create_parents:
            raise NotImplementedError
        self._databricks_mkdir(path)

    def put(
        self,
        lpath,
        rpath,
        recursive=True,
        maxdepth=None,
        **kwargs,
    ):
        """Upload the local tree at ``lpath`` to ``rpath`` (always recursive)."""
        if not recursive:
            raise NotImplementedError
        if maxdepth is not None:
            raise NotImplementedError
        self._databricks_put(lpath, rpath)

    def rm(self, path, recursive=True, maxdepth=None) -> None:
        """Recursively delete ``path`` if it exists; silently no-op otherwise."""
        if not recursive:
            raise NotImplementedError
        if maxdepth is not None:
            raise NotImplementedError
        if self._databricks_exists(path):
            self._databricks_rm_dir(path)

    @staticmethod
    def _databricks_put(lpath, rpath):
        """Recursively upload every file under ``lpath`` into ``rpath``."""
        from databricks.sdk import WorkspaceClient

        w = WorkspaceClient()
        orig_path = Path(lpath).absolute()

        def _upload_files(path):
            for item in Path(path).iterdir():
                if item.is_file():
                    rel_path = item.relative_to(orig_path)
                    db_path = PurePath(rpath).joinpath(rel_path)
                    # BUG FIX: the original leaked the file handle (open()
                    # without close); a context manager closes it reliably.
                    with open(item, "rb") as f:
                        w.files.upload(str(db_path), BytesIO(f.read()), overwrite=True)
                else:
                    _upload_files(item)

        _upload_files(orig_path)

    def _databricks_get(self, rpath, lpath, recursive=False, **kwargs):
        """Download ``rpath`` to ``lpath``, walking directories (recursing on demand).

        BUG FIX: the original signature took an extra leading ``board``
        parameter, so the positional call from ``get()`` shifted every
        argument by one (``board=rpath``, ``rpath=lpath``,
        ``lpath=recursive``); its ``board.fs.get(...)`` calls would also
        re-enter ``get()`` and recurse forever. The helper now matches its
        caller and downloads through the SDK directly.
        """
        from databricks.sdk import WorkspaceClient

        w = WorkspaceClient()

        def _download_file(remote, local):
            # Ensure the local parent directory exists, then stream to disk.
            Path(local).parent.mkdir(parents=True, exist_ok=True)
            resp = w.files.download(remote)
            with open(local, "wb") as f:
                shutil.copyfileobj(resp.contents, f)

        if self._databricks_is_type(rpath) == "file":
            _download_file(rpath, lpath)
            return

        def _get_files(path):
            contents = list(w.files.list_directory_contents(path))
            for item in map(self._databricks_content_details, contents):
                item_path = item.get("path")
                if item.get("is_directory"):
                    if recursive:
                        _get_files(item_path)
                else:
                    rel_path = PurePath(item_path).relative_to(rpath)
                    target_path = PurePath(lpath).joinpath(rel_path)
                    _download_file(item_path, str(target_path))

        _get_files(rpath)

    def _databricks_open(self, path):
        """Return a seekable BytesIO holding the full contents of a remote file.

        Raises PinsError when ``path`` does not exist.
        """
        from databricks.sdk import WorkspaceClient

        if not self._databricks_exists(path):
            raise PinsError(f"File or directory does not exist at path: {path}")
        w = WorkspaceClient()
        resp = w.files.download(path)
        f = BytesIO()
        shutil.copyfileobj(resp.contents, f)
        f.seek(0)
        return f

    def _databricks_exists(self, path: str):
        """True when ``path`` resolves to either a file or a directory."""
        return self._databricks_is_type(path) != "nothing"

    @staticmethod
    def _databricks_is_type(path: str):
        """Classify ``path`` as "file", "directory", or "nothing" (absent)."""
        from databricks.sdk import WorkspaceClient
        from databricks.sdk.errors import NotFound

        w = WorkspaceClient()
        try:
            w.files.get_metadata(path)
        except NotFound:
            # Not a file; it may still be a directory.
            try:
                w.files.get_directory_metadata(path)
            except NotFound:
                return "nothing"
            else:
                return "directory"
        else:
            return "file"

    def _databricks_ls(self, path, detail):
        """Implementation behind ``ls``; raises PinsError for missing paths."""
        from databricks.sdk import WorkspaceClient

        if not self._databricks_exists(path):
            raise PinsError(f"File or directory does not exist at path: {path}")
        w = WorkspaceClient()
        if self._databricks_is_type(path) == "file":
            if detail:
                return [dict(name=path, size=None, type="file")]
            else:
                # BUG FIX: ls() must return a list per the fsspec contract;
                # the original returned the bare string, so iterating the
                # result yielded single characters.
                return [path]

        items = []
        for entry in w.files.list_directory_contents(path):
            info = self._databricks_content_details(entry)
            item_path = info.get("path").rstrip("/")
            if detail:
                item_type = "directory" if info.get("is_directory") else "file"
                items.append(dict(name=item_path, size=None, type=item_type))
            else:
                items.append(item_path)
        return items

    def _databricks_rm_dir(self, path):
        """Depth-first delete of the directory tree rooted at ``path``."""
        from databricks.sdk import WorkspaceClient

        w = WorkspaceClient()
        contents = list(w.files.list_directory_contents(path))
        for item in map(self._databricks_content_details, contents):
            item_path = item.get("path")
            if item.get("is_directory"):
                self._databricks_rm_dir(item_path)
            else:
                w.files.delete(item_path)
        # Children are gone; the (now empty) directory itself can be removed.
        w.files.delete_directory(path)

    @staticmethod
    def _databricks_mkdir(path):
        """Create a directory (and any missing parents) at ``path``."""
        from databricks.sdk import WorkspaceClient

        w = WorkspaceClient()
        w.files.create_directory(path)

    @staticmethod
    def _databricks_content_details(item):
        """Normalize an SDK DirectoryEntry into a plain dict."""
        return {
            "path": item.path,
            "name": item.name,
            "is_directory": item.is_directory,
        }
0 commit comments