
filesystem_store

Attributes

logger = structlog.getLogger() module-attribute

VALUE_DETAILS_FILE_NAME = 'value.json' module-attribute

DEFAULT_HASHFS_DEPTH = 4 module-attribute

DEFAULT_HASHFS_WIDTH = 1 module-attribute

DEFAULT_HASH_FS_ALGORITHM = 'sha256' module-attribute
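These three defaults parameterize the content-addressed chunk store used by the archives below: chunks are sharded under four nested directory levels of one hex character each and addressed by their sha256 digest. A minimal sketch of that layout, assuming the standalone hashfs package (whose HashFS class the one used here mirrors); the target directory and payload are illustrative only:

import hashlib
from io import BytesIO

from hashfs import HashFS  # assumption: the pip-installable 'hashfs' package

# mirror the module defaults: depth=4, width=1, algorithm='sha256'
store = HashFS("/tmp/example_hash_fs", depth=4, width=1, algorithm="sha256")

payload = b"some chunk of serialized value data"
address = store.put(BytesIO(payload))

# the chunk id is the sha256 hex digest of the content; the file is sharded
# under one directory level per leading hex character, e.g. a/b/c/d/abcd...
assert address.id == hashlib.sha256(payload).hexdigest()
print(address.relpath)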

Classes

EntityType

Bases: Enum

Source code in kiara/registries/data/data_store/filesystem_store.py (lines 41-47)
class EntityType(Enum):

    VALUE = "values"
    VALUE_DATA = "value_data"
    ENVIRONMENT = "environments"
    MANIFEST = "manifests"
    DESTINY_LINK = "destiny_links"

Attributes

VALUE = 'values' class-attribute
VALUE_DATA = 'value_data' class-attribute
ENVIRONMENT = 'environments' class-attribute
MANIFEST = 'manifests' class-attribute
DESTINY_LINK = 'destiny_links' class-attribute

FileSystemDataArchive

Bases: DataArchive, JobArchive

Data store that loads data from the local filesystem.
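
A minimal usage sketch, assuming FileSystemArchiveConfig is importable from this module and accepts the archive_path field referenced in data_store_path below; paths and ids are illustrative only:

import uuid

# assumption: both classes are importable from this module and the config
# model exposes the 'archive_path' field used by data_store_path
from kiara.registries.data.data_store.filesystem_store import (
    FileSystemArchiveConfig,
    FileSystemDataArchive,
)

config = FileSystemArchiveConfig(archive_path="/tmp/example_kiara_archive")
archive = FileSystemDataArchive(archive_id=uuid.uuid4(), config=config)

print(archive.is_writeable())         # False: archives are read-only
print(archive.data_store_path)        # base folder, created on first access
print(archive.get_archive_details())  # total size of all files in the archive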

Source code in kiara/registries/data/data_store/filesystem_store.py (lines 55-341)
class FileSystemDataArchive(DataArchive, JobArchive):
    """Data store that loads data from the local filesystem."""

    _archive_type_name = "filesystem_data_archive"
    _config_cls = FileSystemArchiveConfig  # type: ignore

    # @classmethod
    # def supported_item_types(cls) -> Iterable[str]:
    #
    #     return ["data", "job_record"]

    @classmethod
    def is_writeable(cls) -> bool:
        return False

    def __init__(self, archive_id: uuid.UUID, config: FileSystemArchiveConfig):

        DataArchive.__init__(self, archive_id=archive_id, config=config)
        self._base_path: Union[Path, None] = None
        self._hashfs_path: Union[Path, None] = None
        self._hashfs: Union[HashFS, None] = None

    # def get_job_archive_id(self) -> uuid.UUID:
    #     return self._kiara.id

    def get_archive_details(self) -> ArchiveDetails:

        size = sum(
            f.stat().st_size for f in self.data_store_path.glob("**/*") if f.is_file()
        )
        return ArchiveDetails(size=size)

    @property
    def data_store_path(self) -> Path:

        if self._base_path is not None:
            return self._base_path

        self._base_path = Path(self.config.archive_path).absolute()  # type: ignore
        self._base_path = fix_windows_longpath(self._base_path)
        self._base_path.mkdir(parents=True, exist_ok=True)
        return self._base_path

    def _delete_archive(self):
        shutil.rmtree(self.data_store_path)

    @property
    def hash_fs_path(self) -> Path:

        if self._hashfs_path is None:
            self._hashfs_path = self.data_store_path / "hash_fs"
        return self._hashfs_path

    @property
    def hashfs(self) -> HashFS:

        if self._hashfs is None:
            self._hashfs = HashFS(
                self.hash_fs_path.as_posix(),
                depth=DEFAULT_HASHFS_DEPTH,
                width=DEFAULT_HASHFS_WIDTH,
                algorithm=DEFAULT_HASH_FS_ALGORITHM,
            )
        return self._hashfs

    def get_path(
        self,
        entity_type: Union[EntityType, None] = None,
        base_path: Union[Path, None] = None,
    ) -> Path:
        if base_path is None:
            if entity_type is None:
                result = self.data_store_path
            else:
                result = self.data_store_path / entity_type.value
        else:
            if entity_type is None:
                result = base_path
            else:
                result = base_path / entity_type.value

        result.mkdir(parents=True, exist_ok=True)
        return result

    def _retrieve_environment_details(
        self, env_type: str, env_hash: str
    ) -> Mapping[str, Any]:

        base_path = self.get_path(entity_type=EntityType.ENVIRONMENT)
        env_details_file = base_path / f"{env_type}_{env_hash}.json"

        if not env_details_file.exists():
            raise Exception(
                f"Can't load environment details, file does not exist: {env_details_file.as_posix()}"
            )

        environment = orjson.loads(env_details_file.read_text())
        return environment

    def retrieve_all_job_hashes(
        self,
        manifest_hash: Union[str, None] = None,
        inputs_hash: Union[str, None] = None,
    ) -> Iterable[str]:

        raise NotImplementedError()

    def _retrieve_record_for_job_hash(self, job_hash: str) -> JobRecord:

        raise NotImplementedError()

    # def find_matching_job_record(
    #     self, inputs_manifest: InputsManifest
    # ) -> Optional[JobRecord]:
    #
    #     manifest_hash = str(inputs_manifest.instance_cid)
    #     jobs_hash = inputs_manifest.job_hash
    #
    #     base_path = self.get_path(entity_type=EntityType.MANIFEST)
    #     manifest_folder = base_path / str(manifest_hash)
    #
    #     if not manifest_folder.exists():
    #         return None
    #
    #     manifest_file = manifest_folder / "manifest.json"
    #
    #     if not manifest_file.exists():
    #         raise Exception(
    #             f"No 'manifests.json' file for manifest with hash: {manifest_hash}"
    #         )
    #
    #     manifest_data = orjson.loads(manifest_file.read_text())
    #
    #     job_folder = manifest_folder / jobs_hash
    #
    #     if not job_folder.exists():
    #         return None
    #
    #     inputs_file_name = job_folder / "inputs.json"
    #     if not inputs_file_name.exists():
    #         raise Exception(
    #             f"No 'inputs.json' file for manifest/inputs hash-combo: {manifest_hash} / {jobs_hash}"
    #         )
    #
    #     inputs_data = {
    #         k: uuid.UUID(v)
    #         for k, v in orjson.loads(inputs_file_name.read_text()).items()
    #     }
    #
    #     outputs = {}
    #     for output_file in job_folder.glob("output__*.json"):
    #         full_output_name = output_file.name[8:]
    #         start_value_id = full_output_name.find("__value_id__")
    #         output_name = full_output_name[0:start_value_id]
    #         value_id_str = full_output_name[start_value_id + 12 : -5]
    #
    #         value_id = uuid.UUID(value_id_str)
    #         outputs[output_name] = value_id
    #
    #     job_id = ID_REGISTRY.generate(obj_type=JobRecord, desc="fake job id")
    #     job_record = JobRecord(
    #         job_id=job_id,
    #         module_type=manifest_data["module_type"],
    #         module_config=manifest_data["module_config"],
    #         inputs=inputs_data,
    #         outputs=outputs,
    #     )
    #     return job_record

    def _find_values_with_hash(
        self,
        value_hash: str,
        value_size: Union[int, None] = None,
        data_type_name: Union[str, None] = None,
    ) -> Set[uuid.UUID]:

        value_data_folder = self.get_path(entity_type=EntityType.VALUE_DATA)

        glob = f"*/{value_hash}/value_id__*.json"

        matches = list(value_data_folder.glob(glob))

        result = set()
        for match in matches:
            if not match.is_symlink():
                log_message(
                    f"Ignoring value_id file, not a symlink: {match.as_posix()}"
                )
                continue

            uuid_str = match.name[10:-5]
            value_id = uuid.UUID(uuid_str)
            result.add(value_id)

        return result

    def _find_destinies_for_value(
        self, value_id: uuid.UUID, alias_filter: Union[str, None] = None
    ) -> Union[Mapping[str, uuid.UUID], None]:

        destiny_dir = self.get_path(entity_type=EntityType.DESTINY_LINK)
        destiny_value_dir = destiny_dir / str(value_id)

        if not destiny_value_dir.exists():
            return None

        destinies = {}
        for alias_link in destiny_value_dir.glob("*.json"):
            assert alias_link.is_symlink()

            alias = alias_link.name[0:-5]
            resolved = alias_link.resolve()

            value_id_str = resolved.parent.name
            value_id = uuid.UUID(value_id_str)
            destinies[alias] = value_id

        return destinies

    def _retrieve_all_value_ids(
        self, data_type_name: Union[str, None] = None
    ) -> Iterable[uuid.UUID]:

        if data_type_name is not None:
            raise NotImplementedError()

        childs = self.get_path(entity_type=EntityType.VALUE).glob("*")
        folders = [uuid.UUID(x.name) for x in childs if x.is_dir()]
        return folders

    def has_value(self, value_id: uuid.UUID) -> bool:
        """Check whether the specific value_id is persisted in this data store.
        way to quickly determine whether a value id is valid for this data store.

        Arguments:
            value_id: the id of the value to check.
        Returns:
            whether this data store contains the value with the specified id
        """

        base_path = (
            self.get_path(entity_type=EntityType.VALUE)
            / str(value_id)
            / VALUE_DETAILS_FILE_NAME
        )
        return base_path.is_file()

    def _retrieve_value_details(self, value_id: uuid.UUID) -> Mapping[str, Any]:

        base_path = (
            self.get_path(entity_type=EntityType.VALUE)
            / str(value_id)
            / VALUE_DETAILS_FILE_NAME
        )
        if not base_path.is_file():
            raise Exception(
                f"Can't retrieve details for value with id '{value_id}': no value with that id stored."
            )

        value_data = orjson.loads(base_path.read_text())
        return value_data

    def _retrieve_serialized_value(self, value: Value) -> PersistedData:

        base_path = self.get_path(entity_type=EntityType.VALUE_DATA)
        data_dir = base_path / value.data_type_name / str(value.value_hash)

        serialized_value_file = data_dir / ".serialized_value.json"
        data = orjson.loads(serialized_value_file.read_text())

        return PersistedData(**data)

    def retrieve_chunk(
        self,
        chunk_id: str,
        as_file: Union[bool, str, None] = None,
        symlink_ok: bool = True,
    ) -> Union[bytes, str]:

        addr = self.hashfs.get(chunk_id)

        if as_file in (None, True):
            return addr.abspath
        elif as_file is False:
            return Path(addr.abspath).read_bytes()
        else:
            raise NotImplementedError()

Attributes

_config_cls = FileSystemArchiveConfig class-attribute
data_store_path: Path property
hash_fs_path: Path property
hashfs: HashFS property

Functions

is_writeable() -> bool classmethod
Source code in kiara/registries/data/data_store/filesystem_store.py (lines 66-68)
@classmethod
def is_writeable(cls) -> bool:
    return False
get_archive_details() -> ArchiveDetails
Source code in kiara/registries/data/data_store/filesystem_store.py (lines 80-85)
def get_archive_details(self) -> ArchiveDetails:

    size = sum(
        f.stat().st_size for f in self.data_store_path.glob("**/*") if f.is_file()
    )
    return ArchiveDetails(size=size)
get_path(entity_type: Union[EntityType, None] = None, base_path: Union[Path, None] = None) -> Path
Source code in kiara/registries/data/data_store/filesystem_store.py (lines 120-137)
def get_path(
    self,
    entity_type: Union[EntityType, None] = None,
    base_path: Union[Path, None] = None,
) -> Path:
    if base_path is None:
        if entity_type is None:
            result = self.data_store_path
        else:
            result = self.data_store_path / entity_type.value
    else:
        if entity_type is None:
            result = base_path
        else:
            result = base_path / entity_type.value

    result.mkdir(parents=True, exist_ok=True)
    return result
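
Each entity type resolves to its own subfolder beneath the archive's base path, and the folder is created on first access. A small illustrative sketch of that mapping, using the enum defined above:

from kiara.registries.data.data_store.filesystem_store import EntityType

# get_path(entity_type=...) resolves to <archive_path>/<entity_type.value>
for entity_type in EntityType:
    print(entity_type.name, "->", f"<archive_path>/{entity_type.value}")

# VALUE -> <archive_path>/values
# VALUE_DATA -> <archive_path>/value_data
# ENVIRONMENT -> <archive_path>/environments
# MANIFEST -> <archive_path>/manifests
# DESTINY_LINK -> <archive_path>/destiny_links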
retrieve_all_job_hashes(manifest_hash: Union[str, None] = None, inputs_hash: Union[str, None] = None) -> Iterable[str]
Source code in kiara/registries/data/data_store/filesystem_store.py (lines 154-160)
def retrieve_all_job_hashes(
    self,
    manifest_hash: Union[str, None] = None,
    inputs_hash: Union[str, None] = None,
) -> Iterable[str]:

    raise NotImplementedError()
has_value(value_id: uuid.UUID) -> bool

Check whether the specific value_id is persisted in this data store. This is a quick way to determine whether a value id is valid for this data store.

Parameters:

Name Type Description Default
value_id uuid.UUID

the id of the value to check.

required

Returns:

Type Description
bool

whether this data store contains the value with the specified id

Source code in kiara/registries/data/data_store/filesystem_store.py (lines 285-300)
def has_value(self, value_id: uuid.UUID) -> bool:
    """Check whether the specific value_id is persisted in this data store.
    way to quickly determine whether a value id is valid for this data store.

    Arguments:
        value_id: the id of the value to check.
    Returns:
        whether this data store contains the value with the specified id
    """

    base_path = (
        self.get_path(entity_type=EntityType.VALUE)
        / str(value_id)
        / VALUE_DETAILS_FILE_NAME
    )
    return base_path.is_file()
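
The check amounts to testing whether values/<value_id>/value.json exists under the archive's base path; a self-contained sketch of the equivalent probe (the base path is made up):

import uuid
from pathlib import Path

archive_path = Path("/tmp/example_kiara_archive")  # illustrative only
value_id = uuid.uuid4()

# has_value(value_id) boils down to this existence check:
value_details_file = archive_path / "values" / str(value_id) / "value.json"
print(value_details_file.is_file())  # False for a random id, True once the value is persisted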
retrieve_chunk(chunk_id: str, as_file: Union[bool, str, None] = None, symlink_ok: bool = True) -> Union[bytes, str]
Source code in kiara/registries/data/data_store/filesystem_store.py (lines 327-341)
def retrieve_chunk(
    self,
    chunk_id: str,
    as_file: Union[bool, str, None] = None,
    symlink_ok: bool = True,
) -> Union[bytes, str]:

    addr = self.hashfs.get(chunk_id)

    if as_file in (None, True):
        return addr.abspath
    elif as_file is False:
        return Path(addr.abspath).read_bytes()
    else:
        raise NotImplementedError()
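
Depending on as_file, the caller gets back either a path into the hashfs store or the raw chunk bytes. A small hedged helper that normalizes both cases to bytes (the archive argument is any data archive exposing this retrieve_chunk signature):

from pathlib import Path


def chunk_to_bytes(archive, chunk_id: str) -> bytes:
    # the default (as_file=None) returns the absolute path of the chunk inside hashfs
    result = archive.retrieve_chunk(chunk_id, as_file=None)
    if isinstance(result, bytes):
        return result
    return Path(result).read_bytes()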

FilesystemDataStore

Bases: FileSystemDataArchive, BaseDataStore

Data store that stores data as files on the local filesystem.
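
For orientation, a hedged sketch of the files a single persisted value produces under the archive base path (names follow the persistence methods in the source below; concrete hashes and ids are placeholders):

# <archive_path>/
#   environments/<env_type>_<env_hash>.json            # _persist_environment_details
#   values/<value_id>/value.json                       # _persist_value_details
#   value_data/<data_type>/<value_hash>/
#     .serialized_value.json                           # _persist_stored_value_info
#     value_id__<value_id>.json                        # symlink, _persist_value_pedigree
#   manifests/<manifest_hash>/manifest.json            # _persist_value_pedigree
#   manifests/<manifest_hash>/<jobs_hash>/inputs.json
#   manifests/<manifest_hash>/<jobs_hash>/output__<name>__value_id__<value_id>.json
#   destiny_links/<value_id>/<backlink>.json           # _persist_destiny_backlinks
#   hash_fs/...                                        # content-addressed chunks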

Source code in kiara/registries/data/data_store/filesystem_store.py (lines 344-496)
class FilesystemDataStore(FileSystemDataArchive, BaseDataStore):
    """Data store that stores data as files on the local filesystem."""

    _archive_type_name = "filesystem_data_store"

    def _persist_environment_details(
        self, env_type: str, env_hash: str, env_data: Mapping[str, Any]
    ):

        base_path = self.get_path(entity_type=EntityType.ENVIRONMENT)
        env_details_file = base_path / f"{env_type}_{env_hash}.json"

        if not env_details_file.exists():
            env_details_file.write_text(orjson_dumps(env_data))

    def _persist_stored_value_info(self, value: Value, persisted_value: PersistedData):

        working_dir = self.get_path(entity_type=EntityType.VALUE_DATA)
        data_dir = working_dir / value.data_type_name / str(value.value_hash)
        sv_file = data_dir / ".serialized_value.json"
        data_dir.mkdir(exist_ok=True, parents=True)
        sv_file.write_text(persisted_value.json())

    def _persist_value_details(self, value: Value):

        value_dir = self.get_path(entity_type=EntityType.VALUE) / str(value.value_id)

        if value_dir.exists():
            raise Exception(
                f"Can't persist value '{value.value_id}', value directory already exists: {value_dir}"
            )
        else:
            value_dir.mkdir(parents=True, exist_ok=False)

        value_file = value_dir / VALUE_DETAILS_FILE_NAME
        value_data = value.dict()
        value_file.write_text(orjson_dumps(value_data, option=orjson.OPT_NON_STR_KEYS))

    def _persist_destiny_backlinks(self, value: Value):

        destiny_dir = self.get_path(entity_type=EntityType.DESTINY_LINK)

        for value_id, backlink in value.destiny_backlinks.items():

            destiny_value_dir = destiny_dir / str(value_id)
            destiny_value_dir.mkdir(parents=True, exist_ok=True)
            destiny_file = destiny_value_dir / f"{backlink}.json"
            assert not destiny_file.exists()

            value_dir = self.get_path(entity_type=EntityType.VALUE) / str(
                value.value_id
            )
            value_file = value_dir / VALUE_DETAILS_FILE_NAME
            assert value_file.exists()

            fix_windows_symlink(value_file, destiny_file)

    def _persist_value_data(self, value: Value) -> PersistedData:

        serialized_value: SerializedData = value.serialized_data

        chunk_id_map = {}
        for key in serialized_value.get_keys():

            data_model = serialized_value.get_serialized_data(key)

            if data_model.type == "chunk":  # type: ignore
                chunks: Iterable[Union[str, BytesIO]] = [BytesIO(data_model.chunk)]  # type: ignore
            elif data_model.type == "chunks":  # type: ignore
                chunks = (BytesIO(c) for c in data_model.chunks)  # type: ignore
            elif data_model.type == "file":  # type: ignore
                chunks = [data_model.file]  # type: ignore
            elif data_model.type == "files":  # type: ignore
                chunks = data_model.files  # type: ignore
            elif data_model.type == "inline-json":  # type: ignore
                chunks = [BytesIO(data_model.as_json())]  # type: ignore
            else:
                raise Exception(
                    f"Invalid serialized data type: {type(data_model)}. Available types: {', '.join(SERIALIZE_TYPES)}"
                )

            chunk_ids = []
            for item in zip(serialized_value.get_cids_for_key(key), chunks):
                cid = item[0]
                _chunk = item[1]
                addr: HashAddress = self.hashfs.put_with_precomputed_hash(
                    _chunk, str(cid)
                )
                chunk_ids.append(addr.id)

            scids = SerializedChunkIDs(
                chunk_id_list=chunk_ids,
                archive_id=self.archive_id,
                size=data_model.get_size(),
            )
            scids._data_registry = self.kiara_context.data_registry
            chunk_id_map[key] = scids

        pers_value = PersistedData(
            archive_id=self.archive_id,
            chunk_id_map=chunk_id_map,
            data_type=serialized_value.data_type,
            data_type_config=serialized_value.data_type_config,
            serialization_profile=serialized_value.serialization_profile,
            metadata=serialized_value.metadata,
        )

        return pers_value

    def _persist_value_pedigree(self, value: Value):

        manifest_hash = value.pedigree.instance_cid
        jobs_hash = value.pedigree.job_hash

        base_path = self.get_path(entity_type=EntityType.MANIFEST)
        manifest_folder = base_path / str(manifest_hash)
        manifest_folder.mkdir(parents=True, exist_ok=True)

        manifest_info_file = manifest_folder / "manifest.json"
        if not manifest_info_file.exists():
            manifest_info_file.write_text(value.pedigree.manifest_data_as_json())

        job_folder = manifest_folder / str(jobs_hash)

        job_folder.mkdir(parents=True, exist_ok=True)

        inputs_details_file_name = job_folder / "inputs.json"
        if not inputs_details_file_name.exists():
            inputs_details_file_name.write_text(orjson_dumps(value.pedigree.inputs))

        outputs_file_name = (
            job_folder
            / f"output__{value.pedigree_output_name}__value_id__{value.value_id}.json"
        )

        outputs_file_name = fix_windows_longpath(outputs_file_name)

        if outputs_file_name.exists():
            # if value.pedigree_output_name == "__void__":
            #     return
            # else:
            raise Exception(f"Can't write value '{value.value_id}': already exists.")
        else:
            outputs_file_name.touch()

        value_data_dir = (
            self.get_path(entity_type=EntityType.VALUE_DATA)
            / value.value_schema.type
            / str(value.value_hash)
        )
        target_file = value_data_dir / f"value_id__{value.value_id}.json"

        fix_windows_symlink(outputs_file_name, target_file)

Functions