Skip to content

tables

Attributes

Classes

DeserializeTableModule

Bases: DeserializeValueModule

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
class DeserializeTableModule(DeserializeValueModule):
    """Deserialize a 'tables' value from its serialized (feather/Arrow IPC) representation."""

    _module_type_name = "load.tables"

    @classmethod
    def retrieve_supported_target_profiles(cls) -> Mapping[str, Type]:
        # The only supported deserialization target is a KiaraTables python object.
        return {"python_object": KiaraTables}

    @classmethod
    def retrieve_serialized_value_type(cls) -> str:
        return "tables"

    @classmethod
    def retrieve_supported_serialization_profile(cls) -> str:
        return "feather"

    def to__python_object(self, data: SerializedData, **config: Any):
        """Re-assemble a KiaraTables instance from the serialized per-column chunks.

        Each serialized key encodes '<table_id><TABLE_COLUMN_SPLIT_MARKER><column_name>';
        the chunk stored under that key is an Arrow IPC file holding that single column.

        Raises:
            KiaraException: if a key is malformed, or a column is not stored in
                exactly one chunk/file.
        """

        import pyarrow as pa

        tables: Dict[str, Any] = {}

        for column_id in data.get_keys():

            if TABLE_COLUMN_SPLIT_MARKER not in column_id:
                raise KiaraException(
                    f"Invalid serialized 'tables' data, key must contain '{TABLE_COLUMN_SPLIT_MARKER}': {column_id}"
                )
            table_id, column_name = column_id.split(
                TABLE_COLUMN_SPLIT_MARKER, maxsplit=1
            )

            chunks = data.get_serialized_data(column_id)

            # TODO: support multiple chunks
            # Explicit exceptions instead of 'assert': asserts are stripped under
            # 'python -O', which would silently skip this validation.
            if chunks.get_number_of_chunks() != 1:
                raise KiaraException(
                    f"Invalid serialized 'tables' data, column '{column_id}' must be stored in exactly one chunk."
                )
            files = list(chunks.get_chunks(as_files=True, symlink_ok=True))
            if len(files) != 1:
                raise KiaraException(
                    f"Invalid serialized 'tables' data, column '{column_id}' must resolve to exactly one file."
                )

            file = files[0]
            with pa.memory_map(file, "r") as column_chunk:
                loaded_arrays: pa.Table = pa.ipc.open_file(column_chunk).read_all()
                column = loaded_arrays.column(column_name)
                tables.setdefault(table_id, {})[column_name] = column

        table = KiaraTables.create_tables(tables)
        return table

Functions

retrieve_supported_target_profiles() -> Mapping[str, Type] classmethod
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
33
34
35
@classmethod
def retrieve_supported_target_profiles(cls) -> Mapping[str, Type]:
    # The deserialized 'tables' value is only available as a KiaraTables python object.
    return {"python_object": KiaraTables}
retrieve_serialized_value_type() -> str classmethod
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
37
38
39
@classmethod
def retrieve_serialized_value_type(cls) -> str:
    # This module deserializes values of the 'tables' data type.
    return "tables"
retrieve_supported_serialization_profile() -> str classmethod
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
41
42
43
@classmethod
def retrieve_supported_serialization_profile(cls) -> str:
    # Only the 'feather' (Arrow IPC) serialization profile is supported.
    return "feather"
to__python_object(data: SerializedData, **config: Any)
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
def to__python_object(self, data: SerializedData, **config: Any):
    """Re-assemble a KiaraTables instance from the serialized per-column chunks.

    Each serialized key encodes '<table_id><TABLE_COLUMN_SPLIT_MARKER><column_name>';
    the chunk stored under that key is an Arrow IPC file holding that single column.

    Raises:
        KiaraException: if a key is malformed, or a column is not stored in
            exactly one chunk/file.
    """

    import pyarrow as pa

    tables: Dict[str, Any] = {}

    for column_id in data.get_keys():

        if TABLE_COLUMN_SPLIT_MARKER not in column_id:
            raise KiaraException(
                f"Invalid serialized 'tables' data, key must contain '{TABLE_COLUMN_SPLIT_MARKER}': {column_id}"
            )
        table_id, column_name = column_id.split(
            TABLE_COLUMN_SPLIT_MARKER, maxsplit=1
        )

        chunks = data.get_serialized_data(column_id)

        # TODO: support multiple chunks
        # Explicit exceptions instead of 'assert': asserts are stripped under
        # 'python -O', which would silently skip this validation.
        if chunks.get_number_of_chunks() != 1:
            raise KiaraException(
                f"Invalid serialized 'tables' data, column '{column_id}' must be stored in exactly one chunk."
            )
        files = list(chunks.get_chunks(as_files=True, symlink_ok=True))
        if len(files) != 1:
            raise KiaraException(
                f"Invalid serialized 'tables' data, column '{column_id}' must resolve to exactly one file."
            )

        file = files[0]
        with pa.memory_map(file, "r") as column_chunk:
            loaded_arrays: pa.Table = pa.ipc.open_file(column_chunk).read_all()
            column = loaded_arrays.column(column_name)
            tables.setdefault(table_id, {})[column_name] = column

    table = KiaraTables.create_tables(tables)
    return table

CreateTablesModuleConfig

Bases: CreateFromModuleConfig

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
class CreateTablesModuleConfig(CreateFromModuleConfig):
    """Configuration for the 'create.tables' module."""

    # If True, items that fail to convert are skipped instead of aborting the job.
    ignore_errors: bool = Field(
        description="Whether to ignore convert errors and omit the failed items.",
        default=False,
    )
    # merge_into_single_table: bool = Field(
    #     description="Whether to merge all csv files into a single table.", default=False
    # )
    # None means "use the default" (the metadata table is included).
    include_source_metadata: Union[bool, None] = Field(
        description="Whether to include a table with metadata about the source files.",
        default=None,
    )
    # Only consulted when source metadata is included at all.
    include_source_file_content: bool = Field(
        description="When including source metadata, whether to also include the original raw (string) content.",
        default=False,
    )

Attributes

ignore_errors: bool = Field(description='Whether to ignore convert errors and omit the failed items.', default=False) class-attribute instance-attribute
include_source_metadata: Union[bool, None] = Field(description='Whether to include a table with metadata about the source files.', default=None) class-attribute instance-attribute
include_source_file_content: bool = Field(description='When including source metadata, whether to also include the original raw (string) content.', default=False) class-attribute instance-attribute

CreateDatabaseModule

Bases: CreateFromModule

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
class CreateDatabaseModule(CreateFromModule):
    """Create a 'tables' value from supported source data types (currently: 'file_bundle')."""

    _module_type_name = "create.tables"
    _config_cls = CreateTablesModuleConfig

    def create__tables__from__file_bundle(
        self, source_value: Value, job_log: JobLog
    ) -> Any:
        """Create a 'tables' value from a file_bundle value.

        Currently, only csv files are supported, files in the source file_bundle that have different extensions will be ignored.

        Each csv file in the bundle becomes one table in the result; table names are
        derived from the file names and de-duplicated via 'find_free_id'.

        Unless 'include_source_metadata' is explicitly disabled in the module
        configuration, an additional 'file_items' table with per-file metadata (and,
        optionally, the raw file content) is added to the result.
        """

        from pyarrow import csv as pa_csv

        include_raw_content_in_file_info: Union[bool, None] = self.get_config_value(
            "include_source_metadata"
        )

        tables = {}

        bundle: KiaraFileBundle = source_value.data

        table_names: List[str] = []
        included_files: Dict[str, bool] = {}
        errors: Dict[str, Union[None, str]] = {}
        # sorted for deterministic table order across runs
        for rel_path in sorted(bundle.included_files.keys()):

            if not rel_path.endswith(".csv"):
                job_log.add_log(
                    f"Ignoring file (not csv): {rel_path}", log_level=logging.INFO
                )
                included_files[rel_path] = False
                errors[rel_path] = "Not a csv file."
                continue

            file_item = bundle.included_files[rel_path]
            table_name = find_free_id(
                stem=file_item.file_name_without_extension, current_ids=table_names
            )
            try:
                table_names.append(table_name)
                table = pa_csv.read_csv(file_item.path)
                tables[table_name] = table
                included_files[rel_path] = True
            except Exception as e:
                included_files[rel_path] = False
                errors[rel_path] = KiaraException.get_root_details(e)

                # BUG FIX: this condition used to be '... is True or True', which
                # made the 'ignore_errors' config option a no-op (failures were
                # always swallowed). Failed files are now only skipped on request.
                if self.get_config_value("ignore_errors") is True:
                    log_message("ignore.import_file", file=rel_path, reason=str(e))
                    continue

                raise KiaraProcessingException(e)

        if include_raw_content_in_file_info in [None, True]:
            include_content: bool = self.get_config_value("include_source_file_content")

            if "file_items" in tables:
                raise KiaraProcessingException(
                    "Can't create table: a 'file_items' table already exists."
                )

            table = create_table_from_file_bundle(
                file_bundle=source_value.data,
                include_content=include_content,
                included_files=included_files,
                errors=errors,
            )
            tables["file_items"] = table

        return tables

Attributes

_config_cls = CreateTablesModuleConfig class-attribute instance-attribute

Functions

create__tables__from__file_bundle(source_value: Value, job_log: JobLog) -> Any

Create a 'tables' value from a file_bundle value.

Currently, only csv files are supported, files in the source file_bundle that have different extensions will be ignored.

Unless 'merge_into_single_table' is set to 'True' in the module configuration, each csv file will create one table in the resulting database. If this option is set, only a single table with all the values of all csv files will be created. For this to work, all csv files should follow the same schema.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
def create__tables__from__file_bundle(
    self, source_value: Value, job_log: JobLog
) -> Any:
    """Create a 'tables' value from a file_bundle value.

    Currently, only csv files are supported, files in the source file_bundle that have different extensions will be ignored.

    Each csv file in the bundle becomes one table in the result; table names are
    derived from the file names and de-duplicated via 'find_free_id'.

    Unless 'include_source_metadata' is explicitly disabled in the module
    configuration, an additional 'file_items' table with per-file metadata (and,
    optionally, the raw file content) is added to the result.
    """

    from pyarrow import csv as pa_csv

    include_raw_content_in_file_info: Union[bool, None] = self.get_config_value(
        "include_source_metadata"
    )

    tables = {}

    bundle: KiaraFileBundle = source_value.data

    table_names: List[str] = []
    included_files: Dict[str, bool] = {}
    errors: Dict[str, Union[None, str]] = {}
    # sorted for deterministic table order across runs
    for rel_path in sorted(bundle.included_files.keys()):

        if not rel_path.endswith(".csv"):
            job_log.add_log(
                f"Ignoring file (not csv): {rel_path}", log_level=logging.INFO
            )
            included_files[rel_path] = False
            errors[rel_path] = "Not a csv file."
            continue

        file_item = bundle.included_files[rel_path]
        table_name = find_free_id(
            stem=file_item.file_name_without_extension, current_ids=table_names
        )
        try:
            table_names.append(table_name)
            table = pa_csv.read_csv(file_item.path)
            tables[table_name] = table
            included_files[rel_path] = True
        except Exception as e:
            included_files[rel_path] = False
            errors[rel_path] = KiaraException.get_root_details(e)

            # BUG FIX: this condition used to be '... is True or True', which
            # made the 'ignore_errors' config option a no-op (failures were
            # always swallowed). Failed files are now only skipped on request.
            if self.get_config_value("ignore_errors") is True:
                log_message("ignore.import_file", file=rel_path, reason=str(e))
                continue

            raise KiaraProcessingException(e)

    if include_raw_content_in_file_info in [None, True]:
        include_content: bool = self.get_config_value("include_source_file_content")

        if "file_items" in tables:
            raise KiaraProcessingException(
                "Can't create table: a 'file_items' table already exists."
            )

        table = create_table_from_file_bundle(
            file_bundle=source_value.data,
            include_content=include_content,
            included_files=included_files,
            errors=errors,
        )
        tables["file_items"] = table

    return tables

AssembleTablesConfig

Bases: KiaraModuleConfig

Configuration for the 'assemble.tables' module.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
176
177
178
179
180
181
182
183
184
185
186
class AssembleTablesConfig(KiaraModuleConfig):
    """Configuration for the 'assemble.tables' module."""

    # How many tables to merge; defaults to 2, or to len(table_names) when that is set.
    number_of_tables: Union[int, None] = Field(
        description="How many tables should be merged. If 'table_names' is empty, this defaults to '2', otherwise the length of the 'table_names' input.",
        default=None,
    )
    # Optional fixed table aliases; when unset, aliases are collected as user inputs.
    table_names: Union[List[str], None] = Field(
        description="A pre-defined list of table names. If not defined, users will be asked for the table name(s).",
        default=None,
    )

Attributes

number_of_tables: Union[int, None] = Field(description="How many tables should be merged. If 'table_names' is empty, this defaults to '2', otherwise the length of the 'table_names' input.", default=None) class-attribute instance-attribute
table_names: Union[List[str], None] = Field(description='A pre-defined list of table names. If not defined, users will be asked for the table name(s).', default=None) class-attribute instance-attribute

AssembleTablesModule

Bases: KiaraModule

Assemble a 'tables' value from multiple tables.

Depending on the module configuration, 2 or more tables can be merged into a single 'tables' value.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
class AssembleTablesModule(KiaraModule):
    """Assemble a 'tables' value from multiple tables.

    Depending on the module configuration, 2 or more tables can be merged into a single 'tables' value.

    """

    _module_type_name = "assemble.tables"
    _config_cls = AssembleTablesConfig

    @functools.cached_property
    def _table_details(self) -> Tuple[int, Union[List[str], None]]:
        """Validate the config and return the normalized '(number_of_tables, table_names)'.

        Raises:
            KiaraException: if 'number_of_tables' contradicts the length of
                'table_names', or fewer than 2 tables are requested.
        """

        number_tables: Union[int, None] = self.get_config_value("number_of_tables")
        table_names: Union[None, List[str]] = self.get_config_value("table_names")

        if not table_names:
            if not number_tables:
                # neither option configured: default to merging 2 tables
                number_tables = 2
        elif not number_tables:
            number_tables = len(table_names)
        elif number_tables != len(table_names):
            raise KiaraException(
                "The 'number_of_tables' and length of 'table_names' config option must match."
            )

        if number_tables < 2:
            raise KiaraException("The 'number_of_tables' must be at least 2.")

        return number_tables, table_names

    @property
    def number_of_tables(self) -> int:
        # Validated table count (always >= 2).
        number_tables, _ = self._table_details
        return number_tables

    @property
    def table_names(self) -> Union[List[str], None]:
        # Pre-configured table aliases, or None if users supply them as inputs.
        _, table_names = self._table_details
        return table_names

    def create_inputs_schema(
        self,
    ) -> ValueMapSchema:
        """Create one 'table' input per expected table (plus a name input each, if no aliases are pre-configured)."""

        # Config validation/normalization already happens in '_table_details'
        # (invoked via these properties), so it is not repeated here.
        number_tables = self.number_of_tables
        table_names = self.table_names

        inputs_schema = {}
        if not table_names:
            # no pre-defined aliases: ask the user for a name alongside each table
            for i in range(1, number_tables + 1):
                inputs_schema[f"table_name_{i}"] = {
                    "type": "string",
                    "doc": f"The alias for table #{i}.",
                }
                inputs_schema[f"table_{i}"] = {
                    "type": "table",
                    "doc": f"The table to merge (#{i}).",
                }
        else:
            for table_name in table_names:
                inputs_schema[f"table_{table_name}"] = {
                    "type": "table",
                    "doc": f"The table to merge for alias '{table_name}'.",
                }

        return inputs_schema

    def create_outputs_schema(
        self,
    ) -> ValueMapSchema:
        """Declare the single 'tables' output of this module."""

        outputs = {
            "tables": {
                "type": "tables",
                "doc": "The assembled tables instance.",
            }
        }
        return outputs

    def process(self, inputs: ValueMap, outputs: ValueMap, job_log: JobLog) -> None:
        """Collect all table inputs under their aliases and set the 'tables' output.

        Raises:
            KiaraException: if two user-supplied table names collide.
        """

        number_tables = self.number_of_tables
        table_names = self.table_names

        tables: Dict[str, Any] = {}
        if not table_names:
            for i in range(1, number_tables + 1):
                table_name = inputs.get_value_data(f"table_name_{i}")
                table = inputs.get_value_obj(f"table_{i}")
                if table_name in tables:
                    raise KiaraException(f"Duplicate table name: '{table_name}'")
                tables[table_name] = table
        else:
            for table_name in table_names:
                table = inputs.get_value_obj(f"table_{table_name}")
                tables[table_name] = table

        outputs.set_value("tables", tables)

Attributes

_config_cls = AssembleTablesConfig class-attribute instance-attribute
number_of_tables: int property
table_names: Union[List[str], None] property

Functions

create_inputs_schema() -> ValueMapSchema
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
def create_inputs_schema(
    self,
) -> ValueMapSchema:
    """Create one 'table' input per expected table (plus a name input each, if no aliases are pre-configured)."""

    number_tables = self.number_of_tables
    table_names = self.table_names

    # NOTE(review): this block appears to duplicate validation already performed
    # when 'number_of_tables'/'table_names' are computed — presumably defensive;
    # confirm against the '_table_details' property before simplifying.
    if not table_names:
        if not number_tables:
            number_tables = 2
    elif not number_tables:
        number_tables = len(table_names)
    elif not number_tables == len(table_names):
        raise KiaraException(
            "The 'number_of_tables' and length of 'table_names' config option must match."
        )

    if number_tables < 2:
        raise KiaraException("The 'number_of_tables' must be at least 2.")

    inputs_schema = {}
    if not table_names:
        # no pre-defined aliases: ask the user for a name alongside each table
        for i in range(1, number_tables + 1):
            inputs_schema[f"table_name_{i}"] = {
                "type": "string",
                "doc": f"The alias for table #{i}.",
            }
            inputs_schema[f"table_{i}"] = {
                "type": "table",
                "doc": f"The table to merge (#{i}).",
            }
    else:
        for table_name in table_names:
            inputs_schema[f"table_{table_name}"] = {
                "type": "table",
                "doc": f"The table to merge for alias '{table_name}'.",
            }

    return inputs_schema
create_outputs_schema() -> ValueMapSchema
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
270
271
272
273
274
275
276
277
278
279
280
def create_outputs_schema(
    self,
) -> ValueMapSchema:
    """Declare the single 'tables' output of this module."""

    return {
        "tables": {
            "type": "tables",
            "doc": "The assembled tables instance.",
        }
    }
process(inputs: ValueMap, outputs: ValueMap, job_log: JobLog) -> None
Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
def process(self, inputs: ValueMap, outputs: ValueMap, job_log: JobLog) -> None:
    """Collect the configured table inputs under their aliases and set the 'tables' output.

    Raises:
        KiaraException: if two user-supplied table names collide.
    """

    expected_count = self.number_of_tables
    configured_aliases = self.table_names

    assembled: Dict[str, Any] = {}
    if configured_aliases:
        # aliases come from the module config; one table input per alias
        for alias in configured_aliases:
            assembled[alias] = inputs.get_value_obj(f"table_{alias}")
    else:
        # aliases come from the 'table_name_<i>' user inputs
        for idx in range(1, expected_count + 1):
            alias = inputs.get_value_data(f"table_name_{idx}")
            table_value = inputs.get_value_obj(f"table_{idx}")
            if alias in assembled:
                raise KiaraException(f"Duplicate table name: '{alias}'")
            assembled[alias] = table_value

    outputs.set_value("tables", assembled)

ExportNetworkDataModule

Bases: DataExportModule

Export 'tables' data items.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
class ExportNetworkDataModule(DataExportModule):
    """Export 'tables' data items.

    NOTE(review): the class name says 'network data' but the module type is
    'export.tables' and all methods operate on KiaraTables — presumably copied
    from a network-data module; the name is kept as-is for compatibility.
    """

    _module_type_name = "export.tables"

    # def export__network_data__as__graphml_file(
    #     self, value: NetworkData, base_path: str, name: str
    # ):
    #     """Export network data as graphml file."""
    #
    #     import networkx as nx
    #
    #     target_path = os.path.join(base_path, f"{name}.graphml")
    #
    #     # TODO: can't just assume digraph
    #     graph: nx.Graph = value.as_networkx_graph(nx.DiGraph)
    #     nx.write_graphml(graph, target_path)
    #
    #     return {"files": target_path}
    #
    def export__tables__as__sqlite_db(
        self, value: KiaraTables, base_path: str, name: str
    ):
        """Export the tables as a single sqlite database file."""

        from kiara_plugin.tabular.utils.tables import create_database_from_tables

        db = create_database_from_tables(tables=value)

        # move (not copy) the freshly created db file to the export location
        target_path = os.path.abspath(os.path.join(base_path, f"{name}.sqlite"))
        shutil.move(db.db_file_path, target_path)

        return {"files": target_path}

    def export__tables__as__sql_dump(
        self, value: KiaraTables, base_path: str, name: str
    ):
        """Export the tables as a sql dump file."""

        import sqlite_utils

        from kiara_plugin.tabular.utils.tables import create_database_from_tables

        # round-trip via a temporary sqlite db, then dump it as SQL statements
        kiara_db = create_database_from_tables(tables=value)

        db = sqlite_utils.Database(kiara_db.db_file_path)
        target_path = Path(os.path.join(base_path, f"{name}.sql"))
        with target_path.open("wt") as f:
            for line in db.conn.iterdump():
                f.write(line + "\n")

        return {"files": target_path.as_posix()}

    def export__tables__as__csv_files(
        self, value: KiaraTables, base_path: str, name: str
    ):
        """Export the tables as csv files (one file per table)."""

        from pyarrow import csv

        files = []

        for table_name in value.table_names:
            target_path = os.path.join(base_path, f"{name}__{table_name}.csv")
            os.makedirs(os.path.dirname(target_path), exist_ok=True)

            table = value.get_table(table_name)

            csv.write_csv(table.arrow_table, target_path)
            files.append(target_path)

        return {"files": files}

Functions

export__tables__as__sqlite_db(value: KiaraTables, base_path: str, name: str)

Export the tables as a single sqlite database file.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
323
324
325
326
327
328
329
330
331
332
333
334
335
def export__tables__as__sqlite_db(
    self, value: KiaraTables, base_path: str, name: str
):
    """Export the tables as a single sqlite database file."""

    from kiara_plugin.tabular.utils.tables import create_database_from_tables

    db = create_database_from_tables(tables=value)

    # move (not copy) the freshly created db file to the export location
    target_path = os.path.abspath(os.path.join(base_path, f"{name}.sqlite"))
    shutil.move(db.db_file_path, target_path)

    return {"files": target_path}
export__tables__as__sql_dump(value: KiaraTables, base_path: str, name: str)

Export the tables as a sql dump file.

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
def export__tables__as__sql_dump(
    self, value: KiaraTables, base_path: str, name: str
):
    """Export the tables as a sql dump file."""

    import sqlite_utils

    from kiara_plugin.tabular.utils.tables import create_database_from_tables

    # round-trip via a temporary sqlite db, then dump it as SQL statements
    kiara_db = create_database_from_tables(tables=value)

    db = sqlite_utils.Database(kiara_db.db_file_path)
    target_path = Path(os.path.join(base_path, f"{name}.sql"))
    with target_path.open("wt") as f:
        for line in db.conn.iterdump():
            f.write(line + "\n")

    return {"files": target_path.as_posix()}
export__tables__as__csv_files(value: KiaraTables, base_path: str, name: str)

Export the tables as csv files (one file per table).

Source code in /opt/hostedtoolcache/Python/3.11.4/x64/lib/python3.11/site-packages/kiara_plugin/tabular/modules/tables/__init__.py
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
def export__tables__as__csv_files(
    self, value: KiaraTables, base_path: str, name: str
):
    """Export the tables as csv files (one file per table)."""

    from pyarrow import csv

    files = []

    for table_name in value.table_names:
        target_path = os.path.join(base_path, f"{name}__{table_name}.csv")
        os.makedirs(os.path.dirname(target_path), exist_ok=True)

        table = value.get_table(table_name)

        csv.write_csv(table.arrow_table, target_path)
        files.append(target_path)

    return {"files": files}

Functions