Skip to content

Functions

src.utils.files.validate_data_type(path: Path) -> None

Checks if a data file matches a supported data file type.

Parameters:

Name Type Description Default
path Path

Path to the data file.

required

Raises:

Type Description
ValueError

If data file type not supported.

Source code in src\utils\files.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
def validate_data_type(path: Path) -> None:
    """Verify that a data file's extension is one of the supported data types.

    Args:
        path: Path to the data file.

    Raises:
        ValueError: If data file type not supported.
    """
    # Extension comparison is case-insensitive; error reports the original suffix
    suffix = path.suffix.lower()
    if suffix in supported_data_types:
        return
    raise ValueError("Data file provided does not match a supported data file type.\n"
                     f"Types supported: {', '.join(supported_data_types)}\n"
                     f"Type received: {path.suffix}")

src.utils.files.validate_data_file(path: Path) -> None

Checks if a data file exists and is a valid data file type. Raises an exception if validation fails.

Parameters:

Name Type Description Default
path Path

Path to the data file.

required

Raises:

Type Description
FileNotFoundError

If data file does not exist.

ValueError

If data file type not supported.

Source code in src\utils\files.py
107
108
109
110
111
112
113
114
115
116
117
118
119
120
def validate_data_file(path: Path) -> None:
    """Verify a data file both exists and has a supported extension, raising on failure.

    Args:
        path: Path to the data file.

    Raises:
        FileNotFoundError: If data file does not exist.
        ValueError: If data file type not supported.
    """
    # Existence check first; delegate the extension check to validate_data_type
    if path.is_file():
        validate_data_type(path)
        return
    raise FileNotFoundError(f"Data file does not exist:\n{str(path)}")

src.utils.files.load_data_file(path: Path, config: Optional[dict] = None) -> Union[list, dict, tuple, set]

Load data object from a data file.

Parameters:

Name Type Description Default
path Path

Path to the data file to be loaded.

required
config dict | None

Dict data to modify DataFileType configuration for this data load procedure.

None

Returns:

Type Description
list | dict | tuple | set

Data object such as dict, list, tuple, set, etc.

Raises:

Type Description
FileNotFoundError

If data file does not exist.

ValueError

If data file type not supported.

OSError

If loading from the data file fails.

Source code in src\utils\files.py
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
def load_data_file(
    path: Path,
    config: Optional[dict] = None
) -> Union[list, dict, tuple, set]:
    """Load data object from a data file.

    Args:
        path: Path to the data file to be loaded.
        config: Dict data to modify DataFileType configuration for this data load procedure.

    Returns:
        Data object such as dict, list, tuple, set, etc.

    Raises:
        FileNotFoundError: If data file does not exist.
        ValueError: If data file type not supported.
        OSError: If loading from the data file fails.
    """
    # Check if data file is valid
    validate_data_file(path)

    # Pull the parser and merge user config into a fresh kwargs dict.
    # BUG FIX: .copy() is shallow, so the previous in-place
    # parser['load_kw'].update(config) mutated the shared kwargs dict stored
    # in data_types, leaking this call's config into every later load.
    parser: DataFileType = data_types.get(path.suffix.lower(), {}).copy()
    if config:
        parser['load_kw'] = {**parser['load_kw'], **config}

    # Attempt to load data; suppress converts any read/parse failure into the
    # OSError below (the raise is only reached when the with-body didn't return)
    with util_file_lock, suppress(Exception), open(path, 'r', encoding='utf-8') as f:
        # Falsy parse results (e.g. empty file) normalize to an empty dict
        data = parser['load'](f, **parser['load_kw']) or {}
        return data
    raise OSError(f"Unable to load data from data file:\n{str(path)}")

src.utils.files.dump_data_file(obj: Union[list, dict, tuple, set], path: Path, config: Optional[dict] = None) -> None

Dump data object to a data file.

Parameters:

Name Type Description Default
obj list | dict | tuple | set

Iterable or dict object to save to data file.

required
path Path

Path to the data file to be dumped.

required
config dict | None

Dict data to modify DataFileType configuration for this data dump procedure.

None

Raises:

Type Description
FileNotFoundError

If data file does not exist.

ValueError

If data file type not supported.

OSError

If dumping to data file fails.

Source code in src\utils\files.py
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
def dump_data_file(
    obj: Union[list, dict, tuple, set],
    path: Path,
    config: Optional[dict] = None
) -> None:
    """Dump data object to a data file.

    Args:
        obj: Iterable or dict object to save to data file.
        path: Path to the data file to be dumped.
        config: Dict data to modify DataFileType configuration for this data dump procedure.

    Raises:
        ValueError: If data file type not supported.
        OSError: If dumping to data file fails (all write failures surface as OSError).
    """
    # Check if data file is a valid type
    validate_data_type(path)

    # Pull the parser and merge user config into a fresh kwargs dict.
    # BUG FIX: .copy() is shallow, so the previous in-place
    # parser['dump_kw'].update(config) mutated the shared kwargs dict stored
    # in data_types, leaking this call's config into every later dump.
    parser: DataFileType = data_types.get(path.suffix.lower(), {}).copy()
    if config:
        parser['dump_kw'] = {**parser['dump_kw'], **config}

    # Attempt to dump data; acquire the lock BEFORE entering suppress so a
    # failed lock acquisition isn't silently swallowed (matches load_data_file)
    with util_file_lock, suppress(Exception), open(path, 'w', encoding='utf-8') as f:
        parser['dump'](obj, f, **parser['dump_kw'])
        return
    # BUG FIX: message previously said "from data file" for a write operation
    raise OSError(f"Unable to dump data to data file:\n{str(path)}")

src.utils.files.verify_config_fields(ini_file: Path, data_file: Path) -> None

Validate that all settings fields present in a given json data are present in config file. If any are missing, add them and return.

Parameters:

Name Type Description Default
ini_file Path

Config file to verify contains the proper fields.

required
data_file Path

Data file containing config fields to check for, JSON or TOML.

required
Source code in src\utils\files.py
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
def verify_config_fields(ini_file: Path, data_file: Path) -> None:
    """Ensure every settings field defined in a schema data file exists in a config INI file,
    adding any that are missing.

    Args:
        ini_file: Config file to verify contains the proper fields.
        data_file: Data file containing config fields to check for, JSON or TOML.
    """
    # Bail out when the schema file is missing or not a supported type
    if not data_file.is_file() or data_file.suffix not in ('.toml', '.json'):
        return

    # Load schema rows; TOML needs an extra parsing pass into row form
    rows = load_data_file(data_file)
    if data_file.suffix == '.toml':
        rows = parse_kivy_config_toml(rows)

    # Make sure the INI file's directory exists, then load a parser for it
    ensure_path_exists(ini_file)
    parser = get_config_object(ini_file)

    # Group expected key/value pairs by section (title rows carry no setting)
    expected: dict = {}
    for row in rows:
        if row.get('type', 'title') == 'title':
            continue
        expected.setdefault(
            row.get('section', 'BROKEN'), []
        ).append({
            'key': row.get('key', ''),
            'value': row.get('default', 0)
        })

    # Insert any missing sections or options into the parser
    modified = False
    for section, entries in expected.items():
        if not parser.has_section(section):
            parser.add_section(section)
            modified = True
        for entry in entries:
            if not parser.has_option(section, entry['key']):
                parser.set(section, entry['key'], str(entry['value']))
                modified = True

    # Persist only when something was actually added
    if modified:
        with open(ini_file, "w", encoding="utf-8") as f:
            parser.write(f)

src.utils.files.parse_kivy_config_json(raw: list[dict]) -> list[dict]

Parse config JSON data for use with Kivy settings panel.

Parameters:

Name Type Description Default
raw list[dict]

Raw loaded JSON data.

required

Returns:

Type Description
list[dict]

Properly parsed data safe for use with Kivy.

Source code in src\utils\files.py
246
247
248
249
250
251
252
253
254
255
256
257
258
259
def parse_kivy_config_json(raw: list[dict]) -> list[dict]:
    """Parse config JSON data for use with Kivy settings panel.

    Args:
        raw: Raw loaded JSON data.

    Returns:
        Properly parsed data safe for use with Kivy.
    """
    # Drop the 'default' key in-place; Kivy's settings panel doesn't accept it
    for entry in raw:
        entry.pop('default', None)
    return raw

src.utils.files.parse_kivy_config_toml(raw: dict) -> list[dict]

Parse config TOML data for use with Kivy settings panel.

Parameters:

Name Type Description Default
raw dict

Raw loaded TOML data.

required

Returns:

Type Description
list[dict]

Properly parsed data safe for use with Kivy.

Source code in src\utils\files.py
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
def parse_kivy_config_toml(raw: dict) -> list[dict]:
    """Parse config TOML data for use with Kivy settings panel.

    Args:
        raw: Raw loaded TOML data.

    Returns:
        Properly parsed data safe for use with Kivy.
    """
    # The optional __CONFIG__ header can supply a section-name prefix
    header = raw.pop('__CONFIG__', {})
    section_prefix = header.get('prefix', '')

    # Flatten each TOML section into Kivy settings rows
    parsed: list[dict] = []
    for section_name, fields in raw.items():

        # A 'title' entry becomes its own title row
        section_title = fields.pop('title', None)
        if section_title:
            parsed.append({
                'type': 'title',
                'title': section_title
            })

        # Remaining entries are individual settings
        for field_key, field_def in fields.items():

            # Work out the data type and how its default should be displayed
            field_type = field_def.get('type', 'bool')
            default_value = field_def.get('default', 0)
            shown_default = default_value
            if field_type == 'bool':
                shown_default = 'True' if default_value else 'False'
            elif field_type in ('string', 'options', 'path'):
                shown_default = f"'{default_value}'"

            entry = {
                'type': field_type,
                'title': msg_bold(field_def.get('title', 'Broken Setting')),
                'desc': f"{field_def.get('desc', '')}\n"
                        f"{msg_bold(f'(Default: {shown_default})')}",
                'section': f'{section_prefix}.{section_name}' if section_prefix else section_name,
                'key': field_key, 'default': default_value}
            if opts := field_def.get('options'):
                entry['options'] = opts
            parsed.append(entry)

    # Return parsed data
    return parsed

src.utils.files.get_kivy_config_from_schema(config: Path) -> str

Return valid JSON data for use with Kivy settings panel.

Parameters:

Name Type Description Default
config Path

Path to config schema file, JSON or TOML.

required

Returns:

Type Description
str

Json string dump of validated data.

Source code in src\utils\files.py
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
def get_kivy_config_from_schema(config: Path) -> str:
    """Return valid JSON data for use with Kivy settings panel.

    Args:
        config: Path to config schema file, JSON or TOML.

    Returns:
        Json string dump of validated data.
    """
    # Load the schema; TOML-shaped data needs conversion to row form first
    schema = load_data_file(config)
    if config.suffix == '.toml':
        schema = parse_kivy_config_toml(schema)

    # Strip unsupported keys and serialize for Kivy
    return json.dumps(parse_kivy_config_json(schema))

src.utils.files.copy_config_or_verify(path_from: Path, path_to: Path, data_file: Path) -> None

Copy one config to another, or verify it if it exists.

Parameters:

Name Type Description Default
path_from Path

Path to the file to be copied.

required
path_to Path

Path to the file to create, if it doesn't exist.

required
data_file Path

Data schema file to use for validating an existing INI file.

required
Source code in src\utils\files.py
330
331
332
333
334
335
336
337
338
339
340
def copy_config_or_verify(path_from: Path, path_to: Path, data_file: Path) -> None:
    """Copy one config to another, or verify it if it exists.

    Args:
        path_from: Path to the file to be copied.
        path_to: Path to the file to create, if it doesn't exist.
        data_file: Data schema file to use for validating an existing INI file.
    """
    # Missing destination: copy the template over; otherwise just verify fields
    if not os.path.isfile(path_to):
        shutil.copy(path_from, path_to)
        return
    verify_config_fields(path_to, data_file)

src.utils.files.remove_config_file(ini_file: str) -> bool

Check if config file exists, then remove it.

Parameters:

Name Type Description Default
ini_file str

Path to an ini file.

required

Returns:

Type Description
bool

True if removed, False if not.

Source code in src\utils\files.py
343
344
345
346
347
348
349
350
351
352
353
354
355
356
def remove_config_file(ini_file: str) -> bool:
    """Remove a config file if it exists.

    Args:
        ini_file: Path to an ini file.

    Returns:
        True if removed, False if not.
    """
    # Nothing to do when the file isn't there
    if not os.path.isfile(ini_file):
        return False
    # Best-effort removal: report failure rather than raising
    try:
        os.remove(ini_file)
    except Exception:
        return False
    return True

src.utils.files.get_config_object(path: Union[str, os.PathLike, list[Union[str, os.PathLike]]]) -> ConfigParser

Returns a ConfigParser object using a valid ini path.

Parameters:

Name Type Description Default
path str | PathLike | list[str | PathLike]

Path to ini config file.

required

Returns:

Type Description
ConfigParser

ConfigParser object.

Raises:

Type Description
ValueError

If valid ini file wasn't received.

Source code in src\utils\files.py
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
def get_config_object(path: Union[str, os.PathLike, list[Union[str, os.PathLike]]]) -> ConfigParser:
    """Build a ConfigParser loaded from one or more ini file paths.

    Args:
        path: Path (or list of paths) to ini config file(s).

    Returns:
        ConfigParser object.

    NOTE(review): earlier docs claimed this raises ValueError for an invalid
    ini path, but ConfigParser.read silently skips unreadable files — confirm
    callers don't rely on that exception.
    """
    parser = ConfigParser(allow_no_value=True)
    # Keep option names case-sensitive (the default transform lower-cases them)
    parser.optionxform = str
    parser.read(path, encoding='utf-8')
    return parser

src.utils.files.get_app_version(path: Path) -> str

Returns the version string stored in the root project file.

Parameters:

Name Type Description Default
path Path

Path to the root project file.

required

Returns:

Type Description
str

Current version string.

Source code in src\utils\files.py
382
383
384
385
386
387
388
389
390
391
392
def get_app_version(path: Path) -> str:
    """Returns the version string stored in the root project file.

    Args:
        path: Path to the root project file.

    Returns:
        Current version string.
    """
    # Walk tool.poetry.version, falling back to 1.0.0 when any level is absent
    pyproject = load_data_file(path)
    poetry_cfg = pyproject.get('tool', {}).get('poetry', {})
    return poetry_cfg.get('version', '1.0.0')

src.utils.files.check_valid_file(path: Union[str, os.PathLike], ext: Optional[str] = None) -> bool

Checks if a file path provided exists, optionally validating an extension type. Returns True if the file is valid, otherwise False.

Source code in src\utils\files.py
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
def check_valid_file(path: Union[str, os.PathLike], ext: Optional[str] = None) -> bool:
    """Checks if a file path provided exists, optionally validating an extension type.

    Args:
        path: Path to the file to verify.
        ext: Extension to check, if provided (leading dot optional, case-insensitive).

    Returns:
        True if file is valid, otherwise False.
    """
    with suppress(Exception):
        # BUG FIX: the entire path was previously lower-cased BEFORE the
        # existence check, which broke on case-sensitive filesystems for any
        # path containing uppercase characters. Only the extension comparison
        # should be case-insensitive.
        if os.path.isfile(path):
            if ext:
                # Normalize the extension and compare case-insensitively
                ext = (ext if ext.startswith('.') else f'.{ext}').lower()
                if not str(path).lower().endswith(ext):
                    return False
            return True
    return False

src.utils.files.ensure_path_exists(path: Union[str, os.PathLike]) -> None

Ensure that the directories in a path exist. @param path: Folder path to check and create if necessary.

Source code in src\utils\files.py
418
419
420
421
422
423
def ensure_path_exists(path: Union[str, os.PathLike]) -> None:
    """Ensure that the directories in a path exist, creating them if necessary.

    Args:
        path: File or folder path whose parent directories should exist.
    """
    # BUG FIX: mode=777 was decimal (octal 0o1411: sticky bit, owner read-only),
    # leaving freshly created directories unusable. Use octal 0o777 — still
    # subject to the process umask.
    Path(os.path.dirname(path)).mkdir(mode=0o777, parents=True, exist_ok=True)

src.utils.files.get_unique_filename(path: Path) -> Path

If a filepath exists, number the file according to the lowest number that doesn't exist. @param path: Path to the file.

Source code in src\utils\files.py
426
427
428
429
430
431
432
433
434
435
436
def get_unique_filename(path: Path) -> Path:
    """If a filepath exists, number the file with the lowest suffix number that doesn't exist.

    Args:
        path: Path to the file.
    """
    base_stem, counter = path.stem, 1
    # Keep bumping " (n)" on the stem until the name is free
    while path.is_file():
        path = path.with_stem(f'{base_stem} ({counter})')
        counter += 1
    return path

src.utils.files.get_subdirs(path: Path) -> Iterator[Path]

Yields each subdirectory of a given folder.

Parameters:

Name Type Description Default
path Path

Path to the folder to iterate over.

required

Yields:

Type Description
Path

A subdirectory of the given folder.

Source code in src\utils\files.py
439
440
441
442
443
444
445
446
447
448
449
450
def get_subdirs(path: Path) -> Iterator[Path]:
    """Yields each subdirectory of a given folder.

    Args:
        path: Path to the folder to iterate over.

    Yields:
        A subdirectory of the given folder.
    """
    # os.walk visits the whole tree; emit every directory it reports at each level
    for parent, folders, _files in os.walk(path):
        base = Path(parent)
        for folder in folders:
            yield base / folder

src.utils.files.get_file_size_mb(file_path: Union[str, os.PathLike], decimal: int = 1) -> float

Get a file's size in megabytes rounded. @param file_path: Path to the file. @param decimal: Number of decimal places to allow when rounding. @return: Float representing the filesize in megabytes rounded.

Source code in src\utils\files.py
458
459
460
461
462
463
464
465
def get_file_size_mb(file_path: Union[str, os.PathLike], decimal: int = 1) -> float:
    """Get a file's size in megabytes, rounded.

    Args:
        file_path: Path to the file.
        decimal: Number of decimal places to allow when rounding.

    Returns:
        Float representing the filesize in megabytes rounded.
    """
    # Size in mebibytes (1024 * 1024 bytes), rounded to the requested precision
    size_bytes = os.path.getsize(file_path)
    return round(size_bytes / (1024 * 1024), decimal)