h3.utils package#

Submodules#

h3.utils.directories module#

h3.utils.directories.get_checkpoint_dir() str[source]#

./data/datasets/checkpoints

h3.utils.directories.get_cmorph_dir() str[source]#

./data/datasets/weather/cmorph

h3.utils.directories.get_coastline_dir() str[source]#

./data/datasets/terrain/coastlines

h3.utils.directories.get_data_dir() str[source]#

./data

h3.utils.directories.get_dataloading_dir() str[source]#

./h3/dataloading

h3.utils.directories.get_datasets_dir() str[source]#

./data/datasets

h3.utils.directories.get_dem_dir() str[source]#

./data/datasets/terrain/dem_data

h3.utils.directories.get_download_dir() str[source]#

./data/downloads

h3.utils.directories.get_ecmwf_data_dir() str[source]#

./data/datasets/weather/ecmwf

h3.utils.directories.get_flood_dir() str[source]#

./data/datasets/flood

h3.utils.directories.get_h3_data_files_dir() str[source]#

./h3/data_files

h3.utils.directories.get_isd_data_dir() str[source]#

./data/datasets/weather/noaa

h3.utils.directories.get_metadata_pickle_dir() str[source]#

./data/datasets/processed_data/metadata_pickle

h3.utils.directories.get_noaa_data_dir() str[source]#

./data/datasets/weather/noaa

h3.utils.directories.get_pickle_dir() str[source]#

./data/pickles

h3.utils.directories.get_processed_data_dir() str[source]#

./data/datasets/processed_data

h3.utils.directories.get_storm_dir() str[source]#

./data/datasets/storm

h3.utils.directories.get_terrain_dir() str[source]#

./data/datasets/terrain

h3.utils.directories.get_weather_data_dir() str[source]#

./data/datasets/weather

h3.utils.directories.get_xbd_dir() str[source]#

./data/datasets/xBD_data

h3.utils.directories.get_xbd_disaster_dir(disaster: str) str[source]#

./data/datasets/xBD_data/{disaster}

h3.utils.directories.get_xbd_hlabel_dir(old: bool = False) str[source]#

./data/datasets/xBD_data/geotiffs/hold/labels

h3.utils.directories.get_xbd_hurricane_dir() str[source]#

./data/datasets/xBD_data/hurricane
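
The getters above each report where a dataset lives relative to the project root. A minimal usage sketch; the disaster name "hurricane-harvey" is illustrative, and it is assumed here that each getter simply returns the path string shown:

    import os

    from h3.utils.directories import (
        get_xbd_dir,
        get_xbd_disaster_dir,
        get_weather_data_dir,
    )

    xbd_dir = get_xbd_dir()               # ./data/datasets/xBD_data
    weather_dir = get_weather_data_dir()  # ./data/datasets/weather

    # Parametrised getter: the disaster name is a placeholder.
    harvey_dir = get_xbd_disaster_dir("hurricane-harvey")

    # Compose further paths with os.path.join rather than string concatenation.
    labels_dir = os.path.join(xbd_dir, "geotiffs", "hold", "labels")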

h3.utils.downloader module#

h3.utils.downloader.downloader(urls: Iterable[str], target_dir: str = './data/downloads')[source]#

Downloader to download multiple files.
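
A minimal sketch of driving the downloader; the URLs are placeholders, and the target directory is taken from get_download_dir() rather than relying on the default:

    from h3.utils.directories import get_download_dir
    from h3.utils.downloader import downloader

    # Placeholder URLs -- any iterable of URL strings is accepted.
    urls = [
        "https://example.com/archive_part1.tar.gz",
        "https://example.com/archive_part2.tar.gz",
    ]

    downloader(urls, target_dir=get_download_dir())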

h3.utils.downloader.main()[source]#
h3.utils.downloader.url_download(url: str, path: str, task: int = 1, total: int = 1) None[source]#

Download a URL to a local file.

See also

downloader

Downloads multiple URLs in parallel.
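
A sketch of fetching a single file with url_download; the URL and filename are placeholders, and it is assumed from the signature that task and total only index progress reporting, so their defaults suit a standalone call:

    import os

    from h3.utils.directories import get_download_dir
    from h3.utils.downloader import url_download

    url = "https://example.com/stations.csv"  # placeholder URL
    dest = os.path.join(get_download_dir(), "stations.csv")

    url_download(url, dest)  # task/total left at their defaults of 1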

h3.utils.file_ops module#

h3.utils.file_ops.check_all_downloads()[source]#
h3.utils.file_ops.get_sha1(filepath: str) str[source]#

Compute the SHA-1 hash of a file. Because the files are large, the hash is built from buffered chunks (see https://stackoverflow.com/a/22058673/9931399).
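
A minimal sketch of the buffered-hashing pattern the note refers to, reading fixed-size chunks so memory use stays constant regardless of file size (the chunk size is an arbitrary choice here, not necessarily the value used by get_sha1):

    import hashlib

    def sha1_of_file(filepath: str, buf_size: int = 65536) -> str:
        # Feed the hash incrementally instead of reading the whole
        # file into memory at once.
        sha1 = hashlib.sha1()
        with open(filepath, "rb") as f:
            while chunk := f.read(buf_size):
                sha1.update(chunk)
        return sha1.hexdigest()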

h3.utils.file_ops.guarantee_existence(path: str) str[source]#
h3.utils.file_ops.main()[source]#
h3.utils.file_ops.unpack_file(filepath: str, clean: bool = False, file_format: None | str = None)[source]#

Unpack an archive file. This can be quite slow for big files.

Parameters:
  • filepath (str) – Path of the file to unpack; it is unpacked into the same folder.

  • clean (bool, optional) – If True, delete the archive after unpacking. The default is False.

  • file_format (str, optional) – The archive format. If None, it is inferred from the file extension. See shutil.unpack_archive()
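
A usage sketch; the archive name is a placeholder, and the format is left to be inferred from the extension:

    from h3.utils.file_ops import unpack_file

    # Unpacks next to the archive; keep the archive afterwards (clean=False).
    unpack_file("./data/downloads/dem_tiles.tar.gz", clean=False)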

h3.utils.simple_functions module#