changelog
crdanielbusch committed Nov 14, 2024
1 parent 72589ed commit 4ba7f59
Showing 3 changed files with 1 addition and 99 deletions.
1 change: 1 addition & 0 deletions changelog/1.feature.md
@@ -0,0 +1 @@
Script to download data set from FAOSTAT website.
57 changes: 0 additions & 57 deletions scripts/download_all_domains.py
@@ -1,65 +1,8 @@
"""Downloads all domain data sets from FAOSTAT website."""


from faostat_data_primap.download import (
    download_all_domains,
)

# def download_all_domains(
#     domains: list[tuple[str]] = domains,
#     downloaded_data_path: str = downloaded_data_path,
# ) -> list[str]:
#     """
#     Download and unpack all climate-related domains from the FAO stat website.
#
#     Extract the date when the data set was last updated and create a directory
#     with the same name. Download the zip files for each domain if
#     it does not already exist. Unpack the zip file and save in
#     the same directory.
#
#     Parameters
#     ----------
#     sources
#         Name of data set, url to domain overview,
#         and download url
#
#     Returns
#     -------
#     List of input files that have been fetched or found locally.
#
#     """
#     downloaded_files = []
#     for ds_name, urls in domains.items():
#         url = urls["url_domain"]
#         url_download = urls["url_download"]
#         url_methodology = urls["url_methodology"]
#
#         soup = get_html_content(url)
#
#         last_updated = get_last_updated_date(soup, url)
#
#         if not downloaded_data_path.exists():
#             downloaded_data_path.mkdir()
#
#         ds_path = downloaded_data_path / ds_name
#         if not ds_path.exists():
#             ds_path.mkdir()
#
#         local_data_dir = ds_path / last_updated
#         if not local_data_dir.exists():
#             local_data_dir.mkdir()
#
#         download_methodology(save_path=local_data_dir, url_download=url_methodology)
#
#         local_filename = local_data_dir / f"{ds_name}.zip"
#
#         download_file(url_download=url_download, save_path=local_filename)
#
#         downloaded_files.append(str(local_filename))
#
#         unzip_file(local_filename)
#
#     return downloaded_files

if __name__ == "__main__":
    download_all_domains()
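
For reference, the commented-out block removed above described the flow that now lives in faostat_data_primap.download.download_all_domains: look up the date the domain was last updated, create a directory named after it, download the domain's zip file if it is not already present, and unpack it in the same directory. Below is a minimal, hypothetical sketch of that per-domain flow using only the Python standard library; the function name, parameters, and directory layout are illustrative assumptions, not the package's actual API.

# Hypothetical sketch of the download-and-unpack flow described in the removed
# comment block; not the implementation in faostat_data_primap.download.
import zipfile
from pathlib import Path
from urllib.request import urlretrieve


def download_and_unpack(
    ds_name: str, url_download: str, last_updated: str, downloaded_data_path: Path
) -> str:
    """Download one domain's zip into <data>/<ds_name>/<last_updated>/ and unpack it."""
    # dated per-domain directory, mirroring the layout created in the removed code
    local_data_dir = downloaded_data_path / ds_name / last_updated
    local_data_dir.mkdir(parents=True, exist_ok=True)

    local_filename = local_data_dir / f"{ds_name}.zip"
    if not local_filename.exists():
        # fetch the archive only if it has not been downloaded before
        urlretrieve(url_download, str(local_filename))

    # unpack next to the archive, as the removed code did with unzip_file()
    with zipfile.ZipFile(local_filename) as zf:
        zf.extractall(local_data_dir)

    return str(local_filename)

In the removed code the same steps ran in a loop over all configured domains, accumulating and returning the list of fetched archive paths.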
42 changes: 0 additions & 42 deletions scripts/remove_downloads.py

This file was deleted.
