From 4a65ba4e8511dee085efb9db8f38f61f2f9d7dcc Mon Sep 17 00:00:00 2001 From: antares-sw <23400824+antares-sw@users.noreply.github.com> Date: Fri, 10 Jan 2025 14:22:19 +0300 Subject: [PATCH] Init --- .dockerignore | 11 ++ .github/workflows/docker.yaml | 37 ++++ .gitignore | 7 + .python-version | 1 + Dockerfile | 28 +++ README.md | 101 +++++++++++ config.toml.example | 23 +++ containers.py | 15 ++ pyproject.toml | 12 ++ recovery.py | 87 +++++++++ settings.py | 46 +++++ snapshot.py | 67 +++++++ storage.py | 68 +++++++ uv.lock | 324 ++++++++++++++++++++++++++++++++++ 14 files changed, 827 insertions(+) create mode 100644 .dockerignore create mode 100644 .github/workflows/docker.yaml create mode 100644 .gitignore create mode 100644 .python-version create mode 100644 Dockerfile create mode 100644 config.toml.example create mode 100644 containers.py create mode 100644 pyproject.toml create mode 100644 recovery.py create mode 100644 settings.py create mode 100644 snapshot.py create mode 100644 storage.py create mode 100644 uv.lock diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..3449c4a --- /dev/null +++ b/.dockerignore @@ -0,0 +1,11 @@ +.venv +__pycache__ +tmp/ +tmp2/ +.vscode +config.toml +google-credentials.json +.git +.gitignore +.dockerignore +README.md \ No newline at end of file diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml new file mode 100644 index 0000000..6461b49 --- /dev/null +++ b/.github/workflows/docker.yaml @@ -0,0 +1,37 @@ +name: Docker + +on: + push: + +jobs: + docker: + name: Build Docker Image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: | + europe-west4-docker.pkg.dev/stakewiselabs/public/node-snapshots + tags: | + type=ref,event=branch + type=ref,event=tag + type=sha + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GAR + uses: docker/login-action@v3 + with: + registry: europe-west4-docker.pkg.dev + username: _json_key + password: ${{ secrets.GAR_JSON_KEY }} + - name: Build and push + uses: docker/build-push-action@v5 + with: + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..67431d3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +.venv +__pycache__ +tmp/ +tmp2/ +.vscode +config.toml +google-credentials.json \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..e4fba21 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..dd8c4c1 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,28 @@ +# Use a Python image with uv pre-installed +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim + +# Install the project into `/app` +WORKDIR /app + +# Enable bytecode compilation +ENV UV_COMPILE_BYTECODE=1 + +# Copy from the cache instead of linking since it's a mounted volume +ENV UV_LINK_MODE=copy + +COPY pyproject.toml . +COPY uv.lock . + +# Install the project's dependencies using the lockfile and settings +RUN uv sync --frozen --no-install-project --no-dev + +# Then, add the rest of the project source code and install it +# Installing separately from its dependencies allows optimal layer caching +ADD . 
/app
+RUN uv sync --frozen --no-dev
+
+# Place executables in the environment at the front of the path
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Run everything through `uv run` so commands execute inside the project environment
+ENTRYPOINT ["uv", "run"]
diff --git a/README.md b/README.md
index cd5002c..01cd51a 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,103 @@
 # node-snapshots
+
 Stores the snapshots of Ethereum Execution and Consensus clients
+
+## Overview
+
+This tool manages snapshots of Ethereum/Gnosis nodes running in Docker containers by creating backups of specified directories, uploading those backups to Google Cloud Storage (GCS), and facilitating the recovery process. It can back up multiple nodes in parallel for efficiency.
+
+## Features
+
+- **Node Snapshot**: Stop a container, create a tarball of the specified directories, and start the container again.
+- **Upload to Google Cloud Storage**: Upload the created tarballs to a specified GCS bucket.
+- **Parallel Execution**: Take snapshots of multiple containers in parallel.
+- **Recovery**: Download and extract snapshots from GCS to recover node data.
+
+## Prerequisites
+
+- **Docker**: Ensure Docker is installed and running on your machine.
+- **Google Cloud Storage**: You need a Google Cloud project with a GCS bucket and service account credentials.
+- **Python 3.12+**: The scripts require Python 3.12 or later and the dependencies listed in `pyproject.toml`.
+
+## Configuration
+
+### Configuration File (`config.toml`)
+
+This tool uses a `.toml` configuration file to specify the Docker containers to back up, as well as the Google Cloud Storage details. Here’s an example configuration:
+
+```toml
+# Docker host configuration
+docker_host = "unix:///var/run/docker.sock"
+
+# Google Cloud service account credentials
+google_credentials_json = "google-credentials.json"
+
+# Google Cloud Storage bucket name
+bucket_name = "node-snapshots-bucket"
+
+# Docker containers and their paths for snapshot management
+[[docker_containers]]
+container_name = "nethermind"
+data_path = "/data/nethermind"
+tar_name = "nethermind.tar"
+bucket_path = "mainnet/nethermind.tar"
+recovery_path = "/data/recovered/nethermind"
+
+[[docker_containers]]
+container_name = "lighthouse"
+data_path = "/data/lighthouse"
+tar_name = "lighthouse.tar"
+bucket_path = "mainnet/lighthouse.tar"
+recovery_path = "/data/recovered/lighthouse"
+```
+
+### Configuration Fields
+
+- **docker_host**: The location of your Docker daemon (e.g., `"unix:///var/run/docker.sock"` for Linux or macOS).
+- **google_credentials_json**: Path to the Google Cloud service account credentials JSON file.
+- **bucket_name**: The name of your GCS bucket to upload snapshots to.
+- **docker_containers**:
+  - **container_name**: The name of the Docker container to back up.
+  - **data_path**: Path (relative or absolute) to the container's data directory, as seen by the machine running the script.
+  - **tar_name**: Name of the tarball file to create; use a unique name per container.
+  - **bucket_path**: Path in the GCS bucket where the tarball will be uploaded.
+  - **recovery_path**: Path on the local filesystem where the tarball will be extracted during recovery.
+
+## Usage
+
+### 1. Take Snapshots of Docker Containers
+
+Run the tool to take snapshots of the Docker containers specified in the configuration file. This will create tarballs of the `data_path` directories, upload them to GCS, and clean up after the operation. Each container is stopped while its tarball is created and started again afterwards.
+
+```bash
+python snapshot.py --config path_to_your_config.toml
+```
+
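+You can also run the scripts from the container image built by the included Dockerfile and published by the GitHub workflow to `europe-west4-docker.pkg.dev/stakewiselabs/public/node-snapshots`. A minimal sketch follows; the image tag and the volume mounts are examples and must be adapted to your setup, and the Docker socket, the config file, the credentials file, and the node data directories all have to be visible inside the container:
+
+```bash
+# Example only: adjust the image tag and the mounted paths to your environment.
+docker run --rm \
+  -v /var/run/docker.sock:/var/run/docker.sock \
+  -v $(pwd)/config.toml:/app/config.toml \
+  -v $(pwd)/google-credentials.json:/app/google-credentials.json \
+  -v /data:/data \
+  europe-west4-docker.pkg.dev/stakewiselabs/public/node-snapshots:main \
+  snapshot.py --config /app/config.toml
+```
+
+The image's entrypoint is `uv run`, so the same pattern works for `recovery.py`.
+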
+### 2. Recovery of Docker Containers
+
+To recover the data from a snapshot, the tool downloads the tarball from GCS and extracts it to the specified `recovery_path`.
+
+The script will:
+
+1. Check if the recovery directory exists.
+2. If the directory is not empty, ask for confirmation before deleting the existing files.
+3. Download the tarball from GCS.
+4. Extract the tarball to the `recovery_path`.
+
+To trigger the recovery process, run the recovery script; it will download and extract the snapshots automatically.
+
+```bash
+python recovery.py --config path_to_your_config.toml
+```
+
+## Example Workflow
+
+1. **Configuration**: Set up the `config.toml` file with the Docker container entries and the corresponding Google Cloud Storage bucket details.
+2. **Run Backup**: Run the snapshot script (`snapshot.py`) to create tarballs of the Docker container data and upload them to Google Cloud Storage.
+3. **Recovery**: Run the recovery script (`recovery.py`) to download and extract the snapshot tarballs into the desired recovery directories.
+
+## Troubleshooting
+
+- **Google Cloud Authentication**: Make sure the `google-credentials.json` file points to a valid Google Cloud service account with the necessary permissions to access your GCS bucket.
+- **Docker Issues**: Ensure that the Docker daemon is running and that the container names in the configuration file match the actual running containers.
+- **Permission Issues**: Verify that the user running the script has permission to write to the specified paths (e.g., the local recovery path and the GCS bucket).
diff --git a/config.toml.example b/config.toml.example
new file mode 100644
index 0000000..0ff4e08
--- /dev/null
+++ b/config.toml.example
@@ -0,0 +1,23 @@
+# Docker host configuration
+docker_host = "unix:///var/run/docker.sock"
+
+# Google Cloud service account credentials
+google_credentials_json = "google-credentials.json"
+
+# Google Cloud Storage bucket name
+bucket_name = "node-snapshots"
+
+# Docker containers and their paths for snapshot management
+[[docker_containers]]
+container_name = "nethermind"
+data_path = "/data/nethermind"
+tar_name = "nethermind.tar"
+bucket_path = "mainnet/nethermind.tar"
+recovery_path = "/data/recovered/nethermind"
+
+[[docker_containers]]
+container_name = "lighthouse"
+data_path = "/data/lighthouse"
+tar_name = "lighthouse.tar"
+bucket_path = "mainnet/lighthouse.tar"
+recovery_path = "/data/recovered/lighthouse"
diff --git a/containers.py b/containers.py
new file mode 100644
index 0000000..4ecccad
--- /dev/null
+++ b/containers.py
@@ -0,0 +1,15 @@
+import docker
+
+def stop(docker_client: docker.DockerClient, container_names: list[str]) -> None:
+    """Stop the Docker containers by name."""
+    for container_name in container_names:
+        container = docker_client.containers.get(container_name)
+        container.stop()
+        print(f"Container {container_name} stopped.")
+
+def start(docker_client: docker.DockerClient, container_names: list[str]) -> None:
+    """Start the Docker containers by name."""
+    for container_name in container_names:
+        container = docker_client.containers.get(container_name)
+        container.start()
+        print(f"Container {container_name} started.")
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..92153fa
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,12 @@
+[project]
+name = "node-snapshot"
+version = "0.1.0"
+description = "Stores snapshots of Ethereum Execution and Consensus clients"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+    "docker>=7.1.0",
+    "google-cloud-storage>=2.19.0",
+    "toml>=0.10.2",
"tqdm>=4.67.1", +] diff --git a/recovery.py b/recovery.py new file mode 100644 index 0000000..4293902 --- /dev/null +++ b/recovery.py @@ -0,0 +1,87 @@ +import os +import shutil +from google.cloud import storage +from google.oauth2 import service_account +import docker +from settings import Settings, parse_arguments +from storage import download_from_bucket, extract_tar +from concurrent.futures import ThreadPoolExecutor + +import storage + +def verify_or_create_directory(recovery_path: str) -> bool: + """Verify if the recovery directory exists, create it if not, or ask for confirmation to delete contents.""" + if not os.path.exists(recovery_path): + os.makedirs(recovery_path) + print(f"Created directory: {recovery_path}") + else: + if os.listdir(recovery_path): + # If the directory is not empty, ask for confirmation to delete its contents + confirm = input(f"The directory {recovery_path} is not empty. Do you want to delete its contents and recover from the snapshot? (y/n): ") + if confirm.lower() == 'y': + for filename in os.listdir(recovery_path): + file_path = os.path.join(recovery_path, filename) + try: + if os.path.isdir(file_path): + shutil.rmtree(file_path) + else: + os.remove(file_path) + except Exception as e: + print(f"Error removing {file_path}: {e}") + print(f"Deleted all contents of {recovery_path}.") + else: + print(f"Skipping recovery for {recovery_path}.") + return False + else: + print(f"The directory {recovery_path} is empty. Proceeding with recovery.") + return True + +def recover_snapshot(storage_client: storage.Client, settings: Settings, container_config: dict[str, str]) -> None: + """Download the snapshot tar file and extract it to the recovery path.""" + + tar_name = container_config['tar_name'] + bucket_path = container_config['bucket_path'] + recovery_path = container_config['recovery_path'] + + # Step 1: Verify or create the recovery directory and delete contents if necessary + if not verify_or_create_directory(recovery_path): + return # Skip this container's recovery if directory isn't ready + + # Step 2: Download the snapshot tar file from Google Cloud Storage + tar_path = f"./{tar_name}" + download_from_bucket(storage_client, settings.bucket_name, bucket_path, tar_path) + + # Step 3: Extract the tarball to the recovery path + extract_tar(tar_path, recovery_path) + + # Step 4: Clean up the tar file after recovery + os.remove(tar_path) + print(f"Cleaned up tar file: {tar_path}") + +def main() -> None: + """Main function to parse arguments, initialize clients, and recover snapshots in parallel.""" + args = parse_arguments() + settings = Settings(config_path=args.config) + + credentials = service_account.Credentials.from_service_account_file( + settings.google_credentials_json + ) + print(settings) + storage_client = storage.Client(credentials=credentials) + docker_client = docker.DockerClient(base_url=settings.docker_host) + + containers_config = settings.docker_containers + + # Recover snapshot for each configured container + with ThreadPoolExecutor() as executor: + futures = [] + for container_config in containers_config: + futures.append(executor.submit(recover_snapshot, storage_client, settings, container_config)) + + for future in futures: + future.result() + + print("All snapshots recovered.") + +if __name__ == "__main__": + main() diff --git a/settings.py b/settings.py new file mode 100644 index 0000000..2c35054 --- /dev/null +++ b/settings.py @@ -0,0 +1,46 @@ +import argparse +import toml +import os + +# Define the configuration class to hold settings 
+class Settings: + def __init__(self, config_path: str): + self.config_path = config_path + self.google_credentials_json = None + self.bucket_name = None + self.docker_host = None + self.docker_containers = [] + + self.load_config() + + def load_config(self): + """Load configuration from the provided TOML file.""" + try: + # Parse the TOML file + config = toml.load(self.config_path) + + # Extract values into the Settings object + self.google_credentials_json = config.get("google_credentials_json") + self.bucket_name = config.get("bucket_name") + self.docker_host = config.get("docker_host") + self.docker_containers = config.get("docker_containers", []) + + # Ensure credentials file exists + if not os.path.isfile(self.google_credentials_json): + raise ValueError(f"Google credentials file does not exist: {self.google_credentials_json}") + + except Exception as e: + print(f"Error loading configuration: {e}") + raise + + def __repr__(self): + """Helper method to display loaded settings.""" + return f"Settings(google_credentials_json={self.google_credentials_json}, " \ + f"bucket_name={self.bucket_name}, docker_containers={self.docker_containers})" + + +# Argument parsing +def parse_arguments(): + parser = argparse.ArgumentParser(description="Load configuration and interact with Google Cloud Storage.") + parser.add_argument("-c", "--config", required=True, help="Path to the configuration file") + return parser.parse_args() \ No newline at end of file diff --git a/snapshot.py b/snapshot.py new file mode 100644 index 0000000..0a72198 --- /dev/null +++ b/snapshot.py @@ -0,0 +1,67 @@ +from concurrent.futures import ThreadPoolExecutor, Future +import os +from settings import Settings, parse_arguments +from google.cloud import storage +from google.oauth2 import service_account +import containers +import docker +from storage import create_tar, upload_file_to_bucket + +def take_snapshot( + docker_client: docker.DockerClient, + storage_client: storage.Client, + settings: Settings, + container_config: dict[str, str] +) -> None: + """Take a snapshot of the specified Docker containers and upload it to Google Cloud Storage.""" + + container_name = container_config['container_name'] + bucket_path = container_config['bucket_path'] + tar_name = container_config['tar_name'] + data_path = container_config['data_path'] + + # Step 1: Stop containers + containers.stop(docker_client, [container_name]) + + # Step 2: Create tarball from data_path + tar_path = f"{tar_name}" + create_tar([data_path], tar_path) + + # Step 3: Upload tarball to Google Cloud Storage + upload_file_to_bucket(storage_client, settings.bucket_name, tar_path, bucket_path) + + # Step 4: Start containers back + containers.start(docker_client, [container_name]) + + # Step 5: Cleanup + os.remove(tar_path) + + +def main() -> None: + """Main function to parse arguments, initialize clients, and take snapshots in parallel.""" + args = parse_arguments() + settings = Settings(config_path=args.config) + + credentials = service_account.Credentials.from_service_account_file( + settings.google_credentials_json + ) + print(settings) + storage_client = storage.Client(credentials=credentials) + docker_client = docker.DockerClient(base_url=settings.docker_host) + + containers_config = settings.docker_containers + + # Use ThreadPoolExecutor to take snapshots in parallel + with ThreadPoolExecutor() as executor: + futures: list[Future] = [] + for container_config in containers_config: + futures.append(executor.submit(take_snapshot, docker_client, storage_client, 
settings, container_config))
+
+        for future in futures:
+            future.result()
+
+    print("All snapshots taken and uploaded.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/storage.py b/storage.py
new file mode 100644
index 0000000..b6b9123
--- /dev/null
+++ b/storage.py
@@ -0,0 +1,68 @@
+import os
+import subprocess
+from google.cloud.storage import Client
+from tqdm import tqdm
+
+def create_tar(paths: list[str], tar_name: str) -> None:
+    """Create an uncompressed tarball without including the full path of the files inside."""
+    abs_paths = [os.path.abspath(path) for path in paths]
+    common_dir = os.path.commonpath(abs_paths)
+    cmd = ['tar', '--create', '--file', tar_name]
+    for path in abs_paths:
+        # Archive each path relative to the common parent directory so the
+        # tarball does not contain the full absolute path.
+        cmd.append('--directory')
+        cmd.append(common_dir)
+        cmd.append(os.path.relpath(path, common_dir))
+
+    subprocess.run(cmd, check=True)
+    print(f"Created uncompressed tarball: {tar_name}")
+
+
+def extract_tar(tar_path: str, extract_to_path: str) -> None:
+    """Extract the tarball to the recovery path."""
+    print(f"Extracting {tar_path} to {extract_to_path}...")
+
+    # Command to extract tarball
+    cmd = ['tar', '--extract', '--file', tar_path, '--directory', extract_to_path]
+    subprocess.run(cmd, check=True)
+    print(f"Extraction complete: {extract_to_path}")
+
+
+def download_from_bucket(storage_client: Client, bucket_name: str, source_blob_name: str, destination_file_name: str) -> None:
+    """Download a file from Google Cloud Storage with progress tracking."""
+    try:
+        bucket = storage_client.get_bucket(bucket_name)
+        # get_blob() fetches the object metadata so blob.size is available for the progress bar
+        blob = bucket.get_blob(source_blob_name)
+
+        with open(destination_file_name, 'wb') as f:
+            with tqdm.wrapattr(f, "write", total=blob.size, desc=f"Downloading {source_blob_name}") as file_obj:
+                blob.download_to_file(file_obj)
+
+        print(f"File {source_blob_name} downloaded to {destination_file_name}.")
+    except Exception as e:
+        print(f"Error downloading file: {e}")
+
+
+def upload_file_to_bucket(
+    storage_client: Client,
+    bucket_name: str,
+    source_file: str,
+    destination_blob_name: str
+) -> None:
+    """Upload a file to the specified Google Cloud Storage bucket with progress tracking."""
+    try:
+        bucket = storage_client.get_bucket(bucket_name)
+        blob = bucket.blob(destination_blob_name)
+
+        file_size = os.path.getsize(source_file)
+
+        # Stream the whole file in a single upload; wrapping the file object with
+        # tqdm reports progress as the client reads from it. Calling
+        # upload_from_file() once per chunk would overwrite the object each time.
+        with open(source_file, "rb") as f:
+            with tqdm.wrapattr(f, "read", total=file_size, desc=f"Uploading {source_file}") as file_obj:
+                blob.upload_from_file(file_obj, size=file_size, content_type="application/octet-stream")
+
+        print(f"File {source_file} uploaded to {destination_blob_name}.")
+    except Exception as e:
+        print(f"Error uploading file: {e}")
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..b70b22f
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,324 @@
+version = 1
+requires-python = ">=3.12"
+resolution-markers = [
+    "python_full_version < '3.13'",
+    "python_full_version >= '3.13'",
+]
+
+[[package]]
+name = "cachetools"
+version = "5.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash =
"sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = 
"https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = 
"https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, +] + +[[package]] +name = "google-api-core" +version = "2.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/81/56/d70d66ed1b5ab5f6c27bf80ec889585ad8f865ff32acbafd3b2ef0bfb5d0/google_api_core-2.24.0.tar.gz", hash = "sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf", size = 162647 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/76/65b8b94e74bf1b6d1cc38d916089670c4da5029d25762441d8c5c19e51dd/google_api_core-2.24.0-py3-none-any.whl", hash = "sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9", size = 158576 }, +] + +[[package]] +name = "google-auth" +version = "2.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/af/b25763b9d35dfc2c6f9c3ec34d8d3f1ba760af3a7b7e8d5c5f0579522c45/google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00", size = 268878 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/8d/4d5d5f9f500499f7bd4c93903b43e8d6976f3fc6f064637ded1a85d09b07/google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0", size = 209829 }, +] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b8/1f/9d1e0ba6919668608570418a9a51e47070ac15aeff64261fb092d8be94c0/google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073", size = 35587 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/0f/2e2061e3fbcb9d535d5da3f58cc8de4947df1786fe6a1355960feb05a681/google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61", size = 29233 }, +] + +[[package]] +name = "google-cloud-storage" +version = "2.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/76/4d965702e96bb67976e755bed9828fa50306dca003dbee08b67f41dd265e/google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2", size = 5535488 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba", size = 131787 }, +] + +[[package]] +name = "google-crc32c" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/72/c3298da1a3773102359c5a78f20dae8925f5ea876e37354415f68594a6fb/google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc", size = 14472 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/41/65a91657d6a8123c6c12f9aac72127b6ac76dda9e2ba1834026a842eb77c/google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d", size = 30268 }, + { url = 
"https://files.pythonhosted.org/packages/59/d0/ee743a267c7d5c4bb8bd865f7d4c039505f1c8a4b439df047fdc17be9769/google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b", size = 30113 }, + { url = "https://files.pythonhosted.org/packages/25/53/e5e449c368dd26ade5fb2bb209e046d4309ed0623be65b13f0ce026cb520/google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00", size = 32995 }, + { url = "https://files.pythonhosted.org/packages/52/12/9bf6042d5b0ac8c25afed562fb78e51b0641474097e4139e858b45de40a5/google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3", size = 32614 }, + { url = "https://files.pythonhosted.org/packages/76/29/fc20f5ec36eac1eea0d0b2de4118c774c5f59c513f2a8630d4db6991f3e0/google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760", size = 33445 }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.66.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/a7/8e9cccdb1c49870de6faea2a2764fa23f627dd290633103540209f03524c/googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c", size = 114376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/0f/c0713fb2b3d28af4b2fded3291df1c4d4f79a00d15c2374a9e010870016c/googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed", size = 221682 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "node-snapshot" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "docker" }, + { name = "google-cloud-storage" }, + { name = "toml" }, + { name = "tqdm" }, +] + +[package.metadata] +requires-dist = [ + { name = "docker", specifier = ">=7.1.0" }, + { name = "google-cloud-storage", specifier = ">=2.19.0" }, + { name = "toml", specifier = ">=0.10.2" }, + { name = "tqdm", specifier = ">=4.67.1" }, +] + +[[package]] +name = "proto-plus" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/05/74417b2061e1bf1b82776037cad97094228fa1c1b6e82d08a78d3fb6ddb6/proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91", size = 56124 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/25/0b7cc838ae3d76d46539020ec39fc92bfc9acc29367e58fe912702c2a79e/proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961", size = 50126 }, +] + +[[package]] +name = "protobuf" +version = "5.29.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708 }, + { url = "https://files.pythonhosted.org/packages/61/fa/aae8e10512b83de633f2646506a6d835b151edf4b30d18d73afd01447253/protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a", size = 434508 }, + { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825 }, + { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573 }, + { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672 }, + { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550 }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = 
"sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, +] + +[[package]] +name = "pywin32" +version = "308" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/7c/d00d6bdd96de4344e06c4afbf218bc86b54436a94c01c71a8701f613aa56/pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897", size = 5939729 }, + { url = "https://files.pythonhosted.org/packages/21/27/0c8811fbc3ca188f93b5354e7c286eb91f80a53afa4e11007ef661afa746/pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47", size = 6543015 }, + { url = "https://files.pythonhosted.org/packages/9d/0f/d40f8373608caed2255781a3ad9a51d03a594a1248cd632d6a298daca693/pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091", size = 7976033 }, + { url = "https://files.pythonhosted.org/packages/a9/a4/aa562d8935e3df5e49c161b427a3a2efad2ed4e9cf81c3de636f1fdddfd0/pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed", size = 5938579 }, + { url = "https://files.pythonhosted.org/packages/c7/50/b0efb8bb66210da67a53ab95fd7a98826a97ee21f1d22949863e6d588b22/pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4", size = 6542056 }, + { url = "https://files.pythonhosted.org/packages/26/df/2b63e3e4f2df0224f8aaf6d131f54fe4e8c96400eb9df563e2aae2e1a1f9/pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd", size = 7974986 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "rsa" +version = "4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +]