diff --git a/.github/workflows/alfred.yml b/.github/workflows/alfred.yml index 1a6c8d1..d6619c7 100644 --- a/.github/workflows/alfred.yml +++ b/.github/workflows/alfred.yml @@ -4,25 +4,57 @@ on: push: tags: - 'v*' + workflow_dispatch: jobs: build: runs-on: ubuntu-latest - + outputs: + OUTPUT_FILE: ${{ steps.builder.outputs.OUTPUT_FILE }} steps: - uses: actions/checkout@v4 - name: Build Alfred workflow id: builder run: | - cd ente-totp - python -m venv .venv - source .venv/bin/activate - ./build.sh - env: - WORKFLOW_VERSION: ${{ github.ref_name }} - - - name: Release - uses: softprops/action-gh-release@v1 + python3 build.py + + release: + needs: build + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + + - name: Parse tags + id: parse_tags + run: | + git fetch -a + echo "tag_count=$(git tag -l | wc -l)" >> $GITHUB_OUTPUT + + - name: Update CHANGELOG + continue-on-error: ${{ steps.parse_tags.outputs.tag_count == '1' }} + id: changelog + uses: requarks/changelog-action@v1 + with: + token: ${{ github.token }} + tag: ${{ github.ref_name }} + + - name: Create Release + uses: ncipollo/release-action@v1 + with: + allowUpdates: true + draft: false + makeLatest: true + name: ${{ github.ref_name }} + body: ${{ steps.changelog.outputs.changes }} + token: ${{ github.token }} + artifacts: ${{ needs.build.outputs.OUTPUT_FILE }} + + - name: Commit CHANGELOG.md + uses: stefanzweifel/git-auto-commit-action@v5 with: - files: ${{ steps.builder.outputs.OUTPUT_FILE }} \ No newline at end of file + branch: main + commit_message: 'docs: update CHANGELOG.md for ${{ github.ref_name }}' + file_pattern: CHANGELOG.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..49cebef --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +__pycache__ +*.alfredworkflow +*.zip diff --git a/README.md b/README.md index 52e19cd..830fe64 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,7 @@ # An Alfred Workflow that uses your Ente 
Exports -The Ente Auth CLI does not support exporting TOTP codes. To use this project, please export TOTP codes from the Ente app and then import them into the workflow's database by selecting the export file using the "Configure Workflow" button in the workflow setup. You can import the file using the `ente import` Alfred command. Once imported, you can delete the file. - -> [!NOTE] -> In the future, the workflow will take care of the import. -> In addtion, once support for exporting codes via CLI is supported, we will use that instead. +> [!WARNING] +> This workflow exports secrets from the Ente Auth CLI. Please exercise caution when using it. ## Setup @@ -15,18 +12,14 @@ The Ente Auth CLI does not support exporting TOTP codes. To use this project, pl 1. Open Alfred 2. Go to Workflows. -3. Click the "Enter 2FA" workflow and click the Configure Workflow button. -4. Next, click the file button next to "Ente Export File" and browse to your Ente Auth plain text export of two-factor codes. +3. Click the "Ente Auth" workflow and click the Configure Workflow button. +4. Next, configure the settings (NOTE: the export path is what you configured when adding your ente account). 5. Finally, run the Alfred command `ente import`. 
## Local Development -### Install dependencies -./build-deps.sh +### Install/Update dependencies +poetry install --only=main ### Build alfred workflow file -./build.sh - -### Update requirements -pip install pip-tools -pip-compile requirements.in \ No newline at end of file +python3 build.py diff --git a/build.py b/build.py new file mode 100755 index 0000000..adbe65e --- /dev/null +++ b/build.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +import os +import plistlib +import shutil +import subprocess +from zipfile import ZIP_STORED, ZipFile + +import tomllib + + +def parse_info_plist(): + """Parse the info.plist file""" + with open("info.plist", "rb") as f: + plist = plistlib.load(f) + return plist + + +def get_workflow_name(): + """Get the workflow name from parsed plist""" + plist = parse_info_plist() + name = plist["name"].replace(" ", "_").lower() + return name + + +def get_workflow_version(): + """Get the workflow version from parsed plist""" + plist = parse_info_plist() + version = plist["version"].replace(" ", "_").lower() + return version + + +def get_pyproject_version(): + """Get the project version from pyproject.toml""" + with open("pyproject.toml", "rb") as f: + pyproject = tomllib.load(f) + version = pyproject["tool"]["poetry"]["version"] + return version + + +def update_version(version: str, plist_path: str = "info.plist"): + """Update the version in info.plist""" + plist = parse_info_plist() + plist["version"] = version + with open(plist_path, "wb") as f: + plistlib.dump(plist, f) + + +def init_venv(): + """Initialize the venv""" + + if os.path.exists(".venv"): + shutil.rmtree(".venv") + + subprocess.run(["poetry", "install", "--only", "main"], check=True) + + print("Dependencies installed successfully.") + + +def zip_workflow(filename: str): + """Zip the workflow""" + basepath = os.getcwd() + + zip_contents = [ + "icon.png", + "info.plist", + "main.py", + "src", + ".venv", + ] + zip_contents = [os.path.join(basepath, file) for file in zip_contents] + 
zip_exclude = ["__pycache__"] + + def should_include(path): + exclude_paths = any(excluded in path for excluded in zip_exclude) + include_paths = any(included in path for included in zip_contents) + return not exclude_paths and include_paths + + with ZipFile(filename, "w", ZIP_STORED, strict_timestamps=False) as zip: + for root, _, files in os.walk(basepath): + for file in files: + full_path = os.path.join(root, file) + if should_include(full_path): + arcname = os.path.relpath(full_path, basepath) + zip.write(full_path, arcname) + + +def main(): + workflow_name = get_workflow_name() + workflow_version = get_workflow_version() + pyproject_version = get_pyproject_version() + + init_venv() + + if workflow_version != pyproject_version: + update_version(pyproject_version) + else: + print("Workflow version matches PyProject version. Should this be updated?") + + zip_name = f"{workflow_name}-{workflow_version}.alfredworkflow" + zip_workflow(zip_name) + + if os.getenv("GITHUB_ACTIONS"): + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + f.write(f"OUTPUT_FILE={zip_name}\n") + + with open(os.environ["GITHUB_STEP_SUMMARY"], "a") as f: + f.write("# Alfred Workflow Build\n") + f.write(f"* Workflow name: {workflow_name}\n") + f.write(f"* Workflow version: {workflow_version}\n") + f.write(f"* Pyproject version: {pyproject_version}\n") + f.write(f"* ZIP name: {zip_name}\n") + + +if __name__ == "__main__": + main() diff --git a/build.sh b/build.sh deleted file mode 100755 index 5f9781a..0000000 --- a/build.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash - -# Build script for packaging an Alfred workflow without external dependencies - -# Exit immediately if a command exits with a non-zero status -set -e - -# Call build_deps.sh first -echo "Running build_deps.sh..."
-./build_deps.sh - -# Extract the workflow name from info.plist using the Python script -WORKFLOW_NAME=$(python3 build_tools.py --get-name) - -# Check if WORKFLOW_VERSION environment variable is set, otherwise use default -VERSION=${WORKFLOW_VERSION:-"1.0.0"} - -# Output filename -OUTPUT_FILE="${WORKFLOW_NAME}-${VERSION}.alfredworkflow" - -# Clean up any previous builds -clean() { - rm -f "$OUTPUT_FILE" -} - -# Function to package the directory -zip_dir() { - find . \ - -path "./.*" -prune -o \ - -type f \ - -not -name "*.md" \ - -not -name "*.log" \ - -not -name "*.alfredworkflow" \ - -not -name "requirements.*" \ - -not -name "*.pyc" \ - -not -path "*/__pycache__/*" \ - -not -path "./build*" \ - -print | zip --symlinks -@ "$OUTPUT_FILE" -} - -# Main execution -clean -echo "Updating version in info.plist..." -python3 build_tools.py --set-version "${VERSION}" -echo "Packaging the workflow..." -zip_dir -# if in github action, write build file path to GITHUB_OUTPUT -[[ -n "$GITHUB_OUTPUT" ]] && echo "OUTPUT_FILE=${PWD}/${OUTPUT_FILE}" >> "$GITHUB_OUTPUT" -echo "Packaged $OUTPUT_FILE successfully!" diff --git a/build_deps.sh b/build_deps.sh deleted file mode 100755 index 25b0979..0000000 --- a/build_deps.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash -set -e - -VENDOR_DIR="vendor" - -# Remove existing vendor directory if it exists -if [ -d "$VENDOR_DIR" ]; then - echo "Removing existing $VENDOR_DIR directory..." - rm -rf "$VENDOR_DIR" -fi - -mkdir -p "$VENDOR_DIR" - -# Install dependencies into the vendor directory -echo "Installing dependencies into $VENDOR_DIR..." -pip install --ignore-installed --target="$VENDOR_DIR" -r requirements.txt - -echo "Dependencies installed successfully." 
diff --git a/build_tools.py b/build_tools.py deleted file mode 100755 index f6d267b..0000000 --- a/build_tools.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python3 - -import sys -import plistlib - -def get_workflow_name(): - """Get the workflow name from info.plist""" - plist_path = 'info.plist' - with open(plist_path, 'rb') as f: - plist = plistlib.load(f) - return plist['name'] - -def update_version(version): - """Update the version in info.plist""" - plist_path = 'info.plist' - with open(plist_path, 'rb') as f: - plist = plistlib.load(f) - - plist['version'] = version - - with open(plist_path, 'wb') as f: - plistlib.dump(plist, f) - -if __name__ == '__main__': - if len(sys.argv) < 2: - print("Usage: python3 update_version.py [--get-name | --set-version ]") - sys.exit(1) - - if sys.argv[1] == '--get-name': - workflow_name = get_workflow_name() - print(workflow_name) - elif sys.argv[1] == '--set-version': - if len(sys.argv) != 3: - print("Usage: python3 update_version.py --set-version ") - sys.exit(1) - version = sys.argv[2] - update_version(version) - else: - print("Invalid option. Use --get-name or --set-version.") - sys.exit(1) diff --git a/info.plist b/info.plist index 9b395ba..e0af07b 100644 --- a/info.plist +++ b/info.plist @@ -3,7 +3,9 @@ bundleid - + com.chkpwd.ente.auth + category + Tools connections 548A4F34-31FF-4E4B-AD7C-1CA4D8391AAA @@ -31,18 +33,47 @@ vitoclose + + destinationuid + C0A73A1A-5538-4219-BB30-444166AA3F26 + modifiers + 1048576 + modifiersubtext + + vitoclose + + createdby chkpwd description - + Get TOTP Codes from Ente Auth disabled name - Ente TOTP Codes + Ente Auth objects + + config + + autopaste + + clipboardtext + {query} + ignoredynamicplaceholders + + transient + + + type + alfred.workflow.output.clipboard + uid + 89FF22C4-362B-4675-AEFA-B15E67898E1B + version + 3 + config @@ -71,13 +102,13 @@ runningsubtext Getting code for query: "{query}"... 
script - python3 main.py get -o alfred "{query}" + python3 main.py search "{query}" scriptargtype 0 scriptfile subtext - Get TOTP Codes from Ente Auth Database + Get TOTP Codes from Ente Auth title {const:alfred_workflow_name} type @@ -92,25 +123,6 @@ version 3 - - config - - autopaste - - clipboardtext - {query} - ignoredynamicplaceholders - - transient - - - type - alfred.workflow.output.clipboard - uid - 89FF22C4-362B-4675-AEFA-B15E67898E1B - version - 3 - config @@ -123,11 +135,9 @@ focusedappvariablename hotkey - 14 + 0 hotmod - 1703936 - hotstring - E + 0 leftcursor modsmode @@ -154,7 +164,7 @@ argumenttrimmode 0 argumenttype - 1 + 2 escaping 102 keyword @@ -170,7 +180,7 @@ runningsubtext importing.... script - python3 main.py import $export_file_path + python3 main.py import scriptargtype 1 scriptfile @@ -178,11 +188,11 @@ subtext title - import your ente data + Import Ente Secrets type 0 withspace - + type alfred.workflow.input.scriptfilter @@ -191,18 +201,58 @@ version 3 + + config + + autopaste + + clipboardtext + {query} + ignoredynamicplaceholders + + transient + + + type + alfred.workflow.output.clipboard + uid + C0A73A1A-5538-4219-BB30-444166AA3F26 + version + 3 + readme - An Alfred Workflow that uses your Ente Exports -Ente Auth CLI does not support exporting TOTP codes. To use this project, please export from the Ente app and then import them into the workflow's database by choosing the export file using the "Configure Workflow" button in the workflow setup and then import using the 'ente import' Alfred command. + # An Alfred Workflow that uses your Ente Exports + +> [!WARNING] +> This workflow exports secrets from the Ente Auth CLI. Please exercise caution when using it. + +## Setup + +1. Install workflow from releases +2. Follow instructions below to create the database + +## Instructions -The file can be deleted once imported. +1. Open Alfred +2. Go to Workflows. +3. Click the "Ente Auth" workflow and click the Configure Workflow button. +4. 
Next, configure the settings (NOTE: the export path is what you configured when adding your ente account). +5. Finally, run the Alfred command `ente import`. + +## Local Development + +### Install/Update dependencies +poetry install --only=main + +### Build alfred workflow file +python3 build.py uidata 3101A957-02C3-4D0C-8B13-1C3721459261 xpos - 250.0 + 245.0 ypos 330.0 @@ -216,16 +266,23 @@ The file can be deleted once imported. 61342121-5F6E-4340-A89B-F4DC5D5A7965 xpos - 240.0 + 245.0 ypos - 105.0 + 195.0 89FF22C4-362B-4675-AEFA-B15E67898E1B xpos - 495.0 + 465.0 + ypos + 195.0 + + C0A73A1A-5538-4219-BB30-444166AA3F26 + + xpos + 465.0 ypos - 140.0 + 340.0 userconfigurationconfig @@ -234,7 +291,7 @@ The file can be deleted once imported. config default - code||totp||en||ea + code||totp||ente placeholder en required @@ -255,22 +312,43 @@ The file can be deleted once imported. config default - + /usr/local/bin/ente + placeholder + /usr/local/bin/ente + required + + trim + + + description + + label + Ente CLI Binary Path + type + textfield + variable + ENTE_AUTH_BINARY_PATH + + + config + + default + ~/Documents/ente filtermode - 2 + 1 placeholder - plain text Ente Auth export + Export Directory Path required description Point this to the plain text export file from Ente Auth containing your 2FA data. It can be deleted after initial import. label - Ente Export File + Ente Export Directory type filepicker variable - export_file_path + ENTE_EXPORT_DIR config @@ -289,7 +367,7 @@ The file can be deleted once imported. type checkbox variable - username_in_title + USERNAME_IN_TITLE config @@ -308,14 +386,33 @@ The file can be deleted once imported. 
type checkbox variable - username_in_subtitle + USERNAME_IN_SUBTITLE + + + config + + default + + required + + text + + + description + + label + Overwrite current export + type + checkbox + variable + OVERWRITE_EXPORT variablesdontexport version - 1.0.0 + 2.0.0 webaddress - https://github.com/chkpwd/alfred-workflows/main/tree/ente-totp + https://github.com/chkpwd/alfred-ente-auth diff --git a/main.py b/main.py index e8a37c5..e72bd17 100755 --- a/main.py +++ b/main.py @@ -1,212 +1,76 @@ -import sys -import os - -# Add the vendor directory to the path -sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'vendor')) - -import json import logging -from collections import defaultdict -from datetime import datetime, timedelta - -import click -import pyotp -import keyring - -USERNAME_IN_TITLE = os.getenv("username_in_title", "false").lower() in ("true", "1", "t", "y", "yes") -USERNAME_IN_SUBTITLE = os.getenv("username_in_subtitle", "false").lower() in ("true", "1", "t", "y", "yes") - -# Keychain service and account for storing the secrets -KEYCHAIN_SERVICE = "ente-totp-alfred-workflow" -KEYCHAIN_ACCOUNT = "totp_secrets" - -# Use an environment variable to cache the JSON data to reduce keychain calls -CACHE_ENV_VAR = "TOTP_CACHE" - - -def load_secrets(): - """Load secrets from the environment variable or keychain.""" - # Try to load from the cached environment variable first - cached_secrets = os.getenv(CACHE_ENV_VAR) - - if cached_secrets: - logging.warning("Loading secrets from environment variable cache.") - return json.loads(cached_secrets) - - # If not cached, load from the keychain - logging.warning("Loading secrets from keychain.") - secrets_json = keyring.get_password(KEYCHAIN_SERVICE, KEYCHAIN_ACCOUNT) - - if secrets_json is None: - raise Exception("No secrets found in keychain.") - - return json.loads(secrets_json) - -@click.group() -def cli(): - pass - - -@cli.command("import") -@click.argument("file", type=click.Path(exists=False), 
required=False) -def import_file(file): - try: - logging.warning(f"import_file: {file}") - secret_dict = defaultdict(list) - for service_name, username, secret in parse_secrets(file): - secret_dict[service_name].append((username, secret)) - - secrets_json = json.dumps(secret_dict) - # Store secrets in the keychain - if secrets_json: - keyring.set_password(KEYCHAIN_SERVICE, KEYCHAIN_ACCOUNT, secrets_json) - - logging.warning(f"Database created with {sum(len(v) for v in secret_dict.values())} entries.") - output = { - "items": [ - {"title": "Import Successful", - "subtitle": f"Database created with {sum(len(v) for v in secret_dict.values())} entries."} - ], - "variables": { - CACHE_ENV_VAR: secrets_json # Set the TOTP_CACHE environment variable for Alfred - } - } - print(json.dumps(output)) - - except FileNotFoundError: - error_message = f"File not found: {file}" - logging.error(error_message) - print(json.dumps({"items": [{"title": "Import Failed", "subtitle": error_message}]})) - except Exception as e: - error_message = f"An error occurred: {str(e)}" - logging.error(error_message) - print(json.dumps({"items": [{"title": "Import Failed", "subtitle": error_message}]})) - - -def parse_secrets(file_path="secrets.txt"): - secrets_list = [] - - with open(file_path, "r") as secrets_file: - for line in secrets_file: - line = line.strip() - if line: - line = line.replace("=sha1", "=SHA1") - if "codeDisplay" in line: - line = line.split("codeDisplay")[0][:-1] - - parsed_uri = pyotp.parse_uri(line) - if parsed_uri: - service_name = parsed_uri.issuer or parsed_uri.name - username = parsed_uri.name - secret = parsed_uri.secret - if secret: - secrets_list.append((service_name, username, secret)) - else: - print(f"Unable to parse secret in: {line}") - else: - print(f"Unable to parse the line: {line}") - return secrets_list - - -def format_data(service_name, username, current_totp, next_totp, output_type): - """Format the TOTP data based on the output type.""" - subset = f"Current 
TOTP: {current_totp} | Next TOTP: {next_totp}" + ( - f" - {username}" if username and USERNAME_IN_SUBTITLE else "") - service_name = f"{service_name} - {username}" if username and USERNAME_IN_TITLE else service_name - - if output_type == "alfred": - return { - "title": service_name, - "subtitle": subset, - "arg": current_totp, - "icon": {"path": "./icon.png"}, - } - elif output_type == "json": - return { - "service_name": service_name, - "current_totp": current_totp, - "next_totp": next_totp, - "service_data": subset, - } - return None +import os +import sys -@cli.command("get") -@click.argument("secret_id") -@click.option( - "-o", - "output_format", - type=click.Choice(["json", "alfred"]), - default="json", - help="Data output format", +# Add the venv directory to the path +sys.path.append( + os.path.join( + os.path.dirname(os.path.abspath(__file__)), ".venv/lib/python3.11/site-packages" + ) ) -def generate_totp(secret_id, output_format): - """Generate the current TOTP for a given secret.""" - try: - # Load secrets from the cache or keychain - data = load_secrets() - items = [] - - logging.warning(f"Searching for {secret_id} in {len(data)} services.\n") - - # Split the secret_id by spaces for more granular search - search_parts = secret_id.lower().split() - - matches = [] - for service_name, service_data in data.items(): - for username, secret in service_data: - # Lowercase the service_name and username for case-insensitive matching - service_name_lower = service_name.lower() - username_lower = username.lower() +from src.ente_auth import EnteAuth +from src.store_keychain import ente_export_to_keychain, import_accounts_from_keychain +from src.totp_accounts_manager import format_totp_result +from src.utils import fuzzy_search_accounts, output_alfred_message, str_to_bool - # Define match scores for prioritization - score = 0 - if all(part in service_name_lower for part in search_parts): - score += 3 # Full match in service name - if all(part in username_lower for 
part in search_parts): - score += 2 # Full match in username - if any(part in service_name_lower for part in search_parts): - score += 1 # Partial match in service name - if any(part in username_lower for part in search_parts): - score += 0.5 # Partial match in username - - if score > 0: - # Generate TOTP for the matching service - current_totp = pyotp.TOTP(secret).now() - next_time = datetime.now() + timedelta(seconds=30) - next_totp = pyotp.TOTP(secret).at(next_time) - formatted_data = format_data( - service_name, username, current_totp, next_totp, output_format - ) - matches.append((score, formatted_data)) - - # Sort matches by score in descending order - matches.sort(reverse=True, key=lambda x: x[0]) - items = [match[1] for match in matches] # Extract the formatted results +logger = logging.getLogger(__name__) +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) - # Set the output JSON with the items (either matching services or no matches) - if items: - output = { - "items": items, - } +if __name__ == "__main__": + if len(sys.argv) < 2: + raise ValueError("No subcommand found. 
Use one of: import, search") + + elif sys.argv[1] == "import": + ente_export_dir = os.getenv("ENTE_EXPORT_DIR") + if not ente_export_dir: + logger.error("ENTE_EXPORT_DIR not configured.") + sys.exit(1) + ente_export_path = os.path.join( + os.path.expanduser(ente_export_dir), "ente_auth.txt" + ) + + overwrite_export = str_to_bool(os.getenv("OVERWRITE_EXPORT", "True")) + + ente_auth = EnteAuth() + + try: + ente_auth.export_ente_auth_accounts(ente_export_path, overwrite_export) + logger.info("Exported ente auth TOTP data to file.") + except Exception as e: + logger.exception(f"Failed to export ente auth TOTP data: {e}") + output_alfred_message("Failed to export TOTP data", str(e)) else: - output = { - "items": [{"title": "No matching services found."}] - } - - # Always check if the secrets were cached, and include the cache variable - if os.getenv(CACHE_ENV_VAR) is None: - secrets_json = json.dumps(data) - output["variables"] = { - CACHE_ENV_VAR: secrets_json - } + try: + import_result = ente_export_to_keychain(ente_export_path) + output_alfred_message( + "Imported TOTP data", + f"Successfully imported {import_result.count} TOTP accounts to keychain and Alfred cache.", + import_result.variables, + ) + except Exception as e: + logger.exception( + f"Failed to populate TOTP data in keychain from file: {e}" + ) + output_alfred_message("Failed to import TOTP data", str(e)) + + ente_auth.delete_ente_export(ente_export_path) + + elif sys.argv[1] == "search": + if len(sys.argv) < 3: + raise ValueError("No search string found") + + try: + accounts = import_accounts_from_keychain() + logger.info("Loaded TOTP accounts from keychain.") + except Exception as e: + logger.exception(f"Failed to load TOTP accounts from keychain: {e}") + output_alfred_message("Failed to load TOTP accounts", str(e))
== "__main__": - cli() + else: + search_string = sys.argv[2] + matched_accounts = fuzzy_search_accounts(search_string, accounts) + formatted_account_data = format_totp_result(matched_accounts) + formatted_account_data.print_json() diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..3c4c65f --- /dev/null +++ b/poetry.lock @@ -0,0 +1,355 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, + {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file 
= "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cryptography" +version = "43.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = 
"cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + 
{file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs 
(>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, + {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, +] + +[package.dependencies] +"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", 
"pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] + +[[package]] +name = "jeepney" +version = "0.8.0" +description = "Low-level, pure Python DBus protocol wrapper." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] + +[package.extras] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] + +[[package]] +name = "keyring" +version = "25.5.0" +description = "Store and access your passwords safely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "keyring-25.5.0-py3-none-any.whl", hash = "sha256:e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741"}, + {file = "keyring-25.5.0.tar.gz", hash = "sha256:4c753b3ec91717fe713c4edd522d625889d8973a349b0e582622f49766de58e6"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +"jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +completion = ["shtab (>=1.1.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] +type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] + +[[package]] +name = "more-itertools" +version = "10.5.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, + {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] 
+name = "pyotp" +version = "2.9.0" +description = "Python One Time Password Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyotp-2.9.0-py3-none-any.whl", hash = "sha256:81c2e5865b8ac55e825b0358e496e1d9387c811e85bb40e71a3b29b288963612"}, + {file = "pyotp-2.9.0.tar.gz", hash = "sha256:346b6642e0dbdde3b4ff5a930b664ca82abfa116356ed48cc42c7d6590d36f63"}, +] + +[package.extras] +test = ["coverage", "mypy", "ruff", "wheel"] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, + {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", 
"jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "6b7d33b5a53c3b4b9fd3065b5fa76deead3665fa41f15d4de044dae2065434b3" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000..53b35d3 --- /dev/null +++ b/poetry.toml @@ -0,0 +1,3 @@ +[virtualenvs] +create = true +in-project = true diff --git a/pyproject.toml b/pyproject.toml index 9480aa3..d4ba149 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,20 @@ -[tool.basedpyright] -typeCheckingMode = "standard" +[tool.poetry] +name = "alfred-ente-auth" +description = "A workflow that uses your Ente Exports" +authors = ["Bryan Jones "] +license = "MIT" +readme = "README.md" +package-mode = false +version = "2.0.0" +[tool.poetry.dependencies] +python = "^3.11" +pyotp = "^2.9.0" +keyring = "^25.5.0" + +[build-system] +requires = ["poetry-core>=1.8"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff.lint] +extend-select = ["I"] diff --git a/requirements.in b/requirements.in deleted file mode 100644 index 58446eb..0000000 --- a/requirements.in +++ /dev/null @@ -1,3 +0,0 @@ -pyotp -click -keyring \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index b3e2a73..0000000 --- a/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile requirements.in -# -backports-tarfile==1.2.0 - # via jaraco-context -click==8.1.7 - # via -r requirements.in -importlib-metadata==8.5.0 - # via keyring -jaraco-classes==3.4.0 - # via keyring -jaraco-context==6.0.1 - # via keyring -jaraco-functools==4.0.2 - # via keyring -keyring==25.3.0 - # 
via -r requirements.in -more-itertools==10.5.0 - # via - # jaraco-classes - # jaraco-functools -pyotp==2.9.0 - # via -r requirements.in -zipp==3.20.2 - # via importlib-metadata diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/ente_auth.py b/src/ente_auth.py new file mode 100644 index 0000000..0f6f836 --- /dev/null +++ b/src/ente_auth.py @@ -0,0 +1,91 @@ +import logging +import os +import subprocess + +logger = logging.getLogger(__name__) + + +class EnteAuth: + def __init__(self): + ente_auth_binary_path_env = os.getenv("ENTE_AUTH_BINARY_PATH") + if ente_auth_binary_path_env: + self.ente_auth_binary_path = self.check_ente_binary( + ente_auth_binary_path_env + ) + else: + self.ente_auth_binary_path = self._find_ente_path() + + def _find_ente_path(self) -> str: + result = subprocess.run( + ["which", "ente"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + if result.returncode != 0: + raise OSError("'ente' binary not found. Please ensure it's installed.") + + return result.stdout.decode("utf-8").strip() + + def create_ente_path(self, path: str) -> None: + if not os.path.exists(path): + os.makedirs(path) + logger.info(f"Ente folder created at: {path}") + + def export_ente_auth_accounts(self, export_path: str, overwrite: bool) -> None: + path_exists = os.path.exists(export_path) + + if path_exists and overwrite: + logger.debug("Ente auth export file found. Overwrite is true. Deleting...") + self.delete_ente_export(export_path) + elif path_exists and not overwrite: + logger.info("Export file already exists. Skipping export.") + return + + logger.debug("Ente auth export file not found. Exporting...") + try: + result = subprocess.run( + ["ente", "export"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + # When export directory doesn't exist, Ente CLI still returns rc0 but prints an error to stderr. + # If this happens, we'll create the path and retry. 
+ if "error: path does not exist" in result.stderr.decode("utf-8"): + export_dir = os.path.dirname(export_path) + logger.info(f"Export directory does not exist. Creating: {export_dir}") + self.create_ente_path(export_dir) + logger.info("Retrying export...") + self.export_ente_auth_accounts(export_path, overwrite) + + except subprocess.CalledProcessError as e: + logger.error("Export failed", e) + raise e + + if not os.path.exists(export_path): + raise OSError( + "Export appeared to succeed, but the export file was not found." + ) + + def delete_ente_export(self, export_path: str) -> None: + try: + os.remove(export_path) + logger.info("Ente export file deleted") + except OSError as e: + logger.error("Error during removal", e) + raise e + + @staticmethod + def check_ente_binary(path) -> bool: + try: + subprocess.run( + [f"{path}", "version"], + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + return True + except subprocess.CalledProcessError: + logger.error( + f"Ente binary not found at {path}. Please check ente CLI is installed and the path is correct." 
+ ) + return False diff --git a/src/models.py b/src/models.py new file mode 100644 index 0000000..b70bf98 --- /dev/null +++ b/src/models.py @@ -0,0 +1,66 @@ +import json +import sys +from dataclasses import asdict, dataclass +from typing import Any + + +# https://www.alfredapp.com/help/workflows/inputs/script-filter/json +@dataclass +class AlfredOutputItemIcon: + path: str = "icon.png" + type: str | None = None + + def to_dict(self): + return {k: v for k, v in asdict(self).items() if v is not None} + + +@dataclass +class AlfredOutputItem: + title: str + uid: str | None = None + subtitle: str | None = None + arg: str | list[str] | None = None + icon: AlfredOutputItemIcon | None = None + variables: dict[str, Any] | None = None + + def to_dict(self): + result = {k: v for k, v in asdict(self).items() if v is not None} + if self.icon is not None: + result["icon"] = self.icon.to_dict() + return result + + +@dataclass +class AlfredOutput: + items: list[AlfredOutputItem] + + def to_dict(self): + return {"items": [item.to_dict() for item in self.items]} + + def to_json(self): + return json.dumps(self.to_dict(), separators=(",", ":")) + + def print_json(self): + sys.stdout.write(self.to_json()) + + +@dataclass +class ImportResult: + count: int + variables: dict[str, str] + + +@dataclass +class TotpAccount: + username: str + secret: str + + +class TotpAccounts(dict[str, TotpAccount]): + def to_json(self) -> str: + json_data = {k: asdict(v) for k, v in self.items()} + return json.dumps(json_data, separators=(",", ":")) + + def from_json(self, json_str: str) -> "TotpAccounts": + data = json.loads(json_str) + return TotpAccounts({k: TotpAccount(**v) for k, v in data.items()}) diff --git a/src/store_keychain.py b/src/store_keychain.py new file mode 100644 index 0000000..0b72d85 --- /dev/null +++ b/src/store_keychain.py @@ -0,0 +1,90 @@ +import json +import logging +import os + +import keyring + +from src.models import AlfredOutput, AlfredOutputItem, ImportResult, TotpAccounts 
+from src.totp_accounts_manager import parse_ente_export + +logger = logging.getLogger(__name__) + +# Keychain service and account for storing the TOTP accounts +KEYCHAIN_SERVICE = "ente-totp-alfred-workflow" +KEYCHAIN_ACCOUNT = "totp_secrets" + +# Use an environment variable to cache the JSON data to reduce keychain calls +CACHE_ENV_VAR = "TOTP_CACHE" + + +def import_accounts_from_keychain() -> TotpAccounts: + """Load TOTP accounts from the environment variable or keychain.""" + cached_accounts = os.getenv(CACHE_ENV_VAR) + + if cached_accounts: + logger.info("Loading TOTP accounts from environment variable cache.") + return json.loads(cached_accounts) + + # If not cached, load from the keychain + logger.info("Loading TOTP accounts from keychain.") + accounts_json = keyring.get_password( + service_name=KEYCHAIN_SERVICE, username=KEYCHAIN_ACCOUNT + ) + + if accounts_json is None: + raise Exception("No TOTP accounts found in keychain.") + + accounts = TotpAccounts().from_json(accounts_json) + + return accounts + + +def ente_export_to_keychain(file: str) -> ImportResult: + """Import TOTP accounts from an Ente export file and store them in the keychain.""" + result = ImportResult(0, {}) + + try: + logger.debug(f"import_file: {file}") + + accounts = parse_ente_export(file) + accounts_json = accounts.to_json() + + if accounts: + keyring.set_password( + service_name=KEYCHAIN_SERVICE, + username=KEYCHAIN_ACCOUNT, + password=accounts_json, + ) + + secrets_imported_count = sum(len(k) for k in accounts.items()) + + logger.info(f"Keychain database created with {secrets_imported_count} entries.") + + result.count = secrets_imported_count + result.variables = {CACHE_ENV_VAR: accounts_json} + + except FileNotFoundError: + error_message = f"File not found: {file}" + logger.error(error_message) + AlfredOutput( + [ + AlfredOutputItem( + title="Import Failed", + subtitle=f"File not found: {file}", + ) + ] + ).print_json() + + except Exception as e: + error_message = f"An error 
occurred: {str(e)}" + logger.exception(error_message, e) + AlfredOutput( + [ + AlfredOutputItem( + title="Import Failed", + subtitle=error_message, + ) + ] + ).print_json() + + return result diff --git a/src/totp_accounts_manager.py b/src/totp_accounts_manager.py new file mode 100644 index 0000000..186c0e5 --- /dev/null +++ b/src/totp_accounts_manager.py @@ -0,0 +1,93 @@ +import logging +import os +from datetime import datetime, timedelta +from urllib.parse import parse_qs, unquote, urlparse + +import pyotp + +from src.models import ( + AlfredOutput, + AlfredOutputItem, + TotpAccount, + TotpAccounts, +) +from src.utils import str_to_bool + +logger = logging.getLogger(__name__) + + +USERNAME_IN_TITLE = str_to_bool(os.getenv("USERNAME_IN_TITLE", "False")) +USERNAME_IN_SUBTITLE = str_to_bool(os.getenv("USERNAME_IN_SUBTITLE", "False")) + + +def parse_ente_export(file_path: str) -> TotpAccounts: + accounts = TotpAccounts() + + with open(file_path, "r") as ente_export_file: + for line in ente_export_file: + line = line.strip() + if line: + line = line.replace("=sha1", "=SHA1") + if "codeDisplay" in line: + line = line.split("codeDisplay")[0][:-1] + + # Manually parse the otpauth URI without pyotp + # https://github.com/pyauth/pyotp/issues/171 + parsed_uri = urlparse(line) + if parsed_uri.scheme == "otpauth": + path_items = unquote(parsed_uri.path).strip("/").split(":", 1) + if len(path_items) == 2: + service_name, username = path_items[0], path_items[1] + else: + service_name, username = path_items[0].strip(":"), "" + + query_params = parse_qs(parsed_uri.query) + secret = query_params.get("secret", [None])[0] + + if not secret: + raise ValueError( + f"Unable to parse 'secret' parameter in: {line}" + ) + + accounts[service_name] = TotpAccount(username, secret) + return accounts + + +def format_totp_result(accounts: TotpAccounts) -> AlfredOutput: + """Format TOTP accounts for Alfred.""" + result = AlfredOutput([]) + try: + for service_name, service_data in 
accounts.items(): + current_totp = pyotp.TOTP(service_data.secret).now() + next_time = datetime.now() + timedelta(seconds=30) + next_totp = pyotp.TOTP(service_data.secret).at(next_time) + + service_name = ( + f"{service_name} - {service_data.username}" + if service_data.username and USERNAME_IN_TITLE + else service_name + ) + subtitle = f"Current TOTP: {current_totp} | Next TOTP: {next_totp}" + ( + f" - {service_data.username}" + if service_data.username and USERNAME_IN_SUBTITLE + else "" + ) + + result.items.append( + AlfredOutputItem( + title=service_name, + subtitle=subtitle, + arg=current_totp, + ) + ) + + if not result.items: + result.items = [AlfredOutputItem(title="No matching services found.")] + + except Exception as e: + logging.exception(f"Error: {str(e)}") + result.items = [ + AlfredOutputItem(title="Unexpected error in format_totp_result function.") + ] + + return result diff --git a/src/utils.py b/src/utils.py new file mode 100644 index 0000000..ece39f6 --- /dev/null +++ b/src/utils.py @@ -0,0 +1,53 @@ +from src.models import AlfredOutput, AlfredOutputItem, TotpAccounts + + +def str_to_bool(val): + if isinstance(val, str): + val = val.lower() + if val in (True, "true", "1", 1): + return True + if val in (False, "false", "0", 0): + return False + msg = f"Cannot convert value to bool: {val!r}" + raise ValueError(msg) + + +def fuzzy_search_accounts(search_string: str, values: TotpAccounts) -> TotpAccounts: + matches: list[tuple[float, str]] = [] + + # Split the search_string by spaces for more granular search + search_parts = search_string.lower().split() + + for service_name, service_info in values.items(): + # Lowercase the service_name and username for case-insensitive matching + service_name_lower = service_name.lower() + username_lower = service_info.username.lower() + + # Define match scores for prioritization + score = 0 + if all(part in service_name_lower for part in search_parts): + score += 3 # Full match in service name + if all(part in 
username_lower for part in search_parts): + score += 2 # Full match in username + if any(part in service_name_lower for part in search_parts): + score += 1 # Partial match in service name + if any(part in username_lower for part in search_parts): + score += 0.5 # Partial match in username + + if score > 0: + matches.append((float(score), service_name)) + + # Sort matches by score in descending order + matches.sort(reverse=True, key=lambda x: x[0]) + + matched_accounts = TotpAccounts( + {k: v for k, v in values.items() if k in [match[1] for match in matches]} + ) + + return matched_accounts + + +def output_alfred_message(title: str, subtitle: str, variables: dict | None = None): + AlfredOutput( + [AlfredOutputItem(title=title, subtitle=subtitle, variables=variables)] + ).print_json()