diff --git a/.gitignore b/.gitignore index 09c1cda..aed8160 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,5 @@ __pycache__/ .pytest_cache/ *.egg-info/ .venv/ +tests/_config_home/ +GEMINI.md diff --git a/CLI.md b/CLI.md new file mode 100644 index 0000000..7d1b978 --- /dev/null +++ b/CLI.md @@ -0,0 +1,117 @@ +# Feedscope CLI Documentation + +Feedscope CLI provides commands to interact with the Feedbin API. + +## Authentication + +```bash +feedscope auth login +feedscope auth status +feedscope auth logout +``` + +## Entries + +Retrieve and filter entries. + +```bash +# List entries +feedscope entries list [--page N] [--since DATE] [--read/--no-read] [--starred] + +# Show an entry +feedscope entries show <entry-id> + +# List entries for a feed +feedscope entries feed <feed-id> +``` + +## Entry State + +Manage unread, starred, updated, and recently read entries. + +```bash +# Unread +feedscope unread list +feedscope unread mark-read <entry-id> ... +feedscope unread mark-unread <entry-id> ... + +# Starred +feedscope starred list +feedscope starred star <entry-id> ... +feedscope starred unstar <entry-id> ... + +# Updated +feedscope updated list [--include-diff] +feedscope updated mark-read <entry-id> ... + +# Recently Read +feedscope recently-read list +feedscope recently-read create <entry-id> ... +``` + +## Saved Searches + +```bash +feedscope saved-search list +feedscope saved-search get <search-id> [--include-entries] +feedscope saved-search create --name "Name" --query "Query" +feedscope saved-search update <search-id> [--name "Name"] [--query "Query"] +feedscope saved-search delete <search-id> +``` + +## Tags & Taggings + +```bash +# Tags +feedscope tags rename --old-name "Old" --new-name "New" +feedscope tags delete --name "Tag" + +# Taggings +feedscope taggings list +feedscope taggings create --feed-id <feed-id> --name "Tag" +feedscope taggings delete <tagging-id> +``` + +## Subscriptions + +```bash +feedscope subscriptions list +feedscope subscriptions get ... +feedscope subscriptions create <feed-url> +feedscope subscriptions update <subscription-id> "New Title" +feedscope subscriptions delete <subscription-id> +``` + +## Supporting Tools + +```bash +# Imports (OPML) +feedscope imports list +feedscope imports create <opml-file> +feedscope imports status <import-id> + +# Pages +feedscope pages save --url <url> + +# Icons +feedscope icons list + +# Extract Content +feedscope extract <url> +``` + +## Configuration + +Configuration is stored in `~/.config/dev.pirateninja.feedscope/config.toml` (or platform equivalent). + +Feedbin credentials are stored under `[auth]`. To use the extraction service, also add Extract API credentials under `[extract]`: + +```toml +[auth] +email = "..." +password = "..." + +[extract] +username = "..." +secret = "..." +``` diff --git a/plans/2025-11-19-subcommand-implementation.md b/plans/2025-11-19-subcommand-implementation.md index e5cdb7e..ce7023f 100644 --- a/plans/2025-11-19-subcommand-implementation.md +++ b/plans/2025-11-19-subcommand-implementation.md @@ -10,39 +10,39 @@ - Deliver the Feedbin-aligned subcommands outlined in `plans/2025-11-19-agents-preferences.md`, starting with entries retrieval, state management (unread/starred/updated/recently read), saved searches/tags, imports/pages/icons, and support utilities such as feed metadata and the full-content extractor. ## Infrastructure tasks -- [ ] Introduce new Typer sub-app modules (e.g., `entries.py`, `state.py`, `searches.py`, `utils.py`) so the main `feedscope` app can keep concerns separate while still using Click under the hood. -- [ ] Add `loguru` via `uv add` and use it consistently for debug/info messages inside the new command modules; keep user-facing output via `typer.echo`.
-- [ ] Create a `tests/` directory (per AGENTS) and populate it with CLI-focused pytest files that use Typer’s `CliRunner` to simulate commands. -- [ ] Document `uv run pytest`, `uv run ruff`, and `uv run ty` in README/CONTRIBUTING if needed (so future contributors remember AGENTS requirements). -- [ ] Add the `stamina` retry/backoff library via `uv add` and wrap `httpx` requests with its policies so the CLI gracefully handles transient errors for GET/DELETE requests, logging retries through `loguru`. -- [ ] Ensure the cached `CacheClient` from `hishel` is configured to store responses for safe GET-like requests; make cache-control decisions explicit so stale data isn't re-used for write operations. +- [x] Introduce new Typer sub-app modules (e.g., `entries.py`, `state.py`, `searches.py`, `utils.py`) so the main `feedscope` app can keep concerns separate while still using Click under the hood. +- [x] Add `loguru` via `uv add` and use it consistently for debug/info messages inside the new command modules; keep user-facing output via `typer.echo`. +- [x] Create a `tests/` directory (per AGENTS) and populate it with CLI-focused pytest files that use Typer’s `CliRunner` to simulate commands. +- [x] Document `uv run pytest`, `uv run ruff`, and `uv run ty` in README/CONTRIBUTING if needed (so future contributors remember AGENTS requirements). +- [x] Add the `stamina` retry/backoff library via `uv add` and wrap `httpx` requests with its policies so the CLI gracefully handles transient errors for GET/DELETE requests, logging retries through `loguru`. +- [x] Ensure the cached `CacheClient` from `hishel` is configured to store responses for safe GET-like requests; make cache-control decisions explicit so stale data isn't re-used for write operations. ## Phase 1: Entries & feed metadata -- [ ] Build `feedscope entries list` with support for `--since`, `--page`, `--per-page`, `--read/--starred`, `--mode`, `--include-original`, `--include-enclosure`, and `--include-content-diff`, matching `content/entries.md`. -- [ ] Add `feedscope entries show <id>` to fetch `GET /v2/entries/<id>.json` along with error handling for status codes listed in `content/entries.md`. -- [ ] Implement `feedscope entries feed <feed-id>` (or similar) to wrap `GET /v2/feeds/<feed-id>/entries.json` and honor the same filters. -- [ ] Write tests verifying query parameter serialization and response handling (mock `httpx.Client` via `respx` or similar) for each command. +- [x] Build `feedscope entries list` with support for `--since`, `--page`, `--per-page`, `--read/--starred`, `--mode`, `--include-original`, `--include-enclosure`, and `--include-content-diff`, matching `content/entries.md`. +- [x] Add `feedscope entries show <id>` to fetch `GET /v2/entries/<id>.json` along with error handling for status codes listed in `content/entries.md`. +- [x] Implement `feedscope entries feed <feed-id>` (or similar) to wrap `GET /v2/feeds/<feed-id>/entries.json` and honor the same filters. +- [x] Write tests verifying query parameter serialization and response handling (mock `httpx.Client` via `respx` or similar) for each command. ## Phase 2: Entry state management -- [ ] Provide `feedscope unread list` plus `mark-read`/`mark-unread` commands that POST/DELETE `unread_entries` per `content/unread-entries.md`, enforcing the 1,000-entry limit with validation. -- [ ] Mirror that behavior for `feedscope starred list/star/unstar` to match `content/starred-entries.md`.
-- [ ] Add `feedscope updated list` and `feedscope updated mark-read` using `content/updated-entries.md`, reusing the entry-fetch helpers from Phase 1 to display diffs when `--include-diff` is requested. -- [ ] Create `feedscope recently-read list/create` per `content/recently-read-entries.md`. -- [ ] Cover these commands with dedicated tests that mock the ID arrays and confirm the right HTTP verb/payload is sent. +- [x] Provide `feedscope unread list` plus `mark-read`/`mark-unread` commands that POST/DELETE `unread_entries` per `content/unread-entries.md`, enforcing the 1,000-entry limit with validation. +- [x] Mirror that behavior for `feedscope starred list/star/unstar` to match `content/starred-entries.md`. +- [x] Add `feedscope updated list` and `feedscope updated mark-read` using `content/updated-entries.md`, reusing the entry-fetch helpers from Phase 1 to display diffs when `--include-diff` is requested. +- [x] Create `feedscope recently-read list/create` per `content/recently-read-entries.md`. +- [x] Cover these commands with dedicated tests that mock the ID arrays and confirm the right HTTP verb/payload is sent. ## Phase 3: Saved searches, tags & taggings -- [ ] Add `feedscope saved-search list`, `get`, `create`, `update`, and `delete` commands following `content/saved-searches.md`, including `--include-entries` and pagination options. -- [ ] Provide `feedscope tags rename`/`delete` and `feedscope taggings list/create/delete` inspired by `content/tags.md` and `content/taggings.md`. -- [ ] Ensure CLI output exposes the relevant JSON arrays (e.g., after rename/delete the updated taggings array) and write pytest coverage for success/failure paths. +- [x] Add `feedscope saved-search list`, `get`, `create`, `update`, and `delete` commands following `content/saved-searches.md`, including `--include-entries` and pagination options. +- [x] Provide `feedscope tags rename`/`delete` and `feedscope taggings list/create/delete` inspired by `content/tags.md` and `content/taggings.md`. +- [x] Ensure CLI output exposes the relevant JSON arrays (e.g., after rename/delete the updated taggings array) and write pytest coverage for success/failure paths. ## Phase 4: Supporting APIs -- [ ] Implement `feedscope imports create|list|status` that uploads OPML, sets `Content-Type: text/xml`, and re-uses the client cache. -- [ ] Provide `feedscope pages save` to POST URLs/titles (`content/pages.md`) and return the created entry payload. -- [ ] Add `feedscope icons list` for `GET /v2/icons.json` and consider caching or optional JSONL output for scripting. -- [ ] Create an `extract` command that, given credentials stored in config (new `extract.username`/`extract.secret` entries), builds the HMAC-SHA1 signature as in `content/extract-full-content.md` before fetching parse results. -- [ ] Ensure each API helper has a test that mocks `httpx` responses and validates that required headers/payloads are constructed correctly. +- [x] Implement `feedscope imports create|list|status` that uploads OPML, sets `Content-Type: text/xml`, and re-uses the client cache. +- [x] Provide `feedscope pages save` to POST URLs/titles (`content/pages.md`) and return the created entry payload. +- [x] Add `feedscope icons list` for `GET /v2/icons.json` and consider caching or optional JSONL output for scripting. +- [x] Create an `extract` command that, given credentials stored in config (new `extract.username`/`extract.secret` entries), builds the HMAC-SHA1 signature as in `content/extract-full-content.md` before fetching parse results. 
+- [x] Ensure each API helper has a test that mocks `httpx` responses and validates that required headers/payloads are constructed correctly. ## Phase 5: Workflow & polishing -- [ ] Update the README (or add CLI docs) to describe the new commands, referencing the content docs as the API source of truth. -- [ ] Run `uv run ruff format`, `uv run ty`, and `uv run pytest` after implementing each phase to keep the codebase clean. +- [x] Update the README (or add CLI docs) to describe the new commands, referencing the content docs as the API source of truth. +- [x] Run `uv run ruff format`, `uv run ty`, and `uv run pytest` after implementing each phase to keep the codebase clean. - [ ] Optional: expose `feedscope auth status` improvements or helper `feedscope config show` if needed to expose additional configuration fields (e.g., Extract credentials). diff --git a/pyproject.toml b/pyproject.toml index 39bfb28..bb00cda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "rich>=14.1.0", "hishel>=0.0.36", "loguru-config", + "stamina>=25.2.0", ] [project.scripts] @@ -30,6 +31,7 @@ build-backend = "setuptools.build_meta" dev = [ "poethepoet>=0.32.2", "pytest>=8.3.3", + "respx>=0.22.0", "ruff>=0.7.3", "ty==0.0.1a27", ] @@ -37,7 +39,7 @@ dev = [ [tool.poe.tasks] lint = { cmd = "uv run ruff check src tests" } format = { cmd = "uv run ruff format src tests" } -typecheck = { cmd = "uv run ty src/feedscope" } +typecheck = { cmd = "uv run ty check src/feedscope" } test = { cmd = "uv run pytest" } qa = { sequence = ["lint", "typecheck", "test"] } diff --git a/src/feedscope/__init__.py b/src/feedscope/__init__.py index 754eb2e..8f39e8a 100644 --- a/src/feedscope/__init__.py +++ b/src/feedscope/__init__.py @@ -11,6 +11,11 @@ from .config_cli import config_app from .state import AppState from .subscriptions import subscriptions_app +from .entries import entries_app +from .searches import searches_app +from .entry_state import unread_app, starred_app, updated_app, recently_read_app +from .tags import tags_app, taggings_app +from .supporting import imports_app, pages_app, icons_app, extract_command def configure_logging(config_file: Path | None) -> AppState: @@ -42,6 +47,18 @@ def configure_logging(config_file: Path | None) -> AppState: app.add_typer(auth_app, name="auth") app.add_typer(config_app, name="config") app.add_typer(subscriptions_app, name="subscriptions") +app.add_typer(entries_app, name="entries") +app.add_typer(searches_app, name="saved-search") +app.add_typer(unread_app, name="unread") +app.add_typer(starred_app, name="starred") +app.add_typer(updated_app, name="updated") +app.add_typer(recently_read_app, name="recently-read") +app.add_typer(tags_app, name="tags") +app.add_typer(taggings_app, name="taggings") +app.add_typer(imports_app, name="imports") +app.add_typer(pages_app, name="pages") +app.add_typer(icons_app, name="icons") +app.command(name="extract")(extract_command) @app.callback() diff --git a/src/feedscope/__main__.py b/src/feedscope/__main__.py index 572908c..f8aa02c 100644 --- a/src/feedscope/__main__.py +++ b/src/feedscope/__main__.py @@ -1,6 +1,7 @@ """ This module allows the package to be run as a script. """ + from . 
import main if __name__ == "__main__": diff --git a/src/feedscope/auth.py b/src/feedscope/auth.py index 06f2f58..29f8c0c 100644 --- a/src/feedscope/auth.py +++ b/src/feedscope/auth.py @@ -16,7 +16,9 @@ def login( ctx: typer.Context, email: Annotated[str, typer.Argument(help="Feedbin email address")], - password: Annotated[str, typer.Option("--password", "-p", help="Feedbin password", hide_input=True)] = None, + password: Annotated[ + str, typer.Option("--password", "-p", help="Feedbin password", hide_input=True) + ] = None, ) -> None: """Check authentication credentials with Feedbin API.""" @@ -54,7 +56,8 @@ def login( raise typer.Exit(1) else: typer.echo( - f"❌ Unexpected response: {response.status_code}", color=typer.colors.RED + f"❌ Unexpected response: {response.status_code}", + color=typer.colors.RED, ) raise typer.Exit(1) @@ -92,7 +95,8 @@ def status(ctx: typer.Context) -> None: typer.echo("✅ Authentication successful!", color=typer.colors.GREEN) elif response.status_code == 401: typer.echo( - "❌ Authentication failed - invalid credentials.", color=typer.colors.RED + "❌ Authentication failed - invalid credentials.", + color=typer.colors.RED, ) typer.echo( "Please run `feedscope auth login` to update your credentials.", @@ -101,7 +105,8 @@ def status(ctx: typer.Context) -> None: raise typer.Exit(1) else: typer.echo( - f"❌ Unexpected response: {response.status_code}", color=typer.colors.RED + f"❌ Unexpected response: {response.status_code}", + color=typer.colors.RED, ) raise typer.Exit(1) @@ -114,7 +119,9 @@ def status(ctx: typer.Context) -> None: def whoami(ctx: typer.Context) -> None: """Show the current user from the config file.""" state = get_state(ctx) - logger.debug("Inspecting current auth user with log config {}", state.log_config_path) + logger.debug( + "Inspecting current auth user with log config {}", state.log_config_path + ) config = get_config() if config.auth.email and config.auth.password: @@ -129,7 +136,9 @@ def whoami(ctx: typer.Context) -> None: def remove(ctx: typer.Context) -> None: """Remove stored authentication credentials.""" state = get_state(ctx) - logger.debug("Removing stored credentials with log config {}", state.log_config_path) + logger.debug( + "Removing stored credentials with log config {}", state.log_config_path + ) config = get_config() config_file = config.config_file_path diff --git a/src/feedscope/client.py b/src/feedscope/client.py index 1af3604..59e5181 100644 --- a/src/feedscope/client.py +++ b/src/feedscope/client.py @@ -2,9 +2,35 @@ from hishel import CacheClient, FileStorage from platformdirs import user_cache_dir from pathlib import Path +import stamina + + +class FeedscopeClient(CacheClient): + """Custom client that adds retries for safe methods.""" + + def request(self, method: str, url, **kwargs) -> httpx.Response: + # Only retry safe methods or DELETE (as per plan "GET/DELETE") + if method.upper() in ["GET", "DELETE", "HEAD", "OPTIONS"]: + try: + for attempt in stamina.retry_context( + on=(httpx.RequestError, httpx.HTTPStatusError), attempts=3 + ): + with attempt: + response = super().request(method, url, **kwargs) + # Trigger retry on server errors + if response.status_code >= 500: + response.raise_for_status() + return response + except httpx.HTTPStatusError as e: + # If retries exhausted for 5xx, return the last response + return e.response + # RequestError will bubble up if retries exhausted + + return super().request(method, url, **kwargs) + def get_client() -> httpx.Client: - """Get a cached httpx client.""" + """Get a cached 
httpx client with retries.""" cache_dir = Path(user_cache_dir("dev.pirateninja.feedscope", "http-cache")) storage = FileStorage(base_path=cache_dir) - return CacheClient(storage=storage) + return FeedscopeClient(storage=storage) diff --git a/src/feedscope/config.py b/src/feedscope/config.py index 6fe1dbd..9fddcb6 100644 --- a/src/feedscope/config.py +++ b/src/feedscope/config.py @@ -18,12 +18,20 @@ class AuthCredentials(BaseModel): password: str = "" +class ExtractCredentials(BaseModel): + """Extraction service credentials.""" + + username: str = "" + secret: str = "" + + class FeedscopeConfig(BaseSettings): model_config = SettingsConfigDict( toml_file=Path(user_config_dir("dev.pirateninja.feedscope")) / "config.toml", ) auth: AuthCredentials = AuthCredentials() + extract: ExtractCredentials = ExtractCredentials() @classmethod def settings_customise_sources( @@ -58,13 +66,20 @@ def save(self) -> None: else: doc = tomlkit.document() - # Update values + # Update auth if "auth" not in doc or not isinstance(doc.get("auth"), dict): doc["auth"] = tomlkit.table() doc["auth"]["email"] = self.auth.email doc["auth"]["password"] = self.auth.password + # Update extract + if "extract" not in doc or not isinstance(doc.get("extract"), dict): + doc["extract"] = tomlkit.table() + + doc["extract"]["username"] = self.extract.username + doc["extract"]["secret"] = self.extract.secret + if "email" in doc: del doc["email"] if "password" in doc: diff --git a/src/feedscope/entries.py b/src/feedscope/entries.py new file mode 100644 index 0000000..92aae35 --- /dev/null +++ b/src/feedscope/entries.py @@ -0,0 +1,215 @@ +import typer +from typing import Optional +from typing_extensions import Annotated +from datetime import datetime +import json +import httpx + +from .config import get_config +from .client import get_client +from .utils import fetch_and_display_entries + +entries_app = typer.Typer(help="Retrieve and manage entries") + + +def _build_entry_params( + page: Optional[int] = None, + per_page: Optional[int] = None, + since: Optional[datetime] = None, + read: Optional[bool] = None, + starred: Optional[bool] = None, + mode: Optional[str] = None, + include_original: bool = False, + include_enclosure: bool = False, + include_content_diff: bool = False, +) -> dict: + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if since is not None: + params["since"] = since.isoformat() + if read is not None: + params["read"] = str(read).lower() + if starred is not None: + params["starred"] = str(starred).lower() + if mode is not None: + params["mode"] = mode + if include_original: + params["include_original"] = "true" + if include_enclosure: + params["include_enclosure"] = "true" + if include_content_diff: + params["include_content_diff"] = "true" + return params + + +@entries_app.command(name="list") +def list_entries( + ctx: typer.Context, + page: Annotated[Optional[int], typer.Option(help="Page number")] = None, + per_page: Annotated[ + Optional[int], typer.Option(help="Number of entries per page") + ] = None, + since: Annotated[ + Optional[datetime], + typer.Option(help="Get entries created after this timestamp"), + ] = None, + read: Annotated[Optional[bool], typer.Option(help="Filter by read status")] = None, + starred: Annotated[ + Optional[bool], typer.Option(help="Filter by starred status") + ] = None, + mode: Annotated[Optional[str], typer.Option(help="Mode (e.g. 
extended)")] = None, + include_original: Annotated[ + bool, typer.Option(help="Include original entry data") + ] = False, + include_enclosure: Annotated[ + bool, typer.Option(help="Include enclosure data") + ] = False, + include_content_diff: Annotated[ + bool, typer.Option(help="Include content diff") + ] = False, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List entries.""" + params = _build_entry_params( + page, + per_page, + since, + read, + starred, + mode, + include_original, + include_enclosure, + include_content_diff, + ) + fetch_and_display_entries( + ctx, "https://api.feedbin.com/v2/entries.json", params, json_output + ) + + +@entries_app.command(name="feed") +def feed_entries( + ctx: typer.Context, + feed_id: Annotated[int, typer.Argument(help="Feed ID")], + page: Annotated[Optional[int], typer.Option(help="Page number")] = None, + per_page: Annotated[ + Optional[int], typer.Option(help="Number of entries per page") + ] = None, + since: Annotated[ + Optional[datetime], + typer.Option(help="Get entries created after this timestamp"), + ] = None, + read: Annotated[Optional[bool], typer.Option(help="Filter by read status")] = None, + starred: Annotated[ + Optional[bool], typer.Option(help="Filter by starred status") + ] = None, + mode: Annotated[Optional[str], typer.Option(help="Mode (e.g. extended)")] = None, + include_original: Annotated[ + bool, typer.Option(help="Include original entry data") + ] = False, + include_enclosure: Annotated[ + bool, typer.Option(help="Include enclosure data") + ] = False, + include_content_diff: Annotated[ + bool, typer.Option(help="Include content diff") + ] = False, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List entries for a specific feed.""" + params = _build_entry_params( + page, + per_page, + since, + read, + starred, + mode, + include_original, + include_enclosure, + include_content_diff, + ) + fetch_and_display_entries( + ctx, + f"https://api.feedbin.com/v2/feeds/{feed_id}/entries.json", + params, + json_output, + ) + + +@entries_app.command(name="show") +def show_entry( + ctx: typer.Context, + entry_id: Annotated[int, typer.Argument(help="Entry ID")], + mode: Annotated[Optional[str], typer.Option(help="Mode (e.g. extended)")] = None, + include_original: Annotated[ + bool, typer.Option(help="Include original entry data") + ] = False, + include_enclosure: Annotated[ + bool, typer.Option(help="Include enclosure data") + ] = False, + include_content_diff: Annotated[ + bool, typer.Option(help="Include content diff") + ] = False, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Show a single entry.""" + config = get_config() + + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. 
Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + url = f"https://api.feedbin.com/v2/entries/{entry_id}.json" + params = {} + if mode is not None: + params["mode"] = mode + if include_original: + params["include_original"] = "true" + if include_enclosure: + params["include_enclosure"] = "true" + if include_content_diff: + params["include_content_diff"] = "true" + + try: + with get_client() as client: + response = client.get( + url, + params=params, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code != 200: + typer.echo(f"Error fetching entry: {response.status_code}", err=True) + if response.status_code == 404: + typer.echo("Entry not found.", err=True) + elif response.status_code == 403: + typer.echo( + "Forbidden. You may not have access to this entry.", err=True + ) + raise typer.Exit(1) + + entry = response.json() + if json_output: + typer.echo(json.dumps(entry, indent=2)) + else: + typer.echo(f"Title: {entry.get('title')}") + typer.echo(f"ID: {entry.get('id')}") + typer.echo(f"Published: {entry.get('published')}") + typer.echo(f"URL: {entry.get('url')}") + if mode == "extended": + typer.echo(f"Author: {entry.get('author')}") + typer.echo(f"Summary: {entry.get('summary')}") + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) diff --git a/src/feedscope/entry_state.py b/src/feedscope/entry_state.py new file mode 100644 index 0000000..21680b1 --- /dev/null +++ b/src/feedscope/entry_state.py @@ -0,0 +1,282 @@ +import typer +from typing import List, Optional +from typing_extensions import Annotated +import json +import httpx +from datetime import datetime + +from .config import get_config +from .client import get_client +from .utils import fetch_and_display_entries + +unread_app = typer.Typer(help="Manage unread entries") +starred_app = typer.Typer(help="Manage starred entries") +updated_app = typer.Typer(help="Manage updated entries") +recently_read_app = typer.Typer(help="Manage recently read entries") + + +def _manage_entries_state( + ctx: typer.Context, + endpoint: str, + method: str, + entry_ids: List[int], + key: str, + json_output: bool = False, +): + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. 
Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + if not entry_ids: + typer.echo("No entry IDs provided.", err=True) + return + + if len(entry_ids) > 1000: + typer.echo("❌ Limit of 1,000 entry_ids per request.", color=typer.colors.RED) + raise typer.Exit(1) + + url = f"https://api.feedbin.com/v2/{endpoint}.json" + data = {key: entry_ids} + + try: + with get_client() as client: + response = client.request( + method, + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 200: + result = response.json() + if json_output: + typer.echo(json.dumps(result, indent=2)) + else: + typer.echo(f"Successfully processed {len(result)} entries.") + else: + typer.echo(f"Error: {response.status_code}", err=True) + raise typer.Exit(1) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +# Unread +@unread_app.command(name="list") +def list_unread( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List unread entry IDs.""" + fetch_and_display_entries( + ctx, "https://api.feedbin.com/v2/unread_entries.json", {}, json_output + ) + + +@unread_app.command(name="mark-read") +def mark_read( + ctx: typer.Context, + entry_ids: Annotated[List[int], typer.Argument(help="Entry IDs to mark as read")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Mark entries as read (remove from unread).""" + _manage_entries_state( + ctx, "unread_entries", "DELETE", entry_ids, "unread_entries", json_output + ) + + +@unread_app.command(name="mark-unread") +def mark_unread( + ctx: typer.Context, + entry_ids: Annotated[List[int], typer.Argument(help="Entry IDs to mark as unread")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Mark entries as unread.""" + _manage_entries_state( + ctx, "unread_entries", "POST", entry_ids, "unread_entries", json_output + ) + + +# Starred +@starred_app.command(name="list") +def list_starred( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List starred entry IDs.""" + fetch_and_display_entries( + ctx, "https://api.feedbin.com/v2/starred_entries.json", {}, json_output + ) + + +@starred_app.command(name="star") +def star_entries( + ctx: typer.Context, + entry_ids: Annotated[List[int], typer.Argument(help="Entry IDs to star")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Star entries.""" + _manage_entries_state( + ctx, "starred_entries", "POST", entry_ids, "starred_entries", json_output + ) + + +@starred_app.command(name="unstar") +def unstar_entries( + ctx: typer.Context, + entry_ids: Annotated[List[int], typer.Argument(help="Entry IDs to unstar")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Unstar entries.""" + _manage_entries_state( + ctx, "starred_entries", "DELETE", entry_ids, "starred_entries", json_output + ) + + +# Updated +@updated_app.command(name="list") +def list_updated( + ctx: typer.Context, + since: Annotated[ + Optional[datetime], + typer.Option(help="Get entries updated after this timestamp"), + ] = None, + include_diff: Annotated[ + bool, typer.Option(help="Fetch details including content diff") + ] = False, + json_output: Annotated[ + bool, 
typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List updated entry IDs.""" + if include_diff: + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + params = {} + if since: + params["since"] = since.isoformat() + + try: + with get_client() as client: + response = client.get( + "https://api.feedbin.com/v2/updated_entries.json", + params=params, + auth=(config.auth.email, config.auth.password), + ) + if response.status_code != 200: + typer.echo( + f"Error fetching updated IDs: {response.status_code}", err=True + ) + raise typer.Exit(1) + ids = response.json() + + if not ids: + typer.echo("No updated entries.") + return + + batch_ids = ids[:100] + ids_str = ",".join(map(str, batch_ids)) + + entries_params = { + "ids": ids_str, + "include_content_diff": "true", + "include_original": "true", + } + + fetch_and_display_entries( + ctx, + "https://api.feedbin.com/v2/entries.json", + entries_params, + json_output, + ) + + if len(ids) > 100: + typer.echo( + f"Warning: Only showing first 100 of {len(ids)} updated entries.", + err=True, + ) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + else: + params = {} + if since: + params["since"] = since.isoformat() + + fetch_and_display_entries( + ctx, "https://api.feedbin.com/v2/updated_entries.json", params, json_output + ) + + +@updated_app.command(name="mark-read") +def mark_updated_read( + ctx: typer.Context, + entry_ids: Annotated[List[int], typer.Argument(help="Entry IDs to mark as read")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Mark updated entries as read.""" + _manage_entries_state( + ctx, "updated_entries", "DELETE", entry_ids, "updated_entries", json_output + ) + + +# Recently Read +@recently_read_app.command(name="list") +def list_recently_read( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List recently read entry IDs.""" + fetch_and_display_entries( + ctx, "https://api.feedbin.com/v2/recently_read_entries.json", {}, json_output + ) + + +@recently_read_app.command(name="create") +def create_recently_read( + ctx: typer.Context, + entry_ids: Annotated[ + List[int], typer.Argument(help="Entry IDs to add to recently read") + ], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Add entries to recently read.""" + _manage_entries_state( + ctx, + "recently_read_entries", + "POST", + entry_ids, + "recently_read_entries", + json_output, + ) diff --git a/src/feedscope/searches.py b/src/feedscope/searches.py new file mode 100644 index 0000000..06a3a44 --- /dev/null +++ b/src/feedscope/searches.py @@ -0,0 +1,234 @@ +import typer +from typing import Optional +from typing_extensions import Annotated +import json +import httpx + +from .config import get_config +from .client import get_client +from .utils import fetch_and_display_entries + +searches_app = typer.Typer(help="Manage saved searches") + + +@searches_app.command(name="list") +def list_searches( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List all saved searches.""" + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + 
"❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + url = "https://api.feedbin.com/v2/saved_searches.json" + + try: + with get_client() as client: + response = client.get( + url, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code != 200: + typer.echo( + f"Error fetching saved searches: {response.status_code}", err=True + ) + raise typer.Exit(1) + + searches = response.json() + + if json_output: + typer.echo(json.dumps(searches, indent=2)) + else: + for search in searches: + typer.echo( + f"[{search['id']}] {search['name']} - Query: {search['query']}" + ) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@searches_app.command(name="get") +def get_search( + ctx: typer.Context, + search_id: Annotated[int, typer.Argument(help="Saved Search ID")], + include_entries: Annotated[ + bool, typer.Option(help="Include full entry objects") + ] = False, + page: Annotated[Optional[int], typer.Option(help="Page number")] = None, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Get results for a saved search.""" + params = {} + if include_entries: + params["include_entries"] = "true" + if page: + params["page"] = page + + fetch_and_display_entries( + ctx, + f"https://api.feedbin.com/v2/saved_searches/{search_id}.json", + params, + json_output, + ) + + +@searches_app.command(name="create") +def create_search( + ctx: typer.Context, + name: Annotated[str, typer.Option(help="Name of the saved search")], + query: Annotated[str, typer.Option(help="Search query")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Create a new saved search.""" + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + url = "https://api.feedbin.com/v2/saved_searches.json" + data = {"name": name, "query": query} + + try: + with get_client() as client: + response = client.post( + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 201: + typer.echo( + "✅ Saved search created successfully.", color=typer.colors.GREEN + ) + if json_output: + typer.echo(json.dumps(response.json(), indent=2)) + else: + typer.echo( + f"Error creating saved search: {response.status_code}", err=True + ) + if json_output: + typer.echo(response.text) + raise typer.Exit(1) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@searches_app.command(name="update") +def update_search( + ctx: typer.Context, + search_id: Annotated[int, typer.Argument(help="Saved Search ID")], + name: Annotated[Optional[str], typer.Option(help="New name")] = None, + query: Annotated[Optional[str], typer.Option(help="New query")] = None, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Update a saved search.""" + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. 
Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + url = f"https://api.feedbin.com/v2/saved_searches/{search_id}.json" + data = {} + if name: + data["name"] = name + if query: + data["query"] = query + + if not data: + typer.echo("No updates provided.") + return + + try: + with get_client() as client: + response = client.patch( + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 200: + typer.echo( + "✅ Saved search updated successfully.", color=typer.colors.GREEN + ) + if json_output: + typer.echo(json.dumps(response.json(), indent=2)) + elif response.status_code == 403: + typer.echo("Forbidden. You may not own this saved search.", err=True) + raise typer.Exit(1) + else: + typer.echo( + f"Error updating saved search: {response.status_code}", err=True + ) + raise typer.Exit(1) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@searches_app.command(name="delete") +def delete_search( + ctx: typer.Context, + search_id: Annotated[int, typer.Argument(help="Saved Search ID")], +): + """Delete a saved search.""" + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + if not typer.confirm(f"Are you sure you want to delete saved search {search_id}?"): + raise typer.Abort() + + url = f"https://api.feedbin.com/v2/saved_searches/{search_id}.json" + + try: + with get_client() as client: + response = client.delete( + url, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 204: + typer.echo( + "✅ Saved search deleted successfully.", color=typer.colors.GREEN + ) + elif response.status_code == 403: + typer.echo("Forbidden. You may not own this saved search.", err=True) + raise typer.Exit(1) + else: + typer.echo( + f"Error deleting saved search: {response.status_code}", err=True + ) + raise typer.Exit(1) + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) diff --git a/src/feedscope/supporting.py b/src/feedscope/supporting.py new file mode 100644 index 0000000..fbb53b6 --- /dev/null +++ b/src/feedscope/supporting.py @@ -0,0 +1,255 @@ +import typer +from typing_extensions import Annotated +import json +import httpx +from pathlib import Path +import hmac +import hashlib +import base64 + +from .config import get_config +from .client import get_client + +imports_app = typer.Typer(help="Manage imports") +pages_app = typer.Typer(help="Manage pages") +icons_app = typer.Typer(help="Manage icons") + + +def _check_auth(): + config = get_config() + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. 
Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + return config + + +# Imports +@imports_app.command(name="list") +def list_imports( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List imports.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/imports.json" + + try: + with get_client() as client: + response = client.get(url, auth=(config.auth.email, config.auth.password)) + if response.status_code == 200: + imports = response.json() + if json_output: + typer.echo(json.dumps(imports, indent=2)) + else: + for imp in imports: + typer.echo( + f"ID: {imp['id']}, Complete: {imp['complete']}, Created: {imp['created_at']}" + ) + else: + typer.echo(f"Error: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@imports_app.command(name="status") +def import_status( + ctx: typer.Context, + import_id: Annotated[int, typer.Argument(help="Import ID")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Get status of an import.""" + config = _check_auth() + url = f"https://api.feedbin.com/v2/imports/{import_id}.json" + + try: + with get_client() as client: + response = client.get(url, auth=(config.auth.email, config.auth.password)) + if response.status_code == 200: + imp = response.json() + if json_output: + typer.echo(json.dumps(imp, indent=2)) + else: + typer.echo( + f"Import {imp['id']} Status: {'Complete' if imp['complete'] else 'Pending'}" + ) + if "import_items" in imp: + for item in imp["import_items"]: + typer.echo(f" - {item['title']}: {item['status']}") + else: + typer.echo(f"Error: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@imports_app.command(name="create") +def create_import( + ctx: typer.Context, + file_path: Annotated[ + Path, typer.Argument(help="Path to OPML file", exists=True, readable=True) + ], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Create a new import from OPML file.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/imports.json" + + content = file_path.read_text(encoding="utf-8") # OPML is XML, usually text + + try: + with get_client() as client: + # POST body as text/xml + response = client.post( + url, + content=content, + headers={"Content-Type": "text/xml"}, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 201: + imp = response.json() + typer.echo("✅ Import created.", color=typer.colors.GREEN) + if json_output: + typer.echo(json.dumps(imp, indent=2)) + else: + typer.echo(f"ID: {imp['id']}") + else: + typer.echo(f"Error: {response.status_code}", err=True) + if json_output: + typer.echo(response.text) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +# Pages +@pages_app.command(name="save") +def save_page( + ctx: typer.Context, + url: Annotated[str, typer.Option(help="URL to save")], + title: Annotated[str, typer.Option(help="Title of the page")] = None, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Save a web page as an entry.""" + config = _check_auth() + api_url = 
"https://api.feedbin.com/v2/pages.json" + data = {"url": url} + if title: + data["title"] = title + + try: + with get_client() as client: + response = client.post( + api_url, json=data, auth=(config.auth.email, config.auth.password) + ) + + if response.status_code == 200: # Docs say 200 return entry + entry = response.json() + typer.echo("✅ Page saved.", color=typer.colors.GREEN) + if json_output: + typer.echo(json.dumps(entry, indent=2)) + else: + typer.echo(f"Created Entry ID: {entry.get('id')}") + else: + typer.echo(f"Error: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +# Icons +@icons_app.command(name="list") +def list_icons( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List feed icons.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/icons.json" + + try: + with get_client() as client: + response = client.get(url, auth=(config.auth.email, config.auth.password)) + if response.status_code == 200: + icons = response.json() + if json_output: + typer.echo(json.dumps(icons, indent=2)) + else: + for icon in icons: + typer.echo(f"{icon['host']}: {icon['url']}") + else: + typer.echo(f"Error: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +# Extract +def extract_command( + ctx: typer.Context, + url: Annotated[str, typer.Argument(help="URL to extract content from")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Extract content from a URL using Feedbin's service.""" + config = get_config() + username = config.extract.username + secret = config.extract.secret + + if not username or not secret: + typer.echo( + "❌ Extraction credentials not found in config (extract.username, extract.secret).", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + # HMAC-SHA1 + digest = hashlib.sha1 + signature = hmac.new(secret.encode(), url.encode(), digest).hexdigest() + + # Base64 URL safe + base64_url = base64.urlsafe_b64encode(url.encode()).decode().replace("\n", "") + + api_url = f"https://extract.feedbin.com/parser/{username}/{signature}" + params = {"base64_url": base64_url} + + try: + with get_client() as client: + response = client.get(api_url, params=params) + + if response.status_code == 200: + data = response.json() + if json_output: + typer.echo(json.dumps(data, indent=2)) + else: + typer.echo(f"Title: {data.get('title')}") + typer.echo(f"Word Count: {data.get('word_count')}") + typer.echo(f"Excerpt: {data.get('excerpt')}") + else: + typer.echo(f"Error extracting: {response.status_code}", err=True) + if json_output: + typer.echo(response.text) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) diff --git a/src/feedscope/tags.py b/src/feedscope/tags.py new file mode 100644 index 0000000..826099e --- /dev/null +++ b/src/feedscope/tags.py @@ -0,0 +1,200 @@ +import typer +from typing_extensions import Annotated +import json +import httpx + +from .config import get_config +from .client import get_client + +tags_app = typer.Typer(help="Manage tags") +taggings_app = typer.Typer(help="Manage taggings") + + +def _check_auth(): + config = get_config() + if not config.auth.email or not config.auth.password: + 
typer.echo( + "❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + return config + + +# Tags commands +@tags_app.command(name="rename") +def rename_tag( + ctx: typer.Context, + old_name: Annotated[str, typer.Option(help="Old tag name")], + new_name: Annotated[str, typer.Option(help="New tag name")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Rename a tag.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/tags.json" + data = {"old_name": old_name, "new_name": new_name} + + try: + with get_client() as client: + response = client.post( + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 200: + typer.echo("✅ Tag renamed successfully.", color=typer.colors.GREEN) + if json_output: + typer.echo(json.dumps(response.json(), indent=2)) + else: + typer.echo(f"Error renaming tag: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@tags_app.command(name="delete") +def delete_tag( + ctx: typer.Context, + name: Annotated[str, typer.Option(help="Tag name to delete")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Delete a tag.""" + config = _check_auth() + + if not typer.confirm(f"Are you sure you want to delete tag '{name}'?"): + raise typer.Abort() + + url = "https://api.feedbin.com/v2/tags.json" + data = {"name": name} + + try: + with get_client() as client: + # DELETE with body + response = client.request( + "DELETE", + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 200: + typer.echo("✅ Tag deleted successfully.", color=typer.colors.GREEN) + if json_output: + typer.echo(json.dumps(response.json(), indent=2)) + else: + typer.echo(f"Error deleting tag: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +# Taggings commands +@taggings_app.command(name="list") +def list_taggings( + ctx: typer.Context, + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """List all taggings.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/taggings.json" + + try: + with get_client() as client: + response = client.get( + url, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 200: + taggings = response.json() + if json_output: + typer.echo(json.dumps(taggings, indent=2)) + else: + for tagging in taggings: + typer.echo( + f"[{tagging['id']}] Feed {tagging['feed_id']} -> {tagging['name']}" + ) + else: + typer.echo(f"Error fetching taggings: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@taggings_app.command(name="create") +def create_tagging( + ctx: typer.Context, + feed_id: Annotated[int, typer.Option(help="Feed ID")], + name: Annotated[str, typer.Option(help="Tag name")], + json_output: Annotated[ + bool, typer.Option("--json", help="Output raw JSON") + ] = False, +): + """Create a new tagging.""" + config = _check_auth() + url = "https://api.feedbin.com/v2/taggings.json" + data = {"feed_id": feed_id, "name": name} + + try: + 
with get_client() as client: + response = client.post( + url, + json=data, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 201: + typer.echo("✅ Tagging created successfully.", color=typer.colors.GREEN) + if json_output: + typer.echo(json.dumps(response.json(), indent=2)) + elif response.status_code == 302: + typer.echo("ℹ️ Tagging already exists.", color=typer.colors.YELLOW) + else: + typer.echo(f"Error creating tagging: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) + + +@taggings_app.command(name="delete") +def delete_tagging( + ctx: typer.Context, + tagging_id: Annotated[int, typer.Argument(help="Tagging ID")], +): + """Delete a tagging.""" + config = _check_auth() + + if not typer.confirm(f"Are you sure you want to delete tagging {tagging_id}?"): + raise typer.Abort() + + url = f"https://api.feedbin.com/v2/taggings/{tagging_id}.json" + + try: + with get_client() as client: + response = client.delete( + url, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code == 204: + typer.echo("✅ Tagging deleted successfully.", color=typer.colors.GREEN) + elif response.status_code == 403: + typer.echo("Forbidden. You may not own this tagging.", err=True) + raise typer.Exit(1) + else: + typer.echo(f"Error deleting tagging: {response.status_code}", err=True) + raise typer.Exit(1) + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) diff --git a/src/feedscope/utils.py b/src/feedscope/utils.py new file mode 100644 index 0000000..40ccb11 --- /dev/null +++ b/src/feedscope/utils.py @@ -0,0 +1,62 @@ +"""Utility functions for Feedscope CLI.""" + +import typer +import httpx +import json +from loguru import logger +from .client import get_client +from .config import get_config + + +def fetch_and_display_entries( + ctx: typer.Context, url: str, params: dict, json_output: bool +): + """ + Fetches entries from URL with params and displays them. + Shared by entries, feed, updated commands. + """ + config = get_config() + + if not config.auth.email or not config.auth.password: + typer.echo( + "❌ Authentication credentials not found. Please run `feedscope auth login` first.", + color=typer.colors.RED, + ) + raise typer.Exit(1) + + logger.debug("Fetching entries from {} with params {}", url, params) + + try: + with get_client() as client: + response = client.get( + url, + params=params, + auth=(config.auth.email, config.auth.password), + ) + + if response.status_code != 200: + typer.echo(f"Error fetching entries: {response.status_code}", err=True) + if response.status_code == 403: + typer.echo("Forbidden. Check if you have access.", err=True) + elif response.status_code == 404: + typer.echo("Not found.", err=True) + raise typer.Exit(1) + + entries = response.json() + + if json_output: + typer.echo(json.dumps(entries, indent=2)) + else: + for entry in entries: + if isinstance(entry, int): + # It's a list of IDs (e.g. 
unread, starred, updated) + typer.echo(entry) + else: + title = entry.get("title") or "(No Title)" + entry_id = entry.get("id") + published = entry.get("published") + typer.echo(f"[{entry_id}] {published} - {title}") + + except httpx.RequestError as e: + typer.echo(f"❌ Network error: {e}", color=typer.colors.RED) + raise typer.Exit(1) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..ce7523c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,41 @@ +import os +from pathlib import Path +import pytest +from platformdirs import user_config_dir + +# Ensure configuration writes are isolated to a test-specific directory +TEST_CONFIG_HOME = Path(__file__).parent / "_config_home" +TEST_CONFIG_HOME.mkdir(parents=True, exist_ok=True) +os.environ["XDG_CONFIG_HOME"] = str(TEST_CONFIG_HOME) + +CONFIG_FILE = Path(user_config_dir("dev.pirateninja.feedscope")) / "config.toml" + + +@pytest.fixture(autouse=True) +def clean_config_file() -> None: + """Ensure the config file is removed before and after each test.""" + if CONFIG_FILE.exists(): + CONFIG_FILE.unlink() + yield + if CONFIG_FILE.exists(): + CONFIG_FILE.unlink() + + +@pytest.fixture +def config_path(): + return CONFIG_FILE + + +@pytest.fixture +def auth_config(clean_config_file): + """Setup auth config.""" + # Write a dummy config + import tomlkit + + doc = tomlkit.document() + doc["auth"] = {"email": "test@example.com", "password": "password"} + + CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True) + with CONFIG_FILE.open("w") as f: + f.write(tomlkit.dumps(doc)) + return doc diff --git a/tests/test_cli.py b/tests/test_cli.py index a0a3b12..edb9b9f 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,36 +1,12 @@ """Basic CLI operation tests for feedscope.""" + from pathlib import Path import json -import os -import pytest from platformdirs import user_config_dir from typer.testing import CliRunner - -# Ensure configuration writes are isolated to a test-specific directory -TEST_CONFIG_HOME = Path(__file__).parent / "_config_home" -TEST_CONFIG_HOME.mkdir(parents=True, exist_ok=True) -os.environ["XDG_CONFIG_HOME"] = str(TEST_CONFIG_HOME) - from feedscope import app - -CONFIG_FILE = Path(user_config_dir("dev.pirateninja.feedscope")) / "config.toml" - - -@pytest.fixture(autouse=True) -def clean_config_file() -> None: - """Ensure the config file is removed before and after each test.""" - - if CONFIG_FILE.exists(): - CONFIG_FILE.unlink() - - yield - - if CONFIG_FILE.exists(): - CONFIG_FILE.unlink() - - runner = CliRunner() @@ -101,7 +77,7 @@ def test_log_config_supports_toml(tmp_path: Path) -> None: "\n".join( [ "handlers = [", - f" {{ sink = \"{log_file}\", format = \"{{message}}\", level = \"DEBUG\" }}", + f' {{ sink = "{log_file}", format = "{{message}}", level = "DEBUG" }}', "]", "", ] @@ -149,7 +125,9 @@ def test_config_location_emits_logging(tmp_path: Path) -> None: ) ) - result = runner.invoke(app, ["--log-config", str(config_file), "config", "location"]) + result = runner.invoke( + app, ["--log-config", str(config_file), "config", "location"] + ) assert result.exit_code == 0 assert log_file.exists() diff --git a/tests/test_entries.py b/tests/test_entries.py new file mode 100644 index 0000000..e163749 --- /dev/null +++ b/tests/test_entries.py @@ -0,0 +1,99 @@ +import httpx +import respx +from typer.testing import CliRunner +from feedscope import app + +runner = CliRunner() + + +def test_entries_help(): + result = runner.invoke(app, ["entries", "--help"]) + assert result.exit_code == 0 + assert "Retrieve 
and manage entries" in result.stdout + + +@respx.mock +def test_entries_list(auth_config): + respx.get("https://api.feedbin.com/v2/entries.json").mock( + return_value=httpx.Response( + 200, + json=[ + { + "id": 1, + "title": "Test Entry", + "published": "2025-01-01T00:00:00.000000Z", + } + ], + ) + ) + + result = runner.invoke(app, ["entries", "list"]) + assert result.exit_code == 0 + assert "[1] 2025-01-01T00:00:00.000000Z - Test Entry" in result.stdout + + +@respx.mock +def test_entries_list_filters(auth_config): + mock = respx.get("https://api.feedbin.com/v2/entries.json").mock( + return_value=httpx.Response(200, json=[]) + ) + + result = runner.invoke( + app, + [ + "entries", + "list", + "--no-read", + "--starred", + "--since", + "2025-01-01T00:00:00", + "--include-enclosure", + ], + ) + assert result.exit_code == 0 + + request = mock.calls[0].request + assert request.url.params["read"] == "false" + assert request.url.params["starred"] == "true" + assert "since" in request.url.params + assert request.url.params["include_enclosure"] == "true" + + +@respx.mock +def test_entries_show(auth_config): + respx.get("https://api.feedbin.com/v2/entries/1.json").mock( + return_value=httpx.Response( + 200, + json={ + "id": 1, + "title": "Test Entry", + "published": "2025-01-01T00:00:00.000000Z", + "url": "http://example.com", + }, + ) + ) + + result = runner.invoke(app, ["entries", "show", "1"]) + assert result.exit_code == 0 + assert "Test Entry" in result.stdout + assert "http://example.com" in result.stdout + + +@respx.mock +def test_entries_feed(auth_config): + respx.get("https://api.feedbin.com/v2/feeds/123/entries.json").mock( + return_value=httpx.Response( + 200, + json=[ + { + "id": 1, + "title": "Feed Entry", + "published": "2025-01-01T00:00:00.000000Z", + } + ], + ) + ) + + result = runner.invoke(app, ["entries", "feed", "123"]) + assert result.exit_code == 0 + assert "Feed Entry" in result.stdout diff --git a/tests/test_entry_state.py b/tests/test_entry_state.py new file mode 100644 index 0000000..5c3f33b --- /dev/null +++ b/tests/test_entry_state.py @@ -0,0 +1,124 @@ +import httpx +import respx +import json +from typer.testing import CliRunner +from feedscope import app + +runner = CliRunner() + + +def test_unread_help(): + result = runner.invoke(app, ["unread", "--help"]) + assert result.exit_code == 0 + assert "Manage unread entries" in result.stdout + + +@respx.mock +def test_unread_list(auth_config): + respx.get("https://api.feedbin.com/v2/unread_entries.json").mock( + return_value=httpx.Response(200, json=[1, 2, 3]) + ) + result = runner.invoke(app, ["unread", "list"]) + assert result.exit_code == 0 + assert "1" in result.stdout + assert "3" in result.stdout + + +@respx.mock +def test_unread_mark_read(auth_config): + mock = respx.delete("https://api.feedbin.com/v2/unread_entries.json").mock( + return_value=httpx.Response(200, json=[1, 2]) + ) + result = runner.invoke(app, ["unread", "mark-read", "1", "2"]) + assert result.exit_code == 0 + assert "Successfully processed 2 entries" in result.stdout + + assert json.loads(mock.calls[0].request.content) == {"unread_entries": [1, 2]} + + +@respx.mock +def test_unread_mark_unread(auth_config): + mock = respx.post("https://api.feedbin.com/v2/unread_entries.json").mock( + return_value=httpx.Response(200, json=[1]) + ) + result = runner.invoke(app, ["unread", "mark-unread", "1"]) + assert result.exit_code == 0 + assert json.loads(mock.calls[0].request.content) == {"unread_entries": [1]} + + +@respx.mock +def 
test_starred_actions(auth_config): + # list + respx.get("https://api.feedbin.com/v2/starred_entries.json").mock( + return_value=httpx.Response(200, json=[10]) + ) + result = runner.invoke(app, ["starred", "list"]) + assert result.exit_code == 0 + assert "10" in result.stdout + + # star + mock_post = respx.post("https://api.feedbin.com/v2/starred_entries.json").mock( + return_value=httpx.Response(200, json=[10]) + ) + result = runner.invoke(app, ["starred", "star", "10"]) + assert result.exit_code == 0 + assert json.loads(mock_post.calls[0].request.content) == {"starred_entries": [10]} + + # unstar + mock_del = respx.delete("https://api.feedbin.com/v2/starred_entries.json").mock( + return_value=httpx.Response(200, json=[10]) + ) + result = runner.invoke(app, ["starred", "unstar", "10"]) + assert result.exit_code == 0 + assert json.loads(mock_del.calls[0].request.content) == {"starred_entries": [10]} + + +@respx.mock +def test_updated_list_diff(auth_config): + # Mock updated IDs fetch + respx.get("https://api.feedbin.com/v2/updated_entries.json").mock( + return_value=httpx.Response(200, json=[100, 101]) + ) + + # Mock entries details fetch + mock_entries = respx.get("https://api.feedbin.com/v2/entries.json").mock( + return_value=httpx.Response( + 200, + json=[ + { + "id": 100, + "title": "Updated One", + "published": "...", + "content_diff": "
diff
", + } + ], + ) + ) + + result = runner.invoke(app, ["updated", "list", "--include-diff"]) + assert result.exit_code == 0 + assert "Updated One" in result.stdout + + req = mock_entries.calls[0].request + assert req.url.params["ids"] == "100,101" + assert req.url.params["include_content_diff"] == "true" + assert req.url.params["include_original"] == "true" + + +@respx.mock +def test_recently_read(auth_config): + respx.get("https://api.feedbin.com/v2/recently_read_entries.json").mock( + return_value=httpx.Response(200, json=[5]) + ) + result = runner.invoke(app, ["recently-read", "list"]) + assert result.exit_code == 0 + assert "5" in result.stdout + + mock_post = respx.post( + "https://api.feedbin.com/v2/recently_read_entries.json" + ).mock(return_value=httpx.Response(200, json=[6])) + result = runner.invoke(app, ["recently-read", "create", "6"]) + assert result.exit_code == 0 + assert json.loads(mock_post.calls[0].request.content) == { + "recently_read_entries": [6] + } diff --git a/tests/test_searches.py b/tests/test_searches.py new file mode 100644 index 0000000..d00448a --- /dev/null +++ b/tests/test_searches.py @@ -0,0 +1,72 @@ +import httpx +import respx +from typer.testing import CliRunner +from feedscope import app + +runner = CliRunner() + + +def test_searches_help(): + result = runner.invoke(app, ["saved-search", "--help"]) + assert result.exit_code == 0 + assert "Manage saved searches" in result.stdout + + +@respx.mock +def test_searches_list(auth_config): + respx.get("https://api.feedbin.com/v2/saved_searches.json").mock( + return_value=httpx.Response( + 200, json=[{"id": 1, "name": "Test Search", "query": "test"}] + ) + ) + result = runner.invoke(app, ["saved-search", "list"]) + assert result.exit_code == 0 + assert "Test Search" in result.stdout + + +@respx.mock +def test_searches_get(auth_config): + # IDs only + respx.get("https://api.feedbin.com/v2/saved_searches/1.json").mock( + return_value=httpx.Response(200, json=[10, 11]) + ) + result = runner.invoke(app, ["saved-search", "get", "1"]) + assert result.exit_code == 0 + assert "10" in result.stdout + + # Entries + mock_entries = respx.get("https://api.feedbin.com/v2/saved_searches/1.json").mock( + return_value=httpx.Response( + 200, json=[{"id": 10, "title": "Entry 10", "published": "..."}] + ) + ) + result = runner.invoke(app, ["saved-search", "get", "1", "--include-entries"]) + assert result.exit_code == 0 + assert "Entry 10" in result.stdout + assert mock_entries.calls[1].request.url.params["include_entries"] == "true" + + +@respx.mock +def test_searches_crud(auth_config): + # Create + respx.post("https://api.feedbin.com/v2/saved_searches.json").mock( + return_value=httpx.Response(201, json={"id": 2, "name": "New", "query": "new"}) + ) + result = runner.invoke( + app, ["saved-search", "create", "--name", "New", "--query", "new"] + ) + assert result.exit_code == 0 + + # Update + respx.patch("https://api.feedbin.com/v2/saved_searches/2.json").mock( + return_value=httpx.Response(200, json={"id": 2, "name": "Updated"}) + ) + result = runner.invoke(app, ["saved-search", "update", "2", "--name", "Updated"]) + assert result.exit_code == 0 + + # Delete + respx.delete("https://api.feedbin.com/v2/saved_searches/2.json").mock( + return_value=httpx.Response(204) + ) + result = runner.invoke(app, ["saved-search", "delete", "2"], input="y\n") + assert result.exit_code == 0 diff --git a/tests/test_supporting.py b/tests/test_supporting.py new file mode 100644 index 0000000..a96ac2c --- /dev/null +++ b/tests/test_supporting.py @@ -0,0 
+1,89 @@ +import httpx +import respx +from typer.testing import CliRunner +from feedscope import app + +runner = CliRunner() + + +def test_imports_help(): + result = runner.invoke(app, ["imports", "--help"]) + assert result.exit_code == 0 + assert "Manage imports" in result.stdout + + +def test_extract_help(): + result = runner.invoke(app, ["extract", "--help"]) + assert result.exit_code == 0 + assert "Extract content from a URL" in result.stdout + + +@respx.mock +def test_imports_create(auth_config, tmp_path): + opml_file = tmp_path / "subscriptions.xml" + opml_file.write_text("...") + + mock_post = respx.post("https://api.feedbin.com/v2/imports.json").mock( + return_value=httpx.Response(201, json={"id": 1}) + ) + + result = runner.invoke(app, ["imports", "create", str(opml_file)]) + assert result.exit_code == 0 + assert "Import created" in result.stdout + assert mock_post.calls[0].request.headers["Content-Type"] == "text/xml" + + +@respx.mock +def test_imports_list(auth_config): + respx.get("https://api.feedbin.com/v2/imports.json").mock( + return_value=httpx.Response( + 200, json=[{"id": 1, "complete": True, "created_at": "..."}] + ) + ) + result = runner.invoke(app, ["imports", "list"]) + assert result.exit_code == 0 + assert "ID: 1" in result.stdout + + +@respx.mock +def test_pages_save(auth_config): + respx.post("https://api.feedbin.com/v2/pages.json").mock( + return_value=httpx.Response(200, json={"id": 100}) + ) + result = runner.invoke( + app, ["pages", "save", "--url", "http://example.com", "--title", "Example"] + ) + assert result.exit_code == 0 + assert "Created Entry ID: 100" in result.stdout + + +@respx.mock +def test_icons_list(auth_config): + respx.get("https://api.feedbin.com/v2/icons.json").mock( + return_value=httpx.Response( + 200, json=[{"host": "example.com", "url": "http://example.com/icon.png"}] + ) + ) + result = runner.invoke(app, ["icons", "list"]) + assert result.exit_code == 0 + assert "example.com: http://example.com/icon.png" in result.stdout + + +@respx.mock +def test_extract(auth_config, config_path): + # Setup extract config manually since auth_config only sets auth + import tomlkit + + doc = tomlkit.parse(config_path.read_text()) + doc["extract"] = {"username": "user", "secret": "secret"} + config_path.write_text(tomlkit.dumps(doc)) + + respx.get( + url__regex=r"https://extract\.feedbin\.com/parser/user/.*" + ).mock( + return_value=httpx.Response(200, json={"title": "Extracted", "word_count": 100, "excerpt": "..."}) + ) + + result = runner.invoke(app, ["extract", "http://example.com"]) + assert result.exit_code == 0 + assert "Title: Extracted" in result.stdout \ No newline at end of file diff --git a/tests/test_tags.py b/tests/test_tags.py new file mode 100644 index 0000000..d948150 --- /dev/null +++ b/tests/test_tags.py @@ -0,0 +1,59 @@ +import httpx +import respx +from typer.testing import CliRunner +from feedscope import app + +runner = CliRunner() + + +def test_tags_help(): + result = runner.invoke(app, ["tags", "--help"]) + assert result.exit_code == 0 + assert "Manage tags" in result.stdout + + +@respx.mock +def test_tags_rename(auth_config): + respx.post("https://api.feedbin.com/v2/tags.json").mock( + return_value=httpx.Response(200, json=[]) + ) + result = runner.invoke( + app, ["tags", "rename", "--old-name", "Old", "--new-name", "New"] + ) + assert result.exit_code == 0 + + +@respx.mock +def test_tags_delete(auth_config): + respx.request("DELETE", "https://api.feedbin.com/v2/tags.json").mock( + return_value=httpx.Response(200, json=[]) + ) + 
result = runner.invoke(app, ["tags", "delete", "--name", "Tag"], input="y\n") + assert result.exit_code == 0 + + +@respx.mock +def test_taggings_crud(auth_config): + # List + respx.get("https://api.feedbin.com/v2/taggings.json").mock( + return_value=httpx.Response(200, json=[{"id": 1, "feed_id": 10, "name": "Tag"}]) + ) + result = runner.invoke(app, ["taggings", "list"]) + assert result.exit_code == 0 + assert "Tag" in result.stdout + + # Create + respx.post("https://api.feedbin.com/v2/taggings.json").mock( + return_value=httpx.Response(201, json={"id": 2}) + ) + result = runner.invoke( + app, ["taggings", "create", "--feed-id", "10", "--name", "NewTag"] + ) + assert result.exit_code == 0 + + # Delete + respx.delete("https://api.feedbin.com/v2/taggings/2.json").mock( + return_value=httpx.Response(204) + ) + result = runner.invoke(app, ["taggings", "delete", "2"], input="y\n") + assert result.exit_code == 0 diff --git a/uv.lock b/uv.lock index 054da55..40189f8 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.11" [[package]] @@ -79,6 +79,7 @@ dependencies = [ { name = "platformdirs" }, { name = "pydantic-settings" }, { name = "rich" }, + { name = "stamina" }, { name = "tomlkit" }, { name = "typer" }, ] @@ -87,6 +88,7 @@ dependencies = [ dev = [ { name = "poethepoet" }, { name = "pytest" }, + { name = "respx" }, { name = "ruff" }, { name = "ty" }, ] @@ -100,6 +102,7 @@ requires-dist = [ { name = "platformdirs", specifier = ">=4.0.0" }, { name = "pydantic-settings", specifier = ">=2.0.0" }, { name = "rich", specifier = ">=14.1.0" }, + { name = "stamina", specifier = ">=25.2.0" }, { name = "tomlkit", specifier = ">=0.12.0" }, { name = "typer", specifier = ">=0.16.1" }, ] @@ -108,6 +111,7 @@ requires-dist = [ dev = [ { name = "poethepoet", specifier = ">=0.32.2" }, { name = "pytest", specifier = ">=8.3.3" }, + { name = "respx", specifier = ">=0.22.0" }, { name = "ruff", specifier = ">=0.7.3" }, { name = "ty", specifier = "==0.0.1a27" }, ] @@ -567,6 +571,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] +[[package]] +name = "respx" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, +] + [[package]] name = "rich" version = "14.1.0" @@ -624,6 +640,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "stamina" +version = "25.2.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "tenacity" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/b7/8064b246b3d684720080ee8ffbf1dde5caabe852eb9cb53655eb97992af2/stamina-25.2.0.tar.gz", hash = "sha256:fdff938789e8a0c4c496e1ee8a08ee3c7c3351239f235b53e60d4f5964d07e19", size = 565737, upload-time = "2025-12-11T09:16:59.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/81/c525760353dff91ae2e4c42c3f3d9bf0bfeecbb6165cc393e86915f1717d/stamina-25.2.0-py3-none-any.whl", hash = "sha256:7f0de7dba735464c256a31e6372c01b8bb51fb6efd649e6773f4ce804462feea", size = 18791, upload-time = "2025-12-11T09:16:57.235Z" }, +] + +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + [[package]] name = "tomlkit" version = "0.13.3"