From b5d9cdfdcd74fb69e3892ea3a7a47fe017fab938 Mon Sep 17 00:00:00 2001 From: Vlada Dusek Date: Thu, 14 May 2026 21:43:25 +0200 Subject: [PATCH] feat: accept camelCase keys in input TypedDicts Synthesize a `CamelDict` sibling for every input TypedDict so users can pass API-shaped dicts and still satisfy the type checker. Closes #756. --- .rules.md | 2 + scripts/postprocess_generated_models.py | 203 +++++++++++++++++- .../_resource_clients/request_queue.py | 25 ++- src/apify_client/_typeddicts.py | 183 ++++++++++++++++ src/apify_client/_types.py | 23 +- .../unit/test_postprocess_generated_models.py | 152 +++++++++++++ 6 files changed, 569 insertions(+), 19 deletions(-) diff --git a/.rules.md b/.rules.md index 3d24ea05..a1557d6b 100644 --- a/.rules.md +++ b/.rules.md @@ -75,6 +75,8 @@ Docstrings are written on sync clients and **automatically copied** to async cli `src/apify_client/_models.py` and `src/apify_client/_typeddicts.py` are **auto-generated** — do not edit them manually. Every Pydantic model and TypedDict comes from the OpenAPI spec. +Each input-side TypedDict ships in two casings: snake_case (`RequestDict`) and camelCase (`RequestCamelDict`). The camel variants are synthesized in `scripts/postprocess_generated_models.py` from the Pydantic `Field(alias=...)` map; resource-client signatures union both with the Pydantic model so users can pass either casing without losing type-checker support. 
+ - Generated by `datamodel-code-generator` from the OpenAPI spec at `https://docs.apify.com/api/openapi.json` (config in `pyproject.toml` under `[tool.datamodel-codegen]`, aliases in `datamodel_codegen_aliases.json`) - After generation, `scripts/postprocess_generated_models.py` is run to apply additional fixes - To regenerate locally: diff --git a/scripts/postprocess_generated_models.py b/scripts/postprocess_generated_models.py index f9146a5e..d0f5b483 100644 --- a/scripts/postprocess_generated_models.py +++ b/scripts/postprocess_generated_models.py @@ -15,6 +15,10 @@ dependencies). The file is generated in full by datamodel-codegen; the trimming happens here. - Rename every kept class to add a `Dict` suffix so it doesn't clash with the Pydantic model name (e.g. `WebhookCreate` -> `WebhookCreateDict`) and rewire references. +- Generate a camelCase sibling for every kept TypedDict (`FooDict` -> `FooCamelDict`) so users + can pass API-shaped dicts and still satisfy the type checker. Field identifiers are looked up + in the Pydantic alias map extracted from `_models.py`; nested TypedDict refs are rewired to + the camel variant. - Add `@docs_group('Typed dicts')` to every kept class. """ @@ -391,6 +395,195 @@ def rename_with_dict_suffix(content: str, names: set[str]) -> str: return content +def _extract_alias_from_field_call(field_call: ast.Call) -> str | None: + """Return the `alias=` kwarg value from a `Field(...)` call, or None if not present.""" + for kw in field_call.keywords: + if kw.arg == 'alias' and isinstance(kw.value, ast.Constant) and isinstance(kw.value.value, str): + return kw.value.value + return None + + +def _extract_class_field_aliases(class_node: ast.ClassDef) -> dict[str, str]: + """Return `{snake_field: api_field}` for every annotated field declared on `class_node`. + + Fields without a `Field(alias=...)` map to themselves (their declared Python name matches the + API name — typical for single-word fields like `url`, `id`). 
+ """ + aliases: dict[str, str] = {} + for stmt in class_node.body: + if not isinstance(stmt, ast.AnnAssign) or not isinstance(stmt.target, ast.Name): + continue + field_name = stmt.target.id + if field_name == 'model_config': + continue + # Default: no alias means snake name == API name. + api_name = field_name + # Walk the annotation to find a nested `Field(alias='...')` call inside `Annotated[...]`. + for sub in ast.walk(stmt.annotation): + if isinstance(sub, ast.Call) and isinstance(sub.func, ast.Name) and sub.func.id == 'Field': + found = _extract_alias_from_field_call(sub) + if found is not None: + api_name = found + break + aliases[field_name] = api_name + return aliases + + +def build_alias_map(models_source: str) -> dict[str, dict[str, str]]: + """Return `{ModelName: {snake_field: api_field}}` for every Pydantic model in `models_source`. + + The map is the source of truth for camelCase field names: it captures both `Field(alias=...)` + overrides and the bare-name case (single-word fields without an alias). Used when synthesizing + camelCase TypedDict variants so the API spelling round-trips losslessly. + """ + tree = ast.parse(models_source) + return {node.name: _extract_class_field_aliases(node) for node in tree.body if isinstance(node, ast.ClassDef)} + + +def _camel_dict_name(snake_name: str) -> str: + """Insert `Camel` before the trailing `Dict` (e.g. 
`RequestDict` -> `RequestCamelDict`).""" + if not snake_name.endswith('Dict'): + raise ValueError(f"Expected name to end with 'Dict': {snake_name!r}") + return snake_name[: -len('Dict')] + 'CamelDict' + + +def _is_dict_str_any(node: ast.expr) -> bool: + """Return True if `node` is a `dict[str, Any]` subscript (casing-agnostic open mapping).""" + return isinstance(node, ast.Subscript) and isinstance(node.value, ast.Name) and node.value.id == 'dict' + + +def _rename_fields_in_class_block(block: list[str], field_aliases: dict[str, str]) -> list[str]: + """Rewrite each field declaration line in `block` using `field_aliases`. + + Matches lines of the form `snake_name: ...` and substitutes the identifier when an + alias is present. Multi-line annotations and trailing default values are preserved verbatim + because only the field name on the first line is replaced. + """ + field_decl = re.compile(r'^(\s+)([a-z_][a-z0-9_]*)(\s*:)') + out: list[str] = [] + for line in block: + m = field_decl.match(line) + if m is None: + out.append(line) + continue + indent, name, colon = m.group(1), m.group(2), m.group(3) + api_name = field_aliases.get(name) + if api_name is None or api_name == name: + out.append(line) + continue + out.append(f'{indent}{api_name}{colon}{line[m.end() :]}') + return out + + +def _rename_typeddict_refs_in_block(block: list[str], rename_set: set[str]) -> list[str]: + """Rewrite every whole-word occurrence of each name in `rename_set` to its camel form. + + Operates on the block as a single string so refs spanning multiple lines (e.g. annotations + wrapped across lines) are caught. + """ + if not rename_set: + return block + text = '\n'.join(block) + # `\b` anchors already prevent partial-prefix matches; we just iterate the set in any stable + # order. Sorting keeps the substitution deterministic across Python hash seeds. 
+ for snake in sorted(rename_set): + text = re.sub(rf'\b{re.escape(snake)}\b', _camel_dict_name(snake), text) + return text.split('\n') + + +def add_camel_case_typeddicts(content: str, alias_map: dict[str, dict[str, str]]) -> str: + """Insert a camelCase sibling for every TypedDict and TypeAlias in `content`. + + For each class `FooDict(TypedDict)` and each `FooDict: TypeAlias = ...`, emit a sibling + `FooCamelDict` directly after the original. Field identifiers are renamed using + `alias_map['Foo']`; nested TypedDict references in annotations are rewired to their camel + variant via whole-word substitution. + + `TaskInputDict: TypeAlias = dict[str, Any]` and similar casing-agnostic aliases get a trivial + camel alias too, so refs from other camel TypedDicts (e.g. `RequestBaseCamelDict.userData: + NotRequired[RequestUserDataCamelDict]`) resolve cleanly. + + Idempotent: blocks whose name already ends with `CamelDict` are skipped. + """ + tree = ast.parse(content) + lines = content.split('\n') + + # Pass 1: gather every snake-side symbol that needs a camel sibling. + snake_classes: list[tuple[ast.ClassDef, int, int]] = [] # node, block_start, block_end (exclusive) + snake_aliases: list[tuple[int, int]] = [] # block_start, block_end + flat_aliases: list[tuple[int, str]] = [] # block_end, alias_name + + body_with_trailing_docstrings = _extract_top_level_symbols(tree) + end_by_name: dict[str, int] = {name: end for name, _, end in body_with_trailing_docstrings} + existing_symbols: set[str] = {name for name, _, _ in body_with_trailing_docstrings} + + for node in tree.body: + if isinstance(node, ast.ClassDef): + # Every class kept in `_typeddicts.py` is a TypedDict — either directly (base is + # `TypedDict`) or by inheriting from a sibling TypedDict (e.g. `RequestDict(RequestBaseDict)`). + # The `Dict` suffix is the load-bearing filter; the base check is informational only. 
+ if not node.name.endswith('Dict') or node.name.endswith('CamelDict'): + continue + if _camel_dict_name(node.name) in existing_symbols: + continue + start = node.lineno - 1 + if start > 0 and lines[start - 1].lstrip().startswith('@'): + start -= 1 + end = end_by_name.get(node.name, node.end_lineno or node.lineno) + snake_classes.append((node, start, end)) + elif ( + isinstance(node, ast.AnnAssign) + and isinstance(node.target, ast.Name) + and isinstance(node.annotation, ast.Name) + and node.annotation.id == 'TypeAlias' + ): + name = node.target.id + if not name.endswith('Dict') or name.endswith('CamelDict'): + continue + if _camel_dict_name(name) in existing_symbols: + continue + if node.value is None: + continue + start = node.lineno - 1 + end = end_by_name.get(name, node.end_lineno or node.lineno) + if _is_dict_str_any(node.value): + flat_aliases.append((end, name)) + else: + snake_aliases.append((start, end)) + + # The rename set covers EVERY snake-side `*Dict` symbol in the file (not just the ones we + # need to clone) so nested refs inside a cloned block still rewire correctly even on re-runs + # where most camel siblings already exist. + rename_set: set[str] = { + name for name in existing_symbols if name.endswith('Dict') and not name.endswith('CamelDict') + } + + # Pass 2: build camel blocks. 
+ insertions: list[tuple[int, list[str]]] = [] + + for class_node, start, end in snake_classes: + block = lines[start:end] + renamed_refs = _rename_typeddict_refs_in_block(block, rename_set) + field_aliases = alias_map.get(class_node.name[: -len('Dict')], {}) + camel_block = _rename_fields_in_class_block(renamed_refs, field_aliases) + insertions.append((end, ['', *camel_block])) + + for start, end in snake_aliases: + block = lines[start:end] + camel_block = _rename_typeddict_refs_in_block(block, rename_set) + insertions.append((end, ['', *camel_block])) + + for end, name in flat_aliases: + insertions.append((end, ['', f'{_camel_dict_name(name)}: TypeAlias = dict[str, Any]'])) + + # Insert in reverse line order so earlier indices stay valid. + new_lines = lines[:] + for after, block in sorted(insertions, key=lambda i: i[0], reverse=True): + new_lines[after:after] = block + + return _collapse_blank_lines('\n'.join(new_lines)) + + def postprocess_models(models_path: Path, literals_path: Path) -> list[Path]: """Apply `_models.py`-specific fixes and emit `_literals.py`. @@ -414,13 +607,14 @@ def postprocess_models(models_path: Path, literals_path: Path) -> list[Path]: return changed -def postprocess_typeddicts(path: Path) -> bool: +def postprocess_typeddicts(path: Path, alias_map: dict[str, dict[str, str]]) -> bool: """Apply `_typeddicts.py`-specific fixes. 
Returns True if the file changed.""" original = path.read_text() pruned, kept = prune_typeddicts(original, RESOURCE_INPUT_TYPEDDICTS) renamed = rename_with_dict_suffix(pruned, kept) flattened = flatten_empty_typeddicts(renamed) - final = add_docs_group_decorators(flattened, 'Typed dicts') + camelized = add_camel_case_typeddicts(flattened, alias_map) + final = add_docs_group_decorators(camelized, 'Typed dicts') if final == original: return False path.write_text(final) @@ -442,9 +636,10 @@ def main() -> None: else: print('No fixes needed for _models.py / _literals.py') - if postprocess_typeddicts(TYPEDDICTS_PATH): + alias_map = build_alias_map(MODELS_PATH.read_text()) + if postprocess_typeddicts(TYPEDDICTS_PATH, alias_map): changed.append(TYPEDDICTS_PATH) - print(f'Pruned and renamed TypedDicts in {TYPEDDICTS_PATH}') + print(f'Pruned, renamed, and camelized TypedDicts in {TYPEDDICTS_PATH}') else: print('No fixes needed for _typeddicts.py') diff --git a/src/apify_client/_resource_clients/request_queue.py b/src/apify_client/_resource_clients/request_queue.py index 8bf31fb6..6b81e3ac 100644 --- a/src/apify_client/_resource_clients/request_queue.py +++ b/src/apify_client/_resource_clients/request_queue.py @@ -45,7 +45,14 @@ from datetime import timedelta from apify_client._literals import GeneralAccess - from apify_client._typeddicts import RequestDict, RequestDraftDeleteDict, RequestDraftDict + from apify_client._typeddicts import ( + RequestCamelDict, + RequestDict, + RequestDraftCamelDict, + RequestDraftDeleteCamelDict, + RequestDraftDeleteDict, + RequestDraftDict, + ) from apify_client._types import Timeout _RQ_MAX_REQUESTS_PER_BATCH = 25 @@ -190,7 +197,7 @@ def list_and_lock_head( def add_request( self, - request: RequestDraftDict | RequestDraft, + request: RequestDraftDict | RequestDraftCamelDict | RequestDraft, *, forefront: bool | None = None, timeout: Timeout = 'short', @@ -252,7 +259,7 @@ def get_request(self, request_id: str, *, timeout: Timeout = 'short') -> 
Request def update_request( self, - request: RequestDict | Request, + request: RequestDict | RequestCamelDict | Request, *, forefront: bool | None = None, timeout: Timeout = 'medium', @@ -366,7 +373,7 @@ def delete_request_lock( def batch_add_requests( self, - requests: list[RequestDraft] | list[RequestDraftDict], + requests: list[RequestDraft] | list[RequestDraftDict] | list[RequestDraftCamelDict], *, forefront: bool = False, max_parallel: int = 1, @@ -464,7 +471,7 @@ def batch_add_requests( def batch_delete_requests( self, - requests: list[RequestDraftDelete] | list[RequestDraftDeleteDict], + requests: list[RequestDraftDelete] | list[RequestDraftDeleteDict] | list[RequestDraftDeleteCamelDict], *, timeout: Timeout = 'short', ) -> BatchDeleteResult: @@ -747,7 +754,7 @@ async def list_and_lock_head( async def add_request( self, - request: RequestDraftDict | RequestDraft, + request: RequestDraftDict | RequestDraftCamelDict | RequestDraft, *, forefront: bool | None = None, timeout: Timeout = 'short', @@ -807,7 +814,7 @@ async def get_request(self, request_id: str, *, timeout: Timeout = 'short') -> R async def update_request( self, - request: RequestDict | Request, + request: RequestDict | RequestCamelDict | Request, *, forefront: bool | None = None, timeout: Timeout = 'medium', @@ -968,7 +975,7 @@ async def _batch_add_requests_worker( async def batch_add_requests( self, - requests: list[RequestDraft] | list[RequestDraftDict], + requests: list[RequestDraft] | list[RequestDraftDict] | list[RequestDraftCamelDict], *, forefront: bool = False, max_parallel: int = 5, @@ -1077,7 +1084,7 @@ async def batch_add_requests( async def batch_delete_requests( self, - requests: list[RequestDraftDelete] | list[RequestDraftDeleteDict], + requests: list[RequestDraftDelete] | list[RequestDraftDeleteDict] | list[RequestDraftDeleteCamelDict], *, timeout: Timeout = 'short', ) -> BatchDeleteResult: diff --git a/src/apify_client/_typeddicts.py b/src/apify_client/_typeddicts.py index 
1f4c7e94..fa430ab7 100644 --- a/src/apify_client/_typeddicts.py +++ b/src/apify_client/_typeddicts.py @@ -49,6 +49,48 @@ class RequestBaseDict(TypedDict): """ +@docs_group('Typed dicts') +class RequestBaseCamelDict(TypedDict): + uniqueKey: NotRequired[str] + """ + A unique key used for request de-duplication. Requests with the same unique key are considered identical. + """ + url: NotRequired[str] + """ + The URL of the request. + """ + method: NotRequired[Literal['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT', 'OPTIONS', 'TRACE', 'PATCH']] + retryCount: NotRequired[int] + """ + The number of times this request has been retried. + """ + loadedUrl: NotRequired[str | None] + """ + The final URL that was loaded, after redirects (if any). + """ + payload: NotRequired[str | dict[str, Any] | None] + """ + The request payload, typically used with POST or PUT requests. + """ + headers: NotRequired[dict[str, Any] | None] + """ + HTTP headers sent with the request. + """ + userData: NotRequired[RequestUserDataCamelDict] + noRetry: NotRequired[bool | None] + """ + Indicates whether the request should not be retried if processing fails. + """ + errorMessages: NotRequired[list[str]] + """ + Error messages recorded from failed processing attempts. + """ + handledAt: NotRequired[str | None] + """ + The timestamp when the request was marked as handled, if applicable. + """ + + @docs_group('Typed dicts') class RequestDict(RequestBaseDict): """A request stored in the request queue, including its metadata and processing state.""" @@ -59,6 +101,16 @@ class RequestDict(RequestBaseDict): """ +@docs_group('Typed dicts') +class RequestCamelDict(RequestBaseCamelDict): + """A request stored in the request queue, including its metadata and processing state.""" + + id: NotRequired[str] + """ + A unique identifier assigned to the request. 
+ """ + + @docs_group('Typed dicts') class RequestDraftDict(TypedDict): """A request that failed to be processed during a request queue operation and can be retried.""" @@ -78,6 +130,25 @@ class RequestDraftDict(TypedDict): method: NotRequired[Literal['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT', 'OPTIONS', 'TRACE', 'PATCH']] +@docs_group('Typed dicts') +class RequestDraftCamelDict(TypedDict): + """A request that failed to be processed during a request queue operation and can be retried.""" + + id: NotRequired[str] + """ + A unique identifier assigned to the request. + """ + uniqueKey: str + """ + A unique key used for request de-duplication. Requests with the same unique key are considered identical. + """ + url: str + """ + The URL of the request. + """ + method: NotRequired[Literal['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT', 'OPTIONS', 'TRACE', 'PATCH']] + + @docs_group('Typed dicts') class RequestDraftDeleteByIdDict(TypedDict): """A request that should be deleted, identified by its ID.""" @@ -92,6 +163,20 @@ class RequestDraftDeleteByIdDict(TypedDict): """ +@docs_group('Typed dicts') +class RequestDraftDeleteByIdCamelDict(TypedDict): + """A request that should be deleted, identified by its ID.""" + + id: str + """ + A unique identifier assigned to the request. + """ + uniqueKey: NotRequired[str] + """ + A unique key used for request de-duplication. Requests with the same unique key are considered identical. + """ + + @docs_group('Typed dicts') class RequestDraftDeleteByUniqueKeyDict(TypedDict): """A request that should be deleted, identified by its unique key.""" @@ -106,17 +191,40 @@ class RequestDraftDeleteByUniqueKeyDict(TypedDict): """ +@docs_group('Typed dicts') +class RequestDraftDeleteByUniqueKeyCamelDict(TypedDict): + """A request that should be deleted, identified by its unique key.""" + + id: NotRequired[str] + """ + A unique identifier assigned to the request. + """ + uniqueKey: str + """ + A unique key used for request de-duplication. 
Requests with the same unique key are considered identical. + """ + + RequestDraftDeleteDict: TypeAlias = RequestDraftDeleteByIdDict | RequestDraftDeleteByUniqueKeyDict """ A request that should be deleted. """ +RequestDraftDeleteCamelDict: TypeAlias = RequestDraftDeleteByIdCamelDict | RequestDraftDeleteByUniqueKeyCamelDict +""" +A request that should be deleted. +""" + RequestUserDataDict: TypeAlias = dict[str, Any] +RequestUserDataCamelDict: TypeAlias = dict[str, Any] + TaskInputDict: TypeAlias = dict[str, Any] +TaskInputCamelDict: TypeAlias = dict[str, Any] + @docs_group('Typed dicts') class WebhookConditionDict(TypedDict): @@ -125,6 +233,13 @@ class WebhookConditionDict(TypedDict): actor_run_id: NotRequired[str | None] +@docs_group('Typed dicts') +class WebhookConditionCamelDict(TypedDict): + actorId: NotRequired[str | None] + actorTaskId: NotRequired[str | None] + actorRunId: NotRequired[str | None] + + @docs_group('Typed dicts') class WebhookCreateDict(TypedDict): is_ad_hoc: NotRequired[bool | None] @@ -155,6 +270,36 @@ class WebhookCreateDict(TypedDict): should_interpolate_strings: NotRequired[bool | None] +@docs_group('Typed dicts') +class WebhookCreateCamelDict(TypedDict): + isAdHoc: NotRequired[bool | None] + eventTypes: list[ + Literal[ + 'ACTOR.BUILD.ABORTED', + 'ACTOR.BUILD.CREATED', + 'ACTOR.BUILD.FAILED', + 'ACTOR.BUILD.SUCCEEDED', + 'ACTOR.BUILD.TIMED_OUT', + 'ACTOR.RUN.ABORTED', + 'ACTOR.RUN.CREATED', + 'ACTOR.RUN.FAILED', + 'ACTOR.RUN.RESURRECTED', + 'ACTOR.RUN.SUCCEEDED', + 'ACTOR.RUN.TIMED_OUT', + 'TEST', + ] + ] + condition: WebhookConditionCamelDict + idempotencyKey: NotRequired[str | None] + ignoreSslErrors: NotRequired[bool | None] + doNotRetry: NotRequired[bool | None] + requestUrl: str + payloadTemplate: NotRequired[str | None] + headersTemplate: NotRequired[str | None] + description: NotRequired[str | None] + shouldInterpolateStrings: NotRequired[bool | None] + + @docs_group('Typed dicts') class WebhookRepresentationDict(TypedDict): 
"""Minimal representation of an ad-hoc webhook attached to a single Actor run or build via the @@ -191,3 +336,41 @@ class WebhookRepresentationDict(TypedDict): """ Optional template for the HTTP headers sent by the webhook. """ + + +@docs_group('Typed dicts') +class WebhookRepresentationCamelDict(TypedDict): + """Minimal representation of an ad-hoc webhook attached to a single Actor run or build via the + `webhooks` query parameter. The query parameter value is a Base64-encoded JSON array whose + items match this schema. Persistent webhook fields (e.g. `condition`) are not used here. + + """ + + eventTypes: list[ + Literal[ + 'ACTOR.BUILD.ABORTED', + 'ACTOR.BUILD.CREATED', + 'ACTOR.BUILD.FAILED', + 'ACTOR.BUILD.SUCCEEDED', + 'ACTOR.BUILD.TIMED_OUT', + 'ACTOR.RUN.ABORTED', + 'ACTOR.RUN.CREATED', + 'ACTOR.RUN.FAILED', + 'ACTOR.RUN.RESURRECTED', + 'ACTOR.RUN.SUCCEEDED', + 'ACTOR.RUN.TIMED_OUT', + 'TEST', + ] + ] + requestUrl: str + """ + The URL to which the webhook sends its payload. + """ + payloadTemplate: NotRequired[str | None] + """ + Optional template for the JSON payload sent by the webhook. + """ + headersTemplate: NotRequired[str | None] + """ + Optional template for the HTTP headers sent by the webhook. 
+ """ diff --git a/src/apify_client/_types.py b/src/apify_client/_types.py index 8d285dce..e3bb3cfa 100644 --- a/src/apify_client/_types.py +++ b/src/apify_client/_types.py @@ -4,17 +4,28 @@ from typing import Literal from apify_client._models import WebhookCreate, WebhookRepresentation -from apify_client._typeddicts import WebhookCreateDict, WebhookRepresentationDict +from apify_client._typeddicts import ( + WebhookCreateCamelDict, + WebhookCreateDict, + WebhookRepresentationCamelDict, + WebhookRepresentationDict, +) WebhooksList = ( - list[WebhookCreate] | list[WebhookCreateDict] | list[WebhookRepresentation] | list[WebhookRepresentationDict] + list[WebhookCreate] + | list[WebhookCreateDict] + | list[WebhookCreateCamelDict] + | list[WebhookRepresentation] + | list[WebhookRepresentationDict] + | list[WebhookRepresentationCamelDict] ) """Type for the `webhooks` parameter on resource-client `start`/`call` methods and `from_webhooks`. -`WebhookRepresentation` / `WebhookRepresentationDict` are the minimal ad-hoc webhook shape (only -`event_types` and `request_url` required). `WebhookCreate` / `WebhookCreateDict` are accepted so a -persistent-webhook definition can be reused; their fields not relevant to ad-hoc webhooks (e.g. -`condition`) are ignored at runtime. +`WebhookRepresentation` / `WebhookRepresentationDict` / `WebhookRepresentationCamelDict` are the +minimal ad-hoc webhook shape (only `event_types` and `request_url` required). `WebhookCreate` / +`WebhookCreateDict` / `WebhookCreateCamelDict` are accepted so a persistent-webhook definition +can be reused; their fields not relevant to ad-hoc webhooks (e.g. `condition`) are ignored at +runtime. The `*CamelDict` variants accept camelCase keys matching the Apify API spelling. 
""" TerminalActorJobStatus = Literal['SUCCEEDED', 'FAILED', 'TIMED-OUT', 'ABORTED'] diff --git a/tests/unit/test_postprocess_generated_models.py b/tests/unit/test_postprocess_generated_models.py index bd024db7..7be2fa08 100644 --- a/tests/unit/test_postprocess_generated_models.py +++ b/tests/unit/test_postprocess_generated_models.py @@ -3,12 +3,16 @@ import textwrap from scripts.postprocess_generated_models import ( + add_camel_case_typeddicts, add_docs_group_decorators, + build_alias_map, convert_enums_to_literals, fix_discriminators, split_literals_to_file, ) +from apify_client._models import Request + # -- fix_discriminators ------------------------------------------------------- @@ -412,3 +416,151 @@ def test_split_literals_to_file_output_has_valid_header() -> None: _, literals = split_literals_to_file(content) assert 'from __future__ import annotations' in literals assert 'from typing import Literal' in literals + + +# -- build_alias_map ---------------------------------------------------------- + + +def test_build_alias_map_extracts_field_aliases() -> None: + """`Field(alias='camelName')` annotations are captured as the API spelling.""" + models = textwrap.dedent("""\ + from typing import Annotated + from pydantic import BaseModel, Field + + class Foo(BaseModel): + user_id: Annotated[str, Field(alias='userId')] + retry_count: Annotated[int, Field(alias='retryCount')] = 0 + """) + result = build_alias_map(models) + assert result['Foo'] == {'user_id': 'userId', 'retry_count': 'retryCount'} + + +def test_build_alias_map_treats_unaliased_fields_as_self_named() -> None: + """Fields without `Field(alias=...)` map to themselves — single-word API spellings.""" + models = textwrap.dedent("""\ + from pydantic import BaseModel + + class Foo(BaseModel): + url: str + method: str + """) + result = build_alias_map(models) + assert result['Foo'] == {'url': 'url', 'method': 'method'} + + +def test_build_alias_map_skips_model_config() -> None: + """`model_config` is Pydantic 
plumbing, not a data field — exclude it from the alias map.""" + models = textwrap.dedent("""\ + from pydantic import BaseModel, ConfigDict + + class Foo(BaseModel): + model_config = ConfigDict(extra='allow') + url: str + """) + result = build_alias_map(models) + assert 'model_config' not in result['Foo'] + assert result['Foo'] == {'url': 'url'} + + +# -- add_camel_case_typeddicts ----------------------------------------------- + + +def test_add_camel_case_typeddicts_creates_sibling_class() -> None: + """A snake_case TypedDict gets a CamelDict sibling with renamed fields.""" + content = textwrap.dedent("""\ + from typing import NotRequired, TypedDict + + class FooDict(TypedDict): + user_id: NotRequired[str] + retry_count: NotRequired[int] + """) + alias_map = {'Foo': {'user_id': 'userId', 'retry_count': 'retryCount'}} + result = add_camel_case_typeddicts(content, alias_map) + assert 'class FooCamelDict(TypedDict):' in result + assert 'userId: NotRequired[str]' in result + assert 'retryCount: NotRequired[int]' in result + + +def test_add_camel_case_typeddicts_rewires_nested_refs() -> None: + """References to other snake TypedDicts in a cloned annotation are renamed to their camel form.""" + content = textwrap.dedent("""\ + from typing import NotRequired, TypedDict + + class BarDict(TypedDict): + x: int + + class FooDict(TypedDict): + nested: NotRequired[BarDict] + """) + alias_map = {'Foo': {'nested': 'nested'}, 'Bar': {'x': 'x'}} + result = add_camel_case_typeddicts(content, alias_map) + assert 'class FooCamelDict(TypedDict):' in result + assert 'nested: NotRequired[BarCamelDict]' in result + + +def test_add_camel_case_typeddicts_clones_typealias_unions() -> None: + """A `TypeAlias = A | B` union over TypedDicts gets a camel sibling referencing the camel members.""" + content = textwrap.dedent("""\ + from typing import TypeAlias, TypedDict + + class ADict(TypedDict): + x: int + + class BDict(TypedDict): + y: int + + UDict: TypeAlias = ADict | BDict + """) + 
alias_map = {'A': {'x': 'x'}, 'B': {'y': 'y'}} + result = add_camel_case_typeddicts(content, alias_map) + assert 'UCamelDict: TypeAlias = ACamelDict | BCamelDict' in result + + +def test_add_camel_case_typeddicts_creates_camel_for_dict_str_any_alias() -> None: + """A `dict[str, Any]` TypeAlias is casing-agnostic but still gets a Camel partner so refs resolve.""" + content = textwrap.dedent("""\ + from typing import Any, NotRequired, TypeAlias, TypedDict + + UserDataDict: TypeAlias = dict[str, Any] + + class FooDict(TypedDict): + data: NotRequired[UserDataDict] + """) + alias_map = {'Foo': {'data': 'data'}} + result = add_camel_case_typeddicts(content, alias_map) + assert 'UserDataCamelDict: TypeAlias = dict[str, Any]' in result + assert 'data: NotRequired[UserDataCamelDict]' in result + + +def test_add_camel_case_typeddicts_is_idempotent() -> None: + """Re-running on a file that already has Camel siblings doesn't duplicate them.""" + content = textwrap.dedent("""\ + from typing import NotRequired, TypedDict + + class FooDict(TypedDict): + user_id: NotRequired[str] + """) + alias_map = {'Foo': {'user_id': 'userId'}} + once = add_camel_case_typeddicts(content, alias_map) + twice = add_camel_case_typeddicts(once, alias_map) + assert once == twice + assert twice.count('class FooCamelDict(TypedDict):') == 1 + + +def test_add_camel_case_typeddicts_camel_validates_with_pydantic() -> None: + """A camel-keyed dict literal round-trips through the corresponding Pydantic model — runtime parity.""" + camel_payload = { + 'uniqueKey': 'GET|abc', + 'url': 'https://example.com', + 'retryCount': 0, + 'loadedUrl': 'https://example.com/final', + 'userData': {'tag': 'x'}, + } + snake_payload = { + 'unique_key': 'GET|abc', + 'url': 'https://example.com', + 'retry_count': 0, + 'loaded_url': 'https://example.com/final', + 'user_data': {'tag': 'x'}, + } + assert Request.model_validate(camel_payload) == Request.model_validate(snake_payload)