diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 0000000..054fe6a --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,15 @@ +{ + "hooks": { + "PreToolUse": [ + { + "matcher": "Edit|Write", + "hooks": [ + { + "type": "command", + "command": "python3 -c \"\nimport json, sys\nfrom pathlib import Path\ninp = json.load(sys.stdin)\npath = inp.get('file_path', '')\nif path.endswith('.pyi'):\n pyih = Path(path).with_suffix('.pyih')\n if pyih.exists():\n print(f'BLOCKED: {Path(path).name} is auto-generated from {pyih.name}. Edit the .pyih source instead, then run: python translate_pyih.py', file=sys.stderr)\n sys.exit(2)\n\"" + } + ] + } + ] + } +} diff --git a/AGENTS.md b/AGENTS.md new file mode 120000 index 0000000..681311e --- /dev/null +++ b/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..49d128d --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,81 @@ +## Project Overview + +Funcy is a Python library of functional programming tools. It has zero runtime dependencies and supports Python 3.4+ and PyPy3. + +## Commands + +```bash +# Run all tests +pytest -W error + +# Run a single test file +pytest tests/test_seqs.py + +# Run a single test +pytest tests/test_seqs.py::test_take + +# Lint +flake8 funcy +flake8 --select=F,E5,W tests + +# Type checking tests (validates stubs against # E: markers) +python type_tests/run.py # run all three checkers +python type_tests/run.py mypy # run a single checker +python type_tests/run.py pyright +python type_tests/run.py ty +python type_tests/run.py coverage # verify all public names have type tests + +# Verify stubs match runtime signatures +tox -e stubtest + +# Build docs +cd docs && sphinx-build -b html -W . 
_build/html + +# Tox (multi-version testing) +tox -e py313 +tox -e lint +tox -e docs +tox -e typetest +tox -e stubtest +``` + +## Architecture + +All public API is re-exported through `funcy/__init__.py` via wildcard imports. Each module defines `__all__`. + +### Module Map + +- **seqs.py** - Sequence/iterator operations (`take`, `drop`, `first`, `map`, `filter`, `partition`, `chunks`, `group_by`, `distinct`, etc.) +- **colls.py** - Collection manipulation (`merge`, `walk`, `select`, `get_in`, `set_in`, `split_keys`, etc.) +- **funcs.py** - Function composition (`identity`, `partial`, `curry`, `compose`, `complement`, `juxt`) +- **flow.py** - Control flow (`retry`, `throttle`, `ignore`, `silent`, `once`, `limit_error_rate`) +- **calc.py** - Caching/memoization (`memoize`, `cache`, `make_lookuper`) +- **decorators.py** - Decorator utilities (`@decorator`, `@wraps`, `ContextDecorator`) +- **debug.py** - Debugging helpers (`tap`, `log_calls`, `log_errors`, `print_durations`) +- **objects.py** - Object utilities (`cached_property`, `monkey`, `LazyObject`) +- **strings.py** - Regex wrappers (`re_find`, `re_all`, `re_test`) +- **types.py** - Type predicates (`isa`, `is_mapping`, `is_seq`) +- **tree.py** - Tree traversal (`tree_leaves`, `tree_nodes`) +- **funcolls.py** - Functional collection predicates (`all_fn`, `any_fn`, `none_fn`, `some_fn`) +- **funcmakers.py** - Extended function semantics (internal, not in `__all__` of `__init__`) +- **_inspect.py** - Function introspection helpers (internal) + +### Key Design Patterns + +- **Lazy by default**: Most sequence operations return iterators. Eager variants are prefixed with `l` (e.g., `lmap`, `lfilter`, `lcat`). +- **Extended function protocol** (`funcmakers.py`): Many functions accept not just callables but also regex strings, ints/slices (as `itemgetter`), dicts (as lookup), and sets (as membership test). This is handled by `make_func`/`make_pred`. 
+- **Type preservation**: Collection operations like `walk` preserve the input type (dict stays dict, set stays set). + +### Type Stubs + +For type stub details (patterns, `.pyih` code generation, type tests), see [CLAUDE_TYPES.md](CLAUDE_TYPES.md). For extended `.pyih` syntax reference, see [PYIH.md](PYIH.md). + +## Shell Rules + +- Never use `cat < _T`) instead of redundant overloads +- Use `Callable[[_K], _K2]` for functions that transform types (e.g. `walk_keys`) +- EMPTY sentinel: functions like `all(pred, seq=EMPTY)` that shift args use `@overload`, listed in `stubtest_allowlist.txt` +- Regex return types: `_ReResult = str | tuple[str, ...] | dict[str, str]` — can't narrow by pattern without a mypy plugin (pyright and ty have no plugin systems) + +## `.pyih` -> `.pyi` Code Generation + +Four modules use `.pyih` source files that are translated to `.pyi` stubs: `seqs`, `colls`, `funcmakers`, `funcs`. The translator is `translate_pyih.py`. **Never directly edit a `.pyi` that has a `.pyih` counterpart** — a repo hook blocks this. + +**Workflow**: edit the `.pyih` file, then run `python translate_pyih.py` to regenerate all `.pyi` files. + +For full `.pyih` syntax documentation (XFunc, XPred, collection expansion, xfunc_skip, etc.), see [PYIH.md](PYIH.md). + +## Type Tests + +Type tests live in `type_tests/` at repo root. The runner (`type_tests/run.py`) validates against three checkers (mypy, pyright, ty). 
+ +Test file markers: +- No marker: line must type-check cleanly on all checkers +- `# E: `: line must produce a type error (all checkers must error) +- `# XFAIL: `: line currently errors but shouldn't ideally (aspirational, all checkers) +- `# XFAIL[ty]: `: known failure for a specific checker (ty has many TypeVar inference bugs) +- `# E: reason # XFAIL[ty]: reason`: expected error, but ty doesn't catch it (mypy/pyright must error, ty is excused) +- `# R: type # XFAIL[ty]: reason`: expected reveal type, but ty gets it wrong (skips reveal check for ty) + +**Testing philosophy**: We care about our stubs working correctly, not about what checkers can infer. When testing with abstract types (Mapping, Sequence, Iterable), use real implementations — custom subclasses — not concrete types cast to abstract (e.g. `m: Mapping = d` where `d` is a dict). Checkers see through such casts and resolve to the concrete type, making the test misleading. A real `class MyMapping(Mapping[K, V])` forces the checker to use the abstract overload. + +`stubtest` verifies stubs match runtime signatures. Functions with EMPTY sentinel patterns go in `stubtest_allowlist.txt`. diff --git a/PYIH.md b/PYIH.md new file mode 100644 index 0000000..4eb63aa --- /dev/null +++ b/PYIH.md @@ -0,0 +1,109 @@ +# `.pyih` → `.pyi` Stub Generation + +Funcy uses `.pyih` (pyi higher) source files to generate `.pyi` type stubs. This avoids the cross-product explosion of overloads when typing the extended function protocol (7 variants) combined with collection type preservation. 
+ +## Quick Start + +```bash +# Edit the .pyih source +vim funcy/seqs.pyih + +# Regenerate all .pyi files +python translate_pyih.py + +# Verify types +python type_tests/run.py mypy +python type_tests/run.py pyright +python type_tests/run.py ty +``` + +## Which modules use `.pyih` + +| Module | `.pyih` source | Generated `.pyi` | +|--------|---------------|-----------------| +| seqs | `funcy/seqs.pyih` | `funcy/seqs.pyi` | +| colls | `funcy/colls.pyih` | `funcy/colls.pyi` | +| funcs | `funcy/funcs.pyih` | `funcy/funcs.pyi` | +| funcmakers | `funcy/funcmakers.pyih` | `funcy/funcmakers.pyi` | + +Other modules (`flow`, `calc`, `debug`, etc.) have hand-written `.pyi` stubs. + +## `.pyih` Syntax + +`.pyih` files are valid Python 3.12+ syntax. They use three custom constructs that `translate_pyih.py` expands: + +### `XFunc[[A], B]` — Extended mapper + +Used for function parameters that accept the extended function protocol (Callable, None, Set, Regex, int, slice, Mapping) and transform values from type `A` to type `B`. + +```python +def map(f: XFunc[[A], B], seq: Iterable[A]) -> Iterator[B]: ... +``` + +Expands to 7 overloads: + +| Variant | `f` type | `A` becomes | `B` becomes | +|---------|----------|-------------|-------------| +| Callable | `Callable[[A], B]` | A | B | +| None | `None` | A | A (identity) | +| Set | `AbstractSet[A]` | A | `bool` | +| Regex | `str \| bytes \| re.Pattern[str]` | `Any` | `_ReResult \| None` | +| int | `int` | `Sequence[_T]` | `_T` (itemgetter) | +| slice | `slice` | `Sequence[_T]` | `Sequence[_T]` | +| Mapping | `Mapping[A, B]` | A | B (lookup) | + +### `XPred[A]` — Extended predicate + +Used for function parameters that accept the extended function protocol as a predicate. The element type `A` is preserved (predicates filter, not transform). + +```python +def filter(pred: XPred[A], seq: Iterable[A]) -> Iterator[A]: ... 
+``` + +Expands to 7 overloads that constrain the input type: + +| Variant | `pred` type | `A` constraint | +|---------|------------|----------------| +| Callable | `Callable[[A], Any]` | A | +| None | `None` | A (truthiness) | +| Set | `AbstractSet[A]` | A (membership) | +| Regex | `str \| bytes \| re.Pattern[str]` | `str` | +| int | `int` | `Sequence[Any]` | +| slice | `slice` | `Sequence[Any]` | +| Mapping | `Mapping[A, Any]` | A (key existence) | + +### `[C: (list, set, ...)]` — Collection type expansion + +PEP 695 type parameter syntax. Generates one overload per concrete type, substituting `C` throughout. Combines with XFunc/XPred for quadratic expansion. + +```python +def walk[C: (list, set, frozenset)](f: XFunc[[A], B], coll: C[A]) -> C[B]: ... +``` + +Generates `3 * 7 = 21` overloads (3 collection types x 7 XFunc variants). + +### `# xfunc_skip: Callable` — Skip variants + +Placed before a function def. Skips named variant(s) during expansion. Useful when the Callable case needs a separate hand-written overload with more specific types. + +```python +# xfunc_skip: Callable +def compose(f: XFunc[[_T], _V]) -> Callable[[_T], _V]: ... +# Callable case handled separately with typed multi-arg overloads: +def compose(__f: Callable[..., _R], *rest: _XFunc) -> Callable[..., _R]: ... +``` + +### Passthrough + +Functions without `XFunc`, `XPred`, or collection type parameters pass through verbatim: + +```python +def take(n: int, seq: Iterable[_T]) -> list[_T]: ... # copied as-is +``` + +## Rules + +1. **Never edit `.pyi` files directly** if a `.pyih` counterpart exists — a repo hook blocks this. +2. After editing a `.pyih`, always run `python translate_pyih.py` to regenerate. +3. Run all three type checkers: `python type_tests/run.py mypy && python type_tests/run.py ty && python type_tests/run.py pyright` +4. Run `python -m mypy.stubtest funcy --allowlist stubtest_allowlist.txt` to verify stubs match runtime. 
diff --git a/README.rst b/README.rst index d15c2b8..b91c243 100644 --- a/README.rst +++ b/README.rst @@ -184,6 +184,12 @@ To run the tests using your default python: pip install -r test_requirements.txt pytest +To run type checking tests:: + + pip install mypy pyright + python type_tests/run.py mypy + python type_tests/run.py pyright + To fully run ``tox`` you need all the supported pythons to be installed. These are 3.4+ and PyPy3. You can run it for particular environment even in absense of all of the above:: @@ -191,6 +197,8 @@ of all of the above:: tox -e py310 tox -e pypy3 tox -e lint + tox -e typetest + tox -e stubtest .. |Build Status| image:: https://github.com/Suor/funcy/actions/workflows/test.yml/badge.svg diff --git a/docs/extended_fns.rst b/docs/extended_fns.rst index 88e3120..46b7c52 100644 --- a/docs/extended_fns.rst +++ b/docs/extended_fns.rst @@ -29,7 +29,7 @@ Sequence filtering :func:`filter` :func:`remove` :func:`distinct` Sequence splitting :func:`dropwhile` :func:`takewhile` :func:`split` :func:`split_by` :func:`partition_by` Aggregration :func:`group_by` :func:`count_by` :func:`group_by_keys` Collection transformation :func:`walk` :func:`walk_keys` :func:`walk_values` -Collection filtering :func:`select` :func:`select_keys` :func:`select_values` +Collection filtering :func:`select` :func:`select_keys` :func:`select_values` :func:`split_keys` Content tests :func:`all` :func:`any` :func:`none` :func:`one` :func:`some` :func:`is_distinct` Function logic :func:`all_fn` :func:`any_fn` :func:`none_fn` :func:`one_fn` :func:`some_fn` Function tools :func:`iffy` :func:`compose` :func:`rcompose` :func:`complement` :func:`juxt` :func:`all_fn` :func:`any_fn` :func:`none_fn` :func:`one_fn` :func:`some_fn` diff --git a/funcy/calc.pyi b/funcy/calc.pyi new file mode 100644 index 0000000..9cd6e62 --- /dev/null +++ b/funcy/calc.pyi @@ -0,0 +1,21 @@ +from collections.abc import Callable, Mapping +from datetime import timedelta +from typing import Any, TypeVar, 
overload + +__all__ = ['memoize', 'make_lookuper', 'silent_lookuper', 'cache'] + +_F = TypeVar('_F', bound=Callable[..., Any]) +_K = TypeVar('_K') +_V = TypeVar('_V') + +class SkipMemory(Exception): ... + +@overload +def memoize(func: _F) -> _F: ... +@overload +def memoize(*, key_func: Callable[..., Any]) -> Callable[[_F], _F]: ... + +def cache(timeout: int | float | timedelta, *, key_func: Callable[..., Any] | None = ...) -> Callable[[_F], _F]: ... + +def make_lookuper(func: Callable[..., Mapping[_K, _V]]) -> Callable[..., _V]: ... +def silent_lookuper(func: Callable[..., Mapping[_K, _V]]) -> Callable[..., _V | None]: ... diff --git a/funcy/colls.pyi b/funcy/colls.pyi new file mode 100644 index 0000000..8701c2f --- /dev/null +++ b/funcy/colls.pyi @@ -0,0 +1,1055 @@ +# THIS FILE IS AUTOGENERATED by translate_pyih.py from colls.pyih. DO NOT EDIT. + +import re +from collections.abc import Callable, Hashable, Iterable, Iterator, Mapping, MutableMapping, Set as AbstractSet, Sequence +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ['empty', 'iteritems', 'itervalues', + 'join', 'merge', 'join_with', 'merge_with', + 'walk', 'walk_keys', 'walk_values', 'select', 'select_keys', 'select_values', + 'split_keys', 'compact', + 'is_distinct', 'all', 'any', 'none', 'one', 'some', + 'zipdict', 'flip', 'project', 'omit', 'zip_values', 'zip_dicts', + 'where', 'pluck', 'pluck_attr', 'invoke', 'lwhere', 'lpluck', 'lpluck_attr', 'linvoke', + 'get_in', 'get_lax', 'set_in', 'update_in', 'del_in', 'has_path'] + +_K = TypeVar('_K') +_K2 = TypeVar('_K2') +_V = TypeVar('_V') +_V2 = TypeVar('_V2') +_W = TypeVar('_W') +_T = TypeVar('_T') + +# All types accepted by the extended function protocol (funcmakers.make_func): +# Callable, int/slice for itemgetter, str/bytes/re.Pattern for regex, +# Mapping for lookup, Set for membership, None for identity/bool +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None 
+_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +### Generic ops + +def empty(coll: _T) -> _T: ... + +def iteritems(coll: Mapping[_K, _V]) -> Iterable[tuple[_K, _V]]: ... +def itervalues(coll: Mapping[Any, _V]) -> Iterable[_V]: ... + +def join(colls: Iterable[_T]) -> _T | None: ... + +@overload +def merge(__coll: _T, *colls: _T) -> _T: ... +@overload +def merge() -> None: ... + +def join_with(f: Callable[[list[_W]], _V], dicts: Iterable[Mapping[_K, _W]], strict: bool = ...) -> dict[_K, _V]: ... +def merge_with(f: Callable[[list[_W]], _V], *dicts: Mapping[_K, _W]) -> dict[_K, _V]: ... + +### Walk / Select + +# dict/MutableMapping/Mapping: typed Callable with pair→pair signature +# (must come before COLLS — dict is Iterable[K], so Iterable overload would shadow) +@overload +def walk(f: Callable[[tuple[_K, _V]], tuple[_K2, _V2]], coll: dict[_K, _V]) -> dict[_K2, _V2]: ... +@overload +def walk(f: Callable[[tuple[_K, _V]], tuple[_K2, _V2]], coll: MutableMapping[_K, _V]) -> MutableMapping[_K2, _V2]: ... +@overload +def walk(f: Callable[[tuple[_K, _V]], tuple[_K2, _V2]], coll: Mapping[_K, _V]) -> Mapping[_K2, _V2]: ... +@overload +def walk(f: _XFunc, coll: dict[Any, Any]) -> dict[Any, Any]: ... +@overload +def walk(f: _XFunc, coll: MutableMapping[Any, Any]) -> MutableMapping[Any, Any]: ... +@overload +def walk(f: _XFunc, coll: Mapping[Any, Any]) -> Mapping[Any, Any]: ... +@overload +def walk(f: Callable[[_T], _V], coll: list[_T]) -> list[_V]: ... +@overload +def walk(f: None, coll: list[_T]) -> list[_T]: ... +@overload +def walk(f: AbstractSet[_T], coll: list[_T]) -> list[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: list[Any]) -> list[_ReResult | None]: ... +@overload +def walk(f: int, coll: list[Sequence[_T]]) -> list[_T]: ... +@overload +def walk(f: slice, coll: list[Sequence[_T]]) -> list[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: list[_T]) -> list[_V]: ... 
+@overload +def walk(f: Callable[[_T], _V], coll: tuple[_T, ...]) -> tuple[_V, ...]: ... +@overload +def walk(f: None, coll: tuple[_T, ...]) -> tuple[_T, ...]: ... +@overload +def walk(f: AbstractSet[_T], coll: tuple[_T, ...]) -> tuple[bool, ...]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: tuple[Any, ...]) -> tuple[_ReResult | None, ...]: ... +@overload +def walk(f: int, coll: tuple[Sequence[_T], ...]) -> tuple[_T, ...]: ... +@overload +def walk(f: slice, coll: tuple[Sequence[_T], ...]) -> tuple[Sequence[_T], ...]: ... +@overload +def walk(f: Mapping[_T, _V], coll: tuple[_T, ...]) -> tuple[_V, ...]: ... +@overload +def walk(f: Callable[[_T], _V], coll: set[_T]) -> set[_V]: ... +@overload +def walk(f: None, coll: set[_T]) -> set[_T]: ... +@overload +def walk(f: AbstractSet[_T], coll: set[_T]) -> set[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: set[Any]) -> set[_ReResult | None]: ... +@overload +def walk(f: int, coll: set[Sequence[_T]]) -> set[_T]: ... +@overload +def walk(f: slice, coll: set[Sequence[_T]]) -> set[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: set[_T]) -> set[_V]: ... +@overload +def walk(f: Callable[[_T], _V], coll: frozenset[_T]) -> frozenset[_V]: ... +@overload +def walk(f: None, coll: frozenset[_T]) -> frozenset[_T]: ... +@overload +def walk(f: AbstractSet[_T], coll: frozenset[_T]) -> frozenset[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: frozenset[Any]) -> frozenset[_ReResult | None]: ... +@overload +def walk(f: int, coll: frozenset[Sequence[_T]]) -> frozenset[_T]: ... +@overload +def walk(f: slice, coll: frozenset[Sequence[_T]]) -> frozenset[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: frozenset[_T]) -> frozenset[_V]: ... +@overload +def walk(f: Callable[[_T], _V], coll: Sequence[_T]) -> Sequence[_V]: ... +@overload +def walk(f: None, coll: Sequence[_T]) -> Sequence[_T]: ... 
+@overload +def walk(f: AbstractSet[_T], coll: Sequence[_T]) -> Sequence[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: Sequence[Any]) -> Sequence[_ReResult | None]: ... +@overload +def walk(f: int, coll: Sequence[Sequence[_T]]) -> Sequence[_T]: ... +@overload +def walk(f: slice, coll: Sequence[Sequence[_T]]) -> Sequence[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: Sequence[_T]) -> Sequence[_V]: ... +@overload +def walk(f: Callable[[_T], _V], coll: Iterator[_T]) -> Iterator[_V]: ... +@overload +def walk(f: None, coll: Iterator[_T]) -> Iterator[_T]: ... +@overload +def walk(f: AbstractSet[_T], coll: Iterator[_T]) -> Iterator[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: Iterator[Any]) -> Iterator[_ReResult | None]: ... +@overload +def walk(f: int, coll: Iterator[Sequence[_T]]) -> Iterator[_T]: ... +@overload +def walk(f: slice, coll: Iterator[Sequence[_T]]) -> Iterator[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: Iterator[_T]) -> Iterator[_V]: ... +@overload +def walk(f: Callable[[_T], _V], coll: Iterable[_T]) -> Iterable[_V]: ... +@overload +def walk(f: None, coll: Iterable[_T]) -> Iterable[_T]: ... +@overload +def walk(f: AbstractSet[_T], coll: Iterable[_T]) -> Iterable[bool]: ... +@overload +def walk(f: str | bytes | re.Pattern[str], coll: Iterable[Any]) -> Iterable[_ReResult | None]: ... +@overload +def walk(f: int, coll: Iterable[Sequence[_T]]) -> Iterable[_T]: ... +@overload +def walk(f: slice, coll: Iterable[Sequence[_T]]) -> Iterable[Sequence[_T]]: ... +@overload +def walk(f: Mapping[_T, _V], coll: Iterable[_T]) -> Iterable[_V]: ... +@overload +def walk(f: _XFunc, coll: _T) -> _T: ... + +# walk_keys: collection of pairs × XFunc +@overload +def walk_keys(f: Callable[[_K], _K2], coll: list[tuple[_K, _V]]) -> list[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: None, coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... 
+@overload +def walk_keys(f: AbstractSet[_K], coll: list[tuple[_K, _V]]) -> list[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: list[tuple[Any, _V]]) -> list[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: list[tuple[Sequence[_T], _V]]) -> list[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: list[tuple[_K, _V]]) -> list[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K2, _V], ...]: ... +@overload +def walk_keys(f: None, coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[bool, _V], ...]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: tuple[tuple[Any, _V], ...]) -> tuple[tuple[_ReResult | None, _V], ...]: ... +@overload +def walk_keys(f: int, coll: tuple[tuple[Sequence[_T], _V], ...]) -> tuple[tuple[_T, _V], ...]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K2, _V], ...]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: set[tuple[_K, _V]]) -> set[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: None, coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: set[tuple[_K, _V]]) -> set[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: set[tuple[Any, _V]]) -> set[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: set[tuple[Sequence[_T], _V]]) -> set[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: set[tuple[_K, _V]]) -> set[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: None, coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... 
+@overload +def walk_keys(f: AbstractSet[_K], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: frozenset[tuple[Any, _V]]) -> frozenset[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: frozenset[tuple[Sequence[_T], _V]]) -> frozenset[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: None, coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: Sequence[tuple[Any, _V]]) -> Sequence[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: Sequence[tuple[Sequence[_T], _V]]) -> Sequence[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: None, coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: Iterator[tuple[Any, _V]]) -> Iterator[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: Iterator[tuple[Sequence[_T], _V]]) -> Iterator[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K2, _V]]: ... 
+@overload +def walk_keys(f: None, coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[bool, _V]]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: Iterable[tuple[Any, _V]]) -> Iterable[tuple[_ReResult | None, _V]]: ... +@overload +def walk_keys(f: int, coll: Iterable[tuple[Sequence[_T], _V]]) -> Iterable[tuple[_T, _V]]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K2, _V]]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: dict[_K, _V]) -> dict[_K2, _V]: ... +@overload +def walk_keys(f: None, coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: dict[_K, _V]) -> dict[bool, _V]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: dict[Any, _V]) -> dict[_ReResult | None, _V]: ... +@overload +def walk_keys(f: int, coll: dict[Sequence[_T], _V]) -> dict[_T, _V]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: dict[_K, _V]) -> dict[_K2, _V]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: MutableMapping[_K, _V]) -> MutableMapping[_K2, _V]: ... +@overload +def walk_keys(f: None, coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def walk_keys(f: AbstractSet[_K], coll: MutableMapping[_K, _V]) -> MutableMapping[bool, _V]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: MutableMapping[Any, _V]) -> MutableMapping[_ReResult | None, _V]: ... +@overload +def walk_keys(f: int, coll: MutableMapping[Sequence[_T], _V]) -> MutableMapping[_T, _V]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: MutableMapping[_K, _V]) -> MutableMapping[_K2, _V]: ... +@overload +def walk_keys(f: Callable[[_K], _K2], coll: Mapping[_K, _V]) -> Mapping[_K2, _V]: ... +@overload +def walk_keys(f: None, coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... 
+@overload +def walk_keys(f: AbstractSet[_K], coll: Mapping[_K, _V]) -> Mapping[bool, _V]: ... +@overload +def walk_keys(f: str | bytes | re.Pattern[str], coll: Mapping[Any, _V]) -> Mapping[_ReResult | None, _V]: ... +@overload +def walk_keys(f: int, coll: Mapping[Sequence[_T], _V]) -> Mapping[_T, _V]: ... +@overload +def walk_keys(f: Mapping[_K, _K2], coll: Mapping[_K, _V]) -> Mapping[_K2, _V]: ... + +@overload +def walk_values(f: Callable[[_V], _V2], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: list[tuple[_K, _V]]) -> list[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: list[tuple[_K, Any]]) -> list[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: list[tuple[_K, Sequence[_T]]]) -> list[tuple[_K, _T]]: ... +@overload +def walk_values(f: slice, coll: list[tuple[_K, Sequence[_T]]]) -> list[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V2], ...]: ... +@overload +def walk_values(f: None, coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, bool], ...]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: tuple[tuple[_K, Any], ...]) -> tuple[tuple[_K, _ReResult | None], ...]: ... +@overload +def walk_values(f: int, coll: tuple[tuple[_K, Sequence[_T]], ...]) -> tuple[tuple[_K, _T], ...]: ... +@overload +def walk_values(f: slice, coll: tuple[tuple[_K, Sequence[_T]], ...]) -> tuple[tuple[_K, Sequence[_T]], ...]: ... 
+@overload +def walk_values(f: Mapping[_V, _V2], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V2], ...]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: set[tuple[_K, _V]]) -> set[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: set[tuple[_K, Any]]) -> set[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: set[tuple[_K, Sequence[_T]]]) -> set[tuple[_K, _T]]: ... +@overload +def walk_values(f: slice, coll: set[tuple[_K, Sequence[_T]]]) -> set[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: frozenset[tuple[_K, Any]]) -> frozenset[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: frozenset[tuple[_K, Sequence[_T]]]) -> frozenset[tuple[_K, _T]]: ... +@overload +def walk_values(f: slice, coll: frozenset[tuple[_K, Sequence[_T]]]) -> frozenset[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... 
+@overload +def walk_values(f: AbstractSet[_V], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: Sequence[tuple[_K, Any]]) -> Sequence[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: Sequence[tuple[_K, Sequence[_T]]]) -> Sequence[tuple[_K, _T]]: ... +@overload +def walk_values(f: slice, coll: Sequence[tuple[_K, Sequence[_T]]]) -> Sequence[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: Iterator[tuple[_K, Any]]) -> Iterator[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: Iterator[tuple[_K, Sequence[_T]]]) -> Iterator[tuple[_K, _T]]: ... +@overload +def walk_values(f: slice, coll: Iterator[tuple[_K, Sequence[_T]]]) -> Iterator[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V2]]: ... +@overload +def walk_values(f: None, coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, bool]]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: Iterable[tuple[_K, Any]]) -> Iterable[tuple[_K, _ReResult | None]]: ... +@overload +def walk_values(f: int, coll: Iterable[tuple[_K, Sequence[_T]]]) -> Iterable[tuple[_K, _T]]: ... 
+@overload +def walk_values(f: slice, coll: Iterable[tuple[_K, Sequence[_T]]]) -> Iterable[tuple[_K, Sequence[_T]]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V2]]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: dict[_K, _V]) -> dict[_K, _V2]: ... +@overload +def walk_values(f: None, coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: dict[_K, _V]) -> dict[_K, bool]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: dict[_K, Any]) -> dict[_K, _ReResult | None]: ... +@overload +def walk_values(f: int, coll: dict[_K, Sequence[_T]]) -> dict[_K, _T]: ... +@overload +def walk_values(f: slice, coll: dict[_K, Sequence[_T]]) -> dict[_K, Sequence[_T]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: dict[_K, _V]) -> dict[_K, _V2]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V2]: ... +@overload +def walk_values(f: None, coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, bool]: ... +@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: MutableMapping[_K, Any]) -> MutableMapping[_K, _ReResult | None]: ... +@overload +def walk_values(f: int, coll: MutableMapping[_K, Sequence[_T]]) -> MutableMapping[_K, _T]: ... +@overload +def walk_values(f: slice, coll: MutableMapping[_K, Sequence[_T]]) -> MutableMapping[_K, Sequence[_T]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V2]: ... +@overload +def walk_values(f: Callable[[_V], _V2], coll: Mapping[_K, _V]) -> Mapping[_K, _V2]: ... +@overload +def walk_values(f: None, coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def walk_values(f: AbstractSet[_V], coll: Mapping[_K, _V]) -> Mapping[_K, bool]: ... 
+@overload +def walk_values(f: str | bytes | re.Pattern[str], coll: Mapping[_K, Any]) -> Mapping[_K, _ReResult | None]: ... +@overload +def walk_values(f: int, coll: Mapping[_K, Sequence[_T]]) -> Mapping[_K, _T]: ... +@overload +def walk_values(f: slice, coll: Mapping[_K, Sequence[_T]]) -> Mapping[_K, Sequence[_T]]: ... +@overload +def walk_values(f: Mapping[_V, _V2], coll: Mapping[_K, _V]) -> Mapping[_K, _V2]: ... + +# dict/Mapping: pred receives (key, value) pairs, only Callable is meaningful +# (must come before COLLS — dict is Iterable[K], so Iterable overload would shadow these) +@overload +def select(pred: Callable[[tuple[_K, _V]], Any], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select(pred: Callable[[tuple[_K, _V]], Any], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select(pred: Callable[[tuple[_K, _V]], Any], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select(pred: _XFunc, coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select(pred: _XFunc, coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select(pred: _XFunc, coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select(pred: Callable[[_T], Any], coll: list[_T]) -> list[_T]: ... +@overload +def select(pred: None, coll: list[_T]) -> list[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: list[_T]) -> list[_T]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: list[str]) -> list[str]: ... +@overload +def select(pred: int, coll: list[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: list[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: list[_T]) -> list[_T]: ... +@overload +def select(pred: Callable[[_T], Any], coll: tuple[_T, ...]) -> tuple[_T, ...]: ... +@overload +def select(pred: None, coll: tuple[_T, ...]) -> tuple[_T, ...]: ... 
+@overload +def select(pred: AbstractSet[_T], coll: tuple[_T, ...]) -> tuple[_T, ...]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: tuple[str, ...]) -> tuple[str, ...]: ... +@overload +def select(pred: int, coll: tuple[Sequence[Any], ...]) -> tuple[Sequence[Any], ...]: ... +@overload +def select(pred: slice, coll: tuple[Sequence[Any], ...]) -> tuple[Sequence[Any], ...]: ... +@overload +def select(pred: Mapping[_T, Any], coll: tuple[_T, ...]) -> tuple[_T, ...]: ... +@overload +def select(pred: Callable[[_T], Any], coll: set[_T]) -> set[_T]: ... +@overload +def select(pred: None, coll: set[_T]) -> set[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: set[_T]) -> set[_T]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: set[str]) -> set[str]: ... +@overload +def select(pred: int, coll: set[Sequence[Any]]) -> set[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: set[Sequence[Any]]) -> set[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: set[_T]) -> set[_T]: ... +@overload +def select(pred: Callable[[_T], Any], coll: frozenset[_T]) -> frozenset[_T]: ... +@overload +def select(pred: None, coll: frozenset[_T]) -> frozenset[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: frozenset[_T]) -> frozenset[_T]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: frozenset[str]) -> frozenset[str]: ... +@overload +def select(pred: int, coll: frozenset[Sequence[Any]]) -> frozenset[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: frozenset[Sequence[Any]]) -> frozenset[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: frozenset[_T]) -> frozenset[_T]: ... +@overload +def select(pred: Callable[[_T], Any], coll: Sequence[_T]) -> Sequence[_T]: ... +@overload +def select(pred: None, coll: Sequence[_T]) -> Sequence[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: Sequence[_T]) -> Sequence[_T]: ... 
+@overload +def select(pred: str | bytes | re.Pattern[str], coll: Sequence[str]) -> Sequence[str]: ... +@overload +def select(pred: int, coll: Sequence[Sequence[Any]]) -> Sequence[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: Sequence[Sequence[Any]]) -> Sequence[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: Sequence[_T]) -> Sequence[_T]: ... +@overload +def select(pred: Callable[[_T], Any], coll: Iterator[_T]) -> Iterator[_T]: ... +@overload +def select(pred: None, coll: Iterator[_T]) -> Iterator[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: Iterator[_T]) -> Iterator[_T]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: Iterator[str]) -> Iterator[str]: ... +@overload +def select(pred: int, coll: Iterator[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: Iterator[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: Iterator[_T]) -> Iterator[_T]: ... +@overload +def select(pred: Callable[[_T], Any], coll: Iterable[_T]) -> Iterable[_T]: ... +@overload +def select(pred: None, coll: Iterable[_T]) -> Iterable[_T]: ... +@overload +def select(pred: AbstractSet[_T], coll: Iterable[_T]) -> Iterable[_T]: ... +@overload +def select(pred: str | bytes | re.Pattern[str], coll: Iterable[str]) -> Iterable[str]: ... +@overload +def select(pred: int, coll: Iterable[Sequence[Any]]) -> Iterable[Sequence[Any]]: ... +@overload +def select(pred: slice, coll: Iterable[Sequence[Any]]) -> Iterable[Sequence[Any]]: ... +@overload +def select(pred: Mapping[_T, Any], coll: Iterable[_T]) -> Iterable[_T]: ... +@overload +def select(pred: _XFunc, coll: _T) -> _T: ... +# select_keys: collection of pairs × XPred +@overload +def select_keys(pred: Callable[[_K], Any], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... 
+@overload +def select_keys(pred: AbstractSet[_K], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: list[tuple[str, _V]]) -> list[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: list[tuple[Sequence[Any], _V]]) -> list[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: slice, coll: list[tuple[Sequence[Any], _V]]) -> list[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_keys(pred: None, coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: tuple[tuple[str, _V], ...]) -> tuple[tuple[str, _V], ...]: ... +@overload +def select_keys(pred: int, coll: tuple[tuple[Sequence[Any], _V], ...]) -> tuple[tuple[Sequence[Any], _V], ...]: ... +@overload +def select_keys(pred: slice, coll: tuple[tuple[Sequence[Any], _V], ...]) -> tuple[tuple[Sequence[Any], _V], ...]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: set[tuple[str, _V]]) -> set[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: set[tuple[Sequence[Any], _V]]) -> set[tuple[Sequence[Any], _V]]: ... 
+@overload +def select_keys(pred: slice, coll: set[tuple[Sequence[Any], _V]]) -> set[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: frozenset[tuple[str, _V]]) -> frozenset[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: frozenset[tuple[Sequence[Any], _V]]) -> frozenset[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: slice, coll: frozenset[tuple[Sequence[Any], _V]]) -> frozenset[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: Sequence[tuple[str, _V]]) -> Sequence[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: Sequence[tuple[Sequence[Any], _V]]) -> Sequence[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: slice, coll: Sequence[tuple[Sequence[Any], _V]]) -> Sequence[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... 
+@overload +def select_keys(pred: Callable[[_K], Any], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: Iterator[tuple[str, _V]]) -> Iterator[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: Iterator[tuple[Sequence[Any], _V]]) -> Iterator[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: slice, coll: Iterator[tuple[Sequence[Any], _V]]) -> Iterator[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_keys(pred: None, coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: Iterable[tuple[str, _V]]) -> Iterable[tuple[str, _V]]: ... +@overload +def select_keys(pred: int, coll: Iterable[tuple[Sequence[Any], _V]]) -> Iterable[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: slice, coll: Iterable[tuple[Sequence[Any], _V]]) -> Iterable[tuple[Sequence[Any], _V]]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_keys(pred: None, coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: dict[_K, _V]) -> dict[_K, _V]: ... 
+@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: dict[str, _V]) -> dict[str, _V]: ... +@overload +def select_keys(pred: int, coll: dict[Sequence[Any], _V]) -> dict[Sequence[Any], _V]: ... +@overload +def select_keys(pred: slice, coll: dict[Sequence[Any], _V]) -> dict[Sequence[Any], _V]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_keys(pred: None, coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: MutableMapping[str, _V]) -> MutableMapping[str, _V]: ... +@overload +def select_keys(pred: int, coll: MutableMapping[Sequence[Any], _V]) -> MutableMapping[Sequence[Any], _V]: ... +@overload +def select_keys(pred: slice, coll: MutableMapping[Sequence[Any], _V]) -> MutableMapping[Sequence[Any], _V]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_keys(pred: Callable[[_K], Any], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select_keys(pred: None, coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select_keys(pred: AbstractSet[_K], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select_keys(pred: str | bytes | re.Pattern[str], coll: Mapping[str, _V]) -> Mapping[str, _V]: ... +@overload +def select_keys(pred: int, coll: Mapping[Sequence[Any], _V]) -> Mapping[Sequence[Any], _V]: ... +@overload +def select_keys(pred: slice, coll: Mapping[Sequence[Any], _V]) -> Mapping[Sequence[Any], _V]: ... +@overload +def select_keys(pred: Mapping[_K, Any], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... 
+ +# select_values: collection of pairs × XPred +@overload +def select_values(pred: Callable[[_V], Any], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: list[tuple[_K, str]]) -> list[tuple[_K, str]]: ... +@overload +def select_values(pred: int, coll: list[tuple[_K, Sequence[Any]]]) -> list[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: list[tuple[_K, Sequence[Any]]]) -> list[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: list[tuple[_K, _V]]) -> list[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_values(pred: None, coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: tuple[tuple[_K, str], ...]) -> tuple[tuple[_K, str], ...]: ... +@overload +def select_values(pred: int, coll: tuple[tuple[_K, Sequence[Any]], ...]) -> tuple[tuple[_K, Sequence[Any]], ...]: ... +@overload +def select_values(pred: slice, coll: tuple[tuple[_K, Sequence[Any]], ...]) -> tuple[tuple[_K, Sequence[Any]], ...]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_K, _V], ...]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... 
+@overload +def select_values(pred: AbstractSet[_V], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: set[tuple[_K, str]]) -> set[tuple[_K, str]]: ... +@overload +def select_values(pred: int, coll: set[tuple[_K, Sequence[Any]]]) -> set[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: set[tuple[_K, Sequence[Any]]]) -> set[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: set[tuple[_K, _V]]) -> set[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: frozenset[tuple[_K, str]]) -> frozenset[tuple[_K, str]]: ... +@overload +def select_values(pred: int, coll: frozenset[tuple[_K, Sequence[Any]]]) -> frozenset[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: frozenset[tuple[_K, Sequence[Any]]]) -> frozenset[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: Sequence[tuple[_K, str]]) -> Sequence[tuple[_K, str]]: ... 
+@overload +def select_values(pred: int, coll: Sequence[tuple[_K, Sequence[Any]]]) -> Sequence[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: Sequence[tuple[_K, Sequence[Any]]]) -> Sequence[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: Iterator[tuple[_K, str]]) -> Iterator[tuple[_K, str]]: ... +@overload +def select_values(pred: int, coll: Iterator[tuple[_K, Sequence[Any]]]) -> Iterator[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: Iterator[tuple[_K, Sequence[Any]]]) -> Iterator[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_values(pred: None, coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: Iterable[tuple[_K, str]]) -> Iterable[tuple[_K, str]]: ... +@overload +def select_values(pred: int, coll: Iterable[tuple[_K, Sequence[Any]]]) -> Iterable[tuple[_K, Sequence[Any]]]: ... +@overload +def select_values(pred: slice, coll: Iterable[tuple[_K, Sequence[Any]]]) -> Iterable[tuple[_K, Sequence[Any]]]: ... 
+@overload +def select_values(pred: Mapping[_V, Any], coll: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_K, _V]]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_values(pred: None, coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: dict[_K, str]) -> dict[_K, str]: ... +@overload +def select_values(pred: int, coll: dict[_K, Sequence[Any]]) -> dict[_K, Sequence[Any]]: ... +@overload +def select_values(pred: slice, coll: dict[_K, Sequence[Any]]) -> dict[_K, Sequence[Any]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_values(pred: None, coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: MutableMapping[_K, str]) -> MutableMapping[_K, str]: ... +@overload +def select_values(pred: int, coll: MutableMapping[_K, Sequence[Any]]) -> MutableMapping[_K, Sequence[Any]]: ... +@overload +def select_values(pred: slice, coll: MutableMapping[_K, Sequence[Any]]) -> MutableMapping[_K, Sequence[Any]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... +@overload +def select_values(pred: Callable[[_V], Any], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select_values(pred: None, coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def select_values(pred: AbstractSet[_V], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... 
+@overload +def select_values(pred: str | bytes | re.Pattern[str], coll: Mapping[_K, str]) -> Mapping[_K, str]: ... +@overload +def select_values(pred: int, coll: Mapping[_K, Sequence[Any]]) -> Mapping[_K, Sequence[Any]]: ... +@overload +def select_values(pred: slice, coll: Mapping[_K, Sequence[Any]]) -> Mapping[_K, Sequence[Any]]: ... +@overload +def select_values(pred: Mapping[_V, Any], coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... + +@overload +def split_keys(pred: Callable[[_K], Any], coll: Mapping[_K, _V]) -> tuple[dict[_K, _V], dict[_K, _V]]: ... +@overload +def split_keys(pred: None, coll: Mapping[_K, _V]) -> tuple[dict[_K, _V], dict[_K, _V]]: ... +@overload +def split_keys(pred: AbstractSet[_K], coll: Mapping[_K, _V]) -> tuple[dict[_K, _V], dict[_K, _V]]: ... +@overload +def split_keys(pred: str | bytes | re.Pattern[str], coll: Mapping[str, _V]) -> tuple[dict[str, _V], dict[str, _V]]: ... +@overload +def split_keys(pred: int, coll: Mapping[Sequence[Any], _V]) -> tuple[dict[Sequence[Any], _V], dict[Sequence[Any], _V]]: ... +@overload +def split_keys(pred: slice, coll: Mapping[Sequence[Any], _V]) -> tuple[dict[Sequence[Any], _V], dict[Sequence[Any], _V]]: ... +@overload +def split_keys(pred: Mapping[_K, Any], coll: Mapping[_K, _V]) -> tuple[dict[_K, _V], dict[_K, _V]]: ... + +# Strips None from element/value type. Concrete (invariant) types are grouped since they +# can't steal from each other. Covariant types (Mapping, Sequence, etc.) need _T right +# after _T|None so concrete types' _T catches non-None inputs first. +# Maps before colls — dict is Iterable[K], so Iterable overload would shadow. +@overload +def compact(coll: dict[_K, _V | None]) -> dict[_K, _V]: ... +@overload +def compact(coll: MutableMapping[_K, _V | None]) -> MutableMapping[_K, _V]: ... +@overload +def compact(coll: dict[_K, _V]) -> dict[_K, _V]: ... +@overload +def compact(coll: MutableMapping[_K, _V]) -> MutableMapping[_K, _V]: ... 
+@overload +def compact(coll: Mapping[_K, _V | None]) -> Mapping[_K, _V]: ... +@overload +def compact(coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +@overload +def compact(coll: list[_T | None]) -> list[_T]: ... +@overload +def compact(coll: tuple[_T | None, ...]) -> tuple[_T, ...]: ... +@overload +def compact(coll: set[_T | None]) -> set[_T]: ... +@overload +def compact(coll: frozenset[_T | None]) -> frozenset[_T]: ... +@overload +def compact(coll: list[_T]) -> list[_T]: ... +@overload +def compact(coll: set[_T]) -> set[_T]: ... +@overload +def compact(coll: Sequence[_T | None]) -> Sequence[_T]: ... +@overload +def compact(coll: Iterator[_T | None]) -> Iterator[_T]: ... +@overload +def compact(coll: Iterable[_T | None]) -> Iterable[_T]: ... +@overload +def compact(coll: _T) -> _T: ... + +### Content tests + +@overload +def is_distinct(coll: Iterable[Hashable]) -> bool: ... +@overload +def is_distinct(coll: Iterable[_T], key: Callable[[_T], Hashable]) -> bool: ... +@overload +def is_distinct(coll: Iterable[_T], key: None) -> bool: ... +@overload +def is_distinct(coll: Iterable[_T], key: AbstractSet[_T]) -> bool: ... +@overload +def is_distinct(coll: Iterable[Any], key: str | bytes | re.Pattern[str]) -> bool: ... +@overload +def is_distinct(coll: Iterable[Sequence[_T]], key: int) -> bool: ... +@overload +def is_distinct(coll: Iterable[Sequence[_T]], key: slice) -> bool: ... +@overload +def is_distinct(coll: Iterable[_T], key: Mapping[_T, Hashable]) -> bool: ... + +@overload +def all(seq: Iterable[Any]) -> bool: ... +@overload +def all(pred: Callable[[_T], Any], seq: Iterable[_T]) -> bool: ... +@overload +def all(pred: None, seq: Iterable[_T]) -> bool: ... +@overload +def all(pred: AbstractSet[_T], seq: Iterable[_T]) -> bool: ... +@overload +def all(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> bool: ... +@overload +def all(pred: int, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def all(pred: slice, seq: Iterable[Sequence[Any]]) -> bool: ... 
+@overload +def all(pred: Mapping[_T, Any], seq: Iterable[_T]) -> bool: ... + +@overload +def any(seq: Iterable[Any]) -> bool: ... +@overload +def any(pred: Callable[[_T], Any], seq: Iterable[_T]) -> bool: ... +@overload +def any(pred: None, seq: Iterable[_T]) -> bool: ... +@overload +def any(pred: AbstractSet[_T], seq: Iterable[_T]) -> bool: ... +@overload +def any(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> bool: ... +@overload +def any(pred: int, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def any(pred: slice, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def any(pred: Mapping[_T, Any], seq: Iterable[_T]) -> bool: ... + +@overload +def none(seq: Iterable[Any]) -> bool: ... +@overload +def none(pred: Callable[[_T], Any], seq: Iterable[_T]) -> bool: ... +@overload +def none(pred: None, seq: Iterable[_T]) -> bool: ... +@overload +def none(pred: AbstractSet[_T], seq: Iterable[_T]) -> bool: ... +@overload +def none(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> bool: ... +@overload +def none(pred: int, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def none(pred: slice, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def none(pred: Mapping[_T, Any], seq: Iterable[_T]) -> bool: ... + +@overload +def one(seq: Iterable[Any]) -> bool: ... +@overload +def one(pred: Callable[[_T], Any], seq: Iterable[_T]) -> bool: ... +@overload +def one(pred: None, seq: Iterable[_T]) -> bool: ... +@overload +def one(pred: AbstractSet[_T], seq: Iterable[_T]) -> bool: ... +@overload +def one(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> bool: ... +@overload +def one(pred: int, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def one(pred: slice, seq: Iterable[Sequence[Any]]) -> bool: ... +@overload +def one(pred: Mapping[_T, Any], seq: Iterable[_T]) -> bool: ... + +@overload +def some(seq: Iterable[_T]) -> _T | None: ... +@overload +def some(pred: Callable[[_T], Any], seq: Iterable[_T]) -> _T | None: ... 
+@overload +def some(pred: None, seq: Iterable[_T]) -> _T | None: ... +@overload +def some(pred: AbstractSet[_T], seq: Iterable[_T]) -> _T | None: ... +@overload +def some(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> str | None: ... +@overload +def some(pred: int, seq: Iterable[Sequence[Any]]) -> Sequence[Any] | None: ... +@overload +def some(pred: slice, seq: Iterable[Sequence[Any]]) -> Sequence[Any] | None: ... +@overload +def some(pred: Mapping[_T, Any], seq: Iterable[_T]) -> _T | None: ... + +### Dict utilities + +def zipdict(keys: Iterable[_K], vals: Iterable[_V]) -> dict[_K, _V]: ... +@overload +def flip(mapping: dict[_K, _V]) -> dict[_V, _K]: ... +@overload +def flip(mapping: MutableMapping[_K, _V]) -> MutableMapping[_V, _K]: ... +@overload +def flip(mapping: Mapping[_K, _V]) -> Mapping[_V, _K]: ... +@overload +def flip(mapping: list[tuple[_K, _V]]) -> list[tuple[_V, _K]]: ... +@overload +def flip(mapping: tuple[tuple[_K, _V], ...]) -> tuple[tuple[_V, _K], ...]: ... +@overload +def flip(mapping: set[tuple[_K, _V]]) -> set[tuple[_V, _K]]: ... +@overload +def flip(mapping: frozenset[tuple[_K, _V]]) -> frozenset[tuple[_V, _K]]: ... +@overload +def flip(mapping: Sequence[tuple[_K, _V]]) -> Sequence[tuple[_V, _K]]: ... +@overload +def flip(mapping: Iterator[tuple[_K, _V]]) -> Iterator[tuple[_V, _K]]: ... +@overload +def flip(mapping: Iterable[tuple[_K, _V]]) -> Iterable[tuple[_V, _K]]: ... +@overload +def project(mapping: dict[_K, _V], keys: Iterable[_K]) -> dict[_K, _V]: ... +@overload +def project(mapping: MutableMapping[_K, _V], keys: Iterable[_K]) -> MutableMapping[_K, _V]: ... +@overload +def project(mapping: Mapping[_K, _V], keys: Iterable[_K]) -> Mapping[_K, _V]: ... +@overload +def omit(mapping: dict[_K, _V], keys: Iterable[_K]) -> dict[_K, _V]: ... +@overload +def omit(mapping: MutableMapping[_K, _V], keys: Iterable[_K]) -> MutableMapping[_K, _V]: ... 
+@overload +def omit(mapping: Mapping[_K, _V], keys: Iterable[_K]) -> Mapping[_K, _V]: ... + +def zip_values(*dicts: Mapping[_K, _V]) -> Iterator[tuple[_V, ...]]: ... +def zip_dicts(*dicts: Mapping[_K, _V]) -> Iterator[tuple[_K, tuple[_V, ...]]]: ... + +### Nested access + +def get_in(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any], default: Any = ...) -> Any: ... +def get_lax(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any], default: Any = ...) -> Any: ... +@overload +def set_in(coll: dict[_K, _V], path: Sequence[Any], value: Any) -> dict[_K, _V]: ... +@overload +def set_in(coll: MutableMapping[_K, _V], path: Sequence[Any], value: Any) -> MutableMapping[_K, _V]: ... +@overload +def set_in(coll: _T, path: Sequence[Any], value: Any) -> _T: ... +@overload +def update_in(coll: dict[_K, _V], path: Sequence[Any], update: Callable[[Any], Any], default: Any = ...) -> dict[_K, _V]: ... +@overload +def update_in(coll: MutableMapping[_K, _V], path: Sequence[Any], update: Callable[[Any], Any], default: Any = ...) -> MutableMapping[_K, _V]: ... +@overload +def update_in(coll: _T, path: Sequence[Any], update: Callable[[Any], Any], default: Any = ...) -> _T: ... +@overload +def del_in(coll: dict[_K, _V], path: Sequence[Any]) -> dict[_K, _V]: ... +@overload +def del_in(coll: MutableMapping[_K, _V], path: Sequence[Any]) -> MutableMapping[_K, _V]: ... +@overload +def del_in(coll: _T, path: Sequence[Any]) -> _T: ... +def has_path(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any]) -> bool: ... + +### Query + +_M = TypeVar('_M', bound=Mapping[Any, Any]) + +def where(mappings: Iterable[_M], **cond: Any) -> Iterator[_M]: ... +def pluck(key: Any, mappings: Iterable[Mapping[_K, _V]]) -> Iterator[_V]: ... +def pluck_attr(attr: str, objects: Iterable[Any]) -> Iterator[Any]: ... +def invoke(objects: Iterable[Any], name: str, *args: Any, **kwargs: Any) -> Iterator[Any]: ... + +def lwhere(mappings: Iterable[_M], **cond: Any) -> list[_M]: ... 
+def lpluck(key: Any, mappings: Iterable[Mapping[_K, _V]]) -> list[_V]: ... +def lpluck_attr(attr: str, objects: Iterable[Any]) -> list[Any]: ... +def linvoke(objects: Iterable[Any], name: str, *args: Any, **kwargs: Any) -> list[Any]: ... diff --git a/funcy/colls.pyih b/funcy/colls.pyih new file mode 100644 index 0000000..e444ccc --- /dev/null +++ b/funcy/colls.pyih @@ -0,0 +1,153 @@ +import re +from collections.abc import Callable, Hashable, Iterable, Iterator, Mapping, MutableMapping, Set as AbstractSet, Sequence +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ['empty', 'iteritems', 'itervalues', + 'join', 'merge', 'join_with', 'merge_with', + 'walk', 'walk_keys', 'walk_values', 'select', 'select_keys', 'select_values', + 'split_keys', 'compact', + 'is_distinct', 'all', 'any', 'none', 'one', 'some', + 'zipdict', 'flip', 'project', 'omit', 'zip_values', 'zip_dicts', + 'where', 'pluck', 'pluck_attr', 'invoke', 'lwhere', 'lpluck', 'lpluck_attr', 'linvoke', + 'get_in', 'get_lax', 'set_in', 'update_in', 'del_in', 'has_path'] + +_K = TypeVar('_K') +_K2 = TypeVar('_K2') +_V = TypeVar('_V') +_V2 = TypeVar('_V2') +_W = TypeVar('_W') +_T = TypeVar('_T') + +# All types accepted by the extended function protocol (funcmakers.make_func): +# Callable, int/slice for itemgetter, str/bytes/re.Pattern for regex, +# Mapping for lookup, Set for membership, None for identity/bool +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +### Generic ops + +def empty(coll: _T) -> _T: ... + +def iteritems(coll: Mapping[_K, _V]) -> Iterable[tuple[_K, _V]]: ... +def itervalues(coll: Mapping[Any, _V]) -> Iterable[_V]: ... + +def join(colls: Iterable[_T]) -> _T | None: ... + +def merge(__coll: _T, *colls: _T) -> _T: ... +def merge() -> None: ... + +def join_with(f: Callable[[list[_W]], _V], dicts: Iterable[Mapping[_K, _W]], strict: bool = ...) 
-> dict[_K, _V]: ... +def merge_with(f: Callable[[list[_W]], _V], *dicts: Mapping[_K, _W]) -> dict[_K, _V]: ... + +### Walk / Select + +# dict/MutableMapping/Mapping: typed Callable with pair→pair signature +# (must come before COLLS — dict is Iterable[K], so Iterable overload would shadow) +def walk[C: MAPS](f: Callable[[tuple[_K, _V]], tuple[_K2, _V2]], coll: C[_K, _V]) -> C[_K2, _V2]: ... +# catch-alls for _XFunc and other collection types +def walk[C: MAPS](f: _XFunc, coll: C[Any, Any]) -> C[Any, Any]: ... +# 1-param collections × XFunc (quadratic expansion) +def walk[C: COLLS](f: XFunc[[_T], _V], coll: C[_T]) -> C[_V]: ... +# catch-all for other collection types +def walk(f: _XFunc, coll: _T) -> _T: ... + +# walk_keys: collection of pairs × XFunc +# xfunc_skip: slice +def walk_keys[C: COLLS](f: XFunc[[_K], _K2], coll: C[tuple[_K, _V]]) -> C[tuple[_K2, _V]]: ... +# xfunc_skip: slice +def walk_keys[C: MAPS](f: XFunc[[_K], _K2], coll: C[_K, _V]) -> C[_K2, _V]: ... + +def walk_values[C: COLLS](f: XFunc[[_V], _V2], coll: C[tuple[_K, _V]]) -> C[tuple[_K, _V2]]: ... +def walk_values[C: MAPS](f: XFunc[[_V], _V2], coll: C[_K, _V]) -> C[_K, _V2]: ... + +# dict/Mapping: pred receives (key, value) pairs, only Callable is meaningful +# (must come before COLLS — dict is Iterable[K], so Iterable overload would shadow these) +def select[C: MAPS](pred: Callable[[tuple[_K, _V]], Any], coll: C[_K, _V]) -> C[_K, _V]: ... +# dict/Mapping: catch-all for _XFunc preds +def select[C: MAPS](pred: _XFunc, coll: C[_K, _V]) -> C[_K, _V]: ... +# 1-param collections × XPred +def select[C: COLLS](pred: XPred[_T], coll: C[_T]) -> C[_T]: ... +# catch-all for other collection types +def select(pred: _XFunc, coll: _T) -> _T: ... +# select_keys: collection of pairs × XPred +def select_keys[C: COLLS](pred: XPred[_K], coll: C[tuple[_K, _V]]) -> C[tuple[_K, _V]]: ... +# select_keys: dict/MutableMapping/Mapping +def select_keys[C: MAPS](pred: XPred[_K], coll: C[_K, _V]) -> C[_K, _V]: ... 
+ +# select_values: collection of pairs × XPred +def select_values[C: COLLS](pred: XPred[_V], coll: C[tuple[_K, _V]]) -> C[tuple[_K, _V]]: ... +# select_values: dict/MutableMapping/Mapping +def select_values[C: MAPS](pred: XPred[_V], coll: C[_K, _V]) -> C[_K, _V]: ... + +def split_keys(pred: XPred[_K], coll: Mapping[_K, _V]) -> tuple[dict[_K, _V], dict[_K, _V]]: ... + +# Strips None from element/value type. Concrete (invariant) types are grouped since they +# can't steal from each other. Covariant types (Mapping, Sequence, etc.) need _T right +# after _T|None so concrete types' _T catches non-None inputs first. +# Maps before colls — dict is Iterable[K], so Iterable overload would shadow. +def compact[C: (dict, MutableMapping)](coll: C[_K, _V | None]) -> C[_K, _V]: ... +def compact[C: (dict, MutableMapping)](coll: C[_K, _V]) -> C[_K, _V]: ... +def compact(coll: Mapping[_K, _V | None]) -> Mapping[_K, _V]: ... +def compact(coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ... +def compact[C: (list, tuple, set, frozenset)](coll: C[_T | None]) -> C[_T]: ... +# Covariant types: _T variant not needed — C[int] already matches C[_T | None] via covariance +def compact[C: (list, set)](coll: C[_T]) -> C[_T]: ... +def compact[C: (Sequence, Iterator, Iterable)](coll: C[_T | None]) -> C[_T]: ... +def compact(coll: _T) -> _T: ... + +### Content tests + +def is_distinct(coll: Iterable[Hashable]) -> bool: ... +def is_distinct(coll: Iterable[_T], key: XFunc[[_T], Hashable]) -> bool: ... + +def all(seq: Iterable[Any]) -> bool: ... +def all(pred: XPred[_T], seq: Iterable[_T]) -> bool: ... + +def any(seq: Iterable[Any]) -> bool: ... +def any(pred: XPred[_T], seq: Iterable[_T]) -> bool: ... + +def none(seq: Iterable[Any]) -> bool: ... +def none(pred: XPred[_T], seq: Iterable[_T]) -> bool: ... + +def one(seq: Iterable[Any]) -> bool: ... +def one(pred: XPred[_T], seq: Iterable[_T]) -> bool: ... + +def some(seq: Iterable[_T]) -> _T | None: ... 
+def some(pred: XPred[_T], seq: Iterable[_T]) -> _T | None: ... + +### Dict utilities + +def zipdict(keys: Iterable[_K], vals: Iterable[_V]) -> dict[_K, _V]: ... +def flip[C: MAPS](mapping: C[_K, _V]) -> C[_V, _K]: ... +def flip[C: COLLS](mapping: C[tuple[_K, _V]]) -> C[tuple[_V, _K]]: ... +def project[C: MAPS](mapping: C[_K, _V], keys: Iterable[_K]) -> C[_K, _V]: ... +def omit[C: MAPS](mapping: C[_K, _V], keys: Iterable[_K]) -> C[_K, _V]: ... + +def zip_values(*dicts: Mapping[_K, _V]) -> Iterator[tuple[_V, ...]]: ... +def zip_dicts(*dicts: Mapping[_K, _V]) -> Iterator[tuple[_K, tuple[_V, ...]]]: ... + +### Nested access + +def get_in(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any], default: Any = ...) -> Any: ... +def get_lax(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any], default: Any = ...) -> Any: ... +def set_in[C: MUT_MAPS](coll: C[_K, _V], path: Sequence[Any], value: Any) -> C[_K, _V]: ... +def set_in(coll: _T, path: Sequence[Any], value: Any) -> _T: ... +def update_in[C: MUT_MAPS](coll: C[_K, _V], path: Sequence[Any], update: Callable[[Any], Any], default: Any = ...) -> C[_K, _V]: ... +def update_in(coll: _T, path: Sequence[Any], update: Callable[[Any], Any], default: Any = ...) -> _T: ... +def del_in[C: MUT_MAPS](coll: C[_K, _V], path: Sequence[Any]) -> C[_K, _V]: ... +def del_in(coll: _T, path: Sequence[Any]) -> _T: ... +def has_path(coll: Mapping[Any, Any] | Sequence[Any], path: Iterable[Any]) -> bool: ... + +### Query + +_M = TypeVar('_M', bound=Mapping[Any, Any]) + +def where(mappings: Iterable[_M], **cond: Any) -> Iterator[_M]: ... +def pluck(key: Any, mappings: Iterable[Mapping[_K, _V]]) -> Iterator[_V]: ... +def pluck_attr(attr: str, objects: Iterable[Any]) -> Iterator[Any]: ... +def invoke(objects: Iterable[Any], name: str, *args: Any, **kwargs: Any) -> Iterator[Any]: ... + +def lwhere(mappings: Iterable[_M], **cond: Any) -> list[_M]: ... +def lpluck(key: Any, mappings: Iterable[Mapping[_K, _V]]) -> list[_V]: ... 
+def lpluck_attr(attr: str, objects: Iterable[Any]) -> list[Any]: ... +def linvoke(objects: Iterable[Any], name: str, *args: Any, **kwargs: Any) -> list[Any]: ... diff --git a/funcy/debug.pyi b/funcy/debug.pyi new file mode 100644 index 0000000..ea940f7 --- /dev/null +++ b/funcy/debug.pyi @@ -0,0 +1,67 @@ +from collections.abc import Callable, Iterable, Iterator +from typing import Any, TypeVar, overload + +__all__ = [ + 'tap', + 'log_calls', 'print_calls', + 'log_enters', 'print_enters', + 'log_exits', 'print_exits', + 'log_errors', 'print_errors', + 'log_durations', 'print_durations', + 'log_iter_durations', 'print_iter_durations', +] + +_T = TypeVar('_T') +_F = TypeVar('_F', bound=Callable[..., Any]) + +def tap(x: _T, label: str | None = ...) -> _T: ... + +### Logging decorators + +def log_calls(print_func: Callable[..., Any], errors: bool = ..., stack: bool = ..., repr_len: int = ...) -> Callable[[_F], _F]: ... + +@overload +def print_calls(func: _F) -> _F: ... +@overload +def print_calls(errors: bool = ..., stack: bool = ..., repr_len: int = ...) -> Callable[[_F], _F]: ... + +def log_enters(print_func: Callable[..., Any], repr_len: int = ...) -> Callable[[_F], _F]: ... + +@overload +def print_enters(func: _F) -> _F: ... +@overload +def print_enters(repr_len: int = ...) -> Callable[[_F], _F]: ... + +def log_exits(print_func: Callable[..., Any], errors: bool = ..., stack: bool = ..., repr_len: int = ...) -> Callable[[_F], _F]: ... + +@overload +def print_exits(func: _F) -> _F: ... +@overload +def print_exits(errors: bool = ..., stack: bool = ..., repr_len: int = ...) -> Callable[[_F], _F]: ... + +### Error / Duration context managers + decorators + +class log_errors: + def __init__(self, print_func: Callable[..., Any], label: str | None = ..., stack: bool = ..., repr_len: int = ...) -> None: ... + @overload + def __call__(self, func: _F) -> _F: ... + @overload + def __call__(self, label: str | None = ..., **kwargs: Any) -> log_errors: ... 
+ def __enter__(self) -> log_errors: ... + def __exit__(self, *exc: Any) -> None: ... + +print_errors: log_errors + +class log_durations: + def __init__(self, print_func: Callable[..., Any], label: str | None = ..., unit: str = ..., threshold: float = ..., repr_len: int = ...) -> None: ... + @overload + def __call__(self, func: _F) -> _F: ... + @overload + def __call__(self, label: str | None = ..., **kwargs: Any) -> log_durations: ... + def __enter__(self) -> log_durations: ... + def __exit__(self, *exc: Any) -> None: ... + +print_durations: log_durations + +def log_iter_durations(seq: Iterable[_T], print_func: Callable[..., Any], label: str | None = ..., unit: str = ...) -> Iterator[_T]: ... +def print_iter_durations(seq: Iterable[_T], label: str | None = ..., unit: str = ...) -> Iterator[_T]: ... diff --git a/funcy/decorators.pyi b/funcy/decorators.pyi new file mode 100644 index 0000000..16ebf28 --- /dev/null +++ b/funcy/decorators.pyi @@ -0,0 +1,28 @@ +from collections.abc import Callable, Sequence +from contextlib import ContextDecorator as ContextDecorator, contextmanager as contextmanager +from inspect import unwrap as unwrap +from typing import Any, TypeVar, overload + +__all__ = ['decorator', 'wraps', 'unwrap', 'ContextDecorator', 'contextmanager'] + +_F = TypeVar('_F', bound=Callable[..., Any]) + +class Call: + """Proxy for decorated function with call arguments saved in attributes.""" + _func: Callable[..., Any] + _args: tuple[Any, ...] + _kwargs: dict[str, Any] + def __init__(self, func: Callable[..., Any], args: tuple[Any, ...], kwargs: dict[str, Any]) -> None: ... + def __call__(self, *a: Any, **kw: Any) -> Any: ... + def __getattr__(self, name: str) -> Any: ... + +class _Decorator: + """Result of @decorator: can be used as decorator or called with args to create one.""" + @overload + def __call__(self, __func: _F) -> _F: ... + @overload + def __call__(self, *args: Any, **kwargs: Any) -> _Decorator: ... 
+ +def decorator(deco: Callable[..., Any]) -> _Decorator: ... + +def wraps(wrapped: _F, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[Callable[..., Any]], _F]: ... diff --git a/funcy/flow.pyi b/funcy/flow.pyi new file mode 100644 index 0000000..9bb0c59 --- /dev/null +++ b/funcy/flow.pyi @@ -0,0 +1,52 @@ +from collections.abc import Callable, Iterable, Iterator +from contextlib import suppress, AbstractContextManager +from datetime import timedelta +from typing import Any, NoReturn, ParamSpec, TypeVar, overload + +__all__ = ['raiser', 'ignore', 'silent', 'suppress', 'nullcontext', 'reraise', 'retry', 'fallback', + 'limit_error_rate', 'ErrorRateExceeded', 'throttle', + 'post_processing', 'collecting', 'joining', + 'once', 'once_per', 'once_per_args', + 'wrap_with'] + +_T = TypeVar('_T') +_T2 = TypeVar('_T2') +_P = ParamSpec('_P') +_F = TypeVar('_F', bound=Callable[..., Any]) + +_ExcType = type[BaseException] | tuple[type[BaseException], ...] + +### Error handling + +def raiser(exception_or_class: BaseException | type[BaseException] | str = ..., *args: Any, **kwargs: Any) -> Callable[..., NoReturn]: ... + +def ignore(errors: _ExcType | Iterable[type[BaseException]], default: Any = ...) -> Callable[[_F], _F]: ... +def silent(func: _F) -> _F: ... + +from contextlib import nullcontext + +def reraise(errors: _ExcType | Iterable[type[BaseException]], into: BaseException | type[BaseException] | Callable[[BaseException], BaseException]) -> AbstractContextManager[None]: ... + +def retry(tries: int, errors: _ExcType | Iterable[type[BaseException]] = ..., timeout: int | float | Callable[[int], float] = ..., filter_errors: Callable[[BaseException], bool] | None = ...) -> Callable[[_F], _F]: ... + +def fallback(*approaches: Callable[[], Any] | tuple[Callable[[], Any], _ExcType | Iterable[type[BaseException]]]) -> Any: ... + +class ErrorRateExceeded(Exception): ... 
+ +def limit_error_rate(fails: int, timeout: int | float | timedelta, exception: type[BaseException] = ...) -> Callable[[_F], _F]: ... + +def throttle(period: int | float | timedelta) -> Callable[[_F], _F]: ... + +### Post processing + +def post_processing(func: Callable[[_T], _T2]) -> Callable[[Callable[_P, _T]], Callable[_P, _T2]]: ... +def collecting(func: Callable[_P, Iterable[_T]]) -> Callable[_P, list[_T]]: ... +def joining(sep: str) -> Callable[[Callable[_P, Any]], Callable[_P, str]]: ... + +### Initialization + +def once_per(*argnames: str) -> Callable[[_F], _F]: ... +def once(func: _F) -> _F: ... +def once_per_args(func: _F) -> _F: ... + +def wrap_with(ctx: Any) -> Callable[[_F], _F]: ... diff --git a/funcy/funcmakers.pyi b/funcy/funcmakers.pyi new file mode 100644 index 0000000..4e072ca --- /dev/null +++ b/funcy/funcmakers.pyi @@ -0,0 +1,49 @@ +# THIS FILE IS AUTOGENERATED by translate_pyih.py from funcmakers.pyih. DO NOT EDIT. + +import re +from collections.abc import Callable, Mapping, Sequence, Set as AbstractSet +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ('make_func', 'make_pred') + +_K = TypeVar('_K') +_T = TypeVar('_T') +_V = TypeVar('_V') + +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +# make_func: return types reflect test=False (the default) +# For test=True behavior, use make_pred instead. + +@overload +def make_func(f: Callable[..., _V], test: bool = ...) -> Callable[..., _V]: ... +@overload +def make_func(f: None, test: bool = ...) -> Callable[[_T], _T]: ... +@overload +def make_func(f: int, test: bool = ...) -> Callable[[Sequence[_T]], _T]: ... +@overload +def make_func(f: slice, test: bool = ...) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +@overload +def make_func(f: str | bytes | re.Pattern[str], test: bool = ...) -> Callable[[str], _ReResult | None]: ... 
+@overload +def make_func(f: Mapping[_K, _V], test: bool = ...) -> Callable[[_K], _V]: ... +@overload +def make_func(f: AbstractSet[_T], test: bool = ...) -> Callable[[_T], bool]: ... + +# make_pred: return types reflect test=True behavior + +@overload +def make_pred(pred: Callable[..., _V]) -> Callable[..., _V]: ... +@overload +def make_pred(pred: None) -> Callable[[Any], bool]: ... +@overload +def make_pred(pred: int) -> Callable[[Sequence[_T]], _T]: ... +@overload +def make_pred(pred: slice) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +@overload +def make_pred(pred: str | bytes | re.Pattern[str]) -> Callable[[str], bool]: ... +@overload +def make_pred(pred: Mapping[_K, _V]) -> Callable[[_K], _V]: ... +@overload +def make_pred(pred: AbstractSet[_T]) -> Callable[[_T], bool]: ... diff --git a/funcy/funcmakers.pyih b/funcy/funcmakers.pyih new file mode 100644 index 0000000..9aff632 --- /dev/null +++ b/funcy/funcmakers.pyih @@ -0,0 +1,33 @@ +import re +from collections.abc import Callable, Mapping, Sequence, Set as AbstractSet +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ('make_func', 'make_pred') + +_K = TypeVar('_K') +_T = TypeVar('_T') +_V = TypeVar('_V') + +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +# make_func: return types reflect test=False (the default) +# For test=True behavior, use make_pred instead. + +def make_func(f: Callable[..., _V], test: bool = ...) -> Callable[..., _V]: ... +def make_func(f: None, test: bool = ...) -> Callable[[_T], _T]: ... +def make_func(f: int, test: bool = ...) -> Callable[[Sequence[_T]], _T]: ... +def make_func(f: slice, test: bool = ...) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +def make_func(f: str | bytes | re.Pattern[str], test: bool = ...) -> Callable[[str], _ReResult | None]: ... +def make_func(f: Mapping[_K, _V], test: bool = ...) 
-> Callable[[_K], _V]: ... +def make_func(f: AbstractSet[_T], test: bool = ...) -> Callable[[_T], bool]: ... + +# make_pred: return types reflect test=True behavior + +def make_pred(pred: Callable[..., _V]) -> Callable[..., _V]: ... +def make_pred(pred: None) -> Callable[[Any], bool]: ... +def make_pred(pred: int) -> Callable[[Sequence[_T]], _T]: ... +def make_pred(pred: slice) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +def make_pred(pred: str | bytes | re.Pattern[str]) -> Callable[[str], bool]: ... +def make_pred(pred: Mapping[_K, _V]) -> Callable[[_K], _V]: ... +def make_pred(pred: AbstractSet[_T]) -> Callable[[_T], bool]: ... diff --git a/funcy/funcolls.pyi b/funcy/funcolls.pyi new file mode 100644 index 0000000..d946bb7 --- /dev/null +++ b/funcy/funcolls.pyi @@ -0,0 +1,14 @@ +import re +from collections.abc import Callable, Mapping, Set as AbstractSet +from typing import Any + +__all__ = ['all_fn', 'any_fn', 'none_fn', 'one_fn', 'some_fn'] + +# All types accepted by the extended function protocol (funcmakers.make_func) +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None + +def all_fn(*fs: _XFunc) -> Callable[..., bool]: ... +def any_fn(*fs: _XFunc) -> Callable[..., bool]: ... +def none_fn(*fs: _XFunc) -> Callable[..., bool]: ... +def one_fn(*fs: _XFunc) -> Callable[..., bool]: ... +def some_fn(*fs: _XFunc) -> Callable[..., Any]: ... diff --git a/funcy/funcs.pyi b/funcy/funcs.pyi new file mode 100644 index 0000000..6ba6791 --- /dev/null +++ b/funcy/funcs.pyi @@ -0,0 +1,130 @@ +# THIS FILE IS AUTOGENERATED by translate_pyih.py from funcs.pyih. DO NOT EDIT. 
+ +import re +from collections.abc import Callable, Iterator, Mapping, Set as AbstractSet, Sequence +from functools import partial, reduce +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ['identity', 'constantly', 'caller', + 'reduce', 'partial', + 'rpartial', 'func_partial', + 'curry', 'rcurry', 'autocurry', + 'iffy', + 'compose', 'rcompose', 'complement', 'juxt', 'ljuxt'] + +_K = TypeVar('_K') +_T = TypeVar('_T') +_T2 = TypeVar('_T2') +_V = TypeVar('_V') +_R = TypeVar('_R') +_F = TypeVar('_F', bound=Callable[..., Any]) + +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +def identity(x: _T) -> _T: ... +def constantly(x: _T) -> Callable[..., _T]: ... +def caller(*a: Any, **kw: Any) -> Callable[[Callable[..., _T]], _T]: ... + +def func_partial(func: Callable[..., _T], *args: Any, **kwargs: Any) -> Callable[..., _T]: ... +def rpartial(func: Callable[..., _T], *args: Any, **kwargs: Any) -> Callable[..., _T]: ... + +def curry(func: Callable[..., _T], n: int = ...) -> Callable[..., _T]: ... +def rcurry(func: Callable[..., _T], n: int = ...) -> Callable[..., _T]: ... +def autocurry(func: _F, n: int = ...) -> _F: ... + +# iffy: one-arg form — action with extended function semantics (Callable handled below) +@overload +def iffy(action: None) -> Callable[[_T], _T]: ... +@overload +def iffy(action: AbstractSet[_T]) -> Callable[[_T], bool]: ... +@overload +def iffy(action: str | bytes | re.Pattern[str]) -> Callable[[Any], _ReResult | None]: ... +@overload +def iffy(action: int) -> Callable[[Sequence[_T]], _T]: ... +@overload +def iffy(action: slice) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +@overload +def iffy(action: Mapping[_T, _V]) -> Callable[[_T], _V]: ... +@overload +def iffy(action: Callable[..., _T]) -> Callable[..., _T]: ... 
+@overload +def iffy(pred: Callable[[_T], Any], action: Callable[[_T], _V], default: Any = ...) -> Callable[[_T], _V]: ... +@overload +def iffy(pred: None, action: Callable[[_T], _V], default: Any = ...) -> Callable[[_T], _V]: ... +@overload +def iffy(pred: AbstractSet[_T], action: Callable[[_T], _V], default: Any = ...) -> Callable[[_T], _V]: ... +@overload +def iffy(pred: str | bytes | re.Pattern[str], action: Callable[[str], _V], default: Any = ...) -> Callable[[str], _V]: ... +@overload +def iffy(pred: int, action: Callable[[Sequence[Any]], _V], default: Any = ...) -> Callable[[Sequence[Any]], _V]: ... +@overload +def iffy(pred: slice, action: Callable[[Sequence[Any]], _V], default: Any = ...) -> Callable[[Sequence[Any]], _V]: ... +@overload +def iffy(pred: Mapping[_T, Any], action: Callable[[_T], _V], default: Any = ...) -> Callable[[_T], _V]: ... +@overload +def iffy(pred: _XFunc, action: _XFunc, default: Any = ...) -> Callable[..., Any]: ... + +# compose: single function with extended semantics (Callable handled below) +@overload +def compose(f: None) -> Callable[[_T], _T]: ... +@overload +def compose(f: AbstractSet[_T]) -> Callable[[_T], bool]: ... +@overload +def compose(f: str | bytes | re.Pattern[str]) -> Callable[[Any], _ReResult | None]: ... +@overload +def compose(f: int) -> Callable[[Sequence[_T]], _T]: ... +@overload +def compose(f: slice) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +@overload +def compose(f: Mapping[_T, _V]) -> Callable[[_T], _V]: ... +@overload +def compose(__f: Callable[..., _R], *rest: _XFunc) -> Callable[..., _R]: ... +@overload +def compose(*fs: _XFunc) -> Callable[..., Any]: ... + +# rcompose: single function with extended semantics (Callable handled below) +@overload +def rcompose(f: None) -> Callable[[_T], _T]: ... +@overload +def rcompose(f: AbstractSet[_T]) -> Callable[[_T], bool]: ... +@overload +def rcompose(f: str | bytes | re.Pattern[str]) -> Callable[[Any], _ReResult | None]: ... 
+@overload +def rcompose(f: int) -> Callable[[Sequence[_T]], _T]: ... +@overload +def rcompose(f: slice) -> Callable[[Sequence[_T]], Sequence[_T]]: ... +@overload +def rcompose(f: Mapping[_T, _V]) -> Callable[[_T], _V]: ... +@overload +def rcompose(__f1: _XFunc, __f2: Callable[..., _R]) -> Callable[..., _R]: ... +@overload +def rcompose(__f1: _XFunc, __f2: _XFunc, __f3: Callable[..., _R]) -> Callable[..., _R]: ... +@overload +def rcompose(*fs: _XFunc) -> Callable[..., Any]: ... + +# complement: XPred constrains input type of returned bool-function (Callable handled below) +@overload +def complement(pred: None) -> Callable[[_T], bool]: ... +@overload +def complement(pred: AbstractSet[_T]) -> Callable[[_T], bool]: ... +@overload +def complement(pred: str | bytes | re.Pattern[str]) -> Callable[[str], bool]: ... +@overload +def complement(pred: int) -> Callable[[Sequence[Any]], bool]: ... +@overload +def complement(pred: slice) -> Callable[[Sequence[Any]], bool]: ... +@overload +def complement(pred: Mapping[_T, Any]) -> Callable[[_T], bool]: ... +@overload +def complement(pred: Callable[..., Any]) -> Callable[..., bool]: ... + +@overload +def juxt(__f1: Callable[..., _T], __f2: Callable[..., _T2]) -> Callable[..., Iterator[_T | _T2]]: ... +@overload +def juxt(*fs: _XFunc) -> Callable[..., Iterator[Any]]: ... + +@overload +def ljuxt(__f1: Callable[..., _T], __f2: Callable[..., _T2]) -> Callable[..., list[_T | _T2]]: ... +@overload +def ljuxt(*fs: _XFunc) -> Callable[..., list[Any]]: ... 
diff --git a/funcy/funcs.pyih b/funcy/funcs.pyih new file mode 100644 index 0000000..c9627ed --- /dev/null +++ b/funcy/funcs.pyih @@ -0,0 +1,70 @@ +import re +from collections.abc import Callable, Iterator, Mapping, Set as AbstractSet, Sequence +from functools import partial, reduce +from typing import Any, TypeAlias, TypeVar, overload + +__all__ = ['identity', 'constantly', 'caller', + 'reduce', 'partial', + 'rpartial', 'func_partial', + 'curry', 'rcurry', 'autocurry', + 'iffy', + 'compose', 'rcompose', 'complement', 'juxt', 'ljuxt'] + +_K = TypeVar('_K') +_T = TypeVar('_T') +_T2 = TypeVar('_T2') +_V = TypeVar('_V') +_R = TypeVar('_R') +_F = TypeVar('_F', bound=Callable[..., Any]) + +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +def identity(x: _T) -> _T: ... +def constantly(x: _T) -> Callable[..., _T]: ... +def caller(*a: Any, **kw: Any) -> Callable[[Callable[..., _T]], _T]: ... + +def func_partial(func: Callable[..., _T], *args: Any, **kwargs: Any) -> Callable[..., _T]: ... +def rpartial(func: Callable[..., _T], *args: Any, **kwargs: Any) -> Callable[..., _T]: ... + +def curry(func: Callable[..., _T], n: int = ...) -> Callable[..., _T]: ... +def rcurry(func: Callable[..., _T], n: int = ...) -> Callable[..., _T]: ... +def autocurry(func: _F, n: int = ...) -> _F: ... + +# iffy: one-arg form — action with extended function semantics (Callable handled below) +# xfunc_skip: Callable +def iffy(action: XFunc[[_T], _V]) -> Callable[[_T], _V]: ... +# iffy: one-arg Callable (preserves return type cleanly with ...) +def iffy(action: Callable[..., _T]) -> Callable[..., _T]: ... +# iffy: two-arg — XPred for pred constrains input type, Callable action gives output type +def iffy(pred: XPred[_T], action: Callable[[_T], _V], default: Any = ...) -> Callable[[_T], _V]: ... 
+# iffy: catch-all for _XFunc action or unresolved pred/action +def iffy(pred: _XFunc, action: _XFunc, default: Any = ...) -> Callable[..., Any]: ... + +# compose: single function with extended semantics (Callable handled below) +# xfunc_skip: Callable +def compose(f: XFunc[[_T], _V]) -> Callable[[_T], _V]: ... +# compose: multiple functions — first arg determines return type +def compose(__f: Callable[..., _R], *rest: _XFunc) -> Callable[..., _R]: ... +# compose: all _XFunc (no typed return) +def compose(*fs: _XFunc) -> Callable[..., Any]: ... + +# rcompose: single function with extended semantics (Callable handled below) +# xfunc_skip: Callable +def rcompose(f: XFunc[[_T], _V]) -> Callable[[_T], _V]: ... +# rcompose: multiple functions — last arg determines return type +def rcompose(__f1: _XFunc, __f2: Callable[..., _R]) -> Callable[..., _R]: ... +def rcompose(__f1: _XFunc, __f2: _XFunc, __f3: Callable[..., _R]) -> Callable[..., _R]: ... +def rcompose(*fs: _XFunc) -> Callable[..., Any]: ... + +# complement: XPred constrains input type of returned bool-function (Callable handled below) +# xfunc_skip: Callable +def complement(pred: XPred[_T]) -> Callable[[_T], bool]: ... +# complement: Callable pred (... arity preserved) +def complement(pred: Callable[..., Any]) -> Callable[..., bool]: ... + +def juxt(__f1: Callable[..., _T], __f2: Callable[..., _T2]) -> Callable[..., Iterator[_T | _T2]]: ... +def juxt(*fs: _XFunc) -> Callable[..., Iterator[Any]]: ... + +def ljuxt(__f1: Callable[..., _T], __f2: Callable[..., _T2]) -> Callable[..., list[_T | _T2]]: ... +def ljuxt(*fs: _XFunc) -> Callable[..., list[Any]]: ... 
diff --git a/funcy/objects.pyi b/funcy/objects.pyi new file mode 100644 index 0000000..c495541 --- /dev/null +++ b/funcy/objects.pyi @@ -0,0 +1,27 @@ +from collections.abc import Callable +from typing import Any, Generic, TypeVar, overload + +__all__ = ['cached_property', 'cached_readonly', 'wrap_prop', 'monkey', 'LazyObject'] + +_T = TypeVar('_T') +_F = TypeVar('_F', bound=Callable[..., Any]) + +class cached_property(Generic[_T]): + fget: Callable[..., _T] + fset: None + fdel: None + def __init__(self, fget: Callable[..., _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type) -> cached_property[_T]: ... + @overload + def __get__(self, instance: Any, owner: type) -> _T: ... + +class cached_readonly(cached_property[_T]): + def __set__(self, instance: Any, value: Any) -> None: ... + +def wrap_prop(ctx: Any) -> Callable[..., Any]: ... + +def monkey(cls: type | Any, name: str | None = ...) -> Callable[[_F], _F]: ... + +class LazyObject: + def __init__(self, init: Callable[[], Any]) -> None: ... diff --git a/funcy/primitives.pyi b/funcy/primitives.pyi new file mode 100644 index 0000000..42bbd7a --- /dev/null +++ b/funcy/primitives.pyi @@ -0,0 +1,8 @@ +__all__ = ['isnone', 'notnone', 'inc', 'dec', 'even', 'odd'] + +def isnone(x: object) -> bool: ... +def notnone(x: object) -> bool: ... +def inc(x: int) -> int: ... +def dec(x: int) -> int: ... +def even(x: int) -> bool: ... +def odd(x: int) -> bool: ... diff --git a/funcy/py.typed b/funcy/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/funcy/seqs.pyi b/funcy/seqs.pyi new file mode 100644 index 0000000..d5fbbb6 --- /dev/null +++ b/funcy/seqs.pyi @@ -0,0 +1,408 @@ +# THIS FILE IS AUTOGENERATED by translate_pyih.py from seqs.pyih. DO NOT EDIT. 
+ +import re +from collections.abc import Callable, Iterable, Iterator, Mapping, Set as AbstractSet, Sequence +from typing import Any, TypeAlias, TypeVar, overload +from itertools import chain, count, cycle, repeat, accumulate + +__all__ = [ + 'count', 'cycle', 'repeat', 'repeatedly', 'iterate', + 'take', 'drop', 'first', 'second', 'nth', 'last', 'rest', 'butlast', 'ilen', + 'map', 'filter', 'lmap', 'lfilter', 'remove', 'lremove', 'keep', 'lkeep', 'without', 'lwithout', + 'concat', 'lconcat', 'chain', 'cat', 'lcat', 'flatten', 'lflatten', 'mapcat', 'lmapcat', + 'interleave', 'interpose', 'distinct', 'ldistinct', + 'dropwhile', 'takewhile', 'split', 'lsplit', 'split_at', 'lsplit_at', 'split_by', 'lsplit_by', + 'group_by', 'group_by_keys', 'group_values', 'count_by', 'count_reps', + 'partition', 'lpartition', 'chunks', 'lchunks', 'partition_by', 'lpartition_by', + 'with_prev', 'with_next', 'pairwise', 'lzip', + 'reductions', 'lreductions', 'sums', 'lsums', 'accumulate', +] + +_K = TypeVar('_K') +_T = TypeVar('_T') +_V = TypeVar('_V') + +# Non-callable types accepted by the extended function protocol (funcmakers.make_func): +# int/slice for itemgetter, str/bytes/re.Pattern for regex, +# Mapping for lookup, Set for membership, None for identity/bool +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +### Generators + +def repeatedly(f: Callable[[], _T], n: int = ...) -> Iterator[_T]: ... +def iterate(f: Callable[[_T], _T], x: _T) -> Iterator[_T]: ... + +### Slicing + +def take(n: int, seq: Iterable[_T]) -> list[_T]: ... +def drop(n: int, seq: Iterable[_T]) -> Iterator[_T]: ... +def first(seq: Iterable[_T]) -> _T | None: ... +def second(seq: Iterable[_T]) -> _T | None: ... +def nth(n: int, seq: Iterable[_T]) -> _T | None: ... +def last(seq: Iterable[_T]) -> _T | None: ... +def rest(seq: Iterable[_T]) -> Iterator[_T]: ... 
+def butlast(seq: Iterable[_T]) -> Iterator[_T]: ... +def ilen(seq: Iterable[Any]) -> int: ... + +### Map / Filter + +@overload +def map(f: Callable[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ... +@overload +def map(f: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def map(f: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[bool]: ... +@overload +def map(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> Iterator[_ReResult | None]: ... +@overload +def map(f: int, seq: Iterable[Sequence[_T]]) -> Iterator[_T]: ... +@overload +def map(f: slice, seq: Iterable[Sequence[_T]]) -> Iterator[Sequence[_T]]: ... +@overload +def map(f: Mapping[_T, _V], seq: Iterable[_T]) -> Iterator[_V]: ... +@overload +def map(f: Callable[..., _V], *seqs: Iterable[Any]) -> Iterator[_V]: ... + +@overload +def lmap(f: Callable[[_T], _V], seq: Iterable[_T]) -> list[_V]: ... +@overload +def lmap(f: None, seq: Iterable[_T]) -> list[_T]: ... +@overload +def lmap(f: AbstractSet[_T], seq: Iterable[_T]) -> list[bool]: ... +@overload +def lmap(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> list[_ReResult | None]: ... +@overload +def lmap(f: int, seq: Iterable[Sequence[_T]]) -> list[_T]: ... +@overload +def lmap(f: slice, seq: Iterable[Sequence[_T]]) -> list[Sequence[_T]]: ... +@overload +def lmap(f: Mapping[_T, _V], seq: Iterable[_T]) -> list[_V]: ... +@overload +def lmap(f: Callable[..., _V], *seqs: Iterable[Any]) -> list[_V]: ... + +@overload +def filter(pred: Callable[[_T], Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def filter(pred: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def filter(pred: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def filter(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> Iterator[str]: ... +@overload +def filter(pred: int, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def filter(pred: slice, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... 
+@overload +def filter(pred: Mapping[_T, Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def lfilter(pred: Callable[[_T], Any], seq: Iterable[_T]) -> list[_T]: ... +@overload +def lfilter(pred: None, seq: Iterable[_T]) -> list[_T]: ... +@overload +def lfilter(pred: AbstractSet[_T], seq: Iterable[_T]) -> list[_T]: ... +@overload +def lfilter(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> list[str]: ... +@overload +def lfilter(pred: int, seq: Iterable[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def lfilter(pred: slice, seq: Iterable[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def lfilter(pred: Mapping[_T, Any], seq: Iterable[_T]) -> list[_T]: ... + +@overload +def remove(pred: Callable[[_T], Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def remove(pred: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def remove(pred: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def remove(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> Iterator[str]: ... +@overload +def remove(pred: int, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def remove(pred: slice, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def remove(pred: Mapping[_T, Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def lremove(pred: Callable[[_T], Any], seq: Iterable[_T]) -> list[_T]: ... +@overload +def lremove(pred: None, seq: Iterable[_T]) -> list[_T]: ... +@overload +def lremove(pred: AbstractSet[_T], seq: Iterable[_T]) -> list[_T]: ... +@overload +def lremove(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> list[str]: ... +@overload +def lremove(pred: int, seq: Iterable[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def lremove(pred: slice, seq: Iterable[Sequence[Any]]) -> list[Sequence[Any]]: ... +@overload +def lremove(pred: Mapping[_T, Any], seq: Iterable[_T]) -> list[_T]: ... 
+ +@overload +def keep(seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def keep(f: Callable[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ... +@overload +def keep(f: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def keep(f: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[bool]: ... +@overload +def keep(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> Iterator[_ReResult | None]: ... +@overload +def keep(f: int, seq: Iterable[Sequence[_T]]) -> Iterator[_T]: ... +@overload +def keep(f: slice, seq: Iterable[Sequence[_T]]) -> Iterator[Sequence[_T]]: ... +@overload +def keep(f: Mapping[_T, _V], seq: Iterable[_T]) -> Iterator[_V]: ... + +@overload +def lkeep(seq: Iterable[_T]) -> list[_T]: ... +@overload +def lkeep(f: Callable[[_T], _V], seq: Iterable[_T]) -> list[_V]: ... +@overload +def lkeep(f: None, seq: Iterable[_T]) -> list[_T]: ... +@overload +def lkeep(f: AbstractSet[_T], seq: Iterable[_T]) -> list[bool]: ... +@overload +def lkeep(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> list[_ReResult | None]: ... +@overload +def lkeep(f: int, seq: Iterable[Sequence[_T]]) -> list[_T]: ... +@overload +def lkeep(f: slice, seq: Iterable[Sequence[_T]]) -> list[Sequence[_T]]: ... +@overload +def lkeep(f: Mapping[_T, _V], seq: Iterable[_T]) -> list[_V]: ... + +def without(seq: Iterable[_T], *items: _T) -> Iterator[_T]: ... +def lwithout(seq: Iterable[_T], *items: _T) -> list[_T]: ... + +### Concat / Flatten + +def concat(*seqs: Iterable[_T]) -> Iterator[_T]: ... +def lconcat(*seqs: Iterable[_T]) -> list[_T]: ... +def cat(seqs: Iterable[Iterable[_T]]) -> Iterator[_T]: ... +def lcat(seqs: Iterable[Iterable[_T]]) -> list[_T]: ... + +def flatten(seq: Iterable[Any], follow: Callable[[Any], bool] = ...) -> Iterator[Any]: ... +def lflatten(seq: Iterable[Any], follow: Callable[[Any], bool] = ...) -> list[Any]: ... + +@overload +def mapcat(f: Callable[[_T], Iterable[_V]], seq: Iterable[_T]) -> Iterator[_V]: ... 
+@overload +def mapcat(f: int, seq: Iterable[Sequence[Iterable[_T]]]) -> Iterator[_T]: ... +@overload +def mapcat(f: Mapping[_K, Iterable[_V]], seq: Iterable[_K]) -> Iterator[_V]: ... +@overload +def mapcat(f: Callable[..., Iterable[_V]], *seqs: Iterable[Any]) -> Iterator[_V]: ... +@overload +def mapcat(f: _XFunc, *seqs: Iterable[Any]) -> Iterator[Any]: ... + +@overload +def lmapcat(f: Callable[[_T], Iterable[_V]], seq: Iterable[_T]) -> list[_V]: ... +@overload +def lmapcat(f: int, seq: Iterable[Sequence[Iterable[_T]]]) -> list[_T]: ... +@overload +def lmapcat(f: Mapping[_K, Iterable[_V]], seq: Iterable[_K]) -> list[_V]: ... +@overload +def lmapcat(f: Callable[..., Iterable[_V]], *seqs: Iterable[Any]) -> list[_V]: ... +@overload +def lmapcat(f: _XFunc, *seqs: Iterable[Any]) -> list[Any]: ... + +def interleave(*seqs: Iterable[_T]) -> Iterator[_T]: ... +def interpose(sep: _T, seq: Iterable[_T]) -> Iterator[_T]: ... + +### Distinct + +def distinct(seq: Iterable[_T], key: _XFunc = ...) -> Iterator[_T]: ... +def ldistinct(seq: Iterable[_T], key: _XFunc = ...) -> list[_T]: ... + +### Takewhile / Dropwhile / Split + +@overload +def takewhile(seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def takewhile(pred: Callable[[_T], Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def takewhile(pred: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def takewhile(pred: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def takewhile(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> Iterator[str]: ... +@overload +def takewhile(pred: int, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def takewhile(pred: slice, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def takewhile(pred: Mapping[_T, Any], seq: Iterable[_T]) -> Iterator[_T]: ... + +@overload +def dropwhile(seq: Iterable[_T]) -> Iterator[_T]: ... 
+@overload +def dropwhile(pred: Callable[[_T], Any], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def dropwhile(pred: None, seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def dropwhile(pred: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def dropwhile(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> Iterator[str]: ... +@overload +def dropwhile(pred: int, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def dropwhile(pred: slice, seq: Iterable[Sequence[Any]]) -> Iterator[Sequence[Any]]: ... +@overload +def dropwhile(pred: Mapping[_T, Any], seq: Iterable[_T]) -> Iterator[_T]: ... + +@overload +def split(pred: Callable[[_T], Any], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split(pred: None, seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split(pred: AbstractSet[_T], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> tuple[Iterator[str], Iterator[str]]: ... +@overload +def split(pred: int, seq: Iterable[Sequence[Any]]) -> tuple[Iterator[Sequence[Any]], Iterator[Sequence[Any]]]: ... +@overload +def split(pred: slice, seq: Iterable[Sequence[Any]]) -> tuple[Iterator[Sequence[Any]], Iterator[Sequence[Any]]]: ... +@overload +def split(pred: Mapping[_T, Any], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def lsplit(pred: Callable[[_T], Any], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit(pred: None, seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit(pred: AbstractSet[_T], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> tuple[list[str], list[str]]: ... +@overload +def lsplit(pred: int, seq: Iterable[Sequence[Any]]) -> tuple[list[Sequence[Any]], list[Sequence[Any]]]: ... 
+@overload +def lsplit(pred: slice, seq: Iterable[Sequence[Any]]) -> tuple[list[Sequence[Any]], list[Sequence[Any]]]: ... +@overload +def lsplit(pred: Mapping[_T, Any], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... + +def split_at(n: int, seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +def lsplit_at(n: int, seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... + +@overload +def split_by(pred: Callable[[_T], Any], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split_by(pred: None, seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split_by(pred: AbstractSet[_T], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def split_by(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> tuple[Iterator[str], Iterator[str]]: ... +@overload +def split_by(pred: int, seq: Iterable[Sequence[Any]]) -> tuple[Iterator[Sequence[Any]], Iterator[Sequence[Any]]]: ... +@overload +def split_by(pred: slice, seq: Iterable[Sequence[Any]]) -> tuple[Iterator[Sequence[Any]], Iterator[Sequence[Any]]]: ... +@overload +def split_by(pred: Mapping[_T, Any], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +@overload +def lsplit_by(pred: Callable[[_T], Any], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit_by(pred: None, seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit_by(pred: AbstractSet[_T], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... +@overload +def lsplit_by(pred: str | bytes | re.Pattern[str], seq: Iterable[str]) -> tuple[list[str], list[str]]: ... +@overload +def lsplit_by(pred: int, seq: Iterable[Sequence[Any]]) -> tuple[list[Sequence[Any]], list[Sequence[Any]]]: ... +@overload +def lsplit_by(pred: slice, seq: Iterable[Sequence[Any]]) -> tuple[list[Sequence[Any]], list[Sequence[Any]]]: ... +@overload +def lsplit_by(pred: Mapping[_T, Any], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... 
+ +### Grouping + +@overload +def group_by(f: Callable[[_T], _K], seq: Iterable[_T]) -> dict[_K, list[_T]]: ... +@overload +def group_by(f: None, seq: Iterable[_T]) -> dict[_T, list[_T]]: ... +@overload +def group_by(f: AbstractSet[_T], seq: Iterable[_T]) -> dict[bool, list[_T]]: ... +@overload +def group_by(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> dict[_ReResult | None, list[Any]]: ... +@overload +def group_by(f: int, seq: Iterable[Sequence[_T]]) -> dict[_T, list[Sequence[_T]]]: ... +@overload +def group_by(f: slice, seq: Iterable[Sequence[_T]]) -> dict[Sequence[_T], list[Sequence[_T]]]: ... +@overload +def group_by(f: Mapping[_T, _K], seq: Iterable[_T]) -> dict[_K, list[_T]]: ... + +@overload +def group_by_keys(get_keys: Callable[[_T], Iterable[_K]], seq: Iterable[_T]) -> dict[_K, list[_T]]: ... +@overload +def group_by_keys(get_keys: _XFunc, seq: Iterable[_T]) -> dict[Any, list[_T]]: ... + +def group_values(seq: Iterable[tuple[_K, _V]]) -> dict[_K, list[_V]]: ... + +@overload +def count_by(f: Callable[[_T], _K], seq: Iterable[_T]) -> dict[_K, int]: ... +@overload +def count_by(f: None, seq: Iterable[_T]) -> dict[_T, int]: ... +@overload +def count_by(f: AbstractSet[_T], seq: Iterable[_T]) -> dict[bool, int]: ... +@overload +def count_by(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) -> dict[_ReResult | None, int]: ... +@overload +def count_by(f: int, seq: Iterable[Sequence[_T]]) -> dict[_T, int]: ... +@overload +def count_by(f: slice, seq: Iterable[Sequence[_T]]) -> dict[Sequence[_T], int]: ... +@overload +def count_by(f: Mapping[_T, _K], seq: Iterable[_T]) -> dict[_K, int]: ... + +def count_reps(seq: Iterable[_T]) -> dict[_T, int]: ... + +### Partitioning + +@overload +def partition(n: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... +@overload +def partition(n: int, step: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... + +@overload +def lpartition(n: int, seq: Iterable[_T]) -> list[list[_T]]: ... 
+@overload +def lpartition(n: int, step: int, seq: Iterable[_T]) -> list[list[_T]]: ... + +@overload +def chunks(n: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... +@overload +def chunks(n: int, step: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... + +@overload +def lchunks(n: int, seq: Iterable[_T]) -> list[list[_T]]: ... +@overload +def lchunks(n: int, step: int, seq: Iterable[_T]) -> list[list[_T]]: ... + +def partition_by(f: _XFunc, seq: Iterable[_T]) -> Iterator[Iterator[_T]]: ... +def lpartition_by(f: _XFunc, seq: Iterable[_T]) -> list[list[_T]]: ... + +### Pairing + +def with_prev(seq: Iterable[_T], fill: _T | None = ...) -> Iterator[tuple[_T, _T | None]]: ... +def with_next(seq: Iterable[_T], fill: _T | None = ...) -> Iterator[tuple[_T, _T | None]]: ... +def pairwise(seq: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ... + +@overload +def lzip(__i1: Iterable[_T], __i2: Iterable[_V]) -> list[tuple[_T, _V]]: ... +@overload +def lzip(__i1: Iterable[Any], __i2: Iterable[Any], __i3: Iterable[Any], *seqs: Iterable[Any]) -> list[tuple[Any, ...]]: ... +@overload +def lzip(*seqs: Iterable[Any], strict: bool = ...) -> list[tuple[Any, ...]]: ... + +### Reductions + +@overload +def reductions(f: Callable[[_T, _T], _T], seq: Iterable[_T]) -> Iterator[_T]: ... +@overload +def reductions(f: Callable[[_T, _T], _T], seq: Iterable[_T], acc: _T) -> Iterator[_T]: ... + +@overload +def lreductions(f: Callable[[_T, _T], _T], seq: Iterable[_T]) -> list[_T]: ... +@overload +def lreductions(f: Callable[[_T, _T], _T], seq: Iterable[_T], acc: _T) -> list[_T]: ... + +def sums(seq: Iterable[_T], acc: _T = ...) -> Iterator[_T]: ... +def lsums(seq: Iterable[_T], acc: _T = ...) -> list[_T]: ... 
diff --git a/funcy/seqs.pyih b/funcy/seqs.pyih new file mode 100644 index 0000000..37df879 --- /dev/null +++ b/funcy/seqs.pyih @@ -0,0 +1,165 @@ +import re +from collections.abc import Callable, Iterable, Iterator, Mapping, Set as AbstractSet, Sequence +from typing import Any, TypeAlias, TypeVar, overload +from itertools import chain, count, cycle, repeat, accumulate + +__all__ = [ + 'count', 'cycle', 'repeat', 'repeatedly', 'iterate', + 'take', 'drop', 'first', 'second', 'nth', 'last', 'rest', 'butlast', 'ilen', + 'map', 'filter', 'lmap', 'lfilter', 'remove', 'lremove', 'keep', 'lkeep', 'without', 'lwithout', + 'concat', 'lconcat', 'chain', 'cat', 'lcat', 'flatten', 'lflatten', 'mapcat', 'lmapcat', + 'interleave', 'interpose', 'distinct', 'ldistinct', + 'dropwhile', 'takewhile', 'split', 'lsplit', 'split_at', 'lsplit_at', 'split_by', 'lsplit_by', + 'group_by', 'group_by_keys', 'group_values', 'count_by', 'count_reps', + 'partition', 'lpartition', 'chunks', 'lchunks', 'partition_by', 'lpartition_by', + 'with_prev', 'with_next', 'pairwise', 'lzip', + 'reductions', 'lreductions', 'sums', 'lsums', 'accumulate', +] + +_K = TypeVar('_K') +_T = TypeVar('_T') +_V = TypeVar('_V') + +# Non-callable types accepted by the extended function protocol (funcmakers.make_func): +# int/slice for itemgetter, str/bytes/re.Pattern for regex, +# Mapping for lookup, Set for membership, None for identity/bool +_XFunc = Callable[..., Any] | int | slice | str | bytes | re.Pattern[str] | Mapping[Any, Any] | AbstractSet[Any] | None +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +### Generators + +def repeatedly(f: Callable[[], _T], n: int = ...) -> Iterator[_T]: ... +def iterate(f: Callable[[_T], _T], x: _T) -> Iterator[_T]: ... + +### Slicing + +def take(n: int, seq: Iterable[_T]) -> list[_T]: ... +def drop(n: int, seq: Iterable[_T]) -> Iterator[_T]: ... +def first(seq: Iterable[_T]) -> _T | None: ... +def second(seq: Iterable[_T]) -> _T | None: ... 
+def nth(n: int, seq: Iterable[_T]) -> _T | None: ... +def last(seq: Iterable[_T]) -> _T | None: ... +def rest(seq: Iterable[_T]) -> Iterator[_T]: ... +def butlast(seq: Iterable[_T]) -> Iterator[_T]: ... +def ilen(seq: Iterable[Any]) -> int: ... + +### Map / Filter + +def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ... +def map(f: Callable[..., _V], *seqs: Iterable[Any]) -> Iterator[_V]: ... + +def lmap(f: XFunc[[_T], _V], seq: Iterable[_T]) -> list[_V]: ... +def lmap(f: Callable[..., _V], *seqs: Iterable[Any]) -> list[_V]: ... + +def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +def lfilter(pred: XPred[_T], seq: Iterable[_T]) -> list[_T]: ... + +def remove(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ... +def lremove(pred: XPred[_T], seq: Iterable[_T]) -> list[_T]: ... + +def keep(seq: Iterable[_T]) -> Iterator[_T]: ... +def keep(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ... + +def lkeep(seq: Iterable[_T]) -> list[_T]: ... +def lkeep(f: XFunc[[_T], _V], seq: Iterable[_T]) -> list[_V]: ... + +def without(seq: Iterable[_T], *items: _T) -> Iterator[_T]: ... +def lwithout(seq: Iterable[_T], *items: _T) -> list[_T]: ... + +### Concat / Flatten + +def concat(*seqs: Iterable[_T]) -> Iterator[_T]: ... +def lconcat(*seqs: Iterable[_T]) -> list[_T]: ... +def cat(seqs: Iterable[Iterable[_T]]) -> Iterator[_T]: ... +def lcat(seqs: Iterable[Iterable[_T]]) -> list[_T]: ... + +def flatten(seq: Iterable[Any], follow: Callable[[Any], bool] = ...) -> Iterator[Any]: ... +def lflatten(seq: Iterable[Any], follow: Callable[[Any], bool] = ...) -> list[Any]: ... + +def mapcat(f: Callable[[_T], Iterable[_V]], seq: Iterable[_T]) -> Iterator[_V]: ... +def mapcat(f: int, seq: Iterable[Sequence[Iterable[_T]]]) -> Iterator[_T]: ... +def mapcat(f: Mapping[_K, Iterable[_V]], seq: Iterable[_K]) -> Iterator[_V]: ... +def mapcat(f: Callable[..., Iterable[_V]], *seqs: Iterable[Any]) -> Iterator[_V]: ... 
+def mapcat(f: _XFunc, *seqs: Iterable[Any]) -> Iterator[Any]: ... + +def lmapcat(f: Callable[[_T], Iterable[_V]], seq: Iterable[_T]) -> list[_V]: ... +def lmapcat(f: int, seq: Iterable[Sequence[Iterable[_T]]]) -> list[_T]: ... +def lmapcat(f: Mapping[_K, Iterable[_V]], seq: Iterable[_K]) -> list[_V]: ... +def lmapcat(f: Callable[..., Iterable[_V]], *seqs: Iterable[Any]) -> list[_V]: ... +def lmapcat(f: _XFunc, *seqs: Iterable[Any]) -> list[Any]: ... + +def interleave(*seqs: Iterable[_T]) -> Iterator[_T]: ... +def interpose(sep: _T, seq: Iterable[_T]) -> Iterator[_T]: ... + +### Distinct + +def distinct(seq: Iterable[_T], key: _XFunc = ...) -> Iterator[_T]: ... +def ldistinct(seq: Iterable[_T], key: _XFunc = ...) -> list[_T]: ... + +### Takewhile / Dropwhile / Split + +def takewhile(seq: Iterable[_T]) -> Iterator[_T]: ... +def takewhile(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ... + +def dropwhile(seq: Iterable[_T]) -> Iterator[_T]: ... +def dropwhile(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ... + +def split(pred: XPred[_T], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +def lsplit(pred: XPred[_T], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... + +def split_at(n: int, seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +def lsplit_at(n: int, seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... + +def split_by(pred: XPred[_T], seq: Iterable[_T]) -> tuple[Iterator[_T], Iterator[_T]]: ... +def lsplit_by(pred: XPred[_T], seq: Iterable[_T]) -> tuple[list[_T], list[_T]]: ... + +### Grouping + +def group_by(f: XFunc[[_T], _K], seq: Iterable[_T]) -> dict[_K, list[_T]]: ... + +def group_by_keys(get_keys: Callable[[_T], Iterable[_K]], seq: Iterable[_T]) -> dict[_K, list[_T]]: ... +def group_by_keys(get_keys: _XFunc, seq: Iterable[_T]) -> dict[Any, list[_T]]: ... + +def group_values(seq: Iterable[tuple[_K, _V]]) -> dict[_K, list[_V]]: ... + +def count_by(f: XFunc[[_T], _K], seq: Iterable[_T]) -> dict[_K, int]: ... 
+ +def count_reps(seq: Iterable[_T]) -> dict[_T, int]: ... + +### Partitioning + +def partition(n: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... +def partition(n: int, step: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... + +def lpartition(n: int, seq: Iterable[_T]) -> list[list[_T]]: ... +def lpartition(n: int, step: int, seq: Iterable[_T]) -> list[list[_T]]: ... + +def chunks(n: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... +def chunks(n: int, step: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ... + +def lchunks(n: int, seq: Iterable[_T]) -> list[list[_T]]: ... +def lchunks(n: int, step: int, seq: Iterable[_T]) -> list[list[_T]]: ... + +def partition_by(f: _XFunc, seq: Iterable[_T]) -> Iterator[Iterator[_T]]: ... +def lpartition_by(f: _XFunc, seq: Iterable[_T]) -> list[list[_T]]: ... + +### Pairing + +def with_prev(seq: Iterable[_T], fill: _T | None = ...) -> Iterator[tuple[_T, _T | None]]: ... +def with_next(seq: Iterable[_T], fill: _T | None = ...) -> Iterator[tuple[_T, _T | None]]: ... +def pairwise(seq: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ... + +def lzip(__i1: Iterable[_T], __i2: Iterable[_V]) -> list[tuple[_T, _V]]: ... +def lzip(__i1: Iterable[Any], __i2: Iterable[Any], __i3: Iterable[Any], *seqs: Iterable[Any]) -> list[tuple[Any, ...]]: ... +def lzip(*seqs: Iterable[Any], strict: bool = ...) -> list[tuple[Any, ...]]: ... + +### Reductions + +def reductions(f: Callable[[_T, _T], _T], seq: Iterable[_T]) -> Iterator[_T]: ... +def reductions(f: Callable[[_T, _T], _T], seq: Iterable[_T], acc: _T) -> Iterator[_T]: ... + +def lreductions(f: Callable[[_T, _T], _T], seq: Iterable[_T]) -> list[_T]: ... +def lreductions(f: Callable[[_T, _T], _T], seq: Iterable[_T], acc: _T) -> list[_T]: ... + +def sums(seq: Iterable[_T], acc: _T = ...) -> Iterator[_T]: ... +def lsums(seq: Iterable[_T], acc: _T = ...) -> list[_T]: ... 
diff --git a/funcy/strings.pyi b/funcy/strings.pyi new file mode 100644 index 0000000..4667805 --- /dev/null +++ b/funcy/strings.pyi @@ -0,0 +1,30 @@ +import re +from collections.abc import Callable, Iterable, Iterator +from typing import Any, TypeAlias + +__all__ = ['re_iter', 're_all', 're_find', 're_finder', 're_test', 're_tester', + 'str_join', + 'cut_prefix', 'cut_suffix'] + +_re_type: type + +# Return type of regex match operations depends on capture groups in the pattern: +# no groups -> str +# 1 unnamed group -> str +# N unnamed groups -> tuple[str, ...] +# named groups -> dict[str, str] +# We can't detect group count from the pattern type, so we use a union. +_ReResult: TypeAlias = str | tuple[str, ...] | dict[str, str] + +def re_iter(regex: str | re.Pattern[str], s: str, flags: int = ...) -> Iterator[_ReResult]: ... +def re_all(regex: str | re.Pattern[str], s: str, flags: int = ...) -> list[_ReResult]: ... +def re_find(regex: str | re.Pattern[str], s: str, flags: int = ...) -> _ReResult | None: ... +def re_test(regex: str | re.Pattern[str], s: str, flags: int = ...) -> bool: ... + +def re_finder(regex: str | re.Pattern[str], flags: int = ...) -> Callable[[str], _ReResult | None]: ... +def re_tester(regex: str | re.Pattern[str], flags: int = ...) -> Callable[[str], bool]: ... + +def str_join(sep: str | Iterable[Any], seq: Iterable[Any] = ...) -> str: ... + +def cut_prefix(s: str, prefix: str) -> str: ... +def cut_suffix(s: str, suffix: str) -> str: ... diff --git a/funcy/tree.pyi b/funcy/tree.pyi new file mode 100644 index 0000000..d5e956d --- /dev/null +++ b/funcy/tree.pyi @@ -0,0 +1,28 @@ +from collections.abc import Callable, Iterable, Iterator +from typing import Any + +__all__ = ['tree_leaves', 'ltree_leaves', 'tree_nodes', 'ltree_nodes'] + +def tree_leaves( + root: Any, + follow: Callable[[Any], bool] = ..., + children: Callable[[Any], Iterable[Any]] = ..., +) -> Iterator[Any]: ... 
+ +def ltree_leaves( + root: Any, + follow: Callable[[Any], bool] = ..., + children: Callable[[Any], Iterable[Any]] = ..., +) -> list[Any]: ... + +def tree_nodes( + root: Any, + follow: Callable[[Any], bool] = ..., + children: Callable[[Any], Iterable[Any]] = ..., +) -> Iterator[Any]: ... + +def ltree_nodes( + root: Any, + follow: Callable[[Any], bool] = ..., + children: Callable[[Any], Iterable[Any]] = ..., +) -> list[Any]: ... diff --git a/funcy/types.pyi b/funcy/types.pyi new file mode 100644 index 0000000..f4bf014 --- /dev/null +++ b/funcy/types.pyi @@ -0,0 +1,18 @@ +from collections.abc import Callable +from typing import Any + +__all__ = ('isa', 'is_mapping', 'is_set', 'is_seq', 'is_list', 'is_tuple', + 'is_seqcoll', 'is_seqcont', + 'iterable', 'is_iter') + +def isa(*types: type) -> Callable[[Any], bool]: ... + +is_mapping: Callable[[Any], bool] +is_set: Callable[[Any], bool] +is_seq: Callable[[Any], bool] +is_list: Callable[[Any], bool] +is_tuple: Callable[[Any], bool] +is_seqcoll: Callable[[Any], bool] +is_seqcont: Callable[[Any], bool] +iterable: Callable[[Any], bool] +is_iter: Callable[[Any], bool] diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..7ca1f34 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +# Intentional: more-specific overloads (e.g. 
dict before Iterable) overlap by design +disable_error_code = overload-overlap diff --git a/setup.py b/setup.py index af138fe..0999946 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ license='BSD', packages=['funcy'], + package_data={"funcy": ["py.typed", "*.pyi"]}, classifiers=[ 'Development Status :: 5 - Production/Stable', diff --git a/stubtest_allowlist.txt b/stubtest_allowlist.txt new file mode 100644 index 0000000..cee895a --- /dev/null +++ b/stubtest_allowlist.txt @@ -0,0 +1,40 @@ +# Functions using the EMPTY sentinel pattern have default values +# that stubtest can't reconcile with the overloaded stub signatures +funcy.colls.all +funcy.colls.any +funcy.colls.none +funcy.colls.one +funcy.colls.some +funcy.seqs.chunks +funcy.seqs.lchunks +funcy.seqs.dropwhile +funcy.seqs.takewhile +funcy.seqs.keep +funcy.seqs.lkeep +funcy.seqs.lpartition +funcy.seqs.partition +funcy.calc.memoize +funcy.funcs.iffy + +# @decorator-based functions: runtime signature has extra `call` arg +funcy.flow.collecting +funcy.flow.joining +funcy.flow.post_processing +funcy.flow.retry +funcy.flow.wrap_with +funcy.debug.log_calls +funcy.debug.log_enters +funcy.debug.log_exits +funcy.debug.print_calls +funcy.debug.print_enters +funcy.debug.print_exits + +# Re-exported from itertools: different arg names or classmethods +funcy.seqs.cat +funcy.seqs.concat + +# Overloaded __call__/__get__/__exit__ don't match runtime exactly +funcy.debug.log_errors.__call__ +funcy.debug.log_errors.__exit__ +funcy.debug.log_durations.__call__ +funcy.objects.cached_property.__get__ diff --git a/tests/test_translate_pyih.py b/tests/test_translate_pyih.py new file mode 100644 index 0000000..f3d8fa7 --- /dev/null +++ b/tests/test_translate_pyih.py @@ -0,0 +1,331 @@ +"""Tests for the .pyih -> .pyi translator.""" +import sys +import pytest + +if sys.version_info < (3, 12): + pytest.skip( + "translate_pyih requires Python 3.12+ (PEP 695 syntax)", allow_module_level=True + ) + +from translate_pyih import 
parse_pyih, generate_func_group, generate_pyi + + +def get_func_output(source): + """Parse source and return generated output for all func groups.""" + items = parse_pyih(source) + results = [] + for item in items: + if item['kind'] == 'func_group': + results.append(generate_func_group(item)) + return results + + +def get_single_func(source): + """Parse source and return generated output for a single func group.""" + results = get_func_output(source) + assert len(results) == 1, f"Expected 1 func group, got {len(results)}" + return results[0] + + +class TestPassthrough: + """Functions without XFunc/XPred/C pass through unchanged.""" + + def test_simple_function(self): + out = get_single_func( + "def take(n: int, seq: Iterable[_T]) -> list[_T]: ..." + ) + assert out == "def take(n: int, seq: Iterable[_T]) -> list[_T]: ..." + + def test_no_params(self): + out = get_single_func("def merge() -> None: ...") + assert out == "def merge() -> None: ..." + + def test_multiple_overloads_get_decorator(self): + source = ( + "def partition(n: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ...\n" + "def partition(n: int, step: int, seq: Iterable[_T]) -> Iterator[list[_T]]: ..." + ) + out = get_single_func(source) + assert out.count("@overload") == 2 + assert out.count("def partition(") == 2 + + def test_single_function_no_overload_decorator(self): + out = get_single_func("def first(seq: Iterable[_T]) -> _T | None: ...") + assert "@overload" not in out + + +class TestXFunc: + """XFunc[[A], B] expands to 7 overload variants.""" + + def test_basic_expansion(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + assert out.count("@overload") == 7 + assert out.count("def map(") == 7 + + def test_callable_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + assert "def map(f: Callable[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." 
in out + + def test_none_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # None: B becomes A (identity) + assert "def map(f: None, seq: Iterable[_T]) -> Iterator[_T]: ..." in out + + def test_set_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # Set: B becomes bool + assert "def map(f: AbstractSet[_T], seq: Iterable[_T]) -> Iterator[bool]: ..." in out + + def test_regex_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # Regex: A becomes Any, B becomes _ReResult | None + assert ("def map(f: str | bytes | re.Pattern[str], seq: Iterable[Any]) " + "-> Iterator[_ReResult | None]: ...") in out + + def test_int_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # int: A becomes Sequence[_T], B becomes _T + assert "def map(f: int, seq: Iterable[Sequence[_T]]) -> Iterator[_T]: ..." in out + + def test_slice_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # slice: A becomes Sequence[_T], B becomes Sequence[_T] + assert ("def map(f: slice, seq: Iterable[Sequence[_T]]) " + "-> Iterator[Sequence[_T]]: ...") in out + + def test_mapping_variant(self): + out = get_single_func( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ..." + ) + # Mapping: A->key, B->value + assert "def map(f: Mapping[_T, _V], seq: Iterable[_T]) -> Iterator[_V]: ..." in out + + def test_skip_variant(self): + source = ( + "# xfunc_skip: slice\n" + "def walk_keys(f: XFunc[[_K], _K2], coll: Mapping[_K, _V]) -> dict[_K2, _V]: ..." 
+ ) + out = get_single_func(source) + assert out.count("def walk_keys(") == 6 # 7 - 1 skipped + assert "f: slice" not in out + + def test_extra_overload_alongside_xfunc(self): + source = ( + "def map(f: XFunc[[_T], _V], seq: Iterable[_T]) -> Iterator[_V]: ...\n" + "def map(f: Callable[..., _V], *seqs: Iterable[Any]) -> Iterator[_V]: ..." + ) + out = get_single_func(source) + # 7 XFunc variants + 1 passthrough = 8 + assert out.count("def map(") == 8 + assert "def map(f: Callable[..., _V], *seqs: Iterable[Any]) -> Iterator[_V]: ..." in out + + def test_typevar_no_partial_match(self): + """_K should not match _K2 during substitution.""" + out = get_single_func( + "def walk_keys(f: XFunc[[_K], _K2], coll: dict[_K, _V]) -> dict[_K2, _V]: ..." + ) + # int variant: _K -> Sequence[_T], _K2 -> _T + assert "def walk_keys(f: int, coll: dict[Sequence[_T], _V]) -> dict[_T, _V]: ..." in out + # Should NOT have mangled _K2 into Sequence[_T]2 + assert "Sequence[_T]2" not in out + + +class TestXPred: + """XPred[A] expands to 7 overload variants preserving element type.""" + + def test_basic_expansion(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." + ) + assert out.count("@overload") == 7 + assert out.count("def filter(") == 7 + + def test_callable_variant(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." + ) + assert ( + "def filter(pred: Callable[[_T], Any], seq: Iterable[_T]) -> Iterator[_T]: ..." in out + ) + + def test_none_variant(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." + ) + assert "def filter(pred: None, seq: Iterable[_T]) -> Iterator[_T]: ..." in out + + def test_regex_constrains_to_str(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." 
+ ) + # Regex pred constrains elements to str + assert ("def filter(pred: str | bytes | re.Pattern[str], " + "seq: Iterable[str]) -> Iterator[str]: ...") in out + + def test_mapping_preserves_element(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." + ) + assert ("def filter(pred: Mapping[_T, Any], " + "seq: Iterable[_T]) -> Iterator[_T]: ...") in out + + def test_set_preserves_element(self): + out = get_single_func( + "def filter(pred: XPred[_T], seq: Iterable[_T]) -> Iterator[_T]: ..." + ) + assert ("def filter(pred: AbstractSet[_T], " + "seq: Iterable[_T]) -> Iterator[_T]: ...") in out + + def test_return_type_preserved(self): + """XPred preserves element type in complex return types.""" + out = get_single_func( + "def split(pred: XPred[_T], seq: Iterable[_T]) -> " + "tuple[Iterator[_T], Iterator[_T]]: ..." + ) + # Callable variant + assert "-> tuple[Iterator[_T], Iterator[_T]]: ..." in out + # Regex variant constrains _T to str + assert "-> tuple[Iterator[str], Iterator[str]]: ..." in out + + +class TestCollectionExpansion: + """[C: (type1, type2)] expands to per-type overloads.""" + + def test_basic_expansion(self): + out = get_single_func( + "def walk[C: (list, set)](f: Callable[[_T], _V], coll: C[_T]) -> C[_V]: ..." + ) + assert out.count("def walk(") == 2 + assert "coll: list[_T]) -> list[_V]: ..." in out + assert "coll: set[_T]) -> set[_V]: ..." in out + + def test_two_param_types(self): + out = get_single_func( + "def walk_keys[C: (dict, Mapping)](f: Callable[[_K], _K2], " + "coll: C[_K, _V]) -> C[_K2, _V]: ..." + ) + assert "coll: dict[_K, _V]) -> dict[_K2, _V]: ..." in out + assert "coll: Mapping[_K, _V]) -> Mapping[_K2, _V]: ..." in out + + +class TestCollectionWithXFunc: + """[C: (...)] × XFunc gives quadratic expansion.""" + + def test_quadratic_count(self): + out = get_single_func( + "def walk[C: (list, set)](f: XFunc[[_T], _V], coll: C[_T]) -> C[_V]: ..." 
+ ) + # 7 XFunc variants × 2 collection types = 14 + assert out.count("def walk(") == 14 + + def test_collection_type_preserved(self): + out = get_single_func( + "def walk[C: (list, set)](f: XFunc[[_T], _V], coll: C[_T]) -> C[_V]: ..." + ) + # list: Callable variant + assert "def walk(f: Callable[[_T], _V], coll: list[_T]) -> list[_V]: ..." in out + # set: Callable variant + assert "def walk(f: Callable[[_T], _V], coll: set[_T]) -> set[_V]: ..." in out + # list: None variant + assert "def walk(f: None, coll: list[_T]) -> list[_T]: ..." in out + # set: int variant + assert "def walk(f: int, coll: set[Sequence[_T]]) -> set[_T]: ..." in out + + +class TestCollectionWithXPred: + """[C: (...)] × XPred gives quadratic expansion with type preservation.""" + + def test_quadratic_count(self): + out = get_single_func( + "def select_keys[C: (dict, Mapping)](pred: XPred[_K], " + "coll: C[_K, _V]) -> C[_K, _V]: ..." + ) + # 7 XPred variants × 2 collection types = 14 + assert out.count("def select_keys(") == 14 + + def test_dict_preserved(self): + out = get_single_func( + "def select_keys[C: (dict, Mapping)](pred: XPred[_K], " + "coll: C[_K, _V]) -> C[_K, _V]: ..." + ) + assert ("def select_keys(pred: Callable[[_K], Any], " + "coll: dict[_K, _V]) -> dict[_K, _V]: ...") in out + + def test_mapping_preserved(self): + out = get_single_func( + "def select_keys[C: (dict, Mapping)](pred: XPred[_K], " + "coll: C[_K, _V]) -> C[_K, _V]: ..." + ) + assert ("def select_keys(pred: Callable[[_K], Any], " + "coll: Mapping[_K, _V]) -> Mapping[_K, _V]: ...") in out + + def test_regex_constrains_key_in_both(self): + out = get_single_func( + "def select_keys[C: (dict, Mapping)](pred: XPred[_K], " + "coll: C[_K, _V]) -> C[_K, _V]: ..." 
+ ) + assert ("def select_keys(pred: str | bytes | re.Pattern[str], " + "coll: dict[str, _V]) -> dict[str, _V]: ...") in out + assert ("def select_keys(pred: str | bytes | re.Pattern[str], " + "coll: Mapping[str, _V]) -> Mapping[str, _V]: ...") in out + + +class TestVerbatimPreservation: + """Non-function content is preserved.""" + + def test_imports_preserved(self): + source = "import re\nfrom typing import Any\n" + out = generate_pyi(source, "test.pyih") + assert "import re" in out + assert "from typing import Any" in out + + def test_comments_preserved(self): + source = "# A comment\ndef take(n: int) -> int: ...\n" + out = generate_pyi(source, "test.pyih") + assert "# A comment" in out + + def test_comments_between_different_functions(self): + source = ( + "def foo(x: int) -> int: ...\n" + "\n" + "# Section header\n" + "\n" + "def bar(x: int) -> int: ...\n" + ) + out = generate_pyi(source, "test.pyih") + assert "# Section header" in out + assert "def foo(" in out + assert "def bar(" in out + + def test_comments_between_same_function_overloads(self): + source = ( + "def walk[C: (list, set)](f: XFunc[[_T], _V], coll: C[_T]) -> C[_V]: ...\n" + "# dict overload\n" + "def walk(f: Callable[[_K], _V], coll: dict[_K, _V]) -> dict[_K, _V]: ...\n" + ) + out = get_single_func(source) + # All walk overloads should be in one group + assert out.count("def walk(") == 15 # 7×2 + 1 passthrough + + +class TestGeneratedHeader: + """Generated files get a header.""" + + def test_header(self): + out = generate_pyi("def f(x: int) -> int: ...\n", "test.pyih") + assert "AUTOGENERATED by translate_pyih.py from test.pyih" in out + assert "DO NOT EDIT" in out diff --git a/tox.ini b/tox.ini index f97ab51..875bff5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py34, py35, py36, py37, py38, py39, py310, py311, py312, py313, pypy3, docs, lint +envlist = py34, py35, py36, py37, py38, py39, py310, py311, py312, py313, pypy3, docs, lint, typetest, stubtest [testenv] deps = -r 
test_requirements.txt
@@ -12,12 +12,34 @@ changedir = docs
 commands = sphinx-build -b html -W . _build/html
 
+[pytest]
+testpaths = tests
+
 ; TODO: get rid of flakes
 [flake8]
 max-line-length = 100
 ignore = E127,E128,E302,F403,E126,E272,E226,E301,E261,E265,E251,E303,E305,E306,E266,E731,E402,F405,W503
 exclude = docs/conf.py, .tox
 
+[testenv:typetest]
+basepython = python3.13
+deps =
+    pyright
+    mypy
+    ty
+commands =
+    python type_tests/run.py coverage
+    python type_tests/run.py pyright
+    python type_tests/run.py mypy
+    python type_tests/run.py ty
+
+[testenv:stubtest]
+basepython = python3.13
+deps =
+    mypy
+commands =
+    python -m mypy.stubtest funcy.types funcy.strings funcy.tree funcy.colls funcy.seqs funcy.funcs funcy.funcolls funcy.flow funcy.calc funcy.debug funcy.objects funcy.decorators --allowlist stubtest_allowlist.txt --mypy-config-file {toxinidir}/mypy.ini
+
 [testenv:lint]
 basepython = python3.10
 passenv = PYTHONPATH
diff --git a/translate_pyih.py b/translate_pyih.py
new file mode 100755
index 0000000..3d6da95
--- /dev/null
+++ b/translate_pyih.py
@@ -0,0 +1,660 @@
+""" +import ast +import re +from pathlib import Path + +FUNCY_DIR = Path(__file__).parent / "funcy" + +# Collection type aliases for use in .pyih files as [C: ALIAS_NAME] +COLL_ALIASES = { + 'COLLS': ['list', 'tuple', 'set', 'frozenset', 'Sequence', 'Iterator', 'Iterable'], + 'MAPS': ['dict', 'MutableMapping', 'Mapping'], + 'MUT_MAPS': ['dict', 'MutableMapping'], +} + +# Header for generated files +GENERATED_HEADER = "# THIS FILE IS AUTOGENERATED by translate_pyih.py from {source}. DO NOT EDIT.\n" + + +def find_pyih_files(): + """Find all .pyih files in funcy/.""" + return sorted(FUNCY_DIR.glob("*.pyih")) + + +# --------------------------------------------------------------------------- +# Parsing +# --------------------------------------------------------------------------- + +_XFUNC_SKIP_RE = re.compile(r'\s*#\s*xfunc_skip:\s*(.+)') + + +def extract_xfunc_skip(lines: list[str]) -> tuple[set[str], list[str]]: + """Extract xfunc_skip directives from a list of source lines. + + Returns (skip_set, remaining_lines) where skip_set contains the variant names + to skip and remaining_lines are the lines without the xfunc_skip directives. + """ + skip = set() + remaining = [] + for line in lines: + m = _XFUNC_SKIP_RE.match(line) + if m: + skip = {s.strip() for s in m.group(1).split(',')} + else: + remaining.append(line) + return skip, remaining + + +def parse_pyih(source: str) -> list[dict]: + """Parse a .pyih file into a list of items (imports, comments, functions, etc.). 
+ + Returns a list of dicts, each with: + - kind: 'verbatim' | 'func_group' + - For 'verbatim': text (str) + - For 'func_group': name (str), overloads (list of overload dicts) + Each overload dict has: + - name: function name (str) + - params: list of (name, annotation_str) + - return_type: str + - xfunc_param: name of param with XFunc/XPred annotation, or None + - xfunc_kind: 'XFunc' | 'XPred' | None + - xfunc_a: type var name for A, or None + - xfunc_b: type var name for B, or None (XFunc only) + - coll_var: name of collection type var C, or None + - coll_types: list of concrete types for C, or None + - xfunc_skip: set of variant names to skip + """ + tree = ast.parse(source) + source_lines = source.splitlines() + + items = [] + last_end = 0 # 0-indexed exclusive end of last consumed line + + body = tree.body + i = 0 + + while i < len(body): + node = body[i] + + if not isinstance(node, ast.FunctionDef): + # Non-function node: verbatim from last_end through end of this node + lines = source_lines[last_end:node.end_lineno] + if lines: + items.append({'kind': 'verbatim', 'text': '\n'.join(lines)}) + last_end = node.end_lineno + i += 1 + continue + + func_name = node.name + func_start = node.lineno - 1 # 0-indexed + + # Lines between last_end and this function's start may contain: + # - regular verbatim content (comments, blanks) + # - xfunc_skip directive applying to the first overload + pre_lines = source_lines[last_end:func_start] + xfunc_skip_first, clean_pre = extract_xfunc_skip(pre_lines) + if clean_pre: + items.append({'kind': 'verbatim', 'text': '\n'.join(clean_pre)}) + + # Collect all consecutive same-name FunctionDef overloads + overloads = [] + xfunc_skip = xfunc_skip_first + + while i < len(body) and isinstance(body[i], ast.FunctionDef) and body[i].name == func_name: + func_node = body[i] + overloads.append(parse_func_node(func_node, xfunc_skip)) + last_end = func_node.end_lineno + xfunc_skip = set() + + # Look ahead: if next node is same-name 
FunctionDef, scan the gap + # between this overload's end and next overload's start for xfunc_skip + if (i + 1 < len(body) + and isinstance(body[i + 1], ast.FunctionDef) + and body[i + 1].name == func_name): + next_start = body[i + 1].lineno - 1 + gap_lines = source_lines[func_node.end_lineno:next_start] + xfunc_skip, _ = extract_xfunc_skip(gap_lines) + + i += 1 + + items.append({'kind': 'func_group', 'name': func_name, 'overloads': overloads}) + + # Trailing content after all body nodes + trailing = source_lines[last_end:] + if trailing and '\n'.join(trailing).strip(): + items.append({'kind': 'verbatim', 'text': '\n'.join(trailing)}) + + return items + + +def annotation_to_str(node: ast.expr) -> str: + """Convert an AST annotation node back to its source string.""" + return ast.unparse(node) + + +def parse_func_node(node: ast.FunctionDef, xfunc_skip: set) -> dict: + """Parse an ast.FunctionDef into an overload dict.""" + # Collection type params: def f[C: (list, set)]() + coll_var = None + coll_types = None + for tp in node.type_params: + if isinstance(tp, ast.TypeVar) and tp.bound is not None: + if isinstance(tp.bound, ast.Tuple): + coll_var = tp.name + coll_types = [annotation_to_str(e) for e in tp.bound.elts] + break + elif isinstance(tp.bound, ast.Name) and tp.bound.id in COLL_ALIASES: + coll_var = tp.name + coll_types = COLL_ALIASES[tp.bound.id] + break + + # Build params list, including defaults as part of the type string ("type = default") + args = node.args + all_args = args.args + n_defaults = len(args.defaults) + default_offset = len(all_args) - n_defaults + + # FIX: this function looks weird, like we undo the parsing to convert back to string, + # should not do that, should work with structured data + # RES: All three string-vs-structured-data FIXes (here, XFunc regex detection at ~L207, + # and replace_typevar_in_container at ~L358) are coupled. The early stringification + # here forces regex use downstream. 
Proper fix: store ast.expr nodes in params (with + # defaults as a separate field), detect XFunc/XPred by checking isinstance(node, + # ast.Subscript) with node.value.id == 'XFunc'/'XPred', and substitute typevars via + # an AST transformer. Stringify only at final output. This touches ~15 functions across + # the whole pipeline — too large/risky for a single pass. Do it as a dedicated branch + # with incremental steps: (1) add ast.expr to param tuples alongside strings, + # (2) switch XFunc/XPred detection to AST, (3) switch substitutions to AST transforms, + # (4) remove string intermediaries. Verify output unchanged after each step. + # FIX: actually three sane approaches here: + # 1. Revert to string usage, do not mix regex and ast + # 2. Use ast everywhere + # 3. Use ast to parse but store as dicts and other nested collections, which will + # be easier to use downstream. + # Should evaluate all three (not destroying the current implementation) and compare. In the + # extreme maybe we should implement it all three ways - control that it behaves the same. + # And then compare the scripts. 
+ params = [] + for j, arg in enumerate(all_args): + pname = arg.arg + ptype = annotation_to_str(arg.annotation) if arg.annotation else '' + if j >= default_offset: + default_str = annotation_to_str(args.defaults[j - default_offset]) + ptype = f'{ptype} = {default_str}' if ptype else f'= {default_str}' + params.append((pname, ptype)) + + # *args (vararg) + if args.vararg: + va = args.vararg + ptype = annotation_to_str(va.annotation) if va.annotation else '' + params.append((f'*{va.arg}', ptype)) + + # keyword-only args + for j, arg in enumerate(args.kwonlyargs): + pname = arg.arg + ptype = annotation_to_str(arg.annotation) if arg.annotation else '' + kw_default = args.kw_defaults[j] + if kw_default is not None: + default_str = annotation_to_str(kw_default) + ptype = f'{ptype} = {default_str}' if ptype else f'= {default_str}' + params.append((pname, ptype)) + + # **kwargs + if args.kwarg: + kw = args.kwarg + ptype = annotation_to_str(kw.annotation) if kw.annotation else '' + params.append((f'**{kw.arg}', ptype)) + + return_type = annotation_to_str(node.returns) if node.returns else '' + + # Detect XFunc/XPred in params (regex on unparsed annotation strings) + xfunc_param = None + xfunc_kind = None + xfunc_a = None + xfunc_b = None + for pname, ptype in params: + # Strip default from ptype for matching + base_type = ptype.split(' = ')[0] + # FIX: on top of the above, this looks like remnant of regex parsing + # RES: Coupled with FIX at ~L164. Once params store ast.expr nodes, detect XFunc/XPred + # via isinstance(annotation, ast.Subscript) and annotation.value.id check. 
+ xf_match = re.match(r'XFunc\[\[(\w+)\],\s*(\w+)\]', base_type) + if xf_match: + xfunc_param = pname + xfunc_kind = 'XFunc' + xfunc_a = xf_match.group(1) + xfunc_b = xf_match.group(2) + break + xp_match = re.match(r'XPred\[(\w+)\]', base_type) + if xp_match: + xfunc_param = pname + xfunc_kind = 'XPred' + xfunc_a = xp_match.group(1) + break + + return { + 'name': node.name, + 'params': params, + 'return_type': return_type, + 'xfunc_param': xfunc_param, + 'xfunc_kind': xfunc_kind, + 'xfunc_a': xfunc_a, + 'xfunc_b': xfunc_b, + 'coll_var': coll_var, + 'coll_types': coll_types, + 'xfunc_skip': xfunc_skip, + } + + +# --------------------------------------------------------------------------- +# XFunc / XPred Expansion +# --------------------------------------------------------------------------- + +# Standard type var names used in generated output +TYPEVARS = {'_T', '_V', '_K', '_K2', '_V2', '_T2'} + +# The 7 XFunc variants +XFUNC_VARIANTS = [ + 'Callable', 'None', 'Set', 'Regex', 'int', 'slice', 'Mapping', +] + + +def expand_xfunc(overload: dict) -> list[str]: + """Expand an XFunc overload into 7 variant overloads.""" + a_var = overload['xfunc_a'] + b_var = overload['xfunc_b'] + skip = overload['xfunc_skip'] + results = [] + + for variant in XFUNC_VARIANTS: + if variant.lower() in {s.lower() for s in skip}: + continue + lines = generate_xfunc_variant(overload, variant, a_var, b_var) + if lines: + results.extend(lines) + + return results + + +def generate_xfunc_variant(overload: dict, variant: str, a_var: str, b_var: str) -> list[str]: + """Generate a single XFunc variant overload.""" + params = overload['params'] + return_type = overload['return_type'] + func_param = overload['xfunc_param'] + + # Build new params list with the XFunc param replaced + new_params = [] + for pname, ptype in params: + if pname == func_param: + new_ptype = get_xfunc_param_type(variant, a_var, b_var) + new_params.append((pname, new_ptype)) + else: + new_ptype = substitute_xfunc_type(ptype, 
variant, a_var, b_var, is_input=True) + new_params.append((pname, new_ptype)) + + new_return = substitute_xfunc_type(return_type, variant, a_var, b_var, is_input=False) + + func_name = overload['name'] + params_str = ', '.join(f'{n}: {t}' if t else n for n, t in new_params) + + return [ + '@overload', + f'def {func_name}({params_str}) -> {new_return}: ...', + ] + + +def get_xfunc_param_type(variant: str, a_var: str, b_var: str) -> str: + """Get the type annotation for the XFunc param in the given variant.""" + match variant: + case 'Callable': + return f'Callable[[{a_var}], {b_var}]' + case 'None': + return 'None' + case 'Set': + return f'AbstractSet[{a_var}]' + case 'Regex': + return 'str | bytes | re.Pattern[str]' + case 'int': + return 'int' + case 'slice': + return 'slice' + case 'Mapping': + return f'Mapping[{a_var}, {b_var}]' + raise ValueError(f"Unknown variant: {variant}") + + +def substitute_xfunc_type(type_str: str, variant: str, a_var: str, b_var: str | None, + is_input: bool) -> str: + """Substitute A/B type vars in a type string for the given XFunc variant.""" + s = type_str + match variant: + case 'Callable': + # A and B stay as type vars — no change needed + pass + case 'None': + # B becomes A (identity) + if b_var: + s = replace_typevar_in_container(s, b_var, a_var) + case 'Set': + # B becomes bool, A stays (but Set takes Any for the set itself) + if b_var: + s = replace_typevar_in_container(s, b_var, 'bool') + # For input types containing Iterable[A], we keep A since Set[A] constrains membership + case 'Regex': + # B becomes _ReResult | None, A becomes Any (regex erases input type info) + if b_var: + s = replace_typevar_in_container(s, b_var, '_ReResult | None') + s = replace_typevar_in_container(s, a_var, 'Any') + case 'int': + # For inputs and outputs: A becomes Sequence[_T], B becomes _T + s = replace_typevar_in_container(s, a_var, 'Sequence[_T]') + if b_var: + s = replace_typevar_in_container(s, b_var, '_T') + case 'slice': + # A becomes 
Sequence[_T], B becomes Sequence[_T] + s = replace_typevar_in_container(s, a_var, 'Sequence[_T]') + if b_var: + s = replace_typevar_in_container(s, b_var, 'Sequence[_T]') + case 'Mapping': + # A and B are used as Mapping[A, B] key/value — they stay as type vars + pass + return s + + +def replace_typevar_in_container(type_str: str, var: str, replacement: str) -> str: + """Replace a type var when it appears as a type parameter in a container. + + E.g., Iterable[A] -> Iterable[Sequence[_T]] but not standalone A. + """ + # Replace A when it appears inside brackets: [..., A, ...] or [A] + # Use word boundary to avoid partial matches + # FIX: will this be easier if we don't keep strings and work with ast nodes OR other way + # structured data? + # RES: Yes. With AST nodes, this becomes an ast.NodeTransformer that replaces Name(id=var) + # nodes with the replacement subtree. Coupled with FIX at ~L164 — see plan there. + return re.sub(r'\b' + re.escape(var) + r'\b', replacement, type_str) + + +# --------------------------------------------------------------------------- +# XPred Expansion +# --------------------------------------------------------------------------- + +def expand_xpred(overload: dict) -> list[str]: + """Expand an XPred overload into 7 variant overloads.""" + a_var = overload['xfunc_a'] + skip = overload['xfunc_skip'] + results = [] + + for variant in XFUNC_VARIANTS: + if variant.lower() in {s.lower() for s in skip}: + continue + lines = generate_xpred_variant(overload, variant, a_var) + if lines: + results.extend(lines) + + return results + + +def generate_xpred_variant(overload: dict, variant: str, a_var: str) -> list[str]: + """Generate a single XPred variant overload.""" + params = overload['params'] + return_type = overload['return_type'] + func_param = overload['xfunc_param'] + + new_params = [] + for pname, ptype in params: + if pname == func_param: + new_ptype = get_xpred_param_type(variant, a_var) + new_params.append((pname, new_ptype)) + else: + 
new_ptype = substitute_xpred_type(ptype, variant, a_var) + new_params.append((pname, new_ptype)) + + new_return = substitute_xpred_type(return_type, variant, a_var) + + func_name = overload['name'] + params_str = ', '.join(f'{n}: {t}' if t else n for n, t in new_params) + + return [ + '@overload', + f'def {func_name}({params_str}) -> {new_return}: ...', + ] + + +def get_xpred_param_type(variant: str, a_var: str) -> str: + """Get the pred type annotation for the given variant.""" + match variant: + case 'Callable': + return f'Callable[[{a_var}], Any]' + case 'None': + return 'None' + case 'Set': + return f'AbstractSet[{a_var}]' + case 'Regex': + return 'str | bytes | re.Pattern[str]' + case 'int': + return 'int' + case 'slice': + return 'slice' + case 'Mapping': + return f'Mapping[{a_var}, Any]' + raise ValueError(f"Unknown variant: {variant}") + + +def substitute_xpred_type(type_str: str, variant: str, a_var: str) -> str: + """Substitute A type var in type strings for XPred variants. + + For predicates, the element type is preserved but the input constraint changes. 
+ """ + s = type_str + match variant: + case 'Callable' | 'None': + pass # A stays as-is + case 'Set' | 'Mapping': + pass # A stays — Set[A]/Mapping[A, Any] constrains against Iterable[A] + case 'Regex': + # Regex only works on strings + s = re.sub(r'\b' + re.escape(a_var) + r'\b', 'str', s) + case 'int': + # int indexes into sequences: constrain A to Sequence[Any] + s = re.sub(r'\b' + re.escape(a_var) + r'\b', 'Sequence[Any]', s) + case 'slice': + s = re.sub(r'\b' + re.escape(a_var) + r'\b', 'Sequence[Any]', s) + return s + + +# --------------------------------------------------------------------------- +# Collection Type Expansion +# --------------------------------------------------------------------------- + +# Mapping-like types that take 2 type params +TWO_PARAM_TYPES = {'dict', 'Mapping', 'defaultdict', 'OrderedDict', 'Counter'} + + +def expand_collection_types(overload: dict) -> list[dict]: + """Expand a collection-parameterized overload into one per concrete type.""" + coll_var = overload['coll_var'] + coll_types = overload['coll_types'] + + results = [] + for ctype in coll_types: + new_overload = overload.copy() + new_overload['coll_var'] = None + new_overload['coll_types'] = None + + # Substitute C with the concrete type in params and return type + new_params = [] + for pname, ptype in overload['params']: + new_ptype = substitute_coll_type(ptype, coll_var, ctype) + new_params.append((pname, new_ptype)) + new_overload['params'] = new_params + new_overload['return_type'] = substitute_coll_type( + overload['return_type'], coll_var, ctype) + + results.append(new_overload) + return results + + +def substitute_coll_type(type_str: str, coll_var: str, ctype: str) -> str: + """Replace C[X] or C[X, Y] with concrete_type[X] or concrete_type[X, Y]. + + For tuple, C[X] becomes tuple[X, ...] (variable-length homogeneous tuple). + """ + if ctype == 'tuple': + # Replace C[...] with tuple[..., ...] 
using bracket-aware matching + result = _substitute_tuple_type(type_str, coll_var) + return result + return re.sub(r'\b' + re.escape(coll_var) + r'\b', ctype, type_str) + + +def _substitute_tuple_type(type_str: str, coll_var: str) -> str: + """Replace C[X] with tuple[X, ...] for variable-length tuple semantics.""" + # FIX: this looks way too complicated, but may go away if we go away from using strings + # Find C[ and then match brackets to find the closing ] + pattern = re.compile(r'\b' + re.escape(coll_var) + r'\[') + result = [] + pos = 0 + for m in pattern.finditer(type_str): + result.append(type_str[pos:m.start()]) + # Find matching closing bracket + bracket_start = m.end() - 1 # position of '[' + depth = 1 + i = bracket_start + 1 + while i < len(type_str) and depth > 0: + if type_str[i] == '[': + depth += 1 + elif type_str[i] == ']': + depth -= 1 + i += 1 + # Extract inner content (between [ and ]) + inner = type_str[bracket_start + 1:i - 1] + result.append(f'tuple[{inner}, ...]') + pos = i + result.append(type_str[pos:]) + return ''.join(result) + + +# --------------------------------------------------------------------------- +# Output Generation +# --------------------------------------------------------------------------- + +def generate_overload_lines(overload: dict) -> list[str]: + """Generate output lines for a single overload (after expansion).""" + func_name = overload['name'] + params_str = ', '.join(f'{n}: {t}' if t else n for n, t in overload['params']) + return [f'def {func_name}({params_str}) -> {overload["return_type"]}: ...'] + + +def generate_func_group(group: dict) -> str: + """Generate output for a function group (all overloads of same name).""" + all_lines = [] + + for overload in group['overloads']: + has_xfunc = overload['xfunc_kind'] is not None + has_coll = overload['coll_var'] is not None + + if has_coll and has_xfunc: + # Expand collection types first, then XFunc/XPred within each + for coll_overload in 
expand_collection_types(overload): + if coll_overload['xfunc_kind'] == 'XFunc': + all_lines.extend(expand_xfunc(coll_overload)) + else: + all_lines.extend(expand_xpred(coll_overload)) + elif has_coll: + # Collection expansion only + for coll_overload in expand_collection_types(overload): + all_lines.append('@overload') + all_lines.extend(generate_overload_lines(coll_overload)) + elif has_xfunc: + if overload['xfunc_kind'] == 'XFunc': + all_lines.extend(expand_xfunc(overload)) + else: + all_lines.extend(expand_xpred(overload)) + else: + # Pass through as-is + all_lines.extend(generate_overload_lines(overload)) + + # Count actual function defs (non-decorator lines) + func_lines = [line for line in all_lines if line.startswith('def ')] + needs_overload = len(func_lines) > 1 + + if needs_overload: + # Add @overload before each def that doesn't already have one + result_lines = [] + for line in all_lines: + if line.startswith('def '): + # Check if previous line is already @overload + if not result_lines or result_lines[-1] != '@overload': + result_lines.append('@overload') + result_lines.append(line) + return '\n'.join(result_lines) + else: + # Single def, no @overload needed — strip any existing @overload + return '\n'.join(line for line in all_lines if line != '@overload') + + +def generate_pyi(source: str, source_name: str) -> str: + """Generate .pyi content from .pyih source.""" + items = parse_pyih(source) + output_parts = [GENERATED_HEADER.format(source=source_name)] + + for item in items: + if item['kind'] == 'verbatim': + output_parts.append(item['text']) + elif item['kind'] == 'func_group': + output_parts.append(generate_func_group(item)) + + result = '\n'.join(output_parts) + + # Clean up: remove excessive blank lines (3+ -> 2), ensure trailing newline + result = re.sub(r'\n{3,}', '\n\n', result) + if not result.endswith('\n'): + result += '\n' + + return result + + +# --------------------------------------------------------------------------- +# Main +# 
def main():
    """Regenerate every .pyi file next to its .pyih source."""
    pyih_files = find_pyih_files()
    if not pyih_files:
        print("No .pyih files found in funcy/")
        return

    for pyih_path in pyih_files:
        pyi_path = pyih_path.with_suffix('.pyi')
        source_name = pyih_path.name

        print(f"Generating {pyi_path.name} from {source_name}...")
        output = generate_pyi(pyih_path.read_text(), source_name)
        pyi_path.write_text(output)
        print(f" -> {pyi_path}")

    print("Done.")


if __name__ == '__main__':
    main()

# ===========================================================================
# (patch boundary: a new file begins here — type_tests/run.py)
# ===========================================================================

#!/usr/bin/env python3
"""Runner for type-checking tests.

Runs a type checker (pyright, mypy, or ty) on type_tests/ and validates
that errors match the expected markers in test files.

Markers:
    # E: — must produce a type error (real error we catch)
    # XFAIL: — currently errors, but shouldn't ideally (all checkers)
    # XFAIL[ty]: — known failure for a specific checker
    # R: — reveal_type must match this type (substring match after normalization)

Usage:
    python type_tests/run.py pyright
    python type_tests/run.py mypy
    python type_tests/run.py ty
"""
import glob
import os
import re
import subprocess
import sys

TEST_DIR = os.path.dirname(__file__) or "."
def parse_markers(test_dir, checker):
    """Parse test files for # E:, # XFAIL:, and # R: markers.

    Returns (expected, skipped, reveals, has_error_marker) where:
      expected: {filepath: set of line numbers} — lines that must error
      skipped: {filepath: set of line numbers} — lines to ignore
      reveals: {filepath: {lineno: pattern}} — expected reveal_type patterns
      has_error_marker: {filepath: set of line numbers} — lines carrying # E:
          regardless of any XFAIL marker (used for stale-XFAIL detection)

    (The docstring previously claimed a 3-tuple; the function has always
    returned four mappings.)
    """
    expected = {}
    skipped = {}
    reveals = {}
    has_error_marker = {}  # lines with # E: regardless of XFAIL
    for filepath in sorted(glob.glob(os.path.join(test_dir, "test_*.py"))):
        exp = set()
        skip = set()
        e_lines = set()
        rev = {}
        with open(filepath) as f:
            for lineno, line in enumerate(f, 1):
                # Check for checker-specific XFAIL first (can coexist with # E:)
                xfail_match = re.search(r"# XFAIL\[([^\]]+)\]:", line)
                is_xfail_for_checker = (
                    xfail_match and checker in xfail_match.group(1).split(",")
                )
                if "# XFAIL:" in line or is_xfail_for_checker:
                    skip.add(lineno)
                if "# E:" in line:
                    e_lines.add(lineno)
                    # An # E: excused for this checker is recorded but not expected.
                    if not is_xfail_for_checker:
                        exp.add(lineno)
                # R: marker can coexist with XFAIL markers
                r = re.search(r"# R: (.+?)(?:\s*# (?:XFAIL|E:).*)?$", line)
                if r:
                    rev[lineno] = r.group(1).strip()
        abspath = os.path.abspath(filepath)
        if exp:
            expected[abspath] = exp
        if skip:
            skipped[abspath] = skip
        if rev:
            reveals[abspath] = rev
        if e_lines:
            has_error_marker[abspath] = e_lines
    return expected, skipped, reveals, has_error_marker
-> str", "(str) -> int", "_T" + """ + s = revealed + if checker == "mypy": + # mypy uses qualified names: builtins.int -> int, builtins.str -> str + s = re.sub(r'builtins\.', '', s) + # Remove module prefixes for common types (handles multiple levels like funcy.objects.X) + s = re.sub(r'\b(?:[a-z_][a-z_0-9]*\.)+([A-Z])', r'\1', s) + # mypy omits -> None in revealed types, add it back for consistency + if s.startswith("def ") and "->" not in s: + s = s + " -> None" + # Normalize Callable[..., X]: mypy "(*Any, **Any)" -> "..." + s = re.sub(r'\(\*Any, \*\*Any\)', '(...)', s) + # Normalize TypeVar suffixes: mypy uses _T`123 + s = re.sub(r'`\d+', '', s) + if checker == "pyright": + # Normalize TypeVar suffixes: pyright uses _T@func_name + s = re.sub(r'@\w+', '', s) + # Strip parentheses around union types in return position: "-> (A | B)" -> "-> A | B" + s = re.sub(r'->\s*\(([^()]+)\)', r'-> \1', s) + if checker == "ty": + # Strip positional-only marker at end of params: ", /)" -> ")" + s = re.sub(r', /\)', ')', s) + # Normalize TypeVar suffixes: ty uses _T'word + s = re.sub(r"'[a-z_]+", '', s) + # ty uses Unknown for unresolved types + s = re.sub(r'\bUnknown\b', 'Any', s) + # Strip "def name" prefix: mypy uses "def (...)", ty uses "def name(...)" + s = re.sub(r'^def \w*\s*', '', s) + # Strip generic TypeVar prefix: [_T] or [_K, _V] at start + s = re.sub(r'^\[[\w, ]+\]\s*', '', s) + # Normalize bottom type: pyright uses NoReturn, mypy/ty use Never + s = re.sub(r'\bNoReturn\b', 'Never', s) + # Normalize whitespace + s = re.sub(r'\s+', ' ', s).strip() + return s + + +def run_pyright(test_dir): + """Run pyright and parse errors and reveal_type notes.""" + result = subprocess.run( + ["pyright", "--outputjson", test_dir], + capture_output=True, text=True, + ) + errors = {} # {filepath: {lineno: message}} + reveals = {} # {filepath: {lineno: revealed_type}} + try: + import json + data = json.loads(result.stdout) + for diag in data.get("generalDiagnostics", []): + filepath = 
def run_pyright(test_dir):
    """Run pyright and parse errors and reveal_type notes."""
    import json

    proc = subprocess.run(
        ["pyright", "--outputjson", test_dir],
        capture_output=True, text=True,
    )
    errors = {}   # {filepath: {lineno: message}}
    reveals = {}  # {filepath: {lineno: revealed_type}}
    try:
        report = json.loads(proc.stdout)
        for diag in report.get("generalDiagnostics", []):
            filepath = os.path.abspath(diag["file"])
            lineno = diag["range"]["start"]["line"] + 1  # pyright uses 0-based
            severity = diag.get("severity")
            if severity == "error":
                errors.setdefault(filepath, {})[lineno] = diag.get("message", "")
            elif severity == "information":
                # reveal_type output arrives as an information diagnostic
                m = re.search(r'Type of ".+?" is "(.+)"', diag.get("message", ""))
                if m:
                    reveals.setdefault(filepath, {})[lineno] = m.group(1)
    except (json.JSONDecodeError, KeyError):
        print("Failed to parse pyright output", file=sys.stderr)
        print(proc.stdout[:2000], file=sys.stderr)
        sys.exit(2)
    return errors, reveals


def run_mypy(test_dir):
    """Run mypy and parse errors and reveal_type notes."""
    proc = subprocess.run(
        ["python", "-m", "mypy", test_dir, "--no-error-summary"],
        capture_output=True, text=True,
    )
    # mypy error format:  file.py:lineno: error: message [code]
    error_re = re.compile(r"(.+?):(\d+):\s*error:\s*(.*)")
    # mypy reveal format: file.py:lineno: note: Revealed type is "..."
    reveal_re = re.compile(r'(.+?):(\d+):\s*note:\s*Revealed type is "(.+)"')
    errors = {}   # {filepath: {lineno: message}}
    reveals = {}  # {filepath: {lineno: revealed_type}}
    for line in proc.stdout.splitlines():
        m = error_re.match(line)
        if m:
            filepath = os.path.abspath(m.group(1))
            errors.setdefault(filepath, {})[int(m.group(2))] = m.group(3).strip()
            continue
        m = reveal_re.match(line)
        if m:
            filepath = os.path.abspath(m.group(1))
            reveals.setdefault(filepath, {})[int(m.group(2))] = m.group(3)
    return errors, reveals


def run_ty(test_dir):
    """Run ty and parse errors and reveal_type notes."""
    proc = subprocess.run(
        ["ty", "check", test_dir],
        capture_output=True, text=True,
    )
    errors = {}   # {filepath: {lineno: message}}
    reveals = {}  # {filepath: {lineno: revealed_type}}
    # ty prints multi-line diagnostics:
    #   error[rule-name]: Message text
    #     --> file.py:lineno:col
    #   info[revealed-type]: Revealed type
    #     --> file.py:lineno:col
    #     | ^^^ `type_here`
    pending_error = None    # message of an error awaiting its "-->" locator
    awaiting_reveal = False
    reveal_file = None
    reveal_line = 0
    for line in (proc.stdout + proc.stderr).splitlines():
        m_err = re.match(r"\s*error\[.+?\]:\s*(.*)", line)
        if m_err:
            pending_error = m_err.group(1).strip()
            awaiting_reveal = False
        elif re.match(r"\s*info\[revealed-type\]:", line):
            awaiting_reveal = True
            pending_error = None
        elif re.match(r"\s*(?:info|warning)", line):
            # Any other diagnostic resets both states.
            pending_error = None
            awaiting_reveal = False
        elif pending_error is not None:
            loc = re.match(r"\s*-->\s*(.+?):(\d+):\d+", line)
            if loc:
                filepath = os.path.abspath(loc.group(1))
                errors.setdefault(filepath, {})[int(loc.group(2))] = pending_error
                pending_error = None
        elif awaiting_reveal:
            # Either the "^^^ `type`" payload or the "--> file:line" locator.
            payload = re.search(r'`(.+?)`\s*$', line)
            if payload and reveal_file is not None:
                reveals.setdefault(reveal_file, {})[reveal_line] = payload.group(1)
                awaiting_reveal = False
            else:
                loc = re.match(r"\s*-->\s*(.+?):(\d+):\d+", line)
                if loc:
                    reveal_file = os.path.abspath(loc.group(1))
                    reveal_line = int(loc.group(2))
    return errors, reveals


CHECKERS = {
    "pyright": run_pyright,
    "mypy": run_mypy,
    "ty": run_ty,
}

# Names intentionally not tested (stdlib re-exports, etc.)
COVERAGE_SKIP = {
    "accumulate", "chain", "contextmanager", "count", "cycle",
    "nullcontext", "partial", "reduce", "repeat", "suppress",
    "unwrap", "ContextDecorator",
}


def check_coverage(test_dir):
    """Check that all public funcy names are actually used in type tests.

    Prints a listing and exits with status 1 if any name from funcy.__all__
    (minus COVERAGE_SKIP) never appears as a real reference in a test file.
    """
    import ast
    # Make the repo root importable so `import funcy` resolves the local
    # package regardless of the current working directory.
    # BUG FIX: the previous `os.path.join(os.path.dirname(test_dir))` was a
    # no-op single-arg join, and with a relative test_dir the dirname is ""
    # — so the import only worked when run from the repo root.
    sys.path.insert(0, os.path.dirname(os.path.abspath(test_dir)))
    import funcy

    public_names = set(funcy.__all__)

    # Collect names actually used (not just imported) in test files.
    # ast.Name nodes only appear for real references, not inside import statements
    # (those use ast.alias), so any ast.Name matching a funcy name is a real usage.
    used = set()
    for filepath in sorted(glob.glob(os.path.join(test_dir, "test_*.py"))):
        with open(filepath) as f:
            tree = ast.parse(f.read())
        for node in ast.walk(tree):
            if isinstance(node, ast.Name) and node.id in public_names:
                used.add(node.id)

    missing = public_names - used - COVERAGE_SKIP
    if missing:
        print("FAIL - public funcy names not tested:")
        for name in sorted(missing):
            print(f"    {name}")
        sys.exit(1)
    else:
        print(f"OK - all {len(public_names)} public funcy names are covered "
              f"({len(COVERAGE_SKIP)} skipped)")
def find_stale_xfails(skip, error_markers, actual_lines, expected_rev, actual_rev, checker):
    """Find XFAIL lines that no longer fail.

    Two cases:
      - # E: + # XFAIL[checker]: stale if checker now catches the error
      - Pure XFAIL: stale if no longer errors and no reveal mismatch
    """
    stale = set()
    for lineno in skip:
        if lineno in error_markers:
            # Expected-error line excused for this checker: stale as soon as
            # the checker reports the error after all.
            if lineno in actual_lines:
                stale.add(lineno)
            continue
        if lineno in actual_lines:
            continue  # still errors — the XFAIL is genuine
        reveal_mismatch = (
            lineno in expected_rev
            and (lineno not in actual_rev
                 or expected_rev[lineno] != normalize_type(actual_rev[lineno], checker))
        )
        if not reveal_mismatch:
            stale.add(lineno)
    return stale


def validate_file(filepath, expected, skipped, expected_reveals, error_markers,
                  actual, actual_reveals, checker):
    """Validate a single test file, return list of failure message strings."""
    exp = expected.get(filepath, set())
    act_dict = actual.get(filepath, {})
    act = set(act_dict.keys())
    skip = skipped.get(filepath, set())
    exp_rev = expected_reveals.get(filepath, {})
    act_rev = actual_reveals.get(filepath, {})
    e_markers = error_markers.get(filepath, set())

    relpath = os.path.relpath(filepath)
    failures = []

    # Unexpected errors: actual errors on lines not marked # E: or # XFAIL:
    for line in sorted(act - exp - skip):
        failures.append(f"  UNEXPECTED ERROR: {relpath}:{line}: {act_dict.get(line, '')}")

    # Missing errors: lines marked # E: that didn't error
    for line in sorted(exp - act):
        failures.append(f"  MISSING EXPECTED ERROR: {relpath}:{line}")

    # Stale XFAILs: lines marked # XFAIL that no longer fail
    for line in sorted(find_stale_xfails(skip, e_markers, act, exp_rev, act_rev, checker)):
        failures.append(f"  STALE XFAIL (no longer fails): {relpath}:{line}")

    # Reveal type mismatches
    for lineno, pattern in sorted(exp_rev.items()):
        if lineno in skip:
            continue
        if lineno not in act_rev:
            failures.append(f"  MISSING REVEAL: {relpath}:{lineno} (expected: {pattern})")
        else:
            actual_type = normalize_type(act_rev[lineno], checker)
            if pattern != actual_type:
                failures.append(f"  REVEAL MISMATCH: {relpath}:{lineno}")
                failures.append(f"    expected: {pattern}")
                failures.append(f"    actual:   {actual_type}")

    return failures


def run_checker(checker):
    """Run a single type checker and validate results. Returns True on success."""
    print(f"Running {checker} on {TEST_DIR}...")

    expected, skipped, expected_reveals, error_markers = parse_markers(TEST_DIR, checker)
    actual, actual_reveals = CHECKERS[checker](TEST_DIR)

    # Only consider errors in test files (ignore errors in runner and library source)
    abs_test_dir = os.path.abspath(TEST_DIR)
    def is_test_file(f):
        return f.startswith(abs_test_dir) and os.path.basename(f).startswith("test_")
    actual = {f: lines for f, lines in actual.items() if is_test_file(f)}
    actual_reveals = {f: lines for f, lines in actual_reveals.items() if is_test_file(f)}

    # Collect all test files.
    # BUG FIX: include files that only carry XFAIL markers (`skipped`) —
    # previously a file with only XFAILs and no current errors was never
    # validated, so its stale XFAILs went unreported.
    all_files = sorted(set(expected) | set(actual) | set(expected_reveals)
                       | set(skipped))

    failures = []
    for filepath in all_files:
        failures.extend(validate_file(
            filepath, expected, skipped, expected_reveals, error_markers,
            actual, actual_reveals, checker))

    if not failures:
        print(f"OK - {checker}: all type errors match expectations")
        return True
    for msg in failures:
        print(msg)
    print(f"FAIL - {checker}: type error mismatches found")
    return False


def main():
    valid = {*CHECKERS, "coverage", "all"}
    if len(sys.argv) > 2 or (len(sys.argv) == 2 and sys.argv[1] not in valid):
        print(f"Usage: {sys.argv[0]} [{','.join(CHECKERS)},coverage,all]")
        sys.exit(2)

    command = sys.argv[1] if len(sys.argv) == 2 else "all"

    if command == "coverage":
        check_coverage(TEST_DIR)
        return

    checkers = list(CHECKERS) if command == "all" else [command]
    failed = [c for c in checkers if not run_checker(c)]
    if failed:
        sys.exit(1)


if __name__ == "__main__":
    main()
100644 index 0000000..f6ed1c9 --- /dev/null +++ b/type_tests/test_calc.py @@ -0,0 +1,31 @@ +from funcy import memoize, cache, make_lookuper, silent_lookuper + +# -- memoize as decorator (no args) -- +@memoize +def fib(n: int) -> int: + return n if n < 2 else fib(n - 1) + fib(n - 2) +reveal_type(fib) # R: (n: int) -> int + +# -- memoize with key_func -- +@memoize(key_func=lambda x: x % 10) +def mod_cache(x: int) -> str: + return str(x) +reveal_type(mod_cache) # R: (x: int) -> str + +# -- cache -- +@cache(60) +def cached_fn(x: int) -> str: + return str(x) +reveal_type(cached_fn) # R: (x: int) -> str + +# -- make_lookuper -- +@make_lookuper +def my_lookup() -> dict[str, int]: + return {"a": 1, "b": 2} +reveal_type(my_lookup) # R: (...) -> int + +# -- silent_lookuper -- +@silent_lookuper +def my_silent() -> dict[str, int]: + return {"a": 1, "b": 2} +reveal_type(my_silent) # R: (...) -> int | None diff --git a/type_tests/test_colls.py b/type_tests/test_colls.py new file mode 100644 index 0000000..53c39d0 --- /dev/null +++ b/type_tests/test_colls.py @@ -0,0 +1,420 @@ +from typing import Any, assert_type +from collections.abc import Iterable, Iterator, Mapping, MutableMapping, Sequence + +from funcy import ( + empty, iteritems, itervalues, + join, merge, join_with, merge_with, + walk, walk_keys, walk_values, select, select_keys, select_values, + split_keys, compact, + is_distinct, zipdict, flip, project, omit, zip_values, zip_dicts, + where, pluck, pluck_attr, invoke, + lwhere, lpluck, lpluck_attr, linvoke, + get_in, get_lax, set_in, update_in, del_in, has_path, +) +from funcy.colls import all, any, none, one, some # shadow builtins + +# Real abstract-type implementations (not concrete types cast to abstract — checkers see through that) +class StrIntMapping(Mapping[str, int]): + def __getitem__(self, k: str) -> int: return 0 + def __iter__(self) -> Iterator[str]: return iter([]) + def __len__(self) -> int: return 0 + +class StrIntMutableMapping(MutableMapping[str, int]): 
+ def __getitem__(self, k: str) -> int: return 0 + def __setitem__(self, k: str, v: int) -> None: pass + def __delitem__(self, k: str) -> None: pass + def __iter__(self) -> Iterator[str]: return iter([]) + def __len__(self) -> int: return 0 + +class IntSequence(Sequence[int]): + def __getitem__(self, index: int) -> int: return 0 # type: ignore[override] + def __len__(self) -> int: return 0 + + +_ReResult = str | tuple[str, ...] | dict[str, str] + +# Typed variables for ty TypeVar inference +si_dict: dict[str, int] = {"a": 1, "b": 2, "c": 3} +si_dict2: dict[str, int] = {"x": 10} +int_list: list[int] = [1, 2, 3] +int_set: set[int] = {1, 2, 3} +str_keys: list[str] = ["a", "b"] +int_vals: list[int] = [1, 2] +strs: list[str] = ["abc", "def"] +int_pairs: list[tuple[int, int]] = [(1, 2)] + +# Typed collections for join tests (avoid inline literals that ty can't infer) +dict_list: list[dict[str, int]] = [{"a": 1}, {"b": 2}] +int_list_list: list[list[int]] = [[1, 2], [3]] +int_set_list: list[set[int]] = [{1, 2}, {3}] + +# Typed functions (not Callable-annotated lambdas — ty can't infer those through overloads) +def pred_true(p: object) -> bool: return True +def pred_gt0(x: int) -> bool: return x > 0 +def pred_gt1(v: int) -> bool: return v > 1 +def pred_ne_c(k: str) -> bool: return k != "c" +def pred_eq_a(k: str) -> bool: return k == "a" + +# Typed dict for walk_values int/lambda tests +str_to_intlist: dict[str, Sequence[int]] = {"a": [1, 2], "b": [3, 4]} +str_to_int1: dict[str, int] = {"a": 1} + +# -- merge -- +reveal_type(merge(si_dict, si_dict2)) # R: dict[str, int] +reveal_type(merge(int_list, int_list)) # R: list[int] +reveal_type(merge(int_set, int_set)) # R: set[int] + +# -- join: may be None -- +reveal_type(join(dict_list)) # R: dict[str, int] | None +reveal_type(join(int_list_list)) # R: list[int] | None +reveal_type(join(int_set_list)) # R: set[int] | None +# Canary: detect when ty starts inferring types of inline list literals +assert_type(join([{"a": 1}, {"b": 
2}]), dict[str, int] | None) # XFAIL[ty]: inline list literal Unknown + +# -- walk_keys: Callable transforms keys, values preserved -- +def str_key_to_int(k: str) -> int: return ord(k) +reveal_type(walk_keys(str_key_to_int, si_dict)) # R: dict[int, int] +reveal_type(walk_keys(str.upper, si_dict)) # R: dict[str, int] +# walk_keys: Canary for ty builtin overload resolution +reveal_type(walk_keys(len, si_dict)) # R: dict[int, int] # XFAIL[ty]: len overload not matched +# walk_keys: None = identity +reveal_type(walk_keys(None, si_dict)) # R: dict[str, int] +# walk_keys: Set = membership -> bool keys +reveal_type(walk_keys({"a", "b"}, si_dict)) # R: dict[bool, int] +# walk_keys: Mapping = lookup -> mapped key type +key_map: dict[str, int] = {"a": 1, "b": 2} +reveal_type(walk_keys(key_map, si_dict)) # R: dict[int, int] +# walk_keys: int = itemgetter on sequence keys +seq_key_dict: dict[Sequence[int], int] = {(1, 2): 10, (3, 4): 20} +reveal_type(walk_keys(0, seq_key_dict)) # R: dict[int, int] +# walk_keys: str/regex = regex finder on keys +reveal_type(walk_keys(r"\d+", si_dict)) # R: dict[str | tuple[str, ...] 
| dict[str, str] | None, int] + +# -- walk_values: Callable transforms values, keys preserved -- +def int_to_str(v: int) -> str: return str(v) +reveal_type(walk_values(int_to_str, si_dict)) # R: dict[str, str] +reveal_type(walk_values(pred_gt0, str_to_int1)) # R: dict[str, bool] +# walk_values: None = identity +reveal_type(walk_values(None, si_dict)) # R: dict[str, int] +# walk_values: Set = membership -> bool values +reveal_type(walk_values({1, 2}, si_dict)) # R: dict[str, bool] +# walk_values: int = itemgetter on sequence values +reveal_type(walk_values(0, str_to_intlist)) # R: dict[str, int] +# walk_values: slice = subsequence of sequence values +reveal_type(walk_values(slice(0, 2), str_to_intlist)) # R: dict[str, Sequence[int]] +# walk_values: Mapping = lookup -> mapped value type +val_map: dict[int, str] = {1: "one", 2: "two"} +reveal_type(walk_values(val_map, si_dict)) # R: dict[str, str] +# walk_values: str/regex = regex finder on values +str_dict: dict[str, str] = {"a": "123", "b": "abc"} +reveal_type(walk_values(r"\d+", str_dict)) # R: dict[str, str | tuple[str, ...] 
| dict[str, str] | None] + +# -- walk: Callable transforms items, preserves collection type -- +reveal_type(walk(int_to_str, int_list)) # R: list[str] +reveal_type(walk(int_to_str, int_set)) # R: set[str] +# walk: XFunc variants on list/set +reveal_type(walk(None, int_list)) # R: list[int] +reveal_type(walk(None, int_set)) # R: set[int] +# walk: dict with properly typed pair function +def swap_pair(pair: tuple[str, int]) -> tuple[int, str]: return (pair[1], str(pair[0])) +reveal_type(walk(swap_pair, si_dict)) # R: dict[int, str] +# walk: dict with untyped/extended function falls back to dict[Any, Any] +reveal_type(walk(int_to_str, si_dict)) # R: dict[Any, Any] +# walk: frozenset +int_fset: frozenset[int] = frozenset({1, 2, 3}) +reveal_type(walk(int_to_str, int_fset)) # R: frozenset[str] +# -- walk_keys: always returns dict -- +reveal_type(walk_keys(str_key_to_int, si_dict)) # R: dict[int, int] +real_mapping = StrIntMapping() +real_mutable_mapping = StrIntMutableMapping() +# walk: Mapping with typed pair function returns dict +reveal_type(walk(swap_pair, real_mapping)) # R: Mapping[int, str] +# walk: MutableMapping with typed pair function returns dict +reveal_type(walk(swap_pair, StrIntMutableMapping())) # R: MutableMapping[int, str] +reveal_type(walk_keys(str_key_to_int, real_mapping)) # R: Mapping[int, int] +# walk: collection of pairs (list[tuple[K, V]]) — handled by list XFunc overload +str_int_pairs: list[tuple[str, int]] = [("a", 1), ("b", 2)] +def transform_pair(p: tuple[str, int]) -> tuple[int, str]: return (p[1], str(p[0])) +reveal_type(walk(transform_pair, str_int_pairs)) # R: list[tuple[int, str]] +reveal_type(walk(None, str_int_pairs)) # R: list[tuple[str, int]] +reveal_type(walk(str, str_int_pairs)) # R: list[str] # XFAIL[ty]: TypeVar inference gives list[tuple[str, int]] + +# -- walk_keys: MutableMapping returns dict -- +reveal_type(walk_keys(str_key_to_int, real_mutable_mapping)) # R: MutableMapping[int, int] +reveal_type(walk_keys(None, 
real_mutable_mapping)) # R: MutableMapping[str, int] +# -- walk_keys: collection of pairs preserves collection type -- +reveal_type(walk_keys(str_key_to_int, str_int_pairs)) # R: list[tuple[int, int]] +reveal_type(walk_keys(str.upper, str_int_pairs)) # R: list[tuple[str, int]] +reveal_type(walk_keys(None, str_int_pairs)) # R: list[tuple[str, int]] +reveal_type(walk_keys({"a", "b"}, str_int_pairs)) # R: list[tuple[bool, int]] +reveal_type(walk_keys(key_map, str_int_pairs)) # R: list[tuple[int, int]] +# walk_keys: collection of pairs with set +str_int_pair_set: set[tuple[str, int]] = {("a", 1), ("b", 2)} +reveal_type(walk_keys(str_key_to_int, str_int_pair_set)) # R: set[tuple[int, int]] +reveal_type(walk_keys(None, str_int_pair_set)) # R: set[tuple[str, int]] + +# -- walk_values: always returns dict -- +reveal_type(walk_values(int_to_str, si_dict)) # R: dict[str, str] +reveal_type(walk_values(int_to_str, real_mapping)) # R: Mapping[str, str] +# -- walk_values: MutableMapping returns dict -- +reveal_type(walk_values(int_to_str, real_mutable_mapping)) # R: MutableMapping[str, str] +reveal_type(walk_values(None, real_mutable_mapping)) # R: MutableMapping[str, int] +# -- walk_values: collection of pairs preserves collection type -- +reveal_type(walk_values(int_to_str, str_int_pairs)) # R: list[tuple[str, str]] +reveal_type(walk_values(None, str_int_pairs)) # R: list[tuple[str, int]] +reveal_type(walk_values({1, 2}, str_int_pairs)) # R: list[tuple[str, bool]] +reveal_type(walk_values(val_map, str_int_pairs)) # R: list[tuple[str, str]] +# walk_values: collection of pairs with set +reveal_type(walk_values(int_to_str, str_int_pair_set)) # R: set[tuple[str, str]] + +# -- select: filtering preserves type -- +reveal_type(select(pred_true, si_dict)) # R: dict[str, int] +reveal_type(select(pred_gt0, int_list)) # R: list[int] +reveal_type(select(pred_gt0, int_set)) # R: set[int] +# select: XPred variants on list +reveal_type(select(None, int_list)) # R: list[int] 
+reveal_type(select({1, 2}, int_list)) # R: list[int] # XFAIL[ty]: Set pred Any|int +int_lookup: dict[int, str] = {1: "yes", 2: "yes"} +reveal_type(select(int_lookup, int_list)) # R: list[int] +# select: frozenset +reveal_type(select(pred_gt0, int_fset)) # R: frozenset[int] +reveal_type(select(None, int_fset)) # R: frozenset[int] +# select: dict with Callable on pairs +def pair_pred(pair: tuple[str, int]) -> bool: return pair[1] > 1 +reveal_type(select(pair_pred, si_dict)) # R: dict[str, int] +# select: Mapping with Callable on pairs +reveal_type(select(pred_true, real_mapping)) # R: Mapping[str, int] +# select: MutableMapping with Callable on pairs +reveal_type(select(pred_true, real_mutable_mapping)) # R: MutableMapping[str, int] + +# -- select_keys / select_values: preserves collection type -- +d: dict[str, int] = {"a": 1, "b": 2, "c": 3} +reveal_type(select_keys(pred_ne_c, d)) # R: dict[str, int] +reveal_type(select_values(pred_gt1, d)) # R: dict[str, int] +# select_keys / select_values with real Mapping input preserves Mapping type +reveal_type(select_keys(pred_ne_c, real_mapping)) # R: Mapping[str, int] +reveal_type(select_values(pred_gt1, real_mapping)) # R: Mapping[str, int] +# select_keys / select_values with MutableMapping +reveal_type(select_keys(pred_ne_c, real_mutable_mapping)) # R: MutableMapping[str, int] +reveal_type(select_values(pred_gt1, real_mutable_mapping)) # R: MutableMapping[str, int] + +# -- compact: preserves type -- +reveal_type(compact(d)) # R: dict[str, int] +maybe_list: list[int | None] = [0, 1, None, 2] +reveal_type(compact(maybe_list)) # R: list[int] +# compact: set, frozenset +maybe_set: set[int | None] = {0, 1, None, 2} +reveal_type(compact(maybe_set)) # R: set[int] +maybe_fset: frozenset[int | None] = frozenset({0, 1, None, 2}) +reveal_type(compact(maybe_fset)) # R: frozenset[int] +# compact: dict with None values — strips None +maybe_dict: dict[str, int | None] = {"a": 1, "b": None} +reveal_type(compact(maybe_dict)) # R: 
dict[str, int] +# compact: Mapping and MutableMapping without None +reveal_type(compact(real_mapping)) # R: Mapping[str, int] +reveal_type(compact(real_mutable_mapping)) # R: MutableMapping[str, int] +# compact: collection of pairs as list[tuple[K, V]] +compact_pairs: list[tuple[str, int | None]] = [("a", 1), ("b", None), ("c", 0)] +reveal_type(compact(compact_pairs)) # R: list[tuple[str, int | None]] +# compact on list without None in element type +non_none_list: list[int] = [0, 1, 2] +reveal_type(compact(non_none_list)) # R: list[int] + +# -- empty: preserves type -- +reveal_type(empty(d)) # R: dict[str, int] +reveal_type(empty(int_list)) # R: list[int] + +# -- iteritems / itervalues -- +reveal_type(iteritems(d)) # R: Iterable[tuple[str, int]] +reveal_type(itervalues(d)) # R: Iterable[int] + +# -- split_keys -- +reveal_type(split_keys(pred_eq_a, d)) # R: tuple[dict[str, int], dict[str, int]] +reveal_type(split_keys(r"\d+", d)) # R: tuple[dict[str, int], dict[str, int]] + +# -- select_keys: XPred variants -- +reveal_type(select_keys(None, d)) # R: dict[str, int] +reveal_type(select_keys({"a", "b"}, d)) # R: dict[str, int] # XFAIL[ty]: Set pred Any|str +reveal_type(select_keys(r"\w+", d)) # R: dict[str, int] +key_lookup: dict[str, int] = {"a": 1, "b": 2} +reveal_type(select_keys(key_lookup, d)) # R: dict[str, int] + +# -- select_values: XPred variants -- +reveal_type(select_values(None, d)) # R: dict[str, int] +reveal_type(select_values({1, 2}, d)) # R: dict[str, int] # XFAIL[ty]: Set pred Any|int +int_to_int_map: dict[int, str] = {1: "yes", 2: "yes"} +reveal_type(select_values(int_to_int_map, d)) # R: dict[str, int] + +# -- select_keys / select_values: collection of pairs -- +sk_pairs: list[tuple[str, int]] = [("a", 1), ("b", 2), ("c", 3)] +reveal_type(select_keys(pred_ne_c, sk_pairs)) # R: list[tuple[str, int]] +reveal_type(select_values(pred_gt1, sk_pairs)) # R: list[tuple[str, int]] +reveal_type(select_keys(None, sk_pairs)) # R: list[tuple[str, int]] 
+reveal_type(select_values(None, sk_pairs)) # R: list[tuple[str, int]] +reveal_type(select_keys({"a", "b"}, sk_pairs)) # R: list[tuple[str, int]] # XFAIL[ty]: Set pred Any|str +reveal_type(select_values({1, 2}, sk_pairs)) # R: list[tuple[str, int]] # XFAIL[ty]: Set pred Any|int +reveal_type(select_keys(r"[ab]", sk_pairs)) # R: list[tuple[str, int]] +sk_pair_set: set[tuple[str, int]] = {("a", 1), ("b", 2)} +reveal_type(select_keys(pred_ne_c, sk_pair_set)) # R: set[tuple[str, int]] +reveal_type(select_values(pred_gt1, sk_pair_set)) # R: set[tuple[str, int]] +reveal_type(select_keys(None, sk_pair_set)) # R: set[tuple[str, int]] +reveal_type(select_values(None, sk_pair_set)) # R: set[tuple[str, int]] + +# -- select_keys / select_values with Mapping: XPred variants -- +reveal_type(select_keys(None, real_mapping)) # R: Mapping[str, int] +reveal_type(select_keys({"a"}, real_mapping)) # R: Mapping[str, int] # XFAIL[ty]: Set pred Any|str +reveal_type(select_values(None, real_mapping)) # R: Mapping[str, int] +reveal_type(select_values({1, 2}, real_mapping)) # R: Mapping[str, int] # XFAIL[ty]: Set pred Any|int + +# -- split_keys: XPred variants -- +reveal_type(split_keys(None, d)) # R: tuple[dict[str, int], dict[str, int]] +reveal_type(split_keys({"a"}, d)) # R: tuple[dict[str, int], dict[str, int]] # XFAIL[ty]: Set pred Any|str +reveal_type(split_keys(key_lookup, d)) # R: tuple[dict[str, int], dict[str, int]] + +# -- some: XPred variants -- +reveal_type(some(r"\d+", strs)) # R: str | None +reveal_type(some({1, 2}, int_list)) # R: int | None # XFAIL[ty]: Set pred Any|int + +# -- flip / project / omit: preserve collection type -- +reveal_type(flip(d)) # R: dict[int, str] +reveal_type(project(d, str_keys)) # R: dict[str, int] +reveal_type(omit(d, str_keys)) # R: dict[str, int] +# flip / project / omit with real Mapping +reveal_type(flip(real_mapping)) # R: Mapping[int, str] +reveal_type(project(real_mapping, ["a"])) # R: Mapping[str, int] # XFAIL[ty]: TypeVar unification across 
Mapping + Iterable params gives str | Unknown +reveal_type(omit(real_mapping, ["a"])) # R: Mapping[str, int] # XFAIL[ty]: TypeVar unification across Mapping + Iterable params gives str | Unknown +# flip / project / omit with real MutableMapping +reveal_type(flip(real_mutable_mapping)) # R: MutableMapping[int, str] +reveal_type(project(real_mutable_mapping, ["a"])) # R: MutableMapping[str, int] # XFAIL[ty]: TypeVar unification across MutableMapping + Iterable params gives str | Unknown +reveal_type(omit(real_mutable_mapping, ["a"])) # R: MutableMapping[str, int] # XFAIL[ty]: TypeVar unification across MutableMapping + Iterable params gives str | Unknown +# flip with collection of pairs +reveal_type(flip(str_int_pairs)) # R: list[tuple[int, str]] +reveal_type(flip(str_int_pair_set)) # R: set[tuple[int, str]] +int_str_fset_pairs: frozenset[tuple[int, str]] = frozenset({(1, "a"), (2, "b")}) +reveal_type(flip(int_str_fset_pairs)) # R: frozenset[tuple[str, int]] + +# -- zipdict -- +reveal_type(zipdict(str_keys, int_vals)) # R: dict[str, int] +reveal_type(zipdict(range(3), str_keys)) # R: dict[int, str] + +# -- bool-returning functions -- +reveal_type(is_distinct(int_list)) # R: bool +reveal_type(is_distinct(int_list, int_to_str)) # R: bool +reveal_type(is_distinct(strs, None)) # R: bool +reveal_type(is_distinct(int_list, {1, 2})) # R: bool +reveal_type(all(int_list)) # R: bool +reveal_type(all(pred_gt0, int_list)) # R: bool +reveal_type(any(int_list)) # R: bool +reveal_type(any(pred_gt0, int_list)) # R: bool +reveal_type(none(int_list)) # R: bool +reveal_type(none(pred_gt0, int_list)) # R: bool +reveal_type(one(int_list)) # R: bool +reveal_type(one(pred_gt0, int_list)) # R: bool +reveal_type(has_path(si_dict, str_keys)) # R: bool + +# -- some returns element type -- +maybe_ints: list[int | None] = [0, None, 3] +reveal_type(some(maybe_ints)) # R: int | None +reveal_type(some(pred_gt0, int_list)) # R: int | None + +# -- where / lwhere: preserves element type -- +records: 
list[dict[str, int]] = [{"name": 1, "age": 2}] +reveal_type(where(records, name=1)) # R: Iterator[dict[str, int]] +reveal_type(lwhere(records, name=1)) # R: list[dict[str, int]] + +# -- pluck / lpluck -- +reveal_type(pluck("name", records)) # R: Iterator[int] +reveal_type(lpluck("name", records)) # R: list[int] + +# -- pluck_attr / lpluck_attr (dynamic attr, Any is correct) -- +reveal_type(pluck_attr("real", int_list)) # R: Iterator[Any] +reveal_type(lpluck_attr("real", int_list)) # R: list[Any] + +# -- invoke / linvoke (dynamic method, Any is correct) -- +reveal_type(invoke(strs, "upper")) # R: Iterator[Any] +reveal_type(linvoke(strs, "upper")) # R: list[Any] + +# -- zip_values / zip_dicts -- +d1: dict[str, int] = {"a": 1, "b": 2} +d2: dict[str, int] = {"a": 3, "b": 4} +reveal_type(zip_values(d1, d2)) # R: Iterator[tuple[int, ...]] +reveal_type(zip_dicts(d1, d2)) # R: Iterator[tuple[str, tuple[int, ...]]] + +# -- get_in / set_in / update_in / del_in: nested access -- +nested: dict[str, dict[str, int]] = {"a": {"b": 1}} +reveal_type(get_in(nested, ["a", "b"])) # R: Any +reveal_type(get_lax(nested, ["a", "b"])) # R: Any +# set_in / update_in / del_in preserve collection type +reveal_type(set_in(nested, ["a", "b"], 42)) # R: dict[str, dict[str, int]] +def inc(x: int) -> int: return x + 1 +reveal_type(update_in(nested, ["a", "b"], inc)) # R: dict[str, dict[str, int]] +reveal_type(del_in(nested, ["a", "b"])) # R: dict[str, dict[str, int]] +# set_in / update_in / del_in with MutableMapping +reveal_type(set_in(real_mutable_mapping, ["a"], 42)) # R: MutableMapping[str, int] +reveal_type(update_in(real_mutable_mapping, ["a"], inc)) # R: MutableMapping[str, int] +reveal_type(del_in(real_mutable_mapping, ["a"])) # R: MutableMapping[str, int] +# set_in / update_in / del_in with list +nested_list: list[int] = [1, 2, 3] +reveal_type(set_in(nested_list, [0], 42)) # R: list[int] +reveal_type(update_in(nested_list, [0], inc)) # R: list[int] +reveal_type(del_in(nested_list, [0])) # 
R: list[int] + +# -- join_with / merge_with -- +dict_pair2: list[dict[str, int]] = [d1, d2] +def add_all(xs: list[int]) -> int: return 0 +reveal_type(join_with(add_all, dict_pair2)) # R: dict[str, int] +reveal_type(merge_with(add_all, d1, d2)) # R: dict[str, int] +# wider Callable: accepts Iterable[int] instead of list[int] +def sum_iter(xs: Iterable[int]) -> int: return 0 +reveal_type(join_with(sum_iter, dict_pair2)) # R: dict[str, int] +reveal_type(merge_with(sum_iter, d1, d2)) # R: dict[str, int] +# wrong value type: Callable accepts list[str] but dicts have int values +def join_strs(xs: list[str]) -> str: return "" +join_with(join_strs, dict_pair2) # E: value type mismatch +merge_with(join_strs, d1, d2) # E: value type mismatch + +# -- Extended function protocol in predicates (int, str) -- +reveal_type(all(r"\d+", strs)) # R: bool +reveal_type(any(0, int_pairs)) # R: bool + +# -- Extended function return type tests -- +reveal_type(walk_keys(None, si_dict)) # R: dict[str, int] +reveal_type(walk_values(None, si_dict)) # R: dict[str, int] + +# -- New collection types: tuple -- +int_tuple: tuple[int, ...] = tuple(int_list) +reveal_type(walk(int_to_str, int_tuple)) # R: tuple[str, ...] +reveal_type(walk(None, int_tuple)) # R: tuple[int, ...] +reveal_type(select(pred_gt0, int_tuple)) # R: tuple[int, ...] +reveal_type(select(None, int_tuple)) # R: tuple[int, ...] +reveal_type(compact(int_tuple)) # R: tuple[int, ...] +str_int_tuple_pairs: tuple[tuple[str, int], ...] = tuple(str_int_pairs) +reveal_type(walk_keys(str_key_to_int, str_int_tuple_pairs)) # R: tuple[tuple[int, int], ...] +reveal_type(walk_keys(None, str_int_tuple_pairs)) # R: tuple[tuple[str, int], ...] +reveal_type(walk_values(int_to_str, str_int_tuple_pairs)) # R: tuple[tuple[str, str], ...] +reveal_type(select_keys(pred_ne_c, str_int_tuple_pairs)) # R: tuple[tuple[str, int], ...] +reveal_type(select_values(pred_gt1, str_int_tuple_pairs)) # R: tuple[tuple[str, int], ...] 
+reveal_type(flip(str_int_tuple_pairs)) # R: tuple[tuple[int, str], ...] + +# -- New collection types: Sequence -- +real_sequence = IntSequence() +reveal_type(walk(int_to_str, real_sequence)) # R: Sequence[str] +reveal_type(walk(None, real_sequence)) # R: Sequence[int] +reveal_type(select(pred_gt0, real_sequence)) # R: Sequence[int] +reveal_type(compact(real_sequence)) # R: Sequence[int] + +# -- New collection types: Iterator -- +int_iter: Iterator[int] = iter([1, 2, 3]) +reveal_type(walk(int_to_str, int_iter)) # R: Iterator[str] # XFAIL[ty]: Iterator gives Any +reveal_type(select(pred_gt0, int_iter)) # R: Iterator[int] # XFAIL[ty]: Iterator gives Any + +# -- Should be errors -- +walk(int_to_str, int_list, int_list) # E: too many arguments +zipdict(123, [1, 2]) # E: not iterable +has_path(nested, 42) # E: path not iterable + +# Extended function type mismatches +walk_keys({1: "a"}, si_dict) # E: Mapping[int, str] keys don't match str keys +split_keys({1, 2}, d) # E: Set[int] pred vs str keys +select_keys({1, 2}, d) # E: Set[int] pred vs str keys +select_values(r"\d+", d) # E: regex pred vs int values diff --git a/type_tests/test_debug.py b/type_tests/test_debug.py new file mode 100644 index 0000000..2e1dcf9 --- /dev/null +++ b/type_tests/test_debug.py @@ -0,0 +1,69 @@ +from typing import assert_type +from collections.abc import Iterator +from funcy import tap, log_calls, print_calls, log_errors, print_errors +from funcy import log_enters, print_enters, log_exits, print_exits +from funcy import log_durations, print_durations +from funcy import log_iter_durations, print_iter_durations + +# -- tap preserves type -- +x: int = 42 +s: str = "hello" +reveal_type(tap(x)) # R: int # XFAIL[ty]: Literal narrowing +reveal_type(tap(s)) # R: str # XFAIL[ty]: Literal narrowing +reveal_type(tap(x, "label")) # R: int # XFAIL[ty]: Literal narrowing + +# -- log_calls / print_calls preserve function signature -- +@log_calls(print) +def logged(x: int) -> str: return str(x) 
+reveal_type(logged) # R: (x: int) -> str + +@print_calls +def printed(x: int) -> str: return str(x) +reveal_type(printed) # R: (x: int) -> str + +# -- log_enters / print_enters preserve function signature -- +@log_enters(print) +def entering(x: int) -> str: return str(x) +reveal_type(entering) # R: (x: int) -> str + +@print_enters +def entering2(x: int) -> str: return str(x) +reveal_type(entering2) # R: (x: int) -> str + +# -- log_exits / print_exits preserve function signature -- +@log_exits(print) +def exiting(x: int) -> str: return str(x) +reveal_type(exiting) # R: (x: int) -> str + +@print_exits +def exiting2(x: int) -> str: return str(x) +reveal_type(exiting2) # R: (x: int) -> str + +# -- log_errors as decorator preserves function signature -- +@log_errors(print) +def risky(x: int) -> str: return str(x) +reveal_type(risky) # R: (x: int) -> str + +# -- log_errors is a context manager -- +with log_errors(print) as ctx: + assert_type(ctx, log_errors) + +# -- print_errors is a log_errors instance -- +assert_type(print_errors, log_errors) + +# -- log_durations as decorator preserves function signature -- +@log_durations(print) +def timed(x: int) -> str: return str(x) +reveal_type(timed) # R: (x: int) -> str + +# -- log_durations is a context manager -- +with log_durations(print) as dur_ctx: + assert_type(dur_ctx, log_durations) + +# -- print_durations is a log_durations instance -- +assert_type(print_durations, log_durations) + +# -- iter durations preserves type -- +nums: list[int] = [1, 2, 3] +reveal_type(log_iter_durations(nums, print)) # R: Iterator[int] +reveal_type(print_iter_durations(nums)) # R: Iterator[int] diff --git a/type_tests/test_decorators.py b/type_tests/test_decorators.py new file mode 100644 index 0000000..fdd53cf --- /dev/null +++ b/type_tests/test_decorators.py @@ -0,0 +1,37 @@ +from funcy import decorator, wraps + +# -- simple decorator preserves function signature -- +@decorator +def my_deco(call): + return call() + +@my_deco +def 
greet(name: str) -> str: + return f"hello {name}" + +reveal_type(greet) # R: (name: str) -> str + +# -- decorator factory (multi-arg) preserves function signature -- +@decorator +def with_tag(call, tag="div"): + return call() + +@with_tag(tag="span") +def render(text: str) -> str: + return text + +reveal_type(render) # R: (text: str) -> str + +# -- decorator factory used without args preserves function signature -- +@with_tag +def render2(text: str) -> str: + return text + +reveal_type(render2) # R: (text: str) -> str + +# -- wraps preserves wrapper type -- +def original(x: int, y: str) -> bool: return True +def wrapper(*args: object, **kwargs: object) -> bool: + return original(*args, **kwargs) # type: ignore[arg-type] +wrapped = wraps(original)(wrapper) +reveal_type(wrapped) # R: (x: int, y: str) -> bool diff --git a/type_tests/test_flow.py b/type_tests/test_flow.py new file mode 100644 index 0000000..9bb3f9e --- /dev/null +++ b/type_tests/test_flow.py @@ -0,0 +1,85 @@ +from typing import Any +from collections.abc import Iterator +from funcy import ( + raiser, ignore, silent, reraise, retry, fallback, + limit_error_rate, ErrorRateExceeded, throttle, + post_processing, collecting, joining, + once, once_per, once_per_args, wrap_with, +) + +# -- raiser returns a function that raises -- +reveal_type(raiser(ValueError)) # R: (...) -> Never +reveal_type(raiser("error message")) # R: (...) 
-> Never + +# -- ignore / silent preserve function signature -- +@ignore(ValueError) +def may_fail(x: int) -> int: return x +reveal_type(may_fail) # R: (x: int) -> int + +@silent +def always_safe(x: int) -> int: return x +reveal_type(always_safe) # R: (x: int) -> int + +# -- reraise -- +reveal_type(reraise(ValueError, RuntimeError)) # R: AbstractContextManager[None, bool | None] + +# -- retry preserves function signature -- +@retry(3, errors=ValueError) +def retried() -> str: return "ok" +reveal_type(retried) # R: () -> str + +# -- throttle preserves function signature -- +@throttle(1.0) +def throttled() -> int: return 1 +reveal_type(throttled) # R: () -> int + +# -- limit_error_rate preserves function signature -- +@limit_error_rate(10, 60) +def rate_limited(x: int) -> str: return str(x) +reveal_type(rate_limited) # R: (x: int) -> str + +# -- post_processing transforms return type -- +@post_processing(list) +def gen_list() -> Iterator[int]: + yield 1 +reveal_type(gen_list) # R: () -> list[int] # XFAIL[ty]: can't infer through Callable[[_T], _T2] + +# -- collecting returns list -- +@collecting +def gen() -> Iterator[int]: + yield 1 +reveal_type(gen) # R: () -> list[int] + +# -- joining returns str -- +@joining(", ") +def gen_ints() -> Iterator[int]: + yield 1 +reveal_type(gen_ints) # R: () -> str + +# -- wrap_with preserves function signature -- +import threading +@wrap_with(threading.Lock()) +def locked(x: int) -> str: return str(x) +reveal_type(locked) # R: (x: int) -> str + +# -- once / once_per / once_per_args -- +@once +def init() -> None: pass +reveal_type(init) # R: () -> None + +@once_per("x") +def init_per(x: int) -> None: pass +reveal_type(init_per) # R: (x: int) -> None + +@once_per_args +def init_all(x: int) -> None: pass +reveal_type(init_all) # R: (x: int) -> None + +# -- ErrorRateExceeded -- +reveal_type(ErrorRateExceeded()) # R: ErrorRateExceeded + +# -- fallback -- +reveal_type(fallback(lambda: 1, lambda: 2)) # R: Any + +# -- Should be errors -- 
+raiser(42) # E: wrong arg type diff --git a/type_tests/test_funcmakers.py b/type_tests/test_funcmakers.py new file mode 100644 index 0000000..a4041c4 --- /dev/null +++ b/type_tests/test_funcmakers.py @@ -0,0 +1,80 @@ +import re +from typing import reveal_type +from funcy.funcmakers import make_func, make_pred + +def to_str(x: object) -> str: return str(x) +def gt_zero(x: int) -> bool: return x > 0 + +nums: list[int] = [1, 2, 3] + +# === make_func === + +# -- Callable: preserves return type -- +reveal_type(make_func(to_str)) # R: (...) -> str +reveal_type(make_func(gt_zero)) # R: (...) -> bool +reveal_type(make_func(int)) # R: (...) -> int + +# -- None: identity function -- +reveal_type(make_func(None)) # R: (_T) -> _T + +# -- int: itemgetter on Sequences -- +f_int = make_func(0) +reveal_type(f_int) # R: (Sequence[_T]) -> _T +reveal_type(f_int(nums)) # R: int +strs: tuple[str, ...] = ("a", "b", to_str(123)) +reveal_type(f_int(strs)) # R: str + +# -- slice: itemgetter on Sequences -- +f_slice = make_func(slice(1, 3)) +reveal_type(f_slice) # R: (Sequence[_T]) -> Sequence[_T] +reveal_type(f_slice(nums)) # R: Sequence[int] + +# -- str/bytes/Pattern: re_finder -- +reveal_type(make_func(r"\d+")) # R: (str) -> str | tuple[str, ...] | dict[str, str] | None +reveal_type(make_func(b"\\d+")) # R: (str) -> str | tuple[str, ...] | dict[str, str] | None +reveal_type(make_func(re.compile(r"\d+"))) # R: (str) -> str | tuple[str, ...] | dict[str, str] | None + +# -- Mapping: lookup function with typed key/value -- +d: dict[str, int] = {"a": 1, "b": 2} +reveal_type(make_func(d)) # R: (str) -> int + +# -- Set: membership test -- +s: frozenset[int] = frozenset({1, 2, 3}) +reveal_type(make_func(s)) # R: (int) -> bool + +# === make_pred === + +# -- Callable: preserves return type -- +reveal_type(make_pred(gt_zero)) # R: (...) -> bool +reveal_type(make_pred(str.startswith)) # R: (...) 
-> bool + +# -- None: becomes bool -- +reveal_type(make_pred(None)) # R: (Any) -> bool + +# -- int: itemgetter (used as predicate) -- +f_pred_int = make_pred(0) +reveal_type(f_pred_int) # R: (Sequence[_T]) -> _T +reveal_type(f_pred_int(nums)) # R: int + +# -- slice: itemgetter (used as predicate) -- +f_pred_slice = make_pred(slice(0, 2)) +reveal_type(f_pred_slice) # R: (Sequence[_T]) -> Sequence[_T] +reveal_type(f_pred_slice(nums)) # R: Sequence[int] + +# -- str/bytes/Pattern: re_tester (returns bool) -- +reveal_type(make_pred(r"\d+")) # R: (str) -> bool +reveal_type(make_pred(b"\\d+")) # R: (str) -> bool +reveal_type(make_pred(re.compile(r"\d+"))) # R: (str) -> bool + +# -- Mapping: lookup (used as predicate) -- +reveal_type(make_pred({"a": 1})) # R: (str) -> int # XFAIL[ty]: can't infer Mapping TypeVars + +# -- Set: membership test -- +int_set: set[int] = {1, 2, 3} +reveal_type(make_pred(int_set)) # R: (int) -> bool + +# === Errors: invalid types === +make_func(3.14) # E: wrong argument type +make_func([1, 2]) # E: wrong argument type +make_pred(3.14) # E: wrong argument type +make_pred([1, 2]) # E: wrong argument type diff --git a/type_tests/test_funcolls.py b/type_tests/test_funcolls.py new file mode 100644 index 0000000..673d7a9 --- /dev/null +++ b/type_tests/test_funcolls.py @@ -0,0 +1,18 @@ +from typing import Any, assert_type +from collections.abc import Callable +from funcy import all_fn, any_fn, none_fn, one_fn, some_fn + +# -- all_fn / any_fn / none_fn / one_fn return predicates -- +is_positive = lambda x: x > 0 +is_even = lambda x: x % 2 == 0 + +assert_type(all_fn(is_positive, is_even), Callable[..., bool]) +assert_type(any_fn(is_positive, is_even), Callable[..., bool]) +assert_type(none_fn(is_positive, is_even), Callable[..., bool]) +assert_type(one_fn(is_positive, is_even), Callable[..., bool]) + +# -- some_fn returns the first truthy result -- +assert_type(some_fn(is_positive, is_even), Callable[..., Any]) + +# -- Extended function protocol -- 
+assert_type(all_fn(r"\d+", str.isdigit), Callable[..., bool]) diff --git a/type_tests/test_funcs.py b/type_tests/test_funcs.py new file mode 100644 index 0000000..eb4ef10 --- /dev/null +++ b/type_tests/test_funcs.py @@ -0,0 +1,86 @@ +import re +from collections.abc import Iterator, Sequence +from typing import reveal_type +from funcy import ( + identity, constantly, caller, + rpartial, func_partial, + curry, rcurry, autocurry, + iffy, compose, rcompose, complement, juxt, ljuxt, +) + +# -- identity preserves type -- +x: int = 42 +s: str = "hello" +reveal_type(identity(x)) # R: int # XFAIL[ty]: Literal narrowing +reveal_type(identity(s)) # R: str # XFAIL[ty]: Literal narrowing + +# -- constantly returns a function that always returns x -- +f = constantly(x) +reveal_type(f) # R: (...) -> int # XFAIL[ty]: Literal narrowing +reveal_type(f("anything")) # R: int # XFAIL[ty]: Literal narrowing + +# -- caller -- +def add_int(a: int, b: int) -> int: return a + b +reveal_type(caller(1, 2)(add_int)) # R: int + +# -- rpartial / func_partial preserve return type -- +def add(a: int, b: int) -> int: return a + b +reveal_type(rpartial(add, 1)) # R: (...) -> int +reveal_type(func_partial(add, 1)) # R: (...) -> int + +# -- curry / rcurry -- +reveal_type(curry(add)) # R: (...) -> int +reveal_type(rcurry(add)) # R: (...) -> int + +# -- autocurry preserves function signature -- +reveal_type(autocurry(add)) # R: (a: int, b: int) -> int + +# -- iffy: Callable action preserves return type -- +def int_to_str(x: int) -> str: return str(x) +reveal_type(iffy(bool, int_to_str)) # R: (int) -> str +reveal_type(iffy(int_to_str)) # R: (...) 
-> str + +# -- iffy: XFunc variants for one-arg form -- +reveal_type(iffy(None)) # R: (_T) -> _T +lookup: dict[int, str] = {1: "a"} +reveal_type(iffy(lookup)) # R: (int) -> str +int_set: frozenset[int] = frozenset({1, 2, 3}) +reveal_type(iffy(int_set)) # R: (int) -> bool + +# -- iffy: XPred variants for two-arg form -- +reveal_type(iffy(None, int_to_str)) # R: (int) -> str +reveal_type(iffy(int_set, int_to_str)) # R: (int) -> str + +# -- compose / rcompose: Callable -- +reveal_type(compose(int_to_str, abs)) # R: (...) -> str +reveal_type(rcompose(abs, int_to_str)) # R: (...) -> str + +# -- compose: XFunc variants for single-arg form -- +reveal_type(compose(None)) # R: (_T) -> _T +reveal_type(compose(lookup)) # R: (int) -> str +reveal_type(compose(int_set)) # R: (int) -> bool +reveal_type(compose(0)) # R: (Sequence[_T]) -> _T + +# -- complement: Callable -- +reveal_type(complement(bool)) # R: (...) -> bool + +# -- complement: XPred variants -- +reveal_type(complement(int_set)) # R: (int) -> bool +reveal_type(complement(lookup)) # R: (int) -> bool +reveal_type(complement(r"\d+")) # R: (str) -> bool +reveal_type(complement(0)) # R: (Sequence[Any]) -> bool + +# -- juxt / ljuxt -- +def to_bytes(x: int) -> bytes: return str(x).encode() +reveal_type(juxt(int_to_str, to_bytes)) # R: (...) -> Iterator[str | bytes] +reveal_type(ljuxt(int_to_str, to_bytes)) # R: (...) -> list[str | bytes] + +# -- juxt / ljuxt: _XFunc fallback -- +reveal_type(juxt(int_to_str, None)) # R: (...) -> Iterator[Any] +reveal_type(ljuxt(int_to_str, None)) # R: (...) 
-> list[Any] + +# -- Should be errors -- +iffy(3.14) # E: float is not a valid action +iffy(3.14, int_to_str) # E: float is not a valid pred +compose(3.14) # E: float is not a valid function +complement(3.14) # E: float is not a valid pred diff --git a/type_tests/test_objects.py b/type_tests/test_objects.py new file mode 100644 index 0000000..5c25139 --- /dev/null +++ b/type_tests/test_objects.py @@ -0,0 +1,34 @@ +from funcy import cached_property, cached_readonly, wrap_prop, monkey, LazyObject + +# -- cached_property -- +class MyClass: + @cached_property + def value(self) -> int: + return 42 + + @cached_readonly + def ro_value(self) -> str: + return "hello" + +obj = MyClass() +reveal_type(obj.value) # R: int +reveal_type(obj.ro_value) # R: str + +# -- LazyObject -- +lazy = LazyObject(lambda: [1, 2, 3]) +reveal_type(lazy) # R: LazyObject + +# -- wrap_prop -- +import threading +reveal_type(wrap_prop(threading.Lock())) # R: (...) -> Any + +# -- monkey -- +class Target: + pass + +@monkey(Target) +def new_method(self: Target) -> int: + return 42 + +# -- Should be errors -- +cached_property(42) # E: not a callable diff --git a/type_tests/test_primitives.py b/type_tests/test_primitives.py new file mode 100644 index 0000000..09afe27 --- /dev/null +++ b/type_tests/test_primitives.py @@ -0,0 +1,23 @@ +from typing import assert_type +from funcy import isnone, notnone, inc, dec, even, odd + +# -- isnone / notnone -- +assert_type(isnone(None), bool) +assert_type(isnone(42), bool) +assert_type(notnone(None), bool) +assert_type(notnone("hello"), bool) + +# -- inc / dec -- +n: int = 1 +assert_type(inc(n), int) +assert_type(dec(n), int) + +# -- even / odd -- +assert_type(even(4), bool) +assert_type(odd(3), bool) + +# -- Should be errors -- +inc("x") # E: wrong argument type +dec("x") # E: wrong argument type +even("x") # E: wrong argument type +odd("x") # E: wrong argument type diff --git a/type_tests/test_seqs.py b/type_tests/test_seqs.py new file mode 100644 index 
0000000..a14d3aa --- /dev/null +++ b/type_tests/test_seqs.py @@ -0,0 +1,237 @@ +import re +from typing import Any, assert_type +from collections.abc import Iterator, Sequence +from funcy import ( + take, drop, first, second, nth, last, rest, butlast, ilen, + repeatedly, iterate, + lmap, lfilter, remove, lremove, lkeep, without, lwithout, + concat, lconcat, cat, lcat, flatten, lflatten, mapcat, lmapcat, + interleave, interpose, distinct, ldistinct, + split, lsplit, split_at, lsplit_at, split_by, lsplit_by, + group_by, group_by_keys, group_values, count_by, count_reps, + partition, lpartition, chunks, lchunks, partition_by, lpartition_by, + with_prev, with_next, pairwise, lzip, + reductions, lreductions, sums, lsums, +) +from funcy.seqs import map, filter, keep, takewhile, dropwhile # shadow builtins + +_ReResult = str | tuple[str, ...] | dict[str, str] + +nums: list[int] = [1, 2, 3, 4, 5] +strs: list[str] = ["a", "b", "c"] +int_pairs: list[tuple[int, int]] = [(1, 2), (3, 4)] + +# Typed helper functions +def int_to_str(x: int) -> str: return str(x) + +# -- Generators -- +def make_int() -> int: return 1 +def double_int(x: int) -> int: return x * 2 +reveal_type(repeatedly(make_int)) # R: Iterator[int] +reveal_type(repeatedly(make_int, 5)) # R: Iterator[int] +reveal_type(iterate(double_int, 1)) # R: Iterator[int] + +# -- Slicing -- +reveal_type(take(3, nums)) # R: list[int] +reveal_type(drop(1, nums)) # R: Iterator[int] +reveal_type(first(nums)) # R: int | None +reveal_type(second(nums)) # R: int | None +reveal_type(nth(0, nums)) # R: int | None +reveal_type(last(nums)) # R: int | None +reveal_type(rest(nums)) # R: Iterator[int] +reveal_type(butlast(nums)) # R: Iterator[int] +reveal_type(ilen(nums)) # R: int + +### lmap: comprehensive extended function protocol tests ### + +# Callable: type-changing +reveal_type(lmap(int_to_str, nums)) # R: list[str] +reveal_type(lmap(str.upper, strs)) # R: list[str] +def double(x: int) -> int: return x * 2 +reveal_type(lmap(double, 
nums)) # R: list[int] + +# None: identity, preserves element type +reveal_type(lmap(None, nums)) # R: list[int] +reveal_type(lmap(None, strs)) # R: list[str] + +# Set: membership test -> bool +reveal_type(lmap({1, 2, 3}, nums)) # R: list[bool] +reveal_type(lmap(frozenset({"a", "b"}), strs)) # R: list[bool] + +# str: regex finder +reveal_type(lmap(r"\d+", strs)) # R: list[str | tuple[str, ...] | dict[str, str] | None] +# bytes: regex finder +reveal_type(lmap(b"\\d+", strs)) # R: list[str | tuple[str, ...] | dict[str, str] | None] +# re.Pattern: regex finder +reveal_type(lmap(re.compile(r"\d+"), strs)) # R: list[str | tuple[str, ...] | dict[str, str] | None] + +# int: itemgetter on sequences +reveal_type(lmap(0, int_pairs)) # R: list[int] +str_pairs: list[list[str]] = [["a", "b"], ["c", "d"]] +reveal_type(lmap(0, str_pairs)) # R: list[str] +reveal_type(lmap(0, strs)) # R: list[str] + +# slice: itemgetter on sequences, returns subsequence +int_lists: list[list[int]] = [nums, nums] +reveal_type(lmap(slice(0, 2), int_lists)) # R: list[Sequence[int]] + +# Mapping: lookup, returns value type +lookup: dict[str, int] = {"a": 1, "b": 2, "c": 3} +reveal_type(lmap(lookup, strs)) # R: list[int] + +# Multi-seq callable +def add_int(a: int, b: int) -> int: return a + b +reveal_type(lmap(add_int, nums, nums)) # R: list[int] + +### map: same as lmap but returns Iterator ### + +reveal_type(map(int_to_str, nums)) # R: Iterator[str] +reveal_type(map(double, nums)) # R: Iterator[int] +# None: identity +reveal_type(map(None, nums)) # R: Iterator[int] +# Set: membership +reveal_type(map({1, 2, 3}, nums)) # R: Iterator[bool] +# Regex +reveal_type(map(r"\d+", strs)) # R: Iterator[str | tuple[str, ...] 
| dict[str, str] | None] +# int: itemgetter +reveal_type(map(0, int_pairs)) # R: Iterator[int] +# slice: itemgetter +reveal_type(map(slice(0, 2), int_lists)) # R: Iterator[Sequence[int]] +# Mapping: lookup +reveal_type(map(lookup, strs)) # R: Iterator[int] + +### filter/lfilter: predicate, element type preserved ### + +reveal_type(filter(bool, nums)) # R: Iterator[int] +reveal_type(lfilter(bool, nums)) # R: list[int] + +reveal_type(remove(bool, nums)) # R: Iterator[int] +reveal_type(lremove(bool, nums)) # R: list[int] + +# -- Keep -- +reveal_type(keep(nums)) # R: Iterator[int] +reveal_type(keep(int_to_str, nums)) # R: Iterator[str] +reveal_type(keep(None, nums)) # R: Iterator[int] +reveal_type(keep(lookup, strs)) # R: Iterator[int] +reveal_type(lkeep(nums)) # R: list[int] +reveal_type(lkeep(int_to_str, nums)) # R: list[str] +reveal_type(lkeep(None, nums)) # R: list[int] +reveal_type(lkeep(lookup, strs)) # R: list[int] + +# -- Without -- +reveal_type(without(nums, 1, 2)) # R: Iterator[int] +reveal_type(lwithout(nums, 1, 2)) # R: list[int] + +# -- Concat / Flatten -- +reveal_type(concat(nums, nums)) # R: Iterator[int] +reveal_type(lconcat(nums, nums)) # R: list[int] +reveal_type(cat([nums, nums])) # R: Iterator[int] +reveal_type(lcat([nums, nums])) # R: list[int] +# Note: flatten is recursive, can't type the leaf element statically +reveal_type(flatten([1, [2, [3]]])) # R: Iterator[Any] +reveal_type(lflatten([1, [2, [3]]])) # R: list[Any] + +# -- Mapcat -- +def int_to_list(x: int) -> list[str]: return [str(x)] +reveal_type(mapcat(int_to_list, nums)) # R: Iterator[str] +reveal_type(lmapcat(int_to_list, nums)) # R: list[str] + +# -- Interleave / Interpose -- +reveal_type(interleave(nums, nums)) # R: Iterator[int] +reveal_type(interpose(0, nums)) # R: Iterator[int] + +# -- Distinct -- +reveal_type(distinct(nums)) # R: Iterator[int] +reveal_type(distinct(nums, key=int_to_str)) # R: Iterator[int] +reveal_type(ldistinct(nums)) # R: list[int] + +# -- Takewhile / Dropwhile 
-- +reveal_type(takewhile(nums)) # R: Iterator[int] +reveal_type(takewhile(bool, nums)) # R: Iterator[int] +reveal_type(dropwhile(nums)) # R: Iterator[int] +reveal_type(dropwhile(bool, nums)) # R: Iterator[int] + +# -- Split -- +yes, no = split(bool, nums) +reveal_type(yes) # R: Iterator[int] +reveal_type(no) # R: Iterator[int] + +lyes, lno = lsplit(bool, nums) +reveal_type(lyes) # R: list[int] +reveal_type(lno) # R: list[int] + +a, b = split_at(2, nums) +reveal_type(a) # R: Iterator[int] +la, lb = lsplit_at(2, nums) +reveal_type(la) # R: list[int] + +sa, sb = split_by(bool, nums) +reveal_type(sa) # R: Iterator[int] +lsa, lsb = lsplit_by(bool, nums) +reveal_type(lsa) # R: list[int] + +# -- Grouping with named callables -- +reveal_type(group_by(int_to_str, nums)) # R: dict[str, list[int]] +def int_to_tags(x: int) -> list[str]: return ["even"] if x % 2 == 0 else ["odd"] +reveal_type(group_by_keys(int_to_tags, nums)) # R: dict[str, list[int]] +reveal_type(count_by(int_to_str, nums)) # R: dict[str, int] +reveal_type(count_reps(nums)) # R: dict[int, int] + +# group_values needs typed input (inline literal Unknown in ty) +str_int_pairs: list[tuple[str, int]] = [("a", 1), ("a", 2), ("b", 3)] +reveal_type(group_values(str_int_pairs)) # R: dict[str, list[int]] + +# -- Partitioning -- +reveal_type(partition(2, nums)) # R: Iterator[list[int]] +reveal_type(partition(2, 1, nums)) # R: Iterator[list[int]] +reveal_type(lpartition(2, nums)) # R: list[list[int]] +reveal_type(chunks(2, nums)) # R: Iterator[list[int]] +reveal_type(lchunks(2, nums)) # R: list[list[int]] +reveal_type(partition_by(int_to_str, nums)) # R: Iterator[Iterator[int]] +reveal_type(lpartition_by(int_to_str, nums)) # R: list[list[int]] + +# -- Pairing -- +reveal_type(with_prev(nums)) # R: Iterator[tuple[int, int | None]] +reveal_type(with_next(nums)) # R: Iterator[tuple[int, int | None]] +reveal_type(pairwise(nums)) # R: Iterator[tuple[int, int]] + +# -- Zip -- +reveal_type(lzip(nums, strs)) # R: list[tuple[int, 
str]] + +# -- Reductions -- +def add(a: int, b: int) -> int: return a + b +reveal_type(reductions(add, nums)) # R: Iterator[int] +reveal_type(reductions(add, nums, 0)) # R: Iterator[int] +reveal_type(lreductions(add, nums)) # R: list[int] +reveal_type(lreductions(add, nums, 0)) # R: list[int] +reveal_type(sums(nums)) # R: Iterator[int] +reveal_type(sums(nums, 0)) # R: Iterator[int] +reveal_type(lsums(nums)) # R: list[int] +reveal_type(lsums(nums, 0)) # R: list[int] + +# -- Extended function return type tests -- +reveal_type(map(None, nums)) # R: Iterator[int] +reveal_type(map({1, 2}, nums)) # R: Iterator[bool] +reveal_type(filter(None, nums)) # R: Iterator[int] +reveal_type(filter({1, 2}, nums)) # R: Iterator[int] # XFAIL[ty]: Any | int +reveal_type(group_by(None, nums)) # R: dict[int, list[int]] +reveal_type(group_by({1, 2}, nums)) # R: dict[bool, list[int]] # XFAIL[ty]: Any | int +reveal_type(count_by(None, nums)) # R: dict[int, int] + +# -- Should be errors -- +take("nope", nums) # E: wrong arg type +first(42) # E: not iterable +lmap(double, 42) # E: not iterable + +# Extended function type mismatches — predicates +filter(re.compile(r"x"), nums) # E: regex pred requires str elements +lfilter(r"\d+", nums) # E: regex pred requires str elements +remove(re.compile(r"x"), nums) # E: regex pred requires str elements +filter({1: True}, strs) # E: Mapping[int] pred vs Iterable[str] +lremove({1: True}, strs) # E: Mapping[int] pred vs Iterable[str] + +# Extended function type mismatches — mappers +map({1: "a"}, strs) # E: Mapping[int, str] vs Iterable[str] # XFAIL[ty]: not caught +lmap({1: "a"}, strs) # E: Mapping[int, str] vs Iterable[str] # XFAIL[ty]: not caught +group_by({1: "a"}, strs) # E: Mapping[int, str] vs Iterable[str] # XFAIL[ty]: not caught +count_by({1: "a"}, strs) # E: Mapping[int, str] vs Iterable[str] # XFAIL[ty]: not caught diff --git a/type_tests/test_strings.py b/type_tests/test_strings.py new file mode 100644 index 0000000..c7fcd80 --- /dev/null +++ 
b/type_tests/test_strings.py @@ -0,0 +1,45 @@ +import re +from typing import assert_type +from collections.abc import Callable, Iterator +from funcy import re_iter, re_all, re_find, re_test, re_finder, re_tester, str_join, cut_prefix, cut_suffix + +_ReResult = str | tuple[str, ...] | dict[str, str] + +# -- re_find -- +assert_type(re_find(r"\d+", "abc123"), _ReResult | None) +assert_type(re_test(r"\d+", "abc123"), bool) +assert_type(re_all(r"\d+", "abc123"), list[_ReResult]) +assert_type(re_iter(r"\d+", "abc123"), Iterator[_ReResult]) + +# -- factory functions -- +assert_type(re_finder(r"\d+"), Callable[[str], _ReResult | None]) +assert_type(re_tester(r"\d+"), Callable[[str], bool]) + +# -- re with compiled pattern -- +pattern = re.compile(r"\d+") +assert_type(re_find(pattern, "abc123"), _ReResult | None) +assert_type(re_test(pattern, "abc123"), bool) + +# -- Can't narrow return type based on regex capture groups. +# Would require a mypy plugin (pyright and ty have no plugin systems). +# No groups or single group -> ideally str +assert_type(re_find(r"\d+", "abc123"), str | None) # XFAIL: can't narrow by capture groups +assert_type(re_find(r"(\d+)", "abc123"), str | None) # XFAIL: can't narrow by capture groups +assert_type(re_all(r"\d+", "abc123"), list[str]) # XFAIL: can't narrow by capture groups +# Multiple groups -> ideally tuple[str, str] +assert_type(re_find(r"(\d+)-(\w+)", "1-a"), tuple[str, str] | None) # XFAIL: can't narrow by capture groups +assert_type(re_all(r"(\d+)-(\w+)", "1-a 2-b"), list[tuple[str, str]]) # XFAIL: can't narrow by capture groups +# Named groups -> ideally dict[str, str] +assert_type(re_find(r"(?P<name>\d+)", "123"), dict[str, str] | None) # XFAIL: can't narrow by capture groups + +# -- str_join -- +assert_type(str_join(", ", [1, 2, 3]), str) +assert_type(str_join([1, 2, 3]), str) + +# -- cut_prefix / cut_suffix -- +assert_type(cut_prefix("hello world", "hello "), str) +assert_type(cut_suffix("hello world", " world"), str) + +# -- Should be 
errors -- +re_find(123, "abc") # E: wrong regex type +cut_prefix(123, "x") # E: wrong argument type diff --git a/type_tests/test_tree.py b/type_tests/test_tree.py new file mode 100644 index 0000000..7445725 --- /dev/null +++ b/type_tests/test_tree.py @@ -0,0 +1,25 @@ +from typing import Any, assert_type +from collections.abc import Iterator +from funcy import tree_leaves, ltree_leaves, tree_nodes, ltree_nodes + +nested = [1, [2, [3, 4]], [5]] + +# -- tree_leaves -- +assert_type(tree_leaves(nested), Iterator[Any]) + +# -- ltree_leaves -- +assert_type(ltree_leaves(nested), list[Any]) + +# -- tree_nodes -- +assert_type(tree_nodes(nested), Iterator[Any]) + +# -- ltree_nodes -- +assert_type(ltree_nodes(nested), list[Any]) + +# -- With custom follow/children -- +assert_type(tree_leaves(nested, follow=lambda x: isinstance(x, list)), Iterator[Any]) +assert_type(tree_leaves(nested, follow=lambda x: isinstance(x, list), children=iter), Iterator[Any]) + +# -- Should be errors -- +tree_leaves(nested, follow="not a callable") # E: wrong argument type +tree_leaves(nested, children="not a callable") # E: wrong argument type diff --git a/type_tests/test_types.py b/type_tests/test_types.py new file mode 100644 index 0000000..01cc6ef --- /dev/null +++ b/type_tests/test_types.py @@ -0,0 +1,26 @@ +from typing import Any, assert_type +from collections.abc import Callable +from funcy import isa, is_mapping, is_set, is_seq, is_list, is_tuple, is_seqcoll, is_seqcont, iterable, is_iter + +# -- isa returns a callable predicate -- +assert_type(isa(int), Callable[[Any], bool]) +assert_type(isa(int, str), Callable[[Any], bool]) + +# -- Module-level predicates are callable -- +assert_type(is_mapping, Callable[[Any], bool]) +assert_type(is_set, Callable[[Any], bool]) +assert_type(is_seq, Callable[[Any], bool]) +assert_type(is_list, Callable[[Any], bool]) +assert_type(is_tuple, Callable[[Any], bool]) +assert_type(is_seqcoll, Callable[[Any], bool]) +assert_type(is_seqcont, Callable[[Any], bool]) 
+assert_type(iterable, Callable[[Any], bool]) +assert_type(is_iter, Callable[[Any], bool]) + +# -- Predicates return bool -- +assert_type(is_mapping({}), bool) +assert_type(is_list([1, 2]), bool) +assert_type(isa(int)(42), bool) + +# -- Should be errors -- +isa(42) # E: not a type