diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 9eede88..ea06651 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -12,10 +12,13 @@ on:
 jobs:
   build:
     runs-on: ubuntu-latest
+    permissions:
+      # For https://github.com/tsuyoshicho/action-mypy to display a report on the PR
+      pull-requests: write
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "pypy3.9"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.9"]
         include:
           - python-version: "3.11"
             use_pandas: 1
@@ -30,14 +33,26 @@ jobs:
       - name: Pylint
         run: poetry run pylint functional
       - name: black
-        run: poetry run black --check functional
-        if: always()
+        run: poetry run black --check --diff --color functional
+        if: success() || failure()
       - name: Test with pytest
-        run: poetry run pytest --cov=functional --cov-report=xml
-        if: always()
+        run: poetry run pytest --doctest-modules --cov=functional --cov-report=xml
+        if: success() || failure()
       - name: mypy
-        run: poetry run mypy functional
-        if: always()
+        run: |
+          # First run without --check-untyped-defs, since that flag can fail CI
+          poetry run mypy --warn-unused-configs --warn-redundant-casts --warn-unused-ignores functional
+          # Second run with --check-untyped-defs, ignored for CI status
+          poetry run mypy --warn-unused-configs --check-untyped-defs --warn-redundant-casts --warn-unused-ignores --extra-checks functional || true
+        if: success() || failure()
+      - uses: tsuyoshicho/action-mypy@v4
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          reporter: github-pr-review
+          level: warning
+          execute_command: poetry run mypy --warn-unused-configs --check-untyped-defs --warn-redundant-casts --warn-unused-ignores --extra-checks functional
+          filter_mode: nofilter
+        if: (success() || failure()) && matrix.python-version == '3.11' # run only on latest to avoid duplicate warnings
       - uses: codecov/codecov-action@v1
         with:
           file: ./coverage.xml
diff --git a/docs/conf.py b/docs/conf.py
index 74d1f6b..ef91773 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 # ScalaFunctional documentation build configuration file, created by
 # sphinx-quickstart on Wed Mar 11 23:00:20 2015.
diff --git a/examples/PyFunctional-pandas-tutorial.ipynb b/examples/PyFunctional-pandas-tutorial.ipynb index a1c3a2b..284123c 100644 --- a/examples/PyFunctional-pandas-tutorial.ipynb +++ b/examples/PyFunctional-pandas-tutorial.ipynb @@ -152,7 +152,7 @@ ], "source": [ "# Load the initial data using pandas, possibly do some work in pandas\n", - "df = pd.read_csv('camping_purchases.csv', header=None)\n", + "df = pd.read_csv(\"camping_purchases.csv\", header=None)\n", "df" ] }, @@ -256,7 +256,7 @@ ], "source": [ "# Show representation using PyFunctional's csv parsing\n", - "seq.csv('camping_purchases.csv')" + "seq.csv(\"camping_purchases.csv\")" ] }, { @@ -286,7 +286,7 @@ ], "source": [ "# PyFunctional doesn't try to parse the columns, perhaps an area for improvement\n", - "seq.csv('camping_purchases.csv').list()" + "seq.csv(\"camping_purchases.csv\").list()" ] }, { @@ -316,7 +316,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/functional/conftest.py b/functional/conftest.py new file mode 100644 index 0000000..d22885a --- /dev/null +++ b/functional/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from functional import seq + + +@pytest.fixture(autouse=True) +def add_seq(doctest_namespace): + doctest_namespace["seq"] = seq diff --git a/functional/execution.py b/functional/execution.py index 9088673..7653944 100644 --- a/functional/execution.py +++ b/functional/execution.py @@ -1,22 +1,30 @@ +from __future__ import annotations + from functools import partial +from typing import TYPE_CHECKING, Callable, Iterable, Iterator, Optional + from functional.util import compose, parallelize +if TYPE_CHECKING: + from functional.transformations import Transformation -class ExecutionStrategies(object): + +class ExecutionStrategies: """ Enum like object listing the types of execution strategies. """ - PRE_COMPUTE = 0 PARALLEL = 1 -class ExecutionEngine(object): +class ExecutionEngine: """ Class to perform serial execution of a Sequence evaluation. """ - def evaluate(self, sequence, transformations): + def evaluate( + self, sequence: Iterable, transformations: Iterable[Transformation] + ) -> Iterator: """ Execute the sequence of transformations in serial :param sequence: Sequence to evaluation @@ -25,11 +33,7 @@ def evaluate(self, sequence, transformations): """ result = sequence for transform in transformations: - strategies = transform.execution_strategies - if strategies is not None and ExecutionStrategies.PRE_COMPUTE in strategies: - result = transform.function(list(result)) - else: - result = transform.function(result) + result = transform.function(result) return iter(result) @@ -38,16 +42,20 @@ class ParallelExecutionEngine(ExecutionEngine): Class to perform parallel execution of a Sequence evaluation. """ - def __init__(self, processes=None, partition_size=None): + def __init__( + self, processes: Optional[int] = None, partition_size: Optional[int] = None + ): """ Set the number of processes for parallel execution. 
:param processes: Number of parallel Processes """ - super(ParallelExecutionEngine, self).__init__() + super().__init__() self.processes = processes self.partition_size = partition_size - def evaluate(self, sequence, transformations): + def evaluate( + self, sequence: Iterable, transformations: Iterable[Transformation] + ) -> Iterator: """ Execute the sequence of transformations in parallel :param sequence: Sequence to evaluation @@ -58,17 +66,15 @@ def evaluate(self, sequence, transformations): parallel = partial( parallelize, processes=self.processes, partition_size=self.partition_size ) - staged = [] + staged: list[Callable[[Iterable], Iterable]] = [] for transform in transformations: - strategies = transform.execution_strategies or {} + strategies = transform.execution_strategies if ExecutionStrategies.PARALLEL in strategies: staged.insert(0, transform.function) else: if staged: result = parallel(compose(*staged), result) staged = [] - if ExecutionStrategies.PRE_COMPUTE in strategies: - result = list(result) result = transform.function(result) if staged: result = parallel(compose(*staged), result) diff --git a/functional/io.py b/functional/io.py index 91696f9..2f68415 100644 --- a/functional/io.py +++ b/functional/io.py @@ -1,18 +1,22 @@ +from __future__ import annotations + +import builtins +import bz2 import gzip import lzma -import bz2 -import io -import builtins +from pathlib import Path +from typing import Any, ClassVar, Optional, Union -from typing import Optional, Generic, TypeVar, Any +from typing_extensions import TypeAlias +# adapted from typeshed +StrOrBytesPath: TypeAlias = Union[str, bytes, Path] +FileDescriptorOrPath: TypeAlias = Union[int, StrOrBytesPath] WRITE_MODE = "wt" -_FileConv_co = TypeVar("_FileConv_co", covariant=True) - -class ReusableFile(Generic[_FileConv_co]): +class ReusableFile: """ Class which emulates the builtin file except that calling iter() on it will return separate iterators on different file handlers (which are automatically closed when iteration stops). 
This @@ -23,7 +27,7 @@ class ReusableFile(Generic[_FileConv_co]): # pylint: disable=too-many-instance-attributes def __init__( self, - path: str, + path: StrOrBytesPath, delimiter: Optional[str] = None, mode: str = "r", buffering: int = -1, @@ -81,12 +85,12 @@ def read(self): class CompressedFile(ReusableFile): - magic_bytes: Optional[bytes] = None + magic_bytes: ClassVar[bytes] # pylint: disable=too-many-instance-attributes def __init__( self, - path: str, + path: StrOrBytesPath, delimiter: Optional[str] = None, mode: str = "rt", buffering: int = -1, @@ -95,7 +99,7 @@ def __init__( errors: Optional[str] = None, newline: Optional[str] = None, ): - super(CompressedFile, self).__init__( + super().__init__( path, delimiter=delimiter, mode=mode, @@ -112,12 +116,12 @@ def is_compressed(cls, data): class GZFile(CompressedFile): - magic_bytes: bytes = b"\x1f\x8b\x08" + magic_bytes = b"\x1f\x8b\x08" # pylint: disable=too-many-instance-attributes def __init__( self, - path: str, + path: StrOrBytesPath, delimiter: Optional[str] = None, mode: str = "rt", buffering: int = -1, @@ -126,7 +130,7 @@ def __init__( errors: Optional[str] = None, newline: Optional[str] = None, ): - super(GZFile, self).__init__( + super().__init__( path, delimiter=delimiter, mode=mode, @@ -138,41 +142,35 @@ def __init__( ) def __iter__(self): - if "t" in self.mode: - with gzip.GzipFile(self.path, compresslevel=self.compresslevel) as gz_file: - gz_file.read1 = gz_file.read - with io.TextIOWrapper( - gz_file, - encoding=self.encoding, - errors=self.errors, - newline=self.newline, - ) as file_content: - yield from file_content - else: - with gzip.open( - self.path, mode=self.mode, compresslevel=self.compresslevel - ) as file_content: - yield from file_content + with gzip.open( + self.path, + mode=self.mode, + compresslevel=self.compresslevel, + encoding=self.encoding, + errors=self.errors, + newline=self.newline, + ) as file_content: + yield from file_content - def read(self): - with gzip.GzipFile(self.path, compresslevel=self.compresslevel) as gz_file: - gz_file.read1 = gz_file.read - with io.TextIOWrapper( - gz_file, - encoding=self.encoding, - errors=self.errors, - newline=self.newline, - ) as file_content: - return file_content.read() + def read(self) -> str | bytes: + with gzip.open( + self.path, + mode=self.mode, + compresslevel=self.compresslevel, + encoding=self.encoding, + errors=self.errors, + newline=self.newline, + ) as file_content: + return file_content.read() class BZ2File(CompressedFile): - magic_bytes: bytes = b"\x42\x5a\x68" + magic_bytes = b"\x42\x5a\x68" # pylint: disable=too-many-instance-attributes def __init__( self, - path: str, + path: StrOrBytesPath, delimiter: Optional[str] = None, mode: str = "rt", buffering: int = -1, @@ -181,7 +179,7 @@ def __init__( errors: Optional[str] = None, newline: Optional[str] = None, ): - super(BZ2File, self).__init__( + super().__init__( path, delimiter=delimiter, mode=mode, @@ -216,7 +214,7 @@ def read(self): class XZFile(CompressedFile): - magic_bytes: bytes = b"\xfd\x37\x7a\x58\x5a\x00" + magic_bytes = b"\xfd\x37\x7a\x58\x5a\x00" # pylint: disable=too-many-instance-attributes def __init__( @@ -234,7 +232,7 @@ def __init__( filters=None, format=None, ): - super(XZFile, self).__init__( + super().__init__( path, delimiter=delimiter, mode=mode, @@ -278,23 +276,23 @@ def read(self): return file_content.read() -COMPRESSION_CLASSES = [GZFile, BZ2File, XZFile] -N_COMPRESSION_CHECK_BYTES = max(len(cls.magic_bytes) for cls in COMPRESSION_CLASSES) # type: ignore 
+COMPRESSION_CLASSES: list[type[CompressedFile]] = [GZFile, BZ2File, XZFile] +N_COMPRESSION_CHECK_BYTES = max(len(cls.magic_bytes) for cls in COMPRESSION_CLASSES) -def get_read_function(filename: str, disable_compression: bool): +def get_read_function(filename: FileDescriptorOrPath, disable_compression: bool): if disable_compression: return ReusableFile with open(filename, "rb") as f: start_bytes = f.read(N_COMPRESSION_CHECK_BYTES) for cls in COMPRESSION_CLASSES: - if cls.is_compressed(start_bytes): # type: ignore + if cls.is_compressed(start_bytes): return cls return ReusableFile def universal_write_open( - path: str, + path: StrOrBytesPath, mode: str, buffering: int = -1, encoding: Optional[str] = None, diff --git a/functional/lineage.py b/functional/lineage.py index fb9e266..8b187be 100644 --- a/functional/lineage.py +++ b/functional/lineage.py @@ -1,13 +1,25 @@ +from __future__ import annotations + +from collections.abc import Iterable, Iterator +from typing import Optional + from functional.execution import ExecutionEngine -from functional.transformations import CACHE_T +from functional.transformations import CACHE_T, Transformation -class Lineage(object): +class Lineage: """ Class for tracking the lineage of transformations, and applying them to a given sequence. """ - def __init__(self, prior_lineage=None, engine=None): + transformations: list[Transformation] + engine: ExecutionEngine + + def __init__( + self, + prior_lineage: Optional[Lineage] = None, + engine: Optional[ExecutionEngine] = None, + ): """ Construct an empty lineage if prior_lineage is None or if its not use it as the list of current transformations @@ -16,7 +28,7 @@ def __init__(self, prior_lineage=None, engine=None): :return: new Lineage object """ self.transformations = ( - [] if prior_lineage is None else list(prior_lineage.transformations) + [] if prior_lineage is None else prior_lineage.transformations.copy() ) self.engine = ( (engine or ExecutionEngine()) @@ -24,7 +36,7 @@ def __init__(self, prior_lineage=None, engine=None): else prior_lineage.engine ) - def __repr__(self): + def __repr__(self) -> str: """ Returns readable representation of Lineage @@ -34,7 +46,7 @@ def __repr__(self): ["sequence"] + [transform.name for transform in self.transformations] ) - def __len__(self): + def __len__(self) -> int: """ Number of transformations in lineage @@ -42,7 +54,7 @@ def __len__(self): """ return len(self.transformations) - def __getitem__(self, item): + def __getitem__(self, item: int) -> Transformation: """ Return specific transformation in lineage. :param item: Transformation to retrieve @@ -50,14 +62,14 @@ def __getitem__(self, item): """ return self.transformations[item] - def apply(self, transform): + def apply(self, transform: Transformation): """ Add the transformation to the lineage :param transform: Transformation to apply """ self.transformations.append(transform) - def evaluate(self, sequence): + def evaluate(self, sequence: Iterable) -> Iterator: """ Compute the lineage on the sequence. @@ -68,7 +80,7 @@ def evaluate(self, sequence): transformations = self.transformations[last_cache_index:] return self.engine.evaluate(sequence, transformations) - def cache_scan(self): + def cache_scan(self) -> int: """ Scan the lineage for the index of the most recent cache. 
:return: Index of most recent cache diff --git a/functional/pipeline.py b/functional/pipeline.py index 0eb0234..e069a4d 100644 --- a/functional/pipeline.py +++ b/functional/pipeline.py @@ -1,45 +1,77 @@ """ The pipeline module contains the transformations and actions API of PyFunctional """ -from operator import mul, add -import collections -from functools import reduce, wraps, partial -import json +from __future__ import annotations + +import builtins +import collections import csv -import sqlite3 +import itertools +import json import re - -from collections.abc import Iterable -from typing import List, Optional, Tuple, Union +import sqlite3 +from collections.abc import Iterable, Iterator +from functools import partial, reduce, wraps +from numbers import Number +from operator import add, mul +from typing import ( + Any, + Callable, + Generic, + Hashable, + Literal, + NoReturn, + Optional, + TypeVar, + Union, + cast, + overload, +) from tabulate import tabulate +from typing_extensions import Self, TypeVarTuple, Unpack -from functional.execution import ExecutionEngine +from functional import transformations +from functional.execution import ExecutionEngine, ExecutionStrategies +from functional.io import WRITE_MODE, StrOrBytesPath, universal_write_open from functional.lineage import Lineage from functional.util import ( - is_iterable, - is_primitive, + SupportsRichComparison, + coalesce, + identity, + is_iterable_not_list, is_namedtuple, + is_primitive, is_tabulatable, - identity, - default_value, ) -from functional.io import WRITE_MODE, universal_write_open -from functional import transformations -from functional.execution import ExecutionStrategies +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") +W = TypeVar("W") +Ts = TypeVarTuple("Ts") +Tnumber = TypeVar("Tnumber", bound=Number) + +Unset = object() -class Sequence(object): + +class Sequence(Generic[T], Iterable[T]): """ Sequence is a wrapper around any type of sequence which provides access to common functional transformations and reductions in a data pipeline style """ + engine: ExecutionEngine + _max_repr_items: Optional[int] + _base_sequence: Iterable[T] + _lineage: Lineage + no_wrap: Optional[bool] + def __init__( self, - sequence: Iterable, - transform=None, + sequence: Iterable[T], + transform: Optional[transformations.Transformation] = None, engine: Optional[ExecutionEngine] = None, max_repr_items: Optional[int] = None, no_wrap: Optional[bool] = None, @@ -62,11 +94,11 @@ def __init__( self._max_repr_items: Optional[int] = ( max_repr_items or sequence._max_repr_items ) - self._base_sequence: Union[Iterable, List, Tuple] = sequence._base_sequence + self._base_sequence: Union[Iterable, list, tuple] = sequence._base_sequence self._lineage: Lineage = Lineage( prior_lineage=sequence._lineage, engine=engine ) - elif isinstance(sequence, (list, tuple)) or is_iterable(sequence): + elif isinstance(sequence, (list, tuple)) or is_iterable_not_list(sequence): self._max_repr_items = max_repr_items self._base_sequence = sequence self._lineage = Lineage(engine=engine) @@ -76,7 +108,7 @@ def __init__( self._lineage.apply(transform) self.no_wrap = no_wrap - def __iter__(self): + def __iter__(self) -> Iterator[T]: """ Return iterator of sequence. @@ -84,7 +116,7 @@ def __iter__(self): """ return self._evaluate() - def __eq__(self, other): + def __eq__(self, other) -> bool: """ Checks for equality with the sequence's equality operator. 
@@ -93,7 +125,7 @@ def __eq__(self, other): """ return self.sequence == other - def __ne__(self, other): + def __ne__(self, other) -> bool: """ Checks for inequality with the sequence's inequality operator. @@ -102,7 +134,7 @@ def __ne__(self, other): """ return self.sequence != other - def __hash__(self): + def __hash__(self) -> NoReturn: """ Return the hash of the sequence. @@ -110,7 +142,7 @@ def __hash__(self): """ raise TypeError("unhashable type: Sequence") - def __repr__(self): + def __repr__(self) -> str: """ Return repr using sequence's repr function. @@ -122,7 +154,7 @@ def __repr__(self): else: return repr(items[: self._max_repr_items])[:-1] + ", ...]" - def __str__(self): + def __str__(self) -> str: """ Return string using sequence's string function. @@ -130,23 +162,23 @@ def __str__(self): """ return str(self.to_list()) - def __bool__(self): + def __bool__(self) -> bool: """ Returns True if size is not zero. :return: True if size is not zero """ - return self.size() != 0 + return self.len() != 0 - def __nonzero__(self): + def __nonzero__(self) -> bool: """ Returns True if size is not zero. :return: True if size is not zero """ - return self.size() != 0 + return self.len() != 0 - def __getitem__(self, item): + def __getitem__(self, item: int) -> T | Sequence: """ Gets item at given index. @@ -156,7 +188,7 @@ def __getitem__(self, item): self.cache() return _wrap(self.sequence[item]) - def __reversed__(self): + def __reversed__(self) -> Sequence[T]: """ Return reversed sequence using sequence's reverse function @@ -164,7 +196,7 @@ def __reversed__(self): """ return self._transform(transformations.reversed_t()) - def __contains__(self, item): + def __contains__(self, item) -> bool: """ Checks if item is in sequence. @@ -173,7 +205,7 @@ def __contains__(self, item): """ return self.sequence.__contains__(item) - def __add__(self, other): + def __add__(self, other) -> Sequence[T]: """ Concatenates sequence with other. @@ -185,7 +217,7 @@ def __add__(self, other): else: return Sequence(self.sequence + other, no_wrap=self.no_wrap) - def _evaluate(self): + def _evaluate(self) -> Iterator: """ Creates and returns an iterator which applies all the transformations in the lineage @@ -193,7 +225,7 @@ def _evaluate(self): """ return self._lineage.evaluate(self._base_sequence) - def _transform(self, *transforms): + def _transform(self, *transforms: transformations.Transformation) -> Sequence: """ Copies the given Sequence and appends new transformation :param transform: transform to apply or list of transforms to apply @@ -205,7 +237,7 @@ def _transform(self, *transforms): return sequence @property - def sequence(self): + def sequence(self) -> list[T]: """ Alias for to_list used internally for brevity @@ -213,7 +245,7 @@ def sequence(self): """ return self.to_list() - def cache(self, delete_lineage=False): + def cache(self, delete_lineage: bool = False) -> Self: """ Caches the result of the Sequence so far. This means that any functions applied on the pipeline before cache() are evaluated, and the result is stored in the Sequence. This is @@ -234,40 +266,34 @@ def cache(self, delete_lineage=False): self._lineage = Lineage(engine=self.engine) return self - def head(self, no_wrap: Optional[bool] = None): + def head(self, no_wrap: Optional[bool] = None) -> T | Sequence: """ - Returns the first element of the sequence. + Returns the first element of the sequence. Raises IndexError when the sequence is empty. >>> seq([1, 2, 3]).head() 1 - Raises IndexError when the sequence is empty. 
- >>> seq([]).head() Traceback (most recent call last): - ... IndexError: list index out of range :param no_wrap: If set to True, the returned value will never be wrapped with Sequence :return: first element of sequence """ - if default_value(no_wrap, self.no_wrap, False): + if coalesce(no_wrap, self.no_wrap, False): return self.sequence[0] else: return _wrap(self.take(1)[0]) - def first(self, no_wrap: Optional[bool] = None): + def first(self, no_wrap: Optional[bool] = None) -> T | Sequence: """ - Returns the first element of the sequence. + Returns the first element of the sequence. Raises IndexError when the sequence is empty. >>> seq([1, 2, 3]).first() 1 - Raises IndexError when the sequence is empty. - >>> seq([]).first() Traceback (most recent call last): - ... IndexError: list index out of range :param no_wrap: If set to True, the returned value will never be wrapped with Sequence @@ -275,7 +301,7 @@ def first(self, no_wrap: Optional[bool] = None): """ return self.head(no_wrap=no_wrap) - def head_option(self, no_wrap: Optional[bool] = None): + def head_option(self, no_wrap: Optional[bool] = None) -> T | Sequence | None: """ Returns the first element of the sequence or None, if the sequence is empty. @@ -283,7 +309,6 @@ def head_option(self, no_wrap: Optional[bool] = None): 1 >>> seq([]).head_option() - None :param no_wrap: If set to True, the returned value will never be wrapped with Sequence :return: first element of sequence or None if sequence is empty @@ -292,7 +317,7 @@ def head_option(self, no_wrap: Optional[bool] = None): return None return self.head(no_wrap=no_wrap) - def last(self, no_wrap: Optional[bool] = None): + def last(self, no_wrap: Optional[bool] = None) -> T | Sequence: """ Returns the last element of the sequence. @@ -303,18 +328,17 @@ def last(self, no_wrap: Optional[bool] = None): >>> seq([]).last() Traceback (most recent call last): - ... IndexError: list index out of range :param no_wrap: If set to True, the returned value will never be wrapped with Sequence :return: last element of sequence """ - if default_value(no_wrap, self.no_wrap, False): + if coalesce(no_wrap, self.no_wrap, False): return self.sequence[-1] else: return _wrap(self.sequence[-1]) - def last_option(self, no_wrap: Optional[bool] = None): + def last_option(self, no_wrap: Optional[bool] = None) -> T | Sequence | None: """ Returns the last element of the sequence or None, if the sequence is empty. @@ -322,7 +346,6 @@ def last_option(self, no_wrap: Optional[bool] = None): 3 >>> seq([]).last_option() - None :param no_wrap: If set to True, the returned value will never be wrapped with Sequence :return: last element of sequence or None if sequence is empty @@ -331,7 +354,7 @@ def last_option(self, no_wrap: Optional[bool] = None): return None return self.last(no_wrap=no_wrap) - def init(self): + def init(self) -> Sequence[T]: """ Returns the sequence, without its last element. @@ -340,9 +363,9 @@ def init(self): :return: sequence without last element """ - return self._transform(transformations.init_t()) + return self.drop_right(1) - def tail(self): + def tail(self) -> Sequence[T]: """ Returns the sequence, without its first element. @@ -353,7 +376,7 @@ def tail(self): """ return self._transform(transformations.tail_t()) - def inits(self): + def inits(self) -> Sequence[Sequence[T]]: """ Returns consecutive inits of the sequence. 
@@ -364,7 +387,7 @@ def inits(self): """ return self._transform(transformations.inits_t(_wrap)) - def tails(self): + def tails(self) -> Sequence[Sequence[T]]: """ Returns consecutive tails of the sequence. @@ -375,12 +398,26 @@ def tails(self): """ return self._transform(transformations.tails_t(_wrap)) - def cartesian(self, *iterables, **kwargs): + @overload + def cartesian(self, __a: Iterable[U]) -> Sequence[tuple[T, U]]: + ... + + @overload + def cartesian(self, __a: Iterable[U], __b: Iterable[V]) -> Sequence[tuple[T, U, V]]: + ... + + @overload + def cartesian( + self, __a: Iterable[U], __b: Iterable[V], __c: Iterable[W] + ) -> Sequence[tuple[T, U, V, W]]: + ... + + def cartesian(self, *iterables, repeat=1): """ Returns the cartesian product of the passed iterables with the specified number of repetitions. - The keyword argument `repeat` is read from kwargs to pass to itertools.cartesian. + Argument `repeat` is passed to itertools.product. >>> seq.range(2).cartesian(range(2)) [(0, 0), (0, 1), (1, 0), (1, 1)] @@ -389,11 +426,9 @@ def cartesian(self, *iterables, **kwargs): :param kwargs: the variable `repeat` is read from kwargs :return: cartesian product """ - return self._transform( - transformations.cartesian_t(iterables, kwargs.get("repeat", 1)) - ) + return self._transform(transformations.cartesian_t(iterables, repeat)) - def drop(self, n): + def drop(self, n: int) -> Sequence[T]: """ Drop the first n elements of the sequence. @@ -403,12 +438,9 @@ def drop(self, n): :param n: number of elements to drop :return: sequence without first n elements """ - if n <= 0: - return self._transform(transformations.drop_t(0)) - else: - return self._transform(transformations.drop_t(n)) + return self._transform(transformations.drop_t(max(0, n))) - def drop_right(self, n): + def drop_right(self, n: int) -> Sequence[T]: """ Drops the last n elements of the sequence. @@ -420,7 +452,7 @@ def drop_right(self, n): """ return self._transform(transformations.drop_right_t(n)) - def drop_while(self, func): + def drop_while(self, func: Callable[[T], object]) -> Sequence[T]: """ Drops elements in the sequence while func evaluates to True, then returns the rest. @@ -432,7 +464,7 @@ def drop_while(self, func): """ return self._transform(transformations.drop_while_t(func)) - def take(self, n): + def take(self, n: int) -> Sequence[T]: """ Take the first n elements of the sequence. @@ -442,12 +474,9 @@ def take(self, n): :param n: number of elements to take :return: first n elements of sequence """ - if n <= 0: - return self._transform(transformations.take_t(0)) - else: - return self._transform(transformations.take_t(n)) + return self._transform(transformations.take_t(max(0, n))) - def take_while(self, func): + def take_while(self, func: Callable[[T], object]) -> Sequence[T]: """ Take elements in the sequence until func evaluates to False, then return them. @@ -459,7 +488,7 @@ def take_while(self, func): """ return self._transform(transformations.take_while_t(func)) - def union(self, other): + def union(self, other: Sequence[U]) -> Sequence[Union[T, U]]: """ New sequence with unique elements from self and other. @@ -471,7 +500,7 @@ def union(self, other): """ return self._transform(transformations.union_t(other)) - def intersection(self, other): + def intersection(self, other: Sequence[T]) -> Sequence[T]: """ New sequence with unique elements present in sequence and other. 
@@ -483,7 +512,7 @@ def intersection(self, other): """ return self._transform(transformations.intersection_t(other)) - def difference(self, other): + def difference(self, other: Sequence[T]) -> Sequence[T]: """ New sequence with unique elements present in sequence but not in other. @@ -495,7 +524,7 @@ def difference(self, other): """ return self._transform(transformations.difference_t(other)) - def symmetric_difference(self, other): + def symmetric_difference(self, other: Sequence[T]) -> Sequence[T]: """ New sequence with elements in either sequence or other, but not both. @@ -507,7 +536,7 @@ def symmetric_difference(self, other): """ return self._transform(transformations.symmetric_difference_t(other)) - def map(self, func): + def map(self, func: Callable[[T], U]) -> Sequence[U]: """ Maps f onto the elements of the sequence. @@ -517,21 +546,17 @@ def map(self, func): :param func: function to map with :return: sequence with func mapped onto it """ + if func is identity: + return self # type: ignore # U is T here but mypy doesn't understand return self._transform(transformations.map_t(func)) - def select(self, func): - """ - Selects f from the elements of the sequence. - - >>> seq([1, 2, 3, 4]).select(lambda x: x * -1) - [-1, -2, -3, -4] - - :param func: function to select with - :return: sequence with func mapped onto it - """ - return self._transform(transformations.select_t(func)) + def select(self, func: Callable[[T], U]) -> Sequence[U]: + """Alias for map.""" + return self.map(func) - def starmap(self, func): + def starmap( + self: Sequence[tuple[Unpack[Ts]]], func: Callable[[Unpack[Ts]], U] + ) -> Sequence[U]: """ starmaps f onto the sequence as itertools.starmap does. @@ -543,7 +568,9 @@ def starmap(self, func): """ return self._transform(transformations.starmap_t(func)) - def smap(self, func): + def smap( + self: Sequence[tuple[Unpack[Ts]]], func: Callable[[Unpack[Ts]], U] + ) -> Sequence[U]: """ Alias to Sequence.starmap @@ -557,7 +584,7 @@ def smap(self, func): """ return self._transform(transformations.starmap_t(func)) - def for_each(self, func): + def for_each(self, func: Callable[[T], Any]) -> None: """ Executes func on each element of the sequence. @@ -571,7 +598,7 @@ def for_each(self, func): for e in self: func(e) - def peek(self, func): + def peek(self, func: Callable[[T], Any]) -> Sequence[T]: """ Executes func on each element of the sequence and returns the element @@ -586,7 +613,7 @@ def peek(self, func): """ return self._transform(transformations.peek_t(func)) - def filter(self, func): + def filter(self, func: Callable[[T], object]) -> Sequence[T]: """ Filters sequence to include only elements where func is True. @@ -598,7 +625,7 @@ def filter(self, func): """ return self._transform(transformations.filter_t(func)) - def filter_not(self, func): + def filter_not(self, func: Callable[[T], object]) -> Sequence[T]: """ Filters sequence to include only elements where func is False. @@ -610,19 +637,11 @@ def filter_not(self, func): """ return self._transform(transformations.filter_not_t(func)) - def where(self, func): - """ - Selects elements where func evaluates to True. 
- - >>> seq([-1, 1, -2, 2]).where(lambda x: x > 0) - [1, 2] - - :param func: function to filter on - :return: filtered sequence - """ - return self._transform(transformations.where_t(func)) + def where(self, func: Callable[[T], object]) -> Sequence[T]: + """Alias for filter.""" + return self.filter(func) - def count(self, func): + def count(self, func: Callable[[T], object]) -> int: """ Counts the number of elements in the sequence which satisfy the predicate func. @@ -634,7 +653,7 @@ def count(self, func): """ return sum(bool(func(element)) for element in self) - def len(self): + def len(self) -> int: """ Return length of sequence using its length function. @@ -644,9 +663,10 @@ def len(self): :return: length of sequence """ self.cache() + assert isinstance(self._base_sequence, list) return len(self._base_sequence) - def size(self): + def size(self) -> int: """ Return size of sequence using its length function. @@ -654,7 +674,7 @@ def size(self): """ return self.len() - def empty(self): + def empty(self) -> bool: """ Returns True if the sequence has length zero. @@ -666,9 +686,9 @@ def empty(self): :return: True if sequence length is zero """ - return self.size() == 0 + return self.len() == 0 - def non_empty(self): + def non_empty(self) -> bool: """ Returns True if the sequence does not have length zero. @@ -680,9 +700,9 @@ def non_empty(self): :return: True if sequence length is not zero """ - return self.size() != 0 + return self.len() != 0 - def any(self): + def any(self) -> bool: """ Returns True if any element in the sequence has truth value True @@ -696,7 +716,7 @@ def any(self): """ return any(self) - def all(self): + def all(self) -> bool: """ Returns True if the truth value of all items in the sequence true. @@ -710,7 +730,7 @@ def all(self): """ return all(self) - def exists(self, func): + def exists(self, func: Callable[[T], object]) -> bool: """ Returns True if an element in the sequence makes func evaluate to True. @@ -725,7 +745,7 @@ def exists(self, func): """ return any(func(element) for element in self) - def for_all(self, func): + def for_all(self, func: Callable[[T], object]) -> bool: """ Returns True if all elements in sequence make func evaluate to True. @@ -740,7 +760,9 @@ def for_all(self, func): """ return all(func(element) for element in self) - def max(self): + def max( + self: Sequence[SupportsRichComparison], + ) -> SupportsRichComparison | Sequence: """ Returns the largest element in the sequence. If the sequence has multiple maximal elements, only the first one is returned. @@ -759,19 +781,19 @@ def max(self): >>> seq([1, "a"]).max() Traceback (most recent call last): - ... - TypeError: unorderable types: int() < str() + TypeError: ... >>> seq([]).max() Traceback (most recent call last): - ... ValueError: max() arg is an empty sequence :return: Maximal value of sequence """ return _wrap(max(self)) - def min(self): + def min( + self: Sequence[SupportsRichComparison], + ) -> SupportsRichComparison | Sequence: """ Returns the smallest element in the sequence. If the sequence has multiple minimal elements, only the first one is returned. @@ -790,19 +812,17 @@ def min(self): >>> seq([1, "a"]).min() Traceback (most recent call last): - ... TypeError: unorderable types: int() < str() >>> seq([]).min() Traceback (most recent call last): - ... 
ValueError: min() arg is an empty sequence :return: Minimal value of sequence """ return _wrap(min(self)) - def max_by(self, func): + def max_by(self, func: Callable[[T], SupportsRichComparison]) -> T | Sequence: """ Returns the largest element in the sequence. Provided function is used to generate key used to compare the elements. @@ -819,7 +839,6 @@ def max_by(self, func): >>> seq([]).max_by(lambda x: x) Traceback (most recent call last): - ... ValueError: max() arg is an empty sequence :param func: function to compute max by @@ -827,7 +846,7 @@ def max_by(self, func): """ return _wrap(max(self, key=func)) - def min_by(self, func): + def min_by(self, func: Callable[[T], SupportsRichComparison]) -> T | Sequence: """ Returns the smallest element in the sequence. Provided function is used to generate key used to compare the elements. @@ -836,7 +855,7 @@ def min_by(self, func): The sequence can not be empty. Raises ValueError when the sequence is empty. - >>> seq([2, 4, 5, 1, 3]).min_by(lambda num: num % 6) + >>> seq([2, 4, 5, 1, 3]).min_by(lambda num: num % 5) 5 >>> seq('aa', 'xyz', 'abcd', 'xyy').min_by(len) @@ -844,7 +863,6 @@ def min_by(self, func): >>> seq([]).min_by(lambda x: x) Traceback (most recent call last): - ... ValueError: min() arg is an empty sequence :param func: function to compute min by @@ -852,7 +870,7 @@ def min_by(self, func): """ return _wrap(min(self, key=func)) - def find(self, func): + def find(self, func: Callable[[T], object]) -> T | None: """ Finds the first element of the sequence that satisfies func. If no such element exists, then return None. @@ -865,18 +883,18 @@ def find(self, func): """ return next((element for element in self if func(element)), None) - def flatten(self): + def flatten(self: Sequence[Iterable[U]]) -> Sequence[U]: """ Flattens a sequence of sequences to a single sequence of elements. - >>> seq([[1, 2], [3, 4], [5, 6]]) + >>> seq([[1, 2], [3, 4], [5, 6]]).flatten() [1, 2, 3, 4, 5, 6] :return: flattened sequence """ - return self._transform(transformations.flatten_t()) + return self._transform(transformations.flat_map_t(identity)) - def flat_map(self, func): + def flat_map(self, func: Callable[[T], Iterable[U]]) -> Sequence[U]: """ Applies func to each element of the sequence, which themselves should be sequences. Then appends each element of each sequence to a final result @@ -895,64 +913,76 @@ def flat_map(self, func): """ return self._transform(transformations.flat_map_t(func)) - def group_by(self, func): + def group_by(self, func: Callable[[T], U]) -> Sequence[tuple[U, Sequence[T]]]: """ Group elements into a list of (Key, Value) tuples where func creates the key and maps to values matching that key. >>> seq(["abc", "ab", "z", "f", "qw"]).group_by(len) - [(1, ['z', 'f']), (2, ['ab', 'qw']), (3, ['abc'])] + [(3, ['abc']), (2, ['ab', 'qw']), (1, ['z', 'f'])] :param func: group by result of this function :return: grouped sequence """ return self._transform(transformations.group_by_t(func)) - def group_by_key(self): + def group_by_key(self: Sequence[tuple[U, V]]) -> Sequence[tuple[U, Sequence[V]]]: """ Group sequence of (Key, Value) elements by Key. 
>>> seq([('a', 1), ('b', 2), ('b', 3), ('b', 4), ('c', 3), ('c', 0)]).group_by_key() - [('a', [1]), ('c', [3, 0]), ('b', [2, 3, 4])] + [('a', [1]), ('b', [2, 3, 4]), ('c', [3, 0])] :return: sequence grouped by key """ return self._transform(transformations.group_by_key_t()) - def reduce_by_key(self, func): + def reduce_by_key( + self: Sequence[tuple[U, V]], func: Callable[[V, V], V] + ) -> Sequence[tuple[U, V]]: """ Reduces a sequence of (Key, Value) using func on each sequence of values. >>> seq([('a', 1), ('b', 2), ('b', 3), ('b', 4), ('c', 3), ('c', 0)]) \ .reduce_by_key(lambda x, y: x + y) - [('a', 1), ('c', 3), ('b', 9)] + [('a', 1), ('b', 9), ('c', 3)] :param func: reduce each list of values using two parameter, associative func :return: Sequence of tuples where the value is reduced with func """ return self._transform(transformations.reduce_by_key_t(func)) - def count_by_key(self): + def count_by_key(self: Sequence[tuple[U, V]]) -> Sequence[tuple[U, int]]: """ Reduces a sequence of (Key, Value) by counting each key >>> seq([('a', 1), ('b', 2), ('b', 3), ('b', 4), ('c', 3), ('c', 0)]).count_by_key() [('a', 1), ('b', 3), ('c', 2)] + :return: Sequence of tuples where value is the count of each key """ return self._transform(transformations.count_by_key_t()) - def count_by_value(self): + def count_by_value(self) -> Sequence[tuple[T, int]]: """ Reduces a sequence of items by counting each unique item >>> seq(['a', 'a', 'a', 'b', 'b', 'c', 'd']).count_by_value() [('a', 3), ('b', 2), ('c', 1), ('d', 1)] + :return: Sequence of tuples where value is the count of each key """ return self._transform(transformations.count_by_value_t()) - def reduce(self, func, *initial): + @overload + def reduce(self, func: Callable[[T, T], T]) -> T: + ... + + @overload + def reduce(self, func: Callable[[U, T], U], initial: U) -> U: + ... + + def reduce(self, func, initial=Unset): """ Reduce sequence of elements using func. API mirrors functools.reduce @@ -963,16 +993,12 @@ def reduce(self, func, *initial): :param initial: single optional argument acting as initial value :return: reduced value using func """ - if len(initial) == 0: + if initial is Unset: return _wrap(reduce(func, self)) - elif len(initial) == 1: - return _wrap(reduce(func, self, initial[0])) else: - raise ValueError( - "reduce takes exactly one optional parameter for initial value" - ) + return _wrap(reduce(func, self, initial)) - def accumulate(self, func=add): + def accumulate(self, func: Callable[[T, T], T] = add) -> Sequence[T]: """ Accumulate sequence of elements using func. API mirrors itertools.accumulate @@ -987,7 +1013,7 @@ def accumulate(self, func=add): """ return self._transform(transformations.accumulate_t(func)) - def make_string(self, separator): + def make_string(self, separator: str) -> str: """ Concatenate the elements of the sequence into a string separated by separator. @@ -999,7 +1025,7 @@ def make_string(self, separator): """ return separator.join(str(e) for e in self) - def product(self, projection=None): + def product(self, projection: Callable[[T], U] = identity) -> U: # type: ignore """ Takes product of elements in sequence. 
@@ -1016,22 +1042,10 @@ def product(self, projection=None): :return: product of elements in sequence """ if self.empty(): - if projection: - return projection(1) - else: - return 1 - if self.size() == 1: - if projection: - return projection(self.first()) - else: - return self.first() - - if projection: - return self.map(projection).reduce(mul) - else: - return self.reduce(mul) + return projection(1) # type: ignore + return self.map(projection).reduce(mul) - def sum(self, projection=None): + def sum(self, projection: Callable[[T], Tnumber] = identity) -> Tnumber: # type: ignore """ Takes sum of elements in sequence. @@ -1044,30 +1058,40 @@ def sum(self, projection=None): :param projection: function to project on the sequence before taking the sum :return: sum of elements in sequence """ - if projection: - return sum(self.map(projection)) - else: - return sum(self) + return sum(self.map(projection)) # type: ignore + # sum wants int but our Tnumber is a bit better - def average(self, projection=None): + def average( + self, projection: Callable[[T], Tnumber] = identity + ) -> Tnumber: # type: ignore """ Takes the average of elements in the sequence >>> seq([1, 2]).average() 1.5 - >>> seq([('a', 1), ('b', 2)]).average(lambda x: x[1]) + >>> seq([('a', 1), ('b', 3)]).average(lambda x: x[1]) + 2.0 :param projection: function to project on the sequence before taking the average :return: average of elements in the sequence """ - length = self.size() - if projection: - return sum(self.map(projection)) / length - else: - return sum(self) / length + length = self.len() # call .len() before because it calls .cache() + return sum(self.map(projection)) / length # type: ignore + + @overload + def aggregate( + self, func: Callable[[T, T], T], result_lambda: Callable[[T], U] + ) -> U: + ... - def aggregate(self, *args): + @overload + def aggregate( + self, seed: U, func: Callable[[U, T], U], result_lambda: Callable[[U], V] + ) -> V: + ... + + def aggregate(self, func_or_seed, func=None, result_lambda=identity): """ Aggregates the sequence by specified arguments. Its behavior varies depending on if one, two, or three arguments are passed. Assuming the type of the sequence is A: @@ -1086,32 +1110,20 @@ def aggregate(self, *args): :param args: options for how to execute the aggregation :return: aggregated value """ - seed = None - result_lambda = identity - if len(args) == 1: - func = args[0] - elif len(args) == 2: - seed = args[0] - func = args[1] - elif len(args) == 3: - seed = args[0] - func = args[1] - result_lambda = args[2] - else: - raise ValueError(f"aggregate takes 1-3 arguments, {len(args)} were given") - if len(args) == 1: + func, seed = (func, func_or_seed) if func else (func_or_seed, None) + if seed is None: return result_lambda(self.drop(1).fold_left(self.first(), func)) else: return result_lambda(self.fold_left(seed, func)) - def fold_left(self, zero_value, func): + def fold_left(self, zero_value: U, func: Callable[[U, T], U]) -> U | Sequence: """ Assuming that the sequence elements are of type A, folds from left to right starting with the seed value given by zero_value (of type A) using a function of type func(current: B, next: A) => B. current represents the folded value so far and next is the next element from the sequence to fold into current. 
- >>> seq('a', 'b', 'c').fold_left(['start'], lambda current, next: current + [next])) + >>> seq('a', 'b', 'c').fold_left(['start'], lambda current, next: current + [next]) ['start', 'a', 'b', 'c'] :param zero_value: zero value to reduce into @@ -1123,15 +1135,15 @@ def fold_left(self, zero_value, func): result = func(result, element) return _wrap(result) - def fold_right(self, zero_value, func): + def fold_right(self, zero_value: U, func: Callable[[T, U], U]) -> U | Sequence: """ Assuming that the sequence elements are of type A, folds from right to left starting with the seed value given by zero_value (of type A) using a function of type func(next: A, current: B) => B. current represents the folded value so far and next is the next element from the sequence to fold into current. - >>> seq('a', 'b', 'c').fold_left(['start'], lambda next, current: current + [next]) - ['start', 'c', 'b', a'] + >>> seq('a', 'b', 'c').fold_right(['start'], lambda next, current: current + [next]) + ['start', 'c', 'b', 'a'] :param zero_value: zero value to reduce into :param func: Two parameter function as described by function docs @@ -1142,7 +1154,7 @@ def fold_right(self, zero_value, func): result = func(element, result) return _wrap(result) - def zip(self, sequence): + def zip(self, sequence: Iterable[U]) -> Sequence[tuple[T, U]]: """ Zips the stored sequence with the given sequence. @@ -1154,7 +1166,7 @@ def zip(self, sequence): """ return self._transform(transformations.zip_t(sequence)) - def zip_with_index(self, start=0): + def zip_with_index(self, start: int = 0) -> Sequence[tuple[T, int]]: """ Zips the sequence to its index, with the index being the second element of each tuple. @@ -1163,9 +1175,9 @@ def zip_with_index(self, start=0): :return: sequence zipped to its index """ - return self._transform(transformations.zip_with_index_t(start)) + return self.zip(itertools.count(start)) - def enumerate(self, start=0): + def enumerate(self, start: int = 0) -> Sequence[tuple[int, T]]: """ Uses python enumerate to to zip the sequence with indexes starting at start. @@ -1177,7 +1189,9 @@ def enumerate(self, start=0): """ return self._transform(transformations.enumerate_t(start)) - def inner_join(self, other): + def inner_join( + self: Sequence[tuple[U, V]], other: Sequence[tuple[U, W]] + ) -> Sequence[tuple[U, tuple[V, W]]]: """ Sequence and other must be composed of (Key, Value) pairs. If self.sequence contains (K, V) pairs and other contains (K, W) pairs, the return result @@ -1190,9 +1204,13 @@ def inner_join(self, other): :param other: sequence to join with :return: joined sequence of (K, (V, W)) pairs """ - return self.join(other, "inner") + return self.join(other, "inner") # type: ignore - def join(self, other, join_type="inner"): + def join( + self: Sequence[tuple[U, V]], + other: Sequence[tuple[U, W]], + join_type: Literal["inner", "left", "right", "outer"] = "inner", + ) -> Sequence[tuple[U, Union[tuple[V, Optional[W]], tuple[Optional[V], W]]]]: """ Sequence and other must be composed of (Key, Value) pairs. If self.sequence contains (K, V) pairs and other contains (K, W) pairs, the return result is a sequence of (K, (V, W)) pairs. 
@@ -1208,10 +1226,10 @@ def join(self, other, join_type="inner"): [('a', (1, 2)), ('c', (3, 5))] >>> seq([('a', 1), ('b', 2)]).join([('a', 3), ('c', 4)], "left") - [('a', (1, 3)), ('b', (2, None)] + [('a', (1, 3)), ('b', (2, None))] >>> seq([('a', 1), ('b', 2)]).join([('a', 3), ('c', 4)], "right") - [('a', (1, 3)), ('c', (None, 4)] + [('a', (1, 3)), ('c', (None, 4))] >>> seq([('a', 1), ('b', 2)]).join([('a', 3), ('c', 4)], "outer") [('a', (1, 3)), ('b', (2, None)), ('c', (None, 4))] @@ -1222,41 +1240,47 @@ def join(self, other, join_type="inner"): """ return self._transform(transformations.join_t(other, join_type)) - def left_join(self, other): + def left_join( + self: Sequence[tuple[U, V]], other: Sequence[tuple[U, W]] + ) -> Sequence[tuple[U, tuple[V, Optional[W]]]]: """ Sequence and other must be composed of (Key, Value) pairs. If self.sequence contains (K, V) pairs and other contains (K, W) pairs, the return result is a sequence of (K, (V, W)) pairs. V values will always be present, W values may be present or None. - >>> seq([('a', 1), ('b', 2)]).join([('a', 3), ('c', 4)]) - [('a', (1, 3)), ('b', (2, None)] + >>> seq([('a', 1), ('b', 2)]).left_join([('a', 3), ('c', 4)]) + [('a', (1, 3)), ('b', (2, None))] :param other: sequence to join with :return: left joined sequence of (K, (V, W)) pairs """ - return self.join(other, "left") + return self.join(other, "left") # type: ignore - def right_join(self, other): + def right_join( + self: Sequence[tuple[U, V]], other: Sequence[tuple[U, W]] + ) -> Sequence[tuple[U, tuple[Optional[V], W]]]: """ Sequence and other must be composed of (Key, Value) pairs. If self.sequence contains (K, V) pairs and other contains (K, W) pairs, the return result is a sequence of (K, (V, W)) pairs. W values will always bepresent, V values may be present or None. - >>> seq([('a', 1), ('b', 2)]).join([('a', 3), ('c', 4)]) - [('a', (1, 3)), ('b', (2, None)] + >>> seq([('a', 1), ('b', 2)]).right_join([('a', 3), ('c', 4)]) + [('a', (1, 3)), ('c', (None, 4))] :param other: sequence to join with :return: right joined sequence of (K, (V, W)) pairs """ - return self.join(other, "right") + return self.join(other, "right") # type: ignore - def outer_join(self, other): + def outer_join( + self: Sequence[tuple[U, V]], other: Sequence[tuple[U, W]] + ) -> Sequence[tuple[U, Union[tuple[V, Optional[W]], tuple[Optional[V], W]]]]: """ Sequence and other must be composed of (Key, Value) pairs. If self.sequence contains (K, V) pairs and other contains (K, W) pairs, the return result is a sequence of (K, (V, W)) pairs. One of V or W will always be not None, but the other may be None - >>> seq([('a', 1), ('b', 2)]).outer_join([('a', 3), ('c', 4)], "outer") + >>> seq([('a', 1), ('b', 2)]).outer_join([('a', 3), ('c', 4)]) [('a', (1, 3)), ('b', (2, None)), ('c', (None, 4))] :param other: sequence to join with @@ -1264,19 +1288,19 @@ def outer_join(self, other): """ return self.join(other, "outer") - def partition(self, func): + def partition(self, func: Callable[[T], object]) -> Sequence[Sequence[T]]: """ Partition the sequence based on satisfying the predicate func. >>> seq([-1, 1, -2, 2]).partition(lambda x: x < 0) - ([-1, -2], [1, 2]) + [[-1, -2], [1, 2]] :param func: predicate to partition on :return: tuple of partitioned sequences """ return self._transform(transformations.partition_t(_wrap, func)) - def grouped(self, size): + def grouped(self, size: int) -> Sequence[Sequence[T]]: """ Partitions the elements into groups of length size. 
@@ -1293,7 +1317,7 @@ def grouped(self, size): """ return self._transform(transformations.grouped_t(size)) - def sliding(self, size, step=1): + def sliding(self, size: int, step: int = 1) -> Sequence[Sequence[T]]: """ Groups elements in fixed size blocks by passing a sliding window over them. @@ -1305,32 +1329,31 @@ def sliding(self, size, step=1): """ return self._transform(transformations.sliding_t(_wrap, size, step)) - def sorted(self, key=None, reverse=False): + def sorted( + self, + key: Callable[[T], SupportsRichComparison] | None = None, + reverse: bool = False, + ) -> Sequence[T]: """ Uses python sort and its passed arguments to sort the input. >>> seq([2, 1, 4, 3]).sorted() [1, 2, 3, 4] + >>> seq([(2, 'a'), (1, 'b'), (4, 'c'), (3, 'd')]).order_by(lambda x: x[0]) + [(1, 'b'), (2, 'a'), (3, 'd'), (4, 'c')] + :param key: sort using key function :param reverse: return list reversed or not :return: sorted sequence """ return self._transform(transformations.sorted_t(key=key, reverse=reverse)) - def order_by(self, func): - """ - Orders the input according to func - - >>> seq([(2, 'a'), (1, 'b'), (4, 'c'), (3, 'd')]).order_by(lambda x: x[0]) - [1, 2, 3, 4] + def order_by(self, func: Callable[[T], SupportsRichComparison]) -> Sequence[T]: + """Alias for sorted.""" + return self._transform(transformations.sorted_t(key=func)) - :param func: order by funciton - :return: ordered sequence - """ - return self._transform(transformations.order_by_t(func)) - - def reverse(self): + def reverse(self) -> reversed: """ Returns the reversed sequence. @@ -1339,9 +1362,11 @@ def reverse(self): :return: reversed sequence """ - return reversed(self) + return reversed(self) # type: ignore + # __reversed__ is supposed to return an iterator but ours does not :/ + # it's a Sequence (can't call next()) - def distinct(self): + def distinct(self) -> Sequence[T]: """ Returns sequence of distinct elements. Elements must be hashable. @@ -1350,9 +1375,9 @@ def distinct(self): :return: sequence of distinct elements """ - return self._transform(transformations.distinct_t()) + return self.distinct_by(identity) - def distinct_by(self, func): + def distinct_by(self, func: Callable[[T], Hashable]) -> Sequence[T]: """ Returns sequence of elements who are distinct by the passed function. The return value of func must be hashable. When two elements are distinct by func, the first is taken. @@ -1362,7 +1387,7 @@ def distinct_by(self, func): """ return self._transform(transformations.distinct_by_t(func)) - def slice(self, start, until): + def slice(self, start: int, until: int) -> Sequence[T]: """ Takes a slice of the sequence starting at start and until but not including until. @@ -1377,15 +1402,15 @@ def slice(self, start, until): """ return self._transform(transformations.slice_t(start, until)) - def to_list(self, n=None): + def to_list(self, n: Optional[int] = None) -> list[T]: """ Converts sequence to list of elements. >>> type(seq([]).to_list()) - list + >>> type(seq([])) - functional.pipeline.Sequence + >>> seq([1, 2, 3]).to_list() [1, 2, 3] @@ -1395,19 +1420,20 @@ def to_list(self, n=None): """ if n is None: self.cache() + assert isinstance(self._base_sequence, list) return self._base_sequence else: return self.cache().take(n).list() - def list(self, n=None): + def list(self, n: Optional[int] = None) -> list[T]: """ Converts sequence to list of elements. 
>>> type(seq([]).list()) - list + >>> type(seq([])) - functional.pipeline.Sequence + >>> seq([1, 2, 3]).list() [1, 2, 3] @@ -1417,15 +1443,15 @@ def list(self, n=None): """ return self.to_list(n=n) - def to_set(self): + def to_set(self) -> set[T]: """ Converts sequence to a set of elements. - >>> type(seq([])).to_set() - set + >>> type(seq([]).to_set()) + >>> type(seq([])) - functional.pipeline.Sequence + >>> seq([1, 1, 2, 2]).to_set() {1, 2} @@ -1434,15 +1460,15 @@ def to_set(self): """ return set(self.sequence) - def set(self): + def set(self) -> set[T]: """ Converts sequence to a set of elements. - >>> type(seq([])).to_set() - set + >>> type(seq([]).to_set()) + >>> type(seq([])) - functional.pipeline.Sequence + >>> seq([1, 1, 2, 2]).set() {1, 2} @@ -1451,12 +1477,24 @@ def set(self): """ return self.to_set() - def to_dict(self, default=None): + @overload + def to_dict(self: Sequence[tuple[U, V]], default: None) -> dict[U, V]: + ... + + @overload + def to_dict( + self: Sequence[tuple[U, V]], default: Callable[[], V] + ) -> collections.defaultdict[U, V]: + ... + + def to_dict( + self: Sequence[tuple[U, V]], default: Callable[[], V] | V | None = None + ) -> dict[U, V] | collections.defaultdict[U, V]: """ Converts sequence of (Key, Value) pairs to a dictionary. >>> type(seq([('a', 1)]).to_dict()) - dict + >>> seq([('a', 1), ('b', 2)]).to_dict() {'a': 1, 'b': 2} @@ -1471,17 +1509,28 @@ def to_dict(self, default=None): if default is None: return dictionary else: - if hasattr(default, "__call__"): - return collections.defaultdict(default, dictionary) - else: - return collections.defaultdict(lambda: default, dictionary) + return collections.defaultdict( + default if callable(default) else lambda: cast(V, default), dictionary + ) + + @overload + def dict(self: Sequence[tuple[U, V]]) -> dict[U, V]: + ... + + @overload + def dict( + self: Sequence[tuple[U, V]], default: Callable[[], V] + ) -> collections.defaultdict[U, V]: + ... - def dict(self, default=None): + def dict( + self: Sequence[tuple[U, V]], default: Optional[Callable[[], V]] = None + ) -> dict[U, V] | collections.defaultdict[U, V]: """ Converts sequence of (Key, Value) pairs to a dictionary. >>> type(seq([('a', 1)]).dict()) - dict + >>> seq([('a', 1), ('b', 2)]).dict() {'a': 1, 'b': 2} @@ -1497,19 +1546,19 @@ def dict(self, default=None): # pylint: disable=too-many-locals def to_file( self, - path, - delimiter=None, - mode="wt", - buffering=-1, - encoding=None, - errors=None, - newline=None, - compresslevel=9, - format=None, - check=-1, - preset=None, - filters=None, - compression=None, + path: StrOrBytesPath, + delimiter: Optional[str] = None, + mode: str = "wt", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + compresslevel: int = 9, + format: Optional[int] = None, + check: int = -1, + preset: Optional[int] = None, + filters: Optional[Iterable[builtins.dict]] = None, + compression: Optional[str] = None, ): """ Saves the sequence to a file by executing str(self) which becomes str(self.to_list()). If @@ -1548,7 +1597,12 @@ def to_file( else: output.write(str(self)) - def to_jsonl(self, path, mode="wb", compression=None): + def to_jsonl( + self, + path: StrOrBytesPath, + mode: str = "wb", + compression: Optional[str] = None, + ): """ Saves the sequence to a jsonl file. Each element is mapped using json.dumps then written with a newline separating each element. 
@@ -1645,21 +1699,30 @@ def _insert_item(item): conn.execute(sql, item) else: raise TypeError( - f"item must be one of dict, namedtuple, tuple or list got {type(item)}" + "item must be one of dict, namedtuple, tuple or list, got:", + type(item), + item, ) self.for_each(_insert_item) - def to_sqlite3(self, conn, target, *args, **kwargs): + def to_sqlite3( + self, + conn: StrOrBytesPath | sqlite3.Connection | sqlite3.Cursor, + target: str, + *args, + **kwargs, + ): """ Saves the sequence to sqlite3 database. Target table must be created in advance. The table schema is inferred from the elements in the sequence if only target table name is supplied. + >>> conn = sqlite3.connect(':memory:') + >>> _ = conn.execute('CREATE TABLE user (id int, name text)') >>> seq([(1, 'Tom'), (2, 'Jack')])\ - .to_sqlite3('users.db', 'INSERT INTO user (id, name) VALUES (?, ?)') - + .to_sqlite3(conn, 'INSERT INTO user (id, name) VALUES (?, ?)') >>> seq([{'id': 1, 'name': 'Tom'}, {'id': 2, 'name': 'Jack'}]).to_sqlite3(conn, 'user') :param conn: path or sqlite connection, cursor @@ -1682,8 +1745,10 @@ def to_sqlite3(self, conn, target, *args, **kwargs): insert_f(input_conn, target) input_conn.commit() else: - raise ValueError( - "conn must be a must be a file path or sqlite3 Connection/Cursor" + raise TypeError( + "conn must be a file path or sqlite3 Connection/Cursor, got:", + type(conn), + conn, ) def to_pandas(self, columns=None): @@ -1794,17 +1859,15 @@ def tabulate( def _wrap(value): """ - Wraps the passed value in a Sequence if it is not a primitive. If it is a string - argument it is expanded to a list of characters. + Wraps the passed value in a Sequence if it is not a primitive. >>> _wrap(1) 1 - >>> _wrap("abc") - ['a', 'b', 'c'] + 'abc' >>> type(_wrap([1, 2])) - functional.pipeline.Sequence + :param value: value to wrap :return: wrapped or not wrapped value @@ -1828,37 +1891,39 @@ def _wrap(value): return value -def extend(func=None, aslist=False, final=False, name=None, parallel=False): +def extend( + func: Optional[Callable[[Any], Any]] = None, + aslist: bool = False, + final: bool = False, + name: str = "", + parallel: bool = False, +): """ Function decorator for adding new methods to the Sequence class. >>> @extend() - def zip2(it): - return [(i,i) for i in it] + ... def zip2(it): + ... return [(i,i) for i in it] >>> seq.range(3).zip2() [(0, 0), (1, 1), (2, 2)] >>> @extend(aslist=True) - def zip2(it): - return zip(it,it) + ... def zip2(it): + ... return zip(it,it) >>> seq.range(3).zip2() [(0, 0), (1, 1), (2, 2)] >>> @extend(final=True) - def make_set(it): - return set(it) - + ... def make_set(it): + ... return set(it) >>> r = seq([0,1,1]).make_set() >>> r {0, 1} - >>> type(r) - - :param func: function to decorate :param aslist: if True convert input sequence to list (default False) :param final: If True decorated function does not return a sequence. 
Useful diff --git a/functional/streams.py b/functional/streams.py index e1340a9..0fbfeb0 100644 --- a/functional/streams.py +++ b/functional/streams.py @@ -1,19 +1,36 @@ -import re +from __future__ import annotations + +import builtins import csv as csvapi import json as jsonapi +from pathlib import Path +import re import sqlite3 as sqlite3api -import builtins - from itertools import chain -from typing import Iterable +from typing import ( + TYPE_CHECKING, + Any, + Iterable, + Mapping, + Optional, + TypeVar, + overload, + IO, +) +import typing from functional.execution import ExecutionEngine, ParallelExecutionEngine +from functional.io import FileDescriptorOrPath, get_read_function from functional.pipeline import Sequence -from functional.util import is_primitive, default_value -from functional.io import get_read_function +from functional.util import coalesce, is_primitive +T = TypeVar("T") -class Stream(object): +if TYPE_CHECKING: + import pandas as pd + + +class Stream: """ Represents and implements a stream which separates the responsibilities of Sequence and ExecutionEngine. @@ -21,7 +38,12 @@ class Stream(object): An instance of Stream is normally accessed as `seq` """ - def __init__(self, disable_compression=False, max_repr_items=100, no_wrap=None): + def __init__( + self, + disable_compression: bool = False, + max_repr_items: int = 100, + no_wrap: Optional[bool] = None, + ): """ Default stream constructor. :param disable_compression: Disable file compression detection @@ -31,7 +53,21 @@ def __init__(self, disable_compression=False, max_repr_items=100, no_wrap=None): self.max_repr_items = max_repr_items self.no_wrap = no_wrap - def __call__(self, *args, no_wrap=None, **kwargs): + @overload + def __call__( + self, arg: pd.DataFrame, no_wrap: Optional[bool] = None + ) -> Sequence[list[Any]]: + ... + + @overload + def __call__(self, arg: Iterable[T], no_wrap: Optional[bool] = None) -> Sequence[T]: + ... + + @overload + def __call__(self, *args: T, no_wrap: Optional[bool] = None) -> Sequence[T]: + ... + + def __call__(self, *args, no_wrap=None): """ Create a Sequence using a sequential ExecutionEngine. @@ -46,10 +82,10 @@ def __call__(self, *args, no_wrap=None, **kwargs): """ engine = ExecutionEngine() return self._parse_args( - args, engine, no_wrap=default_value(no_wrap, self.no_wrap, False) + args, engine, no_wrap=coalesce(no_wrap, self.no_wrap, False) ) - def chain(self, *args, no_wrap=None, **kwargs): + def chain(self, *args: Iterable[T], no_wrap: Optional[bool] = None) -> Sequence[T]: """ Create a Sequence chaining multiple iterators. 
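# Minimal illustration of seq.chain as typed above: it concatenates the given
# iterables into one Sequence (a thin wrapper over itertools.chain), and with no
# arguments it yields an empty Sequence.
from functional import seq

assert seq.chain([1, 2], (3,), range(4, 6)).to_list() == [1, 2, 3, 4, 5]
assert seq.chain().to_list() == []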
""" @@ -57,10 +93,12 @@ def chain(self, *args, no_wrap=None, **kwargs): if not isinstance(arg, Iterable): raise TypeError("The type of arg should be iterator.") - return self(chain(*args), no_wrap=no_wrap, **kwargs) + return self(chain(*args), no_wrap=no_wrap) - def _parse_args(self, args, engine, no_wrap=None): - _no_wrap = default_value(no_wrap, self.no_wrap, False) + def _parse_args( + self, args, engine: Optional[ExecutionEngine], no_wrap: Optional[bool] = None + ) -> Sequence[T]: + _no_wrap = coalesce(no_wrap, self.no_wrap, False) if len(args) == 0: return Sequence( [], engine=engine, max_repr_items=self.max_repr_items, no_wrap=_no_wrap @@ -104,13 +142,13 @@ def _parse_args(self, args, engine, no_wrap=None): def open( self, - path, - delimiter=None, - mode="r", - buffering=-1, - encoding=None, - errors=None, - newline=None, + path: FileDescriptorOrPath, + delimiter: Optional[str] = None, + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, ): """ Reads and parses input files as defined. @@ -121,7 +159,7 @@ def open( allowed. >>> seq.open('examples/gear_list.txt').take(1) - [u'tent\\n'] + ['tent\\n'] :param path: path to file :param delimiter: delimiter to split joined text on. if None, defaults to per line split @@ -149,7 +187,7 @@ def open( else: return self("".join(list(file)).split(delimiter)) - def range(self, *args): + def range(self, *args: int) -> Sequence[int]: """ Alias to range function where seq.range(args) is equivalent to seq(range(args)). @@ -161,7 +199,12 @@ def range(self, *args): """ return self(builtins.range(*args)) # pylint: disable=no-member - def csv(self, csv_file, dialect="excel", **fmt_params): + def csv( + self, + csv_file: FileDescriptorOrPath | Iterable[str], + dialect: str = "excel", + **fmt_params, + ): """ Reads and parses the input of a csv stream or file. 
@@ -176,14 +219,14 @@ def csv(self, csv_file, dialect="excel", **fmt_params): :param fmt_params: options passed to csv.reader :return: Sequence wrapping csv file """ - if isinstance(csv_file, str): + if isinstance(csv_file, (int, str, bytes, Path)): file_open = get_read_function(csv_file, self.disable_compression) input_file = file_open(csv_file) elif hasattr(csv_file, "next") or hasattr(csv_file, "__next__"): input_file = csv_file else: - raise ValueError( - "csv_file must be a file path or implement the iterator interface" + raise TypeError( + "csv_file must be a file descriptor or implement the iterator interface" ) csv_input = csvapi.reader(input_file, dialect=dialect, **fmt_params) @@ -191,20 +234,20 @@ def csv(self, csv_file, dialect="excel", **fmt_params): def csv_dict_reader( self, - csv_file, + csv_file: FileDescriptorOrPath | Iterable[str], fieldnames=None, restkey=None, restval=None, dialect="excel", - **kwds + **kwds, ): - if isinstance(csv_file, str): + if isinstance(csv_file, (int, str, bytes, Path)): file_open = get_read_function(csv_file, self.disable_compression) input_file = file_open(csv_file) elif hasattr(csv_file, "next") or hasattr(csv_file, "__next__"): input_file = csv_file else: - raise ValueError( + raise TypeError( "csv_file must be a file path or implement the iterator interface" ) @@ -214,11 +257,11 @@ def csv_dict_reader( restkey=restkey, restval=restval, dialect=dialect, - **kwds + **kwds, ) return self(csv_input).cache(delete_lineage=True) - def jsonl(self, jsonl_file): + def jsonl(self, jsonl_file: FileDescriptorOrPath | Iterable[str]): """ Reads and parses the input of a jsonl file stream or file. @@ -226,19 +269,19 @@ def jsonl(self, jsonl_file): the python json module. >>> seq.jsonl('examples/chat_logs.jsonl').first() - {u'date': u'10/09', u'message': u'hello anyone there?', u'user': u'bob'} + {'message': 'hello anyone there?', 'date': '10/09', 'user': 'bob'} :param jsonl_file: path or file containing jsonl content :return: Sequence wrapping jsonl file """ - if isinstance(jsonl_file, str): + if isinstance(jsonl_file, (int, str, bytes, Path)): file_open = get_read_function(jsonl_file, self.disable_compression) input_file = file_open(jsonl_file) else: input_file = jsonl_file return self(input_file).map(jsonapi.loads).cache(delete_lineage=True) - def json(self, json_file): + def json(self, json_file: FileDescriptorOrPath | IO) -> Sequence[Any]: """ Reads and parses the input of a json file handler or file. 
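# Sketch of what seq.jsonl does with an already-open iterable of lines: each line is
# run through json.loads and the parsed objects are wrapped in a Sequence. The
# StringIO payload below is made up.
import io

from functional import seq

lines = io.StringIO('{"user": "bob"}\n{"user": "amy"}\n')
assert seq.jsonl(lines).map(lambda record: record["user"]).to_list() == ["bob", "amy"]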
@@ -251,19 +294,19 @@ def json(self, json_file): of entries >>> seq.json('examples/users.json').first() - [u'sarah', {u'date_created': u'08/08', u'news_email': True, u'email': u'sarah@gmail.com'}] + ['bob', {'date_created': '09/01', 'email': 'bob@gmail.com', 'news_mail': True}] :param json_file: path or file containing json content :return: Sequence wrapping jsonl file """ - if isinstance(json_file, str): + if isinstance(json_file, (str, bytes, Path)): file_open = get_read_function(json_file, self.disable_compression) input_file = file_open(json_file) json_input = jsonapi.load(input_file) elif hasattr(json_file, "read"): json_input = jsonapi.load(json_file) else: - raise ValueError( + raise TypeError( "json_file must be a file path or implement the iterator interface" ) @@ -273,12 +316,19 @@ def json(self, json_file): return self(json_input.items()) # pylint: disable=keyword-arg-before-vararg - def sqlite3(self, conn, sql, parameters=None, *args, **kwargs): + def sqlite3( + self, + conn: sqlite3api.Connection | sqlite3api.Cursor | str, + sql: str, + parameters: typing.Sequence[Any] | Mapping[str, Any] | None = None, + *args, + **kwargs, + ) -> Sequence[Any]: """ Reads input by querying from a sqlite database. - >>> seq.sqlite3('examples/users.db', 'select id, name from users where id = 1;').first() - [(1, 'Tom')] + >>> seq.sqlite3('examples/users.db', 'select id, name from user where id = 1;').first() + [1, 'Tom'] :param conn: path or sqlite connection, cursor :param sql: SQL query string @@ -295,8 +345,10 @@ def sqlite3(self, conn, sql, parameters=None, *args, **kwargs): with sqlite3api.connect(conn, *args, **kwargs) as input_conn: return self(input_conn.execute(sql, parameters)) else: - raise ValueError( - "conn must be a must be a file path or sqlite3 Connection/Cursor" + raise TypeError( + "conn must be a file path or sqlite3 Connection/Cursor, got:", + type(conn), + conn, ) @@ -307,10 +359,10 @@ class ParallelStream(Stream): def __init__( self, - processes=None, - partition_size=None, - disable_compression=False, - no_wrap=None, + processes: Optional[int] = None, + partition_size: Optional[int] = None, + disable_compression: bool = False, + no_wrap: bool | None = None, ): """ Configure Stream for parallel processing and file compression detection @@ -318,13 +370,17 @@ def __init__( :param disable_compression: Disable file compression detection :param no_wrap: default value of no_wrap for functions like first() or last() """ - super(ParallelStream, self).__init__( - disable_compression=disable_compression, no_wrap=no_wrap - ) + super().__init__(disable_compression=disable_compression, no_wrap=no_wrap) self.processes = processes self.partition_size = partition_size - def __call__(self, *args, no_wrap=None, **kwargs): + def __call__( + self, + *args, + no_wrap: Optional[bool] = None, + processes: Optional[int] = None, + partition_size: Optional[int] = None, + ): """ Create a Sequence using a parallel ExecutionEngine. 
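# Hedged sketch of the parallel entry point typed above: pseq now accepts per-call
# processes/partition_size keyword overrides, falling back to the stream defaults.
from functional import pseq

total = pseq(range(100), processes=2, partition_size=10).map(lambda x: x * x).sum()
assert total == sum(i * i for i in range(100))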
@@ -337,13 +393,14 @@ def __call__(self, *args, no_wrap=None, **kwargs): :param args: Sequence to wrap :return: Wrapped sequence """ - processes = kwargs.get("processes") or self.processes - partition_size = kwargs.get("partition_size") or self.partition_size engine = ParallelExecutionEngine( - processes=processes, partition_size=partition_size + processes=processes if processes is not None else self.processes, + partition_size=( + partition_size if partition_size is not None else self.partition_size + ), ) return self._parse_args( - args, engine, no_wrap=default_value(no_wrap, self.no_wrap, False) + args, engine, no_wrap=coalesce(no_wrap, self.no_wrap, False) ) diff --git a/functional/test/test_functional.py b/functional/test/test_functional.py index 732f6e6..3a70154 100644 --- a/functional/test/test_functional.py +++ b/functional/test/test_functional.py @@ -1,24 +1,23 @@ # pylint: skip-file -import unittest import array -from collections import namedtuple, deque +import importlib.util +import unittest +from collections import deque, namedtuple from itertools import product +from typing import no_type_check + +from typing_extensions import assert_type -from functional.pipeline import Sequence, is_iterable, _wrap, extend +from functional import pseq, seq +from functional.pipeline import Sequence, _wrap, extend, is_iterable_not_list from functional.transformations import name -from functional import seq, pseq +from functional.util import identity Data = namedtuple("Data", "x y") -def pandas_is_installed(): - try: - global pandas - import pandas - - return True - except ImportError: - return False +def pandas_is_installed() -> bool: + return importlib.util.find_spec("pandas") is not None class TestPipeline(unittest.TestCase): @@ -26,10 +25,10 @@ def setUp(self): self.seq = seq def assert_type(self, s): - self.assertTrue(isinstance(s, Sequence)) + assert isinstance(s, Sequence) def assert_not_type(self, s): - self.assertFalse(isinstance(s, Sequence)) + assert not isinstance(s, Sequence) def assertIteratorEqual(self, iter_0, iter_1): seq_0 = list(iter_0) @@ -37,8 +36,8 @@ def assertIteratorEqual(self, iter_0, iter_1): self.assertListEqual(seq_0, seq_1) def test_is_iterable(self): - self.assertFalse(is_iterable([])) - self.assertTrue(is_iterable(iter([1, 2]))) + assert not is_iterable_not_list([]) + assert is_iterable_not_list(iter([1, 2])) def test_constructor(self): self.assertRaises(TypeError, lambda: Sequence(1)) @@ -116,7 +115,7 @@ def test_iter(self): def test_contains(self): string = "abcdef" s = self.seq(iter(string)).map(lambda x: x) - self.assertTrue("c" in s) + assert "c" in s def test_add(self): l0 = self.seq([1, 2, 3]).map(lambda x: x) @@ -410,14 +409,13 @@ def test_reduce(self): with self.assertRaises(TypeError): seq([]).reduce(f) - with self.assertRaises(ValueError): + with self.assertRaises(TypeError): seq([]).reduce(f, 0, 0) self.assertEqual(seq([]).reduce(f, 1), 1) self.assertEqual(seq([0, 2]).reduce(f, 1), 3) def test_accumulate(self): - f = lambda x, y: x + y l_char = ["a", "b", "c"] expect_char = ["a", "ab", "abc"] l_num = [1, 2, 3] @@ -438,9 +436,9 @@ def test_aggregate(self): self.assertEqual(l.aggregate(f), "abc") self.assertEqual(l.aggregate("z", f), "zabc") self.assertEqual(l.aggregate("z", f, lambda x: x.upper()), "ZABC") - with self.assertRaises(ValueError): + with self.assertRaises(TypeError): l.aggregate() - with self.assertRaises(ValueError): + with self.assertRaises(TypeError): l.aggregate(None, None, None, None) def test_fold_left(self): @@ -480,15 +478,11 @@ 
def test_order_by(self): self.assert_type(r) def test_reverse(self): - l = [1, 2, 3] - expect = [4, 3, 2] - s = self.seq(l).map(lambda x: x + 1) - result = s.reverse() - self.assertIteratorEqual(expect, result) - self.assert_type(result) - result = s.__reversed__() - self.assertIteratorEqual(expect, result) - self.assert_type(result) + s = self.seq([1, 2, 3]).map(lambda x: x + 1) + assert s.reverse() == [4, 3, 2] + self.assert_type(s.reverse()) + assert reversed(s) == [4, 3, 2] + self.assert_type(reversed(s)) def test_distinct(self): l = [1, 3, 1, 2, 2, 3] @@ -497,11 +491,9 @@ def test_distinct(self): result = s.distinct() self.assertEqual(result.size(), len(expect)) for er in zip(expect, result): - self.assertEqual( - er[0], er[1], "Order was not preserved after running distinct!" - ) + assert er[0] == er[1], "Order was not preserved after running distinct!" for e in result: - self.assertTrue(e in expect) + assert e in expect self.assert_type(result) def test_distinct_by(self): @@ -521,20 +513,15 @@ def test_slice(self): self.assert_type(result) def test_any(self): - l = [True, False] - self.assertTrue(self.seq(l).any()) + assert self.seq([True, False]).any() def test_all(self): - l = [True, False] - self.assertFalse(self.seq(l).all()) - l = [True, True] - self.assertTrue(self.seq(l).all()) + assert not self.seq([True, False]).all() + assert self.seq([True, True]).all() def test_enumerate(self): - l = [2, 3, 4] - e = [(0, 2), (1, 3), (2, 4)] - result = self.seq(l).enumerate() - self.assertIteratorEqual(result, e) + result = self.seq([2, 3, 4]).enumerate() + assert result == [(0, 2), (1, 3), (2, 4)] self.assert_type(result) def test_inner_join(self): @@ -612,7 +599,7 @@ def test_outer_join(self): self.assertDictEqual(dict(result1), dict(expect)) def test_join(self): - with self.assertRaises(TypeError): + with self.assertRaises(ValueError): self.seq([(1, 2)]).join([(2, 3)], "").to_list() def test_max(self): @@ -647,17 +634,15 @@ def test_flatten(self): def test_flat_map(self): l = [[1, 1, 1], [2, 2, 2], [3, 3, 3]] - f = lambda x: x expect = [1, 1, 1, 2, 2, 2, 3, 3, 3] - result = self.seq(l).flat_map(f) + result = self.seq(l).flat_map(identity) self.assertIteratorEqual(expect, result) self.assert_type(result) def test_group_by(self): l = [(1, 1), (1, 2), (1, 3), (2, 2)] - f = lambda x: x[0] expect = {1: [(1, 1), (1, 2), (1, 3)], 2: [(2, 2)]} - result = self.seq(l).group_by(f) + result = self.seq(l).group_by(lambda x: x[0]) result_comparison = {} for kv in result: result_comparison[kv[0]] = kv[1] @@ -682,8 +667,8 @@ def test_grouped(self): def test_grouped_returns_list(self): l = self.seq([1, 2, 3, 4, 5, 6, 7, 8]) - self.assertTrue(is_iterable(l.grouped(2))) - self.assertTrue(is_iterable(l.grouped(3))) + assert is_iterable_not_list(l.grouped(2)) + assert is_iterable_not_list(l.grouped(3)) def test_grouped_returns_list_of_lists(self): test_inputs = [ @@ -715,7 +700,7 @@ def gen_test(collection, group_size): all_sub_collections_are_lists, err_msg = gen_test( test_input, group_size ) - self.assertTrue(all_sub_collections_are_lists, msg=err_msg) + assert all_sub_collections_are_lists, err_msg def test_sliding(self): l = self.seq([1, 2, 3, 4, 5, 6, 7]) @@ -728,15 +713,15 @@ def test_sliding(self): self.assertIteratorEqual(l.sliding(2, 3), expect) def test_empty(self): - self.assertTrue(self.seq([]).empty()) + assert self.seq([]).empty() self.assertEqual(self.seq(), self.seq([])) def test_non_empty(self): - self.assertTrue(self.seq([1]).non_empty()) + assert self.seq([1]).non_empty() def 
test_non_zero_bool(self): - self.assertTrue(bool(self.seq([1]))) - self.assertFalse(bool(self.seq([]))) + assert bool(self.seq([1])) + assert not bool(self.seq([])) def test_make_string(self): l = [1, 2, 3] @@ -753,9 +738,8 @@ def test_partition(self): l = [-1, -2, -3, 1, 2, 3] e2 = [-1, -2, -3] e1 = [1, 2, 3] - f = lambda x: x > 0 s = self.seq(l) - p1, p2 = s.partition(f) + p1, p2 = s.partition(lambda x: x > 0) self.assertIteratorEqual(e1, list(p1)) self.assertIteratorEqual(e2, list(p2)) self.assert_type(p1) @@ -827,7 +811,7 @@ def test_to_list(self): l = [1, 2, 3, "abc", {1: 2}, {1, 2, 3}] result = self.seq(l).to_list() self.assertIteratorEqual(result, l) - self.assertTrue(isinstance(result, list)) + assert isinstance(result, list) result = self.seq(iter([0, 1, 2])).to_list() self.assertIsInstance(result, list) result = self.seq(l).list(n=2) @@ -837,7 +821,7 @@ def test_list(self): l = [1, 2, 3, "abc", {1: 2}, {1, 2, 3}] result = self.seq(l).list() self.assertEqual(result, l) - self.assertTrue(isinstance(result, list)) + assert isinstance(result, list) result = self.seq(iter([0, 1, 2])).to_list() self.assertIsInstance(result, list) result = self.seq(l).list(n=2) @@ -866,28 +850,28 @@ def f(e): def test_exists(self): l = ["aaa", "BBB", "ccc"] - self.assertTrue(self.seq(l).exists(str.islower)) - self.assertTrue(self.seq(l).exists(str.isupper)) - self.assertFalse(self.seq(l).exists(lambda s: "d" in s)) + assert self.seq(l).exists(str.islower) + assert self.seq(l).exists(str.isupper) + assert not self.seq(l).exists(lambda s: "d" in s) def test_for_all(self): l = ["aaa", "bbb", "ccc"] - self.assertTrue(self.seq(l).for_all(str.islower)) - self.assertFalse(self.seq(l).for_all(str.isupper)) + assert self.seq(l).for_all(str.islower) + assert not self.seq(l).for_all(str.isupper) def test_to_dict(self): l = [(1, 2), (2, 10), (7, 2)] d = {1: 2, 2: 10, 7: 2} result = self.seq(l).to_dict() self.assertDictEqual(result, d) - self.assertTrue(isinstance(result, dict)) + assert isinstance(result, dict) result = self.seq(l).to_dict(default=lambda: 100) - self.assertTrue(1 in result) - self.assertFalse(3 in result) + assert 1 in result + assert not 3 in result self.assertEqual(result[4], 100) result = self.seq(l).dict(default=100) - self.assertTrue(1 in result) - self.assertFalse(3 in result) + assert 1 in result + assert not 3 in result self.assertEqual(result[4], 100) def test_dict(self): @@ -895,14 +879,14 @@ def test_dict(self): d = {1: 2, 2: 10, 7: 2} result = self.seq(l).dict() self.assertDictEqual(result, d) - self.assertTrue(isinstance(result, dict)) + assert isinstance(result, dict) result = self.seq(l).dict(default=lambda: 100) - self.assertTrue(1 in result) - self.assertFalse(3 in result) + assert 1 in result + assert not 3 in result self.assertEqual(result[4], 100) result = self.seq(l).dict(default=100) - self.assertTrue(1 in result) - self.assertFalse(3 in result) + assert 1 in result + assert not 3 in result self.assertEqual(result[4], 100) def test_reduce_by_key(self): @@ -951,7 +935,7 @@ def test_wrap(self): self.assert_not_type(_wrap(Data(1, 2))) def test_wrap_objects(self): - class A(object): + class A: a = 1 l = [A(), A(), A()] @@ -962,9 +946,11 @@ class A(object): pandas_is_installed(), "Skip pandas tests if pandas is not installed" ) def test_wrap_pandas(self): + import pandas + df1 = pandas.DataFrame({"name": ["name1", "name2"], "value": [1, 2]}) df2 = pandas.DataFrame({"name": ["name1", "name2"], "value": [3, 4]}) - result = seq([df1, df2]).reduce(lambda x, y: x.append(y)) + result = 
seq([df1, df2]).reduce(lambda x, y: pandas.concat([x, y])) self.assertEqual(result.len(), 4) self.assertEqual(result[0].to_list(), ["name1", 1]) self.assertEqual(result[1].to_list(), ["name2", 2]) @@ -1011,15 +997,16 @@ def test_cache(self): raise self.skipTest("pseq doesn't support functions with side-effects") calls = [] func = calls.append - result = self.seq(1, 2, 3).map(func).cache().map(lambda x: x).to_list() + r1 = self.seq(1, 2, 3).map(func).cache().map(lambda x: x).to_list() self.assertEqual(len(calls), 3) - self.assertEqual(result, [None, None, None]) - result = self.seq(1, 2, 3).map(lambda x: x).cache() + self.assertEqual(r1, [None, None, None]) + self.assertEqual( - repr(result._lineage), "Lineage: sequence -> map() -> cache" + repr(self.seq(1, 2, 3).map(lambda x: x).cache()._lineage), + "Lineage: sequence -> map() -> cache", ) - result = self.seq(1, 2, 3).map(lambda x: x).cache(delete_lineage=True) - self.assertEqual(repr(result._lineage), "Lineage: sequence") + r2 = self.seq(1, 2, 3).map(lambda x: x).cache(delete_lineage=True) + self.assertEqual(repr(r2._lineage), "Lineage: sequence") def test_tabulate(self): sequence = seq([[1, 2, 3], [4, 5, 6]]) @@ -1028,21 +1015,18 @@ def test_tabulate(self): result = sequence.tabulate() self.assertEqual(result, "- - -\n1 2 3\n4 5 6\n- - -") - sequence = seq(1, 2, 3) - self.assertEqual(sequence.tabulate(), None) + self.assertEqual(seq(1, 2, 3).tabulate(), None) - class NotTabulatable(object): + class NotTabulatable: pass - sequence = seq(NotTabulatable(), NotTabulatable(), NotTabulatable()) - self.assertEqual(sequence.tabulate(), None) + s2 = seq(NotTabulatable(), NotTabulatable(), NotTabulatable()) + self.assertEqual(s2.tabulate(), None) long_data = seq([(i, i + 1) for i in range(30)]) - self.assertTrue("Showing 10 of 30 rows" in long_data.tabulate(n=10)) - self.assertTrue("Showing 10 of 30 rows" in long_data._repr_html_()) - self.assertTrue( - "Showing 10 of 30 rows" not in long_data.tabulate(n=10, tablefmt="plain") - ) + assert "Showing 10 of 30 rows" in long_data.tabulate(n=10) + assert "Showing 10 of 30 rows" in long_data._repr_html_() + assert "Showing 10 of 30 rows" not in long_data.tabulate(n=10, tablefmt="plain") def test_tabulate_namedtuple(self): sequence_tabulated = seq([Data(1, 2), Data(6, 7)]).tabulate() @@ -1056,26 +1040,24 @@ def test_repr_max_lines(self): class TestExtend(unittest.TestCase): + @no_type_check def test_custom_functions(self): @extend(aslist=True) def my_zip(it): return zip(it, it) - result = seq.range(3).my_zip().list() - expected = list(zip(range(3), range(3))) - self.assertEqual(result, expected) - - result = seq.range(3).my_zip().my_zip().list() - expected = list(zip(expected, expected)) - self.assertEqual(result, expected) + assert seq.range(3).my_zip().list() == [(0, 0), (1, 1), (2, 2)] + assert seq.range(3).my_zip().my_zip().list() == [ + ((0, 0), (0, 0)), + ((1, 1), (1, 1)), + ((2, 2), (2, 2)), + ] @extend def square(it): return [i**2 for i in it] - result = seq.range(100).square().list() - expected = [i**2 for i in range(100)] - self.assertEqual(result, expected) + assert seq.range(100).square().list() == [i**2 for i in range(100)] name = "PARALLEL_SQUARE" @@ -1084,36 +1066,29 @@ def square_parallel(it): return [i**2 for i in it] result = seq.range(100).square_parallel() - self.assertEqual(result.sum(), sum(expected)) - self.assertEqual( - repr(result._lineage), f"Lineage: sequence -> extended[{name}]" - ) + assert result.sum() == sum(i**2 for i in range(100)) + assert repr(result._lineage) == 
f"Lineage: sequence -> extended[{name}]" @extend def my_filter(it, n=10): return (i for i in it if i > n) # test keyword args - result = seq.range(20).my_filter(n=10).list() - expected = list(filter(lambda x: x > 10, range(20))) - self.assertEqual(result, expected) + assert seq.range(20).my_filter(n=10).list() == list(range(11, 20)) # test args - result = seq.range(20).my_filter(10).list() - self.assertEqual(result, expected) + assert seq.range(20).my_filter(10).list() == list(range(11, 20)) # test final @extend(final=True) def toarray(it): return array.array("f", it) - result = seq.range(10).toarray() - expected = array.array("f", range(10)) - self.assertEqual(result, expected) + assert seq.range(10).toarray() == array.array("f", range(10)) - result = seq.range(10).map(lambda x: x**2).toarray() - expected = array.array("f", [i**2 for i in range(10)]) - self.assertEqual(result, expected) + assert seq.range(10).map(lambda x: x**2).toarray() == array.array( + "f", [i**2 for i in range(10)] + ) # a more complex example combining all above @extend() @@ -1139,3 +1114,11 @@ def sum_pair(it): class TestParallelPipeline(TestPipeline): def setUp(self): self.seq = pseq + + +def test_typing() -> None: + assert_type(seq([1, 2, 3]), Sequence[int]) + assert_type(seq([1, 2, 3]).map(identity), Sequence[int]) + assert_type(seq([1]).map(str), Sequence[str]) + assert_type(seq("ab").to_list(), list[str]) + assert_type(seq(["ab"]).to_list(), list[str]) diff --git a/functional/test/test_io.py b/functional/test/test_io.py index a513860..4ef3045 100644 --- a/functional/test/test_io.py +++ b/functional/test/test_io.py @@ -7,64 +7,47 @@ class TestUtil(unittest.TestCase): def test_reusable_file(self): license_file_lf = ReusableFile("LICENSE.txt") with open("LICENSE.txt", encoding="utf8") as license_file: - self.assertEqual(list(license_file), list(license_file_lf)) - iter_1 = iter(license_file_lf) - iter_2 = iter(license_file_lf) - self.assertEqual(list(iter_1), list(iter_2)) + assert list(license_file) == list(license_file_lf) + assert list(iter(license_file_lf)) == list(iter(license_file_lf)) def test_gzip_file(self): file_name = "functional/test/data/test.txt.gz" - expect = [ + assert list(GZFile(file_name, mode="rt", encoding="utf-8")) == [ "line0\n", "line1\n", "line2", ] - self.assertListEqual( - expect, list(GZFile(file_name, mode="rt", encoding="utf-8")) - ) - - expect = [ + assert list(GZFile(file_name, mode="rb")) == [ b"line0\n", b"line1\n", b"line2", ] - self.assertListEqual(expect, list(GZFile(file_name, mode="rb"))) def test_bz2_file(self): file_name = "functional/test/data/test.txt.bz2" - expect = [ + assert list(BZ2File(file_name, mode="rt", encoding="utf-8")) == [ "line0\n", "line1\n", "line2", ] - self.assertListEqual( - expect, list(BZ2File(file_name, mode="rt", encoding="utf-8")) - ) - - expect = [ + assert list(BZ2File(file_name, mode="rb")) == [ b"line0\n", b"line1\n", b"line2", ] - self.assertListEqual(expect, list(BZ2File(file_name, mode="rb"))) def test_xz_file(self): file_name = "functional/test/data/test.txt.xz" - expect = [ + assert list(XZFile(file_name, mode="rt", encoding="utf-8")) == [ "line0\n", "line1\n", "line2", ] - self.assertListEqual( - expect, list(XZFile(file_name, mode="rt", encoding="utf-8")) - ) - - expect = [ + assert list(XZFile(file_name, mode="rb")) == [ b"line0\n", b"line1\n", b"line2", ] - self.assertListEqual(expect, list(XZFile(file_name, mode="rb"))) def test_universal_write_open(self): with self.assertRaises(ValueError): diff --git 
a/functional/test/test_streams.py b/functional/test/test_streams.py index 6ec5e6d..13849d7 100644 --- a/functional/test/test_streams.py +++ b/functional/test/test_streams.py @@ -112,7 +112,7 @@ def test_chain(self): self.assertEqual([], self.seq.chain().to_list()) with self.assertRaises(TypeError): - self.seq.chain(1, 2).to_list() + self.seq.chain(1, 2).to_list() # type: ignore self.assertEqual(list(data_a), self.seq.chain(data_a).to_list()) @@ -122,10 +122,10 @@ def test_csv(self): result = self.seq.csv("functional/test/data/test.csv").to_list() expect = [["1", "2", "3", "4"], ["a", "b", "c", "d"]] self.assertEqual(expect, result) - with open("functional/test/data/test.csv", "r", encoding="utf8") as csv_file: + with open("functional/test/data/test.csv", encoding="utf8") as csv_file: self.assertEqual(expect, self.seq.csv(csv_file).to_list()) - with self.assertRaises(ValueError): - self.seq.csv(1) + with self.assertRaises(TypeError): + self.seq.csv([]) # type: ignore def test_csv_dict_reader(self): result = self.seq.csv_dict_reader( @@ -138,7 +138,7 @@ def test_csv_dict_reader(self): self.assertEqual(result[1]["b"], "5") self.assertEqual(result[1]["c"], "6") - with open("functional/test/data/test_header.csv", "r", encoding="utf8") as f: + with open("functional/test/data/test_header.csv", encoding="utf8") as f: result = self.seq.csv_dict_reader(f).to_list() self.assertEqual(result[0]["a"], "1") self.assertEqual(result[0]["b"], "2") @@ -147,28 +147,28 @@ def test_csv_dict_reader(self): self.assertEqual(result[1]["b"], "5") self.assertEqual(result[1]["c"], "6") - with self.assertRaises(ValueError): - self.seq.csv_dict_reader(1) + with self.assertRaises(TypeError): + self.seq.csv_dict_reader([]) # type: ignore def test_gzip_csv(self): result = self.seq.csv("functional/test/data/test.csv.gz").to_list() expect = [["1", "2", "3", "4"], ["a", "b", "c", "d"]] self.assertEqual(expect, result) - with self.assertRaises(ValueError): - self.seq.csv(1) + with self.assertRaises(TypeError): + self.seq.csv([]) # type: ignore def test_bz2_csv(self): result = self.seq.csv("functional/test/data/test.csv.bz2").to_list() expect = [["1", "2", "3", "4"], ["a", "b", "c", "d"]] self.assertEqual(expect, result) - with self.assertRaises(ValueError): - self.seq.csv(1) + with self.assertRaises(TypeError): + self.seq.csv([]) # type: ignore def test_xz_csv(self): result = self.seq.csv("functional/test/data/test.csv.xz").to_list() expect = [["1", "2", "3", "4"], ["a", "b", "c", "d"]] self.assertEqual(expect, result) - with self.assertRaises(ValueError): + with self.assertRaises(OSError): self.seq.csv(1) def test_jsonl(self): @@ -212,8 +212,8 @@ def test_json(self): result = self.seq.json(file_handle).to_list() self.assertEqual(dict_expect, result) - with self.assertRaises(ValueError): - self.seq.json(1) + with self.assertRaises(TypeError): + self.seq.json([]) # type: ignore def test_gzip_json(self): list_test_path = "functional/test/data/test_list.json.gz" @@ -226,8 +226,8 @@ def test_gzip_json(self): result = self.seq.json(dict_test_path).to_list() self.assertEqual(dict_expect, result) - with self.assertRaises(ValueError): - self.seq.json(1) + with self.assertRaises(TypeError): + self.seq.json([]) # type: ignore def test_bz2_json(self): list_test_path = "functional/test/data/test_list.json.bz2" @@ -240,8 +240,8 @@ def test_bz2_json(self): result = self.seq.json(dict_test_path).to_list() self.assertEqual(dict_expect, result) - with self.assertRaises(ValueError): - self.seq.json(1) + with self.assertRaises(TypeError): + 
self.seq.json([]) # type: ignore def test_xz_json(self): list_test_path = "functional/test/data/test_list.json.xz" @@ -254,15 +254,15 @@ def test_xz_json(self): result = self.seq.json(dict_test_path).to_list() self.assertEqual(dict_expect, result) - with self.assertRaises(ValueError): - self.seq.json(1) + with self.assertRaises(TypeError): + self.seq.json([]) # type: ignore def test_sqlite3(self): db_file = "functional/test/data/test_sqlite3.db" # test failure case - with self.assertRaises(ValueError): - self.seq.sqlite3(1, "SELECT * from user").to_list() + with self.assertRaises(TypeError): + self.seq.sqlite3(1, "SELECT * from user").to_list() # type: ignore # test select from file path query_0 = "SELECT id, name FROM user;" @@ -299,7 +299,7 @@ def test_sqlite3(self): expected_2 = [(1, "Tom")] self.assertListEqual(expected_2, result_2) - def test_pandas(self): + def test_pandas(self) -> None: try: import pandas @@ -316,11 +316,11 @@ def test_to_file(self): tmp_path = "functional/test/data/tmp/output.txt" sequence = self.seq(1, 2, 3, 4) sequence.to_file(tmp_path) - with open(tmp_path, "r", encoding="utf8") as output: + with open(tmp_path, encoding="utf8") as output: self.assertEqual("[1, 2, 3, 4]", output.readlines()[0]) sequence.to_file(tmp_path, delimiter=":") - with open(tmp_path, "r", encoding="utf8") as output: + with open(tmp_path, encoding="utf8") as output: self.assertEqual("1:2:3:4", output.readlines()[0]) def test_to_file_compressed(self): @@ -448,7 +448,7 @@ def test_to_csv_compressed(self): def test_to_sqlite3_failure(self): insert_sql = "INSERT INTO user (id, name) VALUES (?, ?)" elements = [(1, "Tom"), (2, "Jack"), (3, "Jane"), (4, "Stephan")] - with self.assertRaises(ValueError): + with self.assertRaises(TypeError): self.seq(elements).to_sqlite3(1, insert_sql) def test_to_sqlite3_file(self): diff --git a/functional/test/test_util.py b/functional/test/test_util.py index eac2dc3..b76e726 100644 --- a/functional/test/test_util.py +++ b/functional/test/test_util.py @@ -11,7 +11,7 @@ pack, unpack, compute_partition_size, - default_value, + coalesce, ) Data = namedtuple("Data", "x y") @@ -67,13 +67,13 @@ def test_compute_partition_size(self): self.assertEqual(result, 1) def test_default_value(self): - result = default_value(True) + result = coalesce(True) self.assertEqual(result, True) - result = default_value(False) + result = coalesce(False) self.assertEqual(result, False) - result = default_value(None, True) + result = coalesce(None, True) self.assertEqual(result, True) - result = default_value(None, False) + result = coalesce(None, False) self.assertEqual(result, False) with self.assertRaises(ValueError): - result = default_value(None, None) + result = coalesce(None, None) diff --git a/functional/transformations.py b/functional/transformations.py index f13481a..7bfcea5 100644 --- a/functional/transformations.py +++ b/functional/transformations.py @@ -1,31 +1,43 @@ +from __future__ import annotations + +import collections +import collections.abc +import types +from collections.abc import Callable, Iterable, Set from functools import partial from itertools import ( + accumulate, + chain, dropwhile, - takewhile, + filterfalse, islice, - count, product, - chain, starmap, - filterfalse, + takewhile, ) -import collections -import types -from collections.abc import Callable +from typing import TYPE_CHECKING, NamedTuple, Optional, TypeVar from functional.execution import ExecutionStrategies +from functional.util import identity +if TYPE_CHECKING: + from functional.pipeline import 
Sequence -#: Defines a Transformation from a name, function, and execution_strategies -Transformation = collections.namedtuple( - "Transformation", ["name", "function", "execution_strategies"] -) + +class Transformation(NamedTuple): + name: str + function: Callable[[Iterable], Iterable] + execution_strategies: Set[int] = frozenset() + + +T = TypeVar("T") #: Cache transformation -CACHE_T = Transformation("cache", None, None) +CACHE_T = Transformation("cache", identity) +# this identity will not be used but it's to comply with typing -def name(function: Callable): +def name(function: Callable) -> str: """ Retrieve a pretty name for the function :param function: function to get name from @@ -37,72 +49,51 @@ def name(function: Callable): return str(function) -def map_t(func: Callable): +def listify(sequence: Iterable[T]) -> collections.abc.Sequence[T]: """ - Transformation for Sequence.map - :param func: map function - :return: transformation + Convert an iterable to a list + :param sequence: sequence to convert + :return: list """ - return Transformation( - f"map({name(func)})", - partial(map, func), - {ExecutionStrategies.PARALLEL}, - ) + if isinstance(sequence, collections.abc.Sequence): + return sequence + return list(sequence) -def select_t(func: Callable): +def map_t(func: Callable) -> Transformation: """ - Transformation for Sequence.select - :param func: select function + Transformation for Sequence.map + :param func: map function :return: transformation """ return Transformation( - "select({name(func)})", - partial(map, func), - {ExecutionStrategies.PARALLEL}, + f"map({name(func)})", partial(map, func), {ExecutionStrategies.PARALLEL} ) -def starmap_t(func: Callable): +def starmap_t(func: Callable) -> Transformation: """ Transformation for Sequence.starmap and Sequence.smap :param func: starmap function :return: transformation """ return Transformation( - "starmap({name(func)})", - partial(starmap, func), - {ExecutionStrategies.PARALLEL}, + f"starmap({name(func)})", partial(starmap, func), {ExecutionStrategies.PARALLEL} ) -def filter_t(func: Callable): +def filter_t(func: Callable) -> Transformation: """ Transformation for Sequence.filter :param func: filter function :return: transformation """ return Transformation( - f"filter({name(func)})", - partial(filter, func), - {ExecutionStrategies.PARALLEL}, - ) - - -def where_t(func: Callable): - """ - Transformation for Sequence.where - :param func: where function - :return: transformation - """ - return Transformation( - f"where({name(func)})", - partial(filter, func), - {ExecutionStrategies.PARALLEL}, + f"filter({name(func)})", partial(filter, func), {ExecutionStrategies.PARALLEL} ) -def filter_not_t(func: Callable): +def filter_not_t(func: Callable) -> Transformation: """ Transformation for Sequence.filter_not :param func: filter_not function @@ -115,15 +106,27 @@ def filter_not_t(func: Callable): ) -def reversed_t(): +def _reverse_iter(iterable: Iterable[T]) -> Iterable[T]: + """ + Reverse an iterable + :param iterable: iterable to reverse + :return: reversed iterable + """ + try: # avoid a copy if we can + return reversed(iterable) # type: ignore + except TypeError: + return reversed(list(iterable)) + + +def reversed_t() -> Transformation: """ Transformation for Sequence.reverse :return: transformation """ - return Transformation("reversed", reversed, [ExecutionStrategies.PRE_COMPUTE]) + return Transformation("reversed", _reverse_iter) -def slice_t(start: int, until: int): +def slice_t(start: int, until: int) -> Transformation: """ 
Transformation for Sequence.slice :param start: start index @@ -133,28 +136,10 @@ def slice_t(start: int, until: int): return Transformation( f"slice({start}, {until})", lambda sequence: islice(sequence, start, until), - None, ) -def distinct_t(): - """ - Transformation for Sequence.distinct - :return: transformation - """ - - def distinct(sequence): - seen = set() - for element in sequence: - if element in seen: - continue - seen.add(element) - yield element - - return Transformation("distinct", distinct, None) - - -def distinct_by_t(func: Callable): +def distinct_by_t(func: Callable) -> Transformation: """ Transformation for Sequence.distinct_by :param func: distinct_by function @@ -162,42 +147,27 @@ def distinct_by_t(func: Callable): """ def distinct_by(sequence): - distinct_lookup = {} + seen = set() for element in sequence: key = func(element) - if key not in distinct_lookup: - distinct_lookup[key] = element - return distinct_lookup.values() + if key not in seen: + seen.add(key) + yield element - return Transformation(f"distinct_by({name(func)})", distinct_by, None) + return Transformation(f"distinct_by({name(func)})", distinct_by) -def sorted_t(key=None, reverse: bool = False): +def sorted_t(key: Optional[Callable] = None, reverse: bool = False): """ Transformation for Sequence.sorted :param key: key to sort by :param reverse: reverse or not :return: transformation """ - return Transformation( - "sorted", lambda sequence: sorted(sequence, key=key, reverse=reverse), None - ) - - -def order_by_t(func: Callable): - """ - Transformation for Sequence.order_by - :param func: order_by function - :return: transformation - """ - return Transformation( - f"order_by({name(func)})", - lambda sequence: sorted(sequence, key=func), - None, - ) + return Transformation("sorted", partial(sorted, key=key, reverse=reverse)) -def drop_right_t(n: int): +def drop_right_t(n: int) -> Transformation: """ Transformation for Sequence.drop_right :param n: number to drop from right @@ -208,48 +178,44 @@ def drop_right_t(n: int): else: end_index = -n return Transformation( - f"drop_right({n})", - lambda sequence: sequence[:end_index], - [ExecutionStrategies.PRE_COMPUTE], + f"drop_right({n})", lambda sequence: listify(sequence)[:end_index] ) -def drop_t(n: int): +def drop_t(n: int) -> Transformation: """ Transformation for Sequence.drop :param n: number to drop from left :return: transformation """ - return Transformation( - f"drop({n})", lambda sequence: islice(sequence, n, None), None - ) + return Transformation(f"drop({n})", lambda sequence: islice(sequence, n, None)) -def drop_while_t(func: Callable): +def drop_while_t(func: Callable) -> Transformation: """ Transformation for Sequence.drop_while :param func: drops while func is true :return: transformation """ - return Transformation(f"drop_while({name(func)})", partial(dropwhile, func), None) + return Transformation(f"drop_while({name(func)})", partial(dropwhile, func)) -def take_t(n: int): +def take_t(n: int) -> Transformation: """ Transformation for Sequence.take :param n: number to take :return: transformation """ - return Transformation(f"take({n})", lambda sequence: islice(sequence, 0, n), None) + return Transformation(f"take({n})", lambda sequence: islice(sequence, 0, n)) -def take_while_t(func: Callable): +def take_while_t(func: Callable) -> Transformation: """ Transformation for Sequence.take_while :param func: takes while func is True :return: transformation """ - return Transformation(f"take_while({name(func)})", partial(takewhile, func), None) + 
return Transformation(f"take_while({name(func)})", partial(takewhile, func)) def flat_map_impl(func: Callable, sequence): @@ -260,11 +226,10 @@ def flat_map_impl(func: Callable, sequence): :return: flat_map generator """ for element in sequence: - for value in func(element): - yield value + yield from func(element) -def flat_map_t(func): +def flat_map_t(func) -> Transformation: """ Transformation for Sequence.flat_map :param func: function to flat_map @@ -277,49 +242,29 @@ def flat_map_t(func): ) -def flatten_t(): - """ - Transformation for Sequence.flatten - :return: transformation - """ - return Transformation( - "flatten", partial(flat_map_impl, lambda x: x), {ExecutionStrategies.PARALLEL} - ) - - -def zip_t(zip_sequence): +def zip_t(zip_sequence) -> Transformation: """ Transformation for Sequence.zip :param zip_sequence: sequence to zip with :return: transformation """ return Transformation( - "zip()", lambda sequence: zip(sequence, zip_sequence), None - ) - - -def zip_with_index_t(start): - """ - Transformation for Sequence.zip_with_index - :return: transformation - """ - return Transformation( - "zip_with_index", lambda sequence: zip(sequence, count(start=start)), None + "zip()", lambda sequence: zip(sequence, zip_sequence) ) -def enumerate_t(start): +def enumerate_t(start) -> Transformation: """ Transformation for Sequence.enumerate :param start: start index for enumerate :return: transformation """ return Transformation( - "enumerate", lambda sequence: enumerate(sequence, start=start), None + "enumerate", lambda sequence: enumerate(sequence, start=start) ) -def cartesian_t(iterables, repeat): +def cartesian_t(iterables, repeat: int) -> Transformation: """ Transformation for Sequence.cartesian :param iterables: elements for cartesian product @@ -327,26 +272,26 @@ def cartesian_t(iterables, repeat): :return: transformation """ return Transformation( - "cartesian", lambda sequence: product(sequence, *iterables, repeat=repeat), None + "cartesian", lambda sequence: product(sequence, *iterables, repeat=repeat) ) -def init_t(): +def tail_t() -> Transformation: """ - Transformation for Sequence.init + Transformation for Sequence.tail :return: transformation """ - return Transformation( - "init", lambda sequence: sequence[:-1], {ExecutionStrategies.PRE_COMPUTE} - ) + return Transformation("tail", lambda sequence: islice(sequence, 1, None)) -def tail_t(): +def _inits(sequence: Iterable[T], wrap) -> list[Sequence[T]]: """ - Transformation for Sequence.tail - :return: transformation + Implementation for inits_t + :param sequence: sequence to inits + :return: inits of sequence """ - return Transformation("tail", lambda sequence: islice(sequence, 1, None), None) + lseq = listify(sequence) + return [wrap(lseq[:i]) for i in reversed(range(len(lseq) + 1))] def inits_t(wrap): @@ -355,13 +300,17 @@ def inits_t(wrap): :param wrap: wrap children values with this :return: transformation """ - return Transformation( - "inits", - lambda sequence: [ - wrap(sequence[:i]) for i in reversed(range(len(sequence) + 1)) - ], - {ExecutionStrategies.PRE_COMPUTE}, - ) + return Transformation("inits", partial(_inits, wrap=wrap)) + + +def _tails(sequence: Iterable[T], wrap) -> list[Sequence[T]]: + """ + Implementation for tails_t + :param sequence: sequence to tails + :return: tails of sequence + """ + lseq = listify(sequence) + return [wrap(lseq[i:]) for i in range(len(lseq) + 1)] def tails_t(wrap): @@ -370,11 +319,7 @@ def tails_t(wrap): :param wrap: wrap children values with this :return: transformation """ - 
return Transformation( - "tails", - lambda sequence: [wrap(sequence[i:]) for i in range(len(sequence) + 1)], - {ExecutionStrategies.PRE_COMPUTE}, - ) + return Transformation("tails", partial(_tails, wrap=wrap)) def union_t(other): @@ -383,7 +328,7 @@ def union_t(other): :param other: sequence to union with :return: transformation """ - return Transformation("union", lambda sequence: set(sequence).union(other), None) + return Transformation("union", lambda sequence: set(sequence).union(other)) def intersection_t(other): @@ -393,7 +338,7 @@ def intersection_t(other): :return: transformation """ return Transformation( - "intersection", lambda sequence: set(sequence).intersection(other), None + "intersection", lambda sequence: set(sequence).intersection(other) ) @@ -404,7 +349,7 @@ def difference_t(other): :return: transformation """ return Transformation( - "difference", lambda sequence: set(sequence).difference(other), None + "difference", lambda sequence: set(sequence).difference(other) ) @@ -417,7 +362,6 @@ def symmetric_difference_t(other): return Transformation( "symmetric_difference", lambda sequence: set(sequence).symmetric_difference(other), - None, ) @@ -428,11 +372,8 @@ def group_by_key_impl(sequence): :return: grouped sequence """ result = {} - for element in sequence: - if result.get(element[0]): - result.get(element[0]).append(element[1]) - else: - result[element[0]] = [element[1]] + for key, value in sequence: + result.setdefault(key, []).append(value) return result.items() @@ -441,7 +382,7 @@ def group_by_key_t(): Transformation for Sequence.group_by_key :return: transformation """ - return Transformation("group_by_key", group_by_key_impl, None) + return Transformation("group_by_key", group_by_key_impl) def reduce_by_key_impl(func, sequence): @@ -467,7 +408,7 @@ def reduce_by_key_t(func): :return: transformation """ return Transformation( - f"reduce_by_key({name(func)})", partial(reduce_by_key_impl, func), None + f"reduce_by_key({name(func)})", partial(reduce_by_key_impl, func) ) @@ -478,8 +419,6 @@ def accumulate_impl(func, sequence): :param sequence: sequence to accumulate :param func: accumulate function """ - from itertools import accumulate - return accumulate(sequence, func) @@ -487,9 +426,7 @@ def accumulate_t(func): """ Transformation for Sequence.accumulate """ - return Transformation( - f"accumulate({name(func)})", partial(accumulate_impl, func), None - ) + return Transformation(f"accumulate({name(func)})", partial(accumulate_impl, func)) def count_by_key_impl(sequence): @@ -498,10 +435,7 @@ def count_by_key_impl(sequence): :param sequence: sequence of (key, value) pairs :return: counts by key """ - counter = collections.Counter() - for key, _ in sequence: - counter[key] += 1 - return counter.items() + return collections.Counter(key for key, _ in sequence).items() def count_by_key_t(): @@ -509,7 +443,7 @@ def count_by_key_t(): Transformation for Sequence.count_by_key :return: transformation """ - return Transformation("count_by_key", count_by_key_impl, None) + return Transformation("count_by_key", count_by_key_impl) def count_by_value_impl(sequence): @@ -518,10 +452,7 @@ def count_by_value_impl(sequence): :param sequence: sequence of values :return: counts by value """ - counter = collections.Counter() - for e in sequence: - counter[e] += 1 - return counter.items() + return collections.Counter(sequence).items() def count_by_value_t(): @@ -529,7 +460,7 @@ def count_by_value_t(): Transformation for Sequence.count_by_value :return: transformation """ - return 
Transformation("count_by_value", count_by_value_impl, None) + return Transformation("count_by_value", count_by_value_impl) def group_by_impl(func, sequence): @@ -541,10 +472,7 @@ def group_by_impl(func, sequence): """ result = {} for element in sequence: - if result.get(func(element)): - result.get(func(element)).append(element) - else: - result[func(element)] = [element] + result.setdefault(func(element), []).append(element) return result.items() @@ -554,10 +482,10 @@ def group_by_t(func): :param func: grouping function :return: transformation """ - return Transformation(f"group_by({name(func)})", partial(group_by_impl, func), None) + return Transformation(f"group_by({name(func)})", partial(group_by_impl, func)) -def grouped_impl(size, sequence): +def grouped_impl(size: int, sequence: Iterable[T]) -> Iterable[list[T]]: """ Implementation for grouped_t :param size: size of groups @@ -573,16 +501,18 @@ def grouped_impl(size, sequence): return -def grouped_t(size): +def grouped_t(size: int) -> Transformation: """ Transformation for Sequence.grouped :param size: size of groups :return: transformation """ - return Transformation(f"grouped({size})", partial(grouped_impl, size), None) + return Transformation(f"grouped({size})", partial(grouped_impl, size)) -def sliding_impl(wrap, size, step, sequence): +def sliding_impl( + wrap, size: int, step: int, sequence: Iterable[T] +) -> Iterable[list[T]]: """ Implementation for sliding_t :param wrap: wrap children values with this @@ -591,10 +521,11 @@ def sliding_impl(wrap, size, step, sequence): :param sequence: sequence to create sliding windows from :return: sequence of sliding windows """ + lseq = listify(sequence) i = 0 - n = len(sequence) + n = len(lseq) while i + size <= n or (step != 1 and i < n): - yield wrap(sequence[i : i + size]) + yield wrap(lseq[i : i + size]) i += step @@ -607,9 +538,7 @@ def sliding_t(wrap, size, step): :return: transformation """ return Transformation( - f"sliding({size}, {step})", - partial(sliding_impl, wrap, size, step), - {ExecutionStrategies.PRE_COMPUTE}, + f"sliding({size}, {step})", partial(sliding_impl, wrap, size, step) ) @@ -633,7 +562,7 @@ def partition_t(wrap, func): :return: transformation """ return Transformation( - f"partition({name(func)})", partial(partition_impl, wrap, func), None + f"partition({name(func)})", partial(partition_impl, wrap, func) ) @@ -677,9 +606,9 @@ def join_impl(other, join_type, sequence): elif join_type == "right": keys = other_kv.keys() elif join_type == "outer": - keys = set(list(seq_kv.keys()) + list(other_kv.keys())) + keys = (seq_kv | other_kv).keys() # keeps ordering vs set union else: - raise TypeError("Wrong type of join specified") + raise ValueError("Wrong type of join specified") result = {} for k in keys: result[k] = (seq_kv.get(k), other_kv.get(k)) @@ -694,9 +623,7 @@ def join_t(other, join_type): :param join_type: join type from left, right, inner, and outer :return: transformation """ - return Transformation( - f"{join_type}_join", partial(join_impl, other, join_type), None - ) + return Transformation(f"{join_type}_join", partial(join_impl, other, join_type)) def peek_impl(func, sequence): @@ -717,4 +644,4 @@ def peek_t(func: Callable): :param func: peek function :return: transformation """ - return Transformation(f"peek({name(func)})", partial(peek_impl, func), None) + return Transformation(f"peek({name(func)})", partial(peek_impl, func)) diff --git a/functional/util.py b/functional/util.py index 2cac321..db47804 100644 --- a/functional/util.py +++ 
b/functional/util.py @@ -1,18 +1,41 @@ +from __future__ import annotations + import math from collections.abc import Iterable from functools import reduce from itertools import chain, count, islice, takewhile from multiprocessing import Pool, cpu_count -from typing import Any +from typing import Any, Callable, Optional, Protocol, Sized, TypeVar, Union, cast import dill as serializer # type: ignore +from typing_extensions import TypeAlias + +T = TypeVar("T") +U = TypeVar("U") +_T_contra = TypeVar("_T_contra", contravariant=True) + + +# from typeshed +class SupportsDunderLT(Protocol[_T_contra]): + def __lt__(self, __other: _T_contra) -> bool: + ... + +class SupportsDunderGT(Protocol[_T_contra]): + def __gt__(self, __other: _T_contra) -> bool: + ... + + +SupportsRichComparison: TypeAlias = Union[SupportsDunderLT[Any], SupportsDunderGT[Any]] +SupportsRichComparisonT = TypeVar( + "SupportsRichComparisonT", bound=SupportsRichComparison +) # noqa: Y001 PROTOCOL = serializer.HIGHEST_PROTOCOL CPU_COUNT = cpu_count() -def is_primitive(val): +def is_primitive(val: object) -> bool: """ Checks if the passed value is a primitive type. @@ -32,6 +55,7 @@ def is_primitive(val): False >>> is_primitive(set([])) + False :param val: value to check :return: True if value is a primitive, else False @@ -39,7 +63,7 @@ def is_primitive(val): return isinstance(val, (str, bool, float, complex, bytes, int)) -def is_namedtuple(val): +def is_namedtuple(val: object) -> bool: """ Use Duck Typing to check if val is a named tuple. Checks that val is of type tuple and contains the attribute _fields which is defined for named tuples. @@ -54,7 +78,7 @@ def is_namedtuple(val): return all(isinstance(n, str) for n in fields) -def identity(arg): +def identity(arg: T) -> T: """ Function which returns the argument. Used as a default lambda function. @@ -68,15 +92,15 @@ def identity(arg): return arg -def is_iterable(val): +def is_iterable_not_list(val: object) -> bool: """ Check if val is not a list, but is a Iterable type. 
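# Small illustration of the coalesce helper (renamed from default_value) exercised in
# the tests above: it returns the first argument that is not None and raises
# ValueError when every argument is None.
from functional.util import coalesce

assert coalesce(None, False, True) is False    # falsy-but-not-None values are still returned
assert coalesce(None, None, 3) == 3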
This is used to determine when list() should be called on val >>> l = [1, 2] - >>> is_iterable(l) + >>> is_iterable_not_list(l) False - >>> is_iterable(iter(l)) + >>> is_iterable_not_list(iter(l)) True :param val: value to check @@ -87,16 +111,16 @@ def is_iterable(val): def is_tabulatable(val: object) -> bool: return not is_primitive(val) and ( - is_iterable(val) or is_namedtuple(val) or isinstance(val, list) + is_iterable_not_list(val) or is_namedtuple(val) or isinstance(val, list) ) -def split_every(parts, iterable): +def split_every(parts: int, iterable: Iterable[T]) -> Iterable[list[T]]: """ Split an iterable into parts of length parts >>> l = iter([1, 2, 3, 4]) - >>> split_every(2, l) + >>> list(split_every(2, l)) [[1, 2], [3, 4]] :param iterable: iterable to split @@ -106,7 +130,7 @@ def split_every(parts, iterable): return takewhile(bool, (list(islice(iterable, parts)) for _ in count())) -def unpack(packed): +def unpack(packed: bytes) -> Optional[list]: """ Unpack the function and args then apply the function to the arguments and return result :param packed: input packed tuple of (func, args) @@ -119,7 +143,7 @@ def unpack(packed): return None -def pack(func, args): +def pack(func: Callable, args: Iterable) -> bytes: """ Pack a function and the args it should be applied to :param func: Function to apply @@ -129,26 +153,36 @@ def pack(func, args): return serializer.dumps((func, args), PROTOCOL) -def parallelize(func, result, processes=None, partition_size=None): +def parallelize( + func: Callable[[T], U], + seq: Iterable[T], + processes: Optional[int] = None, + partition_size: Optional[int] = None, +): """ - Creates an iterable which is lazily computed in parallel from applying func on result + Creates an iterable which is lazily computed in parallel from applying func on seq :param func: Function to apply - :param result: Data to apply to + :param seq: Data to apply to :param processes: Number of processes to use in parallel :param partition_size: Size of partitions for each parallel process - :return: Iterable of applying func on result + :return: Iterable of applying func on seq """ parallel_iter = lazy_parallelize( - func, result, processes=processes, partition_size=partition_size + func, seq, processes=processes, partition_size=partition_size ) return chain.from_iterable(parallel_iter) -def lazy_parallelize(func, result, processes=None, partition_size=None): +def lazy_parallelize( + func: Callable[[T], U], + seq: Iterable[T], + processes: Optional[int] = None, + partition_size: Optional[int] = None, +) -> Iterable[list[U]]: """ - Lazily computes an iterable in parallel, and returns them in pool chunks + Lazily computes an map in parallel, and returns them in pool chunks :param func: Function to apply - :param result: Data to apply to + :param seq: Data to apply to :param processes: Number of processes to use in parallel :param partition_size: Size of partitions for each parallel process :return: Iterable of chunks where each chunk as func applied to it @@ -157,14 +191,16 @@ def lazy_parallelize(func, result, processes=None, partition_size=None): processes = CPU_COUNT else: processes = min(processes, CPU_COUNT) - partition_size = partition_size or compute_partition_size(result, processes) + partition_size = partition_size or compute_partition_size( + cast(Sized, seq), processes + ) with Pool(processes=processes) as pool: - partitions = split_every(partition_size, iter(result)) + partitions = split_every(partition_size, iter(seq)) packed_partitions = (pack(func, (partition,)) for 
partition in partitions) yield from pool.imap(unpack, packed_partitions) -def compute_partition_size(result, processes): +def compute_partition_size(result: Sized, processes: int) -> int: """ Attempts to compute the partition size to evenly distribute work across processes. Defaults to 1 if the length of result cannot be determined. @@ -179,7 +215,7 @@ def compute_partition_size(result, processes): return 1 -def compose(*functions): +def compose(*functions: Callable) -> Callable: """ Compose all the function arguments together :param functions: Functions to compose @@ -189,7 +225,7 @@ def compose(*functions): return reduce(lambda f, g: lambda x: f(g(x)), functions, lambda x: x) -def default_value(*vals: Any): +def coalesce(*vals: Optional[bool]) -> bool: for val in vals: if val is not None: return val diff --git a/poetry.lock b/poetry.lock index 6a7c934..af7cb75 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "astroid" -version = "2.15.6" +version = "2.15.8" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, - {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, ] [package.dependencies] @@ -282,37 +282,38 @@ files = [ [[package]] name = "mypy" -version = "1.4.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = 
"mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -323,7 +324,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -339,76 +340,121 @@ files = [ [[package]] name = "numpy" -version = "1.21.6" -description = "NumPy is the fundamental package for array computing with Python." 
-optional = false -python-versions = ">=3.7,<3.11" -files = [ - {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25"}, - {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e"}, - {file = "numpy-1.21.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6"}, - {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb"}, - {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1"}, - {file = "numpy-1.21.6-cp310-cp310-win32.whl", hash = "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c"}, - {file = "numpy-1.21.6-cp310-cp310-win_amd64.whl", hash = "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f"}, - {file = "numpy-1.21.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db"}, - {file = "numpy-1.21.6-cp37-cp37m-win32.whl", hash = "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e"}, - {file = "numpy-1.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4"}, - {file = "numpy-1.21.6-cp38-cp38-win32.whl", hash = "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470"}, - {file = "numpy-1.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b"}, - {file = "numpy-1.21.6-cp39-cp39-win32.whl", hash = "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786"}, - {file = "numpy-1.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3"}, - {file = "numpy-1.21.6-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0"}, - {file = "numpy-1.21.6.zip", hash = "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656"}, +version = "1.24.3" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, + {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, + {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, + {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, + {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, + {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, + {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, + {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, + {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, + {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, + {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, + {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, + {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, + {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, + {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, + {file = 
"numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, + {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, + {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, + {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, + {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, + {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, + {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, + {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, + {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, + {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, + {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, ] [[package]] name = "numpy" -version = "1.25.1" +version = "1.24.4" description = "Fundamental package for array computing in Python" -optional = false +optional = true +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "numpy" +version = "1.26.3" +description = "Fundamental package for array computing in Python" +optional = true python-versions = ">=3.9" files = [ - {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, - {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, - {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, - {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, - {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, - {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, - {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, - {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, - {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, - {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, - {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, - {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, - {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, - {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, - {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, - {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, - {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, - {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -473,7 +519,7 @@ test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] name = "pandas-stubs" version = "2.0.3.230814" description = "Type annotations for pandas" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = "sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, @@ -530,17 +576,17 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pylint" -version = "2.17.4" +version = "2.17.7" description = "python code static checker" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.4-py3-none-any.whl", hash = "sha256:7a1145fb08c251bdb5cca11739722ce64a63db479283d10ce718b2460e54123c"}, - {file = "pylint-2.17.4.tar.gz", hash = "sha256:5dcf1d9e19f41f38e4e85d10f511e5b9c35e1aa74251bf95cdd8cb23584e2db1"}, + {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, + {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, ] [package.dependencies] -astroid = ">=2.15.4,<=2.17.0-dev0" +astroid = ">=2.15.8,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -673,7 +719,7 @@ files = [ name = "types-pytz" version = "2023.3.1.1" description = "Typing stubs for pytz" -optional = false +optional = true python-versions = "*" files = [ {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, @@ -792,4 +838,4 @@ all = ["pandas"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "df9321536c517f64571916d7ede61c1d058a8e52d53c3f9b34f2d07b6222383d" +content-hash = "d00b6b68fcfa1218fa711f26f103999adafcb60f96cddf825b808e5e705b0084" diff --git a/pyproject.toml b/pyproject.toml index ff3471e..c26de6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ packages = [{ include = "functional" }] python = "^3.8.0" dill = ">=0.2.5" tabulate = "<=1.0.0" -pandas = { version = "^1.0.3", optional = true } +pandas = { version = ">=1.0.3", optional = true } [tool.poetry.extras] all = ["pandas"] @@ -43,10 +43,13 @@ pytest-cov = "^4.0.0" coverage = "^7.2.5" 
[tool.poetry.group.dev.dependencies] -mypy = "^1.1.1" +mypy = "^1.8.0" types-tabulate = "^0.9.0.3" -pandas-stubs = "^2.0.3.230814" +pandas-stubs = { version = "^2.0.3.230814", optional = true } [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" + +[tool.pytest.ini_options] +doctest_optionflags = "NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL"
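
The snippet below is a standalone sanity check of the behavioural points in the transformation changes above: the setdefault rewrite of group_by_impl, the listified input of sliding_impl (which replaces the removed PRE_COMPUTE execution strategy), and the dict-union key collection for outer joins. It mirrors the "+" lines of the diff but is not part of the patch; the sample data, the list() stand-in for listify, and the plain-dict stand-ins for seq_kv/other_kv are assumptions for illustration only, and the dict | union requires Python 3.9 or newer.

def group_by_impl(func, sequence):
    # setdefault replaces the explicit key-present/key-absent branching
    result = {}
    for element in sequence:
        result.setdefault(func(element), []).append(element)
    return result.items()

print(dict(group_by_impl(len, ["a", "bb", "cc", "d"])))   # {1: ['a', 'd'], 2: ['bb', 'cc']}


def sliding_impl(wrap, size, step, sequence):
    # the input is materialised up front (list() stands in for listify here),
    # so sliding windows no longer rely on a PRE_COMPUTE execution strategy
    lseq = list(sequence)
    i = 0
    n = len(lseq)
    while i + size <= n or (step != 1 and i < n):
        yield wrap(lseq[i : i + size])
        i += step

print(list(sliding_impl(list, 2, 1, iter(range(4)))))     # [[0, 1], [1, 2], [2, 3]]


# Outer-join keys: dict union keeps insertion order, unlike the old set union.
# Plain dicts stand in for seq_kv / other_kv; dict | dict needs Python 3.9+.
seq_kv = {"b": 1, "a": 2}
other_kv = {"c": 3, "a": 4}
print(list((seq_kv | other_kv).keys()))                   # ['b', 'a', 'c']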