Add pyupgrade #4369

Merged · 4 commits · Aug 12, 2022
16 changes: 14 additions & 2 deletions .github/workflows/test-python.yaml
@@ -31,7 +31,13 @@ jobs:
pip install -r py-polars/build.requirements.txt
- name: Run formatting checks
run: |
cd py-polars && black --check . && blackdoc --check . && isort --check . && rustup override set nightly-2022-07-24 && cargo fmt --all -- --check && cd ..
cd py-polars
black --check .
blackdoc --check .
isort --check .
pyupgrade --py37-plus `find . -name "*.py" -type f`
rustup override set nightly-2022-07-24 && cargo fmt --all -- --check
cd ..
- name: Run linting
run: |
cd py-polars && flake8 && cd ..
@@ -76,7 +82,13 @@ jobs:
pip install -r py-polars/build.requirements.txt
- name: Run formatting checks
run: |
cd py-polars && black --check . && blackdoc --check . && isort --check . && rustup override set nightly-2022-07-24 && cargo fmt --all -- --check && cd ..
cd py-polars
black --check .
blackdoc --check .
isort --check .
pyupgrade --py37-plus `find . -name "*.py" -type f`
rustup override set nightly-2022-07-24 && cargo fmt --all -- --check
cd ..
- name: Run linting
run: |
cd py-polars && flake8 && cd ..
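For context on the new step: `pyupgrade --py37-plus` rewrites syntax that is only needed to support Python versions older than 3.7, and it exits non-zero whenever it has to modify a file, which is what lets it act as a CI check here. The backtick command substitution simply expands to every `.py` file under `py-polars`. A rough Python sketch of that file selection (illustrative only, not part of the PR):

```python
# Roughly what `find . -name "*.py" -type f` expands to for the pyupgrade call.
from pathlib import Path

py_files = [str(p) for p in Path(".").rglob("*.py") if p.is_file()]
print(len(py_files), "files would be passed to pyupgrade")
```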
1 change: 1 addition & 0 deletions py-polars/Makefile
@@ -18,6 +18,7 @@ pre-commit: venv
$(PYTHON_BIN)/isort .
$(PYTHON_BIN)/black .
$(PYTHON_BIN)/blackdoc .
$(PYTHON_BIN)/pyupgrade --py37-plus `find polars/ tests/ -name "*.py" -type f`
$(PYTHON_BIN)/mypy
$(PYTHON_BIN)/flake8
make -C .. fmt_toml
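Note that the `pre-commit` target scopes the same pyupgrade invocation to `polars/` and `tests/`, while the CI step above runs it over everything beneath `py-polars`.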
3 changes: 2 additions & 1 deletion py-polars/build.requirements.txt
@@ -16,14 +16,15 @@ pytest-cov==3.0.0
hypothesis==6.49.1
black==22.6.0
blackdoc==0.3.5
isort~=5.10.1
isort==5.10.1
mypy==0.971
flake8==5.0.2
flake8-bugbear==22.7.1
flake8-comprehensions==3.10.0
flake8-docstrings==1.6.0
flake8-simplify==0.19.3
flake8-tidy-imports==4.8.0
pyupgrade==2.37.3
ghp-import==2.1.0
sphinx==4.3.2
pydata-sphinx-theme==0.9.0
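The `isort` requirement also changes from a compatible-release specifier (`~=5.10.1`, which permits any 5.10.x release) to an exact pin (`==5.10.1`), matching how the other lint and format tools in this file are pinned.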
2 changes: 1 addition & 1 deletion py-polars/polars/datatypes.py
@@ -23,7 +23,7 @@
_DOCUMENTING = True


def get_idx_type() -> Type[DataType]:
def get_idx_type() -> type[DataType]:
"""
Get the datatype used for polars Indexing

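This `Type[DataType]` → `type[DataType]` change is pyupgrade's PEP 585 rewrite: with `from __future__ import annotations` in effect, annotations are never evaluated at runtime, so the builtin `type`/`list`/`dict` generics are safe even on Python 3.7, and pyupgrade only applies the rewrite under `--py37-plus` when that future import is present. A minimal standalone sketch (simplified, not the actual `datatypes.py`):

```python
from __future__ import annotations  # annotations become strings, never evaluated


class DataType:
    """Stand-in for the real polars DataType class."""


# `type[DataType]` would raise at runtime on Python 3.7/3.8 if evaluated,
# but with postponed evaluation only type checkers ever look at it.
def get_idx_type() -> type[DataType]:
    return DataType
```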
102 changes: 51 additions & 51 deletions py-polars/polars/internals/expr.py
@@ -4,7 +4,7 @@
import math
import random
from datetime import date, datetime, timedelta
from typing import TYPE_CHECKING, Any, Callable, List, Sequence
from typing import TYPE_CHECKING, Any, Callable, Sequence

from polars import internals as pli
from polars.datatypes import (
@@ -385,7 +385,7 @@ def exclude(
self,
columns: (
str
| List[str]
| list[str]
| DataType
| type[DataType]
| DataType
@@ -1696,8 +1696,8 @@ def arg_min(self) -> Expr:

def sort_by(
self,
by: Expr | str | List[Expr | str],
reverse: bool | List[bool] = False,
by: Expr | str | list[Expr | str],
reverse: bool | list[bool] = False,
) -> Expr:
"""
Sort this column by the ordering of another column, or multiple other columns.
@@ -1756,7 +1756,7 @@ def sort_by(

return wrap_expr(self._pyexpr.sort_by(by, reverse))

def take(self, index: List[int] | Expr | pli.Series | np.ndarray[Any, Any]) -> Expr:
def take(self, index: list[int] | Expr | pli.Series | np.ndarray[Any, Any]) -> Expr:
"""
Take values by index.

@@ -2243,7 +2243,7 @@ def product(self) -> Expr:
"""
return wrap_expr(self._pyexpr.product())

def n_unique(self) -> "Expr":
def n_unique(self) -> Expr:
"""
Count unique values.

@@ -2263,7 +2263,7 @@ def n_unique(self) -> "Expr":
"""
return wrap_expr(self._pyexpr.n_unique())

def null_count(self) -> "Expr":
def null_count(self) -> Expr:
"""
Count null values.

@@ -2398,32 +2398,7 @@ def last(self) -> Expr:
"""
return wrap_expr(self._pyexpr.last())

def list(self) -> Expr:
"""
Aggregate to list.

Examples
--------
>>> df = pl.DataFrame(
... {
... "a": [1, 2, 3],
... "b": [4, 5, 6],
... }
... )
>>> df.select(pl.all().list())
shape: (1, 2)
┌───────────┬───────────┐
│ a ┆ b │
│ --- ┆ --- │
│ list[i64] ┆ list[i64] │
╞═══════════╪═══════════╡
│ [1, 2, 3] ┆ [4, 5, 6] │
└───────────┴───────────┘

"""
return wrap_expr(self._pyexpr.list())

def over(self, expr: str | Expr | List[Expr | str]) -> Expr:
def over(self, expr: str | Expr | list[Expr | str]) -> Expr:
"""
Apply window function over a subgroup.

@@ -3250,7 +3225,7 @@ def interpolate(self) -> Expr:
def rolling_min(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3350,7 +3325,7 @@ def rolling_min(
def rolling_max(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3449,7 +3424,7 @@ def rolling_max(
def rolling_mean(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3546,7 +3521,7 @@ def rolling_mean(
def rolling_sum(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3645,7 +3620,7 @@ def rolling_sum(
def rolling_std(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3715,7 +3690,7 @@ def rolling_std(
def rolling_var(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3785,7 +3760,7 @@ def rolling_var(
def rolling_median(
self,
window_size: int | str,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3853,7 +3828,7 @@ def rolling_quantile(
quantile: float,
interpolation: InterpolationMethod = "nearest",
window_size: int | str = 2,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
by: str | None = None,
@@ -3931,7 +3906,7 @@ def rolling_apply(
self,
function: Callable[[pli.Series], Any],
window_size: int,
weights: List[float] | None = None,
weights: list[float] | None = None,
min_periods: int | None = None,
center: bool = False,
) -> Expr:
@@ -5052,19 +5027,44 @@ def set_sorted(self, reverse: bool = False) -> Expr:
"""
return self.map(lambda s: s.set_sorted(reverse))

# Below are the namespaces defined. Keep these at the end of the definition of Expr,
# as to not confuse mypy with the type annotation `str` with the namespace "str"
# Keep the `list` and `str` methods below at the end of the definition of Expr,
# as to not confuse mypy with the type annotation `str` and `list`

@property
def dt(self) -> ExprDateTimeNameSpace:
"""Create an object namespace of all datetime related methods."""
return ExprDateTimeNameSpace(self)
def list(self) -> Expr:
"""
Aggregate to list.

Examples
--------
>>> df = pl.DataFrame(
... {
... "a": [1, 2, 3],
... "b": [4, 5, 6],
... }
... )
>>> df.select(pl.all().list())
shape: (1, 2)
┌───────────┬───────────┐
│ a ┆ b │
│ --- ┆ --- │
│ list[i64] ┆ list[i64] │
╞═══════════╪═══════════╡
│ [1, 2, 3] ┆ [4, 5, 6] │
└───────────┴───────────┘

"""
return wrap_expr(self._pyexpr.list())

@property
def str(self) -> ExprStringNameSpace:
"""Create an object namespace of all string related methods."""
return ExprStringNameSpace(self)

@property
def dt(self) -> ExprDateTimeNameSpace:
"""Create an object namespace of all datetime related methods."""
return ExprDateTimeNameSpace(self)

@property
def arr(self) -> ExprListNameSpace:
"""Create an object namespace of all list related methods."""
@@ -5125,7 +5125,7 @@ def field(self, name: str) -> Expr:
"""
return wrap_expr(self._pyexpr.struct_field_by_name(name))

def rename_fields(self, names: List[str]) -> Expr:
def rename_fields(self, names: list[str]) -> Expr:
"""
Rename the fields of the struct

@@ -5273,7 +5273,7 @@ def unique(self) -> Expr:
return wrap_expr(self._pyexpr.lst_unique())

def concat(
self, other: List[Expr | str] | Expr | str | pli.Series | List[Any]
self, other: list[Expr | str] | Expr | str | pli.Series | list[Any]
) -> Expr:
"""
Concat the arrays in a Series dtype List in linear time.
@@ -5309,7 +5309,7 @@ def concat(
):
return self.concat(pli.Series([other]))

other_list: List[Expr | str | pli.Series]
other_list: list[Expr | str | pli.Series]
if not isinstance(other, list):
other_list = [other]
else:
4 changes: 2 additions & 2 deletions py-polars/polars/internals/functions.py
@@ -1,7 +1,7 @@
from __future__ import annotations

from datetime import date, datetime, timedelta
from typing import TYPE_CHECKING, Optional, Sequence, overload
from typing import TYPE_CHECKING, Sequence, overload

from polars import internals as pli
from polars.datatypes import Categorical, Date, Float64
@@ -273,7 +273,7 @@ def date_range(
def cut(
s: pli.Series,
bins: list[float],
labels: Optional[list[str]] = None,
labels: list[str] | None = None,
break_point_label: str = "break_point",
category_label: str = "category",
) -> pli.DataFrame:
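`Optional[list[str]]` → `list[str] | None` is the PEP 604 union spelling. As a runtime expression `X | None` only works on Python 3.10+, but it is fine in annotations here because this module starts with `from __future__ import annotations` (visible at the top of the hunk above). A small sketch of the pattern, with a hypothetical helper name:

```python
from __future__ import annotations  # needed for `|` unions in annotations on 3.7-3.9


def pick_labels(labels: list[str] | None = None) -> list[str]:
    # Hypothetical helper, only to show the annotation style used after the rewrite.
    return labels if labels is not None else ["break_point", "category"]


print(pick_labels())            # ['break_point', 'category']
print(pick_labels(["a", "b"]))  # ['a', 'b']
```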
4 changes: 2 additions & 2 deletions py-polars/polars/internals/series.py
@@ -2557,7 +2557,7 @@ def set_at_idx(
self._s = f(idx_array, value)
return self

def cleared(self) -> "Series":
def cleared(self) -> Series:
"""
Create an empty copy of the current Series, with identical name/dtype but no
data.
@@ -2578,7 +2578,7 @@ def cleared(self) -> "Series":
"""
return self.limit(0) if len(self) > 0 else self.clone()

def clone(self) -> "Series":
def clone(self) -> Series:
"""
Very cheap deepcopy/clone.

3 changes: 1 addition & 2 deletions py-polars/tests/test_df.py
@@ -2,7 +2,6 @@

import sys
import typing
from builtins import range
from datetime import datetime, timedelta
from io import BytesIO
from typing import Any, Iterator
Expand Down Expand Up @@ -594,7 +593,7 @@ def test_read_missing_file() -> None:
pl.read_csv("fake_csv_file")

with pytest.raises(FileNotFoundError, match="fake_csv_file"):
with open("fake_csv_file", "r") as f:
with open("fake_csv_file") as f:
pl.read_csv(f)
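Not every pyupgrade rewrite is annotation-only: the two in this test file touch runtime code without changing behavior. `from builtins import range` goes away because `range` is already the builtin on Python 3, and the explicit `"r"` mode is dropped from `open()` because it is the default. A small sketch of the before/after (hypothetical file name):

```python
from pathlib import Path

Path("sample.csv").write_text("a,b\n1,2\n")  # hypothetical file just for the sketch

# Before the rewrite the code would read:
#     from builtins import range
#     with open("sample.csv", "r") as f:
# After `pyupgrade --py37-plus` the redundant pieces are gone:
with open("sample.csv") as f:  # "r" is already the default mode
    for _ in range(2):         # `range` is the builtin; no import needed
        print(f.readline().rstrip())
```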

