use ruff linter
jannikmi committed Mar 14, 2024
1 parent bda1002 commit d9abf65
Showing 21 changed files with 603 additions and 495 deletions.
47 changes: 11 additions & 36 deletions .pre-commit-config.yaml
@@ -17,39 +17,20 @@ repos:
       - id: requirements-txt-fixer
       - id: detect-private-key

-  - repo: https://github.com/pycqa/isort
-    rev: 5.13.2
-    hooks:
-      - id: isort
-        args: [ "--profile", "black", "--filter-files" ]
-
-  - repo: https://github.com/psf/black
-    rev: 24.1.1
-    hooks:
-      - id: black
-        language_version: python3
-        args:
-          - --line-length=120
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.2
+    hooks:
+      # linter.
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format

   - repo: https://github.com/asottile/blacken-docs
     rev: 1.16.0
     hooks:
       - id: blacken-docs

-  - repo: https://github.com/pycqa/flake8
-    rev: 7.0.0
-    hooks:
-      - id: flake8
-        exclude: ^(docs|scripts|tests)/|build.py
-        args:
-          - --ignore=E402  # E402 module level import not at top of file
-          - --max-line-length=120
-
-        additional_dependencies:
-          - flake8-bugbear
-          - flake8-comprehensions
-          - flake8-tidy-imports
-
 # TODO also check docs/. make compatible with sphinx
   - repo: https://github.com/myint/rstcheck
     rev: 'v6.2.0'
@@ -63,22 +44,16 @@ repos:
       - id: validate-pyproject

   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.15.0
+    rev: v3.15.1
     hooks:
       - id: pyupgrade

-# # TODO very detailed linting:
-# - repo: https://github.com/pycqa/pylint
-#   rev: v2.13.8
-#   hooks:
-#     - id: pylint
-
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.8.0
+    rev: v1.9.0
     hooks:
       - id: mypy
         exclude: ^((tests|scripts)/)
-#

 # - repo: https://github.com/mgedmin/check-manifest
 #   rev: "0.48"
 #   hooks:
@@ -87,6 +62,6 @@
 #     additional_dependencies: [ numpy, poetry==1.1.11 ]

   - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: v17.0.6
+    rev: v18.1.1
     hooks:
       - id: clang-format
2 changes: 1 addition & 1 deletion Makefile
@@ -45,7 +45,7 @@ hook:
 	@pre-commit install
 	@pre-commit run --all-files

-hook2:
+hookup:
 	@pre-commit autoupdate

 hook3:
16 changes: 11 additions & 5 deletions build.py
@@ -1,4 +1,4 @@
-""" optionally builds inside polygon algorithm C extension
+"""optionally builds inside polygon algorithm C extension

 Resources:
 https://github.com/FirefoxMetzger/mini-extension
@@ -25,7 +25,9 @@
 try:
     ffibuilder = cffi.FFI()
 except Exception as exc:
-    warnings.warn(f"C lang extension cannot be build, since cffi failed with this error: {exc}")
+    warnings.warn(
+        f"C lang extension cannot be build, since cffi failed with this error: {exc}"
+    )
     # Clang extension should be fully optional
     ffibuilder = None

@@ -50,15 +52,17 @@

 def build_c_extension():
     if ffibuilder is None:
-        warnings.warn(f"missing ffibuilder. skipping build process")
+        warnings.warn("missing ffibuilder. skipping build process")
         return

     # not required
     # ffibuilder.compile(verbose=True)

     # Note: built into "timezonefinder" package folder
     distribution = setuptools.Distribution({"package_dir": {"": "timezonefinder"}})
-    cffi.setuptools_ext.cffi_modules(distribution, "cffi_modules", ["build.py:ffibuilder"])
+    cffi.setuptools_ext.cffi_modules(
+        distribution, "cffi_modules", ["build.py:ffibuilder"]
+    )
     cmd = distribution.cmdclass["build_ext"](distribution)
     cmd.inplace = 1
     cmd.ensure_finalized()
@@ -68,7 +72,9 @@ def build_c_extension():
     # distutils.errors.CompileError:
     # a build failure in the extension (e.g. C compile is not installed) must not abort the build process,
     # but instead simply not install the failing extension.
-    warnings.warn(f"C lang extension cannot be build, since cmd.run() failed with this error: {exc}")
+    warnings.warn(
+        f"C lang extension cannot be build, since cmd.run() failed with this error: {exc}"
+    )


 if __name__ == "__main__":
3 changes: 2 additions & 1 deletion docs/conf.py
@@ -26,7 +26,8 @@
 # This ensures that the source package is importable
 sys.path.insert(0, os.path.join(project_root))

-import timezonefinder  # needed for auto document, ATTENTION: must then be installed during online build!
+# needed for auto document, ATTENTION: must then be installed during online build!
+import timezonefinder  # noqa: E402 Module level import not at top of file

 print(timezonefinder)
11 changes: 8 additions & 3 deletions pyproject.toml
@@ -79,9 +79,14 @@ build-backend = "poetry.core.masonry.api"
 script = "build.py"
 generate-setup-file = false

-[tool.isort]
-profile = "black"
-multi_line_output = 3
+[tool.ruff]
+target-version = "py38"
+src = ["timezonefinder", "tests", "build.py"]
+
+[tool.ruff.lint.isort]
+known-local-folder = ["timezonefinder"]


 [tool.tox]
 legacy_tox_ini = """
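A note on the new ruff configuration above: the flake8 plugins removed from .pre-commit-config.yaml (flake8-bugbear, flake8-comprehensions, flake8-tidy-imports) have built-in ruff counterparts under the rule groups B, C4, and TID, and the old --max-line-length=120 and --ignore=E402 flags map to ruff settings as well. A minimal TOML sketch of what such an extended configuration could look like (illustrative only, assuming ruff's standard rule codes; these lines are not part of this commit):

[tool.ruff]
target-version = "py38"
line-length = 120  # matches the removed --line-length / --max-line-length flags

[tool.ruff.lint]
# B = flake8-bugbear, C4 = flake8-comprehensions, TID = flake8-tidy-imports
extend-select = ["B", "C4", "TID"]
ignore = ["E402"]  # module level import not at top of file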
10 changes: 8 additions & 2 deletions scripts/check_speed_initialisation.py
@@ -17,11 +17,17 @@
 )
 def test_initialisation_speed(class_under_test, in_memory_mode: bool):
     print()
-    print(f"testing initialiation: {class_under_test.__name__}(in_memory={in_memory_mode})")
+    print(
+        f"testing initialiation: {class_under_test.__name__}(in_memory={in_memory_mode})"
+    )

     def initialise_instance():
         class_under_test(in_memory=in_memory_mode)

-    t = timeit.timeit("initialise_instance()", globals={"initialise_instance": initialise_instance}, number=N)
+    t = timeit.timeit(
+        "initialise_instance()",
+        globals={"initialise_instance": initialise_instance},
+        number=N,
+    )
     t_avg = t / N
     print(f"avg. startup time: {t_avg:.2f} ({N} runs)\n")
8 changes: 6 additions & 2 deletions scripts/check_speed_inside_polygon.py
@@ -28,7 +28,9 @@ def gen_test_input():


 def check_inside_polygon_speed():
-    print("testing the speed of the different point in polygon algorithm implementations")
+    print(
+        "testing the speed of the different point in polygon algorithm implementations"
+    )
     print(f"testing {nr_of_runs} queries: random points and timezone polygons")
     print(f"Python implementation using Numba JIT compilation: {utils.using_numba}")

@@ -50,7 +52,9 @@ def time_func(test_func):
     print()
     t_clang = time_func(utils.pt_in_poly_clang)
     t_python = time_func(utils.pt_in_poly_python)
-    py_func_descr = f"Python implementation {'WITH' if utils.using_numba else 'WITHOUT'} Numba"
+    py_func_descr = (
+        f"Python implementation {'WITH' if utils.using_numba else 'WITHOUT'} Numba"
+    )
     if t_clang < t_python:
         speedup = (t_python / t_clang) - 1
         print(f"C implementation is {speedup:.1f}x faster than the {py_func_descr}")
2 changes: 1 addition & 1 deletion scripts/check_speed_timezone_finding.py
@@ -84,7 +84,7 @@ def test_timezone_finding_speed(

     def time_all_runs(func2time: Callable, test_inputs: Iterable):
         for lng, lat in test_inputs:
-            tz = func2time(lng=lng, lat=lat)  # 'Europe/Berlin'
+            _ = func2time(lng=lng, lat=lat)  # 'Europe/Berlin'

     def time_func(test_instance, test_func_name):
         test_func = test_instance.__getattribute__(test_func_name)
46 changes: 35 additions & 11 deletions scripts/file_converter.py
@@ -174,14 +174,19 @@ def parse_polygons_from_json(input_path: Path) -> int:
             x_coords = poly[0]
             y_coords = poly[1]
             polygon_lengths.append(len(x_coords))
-            bounds = Boundaries(np.max(x_coords), np.min(x_coords), np.max(y_coords), np.min(y_coords))
+            bounds = Boundaries(
+                np.max(x_coords), np.min(x_coords), np.max(y_coords), np.min(y_coords)
+            )
             poly_boundaries.append(bounds)
             poly_zone_ids.append(zone_id)

             # everything else is interpreted as a hole!
             for hole_nr, hole in enumerate(poly_with_hole):
                 nr_of_holes += 1  # keep track of how many holes there are
-                print(f"\rpolygon {poly_id}, zone {tz_name}, hole number {nr_of_holes}, {hole_nr+1} in polygon", end="")
+                print(
+                    f"\rpolygon {poly_id}, zone {tz_name}, hole number {nr_of_holes}, {hole_nr+1} in polygon",
+                    end="",
+                )
                 polynrs_of_holes.append(poly_id)
                 hole_poly = to_numpy_polygon(hole)
                 holes.append(hole_poly)
@@ -287,7 +292,9 @@ def pt_in_cell(pt: np.ndarray) -> bool:
     return any(map(pt_in_cell, poly.T))


-def get_corrected_hex_boundaries(x_coords, y_coords, surr_n_pole, surr_s_pole) -> Tuple["Boundaries", bool]:
+def get_corrected_hex_boundaries(
+    x_coords, y_coords, surr_n_pole, surr_s_pole
+) -> Tuple["Boundaries", bool]:
     """boundaries of a hex cell used for pre-filtering the polygons
     which have to be checked with expensive point-in-polygon algorithm
@@ -379,7 +386,9 @@ def from_id(cls, id: int):
         x_coords, y_coords = coords[0], coords[1]
         surr_n_pole = lies_in_h3_cell(id, lng=0.0, lat=MAX_LAT)
         surr_s_pole = lies_in_h3_cell(id, lng=0.0, lat=-MAX_LAT)
-        bounds, x_overflow = get_corrected_hex_boundaries(x_coords, y_coords, surr_n_pole, surr_s_pole)
+        bounds, x_overflow = get_corrected_hex_boundaries(
+            x_coords, y_coords, surr_n_pole, surr_s_pole
+        )
         return cls(id, res, coords, bounds, x_overflow, surr_n_pole, surr_s_pole)

     @property
@@ -467,7 +476,9 @@ def polys_in_cell(self) -> Set[int]:
     def zones_in_cell(self) -> Set[int]:
         if self._zones_in_cell is None:
             # lazy evaluation, caching
-            self._zones_in_cell = set(map(lambda p: poly_zone_ids[p], self.polys_in_cell))
+            self._zones_in_cell = set(
+                map(lambda p: poly_zone_ids[p], self.polys_in_cell)
+            )
         return self._zones_in_cell

     @property
@@ -527,7 +538,10 @@ def optimise_shortcut_ordering(poly_ids: List[int]) -> List[int]:
     zone_ids = [poly_zone_ids[i] for i in poly_ids]
     zone_ids_unique = list(set(zone_ids))
     zipped = list(zip(poly_ids, zone_ids, poly_sizes))
-    zone2size = {i: sum(map(lambda e: e[2], filter(lambda e: e[1] == i, zipped))) for i in zone_ids_unique}
+    zone2size = {
+        i: sum(map(lambda e: e[2], filter(lambda e: e[1] == i, zipped)))
+        for i in zone_ids_unique
+    }
     zone_ids_sorted = sorted(zone_ids_unique, key=lambda x: zone2size[x])
     poly_ids_sorted = []
     for zone_id in zone_ids_sorted:
@@ -655,7 +669,9 @@ def validate_shortcut_mapping(mapping: ShortcutMapping):
 def compile_polygon_binaries(output_path):
     global nr_of_polygons

-    def compile_addresses(length_list: List[int], multiplier: int, byte_amount_per_entry: int):
+    def compile_addresses(
+        length_list: List[int], multiplier: int, byte_amount_per_entry: int
+    ):
         adr = 0
         addresses = [adr]
         for length in length_list:
@@ -670,7 +686,9 @@ def compile_addresses(length_list: List[int], multiplier: int, byte_amount_per_e
         poly_nr2zone_id,
         upper_value_limit=nr_of_polygons + 1,
     )
-    write_binary(output_path, POLY_ZONE_IDS, poly_zone_ids, upper_value_limit=nr_of_zones)
+    write_binary(
+        output_path, POLY_ZONE_IDS, poly_zone_ids, upper_value_limit=nr_of_zones
+    )
     write_boundary_data(output_path, POLY_MAX_VALUES, poly_boundaries)
     write_coordinate_data(output_path, POLY_DATA, polygons)
     write_binary(
@@ -682,7 +700,9 @@ def compile_addresses(length_list: List[int], multiplier: int, byte_amount_per_e
     )

     # 2 entries per coordinate
-    poly_addresses = compile_addresses(polygon_lengths, multiplier=2, byte_amount_per_entry=NR_BYTES_I)
+    poly_addresses = compile_addresses(
+        polygon_lengths, multiplier=2, byte_amount_per_entry=NR_BYTES_I
+    )
     write_binary(
         output_path,
         POLY_ADR2DATA,
@@ -732,7 +752,9 @@ def compile_addresses(length_list: List[int], multiplier: int, byte_amount_per_e
     )

     # 2 entries per coordinate
-    hole_adr2data = compile_addresses(all_hole_lengths, multiplier=2, byte_amount_per_entry=NR_BYTES_I)
+    hole_adr2data = compile_addresses(
+        all_hole_lengths, multiplier=2, byte_amount_per_entry=NR_BYTES_I
+    )
     used_space = write_binary(
         output_path,
         HOLE_ADR2DATA,
@@ -770,7 +792,9 @@ def parse_data(
     import argparse

     parser = argparse.ArgumentParser(description="parse data directories")
-    parser.add_argument("-inp", help="path to input JSON file", default=DEFAULT_INPUT_PATH)
+    parser.add_argument(
+        "-inp", help="path to input JSON file", default=DEFAULT_INPUT_PATH
+    )
     parser.add_argument(
         "-out",
         help="path to output folder for storing the parsed data files",
4 changes: 3 additions & 1 deletion scripts/utils.py
@@ -187,7 +187,9 @@ def write_binary(
     path = abspath(join(output_path, bin_file_name + configs.BINARY_FILE_ENDING))
     print(f"writing {path}")
     with open(path, "wb") as output_file:
-        writing_fct(output_file, data, data_format, lower_value_limit, upper_value_limit)
+        writing_fct(
+            output_file, data, data_format, lower_value_limit, upper_value_limit
+        )
         file_length = output_file.tell()
     return file_length
6 changes: 5 additions & 1 deletion tests/auxiliaries.py
@@ -33,7 +33,11 @@ def proto_test_case(data, fct):
         # print(input, expected_output, fct(input))
         actual_output = fct(input)
         if actual_output != expected_output:
-            print("input: {} expected: {} got: {}".format(input, expected_output, actual_output))
+            print(
+                "input: {} expected: {} got: {}".format(
+                    input, expected_output, actual_output
+                )
+            )
         assert actual_output == expected_output