
Commit a2bd173

Merge pull request #125 from Doist/goncalossilva/simplify-ruff
2 parents 62ee415 + 28477ee commit a2bd173

19 files changed (+155, -262 lines)

.pre-commit-config.yaml

Lines changed: 0 additions & 3 deletions
@@ -1,6 +1,3 @@
-default_language_version:
-  python: python3.12
-
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
   rev: v4.5.0

poetry.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 24 additions & 108 deletions
@@ -20,7 +20,7 @@ readme = "README.md"
 exclude = ['tests/']
 
 [tool.poetry.dependencies]
-python = "^3.9,<3.14"
+python = ">=3.9,<3.14"
 boto3 = "^1.27.33"
 
 [tool.poetry.group.dev.dependencies]
@@ -42,19 +42,40 @@ extend-exclude = ["env", "runtime"]
 
 [tool.ruff.lint]
 select = [
+    "A", # flake8-builtins
     "ASYNC", # flake8-async
+    "B", # flake8-bugbear
     "C4", # flake8-comprehensions
     "D", # pydocstyle
+    "DTZ", # flake8-datetimez
     "E", # pycodestyle errors
     "W", # pycodestyle warnings
-    "W", # pycodestyle
     "F", # pyflakes
     "I", # isort
     "PL", # pylint
     "RUF", # ruff
     "S", # flake8-bandit
+    "T20", # flake8-print
     "SIM", # flake8-simplify
     "UP", # pyupgrade
+    "TC", # flake8-type-checking
+    "BLE", # flake8-blind-except
+    "FA", # flake8-future-annotations
+    "FIX", # flake8-fixme
+    "ICN", # flake8-import-conventions
+    "LOG", # flake8-logging
+    "G", # flake8-logging-format
+    "RET", # flake8-return
+    "ISC", # flake8-implicit-str-concat
+    "INP", # flake8-no-pep420
+    "PIE", # flake8-pie
+    "PT", # flake8-pytest-style
+    "TID", # flake8-tidy-imports
+    "PTH", # flake8-use-pathlib
+    "PERF", # perflint
+    "FURB", # refurb
+    "PGH", # pygrep-hooks
+    "PYI", # flake8-pyi
 ]
 
 ignore = [
@@ -68,127 +89,22 @@ ignore = [
     "D105",
     "D107",
     # These need to be fixed.
-    "D202",
     "D205",
     "D400",
     "D401",
 
     ## E / W - pycodestyle ##
     "E501", # line too long
-    "E203", # whitespace-before-punctuation
-    "E741", # ambiguous variable name
 
-    ## PL - pylint ##
-    # Commented-out rules are rules that we disable in pylint but are not supported by ruff yet.
-
-    # Import order issues
-    # "PLC0411", # wrong-import-order
-    # "PLC0412", # wrong-import-position
-    "PLC0414", # ungrouped-imports
-
-    # Documentation issues
-    # "C0114", # missing-module-docstring
-
-    # Complexity issues
-    # "PLR0904", # too-many-public-methods
-    # "PLC0302", # too-many-lines
-    # "PLR1702", # too-many-nested-blocks
-    # "PLR0902", # too-many-instance-attributes
-    "PLR0911", # too-many-return-statements
-    "PLR0915", # too-many-statements
-    "PLR0912", # too-many-branches
-    # "PLR0903", # too-few-public-methods
-    # "PLR0914", # too-many-locals
-    # "PLC0301", # line-too-long
+    ## PL - pylint ##
     "PLR0913", # too-many-arguments
     "PLR2004", # magic-value-comparison
-    "PLR5501", # collapsible-else-if
     "PLW0603", # global-statement
-    "PLW2901", # redefined-loop-name
-    "PLC1901", # compare-to-empty-string
-
-    ## RUF - ruff ##
-    "RUF001", # ambiguous-unicode-character-string
-    "RUF002", # ambiguous-unicode-character-docstring
-    "RUF003", # ambiguous-unicode-character-comment
-    "RUF012", # mutable-class-default
-
-    # Enable when Poetry supports PEP 621 and we migrate our configuration to it.
-    # See: https://github.com/python-poetry/poetry-core/pull/567
-    "RUF200",
 
     "S101", # assert
-    "S104", # hardcoded-bind-all-interfaces
-    "S105", # hardcoded-password-string
-    "S106", # hardcoded-password-func-arg
-    "S107", # hardcoded-password-default
-    "S110", # try-except-pass
     "S301", # suspicious-pickle-usage
-    "S303", # suspicious-insecure-hash-usage
-    "S310", # suspicious-url-open-usage
     "S311", # suspicious-non-cryptographic-random-usage
-    "S324", # hashlib-insecure-hash-function
-    "S603", # subprocess-without-shell-equals-true
-    "S607", # start-process-with-partial-path
-    "S608", # hardcoded-sql-expression
-
-    ## SIM - flake8-simplify ##
-    "SIM102", # collapsible-if
-    "SIM105", # suppressible-exception
-    "SIM108", # if-else-block-instead-of-if-exp
-    "SIM114", # if-with-same-arms
-    "SIM116", # if-else-block-instead-of-dict-lookup
-    "SIM117", # multiple-with-statements
-
-    # Enable when the rule is out of preview and false-positives are handled.
-    # See: https://docs.astral.sh/ruff/rules/in-dict-keys/
-    "SIM118", # in-dict-keys
 ]
 
-[tool.ruff.lint.per-file-ignores]
-# These files have only a bunch of imports in them to force code loading.
-"todoist/workers/todoist_handlers.py" = ["F401"]
-"todoist/signals/signal_handlers.py" = ["F401"]
-"todoist/workers/scripts_handlers.py" = ["F401"]
-"scripts/**" = ["S101"] # Allow assert statement in scripts.
-"tests/**" = ["S101"] # Allow assert statement in tests.
-
-# We allow module-level imports to be not at the top of scripts, cron,
-# and configs because we initialize Todoist environment there.
-"todoist/*/scripts/*.py" = ["E402"]
-"todoist/*/cron/*.py" = ["E402"]
-"scripts/*.py" = ["E402"]
-"configs/*.py" = ["E402"]
-
-# To import all fixtures from other conftests.
-"conftest.py" = ["F401", "F403"]
-# To import select fixtures from non-local conftests.
-# Importing and using the fixture makes it be shadowed.
-"test_*.py" = ["F401", "F811"]
-
-"tests/minimal/conftest.py" = ["E402", "F401", "F403"]
-
-[tool.ruff.lint.isort]
-section-order = [
-    "future",
-    "standard-library",
-    "third-party",
-    "parts",
-    "first-party",
-    "td-models",
-    "td-apps",
-    "local-folder",
-]
-
-[tool.ruff.lint.isort.sections]
-"parts" = ["parts"]
-"td-models" = ["todoist.models"]
-"td-apps" = ["todoist.apps"]
-
 [tool.ruff.lint.pydocstyle]
 convention = "pep257"
-
-[tool.ruff.lint.pyupgrade]
-# Required by tools like Pydantic that use type information at runtime.
-# https://github.com/asottile/pyupgrade/issues/622#issuecomment-1088766572
-keep-runtime-typing = true
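
For a sense of what the newly selected rule families catch, the sketch below shows code that several of them would flag (the rule codes come from ruff's documentation; the function and its contents are made up for illustration and are not taken from this repository):

import datetime


def log_event(message: str) -> bool:
    timestamp = datetime.datetime.now()  # DTZ005: naive datetime.now() without a tz argument
    print(f"{timestamp} {message}")  # T201: print call, flagged by flake8-print
    if message:
        return True
    else:  # RET505: unnecessary else after return
        return False

Under the new configuration, `ruff check` would report all three; typical fixes are `datetime.datetime.now(tz=datetime.timezone.utc)`, a logging call instead of print, and dropping the else branch.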

sqs_workers/async_task.py

Lines changed: 12 additions & 13 deletions
@@ -1,14 +1,12 @@
 from __future__ import annotations
 
-from collections.abc import Generator
 from contextlib import contextmanager
 from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
     Generic,
     NoReturn,
-    Optional,
     TypedDict,
 )
 
@@ -17,6 +15,8 @@
 from sqs_workers.utils import bind_arguments
 
 if TYPE_CHECKING:
+    from collections.abc import Generator
+
     from sqs_workers.queue import JobQueue
 
 
@@ -64,22 +64,21 @@ def run(self, *args: P.args, **kwargs: P.kwargs) -> Any:
                 raise TypeError("Must use keyword arguments only for batch read queues")
             kwargs = bind_arguments(self.processor, [[kwargs]], {})
             return self.processor(**kwargs)  # type:ignore[call-arg]
-        else:
-            kwargs = bind_arguments(self.processor, args, kwargs)
-            return self.processor(**kwargs)  # type:ignore[call-arg]
+        kwargs = bind_arguments(self.processor, args, kwargs)
+        return self.processor(**kwargs)  # type:ignore[call-arg]
 
     @contextmanager
     def batch(self) -> Generator[None, None, None]:
         """Context manager to add jobs in batch."""
         with self.queue.add_batch():
             yield
 
-    def delay(self, *args: P.args, **kwargs: P.kwargs) -> Optional[str]:
+    def delay(self, *args: P.args, **kwargs: P.kwargs) -> str | None:
         """Run the task asynchronously."""
-        _content_type = kwargs.pop("_content_type", self.queue.env.codec)  # type: ignore
-        _delay_seconds = kwargs.pop("_delay_seconds", None)  # type: ignore
-        _deduplication_id = kwargs.pop("_deduplication_id", None)  # type: ignore
-        _group_id = kwargs.pop("_group_id", None)  # type: ignore
+        _content_type = kwargs.pop("_content_type", self.queue.env.codec)
+        _delay_seconds = kwargs.pop("_delay_seconds", None)
+        _deduplication_id = kwargs.pop("_deduplication_id", None)
+        _group_id = kwargs.pop("_group_id", None)
 
         if self.queue.batching_policy.batching_enabled:
             if len(args) > 0:
@@ -89,10 +88,10 @@ def delay(self, *args: P.args, **kwargs: P.kwargs) -> Optional[str]:
 
         return self.queue.add_job(
             self.job_name,
-            _content_type=_content_type,  # type: ignore
-            _delay_seconds=_delay_seconds,  # type: ignore
+            _content_type=_content_type,  # type: ignore[arg-type]
+            _delay_seconds=_delay_seconds,  # type: ignore[arg-type]
             _deduplication_id=_deduplication_id,
-            _group_id=_group_id,  # type: ignore
+            _group_id=_group_id,  # type: ignore[arg-type]
             **kwargs,
         )
 
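
The async_task.py changes above follow the pattern that the newly enabled TC (flake8-type-checking) and FA (flake8-future-annotations) rules, together with pyupgrade, push toward: imports needed only for annotations move under `if TYPE_CHECKING:`, and `Optional[X]` becomes `X | None`, which stays compatible with Python 3.9 because `from __future__ import annotations` defers annotation evaluation. A minimal standalone sketch of the same pattern (hypothetical code, not from this repository):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only for the type checker; never loaded at runtime.
    from collections.abc import Generator


def take(limit: int | None = None) -> Generator[int, None, None]:
    """Yield integers up to an optional limit (illustrative helper)."""
    n = 0
    while limit is None or n < limit:
        yield n
        n += 1

Calling `list(take(3))` returns `[0, 1, 2]`, and the annotations never trigger a runtime import of `Generator` because they are evaluated lazily.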

sqs_workers/backoff_policies.py

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@ def get_visibility_timeout(self, message):
 class ConstantBackoff(BackoffPolicy):
     """
     Backoff policy which always returns the message back to the queue
-    immediately on failure
+    immediately on failure.
     """
 
     def __init__(self, backoff_value: float = 0):
@@ -22,7 +22,7 @@ def get_visibility_timeout(self, message) -> float:
 class ExponentialBackoff(BackoffPolicy):
     """
     Backoff policy which keeps the message hidden from the queue
-    with an exponential backoff
+    with an exponential backoff.
     """
 
     def __init__(

sqs_workers/batching.py

Lines changed: 4 additions & 4 deletions
@@ -12,19 +12,19 @@ class BatchingConfiguration(ABC):
     def batching_enabled(self) -> bool:
         """
         If false, messages are sent 1 by 1 to the call handler
-        If true, messages are sent as a list to the call handler
+        If true, messages are sent as a list to the call handler.
         """
         ...
 
     @property
     @abstractmethod
     def batch_size(self) -> int:
-        """Number of messages to process at once if batching_enabled"""
+        """Number of messages to process at once if batching_enabled."""
         ...
 
 
 class NoBatching(BatchingConfiguration):
-    """Configures the processor to send messages 1 by 1 to the call handler"""
+    """Configures the processor to send messages 1 by 1 to the call handler."""
 
     @property
     def batching_enabled(self) -> bool:
@@ -36,7 +36,7 @@ def batch_size(self) -> int:
 
 
 class BatchMessages(BatchingConfiguration):
-    """Configures the processor to send a list of messages to the call handler"""
+    """Configures the processor to send a list of messages to the call handler."""
 
     def __init__(self, batch_size: int):
         self.number_of_messages = batch_size

sqs_workers/config.py

Lines changed: 1 addition & 2 deletions
@@ -42,8 +42,7 @@ def get_instance(self, item, **kwargs):
         value = self[item]
         if isinstance(value, dict):
             return instantiate_from_dict(value, maker_key=self.maker_key, **kwargs)
-        else:
-            return instantiate_from_string(value, **kwargs)
+        return instantiate_from_string(value, **kwargs)
 
     def make_child(self, options=None):
         if options is None:

sqs_workers/context.py

Lines changed: 9 additions & 9 deletions
@@ -34,40 +34,40 @@ def hello_world(username=None, context=None):
 
     @classmethod
     def from_dict(cls, kwargs):
-        """Create context from the dict"""
+        """Create context from the dict."""
         return cls(**kwargs)
 
     def __init__(self, **kwargs):
-        """Create a new context and populate it with keys and values from kwargs"""
+        """Create a new context and populate it with keys and values from kwargs."""
         self._context = {}
         self.set(**kwargs)
 
     def set(self, **kwargs):
-        """Clean up current context and replace it with values from kwargs"""
+        """Clean up current context and replace it with values from kwargs."""
         self._context = kwargs.copy()
 
     def update(self, **kwargs):
-        """Extend current context with values from kwargs"""
+        """Extend current context with values from kwargs."""
         self._context.update(kwargs)
 
     def clear(self):
-        """Remove all values from the context"""
+        """Remove all values from the context."""
        self._context.clear()
 
     def to_dict(self):
-        """Convert context to dictionary"""
+        """Convert context to dictionary."""
         return self._context.copy()
 
     def get(self, key, default=None):
-        """Get value by key from the context"""
+        """Get value by key from the context."""
         return self._context.get(key, default)
 
     def __getitem__(self, item):
-        """Dict API emulation. Get value by key from the context"""
+        """Dict API emulation. Get value by key from the context."""
         return self._context[item]
 
     def __setitem__(self, key, value):
-        """Dict API emulation. Set value by key"""
+        """Dict API emulation. Set value by key."""
         self._context[key] = value
 
     @contextmanager

sqs_workers/core.py

Lines changed: 5 additions & 3 deletions
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import json
 import logging
-from typing import Any, Optional
+from typing import Any
 
 logger = logging.getLogger(__name__)
 
@@ -33,7 +35,7 @@ def __repr__(self) -> str:
 
 class RedrivePolicy:
     """
-    Redrive Policy for SQS queues
+    Redrive Policy for SQS queues.
 
     See for more details:
     https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html
@@ -58,6 +60,6 @@ def __json__(self) -> str:
         )
 
 
-def get_job_name(message) -> Optional[str]:
+def get_job_name(message) -> str | None:
     attrs = message.message_attributes or {}
     return (attrs.get("JobName") or {}).get("StringValue")
