Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove pylint #1023

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@ fmt: .venv
lint: .venv
.venv/bin/ruff check
.venv/bin/ruff format --check
PYTHONPATH=. .venv/bin/pylint baseplate/
.venv/bin/mypy baseplate/

.PHONY: checks
Expand Down
2 changes: 0 additions & 2 deletions baseplate/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,6 @@ def __getattr__(self, name: str) -> Any:
return obj

# this is just here for type checking
# pylint: disable=useless-super-delegation
def __setattr__(self, name: str, value: Any) -> None:
super().__setattr__(name, value)

Expand Down Expand Up @@ -286,7 +285,6 @@ def register(self, observer: BaseplateObserver) -> None:
"""
self.observers.append(observer)

# pylint: disable=cyclic-import
def configure_observers(self) -> None:
"""Configure diagnostics observers based on application configuration.

Expand Down
7 changes: 3 additions & 4 deletions baseplate/clients/cassandra.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,14 @@
)

from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import ( # pylint: disable=no-name-in-module
from cassandra.cluster import (
_NOT_SET,
Cluster,
ExecutionProfile,
ResponseFuture,
Session,
)
from cassandra.query import ( # pylint: disable=no-name-in-module
from cassandra.query import (
BoundStatement,
PreparedStatement,
SimpleStatement,
Expand Down Expand Up @@ -224,7 +224,6 @@ class CQLMapperContextFactory(CassandraContextFactory):
def make_object_for_context(self, name: str, span: Span) -> "cqlmapper.connection.Connection":
# Import inline so you can still use the regular Cassandra integration
# without installing cqlmapper
# pylint: disable=redefined-outer-name
import cqlmapper.connection

session_adapter = super().make_object_for_context(name, span)
Expand Down Expand Up @@ -271,7 +270,7 @@ def wait_for_callbacks_result(self: ResponseFuture) -> Any:
logger.warning("Cassandra metrics callback took too long. Some metrics may be lost.")

if exc:
raise exc # pylint: disable=E0702
raise exc

return result

Expand Down
2 changes: 1 addition & 1 deletion baseplate/clients/kombu.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def deserialize(self, message: bytes) -> T:
"""Deserialize the message bytes into an object for consuming."""


class KombuThriftSerializer(KombuSerializer[T]): # pylint: disable=unsubscriptable-object
class KombuThriftSerializer(KombuSerializer[T]):
"""Thrift object serializer for Kombu."""

def __init__(
Expand Down
16 changes: 4 additions & 12 deletions baseplate/clients/memcache/lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@ class Flags:
ZLIB = 1 << 3


def decompress_and_load( # pylint: disable=unused-argument
key: str, serialized: bytes, flags: int
) -> Any:
def decompress_and_load(key: str, serialized: bytes, flags: int) -> Any:
"""Deserialize data.

This should be paired with
Expand Down Expand Up @@ -97,9 +95,7 @@ def make_dump_and_compress_fn(
assert min_compress_length >= 0
assert 0 <= compress_level <= 9

def dump_and_compress( # pylint: disable=unused-argument
key: str, value: Any
) -> tuple[bytes, int]:
def dump_and_compress(key: str, value: Any) -> tuple[bytes, int]:
"""Serialize a Python object in a way compatible with decompress_and_load().

:param key: the memcached key.
Expand Down Expand Up @@ -147,9 +143,7 @@ class PickleFlags:
ZLIB = 1 << 3


def decompress_and_unpickle( # pylint: disable=unused-argument
key: str, serialized: bytes, flags: int
) -> Any:
def decompress_and_unpickle(key: str, serialized: bytes, flags: int) -> Any:
"""Deserialize data stored by ``pylibmc``.

.. warning:: This should only be used when sharing caches with applications
Expand Down Expand Up @@ -214,9 +208,7 @@ def make_pickle_and_compress_fn(
assert min_compress_length >= 0
assert 0 <= compress_level <= 9

def pickle_and_compress( # pylint: disable=unused-argument
key: str, value: Any
) -> tuple[bytes, int]:
def pickle_and_compress(key: str, value: Any) -> tuple[bytes, int]:
"""Serialize a Python object in a way compatible with decompress_and_unpickle().

:param key: the memcached key.
Expand Down
7 changes: 1 addition & 6 deletions baseplate/clients/redis.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,6 @@ def make_object_for_context(self, name: str, span: Span) -> "MonitoredRedisConne
)


# pylint: disable=too-many-public-methods
class MonitoredRedisConnection(redis.StrictRedis):
"""Redis connection that collects diagnostic information.

Expand Down Expand Up @@ -253,7 +252,6 @@ def execute_command(self, *args: Any, **kwargs: Any) -> Any:
REQUESTS_TOTAL.labels(**result_labels).inc()
LATENCY_SECONDS.labels(**result_labels).observe(perf_counter() - start_time)

# pylint: disable=arguments-renamed
def pipeline( # type: ignore
self, name: str, transaction: bool = True, shard_hint: Optional[str] = None
) -> "MonitoredRedisPipeline":
Expand Down Expand Up @@ -299,7 +297,6 @@ def __init__(
self.redis_client_name = redis_client_name
super().__init__(connection_pool, response_callbacks, **kwargs)

# pylint: disable=arguments-differ
def execute(self, **kwargs: Any) -> Any:
with self.server_span.make_child(self.trace_name):
success = "true"
Expand Down Expand Up @@ -375,9 +372,7 @@ def get(self, timeout: Optional[float] = None) -> bytes:

return message

def put( # pylint: disable=unused-argument
self, message: bytes, timeout: Optional[float] = None
) -> None:
def put(self, message: bytes, timeout: Optional[float] = None) -> None:
"""Add a message to the queue.

:param message: will be typecast to a string upon storage and will come
Expand Down
4 changes: 0 additions & 4 deletions baseplate/clients/redis_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,6 @@ def maybe_track_key_usage(self, args: list[str]) -> None:
# Unfortunately this is not provided as-is so we combine two connection pool classes to provide
# the desired behaviour.
class ClusterWithReadReplicasBlockingConnectionPool(rediscluster.ClusterBlockingConnectionPool):
# pylint: disable=arguments-differ
def get_node_by_slot(self, slot: int, read_command: bool = False) -> dict[str, Any]:
"""Get a node from the slot.

Expand Down Expand Up @@ -471,7 +470,6 @@ def execute_command(self, *args: Any, **kwargs: Any) -> Any:

return res

# pylint: disable=arguments-differ
def pipeline(self, name: str) -> "MonitoredClusterRedisPipeline":
"""Create a pipeline.

Expand All @@ -496,7 +494,6 @@ def transaction(self, *args: Any, **kwargs: Any) -> Any:
raise NotImplementedError


# pylint: disable=abstract-method
class MonitoredClusterRedisPipeline(ClusterPipeline):
def __init__(
self,
Expand All @@ -522,7 +519,6 @@ def execute_command(self, *args: Any, **kwargs: Any) -> Any:

return res

# pylint: disable=arguments-differ
def execute(self, **kwargs: Any) -> Any:
with self.server_span.make_child(self.trace_name):
success = "true"
Expand Down
2 changes: 0 additions & 2 deletions baseplate/clients/sqlalchemy.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,6 @@ def make_object_for_context(self, name: str, span: Span) -> Engine | Session:
engine = self.engine.execution_options(context_name=name, server_span=span)
return engine

# pylint: disable=unused-argument, too-many-arguments
def on_before_execute(
self,
conn: Connection,
Expand Down Expand Up @@ -262,7 +261,6 @@ def on_before_execute(

return annotated_statement, parameters

# pylint: disable=unused-argument, too-many-arguments
def on_after_execute(
self,
conn: Connection,
Expand Down
1 change: 0 additions & 1 deletion baseplate/clients/thrift.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,6 @@ def predicate(x: Any) -> bool:
class _PooledClientProxy:
"""A proxy which acts like a thrift client but uses a connection pool."""

# pylint: disable=too-many-arguments
def __init__(
self,
client_cls: Any,
Expand Down
1 change: 0 additions & 1 deletion baseplate/frameworks/pyramid/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,7 +301,6 @@ def should_trust_edge_context_payload(self, request: Request) -> bool:
return self.trust_headers


# pylint: disable=too-many-ancestors
class BaseplateRequest(RequestContext, pyramid.request.Request):
def __init__(self, *args: Any, **kwargs: Any) -> None:
context_config = kwargs.pop("context_config", None)
Expand Down
2 changes: 1 addition & 1 deletion baseplate/frameworks/pyramid/csrf.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

try:
# ICSRFStoragePolicy was not added to Pyramid until version 1.9
from pyramid.interfaces import ICSRFStoragePolicy # pylint: disable=no-name-in-module
from pyramid.interfaces import ICSRFStoragePolicy
except ImportError:
logger.error(
"baseplate.frameworks.pyramid.csrf requires that you use a version of pyramid >= 1.9"
Expand Down
6 changes: 1 addition & 5 deletions baseplate/frameworks/queue_consumer/kafka.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
)

if TYPE_CHECKING:
WorkQueue = queue.Queue[confluent_kafka.Message] # pylint: disable=unsubscriptable-object
WorkQueue = queue.Queue[confluent_kafka.Message]
else:
WorkQueue = queue.Queue

Expand Down Expand Up @@ -347,14 +347,12 @@ def make_kafka_consumer(
topic in all_topics
), f"topic '{topic}' does not exist. maybe it's misspelled or on a different kafka cluster?" # noqa: E501

# pylint: disable=unused-argument
def log_assign(
consumer: confluent_kafka.Consumer, partitions: list[confluent_kafka.TopicPartition]
) -> None:
for topic_partition in partitions:
logger.info("assigned %s/%s", topic_partition.topic, topic_partition.partition)

# pylint: disable=unused-argument
def log_revoke(
consumer: confluent_kafka.Consumer, partitions: list[confluent_kafka.TopicPartition]
) -> None:
Expand Down Expand Up @@ -469,7 +467,6 @@ def build_message_handler(self) -> KafkaMessageHandler:

self.message_handler_count += 1

# pylint: disable=unused-argument
def commit_offset(
context: RequestContext, data: Any, message: confluent_kafka.Message
) -> None:
Expand Down Expand Up @@ -542,7 +539,6 @@ class FastConsumerFactory(_BaseKafkaQueueConsumerFactory):

"""

# pylint: disable=unused-argument
@staticmethod
def _commit_callback(
err: confluent_kafka.KafkaError, topic_partition_list: list[confluent_kafka.TopicPartition]
Expand Down
2 changes: 1 addition & 1 deletion baseplate/frameworks/queue_consumer/kombu.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ class AmqpConsumerPrometheusLabels(NamedTuple):
)

if TYPE_CHECKING:
WorkQueue = queue.Queue[kombu.Message] # pylint: disable=unsubscriptable-object
WorkQueue = queue.Queue[kombu.Message]
else:
WorkQueue = queue.Queue

Expand Down
1 change: 0 additions & 1 deletion baseplate/lib/_requests.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@


class _UNIXConnection(urllib3.connectionpool.HTTPConnection):
# pylint: disable=super-init-not-called
def __init__(self, url: str):
urllib3.connectionpool.HTTPConnection.__init__(self, "localhost")
self.url = urllib.parse.urlparse(url)
Expand Down
5 changes: 2 additions & 3 deletions baseplate/lib/config.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
# pylint: disable=invalid-name
"""Configuration parsing and validation.

This module provides ``parse_config`` which turns a dictionary of stringy keys
Expand Down Expand Up @@ -174,7 +173,7 @@ class EndpointConfiguration(NamedTuple):

"""

family: socket.AddressFamily # pylint: disable=no-member
family: socket.AddressFamily
address: Union[InternetAddress, str]

def __str__(self) -> str:
Expand Down Expand Up @@ -236,7 +235,7 @@ def File(mode: str = "r") -> Callable[[str], IO]: # noqa: D401

def open_file(text: str) -> IO:
try:
return open(text, mode=mode, encoding="UTF-8") # pylint: disable=R1732
return open(text, mode=mode, encoding="UTF-8")
except OSError:
raise ValueError(f"could not open file: {text}")

Expand Down
4 changes: 2 additions & 2 deletions baseplate/lib/crypto.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ class SignatureInfo(NamedTuple):

def _compute_digest(secret_value: bytes, header: bytes, message: str) -> bytes:
payload = header + message.encode("utf8")
digest = hmac.new(secret_value, payload, hashlib.sha256).digest() # pylint: disable=no-member
digest = hmac.new(secret_value, payload, hashlib.sha256).digest()
return digest


Expand Down Expand Up @@ -149,7 +149,7 @@ def validate_signature(secret: VersionedSecret, message: str, signature: bytes)
version, expiration = _HEADER_FORMAT.unpack(header)
if version != 1:
raise ValueError
if len(signature_digest) != hashlib.sha256().digest_size: # pylint: disable=no-member
if len(signature_digest) != hashlib.sha256().digest_size:
raise ValueError
except (struct.error, KeyError, binascii.Error, TypeError, ValueError):
raise UnreadableSignatureError
Expand Down
1 change: 0 additions & 1 deletion baseplate/lib/file_watcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,6 @@ def get_data_and_mtime(self) -> tuple[T, float]:
if self._mtime < current_mtime:
logger.debug("Loading %s.", self._path)
try:
# pylint: disable=unspecified-encoding
with open(self._path, **self._open_options._asdict()) as f:
self._data = self._parser(f)
except Exception as exc:
Expand Down
1 change: 0 additions & 1 deletion baseplate/lib/live_data/zookeeper.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ def zookeeper_client_from_config(
},
)

# pylint: disable=maybe-no-member
cfg = full_cfg.zookeeper

auth_data = []
Expand Down
2 changes: 0 additions & 2 deletions baseplate/lib/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,6 @@ class Batch(BaseClient):

"""

# pylint: disable=super-init-not-called
def __init__(self, transport: Transport, namespace: bytes):
self.transport = BufferedTransport(transport)
self.namespace = namespace
Expand Down Expand Up @@ -590,7 +589,6 @@ def metrics_client_from_config(raw_config: config.RawConfig) -> Client:
},
)

# pylint: disable=maybe-no-member
return make_client(
namespace=cfg.metrics.namespace,
endpoint=cfg.metrics.endpoint,
Expand Down
7 changes: 3 additions & 4 deletions baseplate/lib/secrets.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,6 @@ def parse_secrets_fetcher(data: dict[str, Any], secret_path: str = "") -> dict[s
raise SecretNotFoundError(secret_path)


# pylint: disable=unused-argument
def parse_vault_csi(data: dict[str, Any], secret_path: str = "") -> dict[str, str]:
return data["data"]

Expand All @@ -145,7 +144,7 @@ def __init__(
timeout: Optional[int] = None,
backoff: Optional[float] = None,
parser: Optional[SecretParser] = None,
): # pylint: disable=super-init-not-called
):
self.parser = parser or parse_secrets_fetcher
self._filewatcher = FileWatcher(path, json.load, timeout=timeout, backoff=backoff)

Expand Down Expand Up @@ -361,7 +360,7 @@ def make_object_for_context(self, name: str, span: Span) -> "SecretsStore":
class _CachingSecretsStore(SecretsStore):
"""Lazily load and cache the parsed data until the server span ends."""

def __init__(self, filewatcher: FileWatcher, parser: SecretParser): # pylint: disable=super-init-not-called
def __init__(self, filewatcher: FileWatcher, parser: SecretParser):
self._filewatcher = filewatcher
self.parser = parser

Expand Down Expand Up @@ -397,7 +396,7 @@ def __init__(
self,
path: str,
parser: SecretParser,
): # pylint: disable=super-init-not-called
):
self.path = Path(path)
self.parser = parser
self.cache = {}
Expand Down
2 changes: 0 additions & 2 deletions baseplate/lib/service_discovery.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,6 @@ def get_backends(self) -> Sequence[Backend]:

"""
try:
# pylint: disable=maybe-no-member
return self._filewatcher.get_data().backends
except WatchedFileNotAvailableError:
return []
Expand All @@ -119,7 +118,6 @@ def get_backend(self) -> Backend:
except WatchedFileNotAvailableError:
inventory = None

# pylint: disable=maybe-no-member
if not inventory or not inventory.lottery:
raise NoBackendsAvailableError

Expand Down
Loading