Skip to content

Commit

Permalink
Apply unsafe fixes from ruff
Browse files Browse the repository at this point in the history
  • Loading branch information
oschwald committed Jan 29, 2025
1 parent 62c7084 commit 147f44e
Show file tree
Hide file tree
Showing 9 changed files with 49 additions and 49 deletions.
4 changes: 2 additions & 2 deletions examples/benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,9 @@
reader = maxminddb.open_database(args.file, args.mode)


def lookup_ip_address():
def lookup_ip_address() -> None:
ip = socket.inet_ntoa(struct.pack("!L", random.getrandbits(32)))
record = reader.get(str(ip))
reader.get(str(ip))


elapsed = timeit.timeit(
Expand Down
2 changes: 1 addition & 1 deletion maxminddb/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def open_database(
database: Union[AnyStr, int, os.PathLike, IO],
mode: int = MODE_AUTO,
) -> Reader:
"""Open a MaxMind DB database
"""Open a MaxMind DB database.
Arguments:
database -- A path to a valid MaxMind DB file such as a GeoIP2 database
Expand Down
2 changes: 1 addition & 1 deletion maxminddb/const.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Constants used in the API"""
"""Constants used in the API."""

MODE_AUTO = 0
MODE_MMAP_EXT = 1
Expand Down
6 changes: 3 additions & 3 deletions maxminddb/decoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,15 @@


class Decoder: # pylint: disable=too-few-public-methods
"""Decoder for the data section of the MaxMind DB"""
"""Decoder for the data section of the MaxMind DB."""

def __init__(
self,
database_buffer: Union[FileBuffer, "mmap.mmap", bytes],
pointer_base: int = 0,
pointer_test: bool = False,
) -> None:
"""Created a Decoder for a MaxMind DB
"""Created a Decoder for a MaxMind DB.
Arguments:
database_buffer -- an mmap'd MaxMind DB file.
Expand Down Expand Up @@ -139,7 +139,7 @@ def _decode_utf8_string(self, size: int, offset: int) -> Tuple[str, int]:
}

def decode(self, offset: int) -> Tuple[Record, int]:
"""Decode a section of the data section starting at offset
"""Decode a section of the data section starting at offset.
Arguments:
offset -- the location of the data structure to decode
Expand Down
6 changes: 3 additions & 3 deletions maxminddb/extension.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ This module contains the C extension database reader and related classes.
# pylint: disable=E0601,E0602
from ipaddress import IPv4Address, IPv6Address
from os import PathLike
from typing import IO, Any, AnyStr, Dict, List, Optional, Tuple, Union
from typing import IO, Any, AnyStr, Optional, Tuple, Union

from maxminddb.types import Record

Expand Down Expand Up @@ -89,7 +89,7 @@ class Metadata:
A string identifying the database type, e.g., "GeoIP2-City".
"""

description: Dict[str, str]
description: dict[str, str]
"""
A map from locales to text descriptions of the database.
"""
Expand All @@ -101,7 +101,7 @@ class Metadata:
both IPv4 and IPv6 lookups.
"""

languages: List[str]
languages: list[str]
"""
A list of locale codes supported by the database.
"""
Expand Down
12 changes: 6 additions & 6 deletions maxminddb/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@


class FileBuffer:
"""A slice-able file reader"""
"""A slice-able file reader."""

def __init__(self, database: str) -> None:
# pylint: disable=consider-using-with
Expand All @@ -28,31 +28,31 @@ def __getitem__(self, key: Union[slice, int]):
raise TypeError("Invalid argument type.")

def rfind(self, needle: bytes, start: int) -> int:
"""Reverse find needle from start"""
"""Reverse find needle from start."""
pos = self._read(self._size - start - 1, start).rfind(needle)
if pos == -1:
return pos
return start + pos

def size(self) -> int:
"""Size of file"""
"""Size of file."""
return self._size

def close(self) -> None:
"""Close file"""
"""Close file."""
self._handle.close()

if hasattr(os, "pread"):

def _read(self, buffersize: int, offset: int) -> bytes:
"""Read that uses pread"""
"""Read that uses pread."""
# pylint: disable=no-member
return os.pread(self._handle.fileno(), buffersize, offset) # type: ignore

else:

def _read(self, buffersize: int, offset: int) -> bytes:
"""Read with a lock
"""Read with a lock.
This lock is necessary as after a fork, the different processes
will share the same file table entry, even if we dup the fd, and
Expand Down
29 changes: 16 additions & 13 deletions maxminddb/reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def __init__(
database: Union[AnyStr, int, PathLike, IO],
mode: int = MODE_AUTO,
) -> None:
"""Reader for the MaxMind DB file format
"""Reader for the MaxMind DB file format.
Arguments:
database -- A path to a valid MaxMind DB file such as a GeoIP2 database
Expand Down Expand Up @@ -132,11 +132,11 @@ def __init__(
self._ipv4_start = ipv4_start

def metadata(self) -> "Metadata":
"""Return the metadata associated with the MaxMind DB file"""
"""Return the metadata associated with the MaxMind DB file."""
return self._metadata

def get(self, ip_address: Union[str, IPv6Address, IPv4Address]) -> Optional[Record]:
"""Return the record for the ip_address in the MaxMind DB
"""Return the record for the ip_address in the MaxMind DB.
Arguments:
ip_address -- an IP address in the standard string notation
Expand All @@ -149,7 +149,7 @@ def get_with_prefix_len(
self,
ip_address: Union[str, IPv6Address, IPv4Address],
) -> Tuple[Optional[Record], int]:
"""Return a tuple with the record and the associated prefix length
"""Return a tuple with the record and the associated prefix length.
Arguments:
ip_address -- an IP address in the standard string notation
Expand Down Expand Up @@ -245,20 +245,22 @@ def _read_node(self, node_number: int, index: int) -> int:
offset = base_offset + index * 4
node_bytes = self._buffer[offset : offset + 4]
else:
raise InvalidDatabaseError(f"Unknown record size: {record_size}")
msg = f"Unknown record size: {record_size}"
raise InvalidDatabaseError(msg)
return struct.unpack(b"!I", node_bytes)[0]

def _resolve_data_pointer(self, pointer: int) -> Record:
resolved = pointer - self._metadata.node_count + self._metadata.search_tree_size

if resolved >= self._buffer_size:
raise InvalidDatabaseError("The MaxMind DB file's search tree is corrupt")
msg = "The MaxMind DB file's search tree is corrupt"
raise InvalidDatabaseError(msg)

(data, _) = self._decoder.decode(resolved)
return data

def close(self) -> None:
"""Closes the MaxMind DB file and returns the resources to the system"""
"""Closes the MaxMind DB file and returns the resources to the system."""
try:
self._buffer.close() # type: ignore
except AttributeError:
Expand All @@ -270,13 +272,14 @@ def __exit__(self, *args) -> None:

def __enter__(self) -> "Reader":
if self.closed:
raise ValueError("Attempt to reopen a closed MaxMind DB")
msg = "Attempt to reopen a closed MaxMind DB"
raise ValueError(msg)
return self


# pylint: disable=too-many-instance-attributes,R0801
class Metadata:
"""Metadata for the MaxMind DB reader"""
"""Metadata for the MaxMind DB reader."""

binary_format_major_version: int
"""
Expand Down Expand Up @@ -328,7 +331,7 @@ class Metadata:
"""

def __init__(self, **kwargs) -> None:
"""Creates new Metadata object. kwargs are key/value pairs from spec"""
"""Creates new Metadata object. kwargs are key/value pairs from spec."""
# Although I could just update __dict__, that is less obvious and it
# doesn't work well with static analysis tools and some IDEs
self.node_count = kwargs["node_count"]
Expand All @@ -343,20 +346,20 @@ def __init__(self, **kwargs) -> None:

@property
def node_byte_size(self) -> int:
"""The size of a node in bytes
"""The size of a node in bytes.
:type: int
"""
return self.record_size // 4

@property
def search_tree_size(self) -> int:
"""The size of the search tree
"""The size of the search tree.
:type: int
"""
return self.node_count * self.node_byte_size

def __repr__(self):
def __repr__(self) -> str:
args = ", ".join(f"{k}={v!r}" for k, v in self.__dict__.items())
return f"{self.__module__}.{self.__class__.__name__}({args})"
22 changes: 10 additions & 12 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
ext_module = [
Extension(
"maxminddb.extension",
libraries=["maxminddb"] + libraries,
libraries=["maxminddb", *libraries],
sources=["extension/maxminddb.c"],
extra_compile_args=compile_args,
),
Expand Down Expand Up @@ -78,20 +78,20 @@


class BuildFailed(Exception):
def __init__(self):
def __init__(self) -> None:
self.cause = sys.exc_info()[1]


class ve_build_ext(build_ext):
# This class allows C extension building to fail.

def run(self):
def run(self) -> None:
try:
build_ext.run(self)
except DistutilsPlatformError:
raise BuildFailed

def build_extension(self, ext):
def build_extension(self, ext) -> None:
try:
build_ext.build_extension(self, ext)
except ext_errors:
Expand Down Expand Up @@ -120,24 +120,22 @@ def build_extension(self, ext):
)


def status_msgs(*msgs):
print("*" * 75)
for msg in msgs:
print(msg)
print("*" * 75)
def status_msgs(*msgs) -> None:
for _msg in msgs:
pass


def find_packages(location):
packages = []
for pkg in ["maxminddb"]:
for _dir, subdirectories, files in os.walk(os.path.join(location, pkg)):
for _dir, _subdirectories, files in os.walk(os.path.join(location, pkg)):
if "__init__.py" in files:
tokens = _dir.split(os.sep)[len(location.split(os.sep)) :]
packages.append(".".join(tokens))
return packages


def run_setup(with_cext):
def run_setup(with_cext) -> None:
kwargs = {}
loc_cmdclass = cmdclass.copy()
if with_cext:
Expand All @@ -158,7 +156,7 @@ def run_setup(with_cext):
run_setup(True)
except BuildFailed as exc:
if os.getenv("MAXMINDDB_REQUIRE_EXTENSION"):
raise exc
raise
status_msgs(
exc.cause,
"WARNING: The C extension could not be compiled, "
Expand Down
15 changes: 7 additions & 8 deletions tests/reader_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -478,9 +478,8 @@ def test_with_statement_close(self):
with self.assertRaisesRegex(
ValueError,
"Attempt to reopen a closed MaxMind DB",
):
with reader:
pass
), reader:
pass

def test_closed(self):
reader = open_database(
Expand Down Expand Up @@ -523,13 +522,13 @@ def test_multiprocessing(self):
def test_threading(self):
self._check_concurrency(threading.Thread)

def _check_concurrency(self, worker_class):
def _check_concurrency(self, worker_class) -> None:
reader = open_database(
"tests/data/test-data/GeoIP2-Domain-Test.mmdb",
self.mode,
)

def lookup(pipe):
def lookup(pipe) -> None:
try:
for i in range(32):
reader.get(self.ipf(f"65.115.240.{i}"))
Expand All @@ -554,7 +553,7 @@ def lookup(pipe):

self.assertEqual(count, 32, "expected number of successful lookups")

def _check_metadata(self, reader, ip_version, record_size):
def _check_metadata(self, reader, ip_version, record_size) -> None:
metadata = reader.metadata()

self.assertEqual(2, metadata.binary_format_major_version, "major version")
Expand All @@ -572,7 +571,7 @@ def _check_metadata(self, reader, ip_version, record_size):

self.assertEqual(metadata.record_size, record_size)

def _check_ip_v4(self, reader, file_name):
def _check_ip_v4(self, reader, file_name) -> None:
for i in range(6):
address = "1.1.1." + str(pow(2, i))
self.assertEqual(
Expand Down Expand Up @@ -602,7 +601,7 @@ def _check_ip_v4(self, reader, file_name):
for ip in ["1.1.1.33", "255.254.253.123"]:
self.assertIsNone(reader.get(self.ipf(ip)))

def _check_ip_v6(self, reader, file_name):
def _check_ip_v6(self, reader, file_name) -> None:
subnets = ["::1:ffff:ffff", "::2:0:0", "::2:0:40", "::2:0:50", "::2:0:58"]

for address in subnets:
Expand Down

0 comments on commit 147f44e

Please sign in to comment.