Update classes to use LoggerMixin #1838

Merged · 1 commit · May 13, 2024
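The LoggerMixin class itself lives in palace/manager/util/log.py and is not shown in this diff. Judging from the call sites below, which replace hand-built loggers named f"{module}.{class}" with a shared self.log attribute, a minimal sketch of such a mixin might look like the following. This is an assumption for illustration, not the repository's actual implementation, which may differ (for example, by caching the logger or exposing it at class level as well).

import logging


class LoggerMixin:
    """A sketch of a logging mixin, assuming it derives the logger name
    from the inheriting class's module and qualified name."""

    @classmethod
    def logger(cls) -> logging.Logger:
        # Resolved on demand, so logging configuration applied at startup
        # is in effect by the time the first logger is requested.
        return logging.getLogger(f"{cls.__module__}.{cls.__qualname__}")

    @property
    def log(self) -> logging.Logger:
        return self.logger()


class Demo(LoggerMixin):
    def run(self) -> None:
        self.log.info("hello")  # emitted under the logger "__main__.Demo"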
8 changes: 2 additions & 6 deletions src/palace/manager/api/adobe_vendor_id.py

@@ -2,7 +2,6 @@

 import base64
 import datetime
-import logging
 import sys
 import uuid
 from typing import Any
@@ -28,14 +27,15 @@
 from palace.manager.sqlalchemy.model.library import Library
 from palace.manager.sqlalchemy.model.patron import Patron
 from palace.manager.util.datetime_helpers import datetime_utc, utc_now
+from palace.manager.util.log import LoggerMixin

 if sys.version_info >= (3, 11):
     from typing import Self
 else:
     from typing_extensions import Self


-class AuthdataUtility:
+class AuthdataUtility(LoggerMixin):

     """Generate authdata JWTs as per the Vendor ID Service spec:
     https://docs.google.com/document/d/1j8nWPVmy95pJ_iU4UTC-QgHK2QhDUSdQ0OQTFR2NE_0
@@ -95,10 +95,6 @@ def __init__(
         # This is used to encode both JWTs and short client tokens.
         self.secret = secret

-        self.log = logging.getLogger(
-            f"{self.__class__.__module__}.{self.__class__.__name__}"
-        )
-
         self.short_token_signer = HMACAlgorithm(HMACAlgorithm.SHA256)
         self.short_token_signing_key = self.short_token_signer.prepare_key(self.secret)
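A side effect worth noting: the deleted constructor code derived the logger name from self.__class__, so subclasses of AuthdataUtility logged under their own names, and a mixin that does the same preserves that behavior. A small self-contained demonstration of the pattern that was removed:

import logging


class Base:
    def __init__(self) -> None:
        # The pattern deleted from AuthdataUtility.__init__:
        self.log = logging.getLogger(
            f"{self.__class__.__module__}.{self.__class__.__name__}"
        )


class Child(Base):
    pass


print(Child().log.name)  # "__main__.Child" - the name follows the concrete class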
6 changes: 2 additions & 4 deletions src/palace/manager/api/enki.py

@@ -2,7 +2,6 @@

 import datetime
 import json
-import logging
 import time
 from collections.abc import Callable, Generator, Mapping
 from typing import Any, cast
@@ -69,6 +68,7 @@
 from palace.manager.sqlalchemy.model.resource import Hyperlink, Representation
 from palace.manager.util.datetime_helpers import from_timestamp, strptime_utc, utc_now
 from palace.manager.util.http import HTTP, RemoteIntegrationException, RequestTimedOut
+from palace.manager.util.log import LoggerMixin


 class EnkiConstants:
@@ -613,13 +613,11 @@ def update_availability(self, licensepool: LicensePool) -> None:
         pass


-class BibliographicParser:
+class BibliographicParser(LoggerMixin):
     """Parses Enki's representation of book information into
     Metadata and CirculationData objects.
     """

-    log = logging.getLogger("Enki Bibliographic Parser")
-
     # Convert the English names of languages given in the Enki API to
     # the codes we use internally.
     LANGUAGE_CODES = {
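Unlike the other classes in this PR, BibliographicParser previously used an ad-hoc logger name ("Enki Bibliographic Parser") that sat outside the package's dotted-name hierarchy. Assuming LoggerMixin derives names from module and class, the parser's records now roll up to package-level loggers, so hierarchical configuration reaches them; anything that filtered on the old flat name would need updating. A sketch of the difference:

import logging

# Before: a flat name, invisible to package-level configuration.
old = logging.getLogger("Enki Bibliographic Parser")

# After (assuming module-qualified names): configuring the module's logger
# also affects the parser's records via the logging hierarchy.
logging.getLogger("palace.manager.api.enki").setLevel(logging.DEBUG)
new = logging.getLogger("palace.manager.api.enki.BibliographicParser")
print(new.getEffectiveLevel() == logging.DEBUG)  # True - inherited from the parent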
12 changes: 2 additions & 10 deletions src/palace/manager/scripts.py

@@ -69,6 +69,7 @@
 from palace.manager.sqlalchemy.util import LOCK_ID_DB_INIT, get_one, pg_advisory_lock
 from palace.manager.util.datetime_helpers import utc_now
 from palace.manager.util.languages import LanguageCodes
+from palace.manager.util.log import LoggerMixin


 class Script(CoreScript):
@@ -475,7 +476,7 @@ def run(self):
         os.system("pybabel compile -f -d translations")


-class InstanceInitializationScript:
+class InstanceInitializationScript(LoggerMixin):
     """An idempotent script to initialize an instance of the Circulation Manager.

     This script is intended for use in servers, Docker containers, etc,
@@ -491,7 +492,6 @@ def __init__(
         config_file: Path | None = None,
         engine_factory: Callable[[], Engine] = SessionManager.engine,
     ) -> None:
-        self._log: logging.Logger | None = None
         self._container = container_instance()

         # Call init_resources() to initialize the logging configuration.
@@ -500,14 +500,6 @@

         self._engine_factory = engine_factory

-    @property
-    def log(self) -> logging.Logger:
-        if self._log is None:
-            self._log = logging.getLogger(
-                f"{self.__module__}.{self.__class__.__name__}"
-            )
-        return self._log
-
     @staticmethod
     def _get_alembic_config(
         connection: Connection, config_file: Path | None
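The deleted lazy log property deserves a second look: InstanceInitializationScript configures logging in its constructor (via init_resources()), and deferring getLogger until first use avoids creating a logger before that configuration runs. One plausible motivation, stated here as an assumption rather than fact, is that logging.config.dictConfig() disables pre-existing loggers by default, as this sketch shows; a mixin that resolves its logger on first access keeps the same safe ordering.

import logging
import logging.config

early = logging.getLogger("palace.example.early")  # created before configuration

# disable_existing_loggers defaults to True.
logging.config.dictConfig({"version": 1})

late = logging.getLogger("palace.example.late")  # created after configuration

print(early.disabled)  # True - the pre-existing logger was switched off
print(late.disabled)   # False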
29 changes: 13 additions & 16 deletions src/palace/manager/search/migrator.py

@@ -1,11 +1,11 @@
-import logging
 from abc import ABC, abstractmethod
 from collections.abc import Iterable

 from palace.manager.core.exceptions import BasePalaceException
 from palace.manager.search.revision import SearchSchemaRevision
 from palace.manager.search.revision_directory import SearchRevisionDirectory
 from palace.manager.search.service import SearchService, SearchServiceFailedDocument
+from palace.manager.util.log import LoggerMixin


 class SearchMigrationException(BasePalaceException):
@@ -30,11 +30,10 @@ def finish(self) -> None:
         """Make sure all changes are committed."""


-class SearchDocumentReceiver(SearchDocumentReceiverType):
+class SearchDocumentReceiver(SearchDocumentReceiverType, LoggerMixin):
     """A receiver of search documents."""

     def __init__(self, pointer: str, service: SearchService):
-        self._logger = logging.getLogger(SearchDocumentReceiver.__name__)
         self._pointer = pointer
         self._service = service

@@ -53,12 +52,12 @@ def add_documents(

     def finish(self) -> None:
         """Make sure all changes are committed."""
-        self._logger.info("Finishing search documents.")
+        self.log.info("Finishing search documents.")
         self._service.refresh()
-        self._logger.info("Finished search documents.")
+        self.log.info("Finished search documents.")


-class SearchMigrationInProgress(SearchDocumentReceiverType):
+class SearchMigrationInProgress(SearchDocumentReceiverType, LoggerMixin):
     """A migration in progress. Documents are being submitted, and the migration must be
     explicitly finished or cancelled to take effect (or not!)."""

@@ -68,7 +67,6 @@ def __init__(
         revision: SearchSchemaRevision,
         service: SearchService,
     ):
-        self._logger = logging.getLogger(SearchMigrationInProgress.__name__)
         self._base_name = base_name
         self._revision = revision
         self._service = service
@@ -84,7 +82,7 @@ def add_documents(

     def finish(self) -> None:
         """Finish the migration."""
-        self._logger.info(f"Completing migration to {self._revision.version}")
+        self.log.info(f"Completing migration to {self._revision.version}")
         # Make sure all changes are committed.
         self._receiver.finish()
         # Create the "indexed" alias.
@@ -94,19 +92,18 @@ def finish(self) -> None:
         # Set the read pointer to point at the now-populated index
         self._service.read_pointer_set(self._revision)
         self._service.refresh()
-        self._logger.info(f"Completed migration to {self._revision.version}")
+        self.log.info(f"Completed migration to {self._revision.version}")

     def cancel(self) -> None:
         """Cancel the migration, leaving the read and write pointers untouched."""
-        self._logger.info(f"Cancelling migration to {self._revision.version}")
+        self.log.info(f"Cancelling migration to {self._revision.version}")
         return None


-class SearchMigrator:
+class SearchMigrator(LoggerMixin):
     """A search migrator. This moves a search service to the targeted schema version."""

     def __init__(self, revisions: SearchRevisionDirectory, service: SearchService):
-        self._logger = logging.getLogger(SearchMigrator.__name__)
         self._revisions = revisions
         self._service = service

@@ -124,7 +121,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None:
         :raises SearchMigrationException: On errors, but always leaves the system in a usable state.
         """

-        self._logger.info(f"starting migration to {base_name} {version}")
+        self.log.info(f"starting migration to {base_name} {version}")

         try:
             target = self._revisions.available.get(version)
@@ -140,7 +137,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None:
             # Does the read pointer exist? Point it at the empty index if not.
             read = self._service.read_pointer()
             if read is None:
-                self._logger.info("Read pointer did not exist.")
+                self.log.info("Read pointer did not exist.")
                 self._service.read_pointer_set_empty()

             # We're probably going to have to do a migration. We might end up returning
@@ -152,7 +149,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None:
             # Does the write pointer exist?
             write = self._service.write_pointer()
             if write is None or (not write.version == version):
-                self._logger.info(
+                self.log.info(
                     f"Write pointer does not point to the desired version: {write} != {version}."
                 )
                 # Either the write pointer didn't exist, or it's pointing at a version
@@ -162,7 +159,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None:

             # The index now definitely exists, but it might not be populated. Populate it if necessary.
             if not self._service.index_is_populated(target):
-                self._logger.info("Write index is not populated.")
+                self.log.info("Write index is not populated.")
                 return in_progress

             # If we didn't need to return the migration, finish it here. This will
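Two details of the migrator changes: the classes now mix LoggerMixin into an existing ABC hierarchy (SearchDocumentReceiverType), and the logger names change from bare class names like "SearchDocumentReceiver" to, presumably, module-qualified ones. A self-contained sketch with stand-in names showing that such a mixin composes cleanly with an abstract base:

import logging
from abc import ABC, abstractmethod


class LoggerMixin:  # same sketch as at the top of this diff
    @property
    def log(self) -> logging.Logger:
        cls = self.__class__
        return logging.getLogger(f"{cls.__module__}.{cls.__qualname__}")


class ReceiverType(ABC):  # stand-in for SearchDocumentReceiverType
    @abstractmethod
    def finish(self) -> None:
        """Make sure all changes are committed."""


class Receiver(ReceiverType, LoggerMixin):  # same base ordering as the PR
    def finish(self) -> None:
        self.log.info("Finishing search documents.")


Receiver().finish()  # logs under "__main__.Receiver", not a bare class name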
23 changes: 11 additions & 12 deletions src/palace/manager/search/service.py

@@ -1,4 +1,3 @@
-import logging
 import re
 from abc import ABC, abstractmethod
 from collections.abc import Iterable
@@ -10,6 +9,7 @@

 from palace.manager.core.exceptions import BasePalaceException
 from palace.manager.search.revision import SearchSchemaRevision
+from palace.manager.util.log import LoggerMixin


 @dataclass
@@ -159,11 +159,10 @@ def is_pointer_empty(self, pointer: str):
         """Check to see if a pointer points to an empty index"""


-class SearchServiceOpensearch1(SearchService):
+class SearchServiceOpensearch1(SearchService, LoggerMixin):
     """The real Opensearch 1.x service."""

     def __init__(self, client: OpenSearch, base_revision_name: str):
-        self._logger = logging.getLogger(SearchServiceOpensearch1.__name__)
         self._client = client
         self._search = Search(using=self._client)
         self._base_revision_name = base_revision_name
@@ -197,7 +196,7 @@ def write_pointer(self) -> SearchWritePointer | None:
     def create_empty_index(self) -> None:
         try:
             index_name = self._empty(self.base_revision_name)
-            self._logger.debug(f"creating empty index {index_name}")
+            self.log.debug(f"creating empty index {index_name}")
             self._client.indices.create(index=index_name)
         except RequestError as e:
             if e.error == "resource_already_exists_exception":
@@ -213,7 +212,7 @@ def read_pointer_set(self, revision: SearchSchemaRevision) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(f"setting read pointer {alias_name} to index {target_index}")
+        self.log.debug(f"setting read pointer {alias_name} to index {target_index}")
         self._client.indices.update_aliases(body=action)

     def index_set_populated(self, revision: SearchSchemaRevision) -> None:
@@ -225,7 +224,7 @@ def index_set_populated(self, revision: SearchSchemaRevision) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(
+        self.log.debug(
             f"creating 'indexed' flag alias {alias_name} for index {target_index}"
         )
         self._client.indices.update_aliases(body=action)
@@ -239,15 +238,15 @@ def read_pointer_set_empty(self) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(
+        self.log.debug(
             f"setting read pointer {alias_name} to empty index {target_index}"
         )
         self._client.indices.update_aliases(body=action)

     def index_create(self, revision: SearchSchemaRevision) -> None:
         try:
             index_name = revision.name_for_index(self.base_revision_name)
-            self._logger.info(f"creating index {index_name}")
+            self.log.info(f"creating index {index_name}")
             self._client.indices.create(
                 index=index_name,
                 body=revision.mapping_document().serialize(),
@@ -265,7 +264,7 @@ def index_is_populated(self, revision: SearchSchemaRevision) -> bool:
     def index_set_mapping(self, revision: SearchSchemaRevision) -> None:
         data = {"properties": revision.mapping_document().serialize_properties()}
         index_name = revision.name_for_index(self.base_revision_name)
-        self._logger.debug(f"setting mappings for index {index_name}")
+        self.log.debug(f"setting mappings for index {index_name}")
         self._client.indices.put_mapping(index=index_name, body=data)
         self._ensure_scripts(revision)

@@ -279,7 +278,7 @@ def _ensure_scripts(self, revision: SearchSchemaRevision) -> None:
     def index_submit_documents(
         self, pointer: str, documents: Iterable[dict]
     ) -> list[SearchServiceFailedDocument]:
-        self._logger.info(f"submitting documents to index {pointer}")
+        self.log.info(f"submitting documents to index {pointer}")

         # Specifically override the target in all documents to the target pointer
         # Add a hard requirement that the target be an alias (this prevents documents from implicitly creating
@@ -320,7 +319,7 @@ def index_clear_documents(self, pointer: str):
         )

     def refresh(self):
-        self._logger.debug(f"waiting for indexes to become ready")
+        self.log.debug(f"waiting for indexes to become ready")
         self._client.indices.refresh()

     def write_pointer_set(self, revision: SearchSchemaRevision) -> None:
@@ -332,7 +331,7 @@ def write_pointer_set(self, revision: SearchSchemaRevision) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(f"setting write pointer {alias_name} to {target_index}")
+        self.log.debug(f"setting write pointer {alias_name} to {target_index}")
         self._client.indices.update_aliases(body=action)

     def read_pointer(self) -> str | None:
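Since the refactor only changes where loggers come from, not what they say, a regression check is straightforward. A hedged sketch of a hypothetical pytest-style test, reusing the LoggerMixin sketch from the top of this page:

import logging


class Service(LoggerMixin):  # LoggerMixin as sketched above
    def refresh(self) -> None:
        self.log.debug("waiting for indexes to become ready")


def test_refresh_logs(caplog) -> None:  # hypothetical pytest test
    caplog.set_level(logging.DEBUG)
    Service().refresh()
    assert "waiting for indexes to become ready" in caplog.text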