From acf0b8f0c7e910c3dd794a4179decdac4c3fc21b Mon Sep 17 00:00:00 2001
From: Jonathan Green
Date: Mon, 13 May 2024 09:07:57 -0300
Subject: [PATCH] Update classes to use logger mixin (#1838)

---
 src/palace/manager/api/adobe_vendor_id.py   |  8 ++----
 src/palace/manager/api/enki.py              |  6 ++---
 src/palace/manager/scripts.py               | 12 ++-------
 src/palace/manager/search/migrator.py       | 29 +++++++++------------
 src/palace/manager/search/service.py        | 23 ++++++++--------
 src/palace/manager/sqlalchemy/model/work.py | 24 ++++++++---------
 6 files changed, 42 insertions(+), 60 deletions(-)

diff --git a/src/palace/manager/api/adobe_vendor_id.py b/src/palace/manager/api/adobe_vendor_id.py
index d8e8f3153f..9bdd5950af 100644
--- a/src/palace/manager/api/adobe_vendor_id.py
+++ b/src/palace/manager/api/adobe_vendor_id.py
@@ -2,7 +2,6 @@

 import base64
 import datetime
-import logging
 import sys
 import uuid
 from typing import Any
@@ -28,6 +27,7 @@
 from palace.manager.sqlalchemy.model.library import Library
 from palace.manager.sqlalchemy.model.patron import Patron
 from palace.manager.util.datetime_helpers import datetime_utc, utc_now
+from palace.manager.util.log import LoggerMixin

 if sys.version_info >= (3, 11):
     from typing import Self
@@ -35,7 +35,7 @@
     from typing_extensions import Self


-class AuthdataUtility:
+class AuthdataUtility(LoggerMixin):
     """Generate authdata JWTs as per the Vendor ID Service spec:

     https://docs.google.com/document/d/1j8nWPVmy95pJ_iU4UTC-QgHK2QhDUSdQ0OQTFR2NE_0
@@ -95,10 +95,6 @@ def __init__(
         # This is used to encode both JWTs and short client tokens.
         self.secret = secret

-        self.log = logging.getLogger(
-            f"{self.__class__.__module__}.{self.__class__.__name__}"
-        )
-
         self.short_token_signer = HMACAlgorithm(HMACAlgorithm.SHA256)
         self.short_token_signing_key = self.short_token_signer.prepare_key(self.secret)

diff --git a/src/palace/manager/api/enki.py b/src/palace/manager/api/enki.py
index df733a26db..03c06dfd3c 100644
--- a/src/palace/manager/api/enki.py
+++ b/src/palace/manager/api/enki.py
@@ -2,7 +2,6 @@

 import datetime
 import json
-import logging
 import time
 from collections.abc import Callable, Generator, Mapping
 from typing import Any, cast
@@ -69,6 +68,7 @@
 from palace.manager.sqlalchemy.model.resource import Hyperlink, Representation
 from palace.manager.util.datetime_helpers import from_timestamp, strptime_utc, utc_now
 from palace.manager.util.http import HTTP, RemoteIntegrationException, RequestTimedOut
+from palace.manager.util.log import LoggerMixin


 class EnkiConstants:
@@ -613,13 +613,11 @@ def update_availability(self, licensepool: LicensePool) -> None:
         pass


-class BibliographicParser:
+class BibliographicParser(LoggerMixin):
     """Parses Enki's representation of book information into Metadata and
     CirculationData objects.
     """

-    log = logging.getLogger("Enki Bibliographic Parser")
-
     # Convert the English names of languages given in the Enki API to
     # the codes we use internally.
     LANGUAGE_CODES = {
diff --git a/src/palace/manager/scripts.py b/src/palace/manager/scripts.py
index 46fbd8ae0b..9d7e39af68 100644
--- a/src/palace/manager/scripts.py
+++ b/src/palace/manager/scripts.py
@@ -69,6 +69,7 @@
 from palace.manager.sqlalchemy.util import LOCK_ID_DB_INIT, get_one, pg_advisory_lock
 from palace.manager.util.datetime_helpers import utc_now
 from palace.manager.util.languages import LanguageCodes
+from palace.manager.util.log import LoggerMixin


 class Script(CoreScript):
@@ -475,7 +476,7 @@ def run(self):
         os.system("pybabel compile -f -d translations")


-class InstanceInitializationScript:
+class InstanceInitializationScript(LoggerMixin):
     """An idempotent script to initialize an instance of the Circulation Manager.

     This script is intended for use in servers, Docker containers, etc,
@@ -491,7 +492,6 @@ def __init__(
         config_file: Path | None = None,
         engine_factory: Callable[[], Engine] = SessionManager.engine,
     ) -> None:
-        self._log: logging.Logger | None = None
         self._container = container_instance()

         # Call init_resources() to initialize the logging configuration.
@@ -500,14 +500,6 @@ def __init__(

         self._engine_factory = engine_factory

-    @property
-    def log(self) -> logging.Logger:
-        if self._log is None:
-            self._log = logging.getLogger(
-                f"{self.__module__}.{self.__class__.__name__}"
-            )
-        return self._log
-
     @staticmethod
     def _get_alembic_config(
         connection: Connection, config_file: Path | None
diff --git a/src/palace/manager/search/migrator.py b/src/palace/manager/search/migrator.py
index 101d5ac714..fa76d76c44 100644
--- a/src/palace/manager/search/migrator.py
+++ b/src/palace/manager/search/migrator.py
@@ -1,4 +1,3 @@
-import logging
 from abc import ABC, abstractmethod
 from collections.abc import Iterable

@@ -6,6 +5,7 @@
 from palace.manager.search.revision import SearchSchemaRevision
 from palace.manager.search.revision_directory import SearchRevisionDirectory
 from palace.manager.search.service import SearchService, SearchServiceFailedDocument
+from palace.manager.util.log import LoggerMixin


 class SearchMigrationException(BasePalaceException):
@@ -30,11 +30,10 @@ def finish(self) -> None:
         """Make sure all changes are committed."""


-class SearchDocumentReceiver(SearchDocumentReceiverType):
+class SearchDocumentReceiver(SearchDocumentReceiverType, LoggerMixin):
     """A receiver of search documents."""

     def __init__(self, pointer: str, service: SearchService):
-        self._logger = logging.getLogger(SearchDocumentReceiver.__name__)
         self._pointer = pointer
         self._service = service

@@ -53,12 +52,12 @@ def add_documents(

     def finish(self) -> None:
         """Make sure all changes are committed."""
-        self._logger.info("Finishing search documents.")
+        self.log.info("Finishing search documents.")
         self._service.refresh()
-        self._logger.info("Finished search documents.")
+        self.log.info("Finished search documents.")


-class SearchMigrationInProgress(SearchDocumentReceiverType):
+class SearchMigrationInProgress(SearchDocumentReceiverType, LoggerMixin):
     """A migration in progress.

     Documents are being submitted, and the migration must be explicitly
     finished or cancelled to take effect (or not!)."""
@@ -68,7 +67,6 @@ def __init__(
         revision: SearchSchemaRevision,
         service: SearchService,
     ):
-        self._logger = logging.getLogger(SearchMigrationInProgress.__name__)
         self._base_name = base_name
         self._revision = revision
         self._service = service
@@ -84,7 +82,7 @@ def add_documents(

     def finish(self) -> None:
         """Finish the migration."""
-        self._logger.info(f"Completing migration to {self._revision.version}")
+        self.log.info(f"Completing migration to {self._revision.version}")
         # Make sure all changes are committed.
         self._receiver.finish()
         # Create the "indexed" alias.
@@ -94,19 +92,18 @@ def finish(self) -> None:
         # Set the read pointer to point at the now-populated index
         self._service.read_pointer_set(self._revision)
         self._service.refresh()
-        self._logger.info(f"Completed migration to {self._revision.version}")
+        self.log.info(f"Completed migration to {self._revision.version}")

     def cancel(self) -> None:
         """Cancel the migration, leaving the read and write pointers untouched."""
-        self._logger.info(f"Cancelling migration to {self._revision.version}")
+        self.log.info(f"Cancelling migration to {self._revision.version}")
         return None


-class SearchMigrator:
+class SearchMigrator(LoggerMixin):
     """A search migrator. This moves a search service to the targeted schema version."""

     def __init__(self, revisions: SearchRevisionDirectory, service: SearchService):
-        self._logger = logging.getLogger(SearchMigrator.__name__)
         self._revisions = revisions
         self._service = service

@@ -124,7 +121,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | N

         :raises SearchMigrationException: On errors, but always leaves the system in a usable state.
         """
-        self._logger.info(f"starting migration to {base_name} {version}")
+        self.log.info(f"starting migration to {base_name} {version}")

         try:
             target = self._revisions.available.get(version)
@@ -140,7 +137,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | N
             # Does the read pointer exist? Point it at the empty index if not.
             read = self._service.read_pointer()
             if read is None:
-                self._logger.info("Read pointer did not exist.")
+                self.log.info("Read pointer did not exist.")
                 self._service.read_pointer_set_empty()

             # We're probably going to have to do a migration. We might end up returning
@@ -152,7 +149,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | N
             # Does the write pointer exist?
             write = self._service.write_pointer()
             if write is None or (not write.version == version):
-                self._logger.info(
+                self.log.info(
                     f"Write pointer does not point to the desired version: {write} != {version}."
                 )
                 # Either the write pointer didn't exist, or it's pointing at a version
@@ -162,7 +159,7 @@ def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | N
             # The index now definitely exists, but it might not be populated.
             # Populate it if necessary.
             if not self._service.index_is_populated(target):
-                self._logger.info("Write index is not populated.")
+                self.log.info("Write index is not populated.")
                 return in_progress

             # If we didn't need to return the migration, finish it here. This will
diff --git a/src/palace/manager/search/service.py b/src/palace/manager/search/service.py
index 0d86bcbee5..04c481e20b 100644
--- a/src/palace/manager/search/service.py
+++ b/src/palace/manager/search/service.py
@@ -1,4 +1,3 @@
-import logging
 import re
 from abc import ABC, abstractmethod
 from collections.abc import Iterable
@@ -10,6 +9,7 @@

 from palace.manager.core.exceptions import BasePalaceException
 from palace.manager.search.revision import SearchSchemaRevision
+from palace.manager.util.log import LoggerMixin


 @dataclass
@@ -159,11 +159,10 @@ def is_pointer_empty(self, pointer: str):
         """Check to see if a pointer points to an empty index"""


-class SearchServiceOpensearch1(SearchService):
+class SearchServiceOpensearch1(SearchService, LoggerMixin):
     """The real Opensearch 1.x service."""

     def __init__(self, client: OpenSearch, base_revision_name: str):
-        self._logger = logging.getLogger(SearchServiceOpensearch1.__name__)
         self._client = client
         self._search = Search(using=self._client)
         self._base_revision_name = base_revision_name
@@ -197,7 +196,7 @@ def write_pointer(self) -> SearchWritePointer | None:
     def create_empty_index(self) -> None:
         try:
             index_name = self._empty(self.base_revision_name)
-            self._logger.debug(f"creating empty index {index_name}")
+            self.log.debug(f"creating empty index {index_name}")
             self._client.indices.create(index=index_name)
         except RequestError as e:
             if e.error == "resource_already_exists_exception":
@@ -213,7 +212,7 @@ def read_pointer_set(self, revision: SearchSchemaRevision) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(f"setting read pointer {alias_name} to index {target_index}")
+        self.log.debug(f"setting read pointer {alias_name} to index {target_index}")
         self._client.indices.update_aliases(body=action)

     def index_set_populated(self, revision: SearchSchemaRevision) -> None:
@@ -225,7 +224,7 @@
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(
+        self.log.debug(
             f"creating 'indexed' flag alias {alias_name} for index {target_index}"
         )
         self._client.indices.update_aliases(body=action)
@@ -239,7 +238,7 @@ def read_pointer_set_empty(self) -> None:
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(
+        self.log.debug(
             f"setting read pointer {alias_name} to empty index {target_index}"
         )
         self._client.indices.update_aliases(body=action)
@@ -247,7 +246,7 @@
     def index_create(self, revision: SearchSchemaRevision) -> None:
         try:
             index_name = revision.name_for_index(self.base_revision_name)
-            self._logger.info(f"creating index {index_name}")
+            self.log.info(f"creating index {index_name}")
             self._client.indices.create(
                 index=index_name,
                 body=revision.mapping_document().serialize(),
@@ -265,7 +264,7 @@ def index_is_populated(self, revision: SearchSchemaRevision) -> bool:

     def index_set_mapping(self, revision: SearchSchemaRevision) -> None:
         data = {"properties": revision.mapping_document().serialize_properties()}
         index_name = revision.name_for_index(self.base_revision_name)
-        self._logger.debug(f"setting mappings for index {index_name}")
+        self.log.debug(f"setting mappings for index {index_name}")
         self._client.indices.put_mapping(index=index_name, body=data)
         self._ensure_scripts(revision)
@@ -279,7 +278,7 @@ def _ensure_scripts(self, revision: SearchSchemaRevision) -> None:
     def index_submit_documents(
         self, pointer: str, documents: Iterable[dict]
     ) -> list[SearchServiceFailedDocument]:
-        self._logger.info(f"submitting documents to index {pointer}")
+        self.log.info(f"submitting documents to index {pointer}")

         # Specifically override the target in all documents to the target pointer
         # Add a hard requirement that the target be an alias (this prevents documents from implicitly creating
@@ -320,7 +319,7 @@ def index_clear_documents(self, pointer: str):
         )

     def refresh(self):
-        self._logger.debug(f"waiting for indexes to become ready")
+        self.log.debug("waiting for indexes to become ready")
         self._client.indices.refresh()

     def write_pointer_set(self, revision: SearchSchemaRevision) -> None:
@@ -332,7 +331,7 @@
                 {"add": {"index": target_index, "alias": alias_name}},
             ]
         }
-        self._logger.debug(f"setting write pointer {alias_name} to {target_index}")
+        self.log.debug(f"setting write pointer {alias_name} to {target_index}")
         self._client.indices.update_aliases(body=action)

     def read_pointer(self) -> str | None:
diff --git a/src/palace/manager/sqlalchemy/model/work.py b/src/palace/manager/sqlalchemy/model/work.py
index 0fe04f5a07..0a6f45be66 100644
--- a/src/palace/manager/sqlalchemy/model/work.py
+++ b/src/palace/manager/sqlalchemy/model/work.py
@@ -2,7 +2,6 @@

 from __future__ import annotations

-import logging
 import sys
 from collections import Counter
 from datetime import date, datetime
@@ -59,6 +58,7 @@
 )
 from palace.manager.util.datetime_helpers import utc_now
 from palace.manager.util.languages import LanguageCodes
+from palace.manager.util.log import LoggerMixin

 if sys.version_info >= (3, 11):
     from typing import Self
@@ -93,7 +93,7 @@ def __repr__(self):
         return "%s (%d%%)" % (self.genre.name, self.affinity * 100)


-class Work(Base):
+class Work(Base, LoggerMixin):
     APPEALS_URI = "http://librarysimplified.org/terms/appeals/"

     CHARACTER_APPEAL = "Character"
@@ -509,7 +509,7 @@ def make_exclusive_open_access_for_permanent_work_id(self, pwid, medium, languag
             elif not pool.presentation_edition:
                 # A LicensePool with no presentation edition
                 # cannot have an associated Work.
-                logging.warning(
+                self.log.warning(
                     "LicensePool %r has no presentation edition, setting .work to None.",
                     pool,
                 )
@@ -520,7 +520,7 @@ def make_exclusive_open_access_for_permanent_work_id(self, pwid, medium, languag
             if not this_pwid:
                 # A LicensePool with no permanent work ID
                 # cannot have an associated Work.
-                logging.warning(
+                self.log.warning(
                     "Presentation edition for LicensePool %r has no PWID, setting .work to None.",
                     pool,
                 )
@@ -677,9 +677,9 @@ def reject_covers(cls, _db, works_or_identifiers, search_index_client=None):
         work_ids = [w.id for w in works]

         if len(works) == 1:
-            logging.info("Suppressing cover for %r", works[0])
+            cls.logger().info("Suppressing cover for %r", works[0])
         else:
-            logging.info("Supressing covers for %i Works", len(works))
+            cls.logger().info("Suppressing covers for %i Works", len(works))

         cover_urls = list()
         for work in works:
@@ -1043,7 +1043,7 @@ def calculate_presentation(
         # TODO: maybe change changed to a boolean, and return it as method result
         changed = "unchanged"
         representation = repr(self)
-        logging.info("Presentation %s for work: %s", changed, representation)
+        self.log.info("Presentation %s for work: %s", changed, representation)

         # We want works to be presentation-ready as soon as possible,
         # unless they are missing crucial information like language or
@@ -1280,7 +1280,7 @@ def set_presentation_ready_based_on_content(self, search_index_client=None):
             # processed, this work will be removed from the search
             # index.
             self.external_index_needs_updating()
-            logging.warning("Work is not presentation ready: %r", self)
+            self.log.warning("Work is not presentation ready: %r", self)
         else:
             self.set_presentation_ready(search_index_client=search_index_client)
@@ -1538,7 +1538,7 @@ def to_search_documents(cls, works: list[Self]) -> list[dict]:
                 search_doc = cls.search_doc_as_dict(cast(Self, item))
                 results.append(search_doc)
             except:
-                logging.exception(f"Could not create search document for {item}")
+                cls.logger().exception(f"Could not create search document for {item}")

         return results
@@ -1595,7 +1595,7 @@ def _convert(value):
                     value = value.replace(tzinfo=pytz.UTC)
                     return value.timestamp()
                 except (ValueError, OverflowError) as e:
-                    logging.error(
+                    cls.logger().error(
                         f"Could not convert date value {value} for document {doc.id}: {e}"
                     )
                     return 0
@@ -1605,7 +1605,7 @@ def _convert(value):
                         value.year, value.month, value.day, tzinfo=pytz.UTC
                     ).timestamp()
                 except (ValueError, OverflowError) as e:
-                    logging.error(
+                    cls.logger().error(
                         f"Could not convert date value {value} for document {doc.id}: {e}"
                     )
                     return 0
@@ -1835,7 +1835,7 @@ def delete(
         try:
             search_index.remove_work(self)
         except opensearchpy.exceptions.NotFoundError:
-            logging.warning(
+            self.log.warning(
                 f"Work {self.id} not found in search index while attempting to delete it."
             )
         _db.delete(self)
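
Note: the LoggerMixin itself lives in palace/manager/util/log.py and is not
modified by this patch, so its definition does not appear in the diff.
Inferred from the call sites above (`self.log` on instances, `cls.logger()`
in classmethods), the mixin presumably provides something close to the
following sketch; the exact implementation in the repository may differ:

    import functools
    import logging


    class LoggerMixin:
        """Mixin that exposes a logger named after the class."""

        @classmethod
        @functools.cache
        def logger(cls) -> logging.Logger:
            # One cached logger per class, named "<module>.<ClassName>",
            # matching the names previously built by hand in, e.g.,
            # AuthdataUtility.__init__ and InstanceInitializationScript.log.
            return logging.getLogger(f"{cls.__module__}.{cls.__name__}")

        @property
        def log(self) -> logging.Logger:
            # Instance-level shorthand for the class-level logger.
            return self.logger()

Besides removing per-class boilerplate, this keeps logger names consistent:
ad-hoc names like "Enki Bibliographic Parser" become module-qualified class
names such as "palace.manager.api.enki.BibliographicParser".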