split log even more
datawhores committed Jan 23, 2024
1 parent 68fc918 commit 8382f88
Showing 10 changed files with 330 additions and 308 deletions.
8 changes: 4 additions & 4 deletions ofscraper/api/me.py
@@ -24,7 +24,7 @@
import ofscraper.utils.constants as constants
import ofscraper.utils.context.stdout as stdout
import ofscraper.utils.encoding as encoding
-import ofscraper.utils.logs.logger as logger
+import ofscraper.utils.logs.helpers as log_helpers

log = logging.getLogger("shared")

@@ -53,12 +53,12 @@ def _scraper_user_helper(c):
with c.requests(constants.getattr("meEP"))() as r:
if r.ok:
data = r.json_()
-logger.updateSenstiveDict(data["id"], "userid")
-logger.updateSenstiveDict(
+log_helpers.updateSenstiveDict(data["id"], "userid")
+log_helpers.updateSenstiveDict(
f"{data['username']} | {data['username']}|\\b{data['username']}\\b",
"username",
)
-logger.updateSenstiveDict(
+log_helpers.updateSenstiveDict(
f"{data['name']} | {data['name']}|\\b{data['name']}\\b",
"name",
)
4 changes: 2 additions & 2 deletions ofscraper/commands/scraper.py
@@ -44,7 +44,7 @@
import ofscraper.utils.context.stdout as stdout
import ofscraper.utils.dates as dates
import ofscraper.utils.logs.logger as logger
-import ofscraper.utils.logs.startvals as startvals
+import ofscraper.utils.logs.logs as logs
import ofscraper.utils.paths.check as check
import ofscraper.utils.paths.paths as paths
import ofscraper.utils.profiles.manage as profiles_manage
@@ -411,7 +411,7 @@ def set_schedule(*functs):
def schedule_helper(functs):
jobqueue.put(logger.start_threads)
jobqueue.put(logger.updateOtherLoggerStream)
-jobqueue.put(startvals.printStartValues)
+jobqueue.put(logs.printStartValues)
jobqueue.put(partial(userselector.getselected_usernames, rescan=True))
for funct in functs:
jobqueue.put(funct)
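For context, schedule_helper above only enqueues callables; a scheduler loop elsewhere in scraper.py is expected to drain jobqueue and run them in order. That loop is not part of this diff, so the sketch below is an assumption about the pattern (the worker thread and the placeholder jobs are illustrative, not code from the repository):

import queue
import threading

jobqueue = queue.Queue()

def run_jobs():
    # Drain the queue and execute each scheduled callable in FIFO order.
    while True:
        funct = jobqueue.get()
        try:
            funct()
        finally:
            jobqueue.task_done()

threading.Thread(target=run_jobs, daemon=True).start()

# Mirrors schedule_helper: setup steps first, then the user-supplied jobs.
jobqueue.put(lambda: print("start log threads"))
jobqueue.put(lambda: print("print start values"))
jobqueue.join()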
4 changes: 2 additions & 2 deletions ofscraper/download/alt_download.py
@@ -37,7 +37,7 @@
import ofscraper.utils.config.data as data
import ofscraper.utils.constants as constants
import ofscraper.utils.dates as dates
-import ofscraper.utils.logs.logger as logger
+import ofscraper.utils.logs.helpers as log_helpers
import ofscraper.utils.paths.paths as paths
from ofscraper.download.common import (
addGlobalDir,
@@ -187,7 +187,7 @@ async def inner(ele):
for prot in adapt_set.content_protections:
if prot.value == None:
kId = prot.pssh[0].pssh
-logger.updateSenstiveDict(kId, "pssh_code")
+log_helpers.updateSenstiveDict(kId, "pssh_code")
break
for repr in adapt_set.representations:
origname = f"{repr.base_urls[0].base_url_value}"
5 changes: 2 additions & 3 deletions ofscraper/download/alt_downloadbatch.py
@@ -28,9 +28,8 @@
import ofscraper.utils.config.data as data
import ofscraper.utils.constants as constants
import ofscraper.utils.dates as dates
-import ofscraper.utils.logs.logger as logger
+import ofscraper.utils.logs.helpers as log_helpers
import ofscraper.utils.paths.paths as paths
-import ofscraper.utils.system.free as free
from ofscraper.download.common import (
addLocalDir,
check_forced_skip,
@@ -175,7 +174,7 @@ async def alt_download_preparer(ele):
for prot in adapt_set.content_protections:
if prot.value == None:
kId = prot.pssh[0].pssh
-logger.updateSenstiveDict(kId, "pssh_code")
+log_helpers.updateSenstiveDict(kId, "pssh_code")
break
for repr in adapt_set.representations:
origname = f"{repr.base_urls[0].base_url_value}"
3 changes: 2 additions & 1 deletion ofscraper/runner/load.py
@@ -7,6 +7,7 @@
import ofscraper.utils.config.config as config_
import ofscraper.utils.dates as dates
import ofscraper.utils.logs.logger as logger
+import ofscraper.utils.logs.logs as logs
import ofscraper.utils.paths.manage as paths_manage
import ofscraper.utils.system.system as system

@@ -42,7 +43,7 @@ def setdate():
def setLogger():
logger.init_values()
logger.get_shared_logger()
-logger.discord_warning()
+logs.discord_warning()
logger.start_stdout_logthread()
logger.start_other_helper()

4 changes: 2 additions & 2 deletions ofscraper/runner/run.py
@@ -7,7 +7,7 @@
import ofscraper.utils.args.read as read_args
import ofscraper.utils.console as console
import ofscraper.utils.context.exit as exit_context
-import ofscraper.utils.logs.startvals as startvals
+import ofscraper.utils.logs.logs as logs
import ofscraper.utils.system.system as system


@@ -51,4 +51,4 @@ def initLogs():
console.get_shared_console().print(
"[bold yellow]Warning another OF-Scraper instance was detected[bold yellow]\n\n\n"
)
-startvals.printStartValues()
+logs.printStartValues()
189 changes: 189 additions & 0 deletions ofscraper/utils/logs/classes.py
@@ -0,0 +1,189 @@
import copy
import logging
import re

from tenacity import (
Retrying,
retry,
retry_if_not_exception_type,
stop_after_attempt,
wait_fixed,
)

import ofscraper.classes.sessionbuilder as sessionbuilder
import ofscraper.utils.config.data as data
import ofscraper.utils.constants as constants
import ofscraper.utils.logs.helpers as helpers


class PipeHandler(logging.Handler):
"""
This handler sends events to a queue. Typically, it would be used together
with a multiprocessing Queue to centralise logging to file in one process
(in a multi-process application), so as to avoid file write contention
between processes.
This code is new in Python 3.2, but this class can be copy pasted into
user code for use with earlier Python versions.
"""

def __init__(self, pipe):
"""
Initialise an instance, using the passed queue.
"""
logging.Handler.__init__(self)
self.pipe = pipe

def prepare(self, record):
"""
Prepare a record for queuing. The object returned by this method is
enqueued.
The base implementation formats the record to merge the message and
arguments, and removes unpickleable items from the record in-place.
Specifically, it overwrites the record's `msg` and
`message` attributes with the merged message (obtained by
calling the handler's `format` method), and sets the `args`,
`exc_info` and `exc_text` attributes to None.
You might want to override this method if you want to convert
the record to a dict or JSON string, or send a modified copy
of the record while leaving the original intact.
"""
# The format operation gets traceback text into record.exc_text
# (if there's exception data), and also returns the formatted
# message. We can then use this to replace the original
# msg + args, as these might be unpickleable. We also zap the
# exc_info, exc_text and stack_info attributes, as they are no longer
# needed and, if not None, will typically not be pickleable.
msg = self.format(record)
# bpo-35726: make copy of record to avoid affecting other handlers in the chain.
record = copy.copy(record)
record.message = msg
record.msg = msg
record.args = None
record.exc_info = None
record.exc_text = None
record.stack_info = None
return record

def emit(self, record):
"""
Emit a record.
Writes the LogRecord to the queue, preparing it for pickling first.
"""
try:
msg = self.prepare(record)
self.pipe[0].send(msg)
except Exception:
self.handleError(record)


class DebugOnly(logging.Filter):
def filter(self, record):
if record.levelno == 10 or record.levelno == 11:
return True
return False


class TraceOnly(logging.Filter):
def filter(self, record):
if record.levelno <= 11:
return True
return False


class NoDebug(logging.Filter):
def filter(self, record):
if record.levelno <= 11:
return False
return True


class DiscordHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
self.sess = sessionbuilder.sessionBuilder(
backend="httpx",
set_header=False,
set_cookies=False,
set_sign=False,
total_timeout=10,
)

def emit(self, record):
def inner(sess):
with sess:
for _ in Retrying(
retry=retry_if_not_exception_type(KeyboardInterrupt),
stop=stop_after_attempt(constants.getattr("NUM_TRIES")),
wait=wait_fixed(8),
):
with _:
with sess.requests(
url,
"post",
headers={"Content-type": "application/json"},
json={"content": log_entry},
)() as r:
if not r.status == 204:
raise Exception

log_entry = self.format(record)
url = data.get_discord()
log_entry = re.sub("\[bold\]|\[/bold\]", "**", log_entry)
log_entry = f"{log_entry}\n\n"
if url == None or url == "":
return

inner(self.sess)


class TextHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
self._widget = None

def emit(self, record):
# only emit after widget is set
if self._widget == None:
return
log_entry = self.format(record)
log_entry = f"{log_entry}"
self._widget.write(log_entry)

@property
def widget(self):
return self._widget

@widget.setter
def widget(self, widget):
self._widget = widget


class SensitiveFormatter(logging.Formatter):
"""Formatter that removes sensitive information in logs."""

@staticmethod
def _filter(s):
s = re.sub("&Policy=[^&\"']+", "&Policy={hidden}", s)
s = re.sub("&Signature=[^&\"']+", "&Signature={hidden}", s)
s = re.sub("&Key-Pair-Id=[^&\"']+", "&Key-Pair-Id={hidden}", s)
for ele in helpers.getSenstiveDict().items():
s = re.sub(re.escape(str(ele[0])), str(ele[1]), s)
return s

def format(self, record):
original = logging.Formatter.format(self, record) # call parent method
return self._filter(original)


class LogFileFormatter(SensitiveFormatter):
"""Formatter that removes sensitive information in logs."""

@staticmethod
def _filter(s):
s = SensitiveFormatter._filter(s)
s = re.sub("\[bold\]|\[/bold\]", "", s)
return s
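Taken together with the helpers module below, the intended flow is: call sites (me.py and the alt downloads above) register raw values through log_helpers.updateSenstiveDict, and any handler whose formatter is SensitiveFormatter (or LogFileFormatter) scrubs those values plus the CDN query parameters from its output. A minimal wiring sketch, assuming only the import paths shown in this commit; the handler setup itself is illustrative:

import logging

import ofscraper.utils.logs.helpers as log_helpers
from ofscraper.utils.logs.classes import SensitiveFormatter

log = logging.getLogger("shared")
handler = logging.StreamHandler()
handler.setFormatter(SensitiveFormatter("%(asctime)s %(message)s"))
log.addHandler(handler)
log.setLevel(logging.INFO)

# Key is the raw value to hide, value is the label it is replaced with.
log_helpers.updateSenstiveDict("secretuser", "username")

log.info("fetched profile for secretuser")
# emitted as "... fetched profile for username"
log.info("https://cdn.example.com/file.mp4?x=1&Signature=abc123")
# emitted with "&Signature={hidden}"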
74 changes: 74 additions & 0 deletions ofscraper/utils/logs/helpers.py
@@ -0,0 +1,74 @@
import logging

senstiveDict = {}


def logForLevel(level):
def inner(self, message, *args, **kwargs):
if self.isEnabledFor(level):
self._log(level, message, args, **kwargs)

return inner


def logToRoot(level):
def inner(message, *args, **kwargs):
logging.log(level, message, *args, **kwargs)

return inner


def addtraceback():
level = logging.DEBUG + 1

logging.addLevelName(level, "TRACEBACK_")
logging.TRACEBACK = level
setattr(logging, "TRACEBACK_", level)
setattr(logging.getLoggerClass(), "traceback_", logForLevel(level))
setattr(logging, "traceback_", logToRoot(level))


def addtrace():
level = logging.DEBUG - 5

logging.addLevelName(level, "TRACE")
logging.TRACE = level
setattr(logging, "TRACE", level)
setattr(logging.getLoggerClass(), "trace", logForLevel(level))
setattr(logging, "trace", logToRoot(level))


def updateSenstiveDict(word, replacement):
global senstiveDict
senstiveDict[word] = replacement


def getSenstiveDict():
global senstiveDict
return senstiveDict


def getLevel(input):
"""
CRITICAL 50
ERROR 40
WARNING 30
INFO 20
DEBUG 10
TRACE 5
"""
return {
"OFF": 100,
"PROMPT": "ERROR",
"LOW": "WARNING",
"NORMAL": "INFO",
"DEBUG": "DEBUG",
"TRACE": "TRACE",
}.get(input, 100)


def getNumber(input_):
input_ = getLevel(input_)
if isinstance(input_, str):
return logging.getLevelName(input_)
return input_
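addtraceback() and addtrace() work by registering extra levels on the logging module itself: TRACEBACK_ at DEBUG + 1 (11) and TRACE at DEBUG - 5 (5), which is what the DebugOnly/TraceOnly/NoDebug filters in classes.py compare against. A short usage sketch, assuming the module path above:

import logging

import ofscraper.utils.logs.helpers as log_helpers

# Register the custom levels before configuring loggers.
log_helpers.addtraceback()  # logging.TRACEBACK_ == logging.DEBUG + 1 == 11
log_helpers.addtrace()      # logging.TRACE == logging.DEBUG - 5 == 5

logging.basicConfig(level=log_helpers.getNumber("TRACE"))  # "TRACE" -> 5
log = logging.getLogger("shared")

log.traceback_("formatted traceback text")  # emitted at level 11 (TRACEBACK_)
log.trace("very verbose detail")            # emitted at level 5 (TRACE)

print(log_helpers.getNumber("LOW"))  # "LOW" -> "WARNING" -> 30
print(log_helpers.getNumber("OFF"))  # 100, above CRITICAL, so nothing is logged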