diff --git a/ofscraper/actions/process.py b/ofscraper/actions/process.py
index c4c7bfce4..11870cd03 100644
--- a/ofscraper/actions/process.py
+++ b/ofscraper/actions/process.py
@@ -40,7 +40,6 @@ import ofscraper.utils.profiles.tools as profile_tools
 from ofscraper.utils.context.run_async import run
 
-
 log = logging.getLogger("shared")
 
 
@@ -118,26 +117,21 @@ def process_user_first_helper(ele):
 
 
 def scrape_paid(user_dict=None):
-    user_dict =OF.process_all_paid()
+    user_dict = OF.process_all_paid()
     oldUsers = selector.get_ALL_SUBS_DICT()
-    length=len(list(user_dict.keys()))
-    for count,value in enumerate(user_dict.values()):
+    length = len(list(user_dict.keys()))
+    for count, value in enumerate(user_dict.values()):
         model_id = value["model_id"]
        username = value["username"]
-        posts=value["posts"]
-        medias=value["medias"]
+        posts = value["posts"]
+        medias = value["medias"]
        log.warning(
-                f"Download paid content for {model_id}_{username} number:{count+1}/{length} models "
-            )
+            f"Download paid content for {model_id}_{username} number:{count+1}/{length} models "
+        )
         selector.set_ALL_SUBS_DICTVManger(
             {username: models.Model(profile.scrape_profile(model_id))}
         )
-        download.download_process(
-            username,
-            model_id,
-            medias,
-            posts=posts
-        )
+        download.download_process(username, model_id, medias, posts=posts)
 
     # restore og users
     selector.set_ALL_SUBS_DICT(oldUsers)
diff --git a/ofscraper/actions/scraper.py b/ofscraper/actions/scraper.py
index 73a2cee1f..d0b45efe9 100644
--- a/ofscraper/actions/scraper.py
+++ b/ofscraper/actions/scraper.py
@@ -41,7 +41,6 @@ import ofscraper.utils.system.system as system
 from ofscraper.utils.context.run_async import run
 
-
 log = logging.getLogger("shared")
 
 
@@ -97,7 +96,7 @@ async def process_paid_post(model_id, username, c):
             paid_content,
             model_id=model_id,
             username=username,
-            )
+        )
         output = []
         [output.extend(post.media) for post in paid_content]
         log.debug(f"[bold]Paid media count without locked[/bold] {len(output)}")
@@ -218,7 +217,9 @@ async def process_timeline_posts(model_id, username, c):
                     timeline_posts,
                 )
             )
-            timeline_only_posts=list(filter(lambda x:x.regular_timeline,timeline_posts))
+            timeline_only_posts = list(
+                filter(lambda x: x.regular_timeline, timeline_posts)
+            )
 
             await operations.make_post_table_changes(
                 timeline_only_posts,
@@ -259,7 +260,9 @@ async def process_timeline_posts(model_id, username, c):
 async def process_archived_posts(model_id, username, c):
     try:
         with stdout.lowstdout():
-            archived_posts = await archive.get_archived_posts_progress(model_id, username, c=c)
+            archived_posts = await archive.get_archived_posts_progress(
+                model_id, username, c=c
+            )
             archived_posts = list(
                 map(
                     lambda x: posts_.Post(x, model_id, username, "archived"),
@@ -309,9 +312,7 @@ async def process_pinned_posts(model_id, username, c):
         with stdout.lowstdout():
             pinned_posts = await pinned.get_pinned_posts_progress(model_id, c=c)
             pinned_posts = list(
-                map(
-                    lambda x: posts_.Post(x, model_id, username), pinned_posts
-                )
+                map(lambda x: posts_.Post(x, model_id, username), pinned_posts)
             )
             await operations.make_post_table_changes(
                 pinned_posts,
@@ -386,16 +387,23 @@ async def process_all_paid():
     paid_content = await paid.get_all_paid_posts()
     user_dict = {}
     for ele in paid_content:
-        user_id = ele.get("fromUser", {}).get("id") or ele.get("author", {}).get("id")
-        user_dict.setdefault(user_id, []).append(ele)
+        user_id = ele.get("fromUser", {}).get("id") or ele.get("author", {}).get(
+            "id"
+        )
+        user_dict.setdefault(user_id, []).append(ele)
 
     output = {}
     for model_id, value in user_dict.items():
         username = profile.scrape_profile(model_id).get("username")
-        if username == "modeldeleted" and await operations.check_profile_table_exists(
-            model_id=model_id, username=username
+        if (
+            username == "modeldeleted"
+            and await operations.check_profile_table_exists(
+                model_id=model_id, username=username
+            )
         ):
             username = (
-                await operations.get_profile_info(model_id=model_id, username=username)
+                await operations.get_profile_info(
+                    model_id=model_id, username=username
+                )
                 or username
             )
         log.info(f"Processing {username}_{model_id}")
@@ -408,10 +416,14 @@ async def process_all_paid():
             )
         )
         seen = set()
-        new_posts = [post for post in all_posts if post.id not in seen and not seen.add(post.id)]
-        new_medias=[item for post in new_posts for item in post.media]
-        new_medias=filters.filterMedia(new_medias)
-        new_posts=filters.filterPost(new_posts)
+        new_posts = [
+            post
+            for post in all_posts
+            if post.id not in seen and not seen.add(post.id)
+        ]
+        new_medias = [item for post in new_posts for item in post.media]
+        new_medias = filters.filterMedia(new_medias)
+        new_posts = filters.filterPost(new_posts)
         await operations.make_post_table_changes(
             new_posts,
             model_id=model_id,
@@ -424,9 +436,12 @@ async def process_all_paid():
             downloaded=False,
         )
 
-        output[model_id]=dict(model_id=model_id,username=username,posts=new_posts,medias=new_medias)
+        output[model_id] = dict(
+            model_id=model_id, username=username, posts=new_posts, medias=new_medias
+        )
         log.debug(
-            f"[bold]Paid media count {username}_{model_id}[/bold] {len(new_medias)}")
+            f"[bold]Paid media count {username}_{model_id}[/bold] {len(new_medias)}"
+        )
 
     log.debug(
         f"[bold]Paid Media for all models[/bold] {sum(map(lambda x:len(x['medias']),output.values()))}"
diff --git a/ofscraper/api/archive.py b/ofscraper/api/archive.py
index bc68bbecc..7b30f70f1 100644
--- a/ofscraper/api/archive.py
+++ b/ofscraper/api/archive.py
@@ -6,6 +6,7 @@
 \____/|__|   /____  >\___  >__|  (____  /\____/ \___  >__|
      \/           \/     \/           \/            \/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -25,9 +26,9 @@
 import ofscraper.utils.cache as cache
 import ofscraper.utils.constants as constants
 import ofscraper.utils.progress as progress_utils
+import ofscraper.utils.settings as settings
 from ofscraper.classes.semaphoreDelayed import semaphoreDelayed
 from ofscraper.utils.context.run_async import run
-import ofscraper.utils.settings as settings
 
 log = logging.getLogger("shared")
 attempt = contextvars.ContextVar("attempt")
@@ -61,7 +62,7 @@ async def get_archived_posts_progress(model_id, username, forced_after=None, c=N
     oldarchived = list(filter(lambda x: x != None, oldarchived))
     after = await get_after(model_id, username, forced_after)
     splitArrays = get_split_array(oldarchived, username, after)
-    tasks=get_tasks(splitArrays, c, model_id, job_progress, after)
+    tasks = get_tasks(splitArrays, c, model_id, job_progress, after)
 
     page_task = overall_progress.add_task(
         f"Archived Content Pages Progress: {page_count}", visible=True
@@ -70,7 +71,7 @@ async def get_archived_posts_progress(model_id, username, forced_after=None, c=N
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -96,7 +97,11 @@ async def get_archived_posts_progress(model_id, username, forced_after=None, c=N
     progress_utils.archived_layout.visible = False
 
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"archive postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -111,7 +116,6 @@ async def get_archived_posts_progress(model_id, username, forced_after=None, c=N
     return new_posts
 
 
-
 @run
 async def get_archived_posts(model_id, username, forced_after=None, c=None):
     tasks = []
@@ -124,7 +128,7 @@ async def get_archived_posts(model_id, username, forced_after=None, c=None):
         if not read_args.retriveArgs().no_cache
         else []
     )
-    job_progress=None
+    job_progress = None
 
     log.trace(
         "oldarchive {posts}".format(
@@ -136,14 +140,13 @@ async def get_archived_posts(model_id, username, forced_after=None, c=None):
     oldarchived = list(filter(lambda x: x != None, oldarchived))
     after = await get_after(model_id, username, forced_after)
     splitArrays = get_split_array(oldarchived, username, after)
-    tasks=get_tasks(splitArrays, c, model_id, job_progress, after)
-
+    tasks = get_tasks(splitArrays, c, model_id, job_progress, after)
 
     while bool(tasks):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -163,7 +166,11 @@ async def get_archived_posts(model_id, username, forced_after=None, c=None):
         tasks = new_tasks
 
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"archive postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -230,7 +237,9 @@ def get_tasks(splitArrays, c, model_id, job_progress, after):
                     c,
                     model_id,
                     job_progress=job_progress,
-                    required_ids=set([ele.get("created_at") for ele in splitArrays[i]]),
+                    required_ids=set(
+                        [ele.get("created_at") for ele in splitArrays[i]]
+                    ),
                     timestamp=splitArrays[i - 1][-1].get("created_at"),
                     offset=False,
                 )
@@ -278,8 +287,12 @@ def get_tasks(splitArrays, c, model_id, job_progress, after):
 def set_check(unduped, model_id, after):
     if not after:
         seen = set()
-        new_posts = [post for post in cache.get(f"archived_check_{model_id}", default=[]) +unduped if post["id"] not in seen and not seen.add(post["id"])]
-
+        new_posts = [
+            post
+            for post in cache.get(f"archived_check_{model_id}", default=[]) + unduped
+            if post["id"] not in seen and not seen.add(post["id"])
+        ]
+
         cache.set(
             f"archived_check_{model_id}",
             new_posts,
@@ -291,16 +304,14 @@ def set_check(unduped, model_id, after):
 async def get_after(model_id, username, forced_after=None):
     if forced_after != None:
         return forced_after
-    elif not settings.get_after_enabled():
+    elif not settings.get_after_enabled():
         return 0
     elif read_args.retriveArgs().after == 0:
         return 0
     elif read_args.retriveArgs().after:
         return read_args.retriveArgs().after.float_timestamp
-    elif (
-        cache.get(f"{model_id}_full_archived_scrape")
-    ):
+    elif cache.get(f"{model_id}_full_archived_scrape"):
         log.info(
             "Used --after previously. Scraping all archived posts required to make sure content is not missing"
         )
@@ -310,10 +321,12 @@ async def get_after(model_id, username, forced_after=None):
         log.debug("Setting date to zero because database is empty")
         return 0
     missing_items = list(filter(lambda x: x.get("downloaded") != 1, curr))
-    missing_items = list(sorted(missing_items, key=lambda x: x.get('posted_at') or 0))
+    missing_items = list(sorted(missing_items, key=lambda x: x.get("posted_at") or 0))
     if len(missing_items) == 0:
         log.debug("Using last db date because,all downloads in db marked as downloaded")
-        return await operations.get_last_archived_date(model_id=model_id, username=username)
+        return await operations.get_last_archived_date(
+            model_id=model_id, username=username
+        )
     else:
         log.debug(
             f"Setting date slightly before earliest missing item\nbecause {len(missing_items)} posts in db are marked as undownloaded"
@@ -446,7 +459,5 @@ async def scrape_archived_posts(
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task else None
+        job_progress.remove_task(task) if job_progress and task else None
     return posts, new_tasks
diff --git a/ofscraper/api/highlights.py b/ofscraper/api/highlights.py
index 5f534b240..553ba074d 100644
--- a/ofscraper/api/highlights.py
+++ b/ofscraper/api/highlights.py
@@ -57,7 +57,7 @@ async def get_stories_post_progress(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -90,8 +90,11 @@ async def get_stories_post_progress(model_id, c=None):
     )
     log.debug(f"[bold]stories Count with Dupes[/bold] {len(responseArray)} found")
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"stories postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -122,7 +125,7 @@ async def get_stories_post(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -148,8 +151,11 @@ async def get_stories_post(model_id, c=None):
     )
     log.debug(f"[bold]stories Count with Dupes[/bold] {len(responseArray)} found")
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"stories postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -163,6 +169,7 @@ async def get_stories_post(model_id, c=None):
     return new_posts
 
 
+
 async def scrape_stories(c, user_id, job_progress=None) -> list:
     global sem
     global tasks
@@ -227,9 +234,11 @@ async def scrape_stories(c, user_id, job_progress=None) -> list:
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
     return stories, new_tasks
@@ -265,7 +274,7 @@ async def get_highlight_list_progress(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -310,7 +319,7 @@ async def get_highlights_via_list_progress(highlightLists, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -334,17 +343,18 @@ async def get_highlights_via_list_progress(highlightLists, c=None):
         "highlight raw duped {posts}".format(
             posts="\n\n".join(
                 list(
-                    map(
-                        lambda x: f"dupedinfo heighlight: {str(x)}", highlightResponse
-                    )
+                    map(lambda x: f"dupedinfo heighlight: {str(x)}", highlightResponse)
                 )
             )
         )
     )
     log.debug(f"[bold]highlight Count with Dupes[/bold] {len(highlightResponse)} found")
     seen = set()
-    new_posts = [post for post in highlightResponse if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in highlightResponse
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"highlights postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -383,7 +393,7 @@ async def get_highlight_list(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -419,7 +429,7 @@ async def get_highlights_via_list(highlightLists, c):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -440,17 +450,18 @@ async def get_highlights_via_list(highlightLists, c):
         "highlights raw duped {posts}".format(
             posts="\n\n".join(
                 list(
-                    map(
-                        lambda x: f"dupedinfo heighlight: {str(x)}", highlightResponse
-                    )
+                    map(lambda x: f"dupedinfo heighlight: {str(x)}", highlightResponse)
                 )
             )
         )
     )
     log.debug(f"[bold]highlight Count with Dupes[/bold] {len(highlightResponse)} found")
     seen = set()
-    new_posts = [post for post in highlightResponse if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in highlightResponse
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"highlights postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -464,6 +475,7 @@ async def get_highlights_via_list(highlightLists, c):
     return new_posts
 
 
+
 async def scrape_highlight_list(c, user_id, job_progress=None, offset=0) -> list:
     global sem
     attempt.set(0)
@@ -519,9 +531,11 @@ async def scrape_highlight_list(c, user_id, job_progress=None, offset=0) -> list
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
     return data, new_tasks
@@ -574,9 +588,11 @@ async def scrape_highlights(c, id, job_progress=None) -> list:
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
     return resp_data["stories"], new_tasks
diff --git a/ofscraper/api/labels.py b/ofscraper/api/labels.py
index a096e6efd..a6af05e40 100644
--- a/ofscraper/api/labels.py
+++ b/ofscraper/api/labels.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -57,7 +58,7 @@ async def get_labels(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -172,9 +173,11 @@ async def scrape_labels(c, model_id, job_progress=None, offset=0):
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
 
 
 @run
@@ -204,7 +207,7 @@ async def get_labelled_posts(labels, username, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -217,15 +220,22 @@ async def get_labelled_posts(labels, username, c=None):
             f"[bold]Label {label['name']} new post count with Dupes[/bold] {len(new_posts)} found"
         )
         new_posts = label_dedupe(new_posts)
-        log.trace(f"{label['name']} postids {list(map(lambda x:x.get('id'),new_posts))}")
         log.trace(
-
-            f"{label['name']} post raw unduped {{posts}}".format(posts="\n\n".join(
-                list(map(lambda x: f"undupedinfo label: {str(x)}",new_posts))
+            f"{label['name']} postids {list(map(lambda x:x.get('id'),new_posts))}"
         )
-        )
-        )
-
+        log.trace(
+            f"{label['name']} post raw unduped {{posts}}".format(
+                posts="\n\n".join(
+                    list(
+                        map(
+                            lambda x: f"undupedinfo label: {str(x)}",
+                            new_posts,
+                        )
+                    )
+                )
+            )
+        )
+
         log.debug(
             f"[bold]Label {label['name']} new post count without Dupes[/bold] {len(new_posts)} found"
         )
@@ -351,20 +361,25 @@ async def scrape_labelled_posts(c, label, model_id, job_progress=None, offset=0)
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
 
     return label, posts, new_tasks
 
 
 def label_dedupe(labelArray):
     seen = set()
-    new_posts = [post for post in labelArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in labelArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
     return new_posts
 
 
-
 def get_default_label_dict(labels):
     output = {}
     for label in labels:
diff --git a/ofscraper/api/messages.py b/ofscraper/api/messages.py
index 5c1ab2f43..e6cfe889a 100644
--- a/ofscraper/api/messages.py
+++ b/ofscraper/api/messages.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -27,10 +28,10 @@
 import ofscraper.db.operations as operations
 import ofscraper.utils.args.read as read_args
 import ofscraper.utils.cache as cache
-import ofscraper.utils.settings as settings
 import ofscraper.utils.constants as constants
 import ofscraper.utils.progress as progress_utils
 import ofscraper.utils.sems as sems
+import ofscraper.utils.settings as settings
 from ofscraper.utils.context.run_async import run
 
 log = logging.getLogger("shared")
@@ -73,7 +74,7 @@ async def get_messages_progress(model_id, username, forced_after=None, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -106,9 +107,12 @@ async def get_messages_progress(model_id, username, forced_after=None, c=None):
         )
     )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.debug(f"[bold]Messages Count without Dupes[/bold] {len(responseArray)} found")
 
     log.trace(f"messages messageids {list(map(lambda x:x.get('id'),new_posts))}")
@@ -169,7 +173,7 @@ async def get_messages(model_id, username, forced_after=None, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -196,7 +200,11 @@ async def get_messages(model_id, username, forced_after=None, c=None):
         )
     )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.debug(f"[bold]Messages Count without Dupes[/bold] {len(responseArray)} found")
@@ -285,9 +293,11 @@ def get_tasks(splitArrays, filteredArray, oldmessages, model_id, job_progress, c
                 c,
                 model_id,
                 job_progress=job_progress,
-                message_id=splitArrays[0][0].get("id")
-                if len(filteredArray) == len(oldmessages)
-                else None,
+                message_id=(
+                    splitArrays[0][0].get("id")
+                    if len(filteredArray) == len(oldmessages)
+                    else None
+                ),
                 required_ids=set([ele.get("created_at") for ele in splitArrays[0]]),
             )
         )
@@ -300,7 +310,9 @@ def get_tasks(splitArrays, filteredArray, oldmessages, model_id, job_progress, c
                     model_id,
                     job_progress=job_progress,
                     message_id=splitArrays[i - 1][-1].get("id"),
-                    required_ids=set([ele.get("created_at") for ele in splitArrays[i]]),
+                    required_ids=set(
+                        [ele.get("created_at") for ele in splitArrays[i]]
+                    ),
                 )
             )
         )
@@ -314,7 +326,9 @@ def get_tasks(splitArrays, filteredArray, oldmessages, model_id, job_progress, c
                 model_id,
                 job_progress=job_progress,
                 message_id=splitArrays[-2][-1].get("id"),
-                required_ids=set([ele.get("created_at") for ele in splitArrays[-1]]),
+                required_ids=set(
+                    [ele.get("created_at") for ele in splitArrays[-1]]
+                ),
             )
         )
     )
@@ -327,9 +341,11 @@ def get_tasks(splitArrays, filteredArray, oldmessages, model_id, job_progress, c
                 model_id,
                 job_progress=job_progress,
                 required_ids=None,
-                message_id=splitArrays[0][0].get("id")
-                if len(filteredArray) == len(oldmessages)
-                else None,
+                message_id=(
+                    splitArrays[0][0].get("id")
+                    if len(filteredArray) == len(oldmessages)
+                    else None
+                ),
             )
         )
     )
@@ -352,7 +368,11 @@ def get_tasks(splitArrays, filteredArray, oldmessages, model_id, job_progress, c
 def set_check(unduped, model_id, after):
     if not after:
         seen = set()
-        new_posts = [post for post in cache.get(f"message_check_{model_id}", default=[]) +unduped if post["id"] not in seen and not seen.add(post["id"])]
+        new_posts = [
+            post
+            for post in cache.get(f"message_check_{model_id}", default=[]) + unduped
+            if post["id"] not in seen and not seen.add(post["id"])
+        ]
         cache.set(
             f"message_check_{model_id}",
             list(new_posts),
@@ -490,9 +510,11 @@ async def scrape_messages(
             raise E
         finally:
             sem.release()
-            job_progress.remove_task(
-                task
-            ) if job_progress and task != None else None
+            (
+                job_progress.remove_task(task)
+                if job_progress and task != None
+                else None
+            )
     return messages, new_tasks
@@ -517,15 +539,13 @@ def get_individual_post(model_id, postid):
 async def get_after(model_id, username, forced_after=None):
     if forced_after != None:
         return forced_after
-    elif not settings.get_after_enabled():
+    elif not settings.get_after_enabled():
         return 0
     elif read_args.retriveArgs().after == 0:
         return 0
     elif read_args.retriveArgs().after:
         return read_args.retriveArgs().after.float_timestamp
-    elif (
-        cache.get(f"{model_id}_scrape_messages")
-    ):
+    elif cache.get(f"{model_id}_scrape_messages"):
         log.debug(
             "Used --after previously. Scraping all messages required to make sure content is not missing"
         )
@@ -535,7 +555,9 @@ async def get_after(model_id, username, forced_after=None):
         log.debug("Setting date to zero because database is empty")
         return 0
     missing_items = list(filter(lambda x: x.get("downloaded") != 1, curr))
-    missing_items = list(sorted(missing_items, key=lambda x: arrow.get(x.get("posted_at") or 0)))
+    missing_items = list(
+        sorted(missing_items, key=lambda x: arrow.get(x.get("posted_at") or 0))
+    )
     if len(missing_items) == 0:
         log.debug(
             "Using last db date because,all downloads in db are marked as downloaded"
diff --git a/ofscraper/api/paid.py b/ofscraper/api/paid.py
index e6b78f98f..f8ba662f0 100644
--- a/ofscraper/api/paid.py
+++ b/ofscraper/api/paid.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -56,7 +57,7 @@ async def get_paid_posts_progress(username, model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -80,9 +81,12 @@ async def get_paid_posts_progress(username, model_id, c=None):
     progress_utils.paid_layout.visible = False
 
     log.debug(f"[bold]Paid Count with Dupes[/bold] {len(responseArray)} found")
-
     seen = set()
-    new_posts = [post for post in responseArray if post['id'] not in seen and not seen.add(post['id'])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"paid postids {list(map(lambda x:x.get('id'),new_posts))}")
@@ -118,7 +122,7 @@ async def get_paid_posts(model_id, username, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -141,9 +145,13 @@ async def get_paid_posts(model_id, username, c=None):
                 list(map(lambda x: f"dupedinfo paid: {str(x)}", responseArray))
             )
         )
-        )
+    )
     seen = set()
-    new_posts = [post for post in responseArray if post['id'] not in seen and not seen.add(post['id'])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
     log.trace(f"paid postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
         "paid raw unduped {posts}".format(
@@ -160,7 +168,11 @@ async def get_paid_posts(model_id, username, c=None):
 def set_check(unduped, model_id):
     seen = set()
-    new_posts = [post for post in cache.get(f"purchase_check_{model_id}", default=[]) +unduped if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in cache.get(f"purchase_check_{model_id}", default=[]) + unduped
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
     cache.set(
         f"purchased_check_{model_id}",
         new_posts,
@@ -236,9 +248,11 @@ async def scrape_paid(c, username, job_progress=None, offset=0):
                         )
                     )
                 )
-                job_progress.remove_task(
-                    task
-                ) if task and job_progress else None
+                (
+                    job_progress.remove_task(task)
+                    if task and job_progress
+                    else None
+                )
             else:
                 log.debug(f"[bold]paid response status code:[/bold]{r.status}")
@@ -338,26 +352,30 @@ async def get_all_paid_posts():
                 continue
 
     overall_progress.remove_task(page_task)
-
+
     log.debug(f"[bold]Paid Post count with Dupes[/bold] {len(output)} found")
     log.trace(
-            "paid raw duped {posts}".format(
-                posts="\n\n".join(
-                    list(map(lambda x: f"dupedinfo all paid: {str(x)}", output))
+        "paid raw duped {posts}".format(
+            posts="\n\n".join(
+                list(map(lambda x: f"dupedinfo all paid: {str(x)}", output))
+            )
         )
     )
-    )
     seen = set()
-    new_posts = [post for post in output if post['id'] not in seen and not seen.add(post['id'])]
+    new_posts = [
+        post
+        for post in output
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
     log.trace(f"all paid postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.debug(f"[bold]Paid Post count without Dupes[/bold] {len(new_posts)} found")
     log.trace(
-            "paid raw duped {posts}".format(
-                posts="\n\n".join(
-                    list(map(lambda x: f"undupedinfo all paid: {str(x)}", new_posts))
+        "paid raw duped {posts}".format(
+            posts="\n\n".join(
+                list(map(lambda x: f"undupedinfo all paid: {str(x)}", new_posts))
+            )
         )
     )
-    )
     cache.set(
         "purchased_all",
         list(map(lambda x: x.get("id"), list())),
diff --git a/ofscraper/api/pinned.py b/ofscraper/api/pinned.py
index 013badc49..15eec8bf0 100644
--- a/ofscraper/api/pinned.py
+++ b/ofscraper/api/pinned.py
@@ -6,6 +6,7 @@
 \____/|__|   /____  >\___  >__|  (____  /\____/ \___  >__|
      \/           \/     \/           \/            \/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -21,13 +22,11 @@
 )
 
 import ofscraper.utils.args.read as read_args
+import ofscraper.utils.cache as cache
 import ofscraper.utils.constants as constants
 import ofscraper.utils.progress as progress_utils
 from ofscraper.classes.semaphoreDelayed import semaphoreDelayed
 from ofscraper.utils.context.run_async import run
-import ofscraper.utils.cache as cache
-
-
 log = logging.getLogger("shared")
 attempt = contextvars.ContextVar("attempt")
@@ -51,9 +50,11 @@ async def get_pinned_posts_progress(model_id, c=None):
                 c,
                 model_id,
                 job_progress=job_progress,
-                timestamp=read_args.retriveArgs().after.float_timestamp
-                if read_args.retriveArgs().after
-                else None,
+                timestamp=(
+                    read_args.retriveArgs().after.float_timestamp
+                    if read_args.retriveArgs().after
+                    else None
+                ),
             )
         )
     )
@@ -65,7 +66,7 @@ async def get_pinned_posts_progress(model_id, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -94,9 +95,13 @@ async def get_pinned_posts_progress(model_id, c=None):
                 list(map(lambda x: f"dupedinfo pinned: {str(x)}", responseArray))
             )
         )
-        )
+    )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"pinned postids{list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -127,19 +132,20 @@ async def get_pinned_posts(model_id, c=None):
                 c,
                 model_id,
                 job_progress=job_progress,
-                timestamp=read_args.retriveArgs().after.float_timestamp
-                if read_args.retriveArgs().after
-                else None,
+                timestamp=(
+                    read_args.retriveArgs().after.float_timestamp
+                    if read_args.retriveArgs().after
+                    else None
+                ),
             )
         )
     )
-
 
     while bool(tasks):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -162,9 +168,13 @@ async def get_pinned_posts(model_id, c=None):
                 list(map(lambda x: f"dupedinfo pinned: {str(x)}", responseArray))
             )
         )
-        )
+    )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"pinned postids{list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -179,11 +189,14 @@ async def get_pinned_posts(model_id, c=None):
     return new_posts
 
 
-
 def set_check(unduped, model_id):
     if not read_args.retriveArgs().after:
         seen = set()
-        new_posts = [post for post in cache.get(f"pinned_check_{model_id}", default=[]) +unduped if post["id"] not in seen and not seen.add(post["id"])]
+        new_posts = [
+            post
+            for post in cache.get(f"pinned_check_{model_id}", default=[]) + unduped
+            if post["id"] not in seen and not seen.add(post["id"])
+        ]
         cache.set(
             f"pinned_check_{model_id}",
             new_posts,
@@ -223,10 +236,14 @@ async def scrape_pinned_posts(
         await asyncio.sleep(1)
     try:
         attempt.set(attempt.get(0) + 1)
-        task = job_progress.add_task(
-            f"Attempt {attempt.get()}/{constants.getattr('NUM_TRIES')}: Timestamp -> {arrow.get(math.trunc(float(timestamp))) if timestamp!=None else 'initial'}",
-            visible=True,
-        ) if job_progress else None
+        task = (
+            job_progress.add_task(
+                f"Attempt {attempt.get()}/{constants.getattr('NUM_TRIES')}: Timestamp -> {arrow.get(math.trunc(float(timestamp))) if timestamp!=None else 'initial'}",
+                visible=True,
+            )
+            if job_progress
+            else None
+        )
         async with c.requests(url=url)() as r:
             if r.ok:
                 posts = (await r.json_())["list"]
diff --git a/ofscraper/api/profile.py b/ofscraper/api/profile.py
index 33b1dffd3..b567317f9 100644
--- a/ofscraper/api/profile.py
+++ b/ofscraper/api/profile.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import asyncio
 import contextvars
 import logging
diff --git a/ofscraper/api/subscriptions/lists.py b/ofscraper/api/subscriptions/lists.py
index fe084a9d8..f77d8a23b 100644
--- a/ofscraper/api/subscriptions/lists.py
+++ b/ofscraper/api/subscriptions/lists.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -112,7 +113,8 @@ async def get_lists():
             new_tasks = []
             try:
                 async with asyncio.timeout(
-                    constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                    constants.getattr("API_TIMEOUT_PER_TASKS")
+                    * max(len(tasks), 2)
                 ):
                     for task in asyncio.as_completed(tasks):
                         try:
@@ -245,7 +247,8 @@ async def get_list_users(lists):
             new_tasks = []
             try:
                 async with asyncio.timeout(
-                    constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                    constants.getattr("API_TIMEOUT_PER_TASKS")
+                    * max(len(tasks), 2)
                 ):
                     for task in asyncio.as_completed(tasks):
                         try:
diff --git a/ofscraper/api/subscriptions/subscriptions.py b/ofscraper/api/subscriptions/subscriptions.py
index 98fbf9443..455ef1378 100644
--- a/ofscraper/api/subscriptions/subscriptions.py
+++ b/ofscraper/api/subscriptions/subscriptions.py
@@ -110,7 +110,7 @@ async def activeHelper(subscribe_count, c):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -170,7 +170,7 @@ async def expiredHelper(subscribe_count, c):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
diff --git a/ofscraper/api/timeline.py b/ofscraper/api/timeline.py
index 429cd6566..64c43417d 100644
--- a/ofscraper/api/timeline.py
+++ b/ofscraper/api/timeline.py
@@ -6,6 +6,7 @@
 \____/|__|   /____  >\___  >__|  (____  /\____/ \___  >__|
      \/           \/     \/           \/            \/
 """
+
 import asyncio
 import contextvars
 import logging
@@ -24,10 +25,10 @@
 import ofscraper.db.operations as operations
 import ofscraper.utils.args.read as read_args
 import ofscraper.utils.cache as cache
-import ofscraper.utils.settings as settings
 import ofscraper.utils.constants as constants
 import ofscraper.utils.progress as progress_utils
 import ofscraper.utils.sems as sems
+import ofscraper.utils.settings as settings
 from ofscraper.utils.context.run_async import run
 
 log = logging.getLogger("shared")
@@ -56,7 +57,7 @@ async def get_timeline_media_progress(model_id, username, forced_after=None, c=N
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -84,13 +85,16 @@ async def get_timeline_media_progress(model_id, username, forced_after=None, c=N
     log.trace(
         "post raw duped {posts}".format(
             posts="\n\n".join(
-                list(map(lambda x: f"dupedinfo timeline: {str(x)}",responseArray))
+                list(map(lambda x: f"dupedinfo timeline: {str(x)}", responseArray))
             )
         )
     )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
-
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"timeline postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
@@ -138,7 +142,7 @@ async def get_timeline_media(model_id, username, forced_after=None, c=None):
         new_tasks = []
         try:
             async with asyncio.timeout(
-                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks),2)
+                constants.getattr("API_TIMEOUT_PER_TASKS") * max(len(tasks), 2)
             ):
                 for task in asyncio.as_completed(tasks):
                     try:
@@ -160,18 +164,22 @@ async def get_timeline_media(model_id, username, forced_after=None, c=None):
     log.trace(
         "post raw duped {posts}".format(
             posts="\n\n".join(
-                list(map(lambda x: f"dupedinfo timeline: {str(x)}",responseArray))
+                list(map(lambda x: f"dupedinfo timeline: {str(x)}", responseArray))
             )
         )
     )
     seen = set()
-    new_posts = [post for post in responseArray if post["id"] not in seen and not seen.add(post["id"])]
+    new_posts = [
+        post
+        for post in responseArray
+        if post["id"] not in seen and not seen.add(post["id"])
+    ]
 
     log.trace(f"timeline postids {list(map(lambda x:x.get('id'),new_posts))}")
     log.trace(
         "post raw unduped {posts}".format(
             posts="\n\n".join(
-                list(map(lambda x: f"undupedinfo timeline: {str(x)}",new_posts))
+                list(map(lambda x: f"undupedinfo timeline: {str(x)}", new_posts))
             )
         )
     )
@@ -238,7 +246,9 @@ def get_tasks(splitArrays, c, model_id, job_progress, after):
                     c,
                    model_id,
                     job_progress=job_progress,
-                    required_ids=set([ele.get("created_at") for ele in splitArrays[i]]),
+                    required_ids=set(
+                        [ele.get("created_at") for ele in splitArrays[i]]
+                    ),
                     timestamp=splitArrays[i - 1][-1].get("created_at"),
                     offset=False,
                 )
@@ -286,7 +296,11 @@ def get_tasks(splitArrays, c, model_id, job_progress, after):
 def set_check(unduped, model_id, after):
     if not after:
         seen = set()
-        new_posts = [post for post in cache.get(f"timeline_check_{model_id}", default=[]) +unduped if post["id"] not in seen and not seen.add(post["id"])]
+        new_posts = [
+            post
+            for post in cache.get(f"timeline_check_{model_id}", default=[]) + unduped
+            if post["id"] not in seen and not seen.add(post["id"])
+        ]
         cache.set(
             f"timeline_check_{model_id}",
             new_posts,
@@ -312,15 +326,13 @@ def get_individual_post(id):
 async def get_after(model_id, username, forced_after=None):
     if forced_after is not None:
         return forced_after
-    elif not settings.get_after_enabled():
+    elif not settings.get_after_enabled():
         return 0
     elif read_args.retriveArgs().after == 0:
         return 0
     elif read_args.retriveArgs().after:
         return read_args.retriveArgs().after.float_timestamp
-    elif (
-        cache.get(f"{model_id}_full_timeline_scrape")
-    ):
+    elif cache.get(f"{model_id}_full_timeline_scrape"):
         log.info(
             "Used --after previously. Scraping all timeline posts required to make sure content is not missing"
         )
@@ -329,11 +341,15 @@ async def get_after(model_id, username, forced_after=None):
     if len(curr) == 0:
         log.debug("Setting date to zero because database is empty")
         return 0
-    missing_items = list(filter(lambda x: x.get("downloaded")!=1, curr))
-    missing_items = list(sorted(missing_items, key=lambda x: arrow.get(x.get("posted_at") or 0)))
+    missing_items = list(filter(lambda x: x.get("downloaded") != 1, curr))
+    missing_items = list(
+        sorted(missing_items, key=lambda x: arrow.get(x.get("posted_at") or 0))
+    )
     if len(missing_items) == 0:
         log.debug("Using last db date because,all downloads in db marked as downloaded")
-        return await operations.get_last_timeline_date(model_id=model_id, username=username)
+        return await operations.get_last_timeline_date(
+            model_id=model_id, username=username
+        )
     else:
         log.debug(
             f"Setting date slightly before earliest missing item\nbecause {len(missing_items)} posts in db are marked as undownloaded"
@@ -463,7 +479,9 @@ async def scrape_timeline_posts(
     finally:
         sem.release()
-        job_progress.remove_task(
-            task
-        ) if job_progress and task != None else None
-    return posts, new_tasks
\ No newline at end of file
+        (
+            job_progress.remove_task(task)
+            if job_progress and task != None
+            else None
+        )
+    return posts, new_tasks
diff --git a/ofscraper/classes/placeholder.py b/ofscraper/classes/placeholder.py
index e24cfe66d..857f321ed 100644
--- a/ofscraper/classes/placeholder.py
+++ b/ofscraper/classes/placeholder.py
@@ -199,9 +199,7 @@ def add_price_variables(self, username):
                 "current_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_current_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_current_price == 0 else "Paid"
                 )
             }
         )
@@ -210,9 +208,7 @@ def add_price_variables(self, username):
                 "regular_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_regular_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_regular_price == 0 else "Paid"
                 )
             }
         )
@@ -221,9 +217,7 @@ def add_price_variables(self, username):
                 "promo_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_promo_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_promo_price == 0 else "Paid"
                 )
             }
         )
@@ -232,9 +226,7 @@ def add_price_variables(self, username):
                 "renewal_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_renewal_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_renewal_price == 0 else "Paid"
                 )
             }
         )
@@ -405,9 +397,7 @@ def add_price_variables(self, username):
                 "current_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_current_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_current_price == 0 else "Paid"
                 )
             }
         )
@@ -416,9 +406,7 @@ def add_price_variables(self, username):
                 "regular_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_regular_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_regular_price == 0 else "Paid"
                 )
             }
         )
@@ -427,9 +415,7 @@ def add_price_variables(self, username):
                 "promo_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_promo_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_promo_price == 0 else "Paid"
                 )
             }
         )
@@ -438,9 +424,7 @@ def add_price_variables(self, username):
                 "renewal_price": (
                     constants.getattr("MODEL_PRICE_PLACEHOLDER")
                     if not modelObj
-                    else "Free"
-                    if modelObj.final_renewal_price == 0
-                    else "Paid"
+                    else "Free" if modelObj.final_renewal_price == 0 else "Paid"
                 )
             }
         )
diff --git a/ofscraper/classes/posts.py b/ofscraper/classes/posts.py
index 39db78d73..4a0f1dfaf 100644
--- a/ofscraper/classes/posts.py
+++ b/ofscraper/classes/posts.py
@@ -48,11 +48,13 @@ def archived(self):
         if self.post.get("isArchived"):
             return 1
         return 0
+
     @property
     def pinned(self):
         if self.post.get("isPinned"):
             return 1
         return 0
+
     @property
     def regular_timeline(self):
         return not self.archived and not self.pinned
@@ -79,6 +81,7 @@ def db_text(self):
     @property
     def title(self):
         return self._post.get("title")
+
     @property
     def responsetype(self):
         if self._responsetype:
@@ -87,10 +90,9 @@ def responsetype(self):
             return "pinned"
         elif self.archived:
             return "self.archived"
-        elif self.post.get("responseType")=="post":
+        elif self.post.get("responseType") == "post":
             return "timeline"
         return self.post.get("responseType")
-
     @property
     def modified_responsetype(self):
diff --git a/ofscraper/classes/table.py b/ofscraper/classes/table.py
index e08b2e95e..b77b2d215 100644
--- a/ofscraper/classes/table.py
+++ b/ofscraper/classes/table.py
@@ -112,7 +112,7 @@ class IntegerInput(Input):
     def __init__(
         self,
         *args,
-        **kwargs
+        **kwargs,
         # ---snip---
     ) -> None:
         super().__init__(
@@ -181,7 +181,7 @@ class IntegerInput(Input):
     def __init__(
         self,
         *args,
-        **kwargs
+        **kwargs,
         # ---snip---
     ) -> None:
         super().__init__(
@@ -313,7 +313,7 @@ class IntegerInput(Input):
     def __init__(
         self,
         *args,
-        **kwargs
+        **kwargs,
         # ---snip---
     ) -> None:
         super().__init__(
@@ -792,9 +792,9 @@ def sort_helper(self, label=None):
             helperNode = self.query_one("#Length")
             self._filtered_rows = sorted(
                 self._filtered_rows,
-                key=lambda x: helperNode.convertString(x[index])
-                if x[index] != "N/A"
-                else 0,
+                key=lambda x: (
+                    helperNode.convertString(x[index]) if x[index] != "N/A" else 0
+                ),
                 reverse=self.reverse,
             )
             self.make_table()
@@ -807,9 +807,9 @@ def sort_helper(self, label=None):
             helperNode = self.query_one("#Post_Date")
             self._filtered_rows = sorted(
                 self._filtered_rows,
-                key=lambda x: helperNode.convertString(x[index])
-                if x[index] != "N/A"
-                else 0,
+                key=lambda x: (
+                    helperNode.convertString(x[index]) if x[index] != "N/A" else 0
+                ),
                 reverse=self.reverse,
             )
             self.make_table()
diff --git a/ofscraper/commands/check.py b/ofscraper/commands/check.py
index 4ef3d3799..da94a5ad7 100644
--- a/ofscraper/commands/check.py
+++ b/ofscraper/commands/check.py
@@ -9,14 +9,13 @@
 
 import arrow
 
+import ofscraper.api.archive as archived
 import ofscraper.api.highlights as highlights
 import ofscraper.api.messages as messages_
 import ofscraper.api.paid as paid_
+import ofscraper.api.pinned as pinned
 import ofscraper.api.profile as profile
 import ofscraper.api.timeline as timeline
-import ofscraper.api.archive as archived
-import ofscraper.api.pinned as pinned
-
 import ofscraper.classes.posts as posts_
 import ofscraper.classes.sessionbuilder as sessionbuilder
 import ofscraper.classes.table as table
@@ -76,20 +75,20 @@ def process_download_cart():
             log.info("Getting items from queue")
         try:
             row, key = app.row_queue.get()
-            restype = app.row_names.index("Responsetype")
+            restype = row[app.row_names.index("Responsetype")].plain
             username = app.row_names.index("UserName")
             post_id = app.row_names.index("Post_ID")
             media_id = app.row_names.index("Media_ID")
             url = None
-            if row[restype].plain == "message":
+            if restype == "message":
                 url = constants.getattr("messageTableSPECIFIC").format(
                     row[username].plain, row[post_id].plain
                 )
-            elif row[restype].plain in {"pinned","timeline","archived"}:
+            elif restype in {"pinned", "timeline", "archived"}:
                 url = f"{row[post_id]}"
-            elif row[restype].plain == "highlights":
+            elif restype == "highlights":
                 url = constants.getattr("storyEP").format(row[post_id].plain)
-            elif row[restype].plain == "stories":
+            elif restype == "stories":
                 url = constants.getattr("highlightsWithAStoryEP").format(
                     row[post_id].plain
                 )
@@ -195,7 +194,7 @@ async def post_check_helper():
                     data,
                     expire=constants.getattr("DAY_SECONDS"),
                 )
-            oldarchive=cache.get(f"archived_check_{model_id}", default=[])
+            oldarchive = cache.get(f"archived_check_{model_id}", default=[])
             if len(oldarchive) > 0 and not read_args.retriveArgs().force:
                 user_dict[user_name].extend(oldarchive)
             else:
@@ -208,13 +207,11 @@ async def post_check_helper():
                     data,
                     expire=constants.getattr("DAY_SECONDS"),
                 )
-            oldpinned=cache.get(f"pinned_check_{model_id}", default=[])
+            oldpinned = cache.get(f"pinned_check_{model_id}", default=[])
             if len(oldpinned) > 0 and not read_args.retriveArgs().force:
                 user_dict[user_name].extend(oldpinned)
             else:
-                data = await pinned.get_pinned_posts(
-                    model_id, c=c
-                )
+                data = await pinned.get_pinned_posts(model_id, c=c)
                 user_dict[user_name].extend(data)
                 cache.set(
                     f"pinned_check_{model_id}",
diff --git a/ofscraper/db/operations.py b/ofscraper/db/operations.py
index c871405a4..1b1db417f 100644
--- a/ofscraper/db/operations.py
+++ b/ofscraper/db/operations.py
@@ -10,6 +10,7 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import logging
 import pathlib
 import shutil
@@ -29,10 +30,10 @@
 from ofscraper.db.operations_.stories import *
 from ofscraper.utils.context.run_async import run
 
-
 console = Console()
 log = logging.getLogger("shared")
 
+
 async def create_tables(model_id, username):
     await create_models_table(model_id=model_id, username=username)
     await create_profile_table(model_id=model_id, username=username)
@@ -45,19 +46,21 @@ async def create_tables(model_id, username):
     await create_labels_table(model_id=model_id, username=username)
     await create_schema_table(model_id=model_id, username=username)
 
+
 async def modify_tables(model_id=None, username=None):
-    backup=create_backup_transition(model_id, username)
+    backup = create_backup_transition(model_id, username)
     try:
         await add_column_tables(model_id=model_id, username=username)
-        await modify_tables_constraints_and_columns(model_id=model_id, username=username)
+        await modify_tables_constraints_and_columns(
+            model_id=model_id, username=username
+        )
     except Exception as E:
-        restore_backup_transition(backup,model_id,username)
+        restore_backup_transition(backup, model_id, username)
         raise E
 
-def restore_backup_transition(backup,model_id,username):
-    database=placeholder.databasePlaceholder().databasePathHelper(
-        model_id, username
-    )
+
+def restore_backup_transition(backup, model_id, username):
+    database = placeholder.databasePlaceholder().databasePathHelper(model_id, username)
     shutil.copy2(backup, database)
@@ -73,7 +76,7 @@ def create_backup_transition(model_id, username):
         "stories_model_id",
         "messages_model_id",
         "labels_model_id",
-        "media_posted_at"
+        "media_posted_at",
     ]
     groupB = [
         "profile_username_constraint_removed",
@@ -85,7 +88,7 @@ def create_backup_transition(model_id, username):
         "products_model_id_constraint_added",
         "messages_model_id_constraint_added",
     ]
-    if len(set(groupA+groupB).difference(set(changes)))>0:
+    if len(set(groupA + groupB).difference(set(changes))) > 0:
         log.info("creating a backup before transition")
         new_path = create_backup(model_id, username, "old_schema_db_backup.db")
         log.info(f"transition backup created at {new_path}")
@@ -106,7 +109,7 @@ async def add_column_tables(model_id=None, username=None):
         await add_flag_schema("media_posted_at", model_id=model_id, username=username)
     if not "posts_pinned" in changes:
         await add_column_post_pinned(model_id=model_id, username=username)
-        await add_flag_schema("posts_pinned", model_id=model_id, username=username)
+        await add_flag_schema("posts_pinned", model_id=model_id, username=username)
     if not "posts_model_id" in changes:
         await add_column_post_ID(model_id=model_id, username=username)
         await add_flag_schema("posts_model_id", model_id=model_id, username=username)
@@ -220,6 +223,7 @@ def create_backup(model_id, username, backup=None):
     cache.close()
     return database_copy
 
+
 @run
 async def table_init_create(model_id=None, username=None):
     await create_tables(model_id=model_id, username=username)
diff --git a/ofscraper/db/operations_/labels.py b/ofscraper/db/operations_/labels.py
index 92fed1268..c93032743 100644
--- a/ofscraper/db/operations_/labels.py
+++ b/ofscraper/db/operations_/labels.py
@@ -10,9 +10,11 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import contextlib
 import logging
 import sqlite3
+
 from rich.console import Console
 
 import ofscraper.db.operations_.helpers as helpers
@@ -106,7 +108,7 @@ def update_labels_table(
                 model_id,
                 label.label_id,
                 model_id,
-                post.id
+                post.id,
             ),
             posts,
         )
@@ -120,16 +122,16 @@ def write_labels_table_transition(
     inputData: list, model_id=None, username=None, conn=None
 ):
     with contextlib.closing(conn.cursor()) as curr:
-        ordered_keys=["label_id","name", "type", "post_id","model_id"]
+        ordered_keys = ["label_id", "name", "type", "post_id", "model_id"]
         insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData]
         curr.executemany(labelInsert, insertData)
         conn.commit()
 
 
 @wrapper.operation_wrapper_async
-def get_all_labels_posts(label,model_id=None, username=None, conn=None):
+def get_all_labels_posts(label, model_id=None, username=None, conn=None):
     with contextlib.closing(conn.cursor()) as curr:
-        curr.execute(labelPostsID, [model_id,label.label_id])
+        curr.execute(labelPostsID, [model_id, label.label_id])
         return [dict(row)["post_id"] for row in curr.fetchall()]
@@ -138,7 +140,9 @@ def add_column_labels_ID(conn=None, **kwargs):
     with contextlib.closing(conn.cursor()) as cur:
         try:
             # Check if column exists (separate statement)
-            cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('labels') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;")
+            cur.execute(
+                "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('labels') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;"
+            )
             alter_required = cur.fetchone()[0]  # Fetch the result (0 or 1)
             # Add column if necessary (conditional execution)
             if alter_required == 0:
@@ -149,6 +153,7 @@ def add_column_labels_ID(conn=None, **kwargs):
             conn.rollback()
             raise e  # Raise the error for handling
 
+
 @wrapper.operation_wrapper_async
 def drop_labels_table(model_id=None, username=None, conn=None) -> list:
     with contextlib.closing(conn.cursor()) as cur:
@@ -177,7 +182,10 @@ def get_all_labels_transition(model_id=None, username=None, conn=None) -> list:
         cur.execute(sql)  # Execute the query
         data = [dict(row) for row in cur.fetchall()]
-        return [dict(row,label_id=row.get("label_id") or row.get("id")) for row in data]
+        return [
+            dict(row, label_id=row.get("label_id") or row.get("id")) for row in data
+        ]
+
 
 async def modify_unique_constriant_labels(model_id=None, username=None):
     data = await get_all_labels_transition(model_id=model_id, username=username)
@@ -185,13 +193,16 @@ async def modify_unique_constriant_labels(model_id=None, username=None):
     await create_labels_table(model_id=model_id, username=username)
     await write_labels_table_transition(data, model_id=model_id, username=username)
 
+
 async def make_label_table_changes(label, model_id=None, username=None):
-    curr = set( await get_all_labels_posts( label,model_id=model_id,username=username))
+    curr = set(await get_all_labels_posts(label, model_id=model_id, username=username))
     new_posts = list(filter(lambda x: x.id not in curr, label.posts))
     curr_posts = list(filter(lambda x: x.id in curr, label.posts))
     if len(new_posts) > 0:
         new_posts = helpers.converthelper(new_posts)
-        await write_labels_table(label,new_posts, model_id=model_id, username=username)
+        await write_labels_table(label, new_posts, model_id=model_id, username=username)
     if read_args.retriveArgs().metadata and len(curr_posts) > 0:
         curr_posts = helpers.converthelper(curr_posts)
-        await update_labels_table(label,curr_posts, model_id=model_id, username=username)
\ No newline at end of file
+        await update_labels_table(
+            label, curr_posts, model_id=model_id, username=username
+        )
diff --git a/ofscraper/db/operations_/media.py b/ofscraper/db/operations_/media.py
index ea5793080..638872266 100644
--- a/ofscraper/db/operations_/media.py
+++ b/ofscraper/db/operations_/media.py
@@ -10,18 +10,19 @@
 (_______)|/     \_______)(_______/|/     \__/|/     \||/     (_______/|/     \__/
 """
+
 import contextlib
 import logging
 import math
 import pathlib
 import sqlite3
+
 import arrow
 from rich.console import Console
 
 import ofscraper.db.operations_.wrapper as wrapper
 from ofscraper.utils.context.run_async import run
 
-
 console = Console()
 log = logging.getLogger("shared")
@@ -110,7 +111,7 @@
 media_type,preview,linked,
 downloaded,created_at,posted_at,hash,model_id)
 VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);"""
 
-mediaDownloadSelect= """
+mediaDownloadSelect = """
 SELECT
 directory,filename,size
 downloaded,hash
 FROM medias where media_id=(?)
@@ -144,6 +145,9 @@
 FROM medias where api_type=('Message') or api_type=('Messages') and model_id=(?)
""" +profileUnique = """ +SELECT DISTINCT user_id FROM profiles +""" @wrapper.operation_wrapper_async def create_media_table(model_id=None, username=None, conn=None): @@ -156,8 +160,10 @@ def create_media_table(model_id=None, username=None, conn=None): def add_column_media_hash(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: try: - # Check if column exists (separate statement) - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'hash') THEN 1 ELSE 0 END AS alter_required;") + # Check if column exists (separate statement) + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'hash') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] # Fetch the result (0 or 1) # Add column if necessary (conditional execution) if alter_required == 0: @@ -175,7 +181,9 @@ def add_column_media_posted_at(conn=None, **kwargs): with contextlib.closing(conn.cursor()) as cur: try: # Check if column exists (separate statement) - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'posted_at') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'posted_at') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] # Fetch the result (0 or 1) # Add column if necessary (conditional execution) @@ -186,12 +194,16 @@ def add_column_media_posted_at(conn=None, **kwargs): except sqlite3.Error as e: conn.rollback() raise e # Rollback in case of errors + + @wrapper.operation_wrapper_async def add_column_media_ID(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: try: # Check if column exists (separate statement) - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('medias') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] # Fetch the result (0 or 1) # Add column if necessary (conditional execution) @@ -293,23 +305,23 @@ def write_media_table_via_api_batch(medias, model_id=None, conn=None, **kwargs) def write_media_table_transition(inputData, model_id=None, conn=None, **kwargs): with contextlib.closing(conn.cursor()) as curr: ordered_keys = [ - "media_id", - "post_id", - "link", - "directory", - "filename", - "size", - "api_type", - "media_type", - "preview", - "linked", - "downloaded", - "created_at", - "posted_at", - "hash", - "model_id" - ] - insertData=[tuple([data[key] for key in ordered_keys]) for data in inputData] + "media_id", + "post_id", + "link", + "directory", + "filename", + "size", + "api_type", + "media_type", + "preview", + "linked", + "downloaded", + "created_at", + "posted_at", + "hash", + "model_id", + ] + insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] curr.executemany(mediaInsertFull, insertData) conn.commit() @@ -317,11 +329,11 @@ def write_media_table_transition(inputData, model_id=None, conn=None, **kwargs): @wrapper.operation_wrapper_async def get_all_medias_transition(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: + model_id=model_id if len(cur.execute(profileUnique).fetchall())>2 else None cur.execute(mediaALLTransition) conn.commit() data = [dict(row) for row in 
cur.fetchall()] - return data - + return [dict(row,model_id=row.get("model_id") or model_id) for row in data] @wrapper.operation_wrapper_async @@ -336,7 +348,11 @@ def get_messages_media(conn=None, model_id=None, **kwargs) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(getMessagesMedia, [model_id]) data = [dict(row) for row in cur.fetchall()] - return [dict(ele,posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) for ele in data] + return [ + dict(ele, posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) + for ele in data + ] + @run @wrapper.operation_wrapper_async @@ -344,7 +360,11 @@ def get_archived_media(conn=None, model_id=None, **kwargs) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(getArchivedMedia, [model_id]) data = [dict(row) for row in cur.fetchall()] - return [dict(ele,posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) for ele in data] + return [ + dict(ele, posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) + for ele in data + ] + @run @wrapper.operation_wrapper_async @@ -352,7 +372,10 @@ def get_timeline_media(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(getTimelineMedia, [model_id]) data = [dict(row) for row in cur.fetchall()] - return [dict(ele,posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) for ele in data] + return [ + dict(ele, posted_at=arrow.get(ele.get("posted_at") or 0).float_timestamp) + for ele in data + ] def update_media_table_via_api_helper( @@ -369,24 +392,32 @@ def update_media_table_via_api_helper( media.postdate, model_id, media.id, - model_id + model_id, ] curr.execute(mediaUpdateAPI, insertData) conn.commit() def update_media_table_download_helper( - media,model_id, filename=None, hashdata=None, conn=None, downloaded=None, curr=None, **kwargs + media, + model_id, + filename=None, + hashdata=None, + conn=None, + downloaded=None, + curr=None, + **kwargs, ) -> list: prevData = curr.execute(mediaDownloadSelect, (media.id,)).fetchall() prevData = prevData[0] if isinstance(prevData, list) and bool(prevData) else None insertData = media_exist_insert_helper( filename=filename, hashdata=hashdata, prevData=prevData, downloaded=downloaded ) - insertData.extend([media.id,model_id]) + insertData.extend([media.id, model_id]) curr.execute(mediaUpdateDownload, insertData) conn.commit() + def media_exist_insert_helper( filename=None, downloaded=None, hashdata=None, prevData=None ): diff --git a/ofscraper/db/operations_/messages.py b/ofscraper/db/operations_/messages.py index bf8401da4..7ecb2da89 100644 --- a/ofscraper/db/operations_/messages.py +++ b/ofscraper/db/operations_/messages.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import contextlib import logging import sqlite3 @@ -18,15 +19,14 @@ from rich.console import Console import ofscraper.db.operations_.helpers as helpers +import ofscraper.db.operations_.media as media import ofscraper.db.operations_.wrapper as wrapper import ofscraper.utils.args.read as read_args -import ofscraper.db.operations_.media as media - console = Console() log = logging.getLogger("shared") -#user_id==sender +# user_id==sender messagesCreate = """ CREATE TABLE IF NOT EXISTS messages ( id INTEGER NOT NULL, @@ -89,7 +89,7 @@ def update_messages_table(messages: dict, model_id=None, conn=None, **kwargs): message.fromuser, model_id, message.id, - model_id + model_id, ), messages, ) @@ -125,7 +125,16 @@ def 
write_messages_table_transition( inputData: list, model_id=None, conn=None, **kwargs ): with contextlib.closing(conn.cursor()) as cur: - ordered_keys=["post_id", "text","price","paid","archived", "created_at","user_id","model_id"] + ordered_keys = [ + "post_id", + "text", + "price", + "paid", + "archived", + "created_at", + "user_id", + "model_id", + ] insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] cur.executemany(messagesInsert, insertData) conn.commit() @@ -142,8 +151,7 @@ def get_all_messages_ids(model_id=None, username=None, conn=None) -> list: def get_all_messages_transition(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(messagesALLTransition) - return [dict(row)for row in cur.fetchall()] - + return [dict(row) for row in cur.fetchall()] @wrapper.operation_wrapper_async @@ -158,10 +166,11 @@ def get_messages_post_info(model_id=None, username=None, conn=None, **kwargs) -> with contextlib.closing(conn.cursor()) as cur: cur.execute(messagesData, [model_id]) conn.commit() - data=[dict(row) for row in cur.fetchall()] - return [dict(ele,created_at=arrow.get(ele.get("created_at")).float_timestamp) for ele in data] - - + data = [dict(row) for row in cur.fetchall()] + return [ + dict(ele, created_at=arrow.get(ele.get("created_at")).float_timestamp) + for ele in data + ] @wrapper.operation_wrapper_async @@ -169,7 +178,9 @@ def add_column_messages_ID(conn=None, **kwargs): with contextlib.closing(conn.cursor()) as cur: try: # Separate statements with conditional execution - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('messages') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('messages') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] if alter_required == 0: cur.execute("ALTER TABLE messages ADD COLUMN model_id INTEGER;") @@ -180,6 +191,7 @@ def add_column_messages_ID(conn=None, **kwargs): conn.rollback() raise e + async def modify_unique_constriant_messages(model_id=None, username=None): data = await get_all_messages_transition(model_id=model_id, username=username) await drop_messages_table(model_id=model_id, username=username) diff --git a/ofscraper/db/operations_/others.py b/ofscraper/db/operations_/others.py index 4ac4b7677..361628754 100644 --- a/ofscraper/db/operations_/others.py +++ b/ofscraper/db/operations_/others.py @@ -10,9 +10,11 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import contextlib import logging import sqlite3 + from rich.console import Console import ofscraper.db.operations_.wrapper as wrapper @@ -112,7 +114,9 @@ def add_column_other_ID(conn=None, **kwargs): try: # Separate statements with conditional execution - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('others') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('others') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] if alter_required == 0: cur.execute("ALTER TABLE others ADD COLUMN model_id INTEGER;") @@ -130,7 +134,9 @@ def add_column_products_ID(conn=None, **kwargs): try: # Separate statements with conditional execution - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('products') WHERE name = 'model_id') THEN 1 ELSE 0 END AS 
alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('products') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] if alter_required == 0: cur.execute("ALTER TABLE products ADD COLUMN model_id INTEGER;") @@ -142,7 +148,6 @@ def add_column_products_ID(conn=None, **kwargs): raise e - @wrapper.operation_wrapper_async def create_schema_table(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: @@ -164,7 +169,6 @@ def add_flag_schema(flag, model_id=None, username=None, conn=None): conn.commit() - @wrapper.operation_wrapper_async def get_all_others_transition(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: @@ -172,7 +176,6 @@ def get_all_others_transition(model_id=None, username=None, conn=None): return [dict(row) for row in cur.fetchall()] - @wrapper.operation_wrapper_async def drop_others_table(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: @@ -185,7 +188,7 @@ def write_others_table_transition( inputData, model_id=None, conn=None, **kwargs ) -> list: with contextlib.closing(conn.cursor()) as cur: - ordered_keys=[ "text","price","paid","archived","created_at","model_id" ] + ordered_keys = ["text", "price", "paid", "archived", "created_at", "model_id"] insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] cur.executemany(othersInsert, insertData) conn.commit() @@ -210,7 +213,7 @@ def write_products_table_transition( inputData, model_id=None, conn=None, **kwargs ) -> list: with contextlib.closing(conn.cursor()) as cur: - ordered_keys=[ "text","price","paid","archived","created_at","model_id" ] + ordered_keys = ["text", "price", "paid", "archived", "created_at", "model_id"] insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] cur.executemany(productsInsert, insertData) conn.commit() diff --git a/ofscraper/db/operations_/posts.py b/ofscraper/db/operations_/posts.py index 8c37b065a..a04971c1a 100644 --- a/ofscraper/db/operations_/posts.py +++ b/ofscraper/db/operations_/posts.py @@ -10,9 +10,11 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import contextlib import logging import sqlite3 + import arrow from rich.console import Console @@ -94,7 +96,16 @@ def write_post_table_transition( inputData: list, model_id=None, username=None, conn=None ): with contextlib.closing(conn.cursor()) as cur: - ordered_keys = ('post_id', 'text', 'price', 'paid', 'archived', "pinned",'created_at',"model_id") + ordered_keys = ( + "post_id", + "text", + "price", + "paid", + "archived", + "pinned", + "created_at", + "model_id", + ) insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] cur.executemany(postInsert, insertData) conn.commit() @@ -113,7 +124,7 @@ def update_posts_table(posts: list, model_id=None, username=None, conn=None): data.date, model_id, data.id, - model_id + model_id, ], posts, ) @@ -126,9 +137,11 @@ def update_posts_table(posts: list, model_id=None, username=None, conn=None): def get_timeline_postsinfo(model_id=None, username=None, conn=None, **kwargs) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(timelinePostInfo, [model_id]) - data=[dict(row) for row in cur.fetchall()] - return [dict(ele,created_at=arrow.get(ele.get("created_at")).float_timestamp) for ele in data] - + data = [dict(row) for row in cur.fetchall()] + return [ + dict(ele, 
created_at=arrow.get(ele.get("created_at")).float_timestamp) + for ele in data + ] @wrapper.operation_wrapper_async @@ -149,8 +162,8 @@ def get_all_post_ids(model_id=None, username=None, conn=None) -> list: def get_all_posts_transition(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: cur.execute(postsALLTransition) - data=[dict(row) for row in cur.fetchall()] - return [dict(row,pinned=row.get("pinned")) for row in data] + data = [dict(row) for row in cur.fetchall()] + return [dict(row, pinned=row.get("pinned")) for row in data] @wrapper.operation_wrapper_async @@ -165,7 +178,9 @@ def add_column_post_ID(conn=None, **kwargs): with contextlib.closing(conn.cursor()) as cur: try: # Check if column exists (separate statement) - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('posts') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('posts') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] # Fetch the result (0 or 1) # Add column if necessary (conditional execution) @@ -183,7 +198,9 @@ def add_column_post_pinned(conn=None, **kwargs): with contextlib.closing(conn.cursor()) as cur: try: # Check if column exists (separate statement) - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('posts') WHERE name = 'pinned') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('posts') WHERE name = 'pinned') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] # Fetch the result (0 or 1) # Add column if necessary (conditional execution) @@ -191,7 +208,7 @@ def add_column_post_pinned(conn=None, **kwargs): cur.execute("ALTER TABLE posts ADD COLUMN pinned INTEGER;") # Commit changes conn.commit() - + except sqlite3.Error as e: conn.rollback() raise e # Rollback in case of errors @@ -202,8 +219,11 @@ def get_archived_postinfo(model_id=None, username=None, conn=None, **kwargs) -> with contextlib.closing(conn.cursor()) as cur: cur.execute(archivedPostInfo, [model_id]) conn.commit() - data=[dict(row) for row in cur.fetchall()] - return [dict(ele,created_at=arrow.get(ele.get("created_at")).float_timestamp) for ele in data] + data = [dict(row) for row in cur.fetchall()] + return [ + dict(ele, created_at=arrow.get(ele.get("created_at")).float_timestamp) + for ele in data + ] async def modify_unique_constriant_posts(model_id=None, username=None): @@ -229,6 +249,7 @@ async def get_last_archived_date(model_id=None, username=None): data = await media.get_archived_media(model_id=model_id, username=username) return sorted(data, key=lambda x: x["posted_at"] or 0)[-1].get("posted_at") or 0 + async def get_last_timeline_date(model_id=None, username=None): data = await media.get_timeline_media(model_id=model_id, username=username) - return sorted(data, key=lambda x: x["posted_at"] or 0)[-1].get("posted_at") or 0 \ No newline at end of file + return sorted(data, key=lambda x: x["posted_at"] or 0)[-1].get("posted_at") or 0 diff --git a/ofscraper/db/operations_/profile.py b/ofscraper/db/operations_/profile.py index dc1c16229..31fc30030 100644 --- a/ofscraper/db/operations_/profile.py +++ b/ofscraper/db/operations_/profile.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import contextlib import logging import pathlib @@ -23,7 +24,7 @@ console = Console() log = 
logging.getLogger("shared") -#user_id==modes.id cause of legacy +# user_id==modes.id cause of legacy profilesCreate = """ CREATE TABLE IF NOT EXISTS profiles ( id INTEGER NOT NULL, @@ -43,6 +44,7 @@ userNameList = """ SELECT username FROM profiles where user_id=(?) """ + profileTableCheck = """ SELECT name FROM sqlite_master WHERE type='table' AND name='profiles'; """ @@ -84,10 +86,8 @@ def get_profile_info(model_id=None, username=None, conn=None) -> list: return None with contextlib.closing(conn.cursor()) as cur: try: - cur.execute( - userNameList, ([model_id]) - ) - return (list(map(lambda x: x[0], cur.fetchall())) or [None] )[0] + cur.execute(userNameList, ([model_id])) + return (list(map(lambda x: x[0], cur.fetchall())) or [None])[0] except sqlite3.OperationalError: None except Exception as E: @@ -104,7 +104,7 @@ def create_profile_table(model_id=None, username=None, conn=None): @wrapper.operation_wrapper_async def write_profile_table(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: - insertData = [model_id, username,model_id,username] + insertData = [model_id, username, model_id, username] cur.execute(profileInsert, insertData) conn.commit() @@ -129,7 +129,6 @@ def check_profile_table_exists(model_id=None, username=None, conn=None): return False - @wrapper.operation_wrapper_async def get_all_profiles(model_id=None, username=None, conn=None) -> list: database_path = placeholder.databasePlaceholder().databasePathHelper( @@ -165,7 +164,7 @@ def create_models_table(model_id=None, username=None, conn=None): @wrapper.operation_wrapper_async def write_models_table(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: - cur.execute(modelInsert, [model_id,model_id]) + cur.execute(modelInsert, [model_id, model_id]) conn.commit() diff --git a/ofscraper/db/operations_/stories.py b/ofscraper/db/operations_/stories.py index 6ecebe29f..0ff106966 100644 --- a/ofscraper/db/operations_/stories.py +++ b/ofscraper/db/operations_/stories.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import contextlib import logging import sqlite3 @@ -61,6 +62,7 @@ SELECT post_id FROM stories """ + @wrapper.operation_wrapper_async def create_stories_table(model_id=None, username=None, conn=None): with contextlib.closing(conn.cursor()) as cur: @@ -95,7 +97,15 @@ def write_stories_table_transition( inputData: dict, model_id=None, username=None, conn=None ): with contextlib.closing(conn.cursor()) as cur: - ordered_keys=["post_id", "text","price","paid","archived", "created_at","model_id"] + ordered_keys = [ + "post_id", + "text", + "price", + "paid", + "archived", + "created_at", + "model_id", + ] insertData = [tuple([data[key] for key in ordered_keys]) for data in inputData] cur.executemany(storiesInsert, insertData) conn.commit() @@ -115,7 +125,7 @@ def update_stories_table(stories: dict, model_id=None, username=None, conn=None) data.date, model_id, data.id, - model_id + model_id, ), stories, ) @@ -144,7 +154,9 @@ def add_column_stories_ID(conn=None, **kwargs): with contextlib.closing(conn.cursor()) as cur: try: # Separate statements with conditional execution - cur.execute("SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('stories') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;") + cur.execute( + "SELECT CASE WHEN EXISTS (SELECT 1 FROM PRAGMA_TABLE_INFO('stories') WHERE name = 'model_id') THEN 1 ELSE 0 END AS alter_required;" + ) alter_required = cur.fetchone()[0] if 
alter_required == 0: cur.execute("ALTER TABLE stories ADD COLUMN model_id INTEGER;") @@ -156,8 +168,6 @@ def add_column_stories_ID(conn=None, **kwargs): raise e - - @wrapper.operation_wrapper_async def drop_stories_table(model_id=None, username=None, conn=None) -> list: with contextlib.closing(conn.cursor()) as cur: diff --git a/ofscraper/db/operations_/wrapper.py b/ofscraper/db/operations_/wrapper.py index 6196e2160..8bb74e81c 100644 --- a/ofscraper/db/operations_/wrapper.py +++ b/ofscraper/db/operations_/wrapper.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import logging import sqlite3 diff --git a/ofscraper/download/alt_download.py b/ofscraper/download/alt_download.py index c97b6c4e1..238c21aca 100644 --- a/ofscraper/download/alt_download.py +++ b/ofscraper/download/alt_download.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import pathlib import re @@ -191,9 +192,11 @@ async def alt_download_downloader(item, c, ele, job_progress): try: _attempt = common.alt_attempt_get(item) _attempt.set(_attempt.get(0) + 1) - pathlib.Path(placeholderObj.tempfilepath).unlink( - missing_ok=True - ) if _attempt.get() > 1 else None + ( + pathlib.Path(placeholderObj.tempfilepath).unlink(missing_ok=True) + if _attempt.get() > 1 + else None + ) data = await asyncio.get_event_loop().run_in_executor( common_globals.cache_thread, partial(cache.get, f"{item['name']}_headers"), diff --git a/ofscraper/download/alt_downloadbatch.py b/ofscraper/download/alt_downloadbatch.py index 3fbea7eb4..c7598743e 100644 --- a/ofscraper/download/alt_downloadbatch.py +++ b/ofscraper/download/alt_downloadbatch.py @@ -188,9 +188,11 @@ async def alt_download_downloader( try: _attempt = common.alt_attempt_get(item) _attempt.set(_attempt.get(0) + 1) - pathlib.Path(placeholderObj.tempfilepath).unlink( - missing_ok=True - ) if _attempt.get() > 1 else None + ( + pathlib.Path(placeholderObj.tempfilepath).unlink(missing_ok=True) + if _attempt.get() > 1 + else None + ) data = await asyncio.get_event_loop().run_in_executor( common_globals.cache_thread, partial(cache.get, f"{item['name']}_headers"), diff --git a/ofscraper/download/common/common.py b/ofscraper/download/common/common.py index 9ee87b82e..7bb02a956 100644 --- a/ofscraper/download/common/common.py +++ b/ofscraper/download/common/common.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import pathlib from functools import partial, singledispatch @@ -139,9 +140,7 @@ def get_unknown_content_type(ele): return ( "mp4" if ele.mediatype.lower() == "videos" - else "jpg" - if ele.mediatype.lower() == "images" - else None + else "jpg" if ele.mediatype.lower() == "images" else None ) diff --git a/ofscraper/download/common/metadata.py b/ofscraper/download/common/metadata.py index 04afc8478..6c91aea17 100644 --- a/ofscraper/download/common/metadata.py +++ b/ofscraper/download/common/metadata.py @@ -33,9 +33,11 @@ async def metadata(c, ele, username, model_id, placeholderObj=None): downloaded=await metadata_downloaded_helper(placeholderObj), ) return ( - ele.mediatype - if await metadata_downloaded_helper(placeholderObj) - else "forced_skipped", + ( + ele.mediatype + if await metadata_downloaded_helper(placeholderObj) + else "forced_skipped" + ), 0, ) elif download_data and download_data.get("content-type"): @@ -50,9 +52,11 @@ async def metadata(c, ele, username, model_id, placeholderObj=None): downloaded=await 
metadata_downloaded_helper(placeholderObj), ) return ( - ele.mediatype - if await metadata_downloaded_helper(placeholderObj) - else "forced_skipped", + ( + ele.mediatype + if await metadata_downloaded_helper(placeholderObj) + else "forced_skipped" + ), 0, ) elif _ == 1: @@ -87,7 +91,7 @@ async def metadata(c, ele, username, model_id, placeholderObj=None): async def metadata_downloaded_helper(placeholderObj): - placeholderObj=await placeholderObj.init() + placeholderObj = await placeholderObj.init() if read_args.retriveArgs().metadata == "none": return None diff --git a/ofscraper/download/download.py b/ofscraper/download/download.py index efdea1940..fba6b24d9 100644 --- a/ofscraper/download/download.py +++ b/ofscraper/download/download.py @@ -15,7 +15,6 @@ from ofscraper.utils.context.run_async import run - def medialist_filter(medialist, model_id, username): log = logging.getLogger("shared") if not read_args.retriveArgs().dupe: diff --git a/ofscraper/download/downloadnormal.py b/ofscraper/download/downloadnormal.py index d65c0db84..58c1f7027 100644 --- a/ofscraper/download/downloadnormal.py +++ b/ofscraper/download/downloadnormal.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import logging import traceback diff --git a/ofscraper/download/main_download.py b/ofscraper/download/main_download.py index f5376a894..3732be0ef 100644 --- a/ofscraper/download/main_download.py +++ b/ofscraper/download/main_download.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import pathlib import traceback @@ -142,9 +143,11 @@ async def main_download_downloader(c, ele, job_progress): try: data = await get_data(ele) common_globals.attempt.set(common_globals.attempt.get(0) + 1) - pathlib.Path(tempholderObj.tempfilepath).unlink( - missing_ok=True - ) if common_globals.attempt.get() > 1 else None + ( + pathlib.Path(tempholderObj.tempfilepath).unlink(missing_ok=True) + if common_globals.attempt.get() > 1 + else None + ) if data: return await main_data_handler( data, c, tempholderObj, ele, job_progress diff --git a/ofscraper/download/main_downloadbatch.py b/ofscraper/download/main_downloadbatch.py index 6b5db714c..b04eb763f 100644 --- a/ofscraper/download/main_downloadbatch.py +++ b/ofscraper/download/main_downloadbatch.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import asyncio import pathlib import traceback @@ -139,9 +140,11 @@ async def main_download_downloader(c, ele): with _: try: common_globals.attempt.set(common_globals.attempt.get(0) + 1) - pathlib.Path(tempholderObj.tempfilepath).unlink( - missing_ok=True - ) if common_globals.attempt.get() > 1 else None + ( + pathlib.Path(tempholderObj.tempfilepath).unlink(missing_ok=True) + if common_globals.attempt.get() > 1 + else None + ) data = await get_data(ele) if data: return await main_data_handler(data, c, ele, tempholderObj) diff --git a/ofscraper/prompts/prompt_groups/area.py b/ofscraper/prompts/prompt_groups/area.py index a8790ab24..e04026ee2 100644 --- a/ofscraper/prompts/prompt_groups/area.py +++ b/ofscraper/prompts/prompt_groups/area.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + from InquirerPy.base import Choice from prompt_toolkit.shortcuts import prompt as prompt @@ -26,9 +27,11 @@ def areas_prompt() -> list: message = ( "Which area(s) would you do you want to download and like" if "like" in args.action and len(args.like_area) == 0 - else "Which area(s) 
would you want to download and unlike" - if "unike" in args.action and len(args.like_area) == 0 - else "Which area(s) would you like to download" + else ( + "Which area(s) would you want to download and unlike" + if "unike" in args.action and len(args.like_area) == 0 + else "Which area(s) would you like to download" + ) ) more_instruction = ( """Hint: Since you have Like or Unlike set diff --git a/ofscraper/prompts/prompt_groups/auth.py b/ofscraper/prompts/prompt_groups/auth.py index 05364e23a..03aaebb7b 100644 --- a/ofscraper/prompts/prompt_groups/auth.py +++ b/ofscraper/prompts/prompt_groups/auth.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import sys from InquirerPy.base import Choice diff --git a/ofscraper/prompts/prompt_groups/binary.py b/ofscraper/prompts/prompt_groups/binary.py index 14245198f..fb87edfbd 100644 --- a/ofscraper/prompts/prompt_groups/binary.py +++ b/ofscraper/prompts/prompt_groups/binary.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import copy import json import logging diff --git a/ofscraper/prompts/prompt_groups/config.py b/ofscraper/prompts/prompt_groups/config.py index eac0632ca..5f75ede02 100644 --- a/ofscraper/prompts/prompt_groups/config.py +++ b/ofscraper/prompts/prompt_groups/config.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import json import logging import os @@ -487,9 +488,11 @@ def advanced_config() -> dict: "name": "custom", "message": "edit custom value:\n", "option_instruction": "This is a helper value for remapping placeholder values", - "default": json.dumps(custom.get_custom()) - if not isinstance(custom.get_custom(), str) - else custom.get_custom() or "", + "default": ( + json.dumps(custom.get_custom()) + if not isinstance(custom.get_custom(), str) + else custom.get_custom() or "" + ), }, { "type": "list", diff --git a/ofscraper/prompts/prompt_groups/menu.py b/ofscraper/prompts/prompt_groups/menu.py index 47e09d2b7..b01925a10 100644 --- a/ofscraper/prompts/prompt_groups/menu.py +++ b/ofscraper/prompts/prompt_groups/menu.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + from InquirerPy.separator import Separator from prompt_toolkit.shortcuts import prompt as prompt diff --git a/ofscraper/prompts/prompt_groups/model.py b/ofscraper/prompts/prompt_groups/model.py index 7a1e5c630..e5c45c7c8 100644 --- a/ofscraper/prompts/prompt_groups/model.py +++ b/ofscraper/prompts/prompt_groups/model.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import inspect import arrow @@ -82,11 +83,11 @@ def modify_subtype_prompt(args): { "type": "list", "name": "renewal", - "default": True - if read_args.retriveArgs().renewal - else False - if read_args.retriveArgs().renewal == False - else None, + "default": ( + True + if read_args.retriveArgs().renewal + else False if read_args.retriveArgs().renewal == False else None + ), "message": "Filter account by whether it has a renewal date", "choices": [ Choice(True, "Renewal On"), @@ -97,11 +98,11 @@ def modify_subtype_prompt(args): { "type": "list", "name": "expire", - "default": True - if read_args.retriveArgs().sub_status - else False - if read_args.retriveArgs().sub_status == False - else None, + "default": ( + True + if read_args.retriveArgs().sub_status + else False if read_args.retriveArgs().sub_status == False else None + ), "message": "Filter accounts based on access to content via a subscription", "choices": [ 
Choice(True, "Active Only"), @@ -140,11 +141,13 @@ def modify_active_prompt(args): """, "validate": prompt_validators.datevalidator(), "filter": lambda x: arrow.get(x or 0), - "default": arrow.get(read_args.retriveArgs().last_seen_after).format( - constants.getattr("PROMPT_DATE_FORMAT") - ) - if read_args.retriveArgs().last_seen_after - else "", + "default": ( + arrow.get(read_args.retriveArgs().last_seen_after).format( + constants.getattr("PROMPT_DATE_FORMAT") + ) + if read_args.retriveArgs().last_seen_after + else "" + ), }, { "type": "input", @@ -154,11 +157,13 @@ def modify_active_prompt(args): Otherwise must be in date format""", "validate": prompt_validators.datevalidator(), "filter": lambda x: arrow.get(x or 0), - "default": arrow.get(read_args.retriveArgs().last_seen_before).format( - constants.getattr("PROMPT_DATE_FORMAT") - ) - if read_args.retriveArgs().last_seen_before - else "", + "default": ( + arrow.get(read_args.retriveArgs().last_seen_before).format( + constants.getattr("PROMPT_DATE_FORMAT") + ) + if read_args.retriveArgs().last_seen_before + else "" + ), }, ], more_instructions=""" @@ -200,11 +205,11 @@ def modify_promo_prompt(args): { "type": "list", "name": "free-trial", - "default": True - if read_args.retriveArgs().free_trial == True - else False - if read_args.retriveArgs().free_trial == False - else None, + "default": ( + True + if read_args.retriveArgs().free_trial == True + else False if read_args.retriveArgs().free_trial == False else None + ), "message": "Filter Accounts By whether the account is a free trial", "choices": [ Choice(True, "Free Trial only"), @@ -227,11 +232,11 @@ def modify_promo_prompt(args): "type": "list", "name": "promo", "message": "Which kind of promo(s) do you want to enable", - "default": True - if read_args.retriveArgs().promo - else False - if read_args.retriveArgs().promo == False - else None, + "default": ( + True + if read_args.retriveArgs().promo + else False if read_args.retriveArgs().promo == False else None + ), "choices": [ Choice({"all_promo": True, "promo": True}, "Any Promo"), Choice( @@ -268,11 +273,15 @@ def modify_promo_prompt(args): "type": "list", "name": promo_type, "message": f"Filter accounts presence of {'Any Promotions' if promo_type=='all_promo' else 'Claimable Promotions'}", - "default": True - if vars(read_args.retriveArgs())[promo_type] - else False - if vars(read_args.retriveArgs())[promo_type] == False - else None, + "default": ( + True + if vars(read_args.retriveArgs())[promo_type] + else ( + False + if vars(read_args.retriveArgs())[promo_type] == False + else None + ) + ), "choices": [ Choice(True, "Promotions Only"), Choice(False, "No Promotions"), diff --git a/ofscraper/prompts/prompt_groups/profile.py b/ofscraper/prompts/prompt_groups/profile.py index 7ec3253c7..c363821c6 100644 --- a/ofscraper/prompts/prompt_groups/profile.py +++ b/ofscraper/prompts/prompt_groups/profile.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + from InquirerPy.separator import Separator from InquirerPy.validator import EmptyInputValidator from prompt_toolkit.shortcuts import prompt as prompt diff --git a/ofscraper/utils/actions.py b/ofscraper/utils/actions.py index 04e23d82d..d2f089dd9 100644 --- a/ofscraper/utils/actions.py +++ b/ofscraper/utils/actions.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import ofscraper.prompts.prompts as prompts import ofscraper.utils.args.areas as areas import ofscraper.utils.args.read as read_args diff --git 
a/ofscraper/utils/auth/helpers.py b/ofscraper/utils/auth/helpers.py index 466002e0a..e3500176b 100644 --- a/ofscraper/utils/auth/helpers.py +++ b/ofscraper/utils/auth/helpers.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import json import logging diff --git a/ofscraper/utils/config/config.py b/ofscraper/utils/config/config.py index e6fcbc31b..f873398dc 100644 --- a/ofscraper/utils/config/config.py +++ b/ofscraper/utils/config/config.py @@ -10,6 +10,7 @@ (_______)|/ \_______)(_______/|/ \__/|/ \||/ (_______/|/ \__/ """ + import logging from humanfriendly import parse_size diff --git a/ofscraper/utils/config/schema.py b/ofscraper/utils/config/schema.py index feb08560c..6954504b1 100644 --- a/ofscraper/utils/config/schema.py +++ b/ofscraper/utils/config/schema.py @@ -8,9 +8,9 @@ def get_current_config_schema(config: dict = None) -> dict: if isinstance(config, dict) and config.get("config"): config = config["config"] new_config = { - "main_profile" - if config == False - else constants.getattr("mainProfile"): data.get_main_profile(config=config), + ( + "main_profile" if config == False else constants.getattr("mainProfile") + ): data.get_main_profile(config=config), "metadata": data.get_metadata(config=config), "discord": data.get_discord(config=config), "file_options": { diff --git a/ofscraper/utils/config/wrapper.py b/ofscraper/utils/config/wrapper.py index 00e4e5ca1..99408a8d7 100644 --- a/ofscraper/utils/config/wrapper.py +++ b/ofscraper/utils/config/wrapper.py @@ -9,9 +9,7 @@ def inner(**kwargs): configT = ( False if config == False - else config - if config != None - else config_file.open_config() + else config if config != None else config_file.open_config() ) return func(config=configT, **kwargs) diff --git a/ofscraper/utils/settings.py b/ofscraper/utils/settings.py index f16c20a60..75abd10a4 100644 --- a/ofscraper/utils/settings.py +++ b/ofscraper/utils/settings.py @@ -139,5 +139,8 @@ def get_ffmpeg(): or "" ) + def get_after_enabled(): - return (read_args.retriveArgs().after is not None or not config_data.get_disable_after()) + return ( + read_args.retriveArgs().after is not None or not config_data.get_disable_after() + )