basic unique check
datawhores committed Nov 28, 2023
1 parent a04f3db commit 3d84ea1
Showing 4 changed files with 35 additions and 9 deletions.
22 changes: 19 additions & 3 deletions ofscraper/classes/placeholder.py
@@ -182,9 +182,25 @@ def createfilename(self,ele,username,model_id,ext):
return out





    def check_uniquename(self):
        format=config_.get_fileformat(config_.read_config())
        if re.search("text",format):
            return True
        elif re.search("filename",format):
            return True
        elif re.search("post_id",format):
            return True
        elif re.search("postid",format):
            return True
        elif re.search("media_id",format):
            return True
        elif re.search("mediaid",format):
            return True
        elif re.search("custom",format):
            return True
        return False





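For reference, the chained re.search branches in check_uniquename above reduce to a single membership test over a fixed set of identifier-bearing tokens. A minimal standalone sketch of the same logic follows; the module-level function and the UNIQUE_TOKENS name are illustrative, not part of the commit:

import re

UNIQUE_TOKENS = ("text", "filename", "post_id", "postid", "media_id", "mediaid", "custom")

def format_is_unique(fileformat):
    # True when the configured filename format contains at least one token
    # that ties the generated name to a specific post or media item.
    return any(re.search(token, fileformat) for token in UNIQUE_TOKENS)

Behaviour matches the diff: any one matching token is enough, and a format with none of them is reported as potentially non-unique.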
16 changes: 13 additions & 3 deletions ofscraper/commands/scraper.py
@@ -8,10 +8,8 @@
\/ \/ \/ \/
"""

import asyncio
import time
import os
import sys
import platform
import time
import traceback
@@ -41,7 +39,8 @@
import ofscraper.utils.of as OF
import ofscraper.utils.exit as exit
import ofscraper.utils.misc as misc

import ofscraper.classes.placeholder as placeholder
import ofscraper.constants as constants
log=logging.getLogger("shared")

@exit.exit_wrapper
@@ -130,6 +129,12 @@ def process_post():
@exit.exit_wrapper
def process_post_user_first():
with scrape_context_manager():
if not placeholder.Placeholders().check_uniquename():
log.error("[red]Warning: Your generated filenames may not be unique\n \
https://of-scraper.gitbook.io/of-scraper/config-options/customizing-save-path#warning[/red] \
")
time.sleep(constants.LOG_DISPLAY_TIMEOUT*3)

profiles.print_current_profile()
init.print_sign_status()
if args_.getargs().users_first:
@@ -169,6 +174,11 @@ def process_post_user_first():
@exit.exit_wrapper
def normal_post_process():
with scrape_context_manager():
if not placeholder.Placeholders().check_uniquename():
log.error("[red]Warning: Your generated filenames may not be unique\n \
https://of-scraper.gitbook.io/of-scraper/config-options/customizing-save-path#warning[/red] \
")
time.sleep(constants.LOG_DISPLAY_TIMEOUT*3)
profiles.print_current_profile()
init.print_sign_status()
userdata=userselector.getselected_usernames(rescan=False)
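Both commands now run the same guard before scraping: if the configured format fails check_uniquename, a warning is logged and execution pauses long enough for the message to be read. A hedged sketch of that pattern in isolation (the helper name is hypothetical; LOG_DISPLAY_TIMEOUT mirrors the constant added below):

import logging
import time

LOG_DISPLAY_TIMEOUT = .8  # value taken from ofscraper/constants.py in this commit
log = logging.getLogger("shared")

def warn_if_not_unique(is_unique):
    # Mirrors the block added to process_post_user_first and normal_post_process:
    # emit the rich-markup warning, then sleep so it stays visible before the
    # scrape output starts scrolling.
    if not is_unique:
        log.error(
            "[red]Warning: Your generated filenames may not be unique\n"
            "https://of-scraper.gitbook.io/of-scraper/config-options/customizing-save-path#warning[/red]"
        )
        time.sleep(LOG_DISPLAY_TIMEOUT * 3)

The [red]...[/red] markup only renders as colour when the logger is backed by a Rich handler, as the shared logger in this project is; with a plain handler it would appear literally.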
2 changes: 1 addition & 1 deletion ofscraper/constants.py
@@ -189,4 +189,4 @@
CDRM='https://cdrm-project.com/wv'

LOGGER_TIMEOUT=180
USER_LOOP_TIME=.8
LOG_DISPLAY_TIMEOUT=.8
4 changes: 2 additions & 2 deletions ofscraper/utils/userselector.py
@@ -50,7 +50,7 @@ def all_subs_helper():
elif len(ALL_SUBS)==0:
print("No accounts found during scan")
#give log time to process
time.sleep(constants.USER_LOOP_TIME)
time.sleep(constants.LOG_DISPLAY_TIMEOUT)
if not prompts.retry_user_scan():
raise Exception("Could not find any accounts on list")

@@ -121,7 +121,7 @@ def filterNSort(usernames):
filterusername=list(filter(lambda x:x["name"] not in args_.getargs().excluded_username ,filterusername))
log.debug(f"final username count with all filters: {len(filterusername)}")
#give log time to process
time.sleep(constants.USER_LOOP_TIME)
time.sleep(constants.LOG_DISPLAY_TIMEOUT)
if len(filterusername)!=0:
return sort_models_helper(filterusername)
print(
