Skip to content

Commit

Permalink
merge
Browse files Browse the repository at this point in the history
  • Loading branch information
datawhores committed Mar 23, 2024
1 parent b26a073 commit 567d5f2
Show file tree
Hide file tree
Showing 4 changed files with 0 additions and 92 deletions.
9 changes: 0 additions & 9 deletions ofscraper/classes/placeholder.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,18 +186,9 @@ def __init__(self, ele, ext) -> None:
self._ext = ext

async def init(self):
    """Resolve and cache the final on-disk path for this media item.

    Awaits the media directory and generated filename, joins them, and
    stores the length-truncated result on ``self._filepath``.
    """
    # NOTE(review): resolved leftover merge-conflict markers here; both
    # conflict branches built the same path, so behavior is unchanged.
    # Renamed locals so they no longer shadow the builtins `dir`/`file`.
    media_dir = await self.getmediadir()
    file_name = await self.createfilename()
    # paths.truncate presumably guards against OS path-length limits — confirm.
    self._filepath = paths.truncate(pathlib.Path(media_dir, file_name))

def add_price_variables(self, username):
modelObj = selector.get_model_fromParsed(username)
Expand Down
21 changes: 0 additions & 21 deletions ofscraper/commands/check.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,6 @@ def process_download_cart():
list(media_dict.values())[0],
)
)
<<<<<<< HEAD
if settings.get_mediatypes() == ["Text"]:
textDownloader(post_dict.values(), username=username)
elif len(medialist) > 0 and len(settings.get_mediatypes()) > 1:
Expand All @@ -128,26 +127,6 @@ def process_download_cart():
else:
raise Exception("Issue getting download")

=======
media = medialist[0]
model_id = media.post.model_id
username = media.post.username
args = read_args.retriveArgs()
args.username = set([username])
write_args.setArgs(args)
selector.all_subs_helper()
log.info(
f"Downloading individual media ({media.filename}) to disk for {username}"
)
operations.table_init_create(model_id=model_id, username=username)
values = downloadnormal.process_dicts(
username,
model_id,
[media],
)
if values == None or values[-1] == 1:
raise Exception("Download is marked as skipped")
>>>>>>> 4ea84272b579254367eb3be4278df9dc58c2be37
log.info("Download Finished")
app.update_cell(key, "Download_Cart", "[downloaded]")
app.update_cell(key, "Downloaded", True)
Expand Down
54 changes: 0 additions & 54 deletions ofscraper/commands/manual.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@ def get_manual_usernames(media_dict):
def get_media_from_urls(urls):
user_name_dict = {}
media_dict = {}
<<<<<<< HEAD
post_dict = {}
for url in url_helper(urls):
response = get_info(url)
Expand Down Expand Up @@ -99,48 +98,12 @@ def get_media_from_urls(urls):
post_dict.update(get_post_item(model_id, value, "stories"))
# special case
return media_dict, post_dict
=======
with sessionbuilder.sessionBuilder(backend="httpx") as c:
for url in url_helper(urls):
response = get_info(url)
model = response[0]
postid = response[1]
type = response[2]
if type == "post":
model_id = user_name_dict.get(model) or profile.get_id(model)
value = timeline.get_individual_post(postid, c=c)
media_dict.update(get_all_media(postid, model_id, value))
elif type == "msg":
model_id = model
value = messages_.get_individual_post(model_id, postid, c=c)
media_dict.update(get_all_media(postid, model_id, value))
elif type == "msg2":
model_id = user_name_dict.get(model) or profile.get_id(model)
value = messages_.get_individual_post(model_id, postid, c=c)
media_dict.update(get_all_media(postid, model_id, value))
elif type == "unknown":
value = unknown_type_helper(postid, c) or {}
model_id = value.get("author", {}).get("id")
media_dict.update(get_all_media(postid, model_id, value))
elif type == "highlights":
value = highlights_.get_individual_highlights(postid, c) or {}
model_id = value.get("userId")
media_dict.update(get_all_media(postid, model_id, value, "highlights"))
# special case
elif type == "stories":
value = highlights_.get_individual_stories(postid, c) or {}
model_id = value.get("userId")
media_dict.update(get_all_media(postid, model_id, value, "stories"))
# special case
return media_dict
>>>>>>> 4ea84272b579254367eb3be4278df9dc58c2be37


def unknown_type_helper(postid):
    """Fallback lookup for a URL whose post type could not be determined:
    fetch the post individually from the timeline API."""
    fetched = timeline.get_individual_post(postid)
    return fetched


<<<<<<< HEAD
def get_post_item(model_id, value, inputtype=None):
if value == None:
return []
Expand All @@ -149,8 +112,6 @@ def get_post_item(model_id, value, inputtype=None):
return {post.id: post}


=======
>>>>>>> 4ea84272b579254367eb3be4278df9dc58c2be37
def get_all_media(posts_id, model_id, value, inputtype=None):
media_dict = {}
value = value or {}
Expand All @@ -173,7 +134,6 @@ def get_all_media(posts_id, model_id, value, inputtype=None):
return media_dict


<<<<<<< HEAD
@run
async def paid_failback(post_id, model_id, username):
logging.getLogger("shared").debug(
Expand All @@ -194,20 +154,6 @@ async def paid_failback(post_id, model_id, username):
output,
)
)
=======
def paid_failback(post_id, id, username):
logging.getLogger("shared").debug(
"Using failback search because query return 0 media"
)
data = of.process_paid_post(id, username)
return list(
filter(
lambda x: isinstance(x, media_.Media)
and (str(x.id) == post_id or str(x.postid) == post_id),
data,
)
)
>>>>>>> 4ea84272b579254367eb3be4278df9dc58c2be37


def get_info(url):
Expand Down
8 changes: 0 additions & 8 deletions ofscraper/db/operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -382,19 +382,11 @@ def media_insert_helper(media, filename, downloaded=None, hash=None, prevData=No
size = None
if filename and pathlib.Path(filename).exists():
directory = str(pathlib.Path(filename).parent)
<<<<<<< HEAD
filename_path = str(pathlib.Path(filename).name)
size = math.ceil(pathlib.Path(filename).stat().st_size)
elif filename:
directory = str(pathlib.Path(filename).parent)
filename_path = str(pathlib.Path(filename).name)
=======
filename_path = str(pathlib.Path(filename.name))
size = math.ceil(pathlib.Path(filename).stat().st_size)
elif filename:
directory = str(pathlib.Path(filename).parent)
filename_path = str(pathlib.Path(filename.name))
>>>>>>> 4ea84272b579254367eb3be4278df9dc58c2be37
elif prevData:
directory = prevData[3]
filename_path = prevData[4]
Expand Down

0 comments on commit 567d5f2

Please sign in to comment.