fix manual
datawhores committed Feb 17, 2024
1 parent 454852b commit 57a45ae
Showing 3 changed files with 38 additions and 58 deletions.
2 changes: 1 addition & 1 deletion ofscraper/api/timeline.py
@@ -338,7 +338,7 @@ def set_check(unduped, model_id, after):
 
 
 def get_individual_post(id, c=None):
-    with c.requests(constants.getattr("INDVIDUAL_TIMELINE").format(id))() as r:
+    with c.requests(constants.getattr("INDIVIDUAL_TIMELINE").format(id))() as r:
         if r.ok:
             log.trace(f"post raw individual {r.json()}")
             return r.json()
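Note: the fix only holds together because the string passed to constants.getattr in ofscraper/api/timeline.py matches the attribute name defined in ofscraper/const/url.py (renamed below in the same commit). A minimal sketch of that coupling, assuming constants.getattr resolves names roughly like a plain getattr on the const module; the real helper may also consult user config, so treat this as an illustration only:

# Hedged sketch: the lookup string and the constant must be renamed together.
# "url_consts" stands in for ofscraper/const/url.py; constants_getattr_sketch is a
# hypothetical stand-in for the project's constants.getattr helper.
import types

url_consts = types.SimpleNamespace(
    INDIVIDUAL_TIMELINE="https://onlyfans.com/api2/v2/posts/{}?skip_users=all"
)


def constants_getattr_sketch(name):
    return getattr(url_consts, name)


print(constants_getattr_sketch("INDIVIDUAL_TIMELINE").format(123456))
# constants_getattr_sketch("INDVIDUAL_TIMELINE") would raise AttributeError after
# the rename, which is why both files change in the same commit.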
92 changes: 36 additions & 56 deletions ofscraper/commands/manual.py
@@ -21,16 +21,12 @@
 def manual_download(urls=None):
     log = logging.getLogger("shared")
     network.check_cdm()
+    allow_manual_dupes()
     media_dict = get_media_from_urls(urls)
     log.debug(f"Number of values from media dict {len(list(media_dict.values()))}")
-    args = read_args.retriveArgs()
-    args.dupe = True
-    write_args.setArgs(args)
     get_manual_usernames(media_dict)
     selector.all_subs_helper()
-    for value in media_dict.values():
-        if len(value) == 0:
-            continue
+    for value in filter(lambda x: len(x) > 0, media_dict.values()):
         model_id = value[0].post.model_id
         username = value[0].post.username
         log.info(f"Downloading individual media for {username}")
@@ -41,6 +37,12 @@ def manual_download(urls=None):
     log.info(f"Finished")
 
 
+def allow_manual_dupes():
+    args = read_args.retriveArgs()
+    args.dupe = True
+    write_args.setArgs(args)
+
+
 def get_manual_usernames(media_dict):
     usernames = []
     for value in media_dict.values():
@@ -53,11 +55,8 @@ def get_manual_usernames(media_dict):
 
 
 def get_media_from_urls(urls):
-    args = read_args.retriveArgs()
-    args.dupe = True
-    write_args.setArgs(args)
     user_name_dict = {}
-    id_dict = {}
+    media_dict = {}
     with sessionbuilder.sessionBuilder(backend="httpx") as c:
         for url in url_helper(urls):
             response = get_info(url)
@@ -66,68 +65,49 @@ def get_media_from_urls(urls):
             type = response[2]
             if type == "post":
                 model_id = user_name_dict.get(model) or profile.get_id(model)
                 user_name_dict[model] = model_id
-                id_dict[model_id] = id_dict.get(model_id, []) + [
-                    timeline.get_individual_post(postid, c=c)
-                ]
+                value = timeline.get_individual_post(postid, c=c)
+                media_dict.update(get_all_media(model_id, value))
             elif type == "msg":
                 model_id = model
-                data = messages_.get_individual_post(model_id, postid, c=c)
-                if (data or {}).get("id") != postid:
-                    data = paid.get_individual_post(model, model_id, postid)
-                id_dict[model_id] = id_dict.get(model_id, []) + [data]
+                value = messages_.get_individual_post(model_id, postid, c=c)
+                media_dict.update(get_all_media(model_id, value))
             elif type == "msg2":
                 model_id = user_name_dict.get(model) or profile.get_id(model)
-                data = messages_.get_individual_post(model_id, postid, c=c) or {}
-                if (data).get("id") != int(postid):
-                    data = paid.get_individual_post(model, model_id, postid) or {}
-                id_dict[model_id] = id_dict.get(model_id, []) + [data]
+                value = messages_.get_individual_post(model_id, postid, c=c)
+                media_dict.update(get_all_media(model_id, value))
            elif type == "unknown":
-                data = unknown_type_helper(postid, c) or {}
-                model_id = data.get("author", {}).get("id")
-                id_dict[model_id] = id_dict.get(model_id, []) + [data]
+                value = unknown_type_helper(postid, c) or {}
+                model_id = value.get("author", {}).get("id")
+                media_dict.update(get_all_media(model_id, value))
             elif type == "highlights":
-                data = highlights_.get_individual_highlights(postid, c) or {}
-                model_id = data.get("userId")
-                id_dict[model_id] = id_dict.get(model_id, []) + [data]
+                value = highlights_.get_individual_highlights(postid, c) or {}
+                model_id = value.get("userId")
+                media_dict.update(get_all_media(model_id, value, "highlights"))
-                # special case
-                return get_all_media(id_dict, "highlights")
             elif type == "stories":
-                data = highlights_.get_individual_stories(postid, c) or {}
-                model_id = data.get("userId")
-                id_dict[model_id] = id_dict.get(model_id, []) + [data]
+                value = highlights_.get_individual_stories(postid, c) or {}
+                model_id = value.get("userId")
+                media_dict.update(get_all_media(model_id, value, "stories"))
-                # special case
-                return get_all_media(id_dict, "stories")
 
             else:
                 continue
 
-    return get_all_media(id_dict)
+    return media_dict
 
 
 def unknown_type_helper(postid, client):
     # try to get post by id
     return timeline.get_individual_post(postid, client)
 
 
-def get_all_media(id_dict, inputtype=None):
+def get_all_media(model_id, value, inputtype=None):
     media_dict = {}
 
-    for model_id, value in id_dict.items():
-        if model_id == None:
-            continue
-        temp = []
-        user_name = profile.scrape_profile(model_id)["username"]
-        posts_array = list(
-            map(
-                lambda x: posts_.Post(x, model_id, user_name, responsetype=inputtype),
-                value,
-            )
-        )
-        [temp.extend(ele.media) for ele in posts_array]
-        if len(temp) == 0:
-            temp.extend(paid_failback(model_id, user_name))
-        media_dict[model_id] = temp
+    value = value or {}
+    media = []
+    if model_id == None:
+        return {}
+    user_name = profile.scrape_profile(model_id)["username"]
+    post_item = posts_.Post(value, model_id, user_name, responsetype=inputtype)
+    media = post_item.media
+    if len(media) == 0:
+        media.extend(paid_failback(model_id, user_name))
+    media_dict[model_id] = media
     return media_dict
 
 
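Note: the shape of the manual.py refactor, as a standalone sketch. get_all_media now maps a single (model_id, post payload) pair to {model_id: [media]}, and get_media_from_urls merges each per-URL result with dict.update instead of batching everything into an id_dict first. The helpers below are hypothetical stand-ins for posts_.Post and paid_failback, not the project's real classes:

# Hedged sketch of the new per-post flow (simplified, hypothetical stand-ins).
def extract_media_sketch(value):
    # stand-in for posts_.Post(value, ...).media
    return value.get("media", [])


def paid_failback_sketch(model_id):
    # stand-in for paid_failback(model_id, user_name)
    return []


def get_all_media_sketch(model_id, value, inputtype=None):
    # one post payload in, {model_id: [media]} out; a missing model_id yields nothing
    if model_id is None:
        return {}
    media = extract_media_sketch(value or {})
    if not media:
        media = paid_failback_sketch(model_id)
    return {model_id: media}


# per-URL accumulation, mirroring the new get_media_from_urls loop
media_dict = {}
fetched = [(101, {"media": ["a.mp4"]}), (102, {})]  # hypothetical (model_id, payload) pairs
for model_id, value in fetched:
    media_dict.update(get_all_media_sketch(model_id, value))
print(media_dict)  # {101: ['a.mp4'], 102: []}

A side effect visible in the diff: the old "special case" early returns for highlights and stories are gone, so a URL list that mixes those types with regular posts is now processed to the end.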
2 changes: 1 addition & 1 deletion ofscraper/const/url.py
@@ -2,7 +2,7 @@
 LICENCE_URL = "https://onlyfans.com/api2/v2/users/media/{}/drm/{}/{}?type=widevine"
 
 
-INDVIDUAL_TIMELINE = "https://onlyfans.com/api2/v2/posts/{}?skip_users=all"
+INDIVIDUAL_TIMELINE = "https://onlyfans.com/api2/v2/posts/{}?skip_users=all"
 meEP = "https://onlyfans.com/api2/v2/users/me"
 
 subscriptionsEP = "https://onlyfans.com/api2/v2/subscriptions/subscribes?offset={}&limit=10&type=all&format=infinite"
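Note: the rename changes only the constant's name, not the URL it builds; a quick check with a placeholder post id:

INDIVIDUAL_TIMELINE = "https://onlyfans.com/api2/v2/posts/{}?skip_users=all"
assert (
    INDIVIDUAL_TIMELINE.format(123456)
    == "https://onlyfans.com/api2/v2/posts/123456?skip_users=all"
)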
