Skip to content

Commit

Permalink
fix issue with check and manual mode caused by changes to api
Browse files Browse the repository at this point in the history
  • Loading branch information
datawhores committed Mar 12, 2024
1 parent acebd71 commit bc03f38
Show file tree
Hide file tree
Showing 6 changed files with 279 additions and 242 deletions.
22 changes: 11 additions & 11 deletions ofscraper/api/highlights.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,8 +495,8 @@ def get_highlightList(data):
return []


def get_individual_highlights(id, c=None):
return get_individual_stories(id, c)
def get_individual_highlights(id):
    """Fetch a single highlight by its id.

    The API serves individual highlights from the same endpoint as
    individual stories, so this delegates to get_individual_stories.
    Returns whatever that helper returns (decoded JSON on success,
    None on failure).
    """
    # NOTE: parameter name `id` shadows the builtin but is kept for
    # backward compatibility with existing keyword callers.
    return get_individual_stories(id)
Expand All @@ -508,12 +508,12 @@ def get_individual_highlights(id, c=None):


def get_individual_stories(id, c=None):
    """Fetch a single story by its id.

    A fresh httpx-backed session is always opened locally; the `c`
    parameter is retained only for backward compatibility with older
    callers and is ignored.

    Returns the decoded JSON payload on success; on failure logs the
    status, body, and headers and returns None.
    """
    # NOTE(review): the session is no longer injectable after the API
    # refactor — always build our own httpx-backed session here.
    with sessionbuilder.sessionBuilder(backend="httpx") as c:
        with c.requests(constants.getattr("storiesSPECIFIC").format(id))() as r:
            if r.ok:
                # NOTE(review): trace uses r.json_() while the return uses
                # r.json() — presumably both valid on this session wrapper;
                # confirm against the sessionbuilder response API.
                log.trace(f"highlight raw highlight individua; {r.json_()}")
                return r.json()
            else:
                log.debug(f"[bold]highlight response status code:[/bold]{r.status}")
                log.debug(f"[bold]highlightresponse:[/bold] {r.text_()}")
                log.debug(f"[bold]highlight headers:[/bold] {r.headers}")
4 changes: 2 additions & 2 deletions ofscraper/api/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -630,8 +630,8 @@ async def scrape_messages(
return messages, new_tasks


def get_individual_post(model_id, postid, c=None):
with c or sessionbuilder.sessionBuilder(
def get_individual_post(model_id, postid):
with sessionbuilder.sessionBuilder(
backend="httpx",
) as c:
with c.requests(
Expand Down
1 change: 1 addition & 0 deletions ofscraper/api/paid.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,7 @@ def set_check(unduped, model_id):
cache.close()


@run
async def scrape_paid(c, username, job_progress=None, offset=0):
"""Takes headers to access onlyfans as an argument and then checks the purchased content
url to look for any purchased content. If it finds some it will return it as a list.
Expand Down
22 changes: 12 additions & 10 deletions ofscraper/api/timeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -455,16 +455,18 @@ def set_check(unduped, model_id, after):
cache.close()


def get_individual_post(id, c=None):
# c=c or sessionbuilder.sessionBuilder(backend="httpx")
with c.requests(constants.getattr("INDIVIDUAL_TIMELINE").format(id))() as r:
if r.ok:
log.trace(f"post raw individual {r.json()}")
return r.json()
else:
log.debug(f"[bold]individual post response status code:[/bold]{r.status}")
log.debug(f"[bold]individual post response:[/bold] {r.text_()}")
log.debug(f"[bold]individual post headers:[/bold] {r.headers}")
def get_individual_post(id):
    """Fetch a single timeline post by its id.

    Opens a fresh httpx-backed session, requests the individual-timeline
    endpoint, and returns the decoded JSON payload on success. On a
    non-OK response the status, body, and headers are logged at debug
    level and None is returned.
    """
    with sessionbuilder.sessionBuilder(backend="httpx") as c:
        url = constants.getattr("INDIVIDUAL_TIMELINE").format(id)
        with c.requests(url)() as r:
            # Guard clause: log and bail out on any non-OK response.
            if not r.ok:
                log.debug(
                    f"[bold]individual post response status code:[/bold]{r.status}"
                )
                log.debug(f"[bold]individual post response:[/bold] {r.text_()}")
                log.debug(f"[bold]individual post headers:[/bold] {r.headers}")
                return None
            log.trace(f"post raw individual {r.json()}")
            return r.json()


def get_after(model_id, username, forced_after=None):
Expand Down
Loading

0 comments on commit bc03f38

Please sign in to comment.