
Commit 0b48cca

Merge pull request #4 from karl0ss/retry_url
Retry url
2 parents 18092af + 7bfc183 commit 0b48cca

File tree

4 files changed: +50, -13 lines


GoGoDownloader.py (+11, -2)
@@ -146,11 +146,20 @@ def gogodownloader(config):
 
     dl_links = []
     episode_links = gogo.get_links(source)
-
+    print(f"{OK}Scraping Links")
     for link in episode_links:
        dl_links.append(gogo.get_download_link(link))
 
-    gogo.file_downloader(dl_links)
+    result = gogo.file_downloader(dl_links)
+    if len(result.errors) > 0:
+        while len(result.errors) > 0:
+            print(f"{ERR}{len(result.errors)} links failed retrying.")
+            episode_links = gogo.get_links(source)
+            print(f"{OK}Re-Scraping Links")
+            dl_links.clear()
+            for link in episode_links:
+                dl_links.append(gogo.get_download_link(link))
+            result = gogo.file_downloader(dl_links, overwrite_downloads=0)
 
     use_again = input(f"{IN}Do you want to use the app again? (y|n) > ").lower()
     if use_again == "y":
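
For context: gogo.file_downloader() wraps parfive's Downloader, whose download() call returns a Results object exposing an .errors list, and that list is what the new retry loop polls. Below is a minimal standalone sketch of the same retry-until-clean pattern; download_with_retry(), scrape_links() and download_all() are hypothetical stand-ins for the commit's gogo.get_links()/get_download_link() and gogo.file_downloader(), not names from the repo.

def download_with_retry(source, scrape_links, download_all):
    # First pass: scrape the episode pages and try to download everything.
    links = scrape_links(source)
    result = download_all(links)

    # Keep retrying until no file in the batch reports an error. Links are
    # re-scraped on every pass (stale download URLs are the usual cause of
    # failures), and overwriting is disabled so files that already completed
    # are not fetched again.
    while len(result.errors) > 0:
        print(f"{len(result.errors)} links failed, retrying.")
        links = scrape_links(source)
        result = download_all(links, overwrite_downloads=0)

    return result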

backend.py (+38, -11)
@@ -1,3 +1,4 @@
+import re
 import requests
 import json
 import os
@@ -120,17 +121,42 @@ def get_download_link(self, url):
             url,
             cookies=dict(auth=gogoanime.get_gogoanime_auth_cookie(self)),
         )
-
+        quality_arr = ["1080", "720", "640", "480"]
         soup = BeautifulSoup(page.content, "html.parser")
-
-        for link in soup.find_all("a", href=True):
-            if self.episode_quality in link.text:
+        try:
+            for link in soup.find_all(
+                "a", href=True, string=re.compile(self.episode_quality)
+            ):
                 return link["href"]
-
-    def file_downloader(self, file_list: dict):
+            else:
+                ep_num = url.rsplit("-", 1)[1]
+                print(
+                    f"{self.episode_quality} not found for ep{ep_num} checking for next best"
+                )
+                for q in quality_arr:
+                    for link in soup.find_all("a", href=True, string=re.compile(q)):
+                        print(f"{q} found.")
+                        return link["href"]
+        except:
+            print("No matching download found")
+
+    def file_downloader(self, file_list: dict, overwrite_downloads: bool = None):
+        """[summary]
+
+        Args:
+            file_list (dict): [description]
+            overwrite_downloads (bool, optional): [description]. Defaults to None.
+
+        Returns:
+            [type]: [description]
+        """
+        if overwrite_downloads is None:
+            overwrite = self.config["OverwriteDownloads"]
+        else:
+            overwrite = overwrite_downloads
         dl = Downloader(
             max_conn=max_concurrent_downloads(self.config["MaxConcurrentDownloads"]),
-            overwrite=self.config["OverwriteDownloads"],
+            overwrite=overwrite,
             headers=dict(
                 [
                     (
@@ -147,10 +173,11 @@ def file_downloader(self, file_list: dict):
         )
 
         for link in file_list:
-            dl.enqueue_file(
-                link,
-                path=f"./{self.title}",
-            )
+            if link is not None:
+                dl.enqueue_file(
+                    link,
+                    path=f"./{self.title}",
+                )
 
         files = dl.download()
         return files
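
The reworked get_download_link() filters anchors with BeautifulSoup's string= argument plus a compiled regex, and relies on Python's for/else so the quality fallback only runs when no anchor matched the preferred quality. A minimal standalone sketch of that selection logic follows, assuming bs4 is installed; pick_download_link() and the sample markup are illustrative only, not part of the repo.

import re

from bs4 import BeautifulSoup

def pick_download_link(html, preferred="1080", fallbacks=("1080", "720", "640", "480")):
    soup = BeautifulSoup(html, "html.parser")
    # Anchors whose link text matches the preferred quality.
    for link in soup.find_all("a", href=True, string=re.compile(preferred)):
        return link["href"]
    else:
        # The loop found nothing: walk the quality list from best to worst.
        for q in fallbacks:
            for link in soup.find_all("a", href=True, string=re.compile(q)):
                return link["href"]
    return None  # the new `if link is not None` guard in file_downloader() handles this

# Illustrative markup: only 720P and 480P links are present.
html = '<a href="/dl/720">Download 720P</a> <a href="/dl/480">Download 480P</a>'
print(pick_download_link(html, preferred="1080"))  # falls back to "/dl/720"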

img/gogo_icon.ico (31.2 KB, binary file not shown)

version.txt (+1)

@@ -0,0 +1 @@
+3.0.0

0 commit comments
