Commit 4409d00141 (parent d5e3910270)
Repository: https://github.com/mikf/gallery-dl.git (mirror)

embed error messages in StopExtraction exceptions
@@ -100,7 +100,7 @@ class NoExtractorError(GalleryDLException):
 class StopExtraction(GalleryDLException):
     """Stop data extraction"""
 
-    def __init__(self, message=None):
+    def __init__(self, message=None, *args):
         GalleryDLException.__init__(self)
-        self.message = message
+        self.message = message % args if args else message
         self.code = 1 if message else 0
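The hunk above is the core of the change: StopExtraction now takes a printf-style message plus optional arguments, renders the message, and derives an exit code from it. A minimal, self-contained sketch of that behavior (using a plain Exception base instead of gallery-dl's GalleryDLException hierarchy):

# Sketch only: stand-in for gallery_dl.exception.StopExtraction as changed above.
class StopExtraction(Exception):
    """Stop data extraction"""

    def __init__(self, message=None, *args):
        Exception.__init__(self)
        self.message = message % args if args else message
        self.code = 1 if message else 0


try:
    # extractor-style call: format string plus arguments, as in the hunks below
    raise StopExtraction("Unable to parse image info for '%s'", "https://example.org/img")
except StopExtraction as exc:
    print(exc.message)  # Unable to parse image info for 'https://example.org/img'
    print(exc.code)     # 1

try:
    raise StopExtraction()  # silent stop: no message, exit code 0
except StopExtraction as exc:
    print(exc.message, exc.code)  # None 0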
@@ -69,8 +69,7 @@ class ExhentaiExtractor(Extractor):
     def login(self):
         """Login and set necessary cookies"""
         if self.LIMIT:
-            self.log.error("Image limit reached!")
-            raise exception.StopExtraction()
+            raise exception.StopExtraction("Image limit reached!")
         if self._check_cookies(self.cookienames):
             return
         username, password = self._get_auth_info()
@@ -235,9 +234,9 @@ class ExhentaiGalleryExtractor(ExhentaiExtractor):
                 url = iurl
                 data = self._parse_image_info(url)
         except IndexError:
-            self.log.error("Unable to parse image info for '%s'", url)
             self.log.debug("Page content:\n%s", page)
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "Unable to parse image info for '%s'", url)
 
         data["num"] = self.image_num
         data["image_token"] = self.key["start"] = extr('var startkey="', '";')
@@ -272,9 +271,9 @@ class ExhentaiGalleryExtractor(ExhentaiExtractor):
                     url = imgurl
                     data = self._parse_image_info(url)
             except IndexError:
-                self.log.error("Unable to parse image info for '%s'", url)
                 self.log.debug("Page content:\n%s", page)
-                raise exception.StopExtraction()
+                raise exception.StopExtraction(
+                    "Unable to parse image info for '%s'", url)
 
             data["num"] = request["page"]
             data["image_token"] = imgkey
@@ -311,12 +310,12 @@ class ExhentaiGalleryExtractor(ExhentaiExtractor):
         self._remaining -= data["cost"]
 
         if self._remaining <= 0:
+            ExhentaiExtractor.LIMIT = True
             url = "{}/s/{}/{}-{}".format(
                 self.root, data["image_token"], self.gallery_id, data["num"])
-            self.log.error("Image limit reached! Continue with "
-                           "'%s' as URL after resetting it.", url)
-            ExhentaiExtractor.LIMIT = True
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "Image limit reached! Continue with '%s' "
+                "as URL after resetting it.", url)
 
     def _update_limits(self):
         url = "https://e-hentai.org/home.php"
@@ -423,14 +423,15 @@ class FlickrAPI(oauth.OAuth1API):
         params["api_key"] = self.api_key
         data = self.request(self.API_URL, params=params).json()
         if "code" in data:
+            msg = data.get("message")
+            self.log.debug("Server response: %s", data)
             if data["code"] == 1:
                 raise exception.NotFoundError(self.extractor.subcategory)
             elif data["code"] == 98:
-                raise exception.AuthenticationError(data.get("message"))
+                raise exception.AuthenticationError(msg)
             elif data["code"] == 99:
-                raise exception.AuthorizationError()
-            self.log.error("API call failed: %s", data.get("message"))
-            raise exception.StopExtraction()
+                raise exception.AuthorizationError(msg)
+            raise exception.StopExtraction("API request failed: %s", msg)
         return data
 
     def _pagination(self, method, params, key="photos"):
@@ -29,8 +29,7 @@ class HbrowseBase():
 
         if not data["manga"] and "<b>Warning</b>" in page:
             msg = page.rpartition(">")[2].strip()
-            self.log.error("Site is not accessible: '%s'", msg)
-            raise exception.StopExtraction()
+            raise exception.StopExtraction("Site is not accessible: '%s'", msg)
 
         tags = text.extract(page, 'class="listTable"', '</table>', pos)[0]
 
@@ -33,10 +33,9 @@ class RedirectMixin():
                 except (EOFError, OSError):
                     pass
             else:
-                self.log.error(
+                raise exception.StopExtraction(
                     "Redirect to \n%s\nVisit this URL in your browser and "
                     "solve the CAPTCHA to continue", response.url)
-                raise exception.StopExtraction()
 
 
 class KissmangaBase(RedirectMixin):
@@ -31,10 +31,10 @@ class LusciousExtractor(Extractor):
         )
 
         if response.status_code >= 400:
-            self.log.error("GraphQL query failed ('%s %s')",
-                           response.status_code, response.reason)
             self.log.debug("Server response: %s", response.text)
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "GraphQL query failed ('%s %s')",
+                response.status_code, response.reason)
 
         return response.json()["data"]
 
@@ -296,8 +296,8 @@ class OAuthMastodon(OAuthBase):
         data = self.session.post(url, data=data).json()
 
         if "client_id" not in data or "client_secret" not in data:
-            self.log.error("Failed to register new application: '%s'", data)
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "Failed to register new application: '%s'", data)
 
         data["client-id"] = data.pop("client_id")
         data["client-secret"] = data.pop("client_secret")
@@ -149,10 +149,9 @@ class PhotobucketImageExtractor(Extractor):
             if "message" not in image:
                 break  # success
             tries += 1
-            self.log.debug("'%s'", image["message"])
+            self.log.debug(image["message"])
         else:
-            self.log.error("%s", image["message"])
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(image["message"])
 
         # adjust metadata entries to be at least somewhat similar
         # to what the 'album' extractor provides
@@ -241,9 +241,8 @@ class PinterestAPI():
         if response.status_code == 404 or response.history:
             resource = self.extractor.subcategory.rpartition("-")[2]
             raise exception.NotFoundError(resource)
-        self.extractor.log.error("API request failed")
         self.extractor.log.debug("%s", response.text)
-        raise exception.StopExtraction()
+        raise exception.StopExtraction("API request failed")
 
     def _pagination(self, resource, options):
         while True:
@@ -335,11 +335,9 @@ class PixivSearchExtractor(PixivExtractor):
     def get_metadata(self, user=None):
         query = text.parse_query(self.query)
 
-        if "word" in query:
-            self.word = text.unescape(query["word"])
-        else:
-            self.log.error("missing search term")
-            raise exception.StopExtraction()
+        if "word" not in query:
+            raise exception.StopExtraction("Missing search term")
+        self.word = query["word"]
 
         sort = query.get("order", "date_d")
         sort_map = {
@@ -504,8 +502,7 @@ class PixivAppAPI():
             return response.json()
         if response.status_code == 404:
             raise exception.NotFoundError()
-        self.log.error("API request failed: %s", response.text)
-        raise exception.StopExtraction()
+        raise exception.StopExtraction("API request failed: %s", response.text)
 
     def _pagination(self, endpoint, params):
         while True:
@@ -201,9 +201,8 @@ class SankakuTagExtractor(SankakuExtractor):
 
         tags = self.tags.split()
         if not self.logged_in and len(tags) > 4:
-            self.log.error("Unauthenticated users cannot use "
-                           "more than 4 tags at once.")
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "Unauthenticated users cannot use more than 4 tags at once.")
         return {"search_tags": " ".join(tags)}
 
     def get_posts(self):
@@ -52,7 +52,7 @@ class SexcomExtractor(Extractor):
     def _parse_pin(self, url):
         response = self.request(url, fatal=False)
         if response.status_code >= 400:
-            self.log.warning('Unable to fetch %s ("%s: %s")',
+            self.log.warning('Unable to fetch %s ("%s %s")',
                              url, response.status_code, response.reason)
             return None
         extr = text.extract_from(response.text)
@@ -259,11 +259,9 @@ class SmugmugAPI(oauth.OAuth1API):
         if data["Code"] == 404:
             raise exception.NotFoundError()
         if data["Code"] == 429:
-            self.log.error("Rate limit reached")
-        else:
-            self.log.error("API request failed")
-            self.log.debug(data)
-        raise exception.StopExtraction()
+            raise exception.StopExtraction("Rate limit reached")
+        self.log.debug(data)
+        raise exception.StopExtraction("API request failed")
 
     def _expansion(self, endpoint, expands, params=None):
         endpoint = self._extend(endpoint, expands)
@@ -113,10 +113,9 @@ class TsuminoGalleryExtractor(TsuminoBase, GalleryExtractor):
         response = self.request(url, headers=headers, fatal=False)
 
         if "/Auth/" in response.url:
-            self.log.error(
+            raise exception.StopExtraction(
                 "Failed to get gallery JSON data. Visit '%s' in a browser "
                 "and solve the CAPTCHA to continue.", response.url)
-            raise exception.StopExtraction()
 
         page = response.text
         tpl, pos = text.extract(page, 'data-cdn="', '"')
@@ -195,8 +194,8 @@ class TsuminoSearchExtractor(TsuminoBase, Extractor):
                 return self._parse_simple(query)
             return self._parse_jsurl(query)
         except Exception as exc:
-            self.log.error("Invalid search query: '%s' (%s)", query, exc)
-            raise exception.StopExtraction()
+            raise exception.StopExtraction(
+                "Invalid search query '%s' (%s)", query, exc)
 
     @staticmethod
     def _parse_simple(query):
@@ -407,26 +407,22 @@ class TumblrAPI(oauth.OAuth1API):
             # daily rate limit
             if response.headers.get("x-ratelimit-perday-remaining") == "0":
                 reset = response.headers.get("x-ratelimit-perday-reset")
-                self.log.error(
+                raise exception.StopExtraction(
                     "Daily API rate limit exceeded: aborting; "
-                    "rate limit will reset at %s",
-                    self._to_time(reset),
+                    "rate limit will reset at %s", self._to_time(reset),
                 )
-                raise exception.StopExtraction()
 
             # hourly rate limit
             reset = response.headers.get("x-ratelimit-perhour-reset")
             if reset:
                 self.log.info(
-                    "Hourly API rate limit exceeded; "
-                    "waiting until %s for rate limit reset",
-                    self._to_time(reset),
+                    "Hourly API rate limit exceeded; waiting until "
+                    "%s for rate limit reset", self._to_time(reset),
                 )
                 time.sleep(int(reset) + 1)
                 return self._call(blog, endpoint, params)
 
-        self.log.error(data)
-        raise exception.StopExtraction()
+        raise exception.StopExtraction(data)
 
     @staticmethod
     def _to_time(reset):
@@ -48,7 +48,7 @@ class Job():
             self.dispatch(msg)
         except exception.StopExtraction as exc:
             if exc.message:
-                log.error("%s", exc.message)
+                log.error(exc.message)
             return exc.code
         except exception.GalleryDLException as exc:
             log.error("%s: %s", exc.__class__.__name__, exc)
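For reference, a hedged sketch of how a caller consumes the embedded message, mirroring the Job.run() handler in the last hunk (the run_extractor wrapper and logger name are illustrative, not part of the commit):

import logging

log = logging.getLogger("gallery-dl")

def run_extractor(extract):
    """Illustrative wrapper: run an extraction callable and map
    StopExtraction onto a log message and an exit code."""
    try:
        extract()
    except StopExtraction as exc:   # class from the sketch above
        if exc.message:
            log.error(exc.message)  # message was already %-formatted by the exception
        return exc.code             # 0 for a silent stop, 1 when a message was set
    return 0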