# -*- coding: utf-8 -*-

# Copyright 2014-2019 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

"""Common classes and constants used by extractor modules."""

import re
import time
import netrc
import queue
import logging
import requests
import threading
import http.cookiejar
from .message import Message
from .. import config, text, exception, cloudflare


class Extractor():

    category = ""
    subcategory = ""
    categorytransfer = False
    directory_fmt = ("{category}",)
    filename_fmt = "{filename}.{extension}"
    archive_fmt = ""
    cookiedomain = ""
    root = ""
    test = None

    def __init__(self, match):
        self.session = requests.Session()
        self.log = logging.getLogger(self.category)
        self.url = match.string
        self._init_headers()
        self._init_cookies()
        self._init_proxies()
        self._retries = self.config("retries", 5)
        self._timeout = self.config("timeout", 30)
        self._verify = self.config("verify", True)

    @classmethod
    def from_url(cls, url):
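        # Usage sketch (hypothetical subclass and URL): build an extractor
        # instance directly from a URL, or get None if the URL does not match.
        #   extr = ExampleImageExtractor.from_url("https://example.org/image/123")
        #   if extr is not None:
        #       for msg in extr:
        #           ...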
        if isinstance(cls.pattern, str):
            cls.pattern = re.compile(cls.pattern)
        match = cls.pattern.match(url)
        return cls(match) if match else None

    def __iter__(self):
        return self.items()

    def items(self):
        yield Message.Version, 1

    def skip(self, num):
        return 0

    def config(self, key, default=None):
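        # Values are resolved along the path
        # ("extractor", <category>, <subcategory>, <key>); e.g. with a user
        # configuration such as {"extractor": {"example": {"retries": 3}}}
        # ("example" being a hypothetical category), self.config("retries", 5)
        # would return 3.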
        return config.interpolate(
            ("extractor", self.category, self.subcategory, key), default)

    def request(self, url, method="GET", *, session=None,
                encoding=None, expect=(), retries=None, **kwargs):
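        # Connection errors, HTTP 5xx and 429 responses are retried with
        # exponential backoff; status codes listed in 'expect' are returned
        # to the caller instead of being treated as errors.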
        tries = 0
        retries = retries or self._retries
        session = session or self.session
        kwargs.setdefault("timeout", self._timeout)
        kwargs.setdefault("verify", self._verify)

        while True:
            try:
                response = session.request(method, url, **kwargs)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout,
                    requests.exceptions.ChunkedEncodingError,
                    requests.exceptions.ContentDecodingError) as exc:
                msg = exc
            except (requests.exceptions.RequestException) as exc:
                raise exception.HttpError(exc)
            else:
                code = response.status_code
                if 200 <= code < 400 or code in expect:
                    if encoding:
                        response.encoding = encoding
                    return response
                if cloudflare.is_challenge(response):
                    self.log.info("Solving Cloudflare challenge")
                    url, domain, cookies = cloudflare.solve_challenge(
                        session, response, kwargs)
                    cloudflare.cookies.update(self.category, (domain, cookies))
                    continue

                msg = "{}: {} for url: {}".format(code, response.reason, url)
                if code < 500 and code != 429:
                    break

            tries += 1
            self.log.debug("%s (%d/%d)", msg, tries, retries)
            if tries >= retries:
                break
            time.sleep(2 ** tries)

        raise exception.HttpError(msg)

    def _get_auth_info(self):
        """Return authentication information as (username, password) tuple"""
        username = self.config("username")
        password = None

        if username:
            password = self.config("password")
        elif self.config("netrc", False):
            try:
                info = netrc.netrc().authenticators(self.category)
                username, _, password = info
            except (OSError, netrc.NetrcParseError) as exc:
                self.log.error("netrc: %s", exc)
            except TypeError:
                self.log.warning("netrc: No authentication info")

        return username, password

    def _init_headers(self):
        """Set additional headers for the 'session' object"""
        headers = self.session.headers
        headers.clear()

        headers["User-Agent"] = self.config(
            "user-agent", ("Mozilla/5.0 (X11; Linux x86_64; rv:62.0) "
                           "Gecko/20100101 Firefox/62.0"))
        headers["Accept"] = "*/*"
        headers["Accept-Language"] = "en-US,en;q=0.5"
        headers["Accept-Encoding"] = "gzip, deflate"
        headers["Connection"] = "keep-alive"
        headers["Upgrade-Insecure-Requests"] = "1"

    def _init_proxies(self):
        """Update the session's proxy map"""
        proxies = self.config("proxy")
        if proxies:
            if isinstance(proxies, str):
                proxies = {"http": proxies, "https": proxies}
            if isinstance(proxies, dict):
                for scheme, proxy in proxies.items():
                    if "://" not in proxy:
                        proxies[scheme] = "http://" + proxy.lstrip("/")
                self.session.proxies = proxies
            else:
                self.log.warning("invalid proxy specifier: %s", proxies)

    def _init_cookies(self):
        """Populate the session's cookiejar"""
        cookies = self.config("cookies")
        if cookies:
            if isinstance(cookies, dict):
                self._update_cookies_dict(cookies, self.cookiedomain)
            else:
                cookiejar = http.cookiejar.MozillaCookieJar()
                try:
                    cookiejar.load(cookies)
                except OSError as exc:
                    self.log.warning("cookies: %s", exc)
                else:
                    self.session.cookies.update(cookiejar)

        cookies = cloudflare.cookies(self.category)
        if cookies:
            domain, cookies = cookies
            self._update_cookies_dict(cookies, domain)

    def _update_cookies(self, cookies, *, domain=""):
        """Update the session's cookiejar with 'cookies'"""
        if isinstance(cookies, dict):
            self._update_cookies_dict(cookies, domain or self.cookiedomain)
        else:
            setcookie = self.session.cookies.set_cookie
            try:
                cookies = iter(cookies)
            except TypeError:
                setcookie(cookies)
            else:
                for cookie in cookies:
                    setcookie(cookie)

    def _update_cookies_dict(self, cookiedict, domain):
        """Update cookiejar with name-value pairs from a dict"""
        setcookie = self.session.cookies.set
        for name, value in cookiedict.items():
            setcookie(name, value, domain=domain)

    def _check_cookies(self, cookienames, *, domain=""):
        """Check if all 'cookienames' are in the session's cookiejar"""
        if not domain:
            domain = self.cookiedomain
        try:
            for name in cookienames:
                self.session.cookies._find(name, domain)
        except KeyError:
            return False
        return True

    @classmethod
    def _get_tests(cls):
        """Yield an extractor's test cases as (URL, RESULTS) tuples"""
        tests = cls.test
        if not tests:
            return

        if len(tests) == 2 and (not tests[1] or isinstance(tests[1], dict)):
            tests = (tests,)

        for test in tests:
            if isinstance(test, str):
                test = (test, None)
            yield test


class ChapterExtractor(Extractor):

    subcategory = "chapter"
    directory_fmt = (
        "{category}", "{manga}",
        "{volume:?v/ />02}c{chapter:>03}{chapter_minor:?//}{title:?: //}")
    filename_fmt = (
        "{manga}_c{chapter:>03}{chapter_minor:?//}_{page:>03}.{extension}")
    archive_fmt = (
        "{manga}_{chapter}{chapter_minor}_{page}")

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.chapter_url = url or self.root + match.group(1)

    def items(self):
        self.login()
        page = self.request(self.chapter_url).text
        data = self.metadata(page)
        imgs = self.images(page)

        if "count" in data:
            images = zip(
                range(1, data["count"]+1),
                imgs,
            )
        else:
            try:
                data["count"] = len(imgs)
            except TypeError:
                pass
            images = enumerate(imgs, 1)

        yield Message.Version, 1
        yield Message.Directory, data
        for data["page"], (url, imgdata) in images:
            if imgdata:
                data.update(imgdata)
            yield Message.Url, url, text.nameext_from_url(url, data)

    def login(self):
        """Login and set necessary cookies"""

    def metadata(self, page):
        """Return a dict with general metadata"""

    def images(self, page):
        """Return a list of all (image-url, metadata)-tuples"""


class MangaExtractor(Extractor):

    subcategory = "manga"
    categorytransfer = True
    chapterclass = None
    reverse = True

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.manga_url = url or self.root + match.group(1)

        if self.config("chapter-reverse", False):
            self.reverse = not self.reverse

    def items(self):
        self.login()
        page = self.request(self.manga_url).text

        chapters = self.chapters(page)
        if self.reverse:
            chapters.reverse()

        yield Message.Version, 1
        for chapter, data in chapters:
            data["_extractor"] = self.chapterclass
            yield Message.Queue, chapter, data

    def login(self):
        """Login and set necessary cookies"""

    def chapters(self, page):
        """Return a list of all (chapter-url, metadata)-tuples"""


class GalleryExtractor(ChapterExtractor):

    subcategory = "gallery"
    filename_fmt = "{category}_{gallery_id}_{page:>03}.{extension}"
    directory_fmt = ("{category}", "{gallery_id} {title}")
    archive_fmt = "{gallery_id}_{page}"


class AsynchronousMixin():
    """Run info extraction in a separate thread"""

    def __iter__(self):
        messages = queue.Queue(5)
        thread = threading.Thread(
            target=self.async_items,
            args=(messages,),
            daemon=True,
        )

        thread.start()
        while True:
            msg = messages.get()
            if msg is None:
                thread.join()
                return
            if isinstance(msg, Exception):
                thread.join()
                raise msg
            yield msg
            messages.task_done()

    def async_items(self, messages):
        try:
            for msg in self.items():
                messages.put(msg)
        except Exception as exc:
            messages.put(exc)
        messages.put(None)


class SharedConfigMixin():
    """Enable sharing of config settings based on 'basecategory'"""
    basecategory = ""

    def config(self, key, default=None, *, sentinel=object()):
        value = Extractor.config(self, key, sentinel)
        if value is sentinel:
            cat, self.category = self.category, self.basecategory
            value = Extractor.config(self, key, default)
            self.category = cat
        return value


def generate_extractors(extractor_data, symtable, classes):
    """Dynamically generate Extractor classes"""
    extractors = config.get(("extractor", classes[0].basecategory))
    ckey = extractor_data.get("_ckey")
    prev = None

    if extractors:
        extractor_data.update(extractors)

    for category, info in extractor_data.items():

        if not isinstance(info, dict):
            continue

        root = info["root"]
        domain = root[root.index(":") + 3:]
        pattern = info.get("pattern") or re.escape(domain)
        name = (info.get("name") or category).capitalize()

        for cls in classes:

            class Extr(cls):
                pass
            Extr.__module__ = cls.__module__
            Extr.__name__ = Extr.__qualname__ = \
                name + cls.subcategory.capitalize() + "Extractor"
            Extr.__doc__ = \
                "Extractor for " + cls.subcategory + "s from " + domain
            Extr.category = category
            Extr.pattern = r"(?:https?://)?" + pattern + cls.pattern_fmt
            Extr.test = info.get("test-" + cls.subcategory)
            Extr.root = root

            if "extra" in info:
                for key, value in info["extra"].items():
                    setattr(Extr, key, value)
            if prev and ckey:
                setattr(Extr, ckey, prev)

            symtable[Extr.__name__] = prev = Extr


# Reduce strictness of the expected magic string in cookiejar files.
# (This allows the use of Wget-generated cookiejars without modification)
http.cookiejar.MozillaCookieJar.magic_re = re.compile(
    "#( Netscape)? HTTP Cookie File", re.IGNORECASE)