# -*- coding: utf-8 -*-

# Copyright 2014-2017 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

"""Common classes and constants used by extractor modules."""

import os
import re
import time
import netrc
import queue
import logging
import requests
import threading
import http.cookiejar
from .message import Message
from .. import config, exception


class Extractor():
    """Base class for all extractor modules"""

    category = ""
    subcategory = ""
    categorytransfer = False
    directory_fmt = ["{category}"]
    filename_fmt = "{name}.{extension}"
    cookiedomain = ""

    def __init__(self):
        self.session = requests.Session()
        self.log = logging.getLogger(self.category)
        self._set_cookies(self.config("cookies"))
        self._set_headers()

    def __iter__(self):
        return self.items()

    def items(self):
        """Yield a stream of extractor messages; overridden by subclasses"""
        yield Message.Version, 1

    def skip(self, num):
        """Skip the first 'num' items; return how many were actually skipped"""
        return 0

    def config(self, key, default=None):
        return config.interpolate(
            ("extractor", self.category, self.subcategory, key), default)
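
    # The key gets looked up along the ("extractor", <category>,
    # <subcategory>) path of the configuration tree, e.g.
    # self.config("username") for a "pixiv"/"user" extractor consults
    # "extractor.pixiv.user.username", with the less specific levels
    # acting as fallbacks (lookup order assumed from config.interpolate).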

    def request(self, url, method="GET", encoding=None, fatal=True, retries=3,
                allow_empty=False, *args, **kwargs):
        """Send an HTTP request and retry it on failure"""
        max_retries = retries
        while True:
            try:
                # reset 'response' so the error handling below can tell
                # whether a response was received at all
                response = None
                response = self.session.request(method, url, *args, **kwargs)
                if fatal:
                    response.raise_for_status()
                if encoding:
                    response.encoding = encoding
                if response.content or allow_empty:
                    return response
                msg = "empty response body"
            except requests.exceptions.HTTPError as exc:
                msg = exc
                code = response.status_code
                if 400 <= code < 500 and code != 429:  # Client Error
                    retries = 0
            except requests.exceptions.RequestException as exc:
                msg = exc
            if not retries:
                raise exception.HttpError(msg)
            # a Response is falsy for error status codes,
            # so compare against None explicitly
            if response is not None and response.status_code == 429:
                # Too Many Requests: honor the server's Retry-After value
                waittime = float(response.headers.get("Retry-After", 10.0))
            else:
                waittime = 1
            retries -= 1
            # linear backoff: each attempt waits a bit longer than the last
            time.sleep(waittime * (max_retries - retries))
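
    # Typical use from a subclass (hypothetical URL):
    #
    #   page = self.request("https://example.org/gallery/123").text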

    def _get_auth_info(self):
        """Return authentication information as (username, password) tuple"""
        username = self.config("username")
        password = None

        if username:
            password = self.config("password")
        elif config.get(("netrc",), False):
            try:
                info = netrc.netrc().authenticators(self.category)
                username, _, password = info
            except (OSError, netrc.NetrcParseError) as exc:
                self.log.error("netrc: %s", exc)
            except TypeError:
                # authenticators() returned None: no matching machine entry
                self.log.warning("netrc: No authentication info")

        return username, password

    def _set_headers(self):
        """Set additional headers for the 'session' object"""
        self.session.headers["Accept-Language"] = "en-US,en;q=0.5"
        self.session.headers["User-Agent"] = self.config(
            "user-agent", ("Mozilla/5.0 (X11; Linux x86_64; rv:54.0) "
                           "Gecko/20100101 Firefox/54.0"))

    def _set_cookies(self, cookies):
        """Populate the cookiejar with 'cookies'"""
        if cookies:
            if isinstance(cookies, dict):
                # cookies given as a name-to-value mapping
                setcookie = self.session.cookies.set
                for name, value in cookies.items():
                    setcookie(name, value, domain=self.cookiedomain)
            else:
                # cookies given as a path to a Netscape-format cookies.txt
                try:
                    cj = http.cookiejar.MozillaCookieJar()
                    cj.load(cookies)
                    self.session.cookies.update(cj)
                except OSError as exc:
                    self.log.warning("cookies: %s", exc)

    def _check_cookies(self, cookienames, domain=None):
        """Check if all 'cookienames' are in the session's cookiejar"""
        if not domain and self.cookiedomain:
            domain = self.cookiedomain
        for name in cookienames:
            try:
                # _find() raises a KeyError if no matching cookie exists
                self.session.cookies._find(name, domain)
            except KeyError:
                return False
        return True
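

# A hypothetical minimal subclass, for illustration only (category name,
# URL, and metadata are made up):
#
#   class ExampleImageExtractor(Extractor):
#       category = "example"
#       subcategory = "image"
#
#       def items(self):
#           url = "https://example.org/image/1.jpg"
#           data = {"name": "1", "extension": "jpg"}
#           yield Message.Version, 1
#           yield Message.Directory, data
#           yield Message.Url, url, data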


class AsynchronousExtractor(Extractor):
    """Extractor wrapper that runs items() in a separate thread

    The producer thread pushes messages into a bounded queue; an Exception
    gets passed through to be re-raised in the consuming thread, and None
    acts as an end-of-stream sentinel.
    """

    def __init__(self):
        Extractor.__init__(self)
        queue_size = int(config.get(("queue-size",), 5))
        self.__queue = queue.Queue(queue_size)
        self.__thread = threading.Thread(target=self.async_items, daemon=True)

    def __iter__(self):
        get = self.__queue.get
        done = self.__queue.task_done

        self.__thread.start()
        while True:
            task = get()
            if task is None:
                # end-of-stream sentinel
                return
            if isinstance(task, Exception):
                # re-raise exceptions from the producer thread
                raise task
            yield task
            done()

    def async_items(self):
        put = self.__queue.put
        try:
            for task in self.items():
                put(task)
        except Exception as exc:
            # hand the exception to the consuming thread
            put(exc)
        put(None)


class MangaExtractor(Extractor):
    """Base class for extractors of manga chapter lists"""

    subcategory = "manga"
    categorytransfer = True
    scheme = "http"
    root = ""
    reverse = True

    def __init__(self, match, url=None):
        Extractor.__init__(self)
        self.url = url or self.scheme + "://" + match.group(1)

    def items(self):
        self.login()
        page = self.request(self.url).text

        chapters = self.chapters(page)
        if self.reverse:
            chapters.reverse()

        yield Message.Version, 1
        for chapter, data in chapters:
            yield Message.Queue, chapter, data

    def login(self):
        """Login and set necessary cookies"""

    def chapters(self, page):
        """Return a list of all (url, metadata)-tuples"""
        return []
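
    # A chapters() implementation is expected to return entries like
    #   ("https://example.org/manga/title/c001", {"chapter": 1})
    # i.e. a chapter URL paired with its metadata (hypothetical values).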


class SharedConfigExtractor(Extractor):
    """Extractor with a fallback category for config lookups"""

    basecategory = ""

    def config(self, key, default=None, sentinel=object()):
        # look the key up under the regular category first; if it is not
        # set there, retry the lookup with 'basecategory' substituted
        value = Extractor.config(self, key, sentinel)
        if value is sentinel:
            cat, self.category = self.category, self.basecategory
            value = Extractor.config(self, key, default)
            self.category = cat
        return value
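
# Example: several sites running the same engine can share one basecategory
# (e.g. a common "foolslide" base for FoOlSlide-based sites), so a single
# "extractor.<basecategory>.<option>" entry covers all of them unless a
# more specific option is set.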


# Reduce strictness of the expected magic string in cookiejar files.
# (This allows the use of Wget-generated cookiejars without modification)
http.cookiejar.MozillaCookieJar.magic_re = re.compile(
    "#( Netscape)? HTTP Cookie File", re.IGNORECASE)
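
# Note: the stock pattern already makes the "Netscape" part optional; the
# re.IGNORECASE flag is what additionally accepts lowercase variants such
# as the "# HTTP cookie file" header Wget writes (exact wording assumed).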


# The first import of requests happens inside this file.
# If we are running on Windows and the certificate file expected by requests
# is missing (which happens in a standalone executable built with py2exe),
# the requests.Session object gets monkey-patched to always set its 'verify'
# attribute to False, to avoid an exception being thrown when attempting to
# access https:// URLs.
if os.name == "nt":
    import os.path
    import requests.certs
    import requests.packages.urllib3 as ulib3
    if not os.path.isfile(requests.certs.where()):
        def patched_init(self):
            session_init(self)
            self.verify = False
        session_init = requests.Session.__init__
        requests.Session.__init__ = patched_init
        ulib3.disable_warnings(ulib3.exceptions.InsecureRequestWarning)