# -*- coding: utf-8 -*-

# Copyright 2014-2023 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

"""Common classes and constants used by extractor modules."""

import os
import re
import ssl
import time
import netrc
import queue
import getpass
import logging
import datetime
import requests
import threading
from requests.adapters import HTTPAdapter
from .message import Message
from .. import config, text, util, cache, exception


class Extractor():
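    """Base class for all extractors"""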
    category = ""
    subcategory = ""
    basecategory = ""
    categorytransfer = False
    directory_fmt = ("{category}",)
    filename_fmt = "{filename}.{extension}"
    archive_fmt = ""
    root = ""
    cookies_domain = ""
    referer = True
    ciphers = None
    tls12 = True
    browser = None
    request_interval = 0.0
    request_interval_min = 0.0
    request_timestamp = 0.0

    def __init__(self, match):
        self.log = logging.getLogger(self.category)
        self.url = match.string
        self.match = match
        self.groups = match.groups()
        self._cfgpath = ("extractor", self.category, self.subcategory)
        self._parentdir = ""

    @classmethod
    def from_url(cls, url):
        if isinstance(cls.pattern, str):
            cls.pattern = re.compile(cls.pattern)
        match = cls.pattern.match(url)
        return cls(match) if match else None
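    # Illustrative use of from_url() (class and URL are hypothetical):
    #
    #   extr = ExampleExtractor.from_url("https://example.org/gallery/1")
    #   if extr is not None:
    #       for msg in extr:
    #           ...  # Message.Directory / Message.Url / Message.Queue items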

    def __iter__(self):
        self.initialize()
        return self.items()

    def initialize(self):
        self._init_options()
        self._init_session()
        self._init_cookies()
        self._init()
        self.initialize = util.noop
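        # 'initialize' replaces itself with a no-op, so the setup above runs
        # at most once per extractor instance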

    def finalize(self):
        pass

    def items(self):
        yield Message.Version, 1

    def skip(self, num):
        return 0

    def config(self, key, default=None):
        return config.interpolate(self._cfgpath, key, default)

    def config2(self, key, key2, default=None, sentinel=util.SENTINEL):
        value = self.config(key, sentinel)
        if value is not sentinel:
            return value
        return self.config(key2, default)

    def config_deprecated(self, key, deprecated, default=None,
                          sentinel=util.SENTINEL, history=set()):
        value = self.config(deprecated, sentinel)
        if value is not sentinel:
            if deprecated not in history:
                history.add(deprecated)
                self.log.warning("'%s' is deprecated. Use '%s' instead.",
                                 deprecated, key)
            default = value

        value = self.config(key, sentinel)
        if value is not sentinel:
            return value
        return default

    def config_accumulate(self, key):
        return config.accumulate(self._cfgpath, key)

    def config_instance(self, key, default=None):
        return default

    def _config_shared(self, key, default=None):
        return config.interpolate_common(
            ("extractor",), self._cfgpath, key, default)

    def _config_shared_accumulate(self, key):
        first = True
        extr = ("extractor",)

        for path in self._cfgpath:
            if first:
                first = False
                values = config.accumulate(extr + path, key)
            else:
                conf = config.get(extr, path[0])
                if conf:
                    values[:0] = config.accumulate(
                        (self.subcategory,), key, conf=conf)

        return values

    def request(self, url, method="GET", session=None,
                retries=None, retry_codes=None, encoding=None,
                fatal=True, notfound=None, **kwargs):
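        # Send an HTTP request and retry it on transient failures.
        # Requests are spaced out according to the 'sleep-request' interval;
        # connection errors and (by default) status codes >= 500 are retried
        # up to 'retries' times with an increasing delay, and 429 responses
        # additionally honor the 'sleep-429' interval.  'fatal', 'notfound',
        # and 'encoding' control error handling and response decoding.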
        if session is None:
            session = self.session
        if retries is None:
            retries = self._retries
        if retry_codes is None:
            retry_codes = self._retry_codes
        if "proxies" not in kwargs:
            kwargs["proxies"] = self._proxies
        if "timeout" not in kwargs:
            kwargs["timeout"] = self._timeout
        if "verify" not in kwargs:
            kwargs["verify"] = self._verify

        if "json" in kwargs:
            json = kwargs["json"]
            if json is not None:
                kwargs["data"] = util.json_dumps(json).encode()
                del kwargs["json"]
                headers = kwargs.get("headers")
                if headers:
                    headers["Content-Type"] = "application/json"
                else:
                    kwargs["headers"] = {"Content-Type": "application/json"}

        response = None
        tries = 1

        if self._interval:
            seconds = (self._interval() -
                       (time.time() - Extractor.request_timestamp))
            if seconds > 0.0:
                self.sleep(seconds, "request")

        while True:
            try:
                response = session.request(method, url, **kwargs)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout,
                    requests.exceptions.ChunkedEncodingError,
                    requests.exceptions.ContentDecodingError) as exc:
                msg = exc
                code = 0
            except (requests.exceptions.RequestException) as exc:
                raise exception.HttpError(exc)
            else:
                code = response.status_code
                if self._write_pages:
                    self._dump_response(response)
                if (
                    code < 400 or
                    code < 500 and (not fatal and code != 429 or fatal is None)
                ):
                    if encoding:
                        response.encoding = encoding
                    return response
                if notfound and code == 404:
                    raise exception.NotFoundError(notfound)

                msg = "'{} {}' for '{}'".format(
                    code, response.reason, response.url)
                server = response.headers.get("Server")
                if server and server.startswith("cloudflare") and \
                        code in (403, 503):
                    content = response.content
                    if b"_cf_chl_opt" in content or b"jschl-answer" in content:
                        self.log.warning("Cloudflare challenge")
                        break
                    if b'name="captcha-bypass"' in content:
                        self.log.warning("Cloudflare CAPTCHA")
                        break

                if code == 429 and self._interval_429:
                    pass
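                    # with a 'sleep-429' interval configured, 429 responses
                    # are always retried, even if not listed in retry_codes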
                elif code not in retry_codes and code < 500:
                    break

            finally:
                Extractor.request_timestamp = time.time()

            self.log.debug("%s (%s/%s)", msg, tries, retries+1)
            if tries > retries:
                break

            seconds = tries
            if self._interval:
                s = self._interval()
                if seconds < s:
                    seconds = s
            if code == 429 and self._interval_429:
                s = self._interval_429()
                if seconds < s:
                    seconds = s
                self.wait(seconds=seconds, reason="429 Too Many Requests")
            else:
                self.sleep(seconds, "retry")
            tries += 1

        raise exception.HttpError(msg, response)

    def wait(self, seconds=None, until=None, adjust=1.0,
             reason="rate limit"):
        now = time.time()

        if seconds:
            seconds = float(seconds)
            until = now + seconds
        elif until:
            if isinstance(until, datetime.datetime):
                # convert to UTC timestamp
                until = util.datetime_to_timestamp(until)
            else:
                until = float(until)
            seconds = until - now
        else:
            raise ValueError("Either 'seconds' or 'until' is required")

        seconds += adjust
        if seconds <= 0.0:
            return

        if reason:
            t = datetime.datetime.fromtimestamp(until).time()
            isotime = "{:02}:{:02}:{:02}".format(t.hour, t.minute, t.second)
            self.log.info("Waiting until %s (%s)", isotime, reason)
        time.sleep(seconds)

    def sleep(self, seconds, reason):
        self.log.debug("Sleeping %.2f seconds (%s)",
                       seconds, reason)
        time.sleep(seconds)

    def input(self, prompt, echo=True):
        if echo:
            try:
                return input(prompt)
            except (EOFError, OSError):
                return None
        else:
            return getpass.getpass(prompt)

    def _get_auth_info(self):
        """Return authentication information as (username, password) tuple"""
        username = self.config("username")
        password = None

        if username:
            password = self.config("password") or util.LazyPrompt()
        elif self.config("netrc", False):
            try:
                info = netrc.netrc().authenticators(self.category)
                username, _, password = info
            except (OSError, netrc.NetrcParseError) as exc:
                self.log.error("netrc: %s", exc)
            except TypeError:
                self.log.warning("netrc: No authentication info")

        return username, password

    def _init(self):
        pass

    def _init_options(self):
        self._write_pages = self.config("write-pages", False)
        self._retry_codes = self.config("retry-codes")
        self._retries = self.config("retries", 4)
        self._timeout = self.config("timeout", 30)
        self._verify = self.config("verify", True)
        self._proxies = util.build_proxy_map(self.config("proxy"), self.log)
        self._interval = util.build_duration_func(
            self.config("sleep-request", self.request_interval),
            self.request_interval_min,
        )
        self._interval_429 = util.build_duration_func(
            self.config("sleep-429", 60),
        )

        if self._retries < 0:
            self._retries = float("inf")
        if not self._retry_codes:
            self._retry_codes = ()

    def _init_session(self):
        self.session = session = requests.Session()
        headers = session.headers
        headers.clear()
        ssl_options = ssl_ciphers = 0
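        # the 'browser' option selects a Firefox- or Chrome-like header and
        # TLS cipher profile (HTTP_HEADERS / SSL_CIPHERS below); otherwise a
        # plain default header set and this extractor's 'ciphers' are used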

        browser = self.config("browser")
        if browser is None:
            browser = self.browser
        if browser and isinstance(browser, str):
            browser, _, platform = browser.lower().partition(":")

            if not platform or platform == "auto":
                platform = ("Windows NT 10.0; Win64; x64"
                            if util.WINDOWS else "X11; Linux x86_64")
            elif platform == "windows":
                platform = "Windows NT 10.0; Win64; x64"
            elif platform == "linux":
                platform = "X11; Linux x86_64"
            elif platform == "macos":
                platform = "Macintosh; Intel Mac OS X 11.5"

            if browser == "chrome":
                if platform.startswith("Macintosh"):
                    platform = platform.replace(".", "_") + "_2"
            else:
                browser = "firefox"

            for key, value in HTTP_HEADERS[browser]:
                if value and "{}" in value:
                    headers[key] = value.format(platform)
                else:
                    headers[key] = value

            ssl_options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 |
                            ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
            ssl_ciphers = SSL_CIPHERS[browser]
        else:
            useragent = self.config("user-agent")
            if useragent is None:
                useragent = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64; "
                             "rv:109.0) Gecko/20100101 Firefox/115.0")
            elif useragent == "browser":
                useragent = _browser_useragent()
            headers["User-Agent"] = useragent
            headers["Accept"] = "*/*"
            headers["Accept-Language"] = "en-US,en;q=0.5"
            ssl_ciphers = self.ciphers

        if BROTLI:
            headers["Accept-Encoding"] = "gzip, deflate, br"
        else:
            headers["Accept-Encoding"] = "gzip, deflate"

        referer = self.config("referer", self.referer)
        if referer:
            if isinstance(referer, str):
                headers["Referer"] = referer
            elif self.root:
                headers["Referer"] = self.root + "/"

        custom_headers = self.config("headers")
        if custom_headers:
            headers.update(custom_headers)

        custom_ciphers = self.config("ciphers")
        if custom_ciphers:
            if isinstance(custom_ciphers, list):
                ssl_ciphers = ":".join(custom_ciphers)
            else:
                ssl_ciphers = custom_ciphers

        source_address = self.config("source-address")
        if source_address:
            if isinstance(source_address, str):
                source_address = (source_address, 0)
            else:
                source_address = (source_address[0], source_address[1])

        tls12 = self.config("tls12")
        if tls12 is None:
            tls12 = self.tls12
        if not tls12:
            ssl_options |= ssl.OP_NO_TLSv1_2
            self.log.debug("TLS 1.2 disabled.")

        adapter = _build_requests_adapter(
            ssl_options, ssl_ciphers, source_address)
        session.mount("https://", adapter)
        session.mount("http://", adapter)

    def _init_cookies(self):
        """Populate the session's cookiejar"""
        self.cookies = self.session.cookies
        self.cookies_file = None
        if self.cookies_domain is None:
            return

        cookies = self.config("cookies")
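        # the 'cookies' option may be a name/value dict, a path to a
        # cookies.txt file, or a list/tuple browser specification
        # (e.g. ["firefox"]) that is passed on to load_cookies()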
        if cookies:
            if isinstance(cookies, dict):
                self.cookies_update_dict(cookies, self.cookies_domain)

            elif isinstance(cookies, str):
                path = util.expand_path(cookies)
                try:
                    with open(path) as fp:
                        util.cookiestxt_load(fp, self.cookies)
                except Exception as exc:
                    self.log.warning("cookies: %s", exc)
                else:
                    self.log.debug("Loading cookies from '%s'", cookies)
                    self.cookies_file = path

            elif isinstance(cookies, (list, tuple)):
                key = tuple(cookies)
                cookiejar = _browser_cookies.get(key)

                if cookiejar is None:
                    from ..cookies import load_cookies
                    cookiejar = self.cookies.__class__()
                    try:
                        load_cookies(cookiejar, cookies)
                    except Exception as exc:
                        self.log.warning("cookies: %s", exc)
                    else:
                        _browser_cookies[key] = cookiejar
                else:
                    self.log.debug("Using cached cookies from %s", key)

                set_cookie = self.cookies.set_cookie
                for cookie in cookiejar:
                    set_cookie(cookie)

            else:
                self.log.warning(
                    "Expected 'dict', 'list', or 'str' value for 'cookies' "
                    "option, got '%s' (%s)",
                    cookies.__class__.__name__, cookies)

    def cookies_store(self):
        """Store the session's cookies in a cookies.txt file"""
        export = self.config("cookies-update", True)
        if not export:
            return

        if isinstance(export, str):
            path = util.expand_path(export)
        else:
            path = self.cookies_file
            if not path:
                return

        path_tmp = path + ".tmp"
        try:
            with open(path_tmp, "w") as fp:
                util.cookiestxt_store(fp, self.cookies)
            os.replace(path_tmp, path)
        except OSError as exc:
            self.log.warning("cookies: %s", exc)

    def cookies_update(self, cookies, domain=""):
        """Update the session's cookiejar with 'cookies'"""
        if isinstance(cookies, dict):
            self.cookies_update_dict(cookies, domain or self.cookies_domain)
        else:
            set_cookie = self.cookies.set_cookie
            try:
                cookies = iter(cookies)
            except TypeError:
                set_cookie(cookies)
            else:
                for cookie in cookies:
                    set_cookie(cookie)

    def cookies_update_dict(self, cookiedict, domain):
        """Update cookiejar with name-value pairs from a dict"""
        set_cookie = self.cookies.set
        for name, value in cookiedict.items():
            set_cookie(name, value, domain=domain)

    def cookies_check(self, cookies_names, domain=None):
        """Check if all 'cookies_names' are in the session's cookiejar"""
        if not self.cookies:
            return False

        if domain is None:
            domain = self.cookies_domain
        names = set(cookies_names)
        now = time.time()

        for cookie in self.cookies:
            if cookie.name in names and (
                    not domain or cookie.domain == domain):

                if cookie.expires:
                    diff = int(cookie.expires - now)

                    if diff <= 0:
                        self.log.warning(
                            "Cookie '%s' has expired", cookie.name)
                        continue

                    elif diff <= 86400:
                        hours = diff // 3600
                        self.log.warning(
                            "Cookie '%s' will expire in less than %s hour%s",
                            cookie.name, hours + 1, "s" if hours else "")

                names.discard(cookie.name)
                if not names:
                    return True

        return False

    def _prepare_ddosguard_cookies(self):
        if not self.cookies.get("__ddg2", domain=self.cookies_domain):
            self.cookies.set(
                "__ddg2", util.generate_token(), domain=self.cookies_domain)

    def _cache(self, func, maxage, keyarg=None):
        # return cache.DatabaseCacheDecorator(func, maxage, keyarg)
        return cache.DatabaseCacheDecorator(func, keyarg, maxage)

    def _cache_memory(self, func, maxage=None, keyarg=None):
        return cache.Memcache()

    def _get_date_min_max(self, dmin=None, dmax=None):
        """Retrieve and parse 'date-min' and 'date-max' config values"""
        def get(key, default):
            ts = self.config(key, default)
            if isinstance(ts, str):
                try:
                    ts = int(datetime.datetime.strptime(ts, fmt).timestamp())
                except ValueError as exc:
                    self.log.warning("Unable to parse '%s': %s", key, exc)
                    ts = default
            return ts
        fmt = self.config("date-format", "%Y-%m-%dT%H:%M:%S")
        return get("date-min", dmin), get("date-max", dmax)

    def _dispatch_extractors(self, extractor_data, default=()):
        """Return Message.Queue items for the selected child extractors"""
        extractors = {
            data[0].subcategory: data
            for data in extractor_data
        }

        include = self.config("include", default) or ()
        if include == "all":
            include = extractors
        elif isinstance(include, str):
            include = include.replace(" ", "").split(",")

        result = [(Message.Version, 1)]
        for category in include:
            try:
                extr, url = extractors[category]
            except KeyError:
                self.log.warning("Invalid include '%s'", category)
            else:
                result.append((Message.Queue, url, {"_extractor": extr}))
        return iter(result)
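    # 'extractor_data' is an iterable of (ExtractorClass, url) pairs; entries
    # are looked up by each class's subcategory, e.g. (hypothetical names):
    #   ((ExampleUserExtractor, user_url), (ExamplePostsExtractor, posts_url))
    # The 'include' option selects which of them get queued.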

    @classmethod
    def _dump(cls, obj):
        util.dump_json(obj, ensure_ascii=False, indent=2)

    def _dump_response(self, response, history=True):
        """Write the response content to a .dump file in the current directory.

        The file name is derived from the response url,
        replacing special characters with "_"
        """
        if history:
            for resp in response.history:
                self._dump_response(resp, False)

        if hasattr(Extractor, "_dump_index"):
            Extractor._dump_index += 1
        else:
            Extractor._dump_index = 1
            Extractor._dump_sanitize = re.compile(r"[\\\\|/<>:\"?*&=#]+").sub

        fname = "{:>02}_{}".format(
            Extractor._dump_index,
            Extractor._dump_sanitize('_', response.url),
        )

        if util.WINDOWS:
            path = os.path.abspath(fname)[:255]
        else:
            path = fname[:251]

        try:
            with open(path + ".txt", 'wb') as fp:
                util.dump_response(
                    response, fp,
                    headers=(self._write_pages in ("all", "ALL")),
                    hide_auth=(self._write_pages != "ALL")
                )
        except Exception as e:
            self.log.warning("Failed to dump HTTP request (%s: %s)",
                             e.__class__.__name__, e)


class GalleryExtractor(Extractor):

    subcategory = "gallery"
    filename_fmt = "{category}_{gallery_id}_{num:>03}.{extension}"
    directory_fmt = ("{category}", "{gallery_id} {title}")
    archive_fmt = "{gallery_id}_{num}"
    enum = "num"

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.gallery_url = self.root + self.groups[0] if url is None else url

    def items(self):
        self.login()

        if self.gallery_url:
            page = self.request(
                self.gallery_url, notfound=self.subcategory).text
        else:
            page = None

        data = self.metadata(page)
        imgs = self.images(page)

        if "count" in data:
            if self.config("page-reverse"):
                images = util.enumerate_reversed(imgs, 1, data["count"])
            else:
                images = zip(
                    range(1, data["count"]+1),
                    imgs,
                )
        else:
            enum = enumerate
            try:
                data["count"] = len(imgs)
            except TypeError:
                pass
            else:
                if self.config("page-reverse"):
                    enum = util.enumerate_reversed
            images = enum(imgs, 1)

        yield Message.Directory, data
        for data[self.enum], (url, imgdata) in images:
            if imgdata:
                data.update(imgdata)
                if "extension" not in imgdata:
                    text.nameext_from_url(url, data)
            else:
                text.nameext_from_url(url, data)
            yield Message.Url, url, data

    def login(self):
        """Login and set necessary cookies"""

    def metadata(self, page):
        """Return a dict with general metadata"""

    def images(self, page):
        """Return a list of all (image-url, metadata)-tuples"""


class ChapterExtractor(GalleryExtractor):

    subcategory = "chapter"
    directory_fmt = (
        "{category}", "{manga}",
        "{volume:?v/ />02}c{chapter:>03}{chapter_minor:?//}{title:?: //}")
    filename_fmt = (
        "{manga}_c{chapter:>03}{chapter_minor:?//}_{page:>03}.{extension}")
    archive_fmt = (
        "{manga}_{chapter}{chapter_minor}_{page}")
    enum = "page"


class MangaExtractor(Extractor):

    subcategory = "manga"
    categorytransfer = True
    chapterclass = None
    reverse = True

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.manga_url = self.root + self.groups[0] if url is None else url

        if self.config("chapter-reverse", False):
            self.reverse = not self.reverse

    def items(self):
        self.login()
        page = self.request(self.manga_url).text

        chapters = self.chapters(page)
        if self.reverse:
            chapters.reverse()

        for chapter, data in chapters:
            data["_extractor"] = self.chapterclass
            yield Message.Queue, chapter, data

    def login(self):
        """Login and set necessary cookies"""

    def chapters(self, page):
        """Return a list of all (chapter-url, metadata)-tuples"""


class AsynchronousMixin():
    """Run info extraction in a separate thread"""

    def __iter__(self):
        self.initialize()

        messages = queue.Queue(5)
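        # a bounded queue of five messages provides backpressure: the worker
        # thread blocks in put() once the consumer falls this far behind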
        thread = threading.Thread(
            target=self.async_items,
            args=(messages,),
            daemon=True,
        )

        thread.start()
        while True:
            msg = messages.get()
            if msg is None:
                thread.join()
                return
            if isinstance(msg, Exception):
                thread.join()
                raise msg
            yield msg
            messages.task_done()

    def async_items(self, messages):
        try:
            for msg in self.items():
                messages.put(msg)
        except Exception as exc:
            messages.put(exc)
        messages.put(None)


class BaseExtractor(Extractor):
    instances = ()

    def __init__(self, match):
        Extractor.__init__(self, match)
        if not self.category:
            self._init_category()
        self._cfgpath = ("extractor", self.category, self.subcategory)

    def _init_category(self):
        for index, group in enumerate(self.groups):
            if group is not None:
                if index:
                    self.category, self.root, info = self.instances[index-1]
                    if not self.root:
                        self.root = text.root_from_url(self.match.group(0))
                    self.config_instance = info.get
                else:
                    self.root = group
                    self.category = group.partition("://")[2]
                break

    @classmethod
    def update(cls, instances):
        extra_instances = config.get(("extractor",), cls.basecategory)
        if extra_instances:
            for category, info in extra_instances.items():
                if isinstance(info, dict) and "root" in info:
                    instances[category] = info

        pattern_list = []
        instance_list = cls.instances = []
        for category, info in instances.items():
            root = info["root"]
            if root:
                root = root.rstrip("/")
            instance_list.append((category, root, info))

            pattern = info.get("pattern")
            if not pattern:
                pattern = re.escape(root[root.index(":") + 3:])
            pattern_list.append(pattern + "()")

        return (
            r"(?:" + cls.basecategory + r":(https?://[^/?#]+)|"
            r"(?:https?://)?(?:" + "|".join(pattern_list) + r"))"
        )
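    # Every instance pattern gets an empty capture group "()" appended, so
    # _init_category() can tell from the index of the matching group which
    # (category, root, info) entry in 'instances' a given URL belongs to.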


class RequestsAdapter(HTTPAdapter):

    def __init__(self, ssl_context=None, source_address=None):
        self.ssl_context = ssl_context
        self.source_address = source_address
        HTTPAdapter.__init__(self)

    def init_poolmanager(self, *args, **kwargs):
        kwargs["ssl_context"] = self.ssl_context
        kwargs["source_address"] = self.source_address
        return HTTPAdapter.init_poolmanager(self, *args, **kwargs)

    def proxy_manager_for(self, *args, **kwargs):
        kwargs["ssl_context"] = self.ssl_context
        kwargs["source_address"] = self.source_address
        return HTTPAdapter.proxy_manager_for(self, *args, **kwargs)


def _build_requests_adapter(ssl_options, ssl_ciphers, source_address):
    key = (ssl_options, ssl_ciphers, source_address)
    try:
        return _adapter_cache[key]
    except KeyError:
        pass

    if ssl_options or ssl_ciphers:
        ssl_context = ssl.create_default_context()
        if ssl_options:
            ssl_context.options |= ssl_options
        if ssl_ciphers:
            ssl_context.set_ecdh_curve("prime256v1")
            ssl_context.set_ciphers(ssl_ciphers)
            ssl_context.check_hostname = False
    else:
        ssl_context = None

    adapter = _adapter_cache[key] = RequestsAdapter(
        ssl_context, source_address)
    return adapter


@cache.cache(maxage=86400)
def _browser_useragent():
    """Get User-Agent header from default browser"""
    import webbrowser
    import socket

    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("127.0.0.1", 6414))
    server.listen(1)
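    # opening the URL below in the default browser makes it request this
    # local socket; the User-Agent header is then read from that request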

    webbrowser.open("http://127.0.0.1:6414/user-agent")

    client = server.accept()[0]
    server.close()

    for line in client.recv(1024).split(b"\r\n"):
        key, _, value = line.partition(b":")
        if key.strip().lower() == b"user-agent":
            useragent = value.strip()
            break
    else:
        useragent = b""

    client.send(b"HTTP/1.1 200 OK\r\n\r\n" + useragent)
    client.close()

    return useragent.decode()


_adapter_cache = {}
_browser_cookies = {}


HTTP_HEADERS = {
    "firefox": (
        ("User-Agent", "Mozilla/5.0 ({}; "
                       "rv:109.0) Gecko/20100101 Firefox/115.0"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,"
                   "image/avif,image/webp,*/*;q=0.8"),
        ("Accept-Language", "en-US,en;q=0.5"),
        ("Accept-Encoding", None),
        ("Referer", None),
        ("DNT", "1"),
        ("Connection", "keep-alive"),
        ("Upgrade-Insecure-Requests", "1"),
        ("Cookie", None),
        ("Sec-Fetch-Dest", "empty"),
        ("Sec-Fetch-Mode", "no-cors"),
        ("Sec-Fetch-Site", "same-origin"),
        ("TE", "trailers"),
    ),
    "chrome": (
        ("Connection", "keep-alive"),
        ("Upgrade-Insecure-Requests", "1"),
        ("User-Agent", "Mozilla/5.0 ({}) AppleWebKit/537.36 (KHTML, "
                       "like Gecko) Chrome/111.0.0.0 Safari/537.36"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,"
                   "image/avif,image/webp,image/apng,*/*;q=0.8,"
                   "application/signed-exchange;v=b3;q=0.7"),
        ("Referer", None),
        ("Sec-Fetch-Site", "same-origin"),
        ("Sec-Fetch-Mode", "no-cors"),
        ("Sec-Fetch-Dest", "empty"),
        ("Accept-Encoding", None),
        ("Accept-Language", "en-US,en;q=0.9"),
        ("cookie", None),
        ("content-length", None),
    ),
}

SSL_CIPHERS = {
    "firefox": (
        "TLS_AES_128_GCM_SHA256:"
        "TLS_CHACHA20_POLY1305_SHA256:"
        "TLS_AES_256_GCM_SHA384:"
        "ECDHE-ECDSA-AES128-GCM-SHA256:"
        "ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-ECDSA-AES256-GCM-SHA384:"
        "ECDHE-RSA-AES256-GCM-SHA384:"
        "ECDHE-ECDSA-AES256-SHA:"
        "ECDHE-ECDSA-AES128-SHA:"
        "ECDHE-RSA-AES128-SHA:"
        "ECDHE-RSA-AES256-SHA:"
        "AES128-GCM-SHA256:"
        "AES256-GCM-SHA384:"
        "AES128-SHA:"
        "AES256-SHA"
    ),
    "chrome": (
        "TLS_AES_128_GCM_SHA256:"
        "TLS_AES_256_GCM_SHA384:"
        "TLS_CHACHA20_POLY1305_SHA256:"
        "ECDHE-ECDSA-AES128-GCM-SHA256:"
        "ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-AES256-GCM-SHA384:"
        "ECDHE-RSA-AES256-GCM-SHA384:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-AES128-SHA:"
        "ECDHE-RSA-AES256-SHA:"
        "AES128-GCM-SHA256:"
        "AES256-GCM-SHA384:"
        "AES128-SHA:"
        "AES256-SHA"
    ),
}


urllib3 = requests.packages.urllib3

# detect brotli support
try:
    BROTLI = urllib3.response.brotli is not None
except AttributeError:
    BROTLI = False

# set (urllib3) warnings filter
action = config.get((), "warnings", "default")
if action:
    try:
        import warnings
        warnings.simplefilter(action, urllib3.exceptions.HTTPWarning)
    except Exception:
        pass
del action