# gallery_dl/extractor/common.py
# (from a mirror of https://github.com/mikf/gallery-dl.git)
# -*- coding: utf-8 -*-
# Copyright 2014-2023 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Common classes and constants used by extractor modules."""
import os
import re
import ssl
import time
import netrc
import queue
import logging
import datetime
import requests
import threading
from requests.adapters import HTTPAdapter
from .message import Message
from .. import config, text, util, cache, exception
class Extractor():
    """Base class for all extractor modules.

    Subclasses set the category/subcategory identifiers and format-string
    defaults below and implement items() to yield Message tuples.
    """

    # identifiers used for logging and config lookup
    category = ""
    subcategory = ""
    basecategory = ""
    categorytransfer = False
    # default format strings for directories, filenames, and archive IDs
    directory_fmt = ("{category}",)
    filename_fmt = "(unknown).{extension}"
    archive_fmt = ""
    root = ""
    # domain used when setting cookies; None disables cookie handling
    # (see _init_cookies)
    cookies_domain = ""
    referer = True
    ciphers = None
    tls12 = True
    browser = None
    # minimum delay between requests in seconds (see _init_options)
    request_interval = 0.0
    request_interval_min = 0.0
    # timestamp of the most recent request, shared across ALL extractors
    request_timestamp = 0.0

    def __init__(self, match):
        self.log = logging.getLogger(self.category)
        self.url = match.string
        self._cfgpath = ("extractor", self.category, self.subcategory)
        self._parentdir = ""

    @classmethod
    def from_url(cls, url):
        """Return an instance for 'url', or None if 'url' does not match."""
        if isinstance(cls.pattern, str):
            # compile lazily and cache the compiled pattern on the class
            cls.pattern = re.compile(cls.pattern)
        match = cls.pattern.match(url)
        return cls(match) if match else None

    def __iter__(self):
        self.initialize()
        return self.items()

    def initialize(self):
        # one-time setup; replaced by a no-op after the first call
        self._init_options()
        self._init_session()
        self._init_cookies()
        self._init()
        self.initialize = util.noop

    def finalize(self):
        pass

    def items(self):
        yield Message.Version, 1

    def skip(self, num):
        return 0

    def config(self, key, default=None):
        """Interpolate a config value along this extractor's config path."""
        return config.interpolate(self._cfgpath, key, default)

    def config_deprecated(self, key, deprecated, default=None,
                          sentinel=util.SENTINEL, history=set()):
        """Look up 'key', falling back to the 'deprecated' option name.

        NOTE: the mutable default 'history' is intentional — it is shared
        across calls so each deprecated name is only warned about once.
        """
        value = self.config(deprecated, sentinel)
        if value is not sentinel:
            if deprecated not in history:
                history.add(deprecated)
                self.log.warning("'%s' is deprecated. Use '%s' instead.",
                                 deprecated, key)
            default = value

        value = self.config(key, sentinel)
        if value is not sentinel:
            return value
        return default

    def config_accumulate(self, key):
        return config.accumulate(self._cfgpath, key)

    def _config_shared(self, key, default=None):
        return config.interpolate_common(
            ("extractor",), self._cfgpath, key, default)

    def _config_shared_accumulate(self, key):
        # NOTE(review): iterates self._cfgpath as a sequence of config
        # paths — presumably rebound to a list of tuples elsewhere
        # (e.g. for BaseExtractor subclasses); confirm against callers
        first = True
        extr = ("extractor",)

        for path in self._cfgpath:
            if first:
                first = False
                values = config.accumulate(extr + path, key)
            else:
                conf = config.get(extr, path[0])
                if conf:
                    values[:0] = config.accumulate(
                        (self.subcategory,), key, conf=conf)
        return values

    def request(self, url, method="GET", session=None,
                retries=None, retry_codes=None, encoding=None,
                fatal=True, notfound=None, **kwargs):
        """Send an HTTP request and return its response.

        Honors the configured request interval, retries on transient
        connection errors and retryable status codes, and raises
        exception.HttpError on failure (or exception.NotFoundError for
        a 404 when 'notfound' is set).
        """
        if session is None:
            session = self.session
        if retries is None:
            retries = self._retries
        if retry_codes is None:
            retry_codes = self._retry_codes
        if "proxies" not in kwargs:
            kwargs["proxies"] = self._proxies
        if "timeout" not in kwargs:
            kwargs["timeout"] = self._timeout
        if "verify" not in kwargs:
            kwargs["verify"] = self._verify
        response = None
        tries = 1

        if self._interval:
            # rate-limit against the globally shared request timestamp
            seconds = (self._interval() -
                       (time.time() - Extractor.request_timestamp))
            if seconds > 0.0:
                self.sleep(seconds, "request")

        while True:
            try:
                response = session.request(method, url, **kwargs)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout,
                    requests.exceptions.ChunkedEncodingError,
                    requests.exceptions.ContentDecodingError) as exc:
                # transient network error -> retry below
                msg = exc
            except (requests.exceptions.RequestException) as exc:
                raise exception.HttpError(exc)
            else:
                code = response.status_code
                if self._write_pages:
                    self._dump_response(response)
                # success, or a client error the caller opted to handle
                if 200 <= code < 400 or fatal is None and \
                        (400 <= code < 500) or not fatal and \
                        (400 <= code < 429 or 431 <= code < 500):
                    if encoding:
                        response.encoding = encoding
                    return response
                if notfound and code == 404:
                    raise exception.NotFoundError(notfound)

                msg = "'{} {}' for '{}'".format(code, response.reason, url)
                server = response.headers.get("Server")
                if server and server.startswith("cloudflare") and \
                        code in (403, 503):
                    content = response.content
                    if b"_cf_chl_opt" in content or b"jschl-answer" in content:
                        self.log.warning("Cloudflare challenge")
                        break
                    if b'name="captcha-bypass"' in content:
                        self.log.warning("Cloudflare CAPTCHA")
                        break
                if code not in retry_codes and code < 500:
                    break
            finally:
                Extractor.request_timestamp = time.time()

            self.log.debug("%s (%s/%s)", msg, tries, retries+1)
            if tries > retries:
                break
            self.sleep(
                max(tries, self._interval()) if self._interval else tries,
                "retry")
            tries += 1

        raise exception.HttpError(msg, response)

    def wait(self, seconds=None, until=None, adjust=1.0,
             reason="rate limit reset"):
        """Sleep for 'seconds' or until 'until' (plus 'adjust' seconds)."""
        now = time.time()

        if seconds:
            seconds = float(seconds)
            until = now + seconds
        elif until:
            if isinstance(until, datetime.datetime):
                # convert to UTC timestamp
                until = util.datetime_to_timestamp(until)
            else:
                until = float(until)
            seconds = until - now
        else:
            raise ValueError("Either 'seconds' or 'until' is required")

        seconds += adjust
        if seconds <= 0.0:
            return

        if reason:
            t = datetime.datetime.fromtimestamp(until).time()
            isotime = "{:02}:{:02}:{:02}".format(t.hour, t.minute, t.second)
            self.log.info("Waiting until %s for %s.", isotime, reason)
        time.sleep(seconds)

    def sleep(self, seconds, reason):
        self.log.debug("Sleeping %.2f seconds (%s)",
                       seconds, reason)
        time.sleep(seconds)

    def _get_auth_info(self):
        """Return authentication information as (username, password) tuple"""
        username = self.config("username")
        password = None

        if username:
            password = self.config("password")
        elif self.config("netrc", False):
            try:
                info = netrc.netrc().authenticators(self.category)
                username, _, password = info
            except (OSError, netrc.NetrcParseError) as exc:
                self.log.error("netrc: %s", exc)
            except TypeError:
                # authenticators() returned None -> no matching entry
                self.log.warning("netrc: No authentication info")

        return username, password

    def _init(self):
        pass

    def _init_options(self):
        # cache commonly used config values as instance attributes
        self._write_pages = self.config("write-pages", False)
        self._retry_codes = self.config("retry-codes")
        self._retries = self.config("retries", 4)
        self._timeout = self.config("timeout", 30)
        self._verify = self.config("verify", True)
        self._proxies = util.build_proxy_map(self.config("proxy"), self.log)
        self._interval = util.build_duration_func(
            self.config("sleep-request", self.request_interval),
            self.request_interval_min,
        )

        if self._retries < 0:
            # negative value -> retry forever
            self._retries = float("inf")
        if not self._retry_codes:
            self._retry_codes = ()

    def _init_session(self):
        """Build the requests.Session: headers, TLS options, adapters."""
        self.session = session = requests.Session()
        headers = session.headers
        headers.clear()
        ssl_options = ssl_ciphers = 0

        browser = self.config("browser")
        if browser is None:
            browser = self.browser
        if browser and isinstance(browser, str):
            # "browser[:platform]" -> emulate that browser's headers/TLS
            browser, _, platform = browser.lower().partition(":")

            if not platform or platform == "auto":
                platform = ("Windows NT 10.0; Win64; x64"
                            if util.WINDOWS else "X11; Linux x86_64")
            elif platform == "windows":
                platform = "Windows NT 10.0; Win64; x64"
            elif platform == "linux":
                platform = "X11; Linux x86_64"
            elif platform == "macos":
                platform = "Macintosh; Intel Mac OS X 11.5"

            if browser == "chrome":
                if platform.startswith("Macintosh"):
                    # Chrome UA uses underscores in the macOS version
                    platform = platform.replace(".", "_") + "_2"
            else:
                # any unrecognized browser name falls back to firefox
                browser = "firefox"

            for key, value in HTTP_HEADERS[browser]:
                if value and "{}" in value:
                    headers[key] = value.format(platform)
                else:
                    headers[key] = value

            ssl_options |= (ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 |
                            ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)
            ssl_ciphers = SSL_CIPHERS[browser]
        else:
            useragent = self.config("user-agent")
            if useragent is None:
                useragent = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64; "
                             "rv:109.0) Gecko/20100101 Firefox/115.0")
            elif useragent == "browser":
                # ask the system's default browser for its User-Agent
                useragent = _browser_useragent()
            headers["User-Agent"] = useragent
            headers["Accept"] = "*/*"
            headers["Accept-Language"] = "en-US,en;q=0.5"
            ssl_ciphers = self.ciphers

        if BROTLI:
            headers["Accept-Encoding"] = "gzip, deflate, br"
        else:
            headers["Accept-Encoding"] = "gzip, deflate"

        referer = self.config("referer", self.referer)
        if referer:
            if isinstance(referer, str):
                headers["Referer"] = referer
            elif self.root:
                headers["Referer"] = self.root + "/"

        custom_headers = self.config("headers")
        if custom_headers:
            headers.update(custom_headers)

        custom_ciphers = self.config("ciphers")
        if custom_ciphers:
            if isinstance(custom_ciphers, list):
                ssl_ciphers = ":".join(custom_ciphers)
            else:
                ssl_ciphers = custom_ciphers

        source_address = self.config("source-address")
        if source_address:
            if isinstance(source_address, str):
                # bare host string -> (host, any port)
                source_address = (source_address, 0)
            else:
                source_address = (source_address[0], source_address[1])

        tls12 = self.config("tls12")
        if tls12 is None:
            tls12 = self.tls12
        if not tls12:
            ssl_options |= ssl.OP_NO_TLSv1_2
            self.log.debug("TLS 1.2 disabled.")

        adapter = _build_requests_adapter(
            ssl_options, ssl_ciphers, source_address)
        session.mount("https://", adapter)
        session.mount("http://", adapter)

    def _init_cookies(self):
        """Populate the session's cookiejar"""
        self.cookies = self.session.cookies
        self.cookies_file = None
        if self.cookies_domain is None:
            return

        cookies = self.config("cookies")
        if cookies:
            if isinstance(cookies, dict):
                # name-to-value mapping
                self.cookies_update_dict(cookies, self.cookies_domain)

            elif isinstance(cookies, str):
                # path to a cookies.txt file
                path = util.expand_path(cookies)
                try:
                    with open(path) as fp:
                        util.cookiestxt_load(fp, self.cookies)
                except Exception as exc:
                    self.log.warning("cookies: %s", exc)
                else:
                    self.log.debug("Loading cookies from '%s'", cookies)
                    self.cookies_file = path

            elif isinstance(cookies, (list, tuple)):
                # browser specification for cookie extraction;
                # results are cached module-wide in _browser_cookies
                key = tuple(cookies)
                cookiejar = _browser_cookies.get(key)

                if cookiejar is None:
                    from ..cookies import load_cookies
                    cookiejar = self.cookies.__class__()
                    try:
                        load_cookies(cookiejar, cookies)
                    except Exception as exc:
                        self.log.warning("cookies: %s", exc)
                    else:
                        _browser_cookies[key] = cookiejar
                else:
                    self.log.debug("Using cached cookies from %s", key)

                set_cookie = self.cookies.set_cookie
                for cookie in cookiejar:
                    set_cookie(cookie)

            else:
                self.log.warning(
                    "Expected 'dict', 'list', or 'str' value for 'cookies' "
                    "option, got '%s' (%s)",
                    cookies.__class__.__name__, cookies)

    def cookies_store(self):
        """Store the session's cookies in a cookies.txt file"""
        export = self.config("cookies-update", True)
        if not export:
            return

        if isinstance(export, str):
            path = util.expand_path(export)
        else:
            path = self.cookies_file
            if not path:
                return

        try:
            with open(path, "w") as fp:
                util.cookiestxt_store(fp, self.cookies)
        except OSError as exc:
            self.log.warning("cookies: %s", exc)

    def cookies_update(self, cookies, domain=""):
        """Update the session's cookiejar with 'cookies'"""
        if isinstance(cookies, dict):
            self.cookies_update_dict(cookies, domain or self.cookies_domain)
        else:
            set_cookie = self.cookies.set_cookie
            try:
                cookies = iter(cookies)
            except TypeError:
                # single cookie object
                set_cookie(cookies)
            else:
                for cookie in cookies:
                    set_cookie(cookie)

    def cookies_update_dict(self, cookiedict, domain):
        """Update cookiejar with name-value pairs from a dict"""
        set_cookie = self.cookies.set
        for name, value in cookiedict.items():
            set_cookie(name, value, domain=domain)

    def cookies_check(self, cookies_names, domain=None):
        """Check if all 'cookies_names' are in the session's cookiejar"""
        if not self.cookies:
            return False
        if domain is None:
            domain = self.cookies_domain
        names = set(cookies_names)
        now = time.time()

        for cookie in self.cookies:
            if cookie.name in names and (
                    not domain or cookie.domain == domain):
                if cookie.expires:
                    diff = int(cookie.expires - now)

                    if diff <= 0:
                        self.log.warning(
                            "Cookie '%s' has expired", cookie.name)
                        continue
                    elif diff <= 86400:
                        # warn when less than one day of validity remains
                        hours = diff // 3600
                        self.log.warning(
                            "Cookie '%s' will expire in less than %s hour%s",
                            cookie.name, hours + 1, "s" if hours else "")

                names.discard(cookie.name)
                if not names:
                    return True
        return False

    def _prepare_ddosguard_cookies(self):
        # pre-set the "__ddg2" cookie expected by DDoS-Guard
        if not self.cookies.get("__ddg2", domain=self.cookies_domain):
            self.cookies.set(
                "__ddg2", util.generate_token(), domain=self.cookies_domain)

    def _cache(self, func, maxage, keyarg=None):
        # DatabaseCacheDecorator takes (func, keyarg, maxage) —
        # note the argument order differs from this method's signature
        #  return cache.DatabaseCacheDecorator(func, maxage, keyarg)
        return cache.DatabaseCacheDecorator(func, keyarg, maxage)

    def _cache_memory(self, func, maxage=None, keyarg=None):
        return cache.Memcache()

    def _get_date_min_max(self, dmin=None, dmax=None):
        """Retrieve and parse 'date-min' and 'date-max' config values"""
        def get(key, default):
            ts = self.config(key, default)
            if isinstance(ts, str):
                try:
                    ts = int(datetime.datetime.strptime(ts, fmt).timestamp())
                except ValueError as exc:
                    self.log.warning("Unable to parse '%s': %s", key, exc)
                    ts = default
            return ts
        fmt = self.config("date-format", "%Y-%m-%dT%H:%M:%S")
        return get("date-min", dmin), get("date-max", dmax)

    def _dispatch_extractors(self, extractor_data, default=()):
        """Yield Message.Queue items for the sub-extractors
        selected by the 'include' config option."""
        extractors = {
            data[0].subcategory: data
            for data in extractor_data
        }

        include = self.config("include", default) or ()
        if include == "all":
            include = extractors
        elif isinstance(include, str):
            include = include.split(",")

        result = [(Message.Version, 1)]
        for category in include:
            if category in extractors:
                extr, url = extractors[category]
                result.append((Message.Queue, url, {"_extractor": extr}))
        return iter(result)

    @classmethod
    def _dump(cls, obj):
        util.dump_json(obj, ensure_ascii=False, indent=2)

    def _dump_response(self, response, history=True):
        """Write the response content to a .dump file in the current directory.

        The file name is derived from the response url,
        replacing special characters with "_"
        """
        if history:
            for resp in response.history:
                self._dump_response(resp, False)

        if hasattr(Extractor, "_dump_index"):
            Extractor._dump_index += 1
        else:
            # first call: initialize the counter and sanitizer on the class
            Extractor._dump_index = 1
            Extractor._dump_sanitize = re.compile(r"[\\\\|/<>:\"?*&=#]+").sub

        fname = "{:>02}_{}".format(
            Extractor._dump_index,
            Extractor._dump_sanitize('_', response.url),
        )

        if util.WINDOWS:
            path = os.path.abspath(fname)[:255]
        else:
            path = fname[:251]

        try:
            with open(path + ".txt", 'wb') as fp:
                util.dump_response(
                    response, fp,
                    headers=(self._write_pages in ("all", "ALL")),
                    hide_auth=(self._write_pages != "ALL")
                )
        except Exception as e:
            self.log.warning("Failed to dump HTTP request (%s: %s)",
                             e.__class__.__name__, e)
class GalleryExtractor(Extractor):
    """Base class for extractors of image galleries.

    Subclasses implement metadata() and images(); items() combines
    their results into Directory and Url messages.
    """
    subcategory = "gallery"
    filename_fmt = "{category}_{gallery_id}_{num:>03}.{extension}"
    directory_fmt = ("{category}", "{gallery_id} {title}")
    archive_fmt = "{gallery_id}_{num}"
    # metadata key used for the per-image counter
    enum = "num"

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.gallery_url = self.root + match.group(1) if url is None else url

    def items(self):
        self.login()

        if self.gallery_url:
            page = self.request(
                self.gallery_url, notfound=self.subcategory).text
        else:
            page = None

        data = self.metadata(page)
        imgs = self.images(page)

        if "count" in data:
            if self.config("page-reverse"):
                images = util.enumerate_reversed(imgs, 1, data["count"])
            else:
                images = zip(
                    range(1, data["count"]+1),
                    imgs,
                )
        else:
            enum = enumerate
            try:
                data["count"] = len(imgs)
            except TypeError:
                # 'imgs' is a generator without a known length
                pass
            else:
                if self.config("page-reverse"):
                    enum = util.enumerate_reversed
            images = enum(imgs, 1)

        yield Message.Directory, data
        for data[self.enum], (url, imgdata) in images:
            if imgdata:
                data.update(imgdata)
                if "extension" not in imgdata:
                    text.nameext_from_url(url, data)
            else:
                text.nameext_from_url(url, data)
            yield Message.Url, url, data

    def login(self):
        """Login and set necessary cookies"""

    def metadata(self, page):
        """Return a dict with general metadata"""

    def images(self, page):
        """Return a list of all (image-url, metadata)-tuples"""
class ChapterExtractor(GalleryExtractor):
    """GalleryExtractor variant for manga chapters;
    only overrides identifiers and format strings."""
    subcategory = "chapter"
    directory_fmt = (
        "{category}", "{manga}",
        "{volume:?v/ />02}c{chapter:>03}{chapter_minor:?//}{title:?: //}")
    filename_fmt = (
        "{manga}_c{chapter:>03}{chapter_minor:?//}_{page:>03}.{extension}")
    archive_fmt = (
        "{manga}_{chapter}{chapter_minor}_{page}")
    # per-image counter is the page number
    enum = "page"
class MangaExtractor(Extractor):
    """Base class for extractors listing the chapters of a manga.

    Subclasses implement chapters(); items() queues each chapter URL
    for 'chapterclass'.
    """
    subcategory = "manga"
    categorytransfer = True
    # extractor class queued for each chapter
    chapterclass = None
    reverse = True

    def __init__(self, match, url=None):
        Extractor.__init__(self, match)
        self.manga_url = url or self.root + match.group(1)

        # "chapter-reverse" flips the default chapter order
        if self.config("chapter-reverse", False):
            self.reverse = not self.reverse

    def items(self):
        self.login()
        page = self.request(self.manga_url).text

        chapters = self.chapters(page)
        if self.reverse:
            chapters.reverse()

        for chapter, data in chapters:
            data["_extractor"] = self.chapterclass
            yield Message.Queue, chapter, data

    def login(self):
        """Login and set necessary cookies"""

    def chapters(self, page):
        """Return a list of all (chapter-url, metadata)-tuples"""
class AsynchronousMixin():
    """Run info extraction in a separate thread"""

    def __iter__(self):
        self.initialize()

        # bounded queue: the producer thread blocks once 5 messages
        # are waiting to be consumed
        messages = queue.Queue(5)
        thread = threading.Thread(
            target=self.async_items,
            args=(messages,),
            daemon=True,
        )

        thread.start()
        while True:
            msg = messages.get()
            if msg is None:
                # sentinel: producer finished
                thread.join()
                return
            if isinstance(msg, Exception):
                # re-raise producer exceptions in the consumer thread
                thread.join()
                raise msg
            yield msg
            messages.task_done()

    def async_items(self, messages):
        # producer: forward items (or a raised exception), then the
        # None sentinel to signal completion
        try:
            for msg in self.items():
                messages.put(msg)
        except Exception as exc:
            messages.put(exc)
        messages.put(None)
class BaseExtractor(Extractor):
    """Extractor base for sites sharing one codebase across multiple
    category instances (category/root derived from the matched URL)."""

    # sequence of (category, root) pairs; filled by update()
    instances = ()

    def __init__(self, match):
        if not self.category:
            self._init_category(match)
        Extractor.__init__(self, match)

    def _init_category(self, match):
        # the index of the first non-None match group selects the instance;
        # group 0 is the explicit "<basecategory>:<root>" form
        for index, group in enumerate(match.groups()):
            if group is not None:
                if index:
                    self.category, self.root = self.instances[index-1]
                    if not self.root:
                        self.root = text.root_from_url(match.group(0))
                else:
                    self.root = group
                    self.category = group.partition("://")[2]
                break

    @classmethod
    def update(cls, instances):
        """Register 'instances' (plus user-configured extras) and
        return the combined URL pattern for this base category."""
        extra_instances = config.get(("extractor",), cls.basecategory)
        if extra_instances:
            for category, info in extra_instances.items():
                if isinstance(info, dict) and "root" in info:
                    instances[category] = info

        pattern_list = []
        instance_list = cls.instances = []
        for category, info in instances.items():
            root = info["root"]
            if root:
                root = root.rstrip("/")
            instance_list.append((category, root))

            pattern = info.get("pattern")
            if not pattern:
                # default pattern: the root URL without its scheme
                pattern = re.escape(root[root.index(":") + 3:])
            # the empty group "()" marks which instance matched
            pattern_list.append(pattern + "()")

        return (
            r"(?:" + cls.basecategory + r":(https?://[^/?#]+)|"
            r"(?:https?://)?(?:" + "|".join(pattern_list) + r"))"
        )
class RequestsAdapter(HTTPAdapter):
    """HTTPAdapter that injects a custom SSLContext and source address
    into its connection pools and proxy managers."""

    def __init__(self, ssl_context=None, source_address=None):
        # stash the settings before the base class builds its pool manager
        self.ssl_context = ssl_context
        self.source_address = source_address
        super().__init__()

    def init_poolmanager(self, *args, **kwargs):
        kwargs.update(
            ssl_context=self.ssl_context,
            source_address=self.source_address,
        )
        return super().init_poolmanager(*args, **kwargs)

    def proxy_manager_for(self, *args, **kwargs):
        kwargs.update(
            ssl_context=self.ssl_context,
            source_address=self.source_address,
        )
        return super().proxy_manager_for(*args, **kwargs)
def _build_requests_adapter(ssl_options, ssl_ciphers, source_address):
    """Return a RequestsAdapter for the given TLS settings,
    reusing previously built adapters via _adapter_cache."""
    cache_key = (ssl_options, ssl_ciphers, source_address)
    cached = _adapter_cache.get(cache_key)
    if cached is not None:
        return cached

    if not ssl_options and not ssl_ciphers:
        # nothing custom requested -> let requests use its defaults
        ssl_context = None
    else:
        ssl_context = ssl.create_default_context()
        if ssl_options:
            ssl_context.options |= ssl_options
        if ssl_ciphers:
            ssl_context.set_ecdh_curve("prime256v1")
            ssl_context.set_ciphers(ssl_ciphers)

    adapter = RequestsAdapter(ssl_context, source_address)
    _adapter_cache[cache_key] = adapter
    return adapter
@cache.cache(maxage=86400)
def _browser_useragent():
    """Get User-Agent header from default browser"""
    import webbrowser
    import socket

    # listen on a local port and let the default browser request it,
    # then read the User-Agent header from that request
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("127.0.0.1", 6414))
    server.listen(1)

    webbrowser.open("http://127.0.0.1:6414/user-agent")

    client = server.accept()[0]
    server.close()

    for line in client.recv(1024).split(b"\r\n"):
        key, _, value = line.partition(b":")
        if key.strip().lower() == b"user-agent":
            useragent = value.strip()
            break
    else:
        useragent = b""

    # echo the detected value back to the browser and clean up
    client.send(b"HTTP/1.1 200 OK\r\n\r\n" + useragent)
    client.close()

    return useragent.decode()
# module-wide caches: HTTP adapters keyed by TLS settings,
# and browser-extracted cookiejars keyed by browser specification
_adapter_cache = {}
_browser_cookies = {}
# ordered header sets used by _init_session() for browser emulation;
# a None value removes/omits the header, "{}" is filled with the platform
HTTP_HEADERS = {
    "firefox": (
        ("User-Agent", "Mozilla/5.0 ({}; "
                       "rv:109.0) Gecko/20100101 Firefox/115.0"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,"
                   "image/avif,image/webp,*/*;q=0.8"),
        ("Accept-Language", "en-US,en;q=0.5"),
        ("Accept-Encoding", None),
        ("Referer", None),
        ("DNT", "1"),
        ("Connection", "keep-alive"),
        ("Upgrade-Insecure-Requests", "1"),
        ("Cookie", None),
        ("Sec-Fetch-Dest", "empty"),
        ("Sec-Fetch-Mode", "no-cors"),
        ("Sec-Fetch-Site", "same-origin"),
        ("TE", "trailers"),
    ),
    "chrome": (
        ("Connection", "keep-alive"),
        ("Upgrade-Insecure-Requests", "1"),
        ("User-Agent", "Mozilla/5.0 ({}) AppleWebKit/537.36 (KHTML, "
                       "like Gecko) Chrome/111.0.0.0 Safari/537.36"),
        ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,"
                   "image/avif,image/webp,image/apng,*/*;q=0.8,"
                   "application/signed-exchange;v=b3;q=0.7"),
        ("Referer", None),
        ("Sec-Fetch-Site", "same-origin"),
        ("Sec-Fetch-Mode", "no-cors"),
        ("Sec-Fetch-Dest", "empty"),
        ("Accept-Encoding", None),
        ("Accept-Language", "en-US,en;q=0.9"),
        ("cookie", None),
        ("content-length", None),
    ),
}
# OpenSSL cipher strings used by _init_session() for browser emulation
SSL_CIPHERS = {
    "firefox": (
        "TLS_AES_128_GCM_SHA256:"
        "TLS_CHACHA20_POLY1305_SHA256:"
        "TLS_AES_256_GCM_SHA384:"
        "ECDHE-ECDSA-AES128-GCM-SHA256:"
        "ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-ECDSA-AES256-GCM-SHA384:"
        "ECDHE-RSA-AES256-GCM-SHA384:"
        "ECDHE-ECDSA-AES256-SHA:"
        "ECDHE-ECDSA-AES128-SHA:"
        "ECDHE-RSA-AES128-SHA:"
        "ECDHE-RSA-AES256-SHA:"
        "AES128-GCM-SHA256:"
        "AES256-GCM-SHA384:"
        "AES128-SHA:"
        "AES256-SHA"
    ),
    "chrome": (
        "TLS_AES_128_GCM_SHA256:"
        "TLS_AES_256_GCM_SHA384:"
        "TLS_CHACHA20_POLY1305_SHA256:"
        "ECDHE-ECDSA-AES128-GCM-SHA256:"
        "ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-AES256-GCM-SHA384:"
        "ECDHE-RSA-AES256-GCM-SHA384:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-RSA-AES128-SHA:"
        "ECDHE-RSA-AES256-SHA:"
        "AES128-GCM-SHA256:"
        "AES256-GCM-SHA384:"
        "AES128-SHA:"
        "AES256-SHA"
    ),
}
urllib3 = requests.packages.urllib3

# detect brotli support
try:
    BROTLI = urllib3.response.brotli is not None
except AttributeError:
    BROTLI = False

# set (urllib3) warnings filter
action = config.get((), "warnings", "default")
if action:
    try:
        import warnings
        warnings.simplefilter(action, urllib3.exceptions.HTTPWarning)
    except Exception:
        # best-effort: ignore invalid filter actions or missing attributes
        pass
del action