diff --git a/gallery_dl/extractor/500px.py b/gallery_dl/extractor/500px.py
index 1213194a..ac38b604 100644
--- a/gallery_dl/extractor/500px.py
+++ b/gallery_dl/extractor/500px.py
@@ -21,7 +21,7 @@ class _500pxExtractor(Extractor):
     filename_fmt = "{id}_{name}.{extension}"
     archive_fmt = "{id}"
     root = "https://500px.com"
-    cookiedomain = ".500px.com"
+    cookies_domain = ".500px.com"
 
     def __init__(self, match):
         Extractor.__init__(self, match)
@@ -73,7 +73,7 @@ class _500pxExtractor(Extractor):
     def _request_api(self, url, params):
         headers = {
             "Origin": self.root,
-            "x-csrf-token": self.session.cookies.get(
+            "x-csrf-token": self.cookies.get(
                 "x-csrf-token", domain=".500px.com"),
         }
         return self.request(url, headers=headers, params=params).json()
@@ -81,7 +81,7 @@ class _500pxExtractor(Extractor):
     def _request_graphql(self, opname, variables):
         url = "https://api.500px.com/graphql"
         headers = {
-            "x-csrf-token": self.session.cookies.get(
+            "x-csrf-token": self.cookies.get(
                 "x-csrf-token", domain=".500px.com"),
         }
         data = {
diff --git a/gallery_dl/extractor/8chan.py b/gallery_dl/extractor/8chan.py
index 0e128c3a..f098008a 100644
--- a/gallery_dl/extractor/8chan.py
+++ b/gallery_dl/extractor/8chan.py
@@ -27,7 +27,7 @@ class _8chanExtractor(Extractor):
         Extractor.__init__(self, match)
 
     @memcache()
-    def _prepare_cookies(self):
+    def cookies_prepare(self):
         # fetch captcha cookies
         # (necessary to download without getting interrupted)
         now = datetime.utcnow()
@@ -39,14 +39,14 @@ class _8chanExtractor(Extractor):
         # - remove 'expires' timestamp
         # - move 'captchaexpiration' value forward by 1 month)
         domain = self.root.rpartition("/")[2]
-        for cookie in self.session.cookies:
+        for cookie in self.cookies:
             if cookie.domain.endswith(domain):
                 cookie.expires = None
                 if cookie.name == "captchaexpiration":
                     cookie.value = (now + timedelta(30, 300)).strftime(
                         "%a, %d %b %Y %H:%M:%S GMT")
 
-        return self.session.cookies
+        return self.cookies
 
 
 class _8chanThreadExtractor(_8chanExtractor):
@@ -113,7 +113,7 @@ class _8chanThreadExtractor(_8chanExtractor):
         thread["_http_headers"] = {"Referer": url + "html"}
 
         try:
-            self.session.cookies = self._prepare_cookies()
+            self.cookies = self.cookies_prepare()
         except Exception as exc:
             self.log.debug("Failed to fetch captcha cookies: %s: %s",
                            exc.__class__.__name__, exc, exc_info=True)
diff --git a/gallery_dl/extractor/aryion.py b/gallery_dl/extractor/aryion.py
index 6f015728..ad0f9dc6 100644
--- a/gallery_dl/extractor/aryion.py
+++ b/gallery_dl/extractor/aryion.py
@@ -23,8 +23,8 @@ class AryionExtractor(Extractor):
     directory_fmt = ("{category}", "{user!l}", "{path:J - }")
     filename_fmt = "{id} {title}.{extension}"
     archive_fmt = "{id}"
-    cookiedomain = ".aryion.com"
-    cookienames = ("phpbb3_rl7a3_sid",)
+    cookies_domain = ".aryion.com"
+    cookies_names = ("phpbb3_rl7a3_sid",)
     root = "https://aryion.com"
 
     def __init__(self, match):
@@ -33,11 +33,12 @@ class AryionExtractor(Extractor):
         self.recursive = True
 
     def login(self):
-        if self._check_cookies(self.cookienames):
+        if self.cookies_check(self.cookies_names):
             return
+
         username, password = self._get_auth_info()
         if username:
-            self._update_cookies(self._login_impl(username, password))
+            self.cookies_update(self._login_impl(username, password))
 
     @cache(maxage=14*24*3600, keyarg=1)
     def _login_impl(self, username, password):
@@ -53,7 +54,7 @@ class AryionExtractor(Extractor):
         response = self.request(url, method="POST", data=data)
         if b"You have been successfully logged in." not in response.content:
             raise exception.AuthenticationError()
-        return {c: response.cookies[c] for c in self.cookienames}
+        return {c: response.cookies[c] for c in self.cookies_names}
 
     def items(self):
         self.login()
diff --git a/gallery_dl/extractor/common.py b/gallery_dl/extractor/common.py
index 3cb5fc41..2e5ce4d4 100644
--- a/gallery_dl/extractor/common.py
+++ b/gallery_dl/extractor/common.py
@@ -32,7 +32,7 @@ class Extractor():
     directory_fmt = ("{category}",)
     filename_fmt = "{filename}.{extension}"
    archive_fmt = ""
-    cookiedomain = ""
+    cookies_domain = ""
     browser = None
     root = ""
     test = None
@@ -330,26 +330,26 @@ class Extractor():
 
     def _init_cookies(self):
         """Populate the session's cookiejar"""
-        self._cookiefile = None
-        self._cookiejar = self.session.cookies
-        if self.cookiedomain is None:
+        self.cookies = self.session.cookies
+        self.cookies_file = None
+        if self.cookies_domain is None:
             return
 
         cookies = self.config("cookies")
         if cookies:
             if isinstance(cookies, dict):
-                self._update_cookies_dict(cookies, self.cookiedomain)
+                self.cookies_update_dict(cookies, self.cookies_domain)
 
             elif isinstance(cookies, str):
-                cookiefile = util.expand_path(cookies)
+                path = util.expand_path(cookies)
                 try:
-                    with open(cookiefile) as fp:
-                        util.cookiestxt_load(fp, self._cookiejar)
+                    with open(path) as fp:
+                        util.cookiestxt_load(fp, self.cookies)
                 except Exception as exc:
                     self.log.warning("cookies: %s", exc)
                 else:
                     self.log.debug("Loading cookies from '%s'", cookies)
-                    self._cookiefile = cookiefile
+                    self.cookies_file = path
 
             elif isinstance(cookies, (list, tuple)):
                 key = tuple(cookies)
@@ -357,7 +357,7 @@ class Extractor():
 
                 if cookiejar is None:
                     from ..cookies import load_cookies
-                    cookiejar = self._cookiejar.__class__()
+                    cookiejar = self.cookies.__class__()
                     try:
                         load_cookies(cookiejar, cookies)
                     except Exception as exc:
@@ -367,9 +367,9 @@ class Extractor():
                 else:
                     self.log.debug("Using cached cookies from %s", key)
 
-                setcookie = self._cookiejar.set_cookie
+                set_cookie = self.cookies.set_cookie
                 for cookie in cookiejar:
-                    setcookie(cookie)
+                    set_cookie(cookie)
 
         else:
             self.log.warning(
                 "expected 'dict', 'list', or 'str' value for 'cookies' "
                 "option, got '%s' (%s)", cookies.__class__.__name__, cookies)
 
-    def _store_cookies(self):
-        """Store the session's cookiejar in a cookies.txt file"""
+    def cookies_store(self):
+        """Store the session's cookies in a cookies.txt file"""
         export = self.config("cookies-update", True)
         if not export:
             return
@@ -386,47 +386,47 @@ class Extractor():
         if isinstance(export, str):
             path = util.expand_path(export)
         else:
-            path = self._cookiefile
+            path = self.cookies_file
             if not path:
                 return
 
         try:
             with open(path, "w") as fp:
-                util.cookiestxt_store(fp, self._cookiejar)
+                util.cookiestxt_store(fp, self.cookies)
         except OSError as exc:
             self.log.warning("cookies: %s", exc)
 
-    def _update_cookies(self, cookies, domain=""):
+    def cookies_update(self, cookies, domain=""):
         """Update the session's cookiejar with 'cookies'"""
         if isinstance(cookies, dict):
-            self._update_cookies_dict(cookies, domain or self.cookiedomain)
+            self.cookies_update_dict(cookies, domain or self.cookies_domain)
         else:
-            setcookie = self._cookiejar.set_cookie
+            set_cookie = self.cookies.set_cookie
             try:
                 cookies = iter(cookies)
             except TypeError:
-                setcookie(cookies)
+                set_cookie(cookies)
             else:
                 for cookie in cookies:
-                    setcookie(cookie)
+                    set_cookie(cookie)
 
-    def _update_cookies_dict(self, cookiedict, domain):
+    def cookies_update_dict(self, cookiedict, domain):
         """Update cookiejar with name-value pairs from a dict"""
-        setcookie = self._cookiejar.set
+        set_cookie = self.cookies.set
         for name, value in cookiedict.items():
-            setcookie(name, value, domain=domain)
+            set_cookie(name, value, domain=domain)
 
-    def _check_cookies(self, cookienames, domain=None):
-        """Check if all 'cookienames' are in the session's cookiejar"""
-        if not self._cookiejar:
+    def cookies_check(self, cookies_names, domain=None):
+        """Check if all 'cookies_names' are in the session's cookiejar"""
+        if not self.cookies:
             return False
         if domain is None:
-            domain = self.cookiedomain
-        names = set(cookienames)
+            domain = self.cookies_domain
+        names = set(cookies_names)
         now = time.time()
-        for cookie in self._cookiejar:
+        for cookie in self.cookies:
             if cookie.name in names and (
                     not domain or cookie.domain == domain):
@@ -450,9 +450,9 @@
         return False
 
     def _prepare_ddosguard_cookies(self):
-        if not self._cookiejar.get("__ddg2", domain=self.cookiedomain):
-            self._cookiejar.set(
-                "__ddg2", util.generate_token(), domain=self.cookiedomain)
+        if not self.cookies.get("__ddg2", domain=self.cookies_domain):
+            self.cookies.set(
+                "__ddg2", util.generate_token(), domain=self.cookies_domain)
 
     def _get_date_min_max(self, dmin=None, dmax=None):
         """Retrieve and parse 'date-min' and 'date-max' config values"""
diff --git a/gallery_dl/extractor/deviantart.py b/gallery_dl/extractor/deviantart.py
index 1ad2bd96..9f16b334 100644
--- a/gallery_dl/extractor/deviantart.py
+++ b/gallery_dl/extractor/deviantart.py
@@ -32,8 +32,8 @@ class DeviantartExtractor(Extractor):
     root = "https://www.deviantart.com"
     directory_fmt = ("{category}", "{username}")
     filename_fmt = "{category}_{index}_{title}.{extension}"
-    cookiedomain = None
-    cookienames = ("auth", "auth_secure", "userinfo")
+    cookies_domain = None
+    cookies_names = ("auth", "auth_secure", "userinfo")
     _last_request = 0
 
     def __init__(self, match):
@@ -71,12 +71,13 @@ class DeviantartExtractor(Extractor):
         return num
 
     def login(self):
-        if not self._check_cookies(self.cookienames):
-            username, password = self._get_auth_info()
-            if not username:
-                return False
-            self._update_cookies(_login_impl(self, username, password))
-        return True
+        if self.cookies_check(self.cookies_names):
+            return True
+
+        username, password = self._get_auth_info()
+        if username:
+            self.cookies_update(_login_impl(self, username, password))
+            return True
 
     def items(self):
         self.api = DeviantartOAuthAPI(self)
@@ -1123,7 +1124,7 @@ class DeviantartScrapsExtractor(DeviantartExtractor):
     subcategory = "scraps"
     directory_fmt = ("{category}", "{username}", "Scraps")
     archive_fmt = "s_{_username}_{index}.{extension}"
-    cookiedomain = ".deviantart.com"
+    cookies_domain = ".deviantart.com"
     pattern = BASE_PATTERN + r"/gallery/(?:\?catpath=)?scraps\b"
     test = (
         ("https://www.deviantart.com/shimoda7/gallery/scraps", {
@@ -1146,7 +1147,7 @@ class DeviantartSearchExtractor(DeviantartExtractor):
     subcategory = "search"
     directory_fmt = ("{category}", "Search", "{search_tags}")
     archive_fmt = "Q_{search_tags}_{index}.{extension}"
-    cookiedomain = ".deviantart.com"
+    cookies_domain = ".deviantart.com"
     pattern = (r"(?:https?://)?www\.deviantart\.com"
                r"/search(?:/deviations)?/?\?([^#]+)")
     test = (
@@ -1205,7 +1206,7 @@ class DeviantartGallerySearchExtractor(DeviantartExtractor):
     """Extractor for deviantart gallery searches"""
     subcategory = "gallery-search"
     archive_fmt = "g_{_username}_{index}.{extension}"
-    cookiedomain = ".deviantart.com"
+    cookies_domain = ".deviantart.com"
     pattern = BASE_PATTERN + r"/gallery/?\?(q=[^#]+)"
     test = (
("https://www.deviantart.com/shimoda7/gallery?q=memory", { @@ -1869,7 +1870,7 @@ def _login_impl(extr, username, password): return { cookie.name: cookie.value - for cookie in extr.session.cookies + for cookie in extr.cookies } diff --git a/gallery_dl/extractor/erome.py b/gallery_dl/extractor/erome.py index 709bc576..cb527410 100644 --- a/gallery_dl/extractor/erome.py +++ b/gallery_dl/extractor/erome.py @@ -65,7 +65,7 @@ class EromeExtractor(Extractor): def request(self, url, **kwargs): if self.__cookies: self.__cookies = False - self.session.cookies.update(_cookie_cache()) + self.cookies.update(_cookie_cache()) for _ in range(5): response = Extractor.request(self, url, **kwargs) diff --git a/gallery_dl/extractor/exhentai.py b/gallery_dl/extractor/exhentai.py index 9cd7ae4e..087ff51c 100644 --- a/gallery_dl/extractor/exhentai.py +++ b/gallery_dl/extractor/exhentai.py @@ -23,8 +23,8 @@ class ExhentaiExtractor(Extractor): directory_fmt = ("{category}", "{gid} {title[:247]}") filename_fmt = "{gid}_{num:>04}_{image_token}_{filename}.{extension}" archive_fmt = "{gid}_{num}" - cookienames = ("ipb_member_id", "ipb_pass_hash") - cookiedomain = ".exhentai.org" + cookies_domain = ".exhentai.org" + cookies_names = ("ipb_member_id", "ipb_pass_hash") root = "https://exhentai.org" request_interval = 5.0 @@ -39,7 +39,7 @@ class ExhentaiExtractor(Extractor): if domain == "auto": domain = ("ex" if version == "ex" else "e-") + "hentai.org" self.root = "https://" + domain - self.cookiedomain = "." + domain + self.cookies_domain = "." + domain Extractor.__init__(self, match) self.original = self.config("original", True) @@ -53,7 +53,7 @@ class ExhentaiExtractor(Extractor): self.session.headers["Referer"] = self.root + "/" if version != "ex": - self.session.cookies.set("nw", "1", domain=self.cookiedomain) + self.cookies.set("nw", "1", domain=self.cookies_domain) def request(self, url, **kwargs): response = Extractor.request(self, url, **kwargs) @@ -66,17 +66,20 @@ class ExhentaiExtractor(Extractor): """Login and set necessary cookies""" if self.LIMIT: raise exception.StopExtraction("Image limit reached!") - if self._check_cookies(self.cookienames): + + if self.cookies_check(self.cookies_names): return + username, password = self._get_auth_info() if username: - self._update_cookies(self._login_impl(username, password)) - else: - self.log.info("no username given; using e-hentai.org") - self.root = "https://e-hentai.org" - self.original = False - self.limits = False - self.session.cookies["nw"] = "1" + return self.cookies_update(self._login_impl(username, password)) + + self.log.info("no username given; using e-hentai.org") + self.root = "https://e-hentai.org" + self.cookies_domain = ".e-hentai.org" + self.cookies.set("nw", "1", domain=self.cookies_domain) + self.original = False + self.limits = False @cache(maxage=90*24*3600, keyarg=1) def _login_impl(self, username, password): @@ -97,7 +100,7 @@ class ExhentaiExtractor(Extractor): response = self.request(url, method="POST", headers=headers, data=data) if b"You are now logged in as:" not in response.content: raise exception.AuthenticationError() - return {c: response.cookies[c] for c in self.cookienames} + return {c: response.cookies[c] for c in self.cookies_names} class ExhentaiGalleryExtractor(ExhentaiExtractor): @@ -390,8 +393,9 @@ class ExhentaiGalleryExtractor(ExhentaiExtractor): url = "https://e-hentai.org/home.php" cookies = { cookie.name: cookie.value - for cookie in self.session.cookies - if cookie.domain == self.cookiedomain and cookie.name != "igneous" 
+            for cookie in self.cookies
+            if cookie.domain == self.cookies_domain and
+            cookie.name != "igneous"
         }
 
         page = self.request(url, cookies=cookies).text
diff --git a/gallery_dl/extractor/fanbox.py b/gallery_dl/extractor/fanbox.py
index 373529f4..40ad8cdd 100644
--- a/gallery_dl/extractor/fanbox.py
+++ b/gallery_dl/extractor/fanbox.py
@@ -32,9 +32,8 @@ class FanboxExtractor(Extractor):
         self.embeds = self.config("embeds", True)
 
     def items(self):
-        if self._warning:
-            if not self._check_cookies(("FANBOXSESSID",)):
+        if not self.cookies_check(("FANBOXSESSID",)):
             self.log.warning("no 'FANBOXSESSID' cookie set")
             FanboxExtractor._warning = False
diff --git a/gallery_dl/extractor/fantia.py b/gallery_dl/extractor/fantia.py
index f92b9046..3679e375 100644
--- a/gallery_dl/extractor/fantia.py
+++ b/gallery_dl/extractor/fantia.py
@@ -35,7 +35,7 @@ class FantiaExtractor(Extractor):
         }
 
         if self._warning:
-            if not self._check_cookies(("_session_id",)):
+            if not self.cookies_check(("_session_id",)):
                 self.log.warning("no '_session_id' cookie set")
             FantiaExtractor._warning = False
diff --git a/gallery_dl/extractor/flickr.py b/gallery_dl/extractor/flickr.py
index d44ff3c8..9f97a331 100644
--- a/gallery_dl/extractor/flickr.py
+++ b/gallery_dl/extractor/flickr.py
@@ -20,7 +20,7 @@ class FlickrExtractor(Extractor):
     filename_fmt = "{category}_{id}.{extension}"
     directory_fmt = ("{category}", "{user[username]}")
     archive_fmt = "{id}"
-    cookiedomain = None
+    cookies_domain = None
 
     def __init__(self, match):
         Extractor.__init__(self, match)
diff --git a/gallery_dl/extractor/furaffinity.py b/gallery_dl/extractor/furaffinity.py
index ec9cd940..c03c89b2 100644
--- a/gallery_dl/extractor/furaffinity.py
+++ b/gallery_dl/extractor/furaffinity.py
@@ -20,7 +20,8 @@ class FuraffinityExtractor(Extractor):
     directory_fmt = ("{category}", "{user!l}")
     filename_fmt = "{id}{title:? //}.{extension}"
     archive_fmt = "{id}"
-    cookiedomain = ".furaffinity.net"
+    cookies_domain = ".furaffinity.net"
+    cookies_names = ("a", "b")
     root = "https://www.furaffinity.net"
     _warning = True
 
@@ -39,9 +40,8 @@ class FuraffinityExtractor(Extractor):
         self._new_layout = None
 
     def items(self):
-        if self._warning:
-            if not self._check_cookies(("a", "b")):
+        if not self.cookies_check(self.cookies_names):
             self.log.warning("no 'a' and 'b' session cookies set")
             FuraffinityExtractor._warning = False
@@ -371,7 +371,7 @@ class FuraffinityPostExtractor(FuraffinityExtractor):
 class FuraffinityUserExtractor(FuraffinityExtractor):
     """Extractor for furaffinity user profiles"""
     subcategory = "user"
-    cookiedomain = None
+    cookies_domain = None
     pattern = BASE_PATTERN + r"/user/([^/?#]+)"
     test = (
         ("https://www.furaffinity.net/user/mirlinthloth/", {
diff --git a/gallery_dl/extractor/gofile.py b/gallery_dl/extractor/gofile.py
index 044dddbd..60886a9d 100644
--- a/gallery_dl/extractor/gofile.py
+++ b/gallery_dl/extractor/gofile.py
@@ -72,7 +72,7 @@ class GofileFolderExtractor(Extractor):
         token = self.config("api-token")
         if not token:
             token = self._create_account()
-        self.session.cookies.set("accountToken", token, domain=".gofile.io")
+        self.cookies.set("accountToken", token, domain=".gofile.io")
         self.api_token = token
 
         self.website_token = (self.config("website-token") or
diff --git a/gallery_dl/extractor/hentaifoundry.py b/gallery_dl/extractor/hentaifoundry.py
index e01a4ed8..78a576df 100644
--- a/gallery_dl/extractor/hentaifoundry.py
+++ b/gallery_dl/extractor/hentaifoundry.py
@@ -20,7 +20,7 @@ class HentaifoundryExtractor(Extractor):
     directory_fmt = ("{category}", "{user}")
     filename_fmt = "{category}_{index}_{title}.{extension}"
     archive_fmt = "{index}"
-    cookiedomain = "www.hentai-foundry.com"
+    cookies_domain = "www.hentai-foundry.com"
     root = "https://www.hentai-foundry.com"
     per_page = 25
 
@@ -123,14 +123,14 @@ class HentaifoundryExtractor(Extractor):
 
     def _init_site_filters(self):
         """Set site-internal filters to show all images"""
-        if self.session.cookies.get("PHPSESSID", domain=self.cookiedomain):
+        if self.cookies.get("PHPSESSID", domain=self.cookies_domain):
             return
 
         url = self.root + "/?enterAgree=1"
         self.request(url, method="HEAD")
 
-        csrf_token = self.session.cookies.get(
-            "YII_CSRF_TOKEN", domain=self.cookiedomain)
+        csrf_token = self.cookies.get(
+            "YII_CSRF_TOKEN", domain=self.cookies_domain)
         if not csrf_token:
             self.log.warning("Unable to update site content filters")
             return
diff --git a/gallery_dl/extractor/idolcomplex.py b/gallery_dl/extractor/idolcomplex.py
index ce68d6d3..02f037dd 100644
--- a/gallery_dl/extractor/idolcomplex.py
+++ b/gallery_dl/extractor/idolcomplex.py
@@ -19,9 +19,9 @@ import re
 class IdolcomplexExtractor(SankakuExtractor):
     """Base class for idolcomplex extractors"""
     category = "idolcomplex"
-    cookienames = ("login", "pass_hash")
-    cookiedomain = "idol.sankakucomplex.com"
-    root = "https://" + cookiedomain
+    cookies_domain = "idol.sankakucomplex.com"
+    cookies_names = ("login", "pass_hash")
+    root = "https://" + cookies_domain
     request_interval = 5.0
 
     def __init__(self, match):
@@ -51,14 +51,14 @@ class IdolcomplexExtractor(SankakuExtractor):
         """Return an iterable containing all relevant post ids"""
 
     def login(self):
-        if self._check_cookies(self.cookienames):
+        if self.cookies_check(self.cookies_names):
             return
+
         username, password = self._get_auth_info()
         if username:
-            cookies = self._login_impl(username, password)
-            self._update_cookies(cookies)
-        else:
-            self.logged_in = False
+            return self.cookies_update(self._login_impl(username, password))
+
+        self.logged_in = False
 
     @cache(maxage=90*24*3600, keyarg=1)
     def _login_impl(self, username, password):
@@ -76,7 +76,7 @@
         if not response.history or response.url != self.root + "/user/home":
             raise exception.AuthenticationError()
         cookies = response.history[0].cookies
-        return {c: cookies[c] for c in self.cookienames}
+        return {c: cookies[c] for c in self.cookies_names}
 
     def _parse_post(self, post_id):
         """Extract metadata of a single post"""
diff --git a/gallery_dl/extractor/imagebam.py b/gallery_dl/extractor/imagebam.py
index f993db83..67d0b110 100644
--- a/gallery_dl/extractor/imagebam.py
+++ b/gallery_dl/extractor/imagebam.py
@@ -21,7 +21,7 @@ class ImagebamExtractor(Extractor):
     def __init__(self, match):
         Extractor.__init__(self, match)
         self.path = match.group(1)
-        self.session.cookies.set("nsfw_inter", "1", domain="www.imagebam.com")
+        self.cookies.set("nsfw_inter", "1", domain="www.imagebam.com")
 
     def _parse_image_page(self, path):
         page = self.request(self.root + path).text
diff --git a/gallery_dl/extractor/imgbb.py b/gallery_dl/extractor/imgbb.py
index a2210752..ee979a65 100644
--- a/gallery_dl/extractor/imgbb.py
+++ b/gallery_dl/extractor/imgbb.py
@@ -62,7 +62,7 @@ class ImgbbExtractor(Extractor):
     def login(self):
         username, password = self._get_auth_info()
         if username:
-            self._update_cookies(self._login_impl(username, password))
+            self.cookies_update(self._login_impl(username, password))
 
     @cache(maxage=360*24*3600, keyarg=1)
     def _login_impl(self, username, password):
@@ -82,7 +82,7 @@ class ImgbbExtractor(Extractor):
 
         if not response.history:
             raise exception.AuthenticationError()
-        return self.session.cookies
+        return self.cookies
 
     def _pagination(self, page, endpoint, params):
         data = None
diff --git a/gallery_dl/extractor/instagram.py b/gallery_dl/extractor/instagram.py
index faeffa6a..29208aef 100644
--- a/gallery_dl/extractor/instagram.py
+++ b/gallery_dl/extractor/instagram.py
@@ -27,8 +27,8 @@ class InstagramExtractor(Extractor):
     filename_fmt = "{sidecar_media_id:?/_/}{media_id}.{extension}"
     archive_fmt = "{media_id}"
     root = "https://www.instagram.com"
-    cookiedomain = ".instagram.com"
-    cookienames = ("sessionid",)
+    cookies_domain = ".instagram.com"
+    cookies_names = ("sessionid",)
     request_interval = (6.0, 12.0)
 
     def __init__(self, match):
@@ -44,6 +44,8 @@ class InstagramExtractor(Extractor):
 
     def items(self):
         self.login()
+        self.cookies.set(
+            "csrftoken", self.csrf_token, domain=self.cookies_domain)
 
         if self.config("api") == "graphql":
             self.api = InstagramGraphqlAPI(self)
@@ -131,14 +133,14 @@ class InstagramExtractor(Extractor):
         return response
 
     def login(self):
-        if not self._check_cookies(self.cookienames):
-            username, password = self._get_auth_info()
-            if username:
-                self._update_cookies(_login_impl(self, username, password))
-            else:
-                self._logged_in = False
-        self.session.cookies.set(
-            "csrftoken", self.csrf_token, domain=self.cookiedomain)
+        if self.cookies_check(self.cookies_names):
+            return
+
+        username, password = self._get_auth_info()
+        if username:
+            return self.cookies_update(_login_impl(self, username, password))
+
+        self._logged_in = False
 
     def _parse_post_rest(self, post):
         if "items" in post:  # story or highlight
diff --git a/gallery_dl/extractor/itchio.py b/gallery_dl/extractor/itchio.py
index 6034d122..96ebbdc4 100644
--- a/gallery_dl/extractor/itchio.py
+++ b/gallery_dl/extractor/itchio.py
@@ -63,7 +63,7 @@ class ItchioGameExtractor(Extractor):
"Origin": "https://{}.itch.io".format(self.user), } data = { - "csrf_token": text.unquote(self.session.cookies["itchio_token"]), + "csrf_token": text.unquote(self.cookies["itchio_token"]), } for upload_id in text.extract_iter(page, 'data-upload_id="', '"'): diff --git a/gallery_dl/extractor/kemonoparty.py b/gallery_dl/extractor/kemonoparty.py index 5aeefeba..d5d02c29 100644 --- a/gallery_dl/extractor/kemonoparty.py +++ b/gallery_dl/extractor/kemonoparty.py @@ -26,14 +26,14 @@ class KemonopartyExtractor(Extractor): directory_fmt = ("{category}", "{service}", "{user}") filename_fmt = "{id}_{title}_{num:>02}_{filename[:180]}.{extension}" archive_fmt = "{service}_{user}_{id}_{num}" - cookiedomain = ".kemono.party" + cookies_domain = ".kemono.party" def __init__(self, match): domain = match.group(1) tld = match.group(2) self.category = domain + "party" self.root = text.root_from_url(match.group(0)) - self.cookiedomain = ".{}.{}".format(domain, tld) + self.cookies_domain = ".{}.{}".format(domain, tld) Extractor.__init__(self, match) self.session.headers["Referer"] = self.root + "/" @@ -126,8 +126,8 @@ class KemonopartyExtractor(Extractor): def login(self): username, password = self._get_auth_info() if username: - self._update_cookies(self._login_impl( - (username, self.cookiedomain), password)) + self.cookies_update(self._login_impl( + (username, self.cookies_domain), password)) @cache(maxage=28*24*3600, keyarg=1) def _login_impl(self, username, password): diff --git a/gallery_dl/extractor/luscious.py b/gallery_dl/extractor/luscious.py index 57db0c9d..80f8758c 100644 --- a/gallery_dl/extractor/luscious.py +++ b/gallery_dl/extractor/luscious.py @@ -15,7 +15,7 @@ from .. import text, exception class LusciousExtractor(Extractor): """Base class for luscious extractors""" category = "luscious" - cookiedomain = ".luscious.net" + cookies_domain = ".luscious.net" root = "https://members.luscious.net" def _graphql(self, op, variables, query): diff --git a/gallery_dl/extractor/mangahere.py b/gallery_dl/extractor/mangahere.py index 531aef48..ccce09b4 100644 --- a/gallery_dl/extractor/mangahere.py +++ b/gallery_dl/extractor/mangahere.py @@ -114,7 +114,7 @@ class MangahereMangaExtractor(MangahereBase, MangaExtractor): def __init__(self, match): MangaExtractor.__init__(self, match) - self.session.cookies.set("isAdult", "1", domain="www.mangahere.cc") + self.cookies.set("isAdult", "1", domain="www.mangahere.cc") def chapters(self, page): results = [] diff --git a/gallery_dl/extractor/mangasee.py b/gallery_dl/extractor/mangasee.py index b7070f28..dfa9bdf0 100644 --- a/gallery_dl/extractor/mangasee.py +++ b/gallery_dl/extractor/mangasee.py @@ -93,7 +93,7 @@ class MangaseeChapterExtractor(MangaseeBase, ChapterExtractor): self.session.headers["Referer"] = self.gallery_url domain = self.root.rpartition("/")[2] - cookies = self.session.cookies + cookies = self.cookies if not cookies.get("PHPSESSID", domain=domain): cookies.set("PHPSESSID", util.generate_token(13), domain=domain) diff --git a/gallery_dl/extractor/mangoxo.py b/gallery_dl/extractor/mangoxo.py index ac4c7978..cca18b13 100644 --- a/gallery_dl/extractor/mangoxo.py +++ b/gallery_dl/extractor/mangoxo.py @@ -19,14 +19,14 @@ class MangoxoExtractor(Extractor): """Base class for mangoxo extractors""" category = "mangoxo" root = "https://www.mangoxo.com" - cookiedomain = "www.mangoxo.com" - cookienames = ("SESSION",) + cookies_domain = "www.mangoxo.com" + cookies_names = ("SESSION",) _warning = True def login(self): username, password = self._get_auth_info() if 
username: - self._update_cookies(self._login_impl(username, password)) + self.cookies_update(self._login_impl(username, password)) elif MangoxoExtractor._warning: MangoxoExtractor._warning = False self.log.warning("Unauthenticated users cannot see " @@ -51,7 +51,7 @@ class MangoxoExtractor(Extractor): data = response.json() if str(data.get("result")) != "1": raise exception.AuthenticationError(data.get("msg")) - return {"SESSION": self.session.cookies.get("SESSION")} + return {"SESSION": self.cookies.get("SESSION")} @staticmethod def _sign_by_md5(username, password, token): diff --git a/gallery_dl/extractor/mastodon.py b/gallery_dl/extractor/mastodon.py index e190c7eb..ddd34f0d 100644 --- a/gallery_dl/extractor/mastodon.py +++ b/gallery_dl/extractor/mastodon.py @@ -19,7 +19,7 @@ class MastodonExtractor(BaseExtractor): directory_fmt = ("mastodon", "{instance}", "{account[username]}") filename_fmt = "{category}_{id}_{media[id]}.{extension}" archive_fmt = "{media[id]}" - cookiedomain = None + cookies_domain = None def __init__(self, match): BaseExtractor.__init__(self, match) diff --git a/gallery_dl/extractor/newgrounds.py b/gallery_dl/extractor/newgrounds.py index e047f3df..e3ea3fc9 100644 --- a/gallery_dl/extractor/newgrounds.py +++ b/gallery_dl/extractor/newgrounds.py @@ -21,8 +21,8 @@ class NewgroundsExtractor(Extractor): filename_fmt = "{category}_{_index}_{title}.{extension}" archive_fmt = "{_type}{_index}" root = "https://www.newgrounds.com" - cookiedomain = ".newgrounds.com" - cookienames = ("NG_GG_username", "vmk1du5I8m") + cookies_domain = ".newgrounds.com" + cookies_names = ("NG_GG_username", "vmk1du5I8m") request_interval = 1.0 def __init__(self, match): @@ -72,11 +72,12 @@ class NewgroundsExtractor(Extractor): """Return general metadata""" def login(self): - if self._check_cookies(self.cookienames): + if self.cookies_check(self.cookies_names): return + username, password = self._get_auth_info() if username: - self._update_cookies(self._login_impl(username, password)) + self.cookies_update(self._login_impl(username, password)) @cache(maxage=360*24*3600, keyarg=1) def _login_impl(self, username, password): @@ -85,7 +86,7 @@ class NewgroundsExtractor(Extractor): url = self.root + "/passport/" response = self.request(url) if response.history and response.url.endswith("/social"): - return self.session.cookies + return self.cookies page = response.text headers = {"Origin": self.root, "Referer": url} @@ -105,7 +106,7 @@ class NewgroundsExtractor(Extractor): return { cookie.name: cookie.value for cookie in response.history[0].cookies - if cookie.expires and cookie.domain == self.cookiedomain + if cookie.expires and cookie.domain == self.cookies_domain } def extract_post(self, post_url): diff --git a/gallery_dl/extractor/nijie.py b/gallery_dl/extractor/nijie.py index 079bae76..e822895b 100644 --- a/gallery_dl/extractor/nijie.py +++ b/gallery_dl/extractor/nijie.py @@ -22,8 +22,8 @@ class NijieExtractor(AsynchronousMixin, BaseExtractor): def __init__(self, match): self._init_category(match) - self.cookiedomain = "." + self.root.rpartition("/")[2] - self.cookienames = (self.category + "_tok",) + self.cookies_domain = "." + self.root.rpartition("/")[2] + self.cookies_names = (self.category + "_tok",) if self.category == "horne": self._extract_data = self._extract_data_horne @@ -121,10 +121,11 @@ class NijieExtractor(AsynchronousMixin, BaseExtractor): return text.unescape(text.extr(page, "
", "<")) def login(self): - """Login and obtain session cookies""" - if not self._check_cookies(self.cookienames): - username, password = self._get_auth_info() - self._update_cookies(self._login_impl(username, password)) + if self.cookies_check(self.cookies_names): + return + + username, password = self._get_auth_info() + self.cookies_update(self._login_impl(username, password)) @cache(maxage=90*24*3600, keyarg=1) def _login_impl(self, username, password): @@ -139,7 +140,7 @@ class NijieExtractor(AsynchronousMixin, BaseExtractor): response = self.request(url, method="POST", data=data) if "/login.php" in response.text: raise exception.AuthenticationError() - return self.session.cookies + return self.cookies def _pagination(self, path): url = "{}/{}.php".format(self.root, path) @@ -172,7 +173,7 @@ BASE_PATTERN = NijieExtractor.update({ class NijieUserExtractor(NijieExtractor): """Extractor for nijie user profiles""" subcategory = "user" - cookiedomain = None + cookies_domain = None pattern = BASE_PATTERN + r"/members\.php\?id=(\d+)" test = ( ("https://nijie.info/members.php?id=44"), diff --git a/gallery_dl/extractor/nitter.py b/gallery_dl/extractor/nitter.py index beb3da25..fda169d8 100644 --- a/gallery_dl/extractor/nitter.py +++ b/gallery_dl/extractor/nitter.py @@ -21,7 +21,7 @@ class NitterExtractor(BaseExtractor): archive_fmt = "{tweet_id}_{num}" def __init__(self, match): - self.cookiedomain = self.root.partition("://")[2] + self.cookies_domain = self.root.partition("://")[2] BaseExtractor.__init__(self, match) lastindex = match.lastindex @@ -35,7 +35,7 @@ class NitterExtractor(BaseExtractor): if videos: ytdl = (videos == "ytdl") videos = True - self._cookiejar.set("hlsPlayback", "on", domain=self.cookiedomain) + self.cookies.set("hlsPlayback", "on", domain=self.cookies_domain) for tweet in self.tweets(): diff --git a/gallery_dl/extractor/paheal.py b/gallery_dl/extractor/paheal.py index 1fa571c4..7bccf838 100644 --- a/gallery_dl/extractor/paheal.py +++ b/gallery_dl/extractor/paheal.py @@ -21,7 +21,7 @@ class PahealExtractor(Extractor): root = "https://rule34.paheal.net" def items(self): - self.session.cookies.set( + self.cookies.set( "ui-tnc-agreed", "true", domain="rule34.paheal.net") data = self.get_metadata() diff --git a/gallery_dl/extractor/patreon.py b/gallery_dl/extractor/patreon.py index e4bfa2a8..99d9457a 100644 --- a/gallery_dl/extractor/patreon.py +++ b/gallery_dl/extractor/patreon.py @@ -19,7 +19,7 @@ class PatreonExtractor(Extractor): """Base class for patreon extractors""" category = "patreon" root = "https://www.patreon.com" - cookiedomain = ".patreon.com" + cookies_domain = ".patreon.com" directory_fmt = ("{category}", "{creator[full_name]}") filename_fmt = "{id}_{title}_{num:>02}.{extension}" archive_fmt = "{id}_{num}" @@ -28,11 +28,11 @@ class PatreonExtractor(Extractor): _warning = True def items(self): - if self._warning: - if not self._check_cookies(("session_id",)): + if not self.cookies_check(("session_id",)): self.log.warning("no 'session_id' cookie set") PatreonExtractor._warning = False + generators = self._build_file_generators(self.config("files")) for post in self.posts(): diff --git a/gallery_dl/extractor/pillowfort.py b/gallery_dl/extractor/pillowfort.py index 841a99bd..af7d57f1 100644 --- a/gallery_dl/extractor/pillowfort.py +++ b/gallery_dl/extractor/pillowfort.py @@ -24,7 +24,7 @@ class PillowfortExtractor(Extractor): filename_fmt = ("{post_id} {title|original_post[title]:?/ /}" "{num:>02}.{extension}") archive_fmt = "{id}" - cookiedomain = 
"www.pillowfort.social" + cookies_domain = "www.pillowfort.social" def __init__(self, match): Extractor.__init__(self, match) @@ -82,15 +82,14 @@ class PillowfortExtractor(Extractor): yield msgtype, url, post def login(self): - cget = self.session.cookies.get - if cget("_Pf_new_session", domain=self.cookiedomain) \ - or cget("remember_user_token", domain=self.cookiedomain): + if self.cookies.get("_Pf_new_session", domain=self.cookies_domain): + return + if self.cookies.get("remember_user_token", domain=self.cookies_domain): return username, password = self._get_auth_info() if username: - cookies = self._login_impl(username, password) - self._update_cookies(cookies) + self.cookies_update(self._login_impl(username, password)) @cache(maxage=14*24*3600, keyarg=1) def _login_impl(self, username, password): diff --git a/gallery_dl/extractor/pixiv.py b/gallery_dl/extractor/pixiv.py index 861959e4..8b77de47 100644 --- a/gallery_dl/extractor/pixiv.py +++ b/gallery_dl/extractor/pixiv.py @@ -26,7 +26,7 @@ class PixivExtractor(Extractor): directory_fmt = ("{category}", "{user[id]} {user[account]}") filename_fmt = "{id}_p{num}.{extension}" archive_fmt = "{id}{suffix}.{extension}" - cookiedomain = None + cookies_domain = None def __init__(self, match): Extractor.__init__(self, match) @@ -971,7 +971,7 @@ class PixivSketchExtractor(Extractor): filename_fmt = "{post_id} {id}.{extension}" archive_fmt = "S{user[id]}_{id}" root = "https://sketch.pixiv.net" - cookiedomain = ".pixiv.net" + cookies_domain = ".pixiv.net" pattern = r"(?:https?://)?sketch\.pixiv\.net/@([^/?#]+)" test = ("https://sketch.pixiv.net/@nicoby", { "pattern": r"https://img\-sketch\.pixiv\.net/uploads/medium" diff --git a/gallery_dl/extractor/pornhub.py b/gallery_dl/extractor/pornhub.py index fa4efa02..0b734a77 100644 --- a/gallery_dl/extractor/pornhub.py +++ b/gallery_dl/extractor/pornhub.py @@ -58,7 +58,7 @@ class PornhubGalleryExtractor(PornhubExtractor): self._first = None def items(self): - self.session.cookies.set( + self.cookies.set( "accessAgeDisclaimerPH", "1", domain=".pornhub.com") data = self.metadata() diff --git a/gallery_dl/extractor/reddit.py b/gallery_dl/extractor/reddit.py index 54b162b3..05da7f4a 100644 --- a/gallery_dl/extractor/reddit.py +++ b/gallery_dl/extractor/reddit.py @@ -19,7 +19,7 @@ class RedditExtractor(Extractor): directory_fmt = ("{category}", "{subreddit}") filename_fmt = "{id}{num:? 
//>02} {title[:220]}.{extension}" archive_fmt = "{filename}" - cookiedomain = ".reddit.com" + cookies_domain = ".reddit.com" request_interval = 0.6 def items(self): @@ -399,9 +399,9 @@ class RedditAPI(): if not self.refresh_token: # allow downloading from quarantined subreddits (#2180) - extractor._cookiejar.set( + extractor.cookies.set( "_options", '%7B%22pref_quarantine_optin%22%3A%20true%7D', - domain=extractor.cookiedomain) + domain=extractor.cookies_domain) def submission(self, submission_id): """Fetch the (submission, comments)=-tuple for a submission id""" diff --git a/gallery_dl/extractor/sankaku.py b/gallery_dl/extractor/sankaku.py index 09e5421d..ae25718c 100644 --- a/gallery_dl/extractor/sankaku.py +++ b/gallery_dl/extractor/sankaku.py @@ -25,7 +25,7 @@ class SankakuExtractor(BooruExtractor): basecategory = "booru" category = "sankaku" filename_fmt = "{category}_{id}_{md5}.{extension}" - cookiedomain = None + cookies_domain = None _warning = True TAG_TYPES = { diff --git a/gallery_dl/extractor/seiga.py b/gallery_dl/extractor/seiga.py index 711435ef..ab466149 100644 --- a/gallery_dl/extractor/seiga.py +++ b/gallery_dl/extractor/seiga.py @@ -16,7 +16,7 @@ class SeigaExtractor(Extractor): """Base class for seiga extractors""" category = "seiga" archive_fmt = "{image_id}" - cookiedomain = ".nicovideo.jp" + cookies_domain = ".nicovideo.jp" root = "https://seiga.nicovideo.jp" def __init__(self, match): @@ -24,7 +24,7 @@ class SeigaExtractor(Extractor): self.start_image = 0 def items(self): - if not self._check_cookies(("user_session",)): + if not self.cookies_check(("user_session",)): raise exception.StopExtraction("'user_session' cookie required") images = iter(self.get_images()) @@ -186,7 +186,7 @@ class SeigaImageExtractor(SeigaExtractor): return num def get_images(self): - self.session.cookies.set( + self.cookies.set( "skip_fetish_warning", "1", domain="seiga.nicovideo.jp") url = "{}/seiga/im{}".format(self.root, self.image_id) diff --git a/gallery_dl/extractor/senmanga.py b/gallery_dl/extractor/senmanga.py index 92c9d2cb..b3b27462 100644 --- a/gallery_dl/extractor/senmanga.py +++ b/gallery_dl/extractor/senmanga.py @@ -71,7 +71,7 @@ class SenmangaChapterExtractor(ChapterExtractor): self.session.headers["Referer"] = self.gallery_url # select "All pages" viewer - self.session.cookies.set( + self.cookies.set( "viewer", "1", domain="raw.senmanga.com") def metadata(self, page): diff --git a/gallery_dl/extractor/shimmie2.py b/gallery_dl/extractor/shimmie2.py index 285cd8fe..b0dd9bbd 100644 --- a/gallery_dl/extractor/shimmie2.py +++ b/gallery_dl/extractor/shimmie2.py @@ -29,7 +29,7 @@ class Shimmie2Extractor(BaseExtractor): cookies = instance.get("cookies") if cookies: domain = self.root.rpartition("/")[2] - self._update_cookies_dict(cookies, domain=domain) + self.cookies_update_dict(cookies, domain=domain) file_url = instance.get("file_url") if file_url: self.file_url_fmt = file_url diff --git a/gallery_dl/extractor/smugmug.py b/gallery_dl/extractor/smugmug.py index 713d4c41..e30c4911 100644 --- a/gallery_dl/extractor/smugmug.py +++ b/gallery_dl/extractor/smugmug.py @@ -21,7 +21,7 @@ class SmugmugExtractor(Extractor): category = "smugmug" filename_fmt = ("{category}_{User[NickName]:?/_/}" "{Image[UploadKey]}_{Image[ImageKey]}.{extension}") - cookiedomain = None + cookies_domain = None empty_user = { "Uri": "", "ResponseLevel": "Public", diff --git a/gallery_dl/extractor/subscribestar.py b/gallery_dl/extractor/subscribestar.py index 4de7e9b5..a2e1388c 100644 --- 
a/gallery_dl/extractor/subscribestar.py +++ b/gallery_dl/extractor/subscribestar.py @@ -22,14 +22,14 @@ class SubscribestarExtractor(Extractor): directory_fmt = ("{category}", "{author_name}") filename_fmt = "{post_id}_{id}.{extension}" archive_fmt = "{id}" - cookiedomain = "www.subscribestar.com" - cookienames = ("auth_token",) + cookies_domain = "www.subscribestar.com" + cookies_names = ("auth_token",) def __init__(self, match): tld, self.item = match.groups() if tld == "adult": self.root = "https://subscribestar.adult" - self.cookiedomain = "subscribestar.adult" + self.cookies_domain = "subscribestar.adult" self.subcategory += "-adult" Extractor.__init__(self, match) @@ -49,12 +49,12 @@ class SubscribestarExtractor(Extractor): """Yield HTML content of all relevant posts""" def login(self): - if self._check_cookies(self.cookienames): + if self.cookies_check(self.cookies_names): return + username, password = self._get_auth_info() if username: - cookies = self._login_impl(username, password) - self._update_cookies(cookies) + self.cookies_update(self._login_impl(username, password)) @cache(maxage=28*24*3600, keyarg=1) def _login_impl(self, username, password): diff --git a/gallery_dl/extractor/tapas.py b/gallery_dl/extractor/tapas.py index 545a95bb..ec4a249c 100644 --- a/gallery_dl/extractor/tapas.py +++ b/gallery_dl/extractor/tapas.py @@ -22,8 +22,8 @@ class TapasExtractor(Extractor): directory_fmt = ("{category}", "{series[title]}", "{id} {title}") filename_fmt = "{num:>02}.{extension}" archive_fmt = "{id}_{num}" - cookiedomain = ".tapas.io" - cookienames = ("_cpc_",) + cookies_domain = ".tapas.io" + cookies_names = ("_cpc_",) _cache = None def __init__(self, match): @@ -70,14 +70,17 @@ class TapasExtractor(Extractor): yield Message.Url, url, text.nameext_from_url(url, episode) def login(self): - if not self._check_cookies(self.cookienames): - username, password = self._get_auth_info() - if username: - self._update_cookies(self._login_impl(username, password)) - else: - sc = self.session.cookies.set - sc("birthDate" , "1981-02-03", domain=self.cookiedomain) - sc("adjustedBirthDate", "1981-02-03", domain=self.cookiedomain) + if self.cookies_check(self.cookies_names): + return + + username, password = self._get_auth_info() + if username: + return self.cookies_update(self._login_impl(username, password)) + + self.cookies.set( + "birthDate" , "1981-02-03", domain=self.cookies_domain) + self.cookies.set( + "adjustedBirthDate", "1981-02-03", domain=self.cookies_domain) @cache(maxage=14*24*3600, keyarg=1) def _login_impl(self, username, password): diff --git a/gallery_dl/extractor/tsumino.py b/gallery_dl/extractor/tsumino.py index 92bd6347..e7d5226a 100644 --- a/gallery_dl/extractor/tsumino.py +++ b/gallery_dl/extractor/tsumino.py @@ -16,15 +16,15 @@ from ..cache import cache class TsuminoBase(): """Base class for tsumino extractors""" category = "tsumino" - cookiedomain = "www.tsumino.com" + cookies_domain = "www.tsumino.com" root = "https://www.tsumino.com" def login(self): username, password = self._get_auth_info() if username: - self._update_cookies(self._login_impl(username, password)) + self.cookies_update(self._login_impl(username, password)) else: - self.session.cookies.setdefault( + self.cookies.setdefault( "ASP.NET_SessionId", "x1drgggilez4cpkttneukrc5") @cache(maxage=14*24*3600, keyarg=1) @@ -37,7 +37,7 @@ class TsuminoBase(): response = self.request(url, method="POST", headers=headers, data=data) if not response.history: raise exception.AuthenticationError() - return 
self.session.cookies + return self.cookies class TsuminoGalleryExtractor(TsuminoBase, GalleryExtractor): diff --git a/gallery_dl/extractor/tumblr.py b/gallery_dl/extractor/tumblr.py index b45609d7..f42da488 100644 --- a/gallery_dl/extractor/tumblr.py +++ b/gallery_dl/extractor/tumblr.py @@ -31,7 +31,7 @@ class TumblrExtractor(Extractor): directory_fmt = ("{category}", "{blog_name}") filename_fmt = "{category}_{blog_name}_{id}_{num:>02}.{extension}" archive_fmt = "{id}_{num}" - cookiedomain = None + cookies_domain = None def __init__(self, match): Extractor.__init__(self, match) diff --git a/gallery_dl/extractor/twitter.py b/gallery_dl/extractor/twitter.py index 092ddb49..7e420799 100644 --- a/gallery_dl/extractor/twitter.py +++ b/gallery_dl/extractor/twitter.py @@ -24,8 +24,8 @@ class TwitterExtractor(Extractor): directory_fmt = ("{category}", "{user[name]}") filename_fmt = "{tweet_id}_{num}.{extension}" archive_fmt = "{tweet_id}_{retweet_id}_{num}" - cookiedomain = ".twitter.com" - cookienames = ("auth_token",) + cookies_domain = ".twitter.com" + cookies_names = ("auth_token",) root = "https://twitter.com" browser = "firefox" @@ -455,10 +455,12 @@ class TwitterExtractor(Extractor): """Yield all relevant tweet objects""" def login(self): - if not self._check_cookies(self.cookienames): - username, password = self._get_auth_info() - if username: - self._update_cookies(_login_impl(self, username, password)) + if self.cookies_check(self.cookies_names): + return + + username, password = self._get_auth_info() + if username: + self.cookies_update(_login_impl(self, username, password)) class TwitterUserExtractor(TwitterExtractor): @@ -1121,19 +1123,19 @@ class TwitterAPI(): self._syndication = self.extractor.syndication self._json_dumps = json.JSONEncoder(separators=(",", ":")).encode - cookies = extractor.session.cookies - cookiedomain = extractor.cookiedomain + cookies = extractor.cookies + cookies_domain = extractor.cookies_domain csrf = extractor.config("csrf") if csrf is None or csrf == "cookies": - csrf_token = cookies.get("ct0", domain=cookiedomain) + csrf_token = cookies.get("ct0", domain=cookies_domain) else: csrf_token = None if not csrf_token: csrf_token = util.generate_token() - cookies.set("ct0", csrf_token, domain=cookiedomain) + cookies.set("ct0", csrf_token, domain=cookies_domain) - auth_token = cookies.get("auth_token", domain=cookiedomain) + auth_token = cookies.get("auth_token", domain=cookies_domain) self.headers = { "Accept": "*/*", @@ -1489,8 +1491,8 @@ class TwitterAPI(): guest_token = self._guest_token() if guest_token != self.headers["x-guest-token"]: self.headers["x-guest-token"] = guest_token - self.extractor.session.cookies.set( - "gt", guest_token, domain=self.extractor.cookiedomain) + self.extractor.cookies.set( + "gt", guest_token, domain=self.extractor.cookies_domain) def _call(self, endpoint, params, method="GET", auth=True, root=None): url = (root or self.root) + endpoint @@ -1683,8 +1685,8 @@ class TwitterAPI(): if user.get("blocked_by"): if self.headers["x-twitter-auth-type"] and \ extr.config("logout"): - extr._cookiefile = None - del extr.session.cookies["auth_token"] + extr.cookies_file = None + del extr.cookies["auth_token"] self.headers["x-twitter-auth-type"] = None extr.log.info("Retrying API request as guest") continue @@ -1938,7 +1940,7 @@ def _login_impl(extr, username, password): extr.log.debug(response.text) raise exception.AuthenticationError(", ".join(errors)) - extr.session.cookies.clear() + extr.cookies.clear() api = TwitterAPI(extr) 
     api._authenticate_guest()
     headers = api.headers
@@ -2078,5 +2080,5 @@
 
     return {
         cookie.name: cookie.value
-        for cookie in extr.session.cookies
+        for cookie in extr.cookies
     }
diff --git a/gallery_dl/extractor/vipergirls.py b/gallery_dl/extractor/vipergirls.py
index 6dff01c8..d8aa6cdb 100644
--- a/gallery_dl/extractor/vipergirls.py
+++ b/gallery_dl/extractor/vipergirls.py
@@ -23,8 +23,8 @@ class VipergirlsExtractor(Extractor):
     root = "https://vipergirls.to"
     request_interval = 0.5
     request_interval_min = 0.2
-    cookiedomain = ".vipergirls.to"
-    cookienames = ("vg_userid", "vg_password")
+    cookies_domain = ".vipergirls.to"
+    cookies_names = ("vg_userid", "vg_password")
 
     def __init__(self, match):
         Extractor.__init__(self, match)
@@ -42,10 +42,12 @@ class VipergirlsExtractor(Extractor):
             yield Message.Queue, image.attrib["main_url"], data
 
     def login(self):
-        if not self._check_cookies(self.cookienames):
-            username, password = self._get_auth_info()
-            if username:
-                self._update_cookies(self._login_impl(username, password))
+        if self.cookies_check(self.cookies_names):
+            return
+
+        username, password = self._get_auth_info()
+        if username:
+            self.cookies_update(self._login_impl(username, password))
 
     @cache(maxage=90*24*3600, keyarg=1)
     def _login_impl(self, username, password):
diff --git a/gallery_dl/extractor/webtoons.py b/gallery_dl/extractor/webtoons.py
index 21f7c21e..7b3e8033 100644
--- a/gallery_dl/extractor/webtoons.py
+++ b/gallery_dl/extractor/webtoons.py
@@ -18,10 +18,10 @@ BASE_PATTERN = r"(?:https?://)?(?:www\.)?webtoons\.com/(([^/?#]+)"
 class WebtoonsBase():
     category = "webtoons"
     root = "https://www.webtoons.com"
-    cookiedomain = ".webtoons.com"
+    cookies_domain = ".webtoons.com"
 
     def setup_agegate_cookies(self):
-        self._update_cookies({
+        self.cookies_update({
             "atGDPR" : "AD_CONSENT",
             "needCCPA" : "false",
             "needCOPPA" : "false",
diff --git a/gallery_dl/extractor/weibo.py b/gallery_dl/extractor/weibo.py
index 5a3adc80..2de7a2fc 100644
--- a/gallery_dl/extractor/weibo.py
+++ b/gallery_dl/extractor/weibo.py
@@ -34,7 +34,7 @@ class WeiboExtractor(Extractor):
 
         cookies = _cookie_cache()
         if cookies is not None:
-            self.session.cookies.update(cookies)
+            self.cookies.update(cookies)
         self.session.headers["Referer"] = self.root + "/"
 
     def request(self, url, **kwargs):
diff --git a/gallery_dl/extractor/ytdl.py b/gallery_dl/extractor/ytdl.py
index b3a16521..7f3c8de7 100644
--- a/gallery_dl/extractor/ytdl.py
+++ b/gallery_dl/extractor/ytdl.py
@@ -76,7 +76,7 @@ class YoutubeDLExtractor(Extractor):
             ytdl_module, self, user_opts, extr_opts)
 
         # transfer cookies to ytdl
-        cookies = self.session.cookies
+        cookies = self.cookies
         if cookies:
             set_cookie = ytdl_instance.cookiejar.set_cookie
             for cookie in cookies:
diff --git a/gallery_dl/extractor/zerochan.py b/gallery_dl/extractor/zerochan.py
index 148b92af..8187db88 100644
--- a/gallery_dl/extractor/zerochan.py
+++ b/gallery_dl/extractor/zerochan.py
@@ -21,17 +21,19 @@ class ZerochanExtractor(BooruExtractor):
     root = "https://www.zerochan.net"
     filename_fmt = "{id}.{extension}"
     archive_fmt = "{id}"
-    cookiedomain = ".zerochan.net"
-    cookienames = ("z_id", "z_hash")
+    cookies_domain = ".zerochan.net"
+    cookies_names = ("z_id", "z_hash")
 
     def login(self):
         self._logged_in = True
-        if not self._check_cookies(self.cookienames):
-            username, password = self._get_auth_info()
-            if username:
-                self._update_cookies(self._login_impl(username, password))
-            else:
-                self._logged_in = False
+        if self.cookies_check(self.cookies_names):
+            return
+
+        username, password = self._get_auth_info()
+        if username:
+            return self.cookies_update(self._login_impl(username, password))
+
+        self._logged_in = False
 
     @cache(maxage=90*86400, keyarg=1)
     def _login_impl(self, username, password):
diff --git a/gallery_dl/job.py b/gallery_dl/job.py
index ca5785d9..7ecdc391 100644
--- a/gallery_dl/job.py
+++ b/gallery_dl/job.py
@@ -378,7 +378,7 @@ class DownloadJob(Job):
                 for callback in hooks["post-after"]:
                     callback(pathfmt)
 
-        self.extractor._store_cookies()
+        self.extractor.cookies_store()
         if "finalize" in hooks:
             status = self.status
             for callback in hooks["finalize"]:
diff --git a/test/test_cookies.py b/test/test_cookies.py
index 335fa3dd..5a4fbe65 100644
--- a/test/test_cookies.py
+++ b/test/test_cookies.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-# Copyright 2017-2022 Mike Fährmann
+# Copyright 2017-2023 Mike Fährmann
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
@@ -47,7 +47,7 @@ class TestCookiejar(unittest.TestCase):
 
     def test_cookiefile(self):
         config.set((), "cookies", self.cookiefile)
-        cookies = extractor.find("test:").session.cookies
+        cookies = extractor.find("test:").cookies
         self.assertEqual(len(cookies), 1)
 
         cookie = next(iter(cookies))
@@ -66,7 +66,7 @@ class TestCookiejar(unittest.TestCase):
             config.set((), "cookies", filename)
             log = logging.getLogger("test")
             with mock.patch.object(log, "warning") as mock_warning:
-                cookies = extractor.find("test:").session.cookies
+                cookies = extractor.find("test:").cookies
                 self.assertEqual(len(cookies), 0)
                 self.assertEqual(mock_warning.call_count, 1)
                 self.assertEqual(mock_warning.call_args[0][0], "cookies: %s")
@@ -83,7 +83,7 @@ class TestCookiedict(unittest.TestCase):
         config.clear()
 
     def test_dict(self):
-        cookies = extractor.find("test:").session.cookies
+        cookies = extractor.find("test:").cookies
         self.assertEqual(len(cookies), len(self.cdict))
         self.assertEqual(sorted(cookies.keys()), sorted(self.cdict.keys()))
         self.assertEqual(sorted(cookies.values()), sorted(self.cdict.values()))
@@ -91,11 +91,11 @@ class TestCookiedict(unittest.TestCase):
     def test_domain(self):
         for category in ["exhentai", "idolcomplex", "nijie", "horne"]:
             extr = _get_extractor(category)
-            cookies = extr.session.cookies
+            cookies = extr.cookies
             for key in self.cdict:
                 self.assertTrue(key in cookies)
             for c in cookies:
-                self.assertEqual(c.domain, extr.cookiedomain)
+                self.assertEqual(c.domain, extr.cookies_domain)
 
 
 class TestCookieLogin(unittest.TestCase):
@@ -123,79 +123,79 @@ class TestCookieUtils(unittest.TestCase):
 
     def test_check_cookies(self):
         extr = extractor.find("test:")
-        self.assertFalse(extr._cookiejar, "empty")
-        self.assertFalse(extr.cookiedomain, "empty")
+        self.assertFalse(extr.cookies, "empty")
+        self.assertFalse(extr.cookies_domain, "empty")
 
         # always returns False when checking for empty cookie list
-        self.assertFalse(extr._check_cookies(()))
+        self.assertFalse(extr.cookies_check(()))
 
-        self.assertFalse(extr._check_cookies(("a",)))
-        self.assertFalse(extr._check_cookies(("a", "b")))
-        self.assertFalse(extr._check_cookies(("a", "b", "c")))
+        self.assertFalse(extr.cookies_check(("a",)))
+        self.assertFalse(extr.cookies_check(("a", "b")))
+        self.assertFalse(extr.cookies_check(("a", "b", "c")))
 
-        extr._cookiejar.set("a", "1")
-        self.assertTrue(extr._check_cookies(("a",)))
-        self.assertFalse(extr._check_cookies(("a", "b")))
-        self.assertFalse(extr._check_cookies(("a", "b", "c")))
+        extr.cookies.set("a", "1")
+        self.assertTrue(extr.cookies_check(("a",)))
+        self.assertFalse(extr.cookies_check(("a", "b")))
+        self.assertFalse(extr.cookies_check(("a", "b", "c")))
 
-        extr._cookiejar.set("b", "2")
-        self.assertTrue(extr._check_cookies(("a",)))
-        self.assertTrue(extr._check_cookies(("a", "b")))
-        self.assertFalse(extr._check_cookies(("a", "b", "c")))
+        extr.cookies.set("b", "2")
+        self.assertTrue(extr.cookies_check(("a",)))
+        self.assertTrue(extr.cookies_check(("a", "b")))
+        self.assertFalse(extr.cookies_check(("a", "b", "c")))
 
     def test_check_cookies_domain(self):
         extr = extractor.find("test:")
-        self.assertFalse(extr._cookiejar, "empty")
-        extr.cookiedomain = ".example.org"
+        self.assertFalse(extr.cookies, "empty")
+        extr.cookies_domain = ".example.org"
 
-        self.assertFalse(extr._check_cookies(("a",)))
-        self.assertFalse(extr._check_cookies(("a", "b")))
+        self.assertFalse(extr.cookies_check(("a",)))
+        self.assertFalse(extr.cookies_check(("a", "b")))
 
-        extr._cookiejar.set("a", "1")
-        self.assertFalse(extr._check_cookies(("a",)))
+        extr.cookies.set("a", "1")
+        self.assertFalse(extr.cookies_check(("a",)))
 
-        extr._cookiejar.set("a", "1", domain=extr.cookiedomain)
-        self.assertTrue(extr._check_cookies(("a",)))
+        extr.cookies.set("a", "1", domain=extr.cookies_domain)
+        self.assertTrue(extr.cookies_check(("a",)))
 
-        extr._cookiejar.set("a", "1", domain="www" + extr.cookiedomain)
-        self.assertEqual(len(extr._cookiejar), 3)
-        self.assertTrue(extr._check_cookies(("a",)))
+        extr.cookies.set("a", "1", domain="www" + extr.cookies_domain)
+        self.assertEqual(len(extr.cookies), 3)
+        self.assertTrue(extr.cookies_check(("a",)))
 
-        extr._cookiejar.set("b", "2", domain=extr.cookiedomain)
-        extr._cookiejar.set("c", "3", domain=extr.cookiedomain)
-        self.assertTrue(extr._check_cookies(("a", "b", "c")))
+        extr.cookies.set("b", "2", domain=extr.cookies_domain)
+        extr.cookies.set("c", "3", domain=extr.cookies_domain)
+        self.assertTrue(extr.cookies_check(("a", "b", "c")))
 
     def test_check_cookies_expires(self):
         extr = extractor.find("test:")
-        self.assertFalse(extr._cookiejar, "empty")
-        self.assertFalse(extr.cookiedomain, "empty")
+        self.assertFalse(extr.cookies, "empty")
+        self.assertFalse(extr.cookies_domain, "empty")
 
         now = int(time.time())
         log = logging.getLogger("test")
 
-        extr._cookiejar.set("a", "1", expires=now-100)
+        extr.cookies.set("a", "1", expires=now-100)
         with mock.patch.object(log, "warning") as mw:
-            self.assertFalse(extr._check_cookies(("a",)))
+            self.assertFalse(extr.cookies_check(("a",)))
         self.assertEqual(mw.call_count, 1)
         self.assertEqual(mw.call_args[0], ("Cookie '%s' has expired", "a"))
 
-        extr._cookiejar.set("a", "1", expires=now+100)
+        extr.cookies.set("a", "1", expires=now+100)
         with mock.patch.object(log, "warning") as mw:
-            self.assertTrue(extr._check_cookies(("a",)))
+            self.assertTrue(extr.cookies_check(("a",)))
         self.assertEqual(mw.call_count, 1)
         self.assertEqual(mw.call_args[0], (
             "Cookie '%s' will expire in less than %s hour%s", "a", 1, ""))
 
-        extr._cookiejar.set("a", "1", expires=now+100+7200)
+        extr.cookies.set("a", "1", expires=now+100+7200)
         with mock.patch.object(log, "warning") as mw:
-            self.assertTrue(extr._check_cookies(("a",)))
+            self.assertTrue(extr.cookies_check(("a",)))
         self.assertEqual(mw.call_count, 1)
         self.assertEqual(mw.call_args[0], (
            "Cookie '%s' will expire in less than %s hour%s", "a", 3, "s"))
 
-        extr._cookiejar.set("a", "1", expires=now+100+24*3600)
+        extr.cookies.set("a", "1", expires=now+100+24*3600)
         with mock.patch.object(log, "warning") as mw:
-            self.assertTrue(extr._check_cookies(("a",)))
+            self.assertTrue(extr.cookies_check(("a",)))
         self.assertEqual(mw.call_count, 0)
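
Note: as a quick reference for the renamed cookie API applied throughout this patch, below is a minimal, illustrative sketch (not part of the diff) of what an extractor's login flow looks like after the rename. The ExampleExtractor class, its domain, cookie name, and login URL are hypothetical; the pattern mirrors the aryion/newgrounds/subscribestar changes above.

# hypothetical extractor module placed inside gallery_dl/extractor/
from .common import Extractor
from .. import exception
from ..cache import cache


class ExampleExtractor(Extractor):
    """Sketch of an extractor using the renamed cookie helpers"""
    category = "example"
    root = "https://www.example.org"
    cookies_domain = ".example.org"   # was 'cookiedomain'
    cookies_names = ("sessionid",)    # was 'cookienames'

    def login(self):
        # early return when the required cookies are already present
        # (was self._check_cookies(self.cookienames))
        if self.cookies_check(self.cookies_names):
            return

        username, password = self._get_auth_info()
        if username:
            # merge freshly obtained cookies into self.cookies, the session
            # cookiejar (was self._update_cookies / self._cookiejar)
            return self.cookies_update(self._login_impl(username, password))

    @cache(maxage=28*24*3600, keyarg=1)
    def _login_impl(self, username, password):
        self.log.info("Logging in as %s", username)
        url = self.root + "/login"
        data = {"username": username, "password": password}
        response = self.request(url, method="POST", data=data)
        if not response.history:
            raise exception.AuthenticationError()
        return {c: response.cookies[c] for c in self.cookies_names}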