
[sankaku] remove login support

The old login method for 'https://chan.sankakucomplex.com/user/login'
and the cookies it produces have no effect on the results from
'beta.sankakucomplex.com'.
Mike Fährmann 2020-12-08 21:05:47 +01:00
parent 7f3d811d7b
commit b2c55f0a72
8 changed files with 51 additions and 90 deletions
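
The commit message above can be illustrated with a short sketch (not part of the commit): the form-based login posts credentials to the chan subdomain and receives 'login'/'pass_hash' cookies, which the beta site simply ignores. The URL, form fields, and cookie names are taken from the code removed below; the credentials are placeholders.

import requests

session = requests.Session()
session.post(
    "https://chan.sankakucomplex.com/user/authenticate",
    data={
        "url"           : "",
        "user[name]"    : "placeholder-user",      # placeholder, not real
        "user[password]": "placeholder-password",  # placeholder, not real
        "commit"        : "Login",
    },
)

# A successful login sets 'login' and 'pass_hash' cookies scoped to
# chan.sankakucomplex.com; requests to beta.sankakucomplex.com return
# the same results with or without them, hence the removal below.
print({c.name: c.domain for c in session.cookies})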

View File

@@ -213,9 +213,18 @@ Some extractors require you to provide valid login credentials in the form of
a username & password pair. This is necessary for
``pixiv``, ``nijie``, and ``seiga``
and optional for
``aryion``, ``danbooru``, ``e621``, ``exhentai``, ``idolcomplex``, ``inkbunny``,
``instagram``, ``luscious``, ``pinterest``, ``sankaku``, ``subscribestar``,
``tsumino``, and ``twitter``.
``aryion``,
``danbooru``,
``e621``,
``exhentai``,
``idolcomplex``,
``inkbunny``,
``instagram``,
``luscious``,
``pinterest``,
``subscribestar``,
``tsumino``,
and ``twitter``.
You can set the necessary information in your configuration file
(cf. gallery-dl.conf_)
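
For example, credentials for one of the optional sites above go into the configuration file like the extractor entries shown in the example config later in this commit; the values here are placeholders:

{
    "extractor": {
        "idolcomplex": {
            "username": "your-username",
            "password": "your-password"
        }
    }
}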

View File

@@ -286,7 +286,6 @@ Description
    * ``instagram``
    * ``luscious``
    * ``pinterest``
    * ``sankaku``
    * ``subscribestar``
    * ``tsumino``
    * ``twitter``
@@ -1355,20 +1354,6 @@ Description
    available format is found.


extractor.sankaku.wait-min & .wait-max
--------------------------------------
Type
    ``float``
Default
    ``3.0`` and ``6.0``
Description
    Minimum and maximum wait time in seconds between each image

    Sankaku Channel responds with ``429 Too Many Requests`` if it
    receives too many HTTP requests in a certain amount of time.
    Waiting a few seconds between each request tries to prevent that.


extractor.sankakucomplex.embeds
-------------------------------
Type
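
The removed wait-min/wait-max options above throttled requests to stay under the rate limit; a minimal sketch of such a throttle, assuming a uniformly distributed delay (the helper name is illustrative, not gallery-dl's API):

import random
import time

def throttle(wait_min=3.0, wait_max=6.0):
    """Sleep for a random interval between wait_min and wait_max seconds
    so consecutive requests are spaced out and the server is less likely
    to answer with 429 Too Many Requests."""
    time.sleep(random.uniform(wait_min, wait_max))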

View File

@@ -128,13 +128,6 @@
        {
            "format": "mp4"
        },
        "sankaku":
        {
            "username": null,
            "password": null,
            "wait-min": 3.0,
            "wait-max": 6.0
        },
        "seiga":
        {
            "username": null,

View File

@@ -112,7 +112,7 @@ rule #34 https://rule34.paheal.net/ Posts, Tag Searches
Rule 34 https://rule34.xxx/ Pools, Posts, Tag Searches
Safebooru https://safebooru.org/ Pools, Posts, Tag Searches
Sakugabooru https://www.sakugabooru.com/ Pools, Popular Images, Posts, Tag Searches
Sankaku Channel https://chan.sankakucomplex.com/ Pools, Posts, Tag Searches Supported
Sankaku Channel https://chan.sankakucomplex.com/ Pools, Posts, Tag Searches
Sankaku Complex https://www.sankakucomplex.com/ Articles, Tag Searches
Sen Manga https://raw.senmanga.com/ Chapters
Sense-Scans https://sensescans.com/reader/ Chapters, Manga
@@ -158,7 +158,7 @@ Turboimagehost https://www.turboimagehost.com/ individual Images
.. |furaffinity-C| replace:: Favorites, Galleries, Posts, Scraps, Search Results, User Profiles
.. |hentaifoundry-C| replace:: Favorites, individual Images, Pictures, Popular Images, Recent Images, Scraps, Stories, User Profiles
.. |imgur-C| replace:: Albums, Favorites, Galleries, individual Images, Search Results, Subreddits, Tag Searches, User Profiles
.. |instagram-C| replace:: Channels, individual Images, Saved Posts, Stories, Tag Searches, User Profiles
.. |instagram-C| replace:: Channels, Posts, Saved Posts, Stories, Tag Searches, User Profiles
.. |newgrounds-C| replace:: Art, Audio, Favorites, individual Images, Media Files, Movies, User Profiles
.. |nijie-C| replace:: Doujin, Favorites, Illustrations, individual Images, User Profiles
.. |pixiv-C| replace:: Favorites, Follows, pixiv.me Links, Rankings, Search Results, User Profiles, individual Images

View File

@@ -10,6 +10,7 @@
from .sankaku import SankakuExtractor
from .common import Message
from ..cache import cache
from .. import text, util, exception
import collections
import random
@@ -20,9 +21,9 @@ import re
class IdolcomplexExtractor(SankakuExtractor):
    """Base class for idolcomplex extractors"""
    category = "idolcomplex"
    cookienames = ("login", "pass_hash")
    cookiedomain = "idol.sankakucomplex.com"
    root = "https://" + cookiedomain
    subdomain = "idol"

    def __init__(self, match):
        SankakuExtractor.__init__(self, match)
@@ -55,6 +56,34 @@ class IdolcomplexExtractor(SankakuExtractor):
    def post_ids(self):
        """Return an iterable containing all relevant post ids"""

    def login(self):
        if self._check_cookies(self.cookienames):
            return
        username, password = self._get_auth_info()
        if username:
            cookies = self._login_impl(username, password)
            self._update_cookies(cookies)
        else:
            self.logged_in = False

    @cache(maxage=90*24*3600, keyarg=1)
    def _login_impl(self, username, password):
        self.log.info("Logging in as %s", username)
        url = self.root + "/user/authenticate"
        data = {
            "url"           : "",
            "user[name]"    : username,
            "user[password]": password,
            "commit"        : "Login",
        }
        response = self.request(url, method="POST", data=data)
        if not response.history or response.url != self.root + "/user/home":
            raise exception.AuthenticationError()
        cookies = response.history[0].cookies
        return {c: cookies[c] for c in self.cookienames}

    def _parse_post(self, post_id):
        """Extract metadata of a single post"""
        url = self.root + "/post/show/" + post_id
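
The @cache(maxage=90*24*3600, keyarg=1) decorator above keeps the cookies returned by _login_impl for 90 days, keyed on the username (positional argument 1), so later runs reuse them instead of logging in again. A simplified in-memory stand-in for that behavior (gallery-dl's real decorator also persists its entries):

import functools
import time

def cache(maxage, keyarg):
    """Simplified stand-in: memoize the decorated function's result
    per key for maxage seconds."""
    def decorator(func):
        store = {}  # key -> (value, expiry timestamp)

        @functools.wraps(func)
        def wrapper(*args):
            key = args[keyarg]
            value, expires = store.get(key, (None, 0))
            if time.time() >= expires:
                value = func(*args)
                store[key] = (value, time.time() + maxage)
            return value
        return wrapper
    return decorator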

View File

@@ -8,23 +8,19 @@

"""Extractors for https://chan.sankakucomplex.com/"""

from .common import Extractor, Message
from .booru import BooruExtractor
from .. import text, exception
from ..cache import cache
import collections

BASE_PATTERN = r"(?:https?://)?(?:beta|chan)\.sankakucomplex\.com"


class SankakuExtractor(Extractor):
class SankakuExtractor(BooruExtractor):
    """Base class for sankaku channel extractors"""
    basecategory = "booru"
    category = "sankaku"
    filename_fmt = "{category}_{id}_{md5}.{extension}"
    cookienames = ("login", "pass_hash")
    cookiedomain = "chan.sankakucomplex.com"
    request_interval_min = 1.0
    subdomain = "chan"
    per_page = 100

    TAG_TYPES = {
@@ -40,56 +36,6 @@ class SankakuExtractor(Extractor):
        9: "meta",
    }

    def items(self):
        extended_tags = self.config("tags", False)
        self.login()
        data = self.metadata()

        for post in self.posts():
            try:
                url = self._prepare_post(post, extended_tags)
            except KeyError:
                continue
            post.update(data)
            text.nameext_from_url(url, post)
            yield Message.Directory, post
            yield Message.Url, url, post

    def metadata(self):
        return ()

    def posts(self):
        return ()

    def login(self):
        """Login and set necessary cookies"""
        if self._check_cookies(self.cookienames):
            return
        username, password = self._get_auth_info()
        if username:
            cookies = self._login_impl((username, self.subdomain), password)
            self._update_cookies(cookies)
        else:
            self.logged_in = False

    @cache(maxage=90*24*3600, keyarg=1)
    def _login_impl(self, usertuple, password):
        username = usertuple[0]
        self.log.info("Logging in as %s", username)
        url = self.root + "/user/authenticate"
        data = {
            "url"           : "",
            "user[name]"    : username,
            "user[password]": password,
            "commit"        : "Login",
        }
        response = self.request(url, method="POST", data=data)
        if not response.history or response.url != self.root + "/user/home":
            raise exception.AuthenticationError()
        cookies = response.history[0].cookies
        return {c: cookies[c] for c in self.cookienames}

    def _prepare_post(self, post, extended_tags=False):
        url = post["file_url"]
        if url[0] == "/":
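
The diff cuts off inside _prepare_post; the url[0] == "/" check handles relative file URLs. A hedged sketch of that normalization pattern, with the continuation assumed since it is not shown above:

def absolutize(file_url, root="https://chan.sankakucomplex.com"):
    """Prefix relative file URLs with the site root; URLs that already
    carry a scheme and host pass through unchanged (assumption: the
    actual continuation of _prepare_post is not shown in this diff)."""
    if file_url.startswith("/"):
        return root + file_url
    return file_url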

View File

@@ -181,7 +181,6 @@ AUTH_MAP = {
    "pinterest"     : "Supported",
    "pixiv"         : "Required",
    "reddit"        : _OAUTH,
    "sankaku"       : "Supported",
    "seiga"         : "Required",
    "smugmug"       : _OAUTH,
    "subscribestar" : "Supported",

View File

@@ -88,7 +88,7 @@ class TestCookiedict(unittest.TestCase):
        self.assertEqual(sorted(cookies.values()), sorted(self.cdict.values()))

    def test_domain(self):
        for category in ["exhentai", "nijie", "sankaku", "seiga"]:
        for category in ["exhentai", "idolcomplex", "nijie", "seiga"]:
            extr = _get_extractor(category)
            cookies = extr.session.cookies
            for key in self.cdict:
@@ -104,10 +104,10 @@ class TestCookieLogin(unittest.TestCase):

    def test_cookie_login(self):
        extr_cookies = {
            "exhentai": ("ipb_member_id", "ipb_pass_hash"),
            "nijie"   : ("nemail", "nlogin"),
            "sankaku" : ("login", "pass_hash"),
            "seiga"   : ("user_session",),
            "exhentai"   : ("ipb_member_id", "ipb_pass_hash"),
            "idolcomplex": ("login", "pass_hash"),
            "nijie"      : ("nemail", "nlogin"),
            "seiga"      : ("user_session",),
        }
        for category, cookienames in extr_cookies.items():
            cookies = {name: "value" for name in cookienames}
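
These tests exercise the early-return path of login(): when every cookie named in cookienames is already present, no authentication request is made. A minimal stand-in for that check (_check_cookies here is simplified, not the real implementation):

def _check_cookies(cookies, cookienames):
    """Return True if every required login cookie is present."""
    return all(name in cookies for name in cookienames)

# The tests set each cookie to "value" and expect the session to count
# as logged in:
assert _check_cookies({"login": "value", "pass_hash": "value"},
                      ("login", "pass_hash"))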