Mirror of https://github.com/mikf/gallery-dl.git (synced 2024-11-22 10:42:34 +01:00)
[sankaku] remove login support
The old login method for 'https://chan.sankakucomplex.com/user/login' and the cookies it produces have no effect on the results from 'beta.sankakucomplex.com'.
parent 7f3d811d7b
commit b2c55f0a72
README.rst (15 changed lines)
@@ -213,9 +213,18 @@ Some extractors require you to provide valid login credentials in the form of
 a username & password pair. This is necessary for
 ``pixiv``, ``nijie``, and ``seiga``
 and optional for
-``aryion``, ``danbooru``, ``e621``, ``exhentai``, ``idolcomplex``, ``inkbunny``,
-``instagram``, ``luscious``, ``pinterest``, ``sankaku``, ``subscribestar``,
-``tsumino``, and ``twitter``.
+``aryion``,
+``danbooru``,
+``e621``,
+``exhentai``,
+``idolcomplex``,
+``inkbunny``,
+``instagram``,
+``luscious``,
+``pinterest``,
+``subscribestar``,
+``tsumino``,
+and ``twitter``.
 
 You can set the necessary information in your configuration file
 (cf. gallery-dl.conf_)
@@ -286,7 +286,6 @@ Description
 * ``instagram``
 * ``luscious``
 * ``pinterest``
-* ``sankaku``
 * ``subscribestar``
 * ``tsumino``
 * ``twitter``
@@ -1355,20 +1354,6 @@ Description
     available format is found.
 
 
-extractor.sankaku.wait-min & .wait-max
---------------------------------------
-Type
-    ``float``
-Default
-    ``3.0`` and ``6.0``
-Description
-    Minimum and maximum wait time in seconds between each image
-
-    Sankaku Channel responds with ``429 Too Many Requests`` if it
-    receives too many HTTP requests in a certain amount of time.
-    Waiting a few seconds between each request tries to prevent that.
-
-
 extractor.sankakucomplex.embeds
 -------------------------------
 Type
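The removed ``wait-min``/``wait-max`` options only controlled client-side pacing between requests. As a rough, standalone sketch of that idea (not gallery-dl's actual implementation; the helper name and URL list are made up for illustration), randomized spacing looks like this:

    import random
    import time

    import requests


    def fetch_with_spacing(urls, wait_min=3.0, wait_max=6.0):
        """Fetch each URL, sleeping a random interval between requests.

        Spacing requests out like this is the usual way to avoid a host
        answering with '429 Too Many Requests'.
        """
        session = requests.Session()
        responses = []
        for index, url in enumerate(urls):
            if index:  # no wait needed before the very first request
                time.sleep(random.uniform(wait_min, wait_max))
            responses.append(session.get(url, timeout=30))
        return responses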
@@ -128,13 +128,6 @@
         {
             "format": "mp4"
         },
-        "sankaku":
-        {
-            "username": null,
-            "password": null,
-            "wait-min": 3.0,
-            "wait-max": 6.0
-        },
         "seiga":
         {
             "username": null,
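The per-extractor credentials that remain in gallery-dl.conf are plain JSON, so they are easy to inspect. A minimal sketch, assuming the blocks shown above sit under the top-level "extractor" object as in the default config (the file path is only an example):

    import json

    # example path; gallery-dl also looks in several standard locations
    with open("gallery-dl.conf", encoding="utf-8") as fp:
        config = json.load(fp)

    # after this commit there is no "sankaku" block to read anymore;
    # other categories ("seiga", "pixiv", ...) are looked up the same way
    seiga = config.get("extractor", {}).get("seiga", {})
    print(seiga.get("username"), seiga.get("password"))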
@@ -112,7 +112,7 @@ rule #34          https://rule34.paheal.net/        Posts, Tag Searches
 Rule 34           https://rule34.xxx/               Pools, Posts, Tag Searches
 Safebooru         https://safebooru.org/            Pools, Posts, Tag Searches
 Sakugabooru       https://www.sakugabooru.com/      Pools, Popular Images, Posts, Tag Searches
-Sankaku Channel   https://chan.sankakucomplex.com/  Pools, Posts, Tag Searches  Supported
+Sankaku Channel   https://chan.sankakucomplex.com/  Pools, Posts, Tag Searches
 Sankaku Complex   https://www.sankakucomplex.com/   Articles, Tag Searches
 Sen Manga         https://raw.senmanga.com/         Chapters
 Sense-Scans       https://sensescans.com/reader/    Chapters, Manga
@@ -158,7 +158,7 @@ Turboimagehost    https://www.turboimagehost.com/   individual Images
 .. |furaffinity-C| replace:: Favorites, Galleries, Posts, Scraps, Search Results, User Profiles
 .. |hentaifoundry-C| replace:: Favorites, individual Images, Pictures, Popular Images, Recent Images, Scraps, Stories, User Profiles
 .. |imgur-C| replace:: Albums, Favorites, Galleries, individual Images, Search Results, Subreddits, Tag Searches, User Profiles
-.. |instagram-C| replace:: Channels, individual Images, Saved Posts, Stories, Tag Searches, User Profiles
+.. |instagram-C| replace:: Channels, Posts, Saved Posts, Stories, Tag Searches, User Profiles
 .. |newgrounds-C| replace:: Art, Audio, Favorites, individual Images, Media Files, Movies, User Profiles
 .. |nijie-C| replace:: Doujin, Favorites, Illustrations, individual Images, User Profiles
 .. |pixiv-C| replace:: Favorites, Follows, pixiv.me Links, Rankings, Search Results, User Profiles, individual Images
@@ -10,6 +10,7 @@
 
 from .sankaku import SankakuExtractor
 from .common import Message
+from ..cache import cache
 from .. import text, util, exception
 import collections
 import random
@@ -20,9 +21,9 @@ import re
 class IdolcomplexExtractor(SankakuExtractor):
     """Base class for idolcomplex extractors"""
     category = "idolcomplex"
+    cookienames = ("login", "pass_hash")
     cookiedomain = "idol.sankakucomplex.com"
     root = "https://" + cookiedomain
-    subdomain = "idol"
 
     def __init__(self, match):
         SankakuExtractor.__init__(self, match)
@@ -55,6 +56,34 @@ class IdolcomplexExtractor(SankakuExtractor):
     def post_ids(self):
         """Return an iterable containing all relevant post ids"""
 
+    def login(self):
+        if self._check_cookies(self.cookienames):
+            return
+        username, password = self._get_auth_info()
+        if username:
+            cookies = self._login_impl(username, password)
+            self._update_cookies(cookies)
+        else:
+            self.logged_in = False
+
+    @cache(maxage=90*24*3600, keyarg=1)
+    def _login_impl(self, username, password):
+        self.log.info("Logging in as %s", username)
+
+        url = self.root + "/user/authenticate"
+        data = {
+            "url"           : "",
+            "user[name]"    : username,
+            "user[password]": password,
+            "commit"        : "Login",
+        }
+        response = self.request(url, method="POST", data=data)
+
+        if not response.history or response.url != self.root + "/user/home":
+            raise exception.AuthenticationError()
+        cookies = response.history[0].cookies
+        return {c: cookies[c] for c in self.cookienames}
+
     def _parse_post(self, post_id):
         """Extract metadata of a single post"""
         url = self.root + "/post/show/" + post_id
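The ``@cache(maxage=90*24*3600, keyarg=1)`` decorator added above memoizes the login result per username, so the POST to ``/user/authenticate`` is not repeated on every run (gallery-dl's real ``cache`` module also persists entries between runs). A very reduced, in-memory approximation of the pattern, for illustration only:

    import functools
    import time


    def cache(maxage, keyarg=0):
        """Memoize a function's result, keyed on one positional argument,
        for at most 'maxage' seconds (in-memory only)."""
        def decorator(func):
            store = {}

            @functools.wraps(func)
            def wrapper(*args):
                key = args[keyarg]
                entry = store.get(key)
                if entry and entry[0] > time.time():
                    return entry[1]
                value = func(*args)
                store[key] = (time.time() + maxage, value)
                return value
            return wrapper
        return decorator


    @cache(maxage=90*24*3600, keyarg=1)
    def login(extractor, username, password):
        # stand-in for the real HTTP login; returns the cookies to reuse
        return {"login": username, "pass_hash": "example"}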
@@ -8,23 +8,19 @@
 
 """Extractors for https://chan.sankakucomplex.com/"""
 
-from .common import Extractor, Message
+from .booru import BooruExtractor
 from .. import text, exception
-from ..cache import cache
 import collections
 
 BASE_PATTERN = r"(?:https?://)?(?:beta|chan)\.sankakucomplex\.com"
 
 
-class SankakuExtractor(Extractor):
+class SankakuExtractor(BooruExtractor):
     """Base class for sankaku channel extractors"""
     basecategory = "booru"
     category = "sankaku"
     filename_fmt = "{category}_{id}_{md5}.{extension}"
-    cookienames = ("login", "pass_hash")
-    cookiedomain = "chan.sankakucomplex.com"
     request_interval_min = 1.0
-    subdomain = "chan"
     per_page = 100
 
     TAG_TYPES = {
@@ -40,56 +36,6 @@ class SankakuExtractor(Extractor):
         9: "meta",
     }
 
-    def items(self):
-        extended_tags = self.config("tags", False)
-        self.login()
-        data = self.metadata()
-        for post in self.posts():
-            try:
-                url = self._prepare_post(post, extended_tags)
-            except KeyError:
-                continue
-            post.update(data)
-            text.nameext_from_url(url, post)
-            yield Message.Directory, post
-            yield Message.Url, url, post
-
-    def metadata(self):
-        return ()
-
-    def posts(self):
-        return ()
-
-    def login(self):
-        """Login and set necessary cookies"""
-        if self._check_cookies(self.cookienames):
-            return
-        username, password = self._get_auth_info()
-        if username:
-            cookies = self._login_impl((username, self.subdomain), password)
-            self._update_cookies(cookies)
-        else:
-            self.logged_in = False
-
-    @cache(maxage=90*24*3600, keyarg=1)
-    def _login_impl(self, usertuple, password):
-        username = usertuple[0]
-        self.log.info("Logging in as %s", username)
-
-        url = self.root + "/user/authenticate"
-        data = {
-            "url"           : "",
-            "user[name]"    : username,
-            "user[password]": password,
-            "commit"        : "Login",
-        }
-        response = self.request(url, method="POST", data=data)
-
-        if not response.history or response.url != self.root + "/user/home":
-            raise exception.AuthenticationError()
-        cookies = response.history[0].cookies
-        return {c: cookies[c] for c in self.cookienames}
-
     def _prepare_post(self, post, extended_tags=False):
         url = post["file_url"]
         if url[0] == "/":
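For context, the ``items()`` method removed above shows the generator protocol every extractor implements: yield one ``Message.Directory`` carrying shared metadata, then a ``Message.Url`` per file; that loop is presumably now supplied by the shared ``BooruExtractor`` base class. A stripped-down, standalone sketch of the pattern, with stand-ins for ``Message`` and the post source (not the actual gallery-dl classes):

    class Message:
        # numeric tags standing in for gallery-dl's real message types
        Directory = 1
        Url = 2


    def items(posts, metadata):
        """Yield a directory message, then one URL message per post."""
        for post in posts:
            url = post.get("file_url")
            if not url:  # skip posts without a usable file URL
                continue
            post.update(metadata)
            yield Message.Directory, post
            yield Message.Url, url, post


    # usage: a downloader iterates the generator and dispatches on the tag
    for msg in items([{"id": 1, "file_url": "https://example.org/a.jpg"}],
                     {"search_tags": "example"}):
        print(msg)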
@@ -181,7 +181,6 @@ AUTH_MAP = {
     "pinterest"      : "Supported",
     "pixiv"          : "Required",
     "reddit"         : _OAUTH,
-    "sankaku"        : "Supported",
     "seiga"          : "Required",
     "smugmug"        : _OAUTH,
     "subscribestar"  : "Supported",
@@ -88,7 +88,7 @@ class TestCookiedict(unittest.TestCase):
         self.assertEqual(sorted(cookies.values()), sorted(self.cdict.values()))
 
     def test_domain(self):
-        for category in ["exhentai", "nijie", "sankaku", "seiga"]:
+        for category in ["exhentai", "idolcomplex", "nijie", "seiga"]:
             extr = _get_extractor(category)
             cookies = extr.session.cookies
             for key in self.cdict:
@@ -105,8 +105,8 @@ class TestCookieLogin(unittest.TestCase):
     def test_cookie_login(self):
         extr_cookies = {
             "exhentai"   : ("ipb_member_id", "ipb_pass_hash"),
+            "idolcomplex": ("login", "pass_hash"),
             "nijie"      : ("nemail", "nlogin"),
-            "sankaku"    : ("login", "pass_hash"),
             "seiga"      : ("user_session",),
         }
         for category, cookienames in extr_cookies.items():