2015-11-09 02:29:33 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2014-10-12 21:56:44 +02:00
|
|
|
|
2023-01-18 21:38:01 +01:00
|
|
|
# Copyright 2014-2023 Mike Fährmann
|
2015-11-09 02:29:33 +01:00
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
|
2020-12-22 01:57:53 +01:00
|
|
|
"""Extractors for https://sankaku.app/"""
|
2015-11-09 02:29:33 +01:00
|
|
|
|
2020-12-08 21:05:47 +01:00
|
|
|
from .booru import BooruExtractor
|
2020-12-29 17:36:37 +01:00
|
|
|
from .common import Message
|
2021-05-04 18:00:38 +02:00
|
|
|
from .. import text, util, exception
|
2020-12-17 16:12:59 +01:00
|
|
|
from ..cache import cache
|
2018-07-13 16:20:14 +02:00
|
|
|
import collections
|
2023-04-19 20:09:11 +02:00
|
|
|
import re
|
2020-12-05 22:08:58 +01:00
|
|
|
|
2020-12-22 01:57:53 +01:00
|
|
|
# Common URL prefix for all sankaku extractors: optional scheme, one of the
# known hostnames, and an optional two-letter language path segment.
BASE_PATTERN = (
    r"(?:https?://)?"
    r"(?:(?:chan|www|beta|black|white)\.sankakucomplex\.com|sankaku\.app)"
    r"(?:/[a-z]{2})?"
)
|
2015-11-09 02:29:33 +01:00
|
|
|
|
2017-02-01 00:53:19 +01:00
|
|
|
|
2020-12-08 21:05:47 +01:00
|
|
|
class SankakuExtractor(BooruExtractor):
    """Base class for sankaku channel extractors"""
    basecategory = "booru"
    category = "sankaku"
    root = "https://sankaku.app"
    filename_fmt = "{category}_{id}_{md5}.{extension}"
    # do not restrict stored cookies to a single domain
    cookies_domain = None
    # class-level flag so the login-required warning is emitted only once
    # across all instances (see _file_url)
    _warning = True

    # maps the numeric tag "type" field returned by the API to a
    # human-readable category name; 6 and 7 are intentionally unnamed
    TAG_TYPES = {
        0: "general",
        1: "artist",
        2: "studio",
        3: "copyright",
        4: "character",
        5: "genre",
        6: "",
        7: "",
        8: "medium",
        9: "meta",
    }

    def skip(self, num):
        """Skipping ahead is not supported; report 0 posts skipped"""
        return 0

    def _init(self):
        # per-extractor API client
        self.api = SankakuAPI(self)

    def _file_url(self, post):
        """Return the download URL of 'post', warning when unavailable.

        An empty 'file_url' either means the post is inactive or that a
        login is required; the latter warning is only logged once.
        """
        url = post["file_url"]
        if not url:
            if post["status"] != "active":
                self.log.warning(
                    "Unable to download post %s (%s)",
                    post["id"], post["status"])
            elif self._warning:
                self.log.warning(
                    "Login required to download 'contentious_content' posts")
                SankakuExtractor._warning = False
        elif url[8] == "v":
            # index 8 is the first character after 'https://'; a host
            # starting with 'v' (presumably a video server — TODO confirm)
            # is rewritten to the static file server
            url = "https://s.sankakucomplex.com" + url[url.index("/", 8):]
        return url

    def _prepare(self, post):
        """Normalize post metadata in place before it is yielded"""
        # flatten the nested timestamp object to its epoch seconds
        post["created_at"] = post["created_at"]["s"]
        post["date"] = text.parse_timestamp(post["created_at"])
        # lowercase, underscore-separated tag names; skip empty names
        post["tags"] = [tag["name"].lower().replace(" ", "_")
                        for tag in post["tags"] if tag["name"]]
        post["tag_string"] = " ".join(post["tags"])
        # hook used by the downloader to detect expired download URLs
        post["_http_validate"] = self._check_expired

    def _check_expired(self, response):
        # a redirect to the '.com/expired.png' placeholder means the
        # download URL has expired and must be refreshed
        return not response.history or '.com/expired.png' not in response.url

    def _tags(self, post, page):
        """Group the post's tags by type into 'tags_<name>' lists"""
        tags = collections.defaultdict(list)
        for tag in post["tags"]:
            name = tag["name"]
            if name:
                tags[tag["type"]].append(name.lower().replace(" ", "_"))
        types = self.TAG_TYPES
        for type, values in tags.items():
            name = types[type]
            post["tags_" + name] = values
            post["tag_string_" + name] = " ".join(values)

    def _notes(self, post, page):
        """Attach translation notes to 'post' when it has any"""
        if post.get("has_notes"):
            post["notes"] = self.api.notes(post["id"])
            # flatten nested timestamp objects, like in _prepare()
            for note in post["notes"]:
                note["created_at"] = note["created_at"]["s"]
                note["updated_at"] = note["updated_at"]["s"]
        else:
            post["notes"] = ()
|
|
|
|
|
2017-12-11 21:44:27 +01:00
|
|
|
|
|
|
|
class SankakuTagExtractor(SankakuExtractor):
    """Extractor for images from sankaku.app by search-tags"""
    subcategory = "tag"
    directory_fmt = ("{category}", "{search_tags}")
    archive_fmt = "t_{search_tags}_{id}"
    pattern = BASE_PATTERN + r"(?:/posts)?/?\?([^#]*)"
    example = "https://sankaku.app/?tags=TAG"

    def __init__(self, match):
        SankakuExtractor.__init__(self, match)
        query = text.parse_query(match.group(1))
        tags = text.unquote(query.get("tags", "").replace("+", " "))

        if "date:" in tags:
            # rewrite 'date:' tags into YYYY.MM.DD order (#1790)
            for regex, repl in (
                (r"date:(\d\d)[.-](\d\d)[.-](\d\d\d\d)", r"date:\3.\2.\1"),
                (r"date:(\d\d\d\d)[.-](\d\d)[.-](\d\d)", r"date:\1.\2.\3"),
            ):
                tags = re.sub(regex, repl, tags)

        self.tags = tags

    def metadata(self):
        """Expose the search tags as shared metadata"""
        return {"search_tags": self.tags}

    def posts(self):
        """Return an iterable over all posts matching the tag search"""
        return self.api.posts_keyset({"tags": self.tags})
|
2017-12-12 18:20:15 +01:00
|
|
|
|
|
|
|
|
2017-12-12 19:18:43 +01:00
|
|
|
class SankakuPoolExtractor(SankakuExtractor):
    """Extractor for image pools or books from sankaku.app"""
    subcategory = "pool"
    directory_fmt = ("{category}", "pool", "{pool[id]} {pool[name_en]}")
    archive_fmt = "p_{pool}_{id}"
    pattern = BASE_PATTERN + r"/(?:books|pools?/show)/(\d+)"
    example = "https://sankaku.app/books/12345"

    def __init__(self, match):
        SankakuExtractor.__init__(self, match)
        self.pool_id = match.group(1)

    def metadata(self):
        """Fetch pool info, flatten its tag lists, and number its posts"""
        pool = self.api.pools(self.pool_id)

        # reduce both tag lists to plain name strings
        for key in ("tags", "artist_tags"):
            pool[key] = [entry["name"] for entry in pool[key]]

        # detach the post list and assign 1-based position numbers
        posts = pool.pop("posts")
        num = 0
        for post in posts:
            num += 1
            post["num"] = num
        self._posts = posts

        return {"pool": pool}

    def posts(self):
        """Return the posts collected by metadata()"""
        return self._posts
|
2017-12-12 19:18:43 +01:00
|
|
|
|
|
|
|
|
2017-12-12 18:20:15 +01:00
|
|
|
class SankakuPostExtractor(SankakuExtractor):
    """Extractor for single posts from sankaku.app"""
    subcategory = "post"
    archive_fmt = "{id}"
    pattern = BASE_PATTERN + r"/posts?(?:/show)?/(\w+)"
    example = "https://sankaku.app/post/show/12345"

    def __init__(self, match):
        SankakuExtractor.__init__(self, match)
        # numeric ID or 32-character md5, as captured from the URL
        self.post_id = match.group(1)

    def posts(self):
        """Look up the single post identified by the URL"""
        return self.api.posts(self.post_id)
|
2020-12-17 16:12:59 +01:00
|
|
|
|
|
|
|
|
2020-12-29 17:36:37 +01:00
|
|
|
class SankakuBooksExtractor(SankakuExtractor):
    """Extractor for books by tag search on sankaku.app"""
    subcategory = "books"
    pattern = BASE_PATTERN + r"/books/?\?([^#]*)"
    example = "https://sankaku.app/books?tags=TAG"

    def __init__(self, match):
        SankakuExtractor.__init__(self, match)
        query = text.parse_query(match.group(1))
        self.tags = text.unquote(query.get("tags", "").replace("+", " "))

    def items(self):
        """Yield one queue message per matching book/pool"""
        search = {"tags": self.tags, "pool_type": "0"}
        for pool in self.api.pools_keyset(search):
            pool["_extractor"] = SankakuPoolExtractor
            url = "https://sankaku.app/books/{}".format(pool["id"])
            yield Message.Queue, url, pool
|
|
|
|
|
|
|
|
|
2020-12-17 16:12:59 +01:00
|
|
|
class SankakuAPI():
    """Interface for the sankaku.app API"""

    def __init__(self, extractor):
        self.extractor = extractor
        self.headers = {
            "Accept" : "application/vnd.sankaku.api+json;v=2",
            "Platform" : "web-app",
            "Api-Version": None,
            "Origin" : extractor.root,
        }

        # Api-Version 2 makes the API return alphanumeric post IDs
        if extractor.config("id-format") in ("alnum", "alphanumeric"):
            self.headers["Api-Version"] = "2"

        self.username, self.password = extractor._get_auth_info()
        if not self.username:
            # no credentials: make authenticate() a no-op
            self.authenticate = util.noop

    def notes(self, post_id):
        """Return the list of translation notes for 'post_id'"""
        params = {"lang": "en"}
        return self._call("/posts/{}/notes".format(post_id), params)

    def pools(self, pool_id):
        """Return metadata (including posts) of pool 'pool_id'"""
        params = {"lang": "en"}
        return self._call("/pools/" + pool_id, params)

    def pools_keyset(self, params):
        """Paginate over pools matching 'params'"""
        return self._pagination("/pools/keyset", params)

    def posts(self, post_id):
        """Return the single post identified by 'post_id'.

        A 32-character ID is treated as an md5 hash,
        anything else as a numeric ID.
        """
        params = {
            "lang" : "en",
            "page" : "1",
            "limit": "1",
            "tags" : ("md5:" if len(post_id) == 32 else "id_range:") + post_id,
        }
        return self._call("/posts", params)

    def posts_keyset(self, params):
        """Paginate over posts matching 'params'"""
        return self._pagination("/posts/keyset", params)

    def authenticate(self):
        # result is cached per username by _authenticate_impl
        self.headers["Authorization"] = \
            _authenticate_impl(self.extractor, self.username, self.password)

    def _call(self, endpoint, params=None):
        """Send an API request; retry up to 5 times on rate limits
        or invalidated tokens"""
        url = "https://capi-v2.sankakucomplex.com" + endpoint
        for _ in range(5):
            self.authenticate()
            response = self.extractor.request(
                url, params=params, headers=self.headers, fatal=None)

            if response.status_code == 429:
                until = response.headers.get("X-RateLimit-Reset")
                # a 429 without reset header but with 'tags-limit' in the
                # body means too many search tags were used, not a rate limit
                if not until and b"tags-limit" in response.content:
                    raise exception.StopExtraction("Search tag limit exceeded")
                seconds = None if until else 60
                self.extractor.wait(until=until, seconds=seconds)
                continue

            data = response.json()
            # list responses have no 'success' flag and count as success
            try:
                success = data.get("success", True)
            except AttributeError:
                success = True
            if not success:
                code = data.get("code")
                if code and code.endswith(
                        ("unauthorized", "invalid-token", "invalid_token")):
                    # drop the cached token and log in again
                    _authenticate_impl.invalidate(self.username)
                    continue
                raise exception.StopExtraction(code)
            return data

    def _pagination(self, endpoint, params):
        """Yield all results of a keyset-paginated endpoint.

        With the 'refresh' option enabled, download URLs embed an expiry
        timestamp ('e=' query parameter); when the current page's URLs are
        about to expire, the same page is re-fetched and already-yielded
        entries are skipped via 'offset'.
        """
        params["lang"] = "en"
        params["limit"] = str(self.extractor.per_page)

        refresh = self.extractor.config("refresh", False)
        if refresh:
            offset = expires = 0
            from time import time

        while True:
            data = self._call(endpoint, params)

            if refresh:
                posts = data["data"]
                if offset:
                    # skip entries already yielded before the last refresh
                    posts = util.advance(posts, offset)

                for post in posts:
                    if not expires:
                        # derive expiry from the first usable file URL,
                        # minus a 60 second safety margin
                        url = post["file_url"]
                        if url:
                            expires = text.parse_int(
                                text.extr(url, "e=", "&")) - 60

                    if 0 < expires <= time():
                        # URLs expired mid-page: refetch the same page
                        # ('expires = None' marks this state below)
                        self.extractor.log.debug("Refreshing download URLs")
                        expires = None
                        break

                    offset += 1
                    yield post

                if expires is None:
                    expires = 0
                    continue
                offset = expires = 0

            else:
                yield from data["data"]

            params["next"] = data["meta"]["next"]
            if not params["next"]:
                return
|
|
|
|
|
|
|
|
|
2023-12-18 23:19:44 +01:00
|
|
|
@cache(maxage=365*86400, keyarg=1)
def _authenticate_impl(extr, username, password):
    """Log in and return an 'Authorization' header value.

    The result is cached per username for up to one year.
    Raises AuthenticationError on failed logins.
    """
    extr.log.info("Logging in as %s", username)

    url = "https://capi-v2.sankakucomplex.com/auth/token"
    headers = {"Accept": "application/vnd.sankaku.api+json;v=2"}
    data = {"login": username, "password": password}

    response = extr.request(
        url, method="POST", headers=headers, json=data, fatal=False)
    data = response.json()

    if response.status_code >= 400 or not data.get("success"):
        raise exception.AuthenticationError(data.get("error"))
    return "Bearer " + data["access_token"]
|