# -*- coding: utf-8 -*-

# Copyright 2016-2019 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

"""Extractors for https://members.luscious.net/"""

from .common import Extractor, Message
from .. import text, exception


class LusciousExtractor(Extractor):
    """Base class for luscious extractors"""
    category = "luscious"
    cookiedomain = ".luscious.net"
    root = "https://members.luscious.net"
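
    # Send a single GraphQL operation as a JSON POST to the
    # "/graphql/nobatch/" endpoint and return the "data" object of the
    # response; HTTP errors (status >= 400) are logged and stop extraction.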
    def _graphql(self, op, variables, query):
        data = {
            "id"           : 1,
            "operationName": op,
            "query"        : query,
            "variables"    : variables,
        }
        response = self.request(
            "{}/graphql/nobatch/?operationName={}".format(self.root, op),
            method="POST", json=data, fatal=False,
        )

        if response.status_code >= 400:
            self.log.error("GraphQL query failed ('%s %s')",
                           response.status_code, response.reason)
            self.log.debug("Server response: %s", response.text)
            raise exception.StopExtraction()

        return response.json()["data"]


class LusciousAlbumExtractor(LusciousExtractor):
    """Extractor for image albums from luscious.net"""
    subcategory = "album"
    filename_fmt = "{category}_{album[id]}_{num:>03}.{extension}"
    directory_fmt = ("{category}", "{album[id]} {album[title]}")
    archive_fmt = "{album[id]}_{id}"
    pattern = (r"(?:https?://)?(?:www\.|members\.)?luscious\.net"
               r"/(?:albums|pictures/c/[^/?&#]+/album)/[^/?&#]+_(\d+)")
    test = (
        ("https://luscious.net/albums/okinami-no-koigokoro_277031/", {
            "url": "7e4984a271a1072ac6483e4228a045895aff86f3",
            # "keyword": "07c0b915f2ab1cc3bbf28b76e7950fccee1213f3",
            # "content": "b3a747a6464509440bd0ff6d1267e6959f8d6ff3",
        }),
        ("https://luscious.net/albums/virgin-killer-sweater_282582/", {
            "url": "21cc68a7548f4d71dfd67d8caf96349dde7e791c",
            # "keyword": "e1202078b504adeccd521aa932f456a5a85479a0",
        }),
        ("https://luscious.net/albums/not-found_277035/", {
            "exception": exception.NotFoundError,
        }),
        ("https://members.luscious.net/albums/login-required_323871/", {
            "count": 78,
        }),
        ("https://www.luscious.net/albums/okinami_277031/"),
        ("https://members.luscious.net/albums/okinami_277031/"),
        ("https://luscious.net/pictures/c/video_game_manga/album"
         "/okinami-no-koigokoro_277031/sorted/position/id/16528978/@_1"),
    )

    def __init__(self, match):
        LusciousExtractor.__init__(self, match)
        self.album_id = match.group(1)
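
    # Fetch the album metadata once, then emit one Message.Url per picture
    # with both picture and album metadata attached.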
    def items(self):
        album = self.metadata()
        yield Message.Version, 1
        yield Message.Directory, {"album": album}
        for num, image in enumerate(self.images(), 1):
            image["num"] = num
            image["album"] = album
            image["thumbnail"] = image.pop("thumbnails")[0]["url"]
            image["tags"] = [item["text"] for item in image["tags"]]
            image["date"] = text.parse_timestamp(image["created"])
            image["id"] = text.parse_int(image["id"])
            url = image["url_to_video"] or image["url_to_original"]
            yield Message.Url, url, text.nameext_from_url(url, image)

    def metadata(self):
        variables = {
            "id": self.album_id,
        }
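        # GraphQL query for the "AlbumGet" operation; the result is either
        # an Album ("AlbumStandard" fragment) or a MutationError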
        query = (
            "query AlbumGet($id: ID!) { album { get(id: $id) { ... on Album { "
            "...AlbumStandard } ... on MutationError { errors { code message }"
            " } } } } fragment AlbumStandard on Album { __typename id title la"
            "bels description created modified like_status number_of_favorites"
            " rating status marked_for_deletion marked_for_processing number_o"
            "f_pictures number_of_animated_pictures slug is_manga url download"
            "_url permissions cover { width height size url } created_by { id "
            "name display_name user_title name display_name avatar { url size "
            "} url } content { id title url } language { id title url } tags {"
            " id category slug text url count } genres { id title slug url } a"
            "udiences { id title url url } last_viewed_picture { id position u"
            "rl } }"
        )

        album = self._graphql("AlbumGet", variables, query)["album"]["get"]
        if "errors" in album:
            raise exception.NotFoundError("album")
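
        # reduce nested objects and lists to plain strings and numbers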
        album["audiences"] = [item["title"] for item in album["audiences"]]
        album["genres"] = [item["title"] for item in album["genres"]]
        album["tags"] = [item["text"] for item in album["tags"]]

        album["cover"] = album["cover"]["url"]
        album["content"] = album["content"]["title"]
        album["language"] = album["language"]["title"].partition(" ")[0]
        album["created_by"] = album["created_by"]["display_name"]

        album["id"] = text.parse_int(album["id"])
        album["date"] = text.parse_timestamp(album["created"])

        return album

    def images(self):
        variables = {
            "input": {
                "filters": [{
                    "name" : "album_id",
                    "value": self.album_id,
                }],
                "display": "position",
                "page"   : 1,
            },
        }
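        # GraphQL query for "AlbumListOwnPictures": pictures filtered by
        # album_id, ordered by position, fetched page by page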
        query = (
            "query AlbumListOwnPictures($input: PictureListInput!) { picture {"
            " list(input: $input) { info { ...FacetCollectionInfo } items { .."
            ".PictureStandardWithoutAlbum } } } } fragment FacetCollectionInfo"
            " on FacetCollectionInfo { page has_next_page has_previous_page to"
            "tal_items total_pages items_per_page url_complete url_filters_onl"
            "y } fragment PictureStandardWithoutAlbum on Picture { __typename "
            "id title created like_status number_of_comments number_of_favorit"
            "es status width height resolution aspect_ratio url_to_original ur"
            "l_to_video is_animated position tags { id category slug text url "
            "} permissions url thumbnails { width height size url } }"
        )
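
        # keep requesting pages until 'has_next_page' is no longer set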
        while True:
            data = self._graphql("AlbumListOwnPictures", variables, query)
            yield from data["picture"]["list"]["items"]

            if not data["picture"]["list"]["info"]["has_next_page"]:
                return
            variables["input"]["page"] += 1


class LusciousSearchExtractor(LusciousExtractor):
    """Extractor for album searches on luscious.net"""
    subcategory = "search"
    pattern = (r"(?:https?://)?(?:www\.|members\.)?luscious\.net"
               r"/albums/list/?(?:\?([^#]+))?")
    test = (
        ("https://members.luscious.net/albums/list/"),
        ("https://members.luscious.net/albums/list/"
         "?display=date_newest&language_ids=%2B1&tagged=+full_color&page=1", {
            "pattern": LusciousAlbumExtractor.pattern,
            "range": "41-60",
            "count": 20,
        }),
    )

    def __init__(self, match):
        LusciousExtractor.__init__(self, match)
        self.query = match.group(1)
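
    # Turn the URL's query parameters into an album-list GraphQL query and
    # enqueue every matching album for the album extractor.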
    def items(self):
        query = text.parse_query(self.query)
        display = query.pop("display", "date_newest")
        page = query.pop("page", None)
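
        # every remaining query parameter becomes a name/value filter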
        variables = {
            "input": {
                "display": display,
                "filters": [{"name": n, "value": v} for n, v in query.items()],
                "page": text.parse_int(page, 1),
            },
        }
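        # GraphQL query for "AlbumListWithPeek": returns "AlbumMinimal"
        # entries plus paging info ("FacetCollectionInfo" fragment)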
        query = (
            "query AlbumListWithPeek($input: AlbumListInput!) { album { list(i"
            "nput: $input) { info { ...FacetCollectionInfo } items { ...AlbumM"
            "inimal peek_thumbnails { width height size url } } } } } fragment"
            " FacetCollectionInfo on FacetCollectionInfo { page has_next_page "
            "has_previous_page total_items total_pages items_per_page url_comp"
            "lete url_filters_only } fragment AlbumMinimal on Album { __typena"
            "me id title labels description created modified number_of_favorit"
            "es number_of_pictures slug is_manga url download_url cover { widt"
            "h height size url } content { id title url } language { id title "
            "url } tags { id category slug text url count } genres { id title "
            "slug url } audiences { id title url } }"
        )

        yield Message.Version, 1
        while True:
            data = self._graphql("AlbumListWithPeek", variables, query)
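
            # delegate each album to LusciousAlbumExtractor via Message.Queue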
            for album in data["album"]["list"]["items"]:
                album["url"] = self.root + album["url"]
                album["_extractor"] = LusciousAlbumExtractor
                yield Message.Queue, album["url"], album

            if not data["album"]["list"]["info"]["has_next_page"]:
                return
            variables["input"]["page"] += 1