#!/usr/bin/env python
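"""Generate docs/supportedsites.rst, an RST table of all supported sites."""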
import sys
import os.path
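# make the gallery_dl package from this repository importable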
ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.realpath(ROOTDIR))
import gallery_dl.extractor # noqa


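# category -> site name shown in the "Site" column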
CATEGORY_MAP = {
    "2chan"          : "Futaba Channel",
    "archivedmoe"    : "Archived.Moe",
    "archiveofsins"  : "Archive of Sins",
    "artstation"     : "ArtStation",
    "b4k"            : "arch.b4k.co",
    "deviantart"     : "DeviantArt",
    "dokireader"     : "Doki Reader",
    "dynastyscans"   : "Dynasty Reader",
    "e621"           : "e621",
    "exhentai"       : "ExHentai",
    "fallenangels"   : "Fallen Angels Scans",
    "hbrowse"        : "HBrowse",
    "hentai2read"    : "Hentai2Read",
    "hentaifoundry"  : "Hentai Foundry",
    "hentaihere"     : "HentaiHere",
    "hitomi"         : "Hitomi.la",
    "idolcomplex"    : "Idol Complex",
    "imagebam"       : "ImageBam",
    "imagefap"       : "ImageFap",
    "imgbox"         : "imgbox",
    "imgth"          : "imgth",
    "imgur"          : "imgur",
    "jaiminisbox"    : "Jaimini's Box",
    "kireicake"      : "Kirei Cake",
    "kissmanga"      : "KissManga",
    "mangadex"       : "MangaDex",
    "mangafox"       : "Manga Fox",
    "mangahere"      : "Manga Here",
    "mangapark"      : "MangaPark",
    "mangastream"    : "Manga Stream",
    "myportfolio"    : "Adobe Portfolio",
    "nhentai"        : "nhentai",
    "nijie"          : "nijie",
    "nyafuu"         : "Nyafuu Archive",
    "paheal"         : "rule #34",
    "powermanga"     : "PowerManga",
    "readcomiconline": "Read Comic Online",
    "rbt"            : "RebeccaBlackTech",
    "rule34"         : "Rule 34",
    "sankaku"        : "Sankaku Channel",
    "seaotterscans"  : "Sea Otter Scans",
    "seiga"          : "Niconico Seiga",
    "senmanga"       : "Sen Manga",
    "sensescans"     : "Sense-Scans",
    "simplyhentai"   : "Simply Hentai",
    "slideshare"     : "SlideShare",
    "smugmug"        : "SmugMug",
    "thebarchive"    : "The /b/ Archive",
    "worldthree"     : "World Three",
    "xvideos"        : "XVideos",
}


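# subcategory -> capability description shown in the "Capabilities" column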
SUBCATEGORY_MAP = {
    "doujin"       : "Doujin",
    "gallery"      : "Galleries",
    "image"        : "Individual Images",
    "issue"        : "Comic Issues",
    "manga"        : "Manga",
    "me"           : "pixiv.me Links",
    "media"        : "Media Tweets",
    "path"         : "Images from Users and Folders",
    "pinit"        : "pin.it Links",
    "popular"      : "Popular Images",
    "search"       : "Search Results",
    "status"       : "Images from Statuses",
    "tag"          : "Tag Searches",
    "user"         : "Images from Users",
    "work"         : "Individual Images",
    "related-pin"  : "Related Pins",
    "related-board": "",
}


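# category -> authentication info shown in the "Authentication" column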
AUTH_MAP = {
    "batoto"     : "Optional",
    "deviantart" : "Optional (OAuth)",
    "exhentai"   : "Optional",
    "flickr"     : "Optional (OAuth)",
    "idolcomplex": "Optional",
    "nijie"      : "Required",
    "pixiv"      : "Required",
    "reddit"     : "Optional (OAuth)",
    "sankaku"    : "Optional",
    "seiga"      : "Required",
    "smugmug"    : "Optional (OAuth)",
    "tumblr"     : "Optional (OAuth)",
}


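# categories that should not appear in the table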
IGNORE_LIST = (
    "oauth",
)


class RstColumn():
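    """A single, fixed-width column of an RST table"""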
def __init__(self, title, data, size=None):
        self.title = title
        self.data = self._transform(data)
        if not size:
            # measure the widest cell; use the transformed strings,
            # since raw input values may be lists
            self.size = max(len(value) for value in self.data + [title])
        else:
            self.size = size

self.title = self._pad(self.title)
        for i, value in enumerate(self.data):
            self.data[i] = self._pad(value)

def __str__(self):
        return self.title

def __len__(self):
        return len(self.data)

def __getitem__(self, key):
        # rows beyond this column's data render as blank, padded cells;
        # returning a string keeps str.join() in _format_row working
        return self.data[key] if key < len(self.data) else " " * self.size

def _transform(self, data):
        # join list values into comma-separated strings
        return [
            value if isinstance(value, str) else ", ".join(value)
            for value in data
        ]

def _pad(self, s):
        if len(s) <= self.size:
            return s + " " * (self.size - len(s))
        else:
            return substitute(s, self.size)


class RstTable():
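    """An RST "simple table" assembled from RstColumn objects.

    Iterating over an instance yields the finished table line by line:

        ====== ======
        Site   URL
        ====== ======
        ...    ...
        ====== ======
    """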
def __init__(self, columns):
        self.columns = columns
        self.rowcount = max(len(col) for col in columns)
        self.sep = " ".join("=" * col.size for col in columns)

def __iter__(self):
        yield self.sep
        yield " ".join(col.title for col in self.columns)
        yield self.sep
        for i in range(self.rowcount):
            yield self._format_row(i)
        yield self.sep

def _format_row(self, row):
return " ".join(col[row] for col in self.columns)


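# (value, substitution name) pairs for cell values too wide for their
# column; they are written out as RST "replace" directives at the end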
_subs = []


def substitute(value, size):
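    """Return an RST substitution reference for an over-long cell value"""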
    sub = "|{}-{}|".format(value[:15], len(_subs))
    _subs.append((value, sub))
    return sub + " " * (size - len(sub))


def build_list():
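    """Collect all extractor classes, grouped by category"""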
    extractors = []
    classes = []
    last = None

    # collect extractor classes into one list per category
    for extr in gallery_dl.extractor.extractors():
        if extr.category in IGNORE_LIST:
            continue
        if extr.category == last or not last:
            classes.append(extr)
        else:
            if classes[0].subcategory:
                extractors.append(classes)
            classes = [extr]
        last = extr.category
    extractors.append(classes)

    # sort each group and map categories/subcategories to display names
    for extrlist in extractors:
        extrlist.sort(key=subcategory_key)
        for extr in extrlist:
            extr.cat = map_category(extr.category)
            extr.subcat = map_subcategory(extr.subcategory)
    extractors.sort(key=category_key)

    return extractors


def get_domain(classes):
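    """Derive a site's URL from its extractor docstrings"""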
    try:
        cls = classes[0]
        # extractor modules are expected to end their docstring
        # with the site's URL ...
        url = sys.modules[cls.__module__].__doc__.split()[-1]
        if url.startswith("http"):
            return url
        # ... or the extractor class with its hostname
        scheme = "https" if getattr(cls, "https", False) else "http"
        host = cls.__doc__.split()[-1]
        return scheme + "://" + host + "/"
    except (IndexError, AttributeError):
        pass
    return ""


def map_category(c):
return CATEGORY_MAP.get(c, c.capitalize())


def map_subcategory(sc):
    if sc in SUBCATEGORY_MAP:
        return SUBCATEGORY_MAP[sc]
    # fall back to the capitalized, pluralized subcategory name
    sc = sc.capitalize()
    return sc if sc.endswith("s") else sc + "s"


def category_key(extrlist):
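    """Sort categories case-insensitively; move lone imagehosts to the end"""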
    key = extrlist[0].cat.lower()
    if len(extrlist) == 1 and extrlist[0].__module__.endswith(".imagehosts"):
        key = "zz" + key
    return key


def subcategory_key(cls):
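    """Sort "user" and "issue" extractors first, "media" extractors last"""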
    if cls.subcategory in ("user", "issue"):
        return "A"
    if cls.subcategory in ("media",):
        return "z"
    return cls.subcategory


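# build the column data and write the RST document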
extractors = build_list()
columns = [
    RstColumn("Site", [
        extrlist[0].cat
        for extrlist in extractors
    ], 20),
    RstColumn("URL", [
        get_domain(extrlist)
        for extrlist in extractors
    ], 35),
    RstColumn("Capabilities", [
        ", ".join(extr.subcat for extr in extrlist if extr.subcat)
        for extrlist in extractors
    ], 50),
    RstColumn("Authentication", [
        AUTH_MAP.get(extrlist[0].category, "")
        for extrlist in extractors
    ]),
]
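# an alternative output file name may be passed as the first argument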
outfile = sys.argv[1] if len(sys.argv) > 1 else "supportedsites.rst"
with open(os.path.join(ROOTDIR, "docs", outfile), "w") as file:
    file.write("Supported Sites\n"
               "===============\n")
    for line in RstTable(columns):
        file.write(line.rstrip() + "\n")
    file.write("\n")
    # emit the "replace" directives for substituted cell values
    for val, sub in _subs:
        file.write(".. {} replace:: {}\n".format(sub, val))