# -*- coding: utf-8 -*-
# Copyright 2015-2017 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
import sys
import json
import hashlib

from . import extractor, downloader, config, util, output, exception
from .extractor.message import Message

class Job():
    """Base class for Job-types"""

    def __init__(self, url):
        """Find an extractor for 'url' and set up range predicates.

        Raises exception.NoExtractorError if no extractor accepts 'url'.
        """
        self.url = url
        self.extractor = extractor.find(url)
        if self.extractor is None:
            raise exception.NoExtractorError(url)
        self.extractor.log.debug("Using %s for %s",
                                 self.extractor.__class__.__name__, url)

        # build predicates from the "images"/"chapters" config options;
        # 'True' acts as an always-matching predicate
        items = config.get(("images",))
        if items:
            pred = util.RangePredicate(items)
            if pred.lower > 1:
                # tell the extractor to skip ahead, so items below the
                # lower range bound are never fetched in the first place
                pred.index += self.extractor.skip(pred.lower - 1)
            self.pred_url = pred
        else:
            self.pred_url = True

        items = config.get(("chapters",))
        self.pred_queue = util.RangePredicate(items) if items else True

    def run(self):
        """Execute or run the job"""
        # bind 'log' before the 'try' so every except-handler can use it
        log = self.extractor.log
        try:
            for msg in self.extractor:
                self.dispatch(msg)
        except exception.AuthenticationError:
            log.error("Authentication failed. Please provide a valid "
                      "username/password pair.")
        except exception.AuthorizationError:
            log.error("You do not have permission to access the resource "
                      "at '%s'", self.url)
        except exception.NotFoundError as err:
            res = str(err) or "resource (gallery/image/user)"
            log.error("The %s at '%s' does not exist", res, self.url)
        except exception.StopExtraction:
            pass
        except Exception as exc:
            msg = "An unexpected error occurred:"
            try:
                # best-effort: dig a short human-readable reason out of
                # nested exception args (urllib3-style wrappers); fall
                # back to a full traceback if the structure differs
                err = ": ".join(exc.args[0].reason.args[0].split(": ")[1:])
                log.error("%s: %s - %s", msg, exc.__class__.__name__, err)
                return
            except Exception:
                pass
            log.error(msg, exc_info=True)

    def dispatch(self, msg):
        """Call the appropriate message handler"""
        if msg[0] == Message.Url:
            if self.pred_url:
                self.update_kwdict(msg[2])
                self.handle_url(msg[1], msg[2])
        elif msg[0] == Message.Directory:
            self.update_kwdict(msg[1])
            self.handle_directory(msg[1])
        elif msg[0] == Message.Queue:
            if self.pred_queue:
                self.handle_queue(msg[1])
        elif msg[0] == Message.Headers:
            self.handle_headers(msg[1])
        elif msg[0] == Message.Cookies:
            self.handle_cookies(msg[1])
        elif msg[0] == Message.Version:
            if msg[1] != 1:
                # fix: the previous code raised a bare string, which is
                # itself a TypeError in Python 3 and loses the message;
                # raise a proper exception instead
                raise TypeError(
                    "unsupported message-version ({}, {})".format(
                        self.extractor.category, msg[1]))
        # TODO: support for multiple message versions

    def handle_url(self, url, keywords):
        """Handle Message.Url"""

    def handle_directory(self, keywords):
        """Handle Message.Directory"""

    def handle_queue(self, url):
        """Handle Message.Queue"""

    def handle_headers(self, headers):
        """Handle Message.Headers"""

    def handle_cookies(self, cookies):
        """Handle Message.Cookies"""

    def update_kwdict(self, kwdict):
        """Add 'category' and 'subcategory' keywords"""
        kwdict["category"] = self.extractor.category
        kwdict["subcategory"] = self.extractor.subcategory


class DownloadJob(Job):
    """Download images into appropriate directory/filename locations"""

    def __init__(self, url):
        Job.__init__(self, url)
        self.pathfmt = util.PathFormat(self.extractor)
        # cache of downloader instances, keyed by URL scheme
        self.downloaders = {}
        self.out = output.select()

    def handle_url(self, url, keywords):
        """Download the resource specified in 'url'"""
        self.pathfmt.set_keywords(keywords)
        if self.pathfmt.exists():
            # target file is already present -> report and skip it
            self.out.skip(self.pathfmt.path)
            return
        self.get_downloader(url).download(url, self.pathfmt)

    def handle_directory(self, keywords):
        """Set and create the target directory for downloads"""
        self.pathfmt.set_directory(keywords)

    def handle_queue(self, url):
        """Run a child DownloadJob for a queued URL"""
        try:
            DownloadJob(url).run()
        except exception.NoExtractorError:
            # nothing can handle the queued URL -> silently ignore it
            pass

    def handle_headers(self, headers):
        self.get_downloader("http:").set_headers(headers)

    def handle_cookies(self, cookies):
        self.get_downloader("http:").set_cookies(cookies)

    def get_downloader(self, url):
        """Return, and possibly construct, a downloader suitable for 'url'"""
        scheme, sep, _ = url.partition(":")
        if not sep or scheme == "https":
            # scheme-less URLs and 'https' both map to the http downloader
            scheme = "http"
        try:
            return self.downloaders[scheme]
        except KeyError:
            instance = downloader.find(scheme)(self.out)
            self.downloaders[scheme] = instance
            return instance


class KeywordJob(Job):
    """Print available keywords"""

    def handle_url(self, url, keywords):
        # after the first URL message, all keywords are known -> stop
        print("\nKeywords for filenames:")
        print("-----------------------")
        self.print_keywords(keywords)
        raise exception.StopExtraction()

    def handle_directory(self, keywords):
        print("Keywords for directory names:")
        print("-----------------------------")
        self.print_keywords(keywords)

    @staticmethod
    def print_keywords(keywords, prefix=""):
        """Print key-value pairs with formatting"""
        closing = "]" if prefix else ""
        for name, value in sorted(keywords.items()):
            name = prefix + name + closing

            if isinstance(value, dict):
                # nested mapping: recurse with a bracketed prefix
                KeywordJob.print_keywords(value, name + "[")
                continue

            if isinstance(value, list):
                if value and isinstance(value[0], dict):
                    # list of mappings: document the first entry's keys
                    KeywordJob.print_keywords(value[0], name + "[][")
                else:
                    print(name, "[]", sep="")
                    for entry in value:
                        print(" -", entry)
                continue

            # string or number
            print(name, "\n ", value, sep="")


class UrlJob(Job):
    """Print download urls"""
    maxdepth = -1

    def __init__(self, url, depth=1):
        Job.__init__(self, url)
        self.depth = depth
        # at maximum recursion depth, queued URLs are only printed
        # instead of being followed by a child job
        if depth == self.maxdepth:
            self.handle_queue = self._print

    @staticmethod
    def handle_url(url, _):
        print(url)

    def handle_queue(self, url):
        """Follow a queued URL with a job one level deeper"""
        try:
            UrlJob(url, self.depth + 1).run()
        except exception.NoExtractorError:
            pass

    @staticmethod
    def _print(url):
        # strip an optional "nofollow:" marker before printing
        if url.startswith("nofollow:"):
            url = url[len("nofollow:"):]
        print(url)


class TestJob(DownloadJob):
    """Generate test-results for extractor runs"""

    class HashIO():
        """Minimal file-like interface feeding all writes into a hash"""

        def __init__(self, hashobj):
            self.hashobj = hashobj
            # NOTE(review): 'path' and 'has_extension' mirror PathFormat
            # fields; presumably inspected by downloaders -- verify
            self.path = ""
            self.has_extension = True

        def __enter__(self):
            return self

        def __exit__(self, *args):
            pass

        def open(self):
            return self

        def write(self, content):
            """Update SHA1 hash"""
            self.hashobj.update(content)

    def __init__(self, url, content=False):
        DownloadJob.__init__(self, url)
        self.content = content
        # separate hashes for urls, keyword-dicts and file contents
        self.hash_url = hashlib.sha1()
        self.hash_keyword = hashlib.sha1()
        self.hash_content = hashlib.sha1()
        if content:
            self.fileobj = self.HashIO(self.hash_content)

    def run(self):
        # unlike Job.run(), let any exception propagate to the caller
        for msg in self.extractor:
            self.dispatch(msg)

    def handle_url(self, url, keywords):
        self.update_url(url)
        self.update_keyword(keywords)
        self.update_content(url)

    def handle_directory(self, keywords):
        self.update_keyword(keywords)

    def handle_queue(self, url):
        self.update_url(url)

    def update_url(self, url):
        """Update the URL hash"""
        self.hash_url.update(url.encode())

    def update_keyword(self, kwdict):
        """Update the keyword hash"""
        serialized = json.dumps(kwdict, sort_keys=True)
        self.hash_keyword.update(serialized.encode())

    def update_content(self, url):
        """Update the content hash"""
        if not self.content:
            return
        self.get_downloader(url).download(url, self.fileobj)


class DataJob(Job):
    """Collect extractor results and dump them"""

    def __init__(self, url, file=sys.stdout):
        Job.__init__(self, url)
        self.file = file
        self.data = []
        self.ensure_ascii = config.get(("output", "ascii"), True)

    def run(self):
        # collect data
        try:
            for msg in self.extractor:
                self.data.append(self._copy_message(msg))
        except Exception as exc:
            # record the failure as a (name, message) pair instead of
            # aborting, so partial results still get dumped
            self.data.append((exc.__class__.__name__, str(exc)))

        # dump to 'file'
        json.dump(
            self.data, self.file,
            sort_keys=True, indent=2, ensure_ascii=self.ensure_ascii,
        )
        self.file.write("\n")

    @staticmethod
    def _copy_message(msg):
        # detach message contents from extractor-internal objects
        if msg[0] in (Message.Headers, Message.Cookies):
            return (msg[0], dict(msg[1]))
        return [part.copy() if hasattr(part, "copy") else part
                for part in msg]