# -*- coding: utf-8 -*-

# Copyright 2014-2017 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

from __future__ import unicode_literals, print_function

__author__ = "Mike Fährmann"
__copyright__ = "Copyright 2014-2017 Mike Fährmann"
__license__ = "GPLv2"
__maintainer__ = "Mike Fährmann"
__email__ = "mike_faehrmann@web.de"

import sys

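# bail out early on unsupported interpreters: sys.hexversion packs the
# version into a single integer, so anything below 0x3030000 is older
# than Python 3.3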
if sys.hexversion < 0x3030000:
    print("Python 3.3+ required", file=sys.stderr)
    sys.exit(1)

import logging
from . import version, config, option, extractor, job, util, exception

__version__ = version.__version__
log = logging.getLogger("gallery-dl")


def initialize_logging():
    # convert levelnames to lowercase
    for level in (10, 20, 30, 40, 50):
        name = logging.getLevelName(level)
        logging.addLevelName(level, name.lower())
    # setup basic logging to stderr
    formatter = logging.Formatter("[%(name)s][%(levelname)s] %(message)s")
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(handler)
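

# wrap a list of URLs in a generator that prints a progress indicator to
# stderr before yielding each URL; 'pformat' may be True to use the
# default format below, or a custom str.format() template with the
# fields {current}, {total} and {url}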
def progress(urls, pformat):
    if pformat is True:
        pformat = "[{current}/{total}] {url}"
    pinfo = {"total": len(urls)}
    for pinfo["current"], pinfo["url"] in enumerate(urls, 1):
        print(pformat.format_map(pinfo), file=sys.stderr)
        yield pinfo["url"]
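

# yield the non-empty, whitespace-stripped lines of a file object;
# used to collect URLs from an input file or standard input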
def sanatize_input(file):
    for line in file:
        line = line.strip()
        if line:
            yield line
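

# parse a range specification with util.parse_range()/optimize_range()
# and store the result under ("_", <target>, "range") in the config
# tree; an unusable specification only produces a warning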
def prepare_range(rangespec, target):
    if rangespec:
        range = util.optimize_range(util.parse_range(rangespec))
        if range:
            config.set(("_", target, "range"), range)
        else:
            log.warning("invalid/empty %s range", target)
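

# compile a filter expression ('eval' mode) and store the resulting
# code object under ("_", <target>, "filter") in the config tree;
# a malformed expression only produces a warning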
def prepare_filter(filterexpr, target):
    if filterexpr:
        try:
            name = "<{} filter>".format(target)
            codeobj = compile(filterexpr, name, "eval")
            config.set(("_", target, "filter"), codeobj)
        except (SyntaxError, ValueError, TypeError) as exc:
            log.warning(exc)
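

# command-line entry point: set up logging, parse arguments, load the
# configuration, then run the selected job type for every given URL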
def main():
    try:
        initialize_logging()
        parser = option.build_parser()
        args = parser.parse_args()
        logging.getLogger().setLevel(args.loglevel)

        # configuration
        if args.load_config:
            config.load()
        if args.cfgfiles:
            config.load(*args.cfgfiles, strict=True)
        if args.yamlfiles:
            config.load(*args.yamlfiles, format="yaml", strict=True)
        for key, value in args.options:
            config.set(key, value)
        config.set(("_",), {})

        # logging
        if args.loglevel >= logging.ERROR:
            config.set(("output", "mode"), "null")
        elif args.loglevel <= logging.DEBUG:
            import platform
            import requests
            log.debug("Version %s", __version__)
            log.debug("Python %s - %s",
                      platform.python_version(), platform.platform())
            try:
                log.debug("requests %s", requests.__version__)
                log.debug("urllib3 %s", requests.packages.urllib3.__version__)
            except AttributeError:
                pass

        if args.list_modules:
            for module_name in extractor.modules:
                print(module_name)
        elif args.list_extractors:
            for extr in extractor.extractors():
                if not extr.__doc__:
                    continue
                print(extr.__name__)
                print(extr.__doc__)
                print("Category:", extr.category,
                      "- Subcategory:", extr.subcategory)
                if hasattr(extr, "test") and extr.test:
                    print("Example :", extr.test[0][0])
                print()
        else:
            if not args.urls and not args.inputfile:
                parser.error(
                    "The following arguments are required: URL\n"
                    "Use 'gallery-dl --help' to get a list of all options.")
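
            # select the job class that will process each URL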
            if args.list_urls:
                jobtype = job.UrlJob
                jobtype.maxdepth = args.list_urls
            elif args.list_keywords:
                jobtype = job.KeywordJob
            elif args.list_data:
                jobtype = job.DataJob
            else:
                jobtype = job.DownloadJob

            urls = args.urls
            if args.inputfile:
                try:
                    if args.inputfile == "-":
                        file = sys.stdin
                    else:
                        file = open(args.inputfile)
                    urls += sanatize_input(file)
                except OSError as exc:
                    log.warning("input-file: %s", exc)

            if args.unsupportedfile:
                try:
                    job.Job.ufile = open(args.unsupportedfile, "w")
                except OSError as exc:
                    log.warning("unsupported-URL file: %s", exc)

            prepare_range(args.image_range, "image")
            prepare_range(args.chapter_range, "chapter")
            prepare_filter(args.image_filter, "image")
            prepare_filter(args.chapter_filter, "chapter")

            pformat = config.get(("output", "progress"), True)
            if pformat and len(urls) > 1 and args.loglevel < logging.ERROR:
                urls = progress(urls, pformat)

            for url in urls:
                try:
                    log.debug("Starting %s for '%s'", jobtype.__name__, url)
                    jobtype(url).run()
                except exception.NoExtractorError:
                    log.error("No suitable extractor found for '%s'", url)

    except KeyboardInterrupt:
        print("\nKeyboardInterrupt", file=sys.stderr)
    except BrokenPipeError:
        pass
    except IOError as exc:
        import errno
        if exc.errno != errno.EPIPE:
            raise