2017-02-20 22:02:49 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2020-01-17 23:27:13 +01:00
|
|
|
# Copyright 2017-2020 Mike Fährmann
|
2017-02-20 22:02:49 +01:00
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
|
2017-03-28 13:12:44 +02:00
|
|
|
"""Utility functions and classes"""
|
2017-02-20 22:02:49 +01:00
|
|
|
|
2017-10-09 22:12:58 +02:00
|
|
|
import re
|
2017-03-28 13:12:44 +02:00
|
|
|
import os
|
2017-02-23 21:51:29 +01:00
|
|
|
import sys
|
2019-05-09 16:22:06 +02:00
|
|
|
import json
|
2019-06-19 23:16:32 +02:00
|
|
|
import time
|
2017-10-24 23:33:44 +02:00
|
|
|
import shutil
|
2017-06-16 21:01:40 +02:00
|
|
|
import string
|
2017-09-27 21:18:34 +02:00
|
|
|
import _string
|
2020-10-15 00:43:26 +02:00
|
|
|
import hashlib
|
2018-01-29 22:13:06 +01:00
|
|
|
import sqlite3
|
2017-10-03 22:38:48 +02:00
|
|
|
import datetime
|
2018-08-24 20:21:05 +02:00
|
|
|
import operator
|
2017-12-03 01:38:24 +01:00
|
|
|
import itertools
|
2017-06-16 21:01:40 +02:00
|
|
|
import urllib.parse
|
2020-01-21 21:59:36 +01:00
|
|
|
from http.cookiejar import Cookie
|
2019-07-14 22:37:28 +02:00
|
|
|
from email.utils import mktime_tz, parsedate_tz
|
2017-08-12 21:32:24 +02:00
|
|
|
from . import text, exception
|
2017-02-20 22:02:49 +01:00
|
|
|
|
|
|
|
|
2018-03-14 13:17:34 +01:00
|
|
|
def bencode(num, alphabet="0123456789"):
    """Encode an integer into a base-N encoded string

    N is given by len(alphabet); an input of 0 yields the empty string.
    """
    base = len(alphabet)
    digits = []
    while num:
        num, remainder = divmod(num, base)
        digits.append(alphabet[remainder])
    digits.reverse()
    return "".join(digits)
|
|
|
|
|
|
|
|
|
2017-06-01 18:14:33 +02:00
|
|
|
def bdecode(data, alphabet="0123456789"):
    """Decode a base-N encoded string ( N = len(alphabet) )

    Raises ValueError if 'data' contains a character not in 'alphabet'.
    """
    base = len(alphabet)
    index = alphabet.index
    result = 0
    for char in data:
        result = result * base + index(char)
    return result
|
|
|
|
|
|
|
|
|
2017-12-03 01:38:24 +01:00
|
|
|
def advance(iterable, num):
    """Advance the iterable by 'num' steps

    Returns an iterator positioned after the first 'num' items.
    (Fixes the stray quadruple-quote typo that made the docstring
    start with a literal '"'.)
    """
    iterator = iter(iterable)
    # islice(iterator, num, num) consumes the first 'num' items
    # without yielding anything; next(..., None) triggers it
    next(itertools.islice(iterator, num, num), None)
    return iterator
|
|
|
|
|
|
|
|
|
2019-10-28 15:06:17 +01:00
|
|
|
def raises(cls):
    """Returns a function that raises 'cls' as exception"""
    def _raise(*args):
        raise cls(*args)
    return _raise
|
|
|
|
|
|
|
|
|
2020-10-15 00:43:26 +02:00
|
|
|
def generate_csrf_token():
    """Return a random 32-character hexadecimal token

    Uses os.urandom(), a cryptographically secure source, instead of
    the previous md5(str(time.time())) scheme whose output is
    predictable from the (guessable) current timestamp.
    The return format is unchanged: a 32-char lowercase hex string.
    """
    return os.urandom(16).hex()
|
|
|
|
|
|
|
|
|
2017-08-12 20:07:27 +02:00
|
|
|
def combine_dict(a, b):
    """Recursively combine the contents of 'b' into 'a'

    Nested dicts present in both are merged; any other value
    from 'b' overwrites the one in 'a'. Returns 'a'.
    """
    for key, new in b.items():
        old = a.get(key)
        if isinstance(new, dict) and isinstance(old, dict):
            combine_dict(old, new)
        else:
            a[key] = new
    return a
|
2017-08-12 20:07:27 +02:00
|
|
|
|
|
|
|
|
2018-10-08 20:28:54 +02:00
|
|
|
def transform_dict(a, func):
    """Recursively apply 'func' to all values in 'a' (in place)"""
    for key in a:
        current = a[key]
        if isinstance(current, dict):
            transform_dict(current, func)
        else:
            a[key] = func(current)
|
|
|
|
|
|
|
|
|
2019-11-21 16:57:39 +01:00
|
|
|
def filter_dict(a):
    """Return a copy of 'a' without "private" entries

    An entry is "private" if its key starts with an underscore.
    """
    public = {}
    for key, value in a.items():
        if key[0] != "_":
            public[key] = value
    return public
|
|
|
|
|
|
|
|
|
2020-06-06 23:49:49 +02:00
|
|
|
def delete_items(obj, keys):
    """Remove all 'keys' from 'obj'; keys not present are ignored"""
    for key in keys:
        if key not in obj:
            continue
        del obj[key]
|
|
|
|
|
|
|
|
|
2019-02-14 11:15:19 +01:00
|
|
|
def number_to_string(value, numbers=(int, float)):
    """Convert numbers (int, float) to string; Return everything else as is."""
    # exact __class__ comparison (not isinstance) so that bool,
    # although a subclass of int, is returned unchanged
    if value.__class__ in numbers:
        return str(value)
    return value
|
2018-10-08 20:28:54 +02:00
|
|
|
|
|
|
|
|
2019-03-04 21:13:34 +01:00
|
|
|
def to_string(value):
    """str() with "better" defaults

    Falsy values (None, 0, "", [], ...) become "";
    lists are joined with ", " (stringifying elements if needed).
    """
    if value:
        if value.__class__ is not list:
            return str(value)
        try:
            return ", ".join(value)
        except Exception:
            # list contains non-string elements
            return ", ".join(map(str, value))
    return ""
|
|
|
|
|
|
|
|
|
2019-05-09 16:22:06 +02:00
|
|
|
def dump_json(obj, fp=sys.stdout, ensure_ascii=True, indent=4):
    """Serialize 'obj' as JSON, write it to 'fp', append a newline"""
    # default=str stringifies values the json module cannot serialize;
    # sorted keys keep the output deterministic
    json.dump(obj, fp, ensure_ascii=ensure_ascii, indent=indent,
              default=str, sort_keys=True)
    fp.write("\n")
|
|
|
|
|
|
|
|
|
2020-06-18 15:07:30 +02:00
|
|
|
def dump_response(response, fp, *,
                  headers=False, content=True, hide_auth=True):
    """Write the contents of 'response' into a file-like object

    response : HTTP response object exposing .request, .headers,
               .status_code, .reason, and .content
               (presumably a requests.Response — TODO confirm)
    fp       : binary file-like object; header text is encoded before
               writing and the body is written as raw bytes
    headers  : also dump request and response headers
    content  : dump the raw response body
    hide_auth: mask Authorization / Cookie / Set-Cookie values
    """

    if headers:
        request = response.request
        # copy before masking so the live header dicts stay untouched
        req_headers = request.headers.copy()
        res_headers = response.headers.copy()
        outfmt = """\
{request.method} {request.url}
Status: {response.status_code} {response.reason}

Request Headers
---------------
{request_headers}

Response Headers
----------------
{response_headers}
"""
        if hide_auth:
            authorization = req_headers.get("Authorization")
            if authorization:
                # keep the scheme ("Bearer", "Basic", ...) visible,
                # mask only the credential part
                atype, sep, _ = authorization.partition(" ")
                req_headers["Authorization"] = atype + " ***" if sep else "***"

            cookie = req_headers.get("Cookie")
            if cookie:
                # keep cookie names, mask their values
                req_headers["Cookie"] = ";".join(
                    c.partition("=")[0] + "=***"
                    for c in cookie.split(";")
                )

            set_cookie = res_headers.get("Set-Cookie")
            if set_cookie:
                # mask each "name=value" pair in the (comma-joined)
                # Set-Cookie header value
                res_headers["Set-Cookie"] = re.sub(
                    r"(^|, )([^ =]+)=[^,;]*", r"\1\2=***", set_cookie,
                )

        fp.write(outfmt.format(
            request=request,
            response=response,
            request_headers="\n".join(
                name + ": " + value
                for name, value in req_headers.items()
            ),
            response_headers="\n".join(
                name + ": " + value
                for name, value in res_headers.items()
            ),
        ).encode())

    if content:
        if headers:
            # separator between header dump and raw body
            fp.write(b"\nContent\n-------\n")
        fp.write(response.content)
|
|
|
|
|
|
|
|
|
2017-10-26 00:04:28 +02:00
|
|
|
def expand_path(path):
    """Expand environment variables and tildes (~)

    'path' may also be a sequence of path segments,
    which are joined with os.path.join() first.
    """
    if not path:
        return path
    if isinstance(path, str):
        joined = path
    else:
        joined = os.path.join(*path)
    return os.path.expandvars(os.path.expanduser(joined))
|
|
|
|
|
|
|
|
|
2020-01-17 23:51:07 +01:00
|
|
|
def remove_file(path):
    """Delete the file at 'path'; OS-level errors are ignored"""
    try:
        # os.remove is an alias of os.unlink
        os.remove(path)
    except OSError:
        pass
|
|
|
|
|
|
|
|
|
|
|
|
def remove_directory(path):
    """Delete the (empty) directory at 'path'; OS-level errors are ignored"""
    try:
        os.rmdir(path)
    except OSError:
        # missing directory, non-empty directory, permissions, ...
        pass
|
|
|
|
|
|
|
|
|
2020-01-25 22:57:08 +01:00
|
|
|
def load_cookiestxt(fp):
    """Parse a Netscape cookies.txt file and return a list of its Cookies

    fp: text file-like object, iterated line by line
    """
    cookies = []

    for line in fp:

        line = line.lstrip()
        # strip '#HttpOnly_'
        if line.startswith("#HttpOnly_"):
            line = line[10:]
        # ignore empty lines and comments
        if not line or line[0] in ("#", "$"):
            continue
        # strip trailing '\n'
        if line[-1] == "\n":
            line = line[:-1]

        # the 7 tab-separated cookies.txt columns
        domain, domain_specified, path, secure, expires, name, value = \
            line.split("\t")
        if not name:
            # an empty name column marks a value-only cookie
            name = value
            value = None

        # http.cookiejar.Cookie positional arguments:
        # version, name, value, port, port_specified, domain,
        # domain_specified, domain_initial_dot, path, path_specified,
        # secure, expires, discard, comment, comment_url, rest
        cookies.append(Cookie(
            0, name, value,
            None, False,
            domain,
            domain_specified == "TRUE",
            domain.startswith("."),
            path, False,
            secure == "TRUE",
            # NOTE(review): 'expires' stays a string here rather than
            # being converted to int — presumably accepted downstream;
            # confirm before changing
            None if expires == "0" or not expires else expires,
            False, None, None, {},
        ))

    return cookies
|
|
|
|
|
|
|
|
|
2020-01-25 22:57:08 +01:00
|
|
|
def save_cookiestxt(fp, cookies):
    """Write 'cookies' in Netscape cookies.txt format to 'fp'"""
    write = fp.write
    write("# Netscape HTTP Cookie File\n\n")

    for cookie in cookies:
        name = cookie.name
        value = cookie.value
        if value is None:
            # value-only cookie: cookies.txt stores it with an
            # empty name column
            name, value = "", name

        write("\t".join((
            cookie.domain,
            "TRUE" if cookie.domain.startswith(".") else "FALSE",
            cookie.path,
            "TRUE" if cookie.secure else "FALSE",
            "0" if cookie.expires is None else str(cookie.expires),
            name,
            value,
        )) + "\n")
|
2020-01-21 21:59:36 +01:00
|
|
|
|
|
|
|
|
2017-08-08 19:22:04 +02:00
|
|
|
def code_to_language(code, default=None):
    """Map an ISO 639-1 language code to its actual name"""
    key = (code or "").lower()
    return CODES.get(key, default)
|
2017-03-28 13:12:44 +02:00
|
|
|
|
|
|
|
|
2017-08-08 19:22:04 +02:00
|
|
|
def language_to_code(lang, default=None):
    """Map a language name to its ISO 639-1 code"""
    if lang is None:
        return default
    wanted = lang.capitalize()
    # reverse lookup in the code -> name table
    return next(
        (code for code, language in CODES.items() if language == wanted),
        default,
    )
|
|
|
|
|
|
|
|
|
2017-06-16 21:01:40 +02:00
|
|
|
# ISO 639-1 language codes mapped to their English language names
# NOTE(review): "jp" is used where the ISO 639-1 code for Japanese is
# "ja" — presumably matching site conventions; confirm before changing
CODES = {
    "ar": "Arabic",
    "bg": "Bulgarian",
    "ca": "Catalan",
    "cs": "Czech",
    "da": "Danish",
    "de": "German",
    "el": "Greek",
    "en": "English",
    "es": "Spanish",
    "fi": "Finnish",
    "fr": "French",
    "he": "Hebrew",
    "hu": "Hungarian",
    "id": "Indonesian",
    "it": "Italian",
    "jp": "Japanese",
    "ko": "Korean",
    "ms": "Malay",
    "nl": "Dutch",
    "no": "Norwegian",
    "pl": "Polish",
    "pt": "Portuguese",
    "ro": "Romanian",
    "ru": "Russian",
    "sv": "Swedish",
    "th": "Thai",
    "tr": "Turkish",
    "vi": "Vietnamese",
    "zh": "Chinese",
}
|
|
|
|
|
2018-01-14 18:47:22 +01:00
|
|
|
# extractor names that do not correspond to a regular website extractor
SPECIAL_EXTRACTORS = {"oauth", "recursive", "test"}
|
2017-06-16 21:01:40 +02:00
|
|
|
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2019-02-13 17:39:43 +01:00
|
|
|
class UniversalNone():
    """None-style object that supports more operations than None itself"""
    __slots__ = ()

    def __getattribute__(self, _):
        # any attribute access yields the object itself,
        # so arbitrarily deep chains (NONE.foo.bar) are safe
        return self

    def __getitem__(self, _):
        # same for item access: NONE["key"] is NONE
        return self

    @staticmethod
    def __bool__():
        # falsy, like the real None
        return False

    @staticmethod
    def __str__():
        return "None"

    __repr__ = __str__
|
|
|
|
|
|
|
|
|
|
|
|
# shared UniversalNone singleton
NONE = UniversalNone()
# True when running on Windows
WINDOWS = (os.name == "nt")
# unique marker object to distinguish "argument not given" from None
SENTINEL = object()
|
2019-02-13 17:39:43 +01:00
|
|
|
|
|
|
|
|
2017-09-06 17:08:50 +02:00
|
|
|
def build_predicate(predicates):
    """Combine a list of predicates into a single callable

    Returns an always-True predicate for an empty list, the predicate
    itself for a single-element list, and a ChainPredicate otherwise.
    """
    if not predicates:
        return lambda url, kwds: True
    if len(predicates) == 1:
        return predicates[0]
    return ChainPredicate(predicates)
|
|
|
|
|
|
|
|
|
2017-02-23 21:51:29 +01:00
|
|
|
class RangePredicate():
    """Predicate; True if the current index is in the given range"""

    def __init__(self, rangespec):
        # parsed, sorted, and merged list of (lower, upper) tuples
        self.ranges = self.optimize_range(self.parse_range(rangespec))
        # number of predicate calls so far (1-based after increment)
        self.index = 0

        if self.ranges:
            # optimize_range() sorts, so first/last give global bounds
            self.lower, self.upper = self.ranges[0][0], self.ranges[-1][1]
        else:
            self.lower, self.upper = 0, 0

    def __call__(self, url, kwds):
        self.index += 1

        if self.index > self.upper:
            # past the last range: nothing further can match
            raise exception.StopExtraction()

        for lower, upper in self.ranges:
            if lower <= self.index <= upper:
                return True
        return False

    @staticmethod
    def parse_range(rangespec):
        """Parse an integer range string and return the resulting ranges

        Examples:
            parse_range("-2,4,6-8,10-") -> [(1,2), (4,4), (6,8), (10,INTMAX)]
            parse_range(" - 3 , 4- 4, 2-6") -> [(1,3), (4,4), (2,6)]
        """
        ranges = []

        for group in rangespec.split(","):
            if not group:
                continue
            first, sep, last = group.partition("-")
            if not sep:
                # a single number, e.g. "4"
                beg = end = int(first)
            else:
                # open bounds default to 1 and sys.maxsize
                beg = int(first) if first.strip() else 1
                end = int(last) if last.strip() else sys.maxsize
            # normalize so lower bound comes first
            ranges.append((beg, end) if beg <= end else (end, beg))

        return ranges

    @staticmethod
    def optimize_range(ranges):
        """Simplify/Combine a parsed list of ranges

        Examples:
            optimize_range([(2,4), (4,6), (5,8)]) -> [(2,8)]
            optimize_range([(1,1), (2,2), (3,6), (8,9))]) -> [(1,6), (8,9)]
        """
        if len(ranges) <= 1:
            return ranges

        ranges.sort()
        riter = iter(ranges)
        result = []

        beg, end = next(riter)
        for lower, upper in riter:
            if lower > end+1:
                # disjoint: flush the current merged range
                result.append((beg, end))
                beg, end = lower, upper
            elif upper > end:
                # overlapping or adjacent: extend the current range
                end = upper
        result.append((beg, end))
        return result
|
|
|
|
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2017-09-06 17:08:50 +02:00
|
|
|
class UniquePredicate():
    """Predicate; True if given URL has not been encountered before"""

    def __init__(self):
        # set of all URLs seen so far
        self.urls = set()

    def __call__(self, url, kwds):
        # "text:" URLs are never considered duplicates
        if url.startswith("text:"):
            return True
        if url in self.urls:
            return False
        self.urls.add(url)
        return True
|
|
|
|
|
|
|
|
|
2017-09-08 17:52:00 +02:00
|
|
|
class FilterPredicate():
    """Predicate; True if evaluating the given expression returns True"""

    def __init__(self, filterexpr, target="image"):
        # compile once; 'name' only labels tracebacks
        name = "<{} filter>".format(target)
        self.codeobj = compile(filterexpr, name, "eval")
        # names available inside filter expressions
        self.globals = {
            "parse_int": text.parse_int,
            "urlsplit" : urllib.parse.urlsplit,
            "datetime" : datetime.datetime,
            "abort"    : raises(exception.StopExtraction),
            "re"       : re,
        }

    def __call__(self, url, kwds):
        try:
            # NOTE: eval() of a user-supplied expression; filter
            # expressions come from the user's own configuration,
            # not from remote/untrusted input
            return eval(self.codeobj, self.globals, kwds)
        except exception.GalleryDLException:
            # let the project's own control-flow exceptions propagate
            raise
        except Exception as exc:
            # wrap everything else so callers see a uniform error type
            raise exception.FilterError(exc)
|
|
|
|
|
|
|
|
|
2017-09-06 17:08:50 +02:00
|
|
|
class ChainPredicate():
    """Predicate; True if all of its predicates return True"""

    def __init__(self, predicates):
        self.predicates = predicates

    def __call__(self, url, kwds):
        return all(pred(url, kwds) for pred in self.predicates)
|
|
|
|
|
|
|
|
|
2018-02-07 21:47:27 +01:00
|
|
|
class ExtendedUrl():
    """URL with attached config key-value pairs"""

    def __init__(self, url, gconf, lconf):
        self.value = url
        self.gconfig = gconf
        self.lconfig = lconf

    def __str__(self):
        return self.value
|
|
|
|
|
|
|
|
|
2017-09-27 21:18:34 +02:00
|
|
|
class Formatter():
    """Custom, extended version of string.Formatter

    This string formatter implementation is a mostly performance-optimized
    variant of the original string.Formatter class. Unnecessary features have
    been removed (positional arguments, unused argument check) and new
    formatting options have been added.

    Extra Conversions:
    - "l": calls str.lower on the target value
    - "u": calls str.upper
    - "c": calls str.capitalize
    - "C": calls string.capwords
    - "t": calls str.strip
    - "U": calls urllib.parse.unquote
    - "S": calls util.to_string()
    - Example: {f!l} -> "example"; {f!u} -> "EXAMPLE"

    Extra Format Specifiers:
    - "?<before>/<after>/":
        Adds <before> and <after> to the actual value if it evaluates to True.
        Otherwise the whole replacement field becomes an empty string.
        Example: {f:?-+/+-/} -> "-+Example+-" (if "f" contains "Example")
                             -> ""            (if "f" is None, 0, "")

    - "L<maxlen>/<replacement>/":
        Replaces the output with <replacement> if its length (in characters)
        exceeds <maxlen>. Otherwise everything is left as is.
        Example: {f:L5/too long/} -> "foo"      (if "f" is "foo")
                                  -> "too long" (if "f" is "foobar")

    - "J<separator>/":
        Joins elements of a list (or string) using <separator>
        Example: {f:J - /} -> "a - b - c" (if "f" is ["a", "b", "c"])

    - "R<old>/<new>/":
        Replaces all occurrences of <old> with <new>
        Example: {f:R /_/} -> "f_o_o_b_a_r" (if "f" is "f o o b a r")
    """
    # conversion character -> callable applied to the field value
    CONVERSIONS = {
        "l": str.lower,
        "u": str.upper,
        "c": str.capitalize,
        "C": string.capwords,
        "t": str.strip,
        "U": urllib.parse.unquote,
        "S": to_string,
        "s": str,
        "r": repr,
        "a": ascii,
    }

    def __init__(self, format_string, default=None):
        # 'default' is substituted when a field lookup fails
        self.default = default
        # 'result' collects literal text segments; 'fields' maps
        # result indices to value-producing closures
        self.result = []
        self.fields = []

        for literal_text, field_name, format_spec, conversion in \
                _string.formatter_parser(format_string):
            if literal_text:
                self.result.append(literal_text)
            if field_name:
                self.fields.append((
                    len(self.result),
                    self._field_access(field_name, format_spec, conversion),
                ))
                # placeholder slot, filled in by format_map()
                self.result.append("")

        if len(self.result) == 1:
            # single-segment optimization: bypass the generic
            # format_map() implementation entirely
            if self.fields:
                self.format_map = self.fields[0][1]
            else:
                self.format_map = lambda _: format_string
            del self.result, self.fields

    def format_map(self, kwdict):
        """Apply 'kwdict' to the initial format_string and return its result"""
        result = self.result
        for index, func in self.fields:
            result[index] = func(kwdict)
        return "".join(result)

    def _field_access(self, field_name, format_spec, conversion):
        """Build the value-producing closure for one replacement field"""
        fmt = self._parse_format_spec(format_spec, conversion)

        if "|" in field_name:
            # alternatives: first truthy field wins
            return self._apply_list([
                self._parse_field_name(fn)
                for fn in field_name.split("|")
            ], fmt)
        else:
            key, funcs = self._parse_field_name(field_name)
            if funcs:
                return self._apply(key, funcs, fmt)
            return self._apply_simple(key, fmt)

    @staticmethod
    def _parse_field_name(field_name):
        """Split a field name into its first key and accessor functions"""
        first, rest = _string.formatter_field_name_split(field_name)
        funcs = []

        for is_attr, key in rest:
            if is_attr:
                func = operator.attrgetter
            else:
                func = operator.itemgetter
                try:
                    # "a:b:c" inside [...] is interpreted as a slice
                    if ":" in key:
                        start, _, stop = key.partition(":")
                        stop, _, step = stop.partition(":")
                        start = int(start) if start else None
                        stop = int(stop) if stop else None
                        step = int(step) if step else None
                        key = slice(start, stop, step)
                except TypeError:
                    pass  # key is an integer
            funcs.append(func(key))

        return first, funcs

    def _parse_format_spec(self, format_spec, conversion):
        """Combine format spec and conversion into a single callable"""
        fmt = self._build_format_func(format_spec)
        if not conversion:
            return fmt

        conversion = self.CONVERSIONS[conversion]
        if fmt is format:
            # no effective format spec -> conversion alone suffices
            return conversion
        else:
            def chain(obj):
                return fmt(conversion(obj))
            return chain

    def _build_format_func(self, format_spec):
        """Return a callable implementing 'format_spec'

        The first character selects one of the extended specifiers;
        anything else falls back to the built-in format().
        """
        if format_spec:
            fmt = format_spec[0]
            if fmt == "?":
                return self._parse_optional(format_spec)
            if fmt == "L":
                return self._parse_maxlen(format_spec)
            if fmt == "J":
                return self._parse_join(format_spec)
            if fmt == "R":
                return self._parse_replace(format_spec)
            return self._default_format(format_spec)
        return format

    def _apply(self, key, funcs, fmt):
        # field access with attribute/item/slice accessors;
        # any failure falls back to self.default
        def wrap(kwdict):
            try:
                obj = kwdict[key]
                for func in funcs:
                    obj = func(obj)
            except Exception:
                obj = self.default
            return fmt(obj)
        return wrap

    def _apply_simple(self, key, fmt):
        # plain single-key lookup with default fallback
        def wrap(kwdict):
            return fmt(kwdict[key] if key in kwdict else self.default)
        return wrap

    def _apply_list(self, lst, fmt):
        # try each alternative in order; the first truthy value wins,
        # otherwise self.default is used
        def wrap(kwdict):
            for key, funcs in lst:
                try:
                    obj = kwdict[key]
                    for func in funcs:
                        obj = func(obj)
                    if obj:
                        break
                except Exception:
                    pass
            else:
                obj = self.default
            return fmt(obj)
        return wrap

    def _parse_optional(self, format_spec):
        # "?<before>/<after>/<rest>" specifier
        before, after, format_spec = format_spec.split("/", 2)
        before = before[1:]
        fmt = self._build_format_func(format_spec)

        def optional(obj):
            return before + fmt(obj) + after if obj else ""
        return optional

    def _parse_maxlen(self, format_spec):
        # "L<maxlen>/<replacement>/<rest>" specifier
        maxlen, replacement, format_spec = format_spec.split("/", 2)
        maxlen = text.parse_int(maxlen[1:])
        fmt = self._build_format_func(format_spec)

        def mlen(obj):
            obj = fmt(obj)
            return obj if len(obj) <= maxlen else replacement
        return mlen

    def _parse_join(self, format_spec):
        # "J<separator>/<rest>" specifier
        separator, _, format_spec = format_spec.partition("/")
        separator = separator[1:]
        fmt = self._build_format_func(format_spec)

        def join(obj):
            return fmt(separator.join(obj))
        return join

    def _parse_replace(self, format_spec):
        # "R<old>/<new>/<rest>" specifier
        old, new, format_spec = format_spec.split("/", 2)
        old = old[1:]
        fmt = self._build_format_func(format_spec)

        def replace(obj):
            return fmt(obj.replace(old, new))
        return replace

    @staticmethod
    def _default_format(format_spec):
        # plain built-in format() with a fixed spec
        def wrap(obj):
            return format(obj, format_spec)
        return wrap
|
2017-09-27 21:18:34 +02:00
|
|
|
|
|
|
|
|
2017-03-28 13:12:44 +02:00
|
|
|
class PathFormat():
|
|
|
|
|
|
|
|
def __init__(self, extractor):
|
2019-08-19 15:56:20 +02:00
|
|
|
filename_fmt = extractor.config("filename", extractor.filename_fmt)
|
|
|
|
directory_fmt = extractor.config("directory", extractor.directory_fmt)
|
|
|
|
kwdefault = extractor.config("keywords-default")
|
2018-08-24 20:21:05 +02:00
|
|
|
|
|
|
|
try:
|
2019-08-19 15:56:20 +02:00
|
|
|
self.filename_formatter = Formatter(
|
|
|
|
filename_fmt, kwdefault).format_map
|
2018-08-24 20:21:05 +02:00
|
|
|
except Exception as exc:
|
2019-10-27 23:05:00 +01:00
|
|
|
raise exception.FilenameFormatError(exc)
|
2017-10-06 15:47:06 +02:00
|
|
|
|
2019-08-19 15:56:20 +02:00
|
|
|
try:
|
|
|
|
self.directory_formatters = [
|
|
|
|
Formatter(dirfmt, kwdefault).format_map
|
|
|
|
for dirfmt in directory_fmt
|
|
|
|
]
|
|
|
|
except Exception as exc:
|
2019-10-27 23:05:00 +01:00
|
|
|
raise exception.DirectoryFormatError(exc)
|
2019-08-19 15:56:20 +02:00
|
|
|
|
2017-03-28 13:12:44 +02:00
|
|
|
self.directory = self.realdirectory = ""
|
2020-07-04 22:00:34 +02:00
|
|
|
self.filename = self.extension = self.prefix = ""
|
2018-06-06 20:17:17 +02:00
|
|
|
self.path = self.realpath = self.temppath = ""
|
2020-07-04 22:00:34 +02:00
|
|
|
self.kwdict = {}
|
|
|
|
self.delete = self._create_directory = False
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2020-01-29 18:32:37 +01:00
|
|
|
basedir = extractor._parentdir
|
|
|
|
if not basedir:
|
|
|
|
basedir = expand_path(
|
|
|
|
extractor.config("base-directory", (".", "gallery-dl")))
|
|
|
|
if os.altsep and os.altsep in basedir:
|
|
|
|
basedir = basedir.replace(os.altsep, os.sep)
|
|
|
|
if basedir[-1] != os.sep:
|
|
|
|
basedir += os.sep
|
2019-08-20 00:25:13 +02:00
|
|
|
self.basedirectory = basedir
|
2017-08-12 21:32:24 +02:00
|
|
|
|
2019-08-16 21:13:49 +02:00
|
|
|
restrict = extractor.config("path-restrict", "auto")
|
2020-05-24 17:35:25 +02:00
|
|
|
replace = extractor.config("path-replace", "_")
|
|
|
|
|
2019-07-23 17:36:07 +02:00
|
|
|
if restrict == "auto":
|
2020-05-19 21:42:11 +02:00
|
|
|
restrict = "\\\\|/<>:\"?*" if WINDOWS else "/"
|
2019-07-23 17:36:07 +02:00
|
|
|
elif restrict == "unix":
|
|
|
|
restrict = "/"
|
|
|
|
elif restrict == "windows":
|
2019-08-16 21:13:49 +02:00
|
|
|
restrict = "\\\\|/<>:\"?*"
|
2020-05-24 17:35:25 +02:00
|
|
|
self.clean_segment = self._build_cleanfunc(restrict, replace)
|
2019-08-16 21:13:49 +02:00
|
|
|
|
|
|
|
remove = extractor.config("path-remove", "\x00-\x1f\x7f")
|
|
|
|
self.clean_path = self._build_cleanfunc(remove, "")
|
2019-07-23 17:36:07 +02:00
|
|
|
|
|
|
|
@staticmethod
|
2019-08-16 21:13:49 +02:00
|
|
|
def _build_cleanfunc(chars, repl):
|
|
|
|
if not chars:
|
2019-07-23 17:36:07 +02:00
|
|
|
return lambda x: x
|
2020-05-22 01:14:10 +02:00
|
|
|
elif isinstance(chars, dict):
|
|
|
|
def func(x, table=str.maketrans(chars)):
|
|
|
|
return x.translate(table)
|
2019-08-16 21:13:49 +02:00
|
|
|
elif len(chars) == 1:
|
|
|
|
def func(x, c=chars, r=repl):
|
|
|
|
return x.replace(c, r)
|
2019-07-23 17:36:07 +02:00
|
|
|
else:
|
2019-08-16 21:13:49 +02:00
|
|
|
def func(x, sub=re.compile("[" + chars + "]").sub, r=repl):
|
|
|
|
return sub(r, x)
|
2019-07-23 17:36:07 +02:00
|
|
|
return func
|
|
|
|
|
2017-05-12 14:10:25 +02:00
|
|
|
def open(self, mode="wb"):
|
2017-10-24 23:33:44 +02:00
|
|
|
"""Open file and return a corresponding file object"""
|
2018-06-06 20:17:17 +02:00
|
|
|
return open(self.temppath, mode)
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2020-09-03 18:37:38 +02:00
|
|
|
def exists(self):
|
|
|
|
"""Return True if the file exists on disk"""
|
2019-08-12 21:40:37 +02:00
|
|
|
if self.extension and os.path.exists(self.realpath):
|
2019-08-08 18:34:31 +02:00
|
|
|
return self.check_file()
|
2017-03-28 13:12:44 +02:00
|
|
|
return False
|
|
|
|
|
2019-08-08 18:34:31 +02:00
|
|
|
@staticmethod
|
|
|
|
def check_file():
|
|
|
|
return True
|
|
|
|
|
|
|
|
def _enum_file(self):
|
|
|
|
num = 1
|
2020-01-17 23:27:13 +01:00
|
|
|
try:
|
|
|
|
while True:
|
|
|
|
self.prefix = str(num) + "."
|
|
|
|
self.set_extension(self.extension, False)
|
|
|
|
os.stat(self.realpath) # raises OSError if file doesn't exist
|
|
|
|
num += 1
|
|
|
|
except OSError:
|
|
|
|
pass
|
|
|
|
return False
|
2019-08-08 18:34:31 +02:00
|
|
|
|
2019-08-12 21:40:37 +02:00
|
|
|
def set_directory(self, kwdict):
|
2017-03-28 13:12:44 +02:00
|
|
|
"""Build directory path and create it if necessary"""
|
2020-05-18 01:35:53 +02:00
|
|
|
self.kwdict = kwdict
|
2019-08-12 21:40:37 +02:00
|
|
|
|
|
|
|
# Build path segments by applying 'kwdict' to directory format strings
|
2019-08-28 23:21:28 +02:00
|
|
|
segments = []
|
|
|
|
append = segments.append
|
2017-08-11 21:48:37 +02:00
|
|
|
try:
|
2019-08-28 23:21:28 +02:00
|
|
|
for formatter in self.directory_formatters:
|
|
|
|
segment = formatter(kwdict).strip()
|
2020-05-19 21:42:11 +02:00
|
|
|
if WINDOWS:
|
2020-03-19 21:12:18 +01:00
|
|
|
# remove trailing dots and spaces (#647)
|
|
|
|
segment = segment.rstrip(". ")
|
2019-08-28 23:21:28 +02:00
|
|
|
if segment:
|
|
|
|
append(self.clean_segment(segment))
|
2017-08-11 21:48:37 +02:00
|
|
|
except Exception as exc:
|
2019-10-27 23:05:00 +01:00
|
|
|
raise exception.DirectoryFormatError(exc)
|
2017-08-11 21:48:37 +02:00
|
|
|
|
2020-01-17 23:27:13 +01:00
|
|
|
# Join path segments
|
2019-08-20 00:25:13 +02:00
|
|
|
sep = os.sep
|
|
|
|
directory = self.clean_path(self.basedirectory + sep.join(segments))
|
2018-03-22 10:24:59 +01:00
|
|
|
|
2019-08-28 23:21:28 +02:00
|
|
|
# Ensure 'directory' ends with a path separator
|
|
|
|
if segments:
|
2019-08-20 00:25:13 +02:00
|
|
|
directory += sep
|
2019-08-19 15:56:20 +02:00
|
|
|
self.directory = directory
|
2018-03-22 10:24:59 +01:00
|
|
|
|
2020-05-19 21:42:11 +02:00
|
|
|
if WINDOWS:
|
2019-08-28 23:21:28 +02:00
|
|
|
# Enable longer-than-260-character paths on Windows
|
|
|
|
directory = "\\\\?\\" + os.path.abspath(directory)
|
|
|
|
|
|
|
|
# abspath() in Python 3.7+ removes trailing path separators (#402)
|
|
|
|
if directory[-1] != sep:
|
|
|
|
directory += sep
|
|
|
|
|
|
|
|
self.realdirectory = directory
|
2020-07-04 22:00:34 +02:00
|
|
|
self._create_directory = True
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2019-08-12 21:40:37 +02:00
|
|
|
def set_filename(self, kwdict):
|
|
|
|
"""Set general filename data"""
|
|
|
|
self.kwdict = kwdict
|
2019-08-16 22:06:26 +02:00
|
|
|
self.temppath = self.prefix = ""
|
2019-08-12 21:40:37 +02:00
|
|
|
self.extension = kwdict["extension"]
|
|
|
|
|
|
|
|
if self.extension:
|
2017-03-28 13:12:44 +02:00
|
|
|
self.build_path()
|
2020-05-16 22:05:40 +02:00
|
|
|
else:
|
|
|
|
self.filename = ""
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2017-10-24 12:53:03 +02:00
|
|
|
def set_extension(self, extension, real=True):
|
2019-08-12 21:40:37 +02:00
|
|
|
"""Set filename extension"""
|
|
|
|
if real:
|
|
|
|
self.extension = extension
|
2019-08-16 22:06:26 +02:00
|
|
|
self.kwdict["extension"] = self.prefix + extension
|
2017-03-28 13:12:44 +02:00
|
|
|
self.build_path()
|
|
|
|
|
2019-07-15 16:39:03 +02:00
|
|
|
def fix_extension(self, _=None):
|
2019-08-12 21:40:37 +02:00
|
|
|
"""Fix filenames without a given filename extension"""
|
|
|
|
if not self.extension:
|
|
|
|
self.set_extension("", False)
|
2019-07-15 16:39:03 +02:00
|
|
|
if self.path[-1] == ".":
|
|
|
|
self.path = self.path[:-1]
|
|
|
|
self.temppath = self.realpath = self.realpath[:-1]
|
|
|
|
return True
|
|
|
|
|
2019-11-29 22:32:07 +01:00
|
|
|
def build_filename(self):
|
|
|
|
"""Apply 'kwdict' to filename format string"""
|
2017-08-11 21:48:37 +02:00
|
|
|
try:
|
2019-11-29 22:32:07 +01:00
|
|
|
return self.clean_path(self.clean_segment(
|
2019-08-19 15:56:20 +02:00
|
|
|
self.filename_formatter(self.kwdict)))
|
2017-08-11 21:48:37 +02:00
|
|
|
except Exception as exc:
|
2019-10-27 23:05:00 +01:00
|
|
|
raise exception.FilenameFormatError(exc)
|
2017-08-11 21:48:37 +02:00
|
|
|
|
2019-11-29 22:32:07 +01:00
|
|
|
def build_path(self):
|
|
|
|
"""Combine directory and filename to full paths"""
|
2020-07-04 22:00:34 +02:00
|
|
|
if self._create_directory:
|
|
|
|
os.makedirs(self.realdirectory, exist_ok=True)
|
|
|
|
self._create_directory = False
|
2019-11-29 22:32:07 +01:00
|
|
|
self.filename = filename = self.build_filename()
|
2017-10-24 12:53:03 +02:00
|
|
|
self.path = self.directory + filename
|
|
|
|
self.realpath = self.realdirectory + filename
|
2018-06-06 20:17:17 +02:00
|
|
|
if not self.temppath:
|
|
|
|
self.temppath = self.realpath
|
2017-03-28 13:12:44 +02:00
|
|
|
|
2017-10-26 00:07:32 +02:00
|
|
|
def part_enable(self, part_directory=None):
|
|
|
|
"""Enable .part file usage"""
|
2019-08-12 21:40:37 +02:00
|
|
|
if self.extension:
|
2018-10-18 22:32:03 +02:00
|
|
|
self.temppath += ".part"
|
2017-10-24 23:33:44 +02:00
|
|
|
else:
|
|
|
|
self.set_extension("part", False)
|
2017-10-26 00:07:32 +02:00
|
|
|
if part_directory:
|
2018-06-06 20:17:17 +02:00
|
|
|
self.temppath = os.path.join(
|
2017-10-26 00:07:32 +02:00
|
|
|
part_directory,
|
2018-06-06 20:17:17 +02:00
|
|
|
os.path.basename(self.temppath),
|
2017-10-26 00:07:32 +02:00
|
|
|
)
|
2017-10-24 23:33:44 +02:00
|
|
|
|
|
|
|
def part_size(self):
|
2017-10-26 00:07:32 +02:00
|
|
|
"""Return size of .part file"""
|
2018-06-06 20:17:17 +02:00
|
|
|
try:
|
|
|
|
return os.stat(self.temppath).st_size
|
|
|
|
except OSError:
|
|
|
|
pass
|
2017-10-24 23:33:44 +02:00
|
|
|
return 0
|
|
|
|
|
2018-06-06 20:17:17 +02:00
|
|
|
    def finalize(self):
        """Move tempfile to its target location

        Deletes the tempfile instead when 'self.delete' is set, and
        optionally applies a file modification time taken from
        'kwdict["_mtime"]'.
        """
        if self.delete:
            # download was cancelled/skipped: remove the temporary file
            self.delete = False
            os.unlink(self.temppath)
            return

        if self.temppath != self.realpath:
            # Move temp file to its actual location
            try:
                os.replace(self.temppath, self.realpath)
            except OSError:
                # os.replace() fails across filesystem boundaries;
                # fall back to copy-then-delete
                shutil.copyfile(self.temppath, self.realpath)
                os.unlink(self.temppath)

        mtime = self.kwdict.get("_mtime")
        if mtime:
            # Set file modification time
            try:
                if isinstance(mtime, str):
                    # e.g. an HTTP 'Last-Modified' header value
                    mtime = mktime_tz(parsedate_tz(mtime))
                os.utime(self.realpath, (time.time(), mtime))
            except Exception:
                # best-effort only: a malformed '_mtime' must not fail
                # an otherwise successful download
                pass
|
2017-10-24 23:33:44 +02:00
|
|
|
|
2017-06-16 21:01:40 +02:00
|
|
|
|
2018-01-29 22:13:06 +01:00
|
|
|
class DownloadArchive():
    """SQLite-backed record of already-processed download items"""

    def __init__(self, path, extractor):
        connection = sqlite3.connect(path)
        # autocommit mode: every statement is committed immediately
        connection.isolation_level = None
        self.close = connection.close
        cursor = self.cursor = connection.cursor()

        try:
            cursor.execute("CREATE TABLE IF NOT EXISTS archive "
                           "(entry PRIMARY KEY) WITHOUT ROWID")
        except sqlite3.OperationalError:
            # fallback for missing WITHOUT ROWID support (#553)
            cursor.execute("CREATE TABLE IF NOT EXISTS archive "
                           "(entry PRIMARY KEY)")

        # archive keys are '<category>' + user-configurable format string
        archive_fmt = extractor.config("archive-format", extractor.archive_fmt)
        self.keygen = (extractor.category + archive_fmt).format_map

    def check(self, kwdict):
        """Return a truthy value if the item in 'kwdict' is archived

        Also caches the generated key in 'kwdict["_archive_key"]'
        so a later add() does not have to recompute it.
        """
        key = self.keygen(kwdict)
        kwdict["_archive_key"] = key
        self.cursor.execute(
            "SELECT 1 FROM archive WHERE entry=? LIMIT 1", (key,))
        return self.cursor.fetchone()

    def add(self, kwdict):
        """Record the item described by 'kwdict' in the archive"""
        key = kwdict.get("_archive_key") or self.keygen(kwdict)
        self.cursor.execute(
            "INSERT OR IGNORE INTO archive VALUES (?)", (key,))
|