2015-04-10 21:45:41 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2023-01-11 15:37:40 +01:00
|
|
|
# Copyright 2014-2023 Mike Fährmann
|
2015-04-10 21:45:41 +02:00
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
|
2017-06-30 19:38:14 +02:00
|
|
|
"""Downloader module for http:// and https:// URLs"""
|
2015-04-10 21:45:41 +02:00
|
|
|
|
2017-12-02 01:47:26 +01:00
|
|
|
import time
|
2016-09-30 12:32:48 +02:00
|
|
|
import mimetypes
|
2019-06-19 22:19:29 +02:00
|
|
|
from requests.exceptions import RequestException, ConnectionError, Timeout
|
2017-10-24 12:53:03 +02:00
|
|
|
from .common import DownloaderBase
|
2020-01-17 23:51:07 +01:00
|
|
|
from .. import text, util
|
2017-10-24 23:33:44 +02:00
|
|
|
|
2019-12-09 20:14:25 +01:00
|
|
|
from ssl import SSLError
|
2019-07-01 20:10:26 +02:00
|
|
|
try:
|
2019-12-09 20:14:25 +01:00
|
|
|
from OpenSSL.SSL import Error as OpenSSLError
|
2019-07-01 20:10:26 +02:00
|
|
|
except ImportError:
|
2019-12-09 20:14:25 +01:00
|
|
|
OpenSSLError = SSLError
|
2019-07-01 20:10:26 +02:00
|
|
|
|
2017-10-24 23:33:44 +02:00
|
|
|
|
2018-11-16 14:40:05 +01:00
|
|
|
class HttpDownloader(DownloaderBase):
    """Downloader for http:// and https:// URLs"""

    # URL scheme this downloader is registered for (https is handled
    # by the same class; see module docstring)
    scheme = "http"
|
2017-03-26 18:24:46 +02:00
|
|
|
|
2020-05-18 01:35:53 +02:00
|
|
|
def __init__(self, job):
|
|
|
|
DownloaderBase.__init__(self, job)
|
|
|
|
extractor = job.extractor
|
|
|
|
self.downloading = False
|
|
|
|
|
2019-08-07 22:52:29 +02:00
|
|
|
self.adjust_extension = self.config("adjust-extensions", True)
|
2022-11-02 15:34:54 +01:00
|
|
|
self.chunk_size = self.config("chunk-size", 32768)
|
2022-11-05 17:37:43 +01:00
|
|
|
self.metadata = extractor.config("http-metadata")
|
2021-09-28 22:37:11 +02:00
|
|
|
self.progress = self.config("progress", 3.0)
|
2023-01-11 15:37:40 +01:00
|
|
|
self.validate = self.config("validate", True)
|
2021-02-21 19:13:39 +01:00
|
|
|
self.headers = self.config("headers")
|
2020-09-01 22:05:17 +02:00
|
|
|
self.minsize = self.config("filesize-min")
|
|
|
|
self.maxsize = self.config("filesize-max")
|
2018-10-06 19:59:19 +02:00
|
|
|
self.retries = self.config("retries", extractor._retries)
|
2023-01-14 17:16:18 +01:00
|
|
|
self.retry_codes = self.config("retry-codes", extractor._retry_codes)
|
2018-10-06 19:59:19 +02:00
|
|
|
self.timeout = self.config("timeout", extractor._timeout)
|
|
|
|
self.verify = self.config("verify", extractor._verify)
|
2019-06-20 17:19:44 +02:00
|
|
|
self.mtime = self.config("mtime", True)
|
2017-12-02 01:47:26 +01:00
|
|
|
self.rate = self.config("rate")
|
|
|
|
|
2019-06-30 22:55:31 +02:00
|
|
|
if self.retries < 0:
|
|
|
|
self.retries = float("inf")
|
2020-09-01 22:05:17 +02:00
|
|
|
if self.minsize:
|
|
|
|
minsize = text.parse_bytes(self.minsize)
|
|
|
|
if not minsize:
|
2020-11-29 20:55:35 +01:00
|
|
|
self.log.warning(
|
|
|
|
"Invalid minimum file size (%r)", self.minsize)
|
2020-09-01 22:05:17 +02:00
|
|
|
self.minsize = minsize
|
|
|
|
if self.maxsize:
|
|
|
|
maxsize = text.parse_bytes(self.maxsize)
|
|
|
|
if not maxsize:
|
2020-11-29 20:55:35 +01:00
|
|
|
self.log.warning(
|
|
|
|
"Invalid maximum file size (%r)", self.maxsize)
|
2020-09-01 22:05:17 +02:00
|
|
|
self.maxsize = maxsize
|
2022-11-02 15:34:54 +01:00
|
|
|
if isinstance(self.chunk_size, str):
|
|
|
|
chunk_size = text.parse_bytes(self.chunk_size)
|
|
|
|
if not chunk_size:
|
|
|
|
self.log.warning(
|
|
|
|
"Invalid chunk size (%r)", self.chunk_size)
|
|
|
|
chunk_size = 32768
|
|
|
|
self.chunk_size = chunk_size
|
2017-12-02 01:47:26 +01:00
|
|
|
if self.rate:
|
2019-08-29 23:05:47 +02:00
|
|
|
rate = text.parse_bytes(self.rate)
|
2019-12-09 20:21:28 +01:00
|
|
|
if rate:
|
|
|
|
if rate < self.chunk_size:
|
|
|
|
self.chunk_size = rate
|
|
|
|
self.rate = rate
|
|
|
|
self.receive = self._receive_rate
|
|
|
|
else:
|
2019-08-29 23:05:47 +02:00
|
|
|
self.log.warning("Invalid rate limit (%r)", self.rate)
|
2021-09-28 22:37:11 +02:00
|
|
|
if self.progress is not None:
|
|
|
|
self.receive = self._receive_rate
|
2022-11-30 21:56:18 +01:00
|
|
|
if self.progress < 0.0:
|
|
|
|
self.progress = 0.0
|
2017-10-24 12:53:03 +02:00
|
|
|
|
2019-06-19 22:19:29 +02:00
|
|
|
def download(self, url, pathfmt):
|
2017-12-06 22:35:05 +01:00
|
|
|
try:
|
2019-06-19 22:19:29 +02:00
|
|
|
return self._download_impl(url, pathfmt)
|
|
|
|
except Exception:
|
|
|
|
print()
|
|
|
|
raise
|
|
|
|
finally:
|
|
|
|
# remove file from incomplete downloads
|
|
|
|
if self.downloading and not self.part:
|
2020-01-17 23:51:07 +01:00
|
|
|
util.remove_file(pathfmt.temppath)
|
2019-06-19 22:19:29 +02:00
|
|
|
|
|
|
|
    def _download_impl(self, url, pathfmt):
        """Fetch 'url' and write its content to pathfmt's target path.

        Retries on connection errors and retryable HTTP status codes,
        resumes partial (.part) downloads via HTTP Range requests, and
        validates/adjusts the filename extension against the response.
        Returns True on success, False on permanent failure.
        """
        response = None
        tries = 0
        msg = ""

        metadata = self.metadata
        kwdict = pathfmt.kwdict
        # per-file override from extractor metadata takes precedence
        adjust_extension = kwdict.get(
            "_http_adjust_extension", self.adjust_extension)

        # with http-metadata enabled, the final path depends on response
        # headers, so .part mode is deferred until after the request
        if self.part and not metadata:
            pathfmt.part_enable(self.partdir)

        while True:
            # every iteration after the first is a retry
            if tries:
                if response:
                    response.close()
                    response = None
                self.log.warning("%s (%s/%s)", msg, tries, self.retries+1)
                if tries > self.retries:
                    return False
                # linear backoff: wait 'tries' seconds
                time.sleep(tries)

            tries += 1
            file_header = None

            # collect HTTP headers
            headers = {"Accept": "*/*"}
            # file-specific headers
            extra = kwdict.get("_http_headers")
            if extra:
                headers.update(extra)
            # general headers
            if self.headers:
                headers.update(self.headers)
            # partial content
            file_size = pathfmt.part_size()
            if file_size:
                headers["Range"] = "bytes={}-".format(file_size)

            # connect to (remote) source
            try:
                response = self.session.request(
                    kwdict.get("_http_method", "GET"), url,
                    stream=True,
                    headers=headers,
                    data=kwdict.get("_http_data"),
                    timeout=self.timeout,
                    proxies=self.proxies,
                    verify=self.verify,
                )
            except (ConnectionError, Timeout) as exc:
                # transient network problem -> retry
                msg = str(exc)
                continue
            except Exception as exc:
                # anything else (e.g. invalid URL) is fatal
                self.log.warning(exc)
                return False

            # check response
            code = response.status_code
            if code == 200:  # OK
                offset = 0
                size = response.headers.get("Content-Length")
            elif code == 206:  # Partial Content
                offset = file_size
                # total size is the part after '/' in "bytes start-end/total"
                size = response.headers["Content-Range"].rpartition("/")[2]
            elif code == 416 and file_size:  # Requested Range Not Satisfiable
                # local .part file already contains the complete download
                break
            else:
                msg = "'{} {}' for '{}'".format(code, response.reason, url)
                # retry configured codes and all 5xx server errors
                if code in self.retry_codes or 500 <= code < 600:
                    continue
                # extractor-provided callback may request another retry
                retry = kwdict.get("_http_retry")
                if retry and retry(response):
                    continue
                self.log.warning(msg)
                return False

            # check for invalid responses
            validate = kwdict.get("_http_validate")
            if validate and self.validate:
                result = validate(response)
                if isinstance(result, str):
                    # callback returned a replacement URL;
                    # follow it without consuming a retry
                    url = result
                    tries -= 1
                    continue
                if not result:
                    self.log.warning("Invalid response")
                    return False

            # check file size
            size = text.parse_int(size, None)
            if size is not None:
                if self.minsize and size < self.minsize:
                    self.log.warning(
                        "File size smaller than allowed minimum (%s < %s)",
                        size, self.minsize)
                    return False
                if self.maxsize and size > self.maxsize:
                    self.log.warning(
                        "File size larger than allowed maximum (%s > %s)",
                        size, self.maxsize)
                    return False

            build_path = False

            # set missing filename extension from MIME type
            if not pathfmt.extension:
                pathfmt.set_extension(self._find_extension(response))
                build_path = True

            # set metadata from HTTP headers
            if metadata:
                kwdict[metadata] = util.extract_headers(response)
                build_path = True

            # build and check file path
            if build_path:
                pathfmt.build_path()
                if pathfmt.exists():
                    # target already exists -> nothing to download
                    pathfmt.temppath = ""
                    return True
                # .part mode was deferred above; enable it now that the
                # final path is known (only once per download)
                if self.part and metadata:
                    pathfmt.part_enable(self.partdir)
                    metadata = False

            content = response.iter_content(self.chunk_size)

            # check filename extension against file header
            if adjust_extension and not offset and \
                    pathfmt.extension in SIGNATURE_CHECKS:
                try:
                    # for chunked responses, peek at the first full chunk;
                    # otherwise read just 16 bytes for the signature check
                    file_header = next(
                        content if response.raw.chunked
                        else response.iter_content(16), b"")
                except (RequestException, SSLError, OpenSSLError) as exc:
                    msg = str(exc)
                    # NOTE(review): bare print() appears to terminate an
                    # in-progress console status line
                    print()
                    continue
                if self._adjust_extension(pathfmt, file_header) and \
                        pathfmt.exists():
                    # corrected path already exists -> done
                    pathfmt.temppath = ""
                    return True

            # set open mode
            if not offset:
                mode = "w+b"
                if file_size:
                    self.log.debug("Unable to resume partial download")
            else:
                mode = "r+b"
                self.log.debug("Resuming download at byte %d", offset)

            # download content
            self.downloading = True
            with pathfmt.open(mode) as fp:
                if file_header:
                    # write the bytes consumed by the signature check
                    fp.write(file_header)
                    offset += len(file_header)
                elif offset:
                    # resuming: optionally re-check the extension against
                    # the already-downloaded header bytes
                    if adjust_extension and \
                            pathfmt.extension in SIGNATURE_CHECKS:
                        self._adjust_extension(pathfmt, fp.read(16))
                    fp.seek(offset)

                self.out.start(pathfmt.path)
                try:
                    self.receive(fp, content, size, offset)
                except (RequestException, SSLError, OpenSSLError) as exc:
                    msg = str(exc)
                    print()
                    continue

                # check file size
                if size and fp.tell() < size:
                    # truncated transfer -> retry and resume
                    msg = "file size mismatch ({} < {})".format(
                        fp.tell(), size)
                    print()
                    continue

                break

        self.downloading = False
        if self.mtime:
            # remember server mtime unless already set by the extractor
            kwdict.setdefault("_mtime", response.headers.get("Last-Modified"))
        else:
            kwdict["_mtime"] = None

        return True
|
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
@staticmethod
|
2022-11-10 13:09:12 +01:00
|
|
|
def receive(fp, content, bytes_total, bytes_start):
|
2020-11-29 20:55:35 +01:00
|
|
|
write = fp.write
|
|
|
|
for data in content:
|
|
|
|
write(data)
|
2019-12-09 20:21:28 +01:00
|
|
|
|
2022-11-10 13:09:12 +01:00
|
|
|
    def _receive_rate(self, fp, content, bytes_total, bytes_start):
        """Write 'content' to 'fp' with rate limiting and progress output.

        bytes_total -- expected total file size (may be None/0 if unknown)
        bytes_start -- byte offset this (possibly resumed) download began at
        """
        rate = self.rate
        write = fp.write  # hoisted attribute lookups for the hot loop
        progress = self.progress

        bytes_downloaded = 0
        time_start = time.monotonic()

        for data in content:
            time_elapsed = time.monotonic() - time_start
            bytes_downloaded += len(data)

            write(data)

            if progress is not None:
                # emit progress only once 'progress' seconds have elapsed
                # (time_start is never reset, so afterwards this fires on
                # every chunk)
                if time_elapsed > progress:
                    self.out.progress(
                        bytes_total,
                        bytes_start + bytes_downloaded,
                        # average speed since the start of this transfer
                        int(bytes_downloaded / time_elapsed),
                    )

            if rate:
                # sleep long enough to keep the average transfer speed
                # at or below 'rate' bytes per second
                time_expected = bytes_downloaded / rate
                if time_expected > time_elapsed:
                    time.sleep(time_expected - time_elapsed)
|
2017-12-02 01:47:26 +01:00
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
def _find_extension(self, response):
|
|
|
|
"""Get filename extension from MIME type"""
|
2019-06-19 22:19:29 +02:00
|
|
|
mtype = response.headers.get("Content-Type", "image/jpeg")
|
2017-10-24 12:53:03 +02:00
|
|
|
mtype = mtype.partition(";")[0]
|
2017-11-30 22:30:01 +01:00
|
|
|
|
2020-03-03 21:21:57 +01:00
|
|
|
if "/" not in mtype:
|
|
|
|
mtype = "image/" + mtype
|
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
if mtype in MIME_TYPES:
|
|
|
|
return MIME_TYPES[mtype]
|
2017-11-30 22:30:01 +01:00
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
ext = mimetypes.guess_extension(mtype, strict=False)
|
|
|
|
if ext:
|
|
|
|
return ext[1:]
|
2020-12-08 21:20:18 +01:00
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
self.log.warning("Unknown MIME type '%s'", mtype)
|
|
|
|
return "bin"
|
2017-11-30 22:30:01 +01:00
|
|
|
|
2019-06-19 22:19:29 +02:00
|
|
|
@staticmethod
|
2020-11-29 20:55:35 +01:00
|
|
|
def _adjust_extension(pathfmt, file_header):
|
|
|
|
"""Check filename extension against file header"""
|
2022-11-01 17:09:13 +01:00
|
|
|
if not SIGNATURE_CHECKS[pathfmt.extension](file_header):
|
|
|
|
for ext, check in SIGNATURE_CHECKS.items():
|
|
|
|
if check(file_header):
|
2020-11-29 20:55:35 +01:00
|
|
|
pathfmt.set_extension(ext)
|
2022-11-08 17:01:10 +01:00
|
|
|
pathfmt.build_path()
|
2020-11-29 20:55:35 +01:00
|
|
|
return True
|
|
|
|
return False
|
2019-06-19 22:19:29 +02:00
|
|
|
|
2017-11-30 22:30:01 +01:00
|
|
|
|
2020-11-29 20:55:35 +01:00
|
|
|
# mapping of MIME media types to canonical filename extensions,
# consulted before mimetypes.guess_extension() in _find_extension()
MIME_TYPES = {
    # images
    "image/jpeg"    : "jpg",
    "image/jpg"     : "jpg",
    "image/png"     : "png",
    "image/gif"     : "gif",
    "image/bmp"     : "bmp",
    "image/x-bmp"   : "bmp",
    "image/x-ms-bmp": "bmp",
    "image/webp"    : "webp",
    "image/avif"    : "avif",
    "image/svg+xml" : "svg",
    "image/ico"     : "ico",
    "image/icon"    : "ico",
    "image/x-icon"  : "ico",
    "image/vnd.microsoft.icon" : "ico",
    "image/x-photoshop"        : "psd",
    "application/x-photoshop"  : "psd",
    "image/vnd.adobe.photoshop": "psd",

    # video
    "video/webm": "webm",
    "video/ogg" : "ogg",
    "video/mp4" : "mp4",

    # audio
    "audio/wav"  : "wav",
    "audio/x-wav": "wav",
    "audio/webm" : "webm",
    "audio/ogg"  : "ogg",
    "audio/mpeg" : "mp3",

    # archives
    "application/zip"  : "zip",
    "application/x-zip": "zip",
    "application/x-zip-compressed": "zip",
    "application/rar"  : "rar",
    "application/x-rar": "rar",
    "application/x-rar-compressed": "rar",
    "application/x-7z-compressed" : "7z",

    # documents
    "application/pdf"  : "pdf",
    "application/x-pdf": "pdf",
    "application/x-shockwave-flash": "swf",

    "application/ogg": "ogg",
    # https://www.iana.org/assignments/media-types/model/obj
    "model/obj": "obj",
    "application/octet-stream": "bin",
}
|
2018-11-16 14:40:05 +01:00
|
|
|
|
2021-01-01 16:07:33 +01:00
|
|
|
# https://en.wikipedia.org/wiki/List_of_file_signatures
# maps a filename extension to a predicate over the first bytes of a file;
# used by _adjust_extension() to verify and correct extensions
# (slice comparisons are safe on short inputs: a too-short header
# simply yields a shorter slice and the check returns False)
SIGNATURE_CHECKS = {
    "jpg" : lambda s: s[0:3] == b"\xFF\xD8\xFF",
    "png" : lambda s: s[0:8] == b"\x89PNG\r\n\x1A\n",
    "gif" : lambda s: s[0:6] in (b"GIF87a", b"GIF89a"),
    "bmp" : lambda s: s[0:2] == b"BM",
    "webp": lambda s: (s[0:4] == b"RIFF" and
                       s[8:12] == b"WEBP"),
    "avif": lambda s: s[4:11] == b"ftypavi" and s[11] in b"fs",
    "svg" : lambda s: s[0:5] == b"<?xml",
    "ico" : lambda s: s[0:4] == b"\x00\x00\x01\x00",
    "cur" : lambda s: s[0:4] == b"\x00\x00\x02\x00",
    "psd" : lambda s: s[0:4] == b"8BPS",
    "mp4" : lambda s: (s[4:8] == b"ftyp" and s[8:11] in (
        b"mp4", b"avc", b"iso", b"M4V")),
    "webm": lambda s: s[0:4] == b"\x1A\x45\xDF\xA3",
    "ogg" : lambda s: s[0:4] == b"OggS",
    "wav" : lambda s: (s[0:4] == b"RIFF" and
                       s[8:12] == b"WAVE"),
    "mp3" : lambda s: (s[0:3] == b"ID3" or
                       s[0:2] in (b"\xFF\xFB", b"\xFF\xF3", b"\xFF\xF2")),
    "zip" : lambda s: s[0:4] in (b"PK\x03\x04", b"PK\x05\x06", b"PK\x07\x08"),
    "rar" : lambda s: s[0:6] == b"Rar!\x1A\x07",
    "7z"  : lambda s: s[0:6] == b"\x37\x7A\xBC\xAF\x27\x1C",
    "pdf" : lambda s: s[0:5] == b"%PDF-",
    "swf" : lambda s: s[0:3] in (b"CWS", b"FWS"),
    "blend": lambda s: s[0:7] == b"BLENDER",
    # unfortunately the Wavefront .obj format doesn't have a signature,
    # so we check for the existence of Blender's comment
    "obj" : lambda s: s[0:11] == b"# Blender v",
    # Celsys Clip Studio Paint format
    # https://github.com/rasensuihei/cliputils/blob/master/README.md
    "clip": lambda s: s[0:8] == b"CSFCHUNK",
    # check 'bin' files against all other file signatures
    "bin" : lambda s: False,
}
|
|
|
|
|
2018-11-16 14:40:05 +01:00
|
|
|
# module-level hook exposing this module's downloader implementation
__downloader__ = HttpDownloader
|