#!/usr/bin/env python
# coding: utf-8

from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


# Various small unit tests
import io
import json
import xml.etree.ElementTree

from youtube_dl.utils import (
    clean_html,
    DateRange,
    encodeFilename,
    find_xpath_attr,
    fix_xml_ampersands,
    orderedSet,
    OnDemandPagedList,
    InAdvancePagedList,
    parse_duration,
    read_batch_urls,
    sanitize_filename,
    shell_quote,
    smuggle_url,
    str_to_int,
    struct_unpack,
    timeconvert,
    unescapeHTML,
    unified_strdate,
    unsmuggle_url,
    url_basename,
    urlencode_postdata,
    xpath_with_ns,
    parse_iso8601,
    strip_jsonp,
    uppercase_escape,
    limit_length,
    escape_rfc3986,
    escape_url,
    js_to_json,
    intlist_to_bytes,
    args_to_str,
    parse_filesize,
    version_tuple,
)


class TestUtil(unittest.TestCase):
    def test_timeconvert(self):
        self.assertTrue(timeconvert('') is None)
        self.assertTrue(timeconvert('bougrg') is None)

    def test_sanitize_filename(self):
        self.assertEqual(sanitize_filename('abc'), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')

        self.assertEqual(sanitize_filename('123'), '123')

        self.assertEqual('abc_de', sanitize_filename('abc/de'))
        self.assertFalse('/' in sanitize_filename('abc/de///'))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
        self.assertEqual('yes no', sanitize_filename('yes? no'))
        self.assertEqual('this - that', sanitize_filename('this: that'))

        self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
        aumlaut = 'ä'
        self.assertEqual(sanitize_filename(aumlaut), aumlaut)
        tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
        self.assertEqual(sanitize_filename(tests), tests)

        forbidden = '"\0\\/'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc))

    def test_sanitize_filename_restricted(self):
        self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')

        self.assertEqual(sanitize_filename('123', restricted=True), '123')

        self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
        self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
        self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
        self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))

        tests = 'a\xe4b\u4e2d\u56fd\u7684c'
        self.assertEqual(sanitize_filename(tests, restricted=True), 'a_b_c')
        self.assertTrue(sanitize_filename('\xf6', restricted=True) != '')  # No empty filename

        forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))

        # Handle a common case more neatly
        self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
        self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
        # .. but make sure the file name is never empty
        self.assertTrue(sanitize_filename('-', restricted=True) != '')
        self.assertTrue(sanitize_filename(':', restricted=True) != '')

    def test_sanitize_ids(self):
        self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
        self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
        self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')

    def test_ordered_set(self):
        self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
        self.assertEqual(orderedSet([]), [])
        self.assertEqual(orderedSet([1]), [1])
        # keep the list ordered
        self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
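
    # unescapeHTML resolves HTML entities (e.g. '&eacute;' -> 'é'), while text
    # without entities should pass through unchanged.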
    def test_unescape_html(self):
        self.assertEqual(unescapeHTML('%20;'), '%20;')
        self.assertEqual(
            unescapeHTML('&eacute;'), 'é')

    def test_daterange(self):
        _20century = DateRange("19000101", "20000101")
        self.assertFalse("17890714" in _20century)
        _ac = DateRange("00010101")
        self.assertTrue("19690721" in _ac)
        _firstmilenium = DateRange(end="10000101")
        self.assertTrue("07110427" in _firstmilenium)

    def test_unified_dates(self):
        self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
        self.assertEqual(unified_strdate('8/7/2009'), '20090708')
        self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
        self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
        self.assertEqual(unified_strdate('1968-12-10'), '19681210')
        self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')

    def test_find_xpath_attr(self):
        testxml = '''<root>
            <node/>
            <node x="a"/>
            <node x="a" y="c" />
            <node x="b" y="d" />
        </root>'''
        doc = xml.etree.ElementTree.fromstring(testxml)

        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])

    def test_xpath_with_ns(self):
        testxml = '''<root xmlns:media="http://example.com/">
            <media:song>
                <media:author>The Author</media:author>
                <url>http://server.com/download.mp3</url>
            </media:song>
        </root>'''
        doc = xml.etree.ElementTree.fromstring(testxml)
        find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
        self.assertTrue(find('media:song') is not None)
        self.assertEqual(find('media:song/media:author').text, 'The Author')
        self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
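
    # smuggle_url/unsmuggle_url round-trip extra data through an ordinary URL
    # (presumably by stashing a JSON payload in the fragment); unsmuggling a
    # plain URL should return the URL itself and None.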
    def test_smuggle_url(self):
        data = {"ö": "ö", "abc": [3]}
        url = 'https://foo.bar/baz?x=y#a'
        smug_url = smuggle_url(url, data)
        unsmug_url, unsmug_data = unsmuggle_url(smug_url)
        self.assertEqual(url, unsmug_url)
        self.assertEqual(data, unsmug_data)

        res_url, res_data = unsmuggle_url(url)
        self.assertEqual(res_url, url)
        self.assertEqual(res_data, None)

    def test_shell_quote(self):
        args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
        self.assertEqual(shell_quote(args), """ffmpeg -i 'ñ€ß'"'"'.mp4'""")

    def test_str_to_int(self):
        self.assertEqual(str_to_int('123,456'), 123456)
        self.assertEqual(str_to_int('123.456'), 123456)

    def test_url_basename(self):
        self.assertEqual(url_basename('http://foo.de/'), '')
        self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
        self.assertEqual(
            url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
            'trailer.mp4')

    def test_parse_duration(self):
        self.assertEqual(parse_duration(None), None)
        self.assertEqual(parse_duration('1'), 1)
        self.assertEqual(parse_duration('1337:12'), 80232)
        self.assertEqual(parse_duration('9:12:43'), 33163)
        self.assertEqual(parse_duration('12:00'), 720)
        self.assertEqual(parse_duration('00:01:01'), 61)
        self.assertEqual(parse_duration('x:y'), None)
        self.assertEqual(parse_duration('3h11m53s'), 11513)
        self.assertEqual(parse_duration('3h 11m 53s'), 11513)
        self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
        self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
        self.assertEqual(parse_duration('62m45s'), 3765)
        self.assertEqual(parse_duration('6m59s'), 419)
        self.assertEqual(parse_duration('49s'), 49)
        self.assertEqual(parse_duration('0h0m0s'), 0)
        self.assertEqual(parse_duration('0m0s'), 0)
        self.assertEqual(parse_duration('0s'), 0)
        self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
        self.assertEqual(parse_duration('T30M38S'), 1838)
        self.assertEqual(parse_duration('5 s'), 5)
        self.assertEqual(parse_duration('3 min'), 180)
        self.assertEqual(parse_duration('2.5 hours'), 9000)
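
    # fix_xml_ampersands should escape stray '&' characters while leaving
    # well-formed entity and character references untouched.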
    def test_fix_xml_ampersands(self):
        self.assertEqual(
            fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('"&amp;x=y&amp;wrong;&z=a'),
            '"&amp;x=y&amp;wrong;&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'),
            '&amp;&apos;&gt;&lt;&quot;')
        self.assertEqual(
            fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;')
        self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#')
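
    # Both paged-list flavours expose getslice(); OnDemandPagedList fetches
    # pages lazily, while InAdvancePagedList is told the page count up front.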
    def test_paged_list(self):
        def testPL(size, pagesize, sliceargs, expected):
            def get_page(pagenum):
                firstid = pagenum * pagesize
                upto = min(size, pagenum * pagesize + pagesize)
                for i in range(firstid, upto):
                    yield i

            pl = OnDemandPagedList(get_page, pagesize)
            got = pl.getslice(*sliceargs)
            self.assertEqual(got, expected)

            iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
            got = iapl.getslice(*sliceargs)
            self.assertEqual(got, expected)

        testPL(5, 2, (), [0, 1, 2, 3, 4])
        testPL(5, 2, (1,), [1, 2, 3, 4])
        testPL(5, 2, (2,), [2, 3, 4])
        testPL(5, 2, (4,), [4])
        testPL(5, 2, (0, 3), [0, 1, 2])
        testPL(5, 2, (1, 4), [1, 2, 3])
        testPL(5, 2, (2, 99), [2, 3, 4])
        testPL(5, 2, (20, 99), [])

    def test_struct_unpack(self):
        self.assertEqual(struct_unpack('!B', b'\x00'), (0,))
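
    # read_batch_urls should skip a leading UTF-8 BOM, blank lines and
    # comment lines starting with '#' or ';'.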
    def test_read_batch_urls(self):
        f = io.StringIO('''\xef\xbb\xbf foo
            bar\r
            baz
            # More after this line\r
            ; or after this
            bam''')
        self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])

    def test_urlencode_postdata(self):
        data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
        self.assertTrue(isinstance(data, bytes))

    def test_parse_iso8601(self):
        self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)

    def test_strip_jsonp(self):
        stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
        d = json.loads(stripped)
        self.assertEqual(d, [{"id": "532cb", "x": 3}])

        stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
        d = json.loads(stripped)
        self.assertEqual(d, {'STATUS': 'OK'})

    def test_uppercase_escape(self):
        self.assertEqual(uppercase_escape('aä'), 'aä')
        self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')

    def test_limit_length(self):
        self.assertEqual(limit_length(None, 12), None)
        self.assertEqual(limit_length('foo', 12), 'foo')
        self.assertTrue(
            limit_length('foo bar baz asd', 12).startswith('foo bar'))
        self.assertTrue('...' in limit_length('foo bar baz asd', 12))

    def test_escape_rfc3986(self):
        reserved = "!*'();:@&=+$,/?#[]"
        unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
        self.assertEqual(escape_rfc3986(reserved), reserved)
        self.assertEqual(escape_rfc3986(unreserved), unreserved)
        self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
        self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
        self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
        self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')

    def test_escape_url(self):
        self.assertEqual(
            escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
            'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
        )
        self.assertEqual(
            escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
            'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
        )
        self.assertEqual(
            escape_url('http://тест.рф/фрагмент'),
            'http://тест.рф/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
        )
        self.assertEqual(
            escape_url('http://тест.рф/абв?абв=абв#абв'),
            'http://тест.рф/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
        )
        self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
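
    # js_to_json should turn JavaScript object literals (single quotes,
    # unquoted keys) into strict JSON that json.loads accepts.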
    def test_js_to_json_realworld(self):
        inp = '''{
            'clip':{'provider':'pseudo'}
        }'''
        self.assertEqual(js_to_json(inp), '''{
            "clip":{"provider":"pseudo"}
        }''')
        json.loads(js_to_json(inp))

        inp = '''{
            'playlist':[{'controls':{'all':null}}]
        }'''
        self.assertEqual(js_to_json(inp), '''{
            "playlist":[{"controls":{"all":null}}]
        }''')

    def test_js_to_json_edgecases(self):
        on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
        self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})

        on = js_to_json('{"abc": true}')
        self.assertEqual(json.loads(on), {'abc': True})

    def test_clean_html(self):
        self.assertEqual(clean_html('a:\nb'), 'a: b')
        self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')

    def test_intlist_to_bytes(self):
        self.assertEqual(
            intlist_to_bytes([0, 1, 127, 128, 255]),
            b'\x00\x01\x7f\x80\xff')

    def test_args_to_str(self):
        self.assertEqual(
            args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
            'foo ba/r -baz \'2 be\' \'\''
        )

    def test_parse_filesize(self):
        self.assertEqual(parse_filesize(None), None)
        self.assertEqual(parse_filesize(''), None)
        self.assertEqual(parse_filesize('91 B'), 91)
        self.assertEqual(parse_filesize('foobar'), None)
        self.assertEqual(parse_filesize('2 MiB'), 2097152)
        self.assertEqual(parse_filesize('5 GB'), 5000000000)
        self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
        self.assertEqual(parse_filesize('1,24 KB'), 1240)

    def test_version_tuple(self):
        self.assertEqual(version_tuple('1'), (1,))
        self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
        self.assertEqual(version_tuple('10-6'), (10, 6))  # avconv style


if __name__ == '__main__':
    unittest.main()