 test/test_utils.py  | 21 ---------------------
 youtube_dl/utils.py |  7 -------
 2 files changed, 0 insertions(+), 28 deletions(-)
diff --git a/test/test_utils.py b/test/test_utils.py
index 032d3656a..86b110a7d 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -40,7 +40,6 @@ from youtube_dl.utils import (
     read_batch_urls,
     sanitize_filename,
     sanitize_path,
-    sanitize_url_path_consecutive_slashes,
     prepend_extension,
     replace_extension,
     shell_quote,
@@ -176,26 +175,6 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(sanitize_path('./abc'), 'abc')
         self.assertEqual(sanitize_path('./../abc'), '..\\abc')
 
-    def test_sanitize_url_path_consecutive_slashes(self):
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/foo//bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname//foo/bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname//'),
-            'http://hostname/')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/foo/bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/'),
-            'http://hostname/')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/abc//'),
-            'http://hostname/abc/')
-
     def test_prepend_extension(self):
         self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
         self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
diff --git a/youtube_dl/utils.py b/youtube_dl/utils.py
index de09b53b2..d73efcf25 100644
--- a/youtube_dl/utils.py
+++ b/youtube_dl/utils.py
@@ -327,13 +327,6 @@ def sanitize_path(s):
     return os.path.join(*sanitized_path)
 
 
-def sanitize_url_path_consecutive_slashes(url):
-    """Collapses consecutive slashes in URLs' path"""
-    parsed_url = list(compat_urlparse.urlparse(url))
-    parsed_url[2] = re.sub(r'/{2,}', '/', parsed_url[2])
-    return compat_urlparse.urlunparse(parsed_url)
-
-
 def orderedSet(iterable):
     """ Remove all duplicates from the input iterable """
     res = []
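
For reference, the removed helper collapsed runs of consecutive slashes in a URL's path component. Below is a minimal standalone sketch of the same idea using the standard-library urllib.parse module directly; the original code instead went through youtube-dl's compat_urlparse wrapper and a module-level re import, and the function name collapse_url_path_slashes here is illustrative, not part of the codebase.

    import re
    from urllib.parse import urlparse, urlunparse


    def collapse_url_path_slashes(url):
        """Collapse consecutive slashes in a URL's path component (sketch)."""
        parts = list(urlparse(url))
        # urlparse returns (scheme, netloc, path, params, query, fragment);
        # index 2 is the path, where repeated slashes are collapsed to one.
        parts[2] = re.sub(r'/{2,}', '/', parts[2])
        return urlunparse(parts)


    print(collapse_url_path_slashes('http://hostname/foo//bar/filename.html'))
    # -> http://hostname/foo/bar/filename.html

Note that only the path is touched; the scheme, host, query, and fragment are left as parsed.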