mirror of
https://github.com/mkdocs/mkdocs.git
synced 2026-03-27 09:58:31 +07:00
Account for encoded URLs. (#1673)
Encode URLs of all internal documents (to account for spaces in filenames, etc). Then unencode Markdown links to internal pages before confirming existence, etc. Fixes #1670.
This commit is contained in:
@@ -56,6 +56,7 @@ your global navigation uses more than one level, things will likely be broken.
|
||||
|
||||
### Other Changes and Additions to Version 1.1
|
||||
|
||||
* Bugfix: Account for encoded URLs (#1670).
|
||||
* Bugfix: Ensure theme files do not override `docs_dir` files (#1671).
|
||||
* Bugfix: Do not normalize URL fragments (#1655).
|
||||
* Add canonical tag to `readthedocs` theme (#1669).
|
||||
|
||||
@@ -165,7 +165,7 @@ class File(object):
|
||||
url = '.'
|
||||
else:
|
||||
url = dirname + '/'
|
||||
return url
|
||||
return utils.urlquote(url)
|
||||
|
||||
def url_relative_to(self, other):
|
||||
""" Return url for file relative to other file. """
|
||||
|
||||
@@ -13,7 +13,7 @@ from markdown.treeprocessors import Treeprocessor
|
||||
from markdown.util import AMP_SUBSTITUTE
|
||||
|
||||
from mkdocs.structure.toc import get_toc
|
||||
from mkdocs.utils import meta, urlparse, urlunparse, urljoin, get_markdown_title, warning_filter
|
||||
from mkdocs.utils import meta, urlparse, urlunparse, urljoin, urlunquote, get_markdown_title, warning_filter
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.addFilter(warning_filter)
|
||||
@@ -222,7 +222,7 @@ class _RelativePathTreeprocessor(Treeprocessor):
|
||||
return url
|
||||
|
||||
# Determine the filepath of the target.
|
||||
target_path = os.path.join(os.path.dirname(self.file.src_path), path)
|
||||
target_path = os.path.join(os.path.dirname(self.file.src_path), urlunquote(path))
|
||||
target_path = os.path.normpath(target_path).lstrip(os.sep)
|
||||
|
||||
# Validate that the target exists in files collection.
|
||||
|
||||
@@ -277,6 +277,15 @@ class TestFiles(PathAssertionMixin, unittest.TestCase):
|
||||
self.assertFalse(f.is_javascript())
|
||||
self.assertTrue(f.is_css())
|
||||
|
||||
def test_file_name_with_space(self):
    """A File whose source name contains a space gets a percent-encoded URL.

    The filesystem-facing attributes (src_path, abs_src_path, dest_path,
    abs_dest_path) keep the literal space; only ``url`` is percent-encoded
    ('foo%20bar.html'), per the fix for #1670.
    """
    f = File('foo bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)
    self.assertPathsEqual(f.src_path, 'foo bar.md')
    self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo bar.md')
    self.assertPathsEqual(f.dest_path, 'foo bar.html')
    self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo bar.html')
    # Only the URL is encoded; on-disk paths above retain the raw space.
    self.assertEqual(f.url, 'foo%20bar.html')
    self.assertEqual(f.name, 'foo bar')
|
||||
|
||||
def test_files(self):
|
||||
fs = [
|
||||
File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True),
|
||||
|
||||
@@ -689,6 +689,20 @@ class RelativePathExtensionTests(LogTestCase):
|
||||
'<p><a href="sub2/non-index/">link</a></p>' # No trailing /
|
||||
)
|
||||
|
||||
@mock.patch('io.open', mock.mock_open(read_data='[link](file%20name.md)'))
def test_relative_html_link_with_encoded_space(self):
    # A percent-encoded link ('file%20name.md') in the Markdown source must
    # be unquoted to match the on-disk file 'file name.md', then rendered
    # with the space re-encoded in the output href (#1670).
    self.assertEqual(
        self.get_rendered_result(['index.md', 'file name.md']),
        '<p><a href="file%20name/">link</a></p>'
    )
|
||||
|
||||
@mock.patch('io.open', mock.mock_open(read_data='[link](file name.md)'))
def test_relative_html_link_with_unencoded_space(self):
    # A raw (unencoded) link containing a space must also resolve to
    # 'file name.md' and render with the space percent-encoded — the same
    # output as the encoded-link case above (#1670).
    self.assertEqual(
        self.get_rendered_result(['index.md', 'file name.md']),
        '<p><a href="file%20name/">link</a></p>'
    )
|
||||
|
||||
@mock.patch('io.open', mock.mock_open(read_data='[link](../index.md)'))
|
||||
def test_relative_html_link_parent_index(self):
|
||||
self.assertEqual(
|
||||
|
||||
@@ -23,9 +23,13 @@ from mkdocs import exceptions
|
||||
|
||||
try: # pragma: no cover
|
||||
from urllib.parse import urlparse, urlunparse, urljoin # noqa
|
||||
from urllib.parse import quote as urlquote # noqa
|
||||
from urllib.parse import unquote as urlunquote # noqa
|
||||
from collections import UserDict # noqa
|
||||
except ImportError: # pragma: no cover
|
||||
from urlparse import urlparse, urlunparse, urljoin # noqa
|
||||
from urllib import quote # noqa
|
||||
from urllib import unquote # noqa
|
||||
from UserDict import UserDict # noqa
|
||||
|
||||
|
||||
@@ -38,6 +42,12 @@ else: # pragma: no cover
|
||||
string_types = basestring, # noqa
|
||||
text_type = unicode # noqa
|
||||
|
||||
def urlunquote(path): # noqa
    # Python 2 shim: urllib.unquote operates on byte strings, so encode to
    # UTF-8 first ('backslashreplace' keeps unencodable characters from
    # raising), unquote, then decode back to text ('replace' tolerates any
    # invalid UTF-8 produced by the percent-escapes).
    # NOTE(review): assumes `path` is unicode text on entry — a py2 byte
    # string would hit an implicit ascii decode inside .encode(); confirm.
    return unquote(path.encode('utf8', errors='backslashreplace')).decode('utf8', errors='replace')
|
||||
|
||||
def urlquote(path): # noqa
    # Python 2 shim mirroring urlunquote above: urllib.quote requires a
    # byte string, so round-trip through UTF-8 bytes and back to text.
    # The quoted result is pure ASCII, so the final decode cannot fail;
    # 'replace' is defensive only.
    return quote(path.encode('utf8', errors='backslashreplace')).decode('utf8', errors='replace')
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
markdown_extensions = [
|
||||
|
||||
Reference in New Issue
Block a user