forked from PrivateCoffee/wikimore
fix: improve URL parsing and linkage for wiki articles
Refine wiki article URL parsing to handle paths with minimal segments and ensure proper language and project mapping. Update the href assignment to use `url_for` for more precise route generation. These changes improve the accuracy and reliability of links to external wiki pages within the project ecosystem. Fixes #13.
parent 2eeb4d30cf
commit 013bc1ec71
1 changed file with 11 additions and 8 deletions
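For context, a minimal sketch (not part of the commit) of the parsing this change targets, assuming a typical external interwiki link such as `https://en.wikipedia.org/wiki/Coffee`: a plain `/wiki/<title>` path splits into only three segments, which the old `len(path_parts) > 4` check skipped and the new `>= 3` check accepts.

```python
# Illustrative sketch only; the URL below is an assumed example, not taken from the commit.
from urllib.parse import urlparse

parts = urlparse("https://en.wikipedia.org/wiki/Coffee")
path_parts = parts.path.split("/")

print(parts.netloc)              # en.wikipedia.org
print(path_parts)                # ['', 'wiki', 'Coffee'] -> only 3 segments
print("/".join(path_parts[2:]))  # Coffee (the title the new slice recovers)
```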
@@ -155,21 +155,24 @@ def wiki_article(project, lang, title):
                 a["href"] = f"/{project}/{lang}{href}"
 
             elif href.startswith("//") or href.startswith("https://"):
+                print(f"Checking {href}")
                 parts = urlparse(href)
 
-                target_domain = parts.netloc
+                target_domain = f"https://{parts.netloc}"
                 path_parts = parts.path.split("/")
 
-                if len(path_parts) > 4:
-                    target_title = "/".join(path_parts[4:])
-                    target_lang = target_domain.split(".")[0]
+                if len(path_parts) >= 3:
+                    target_title = "/".join(path_parts[2:])
 
                     found = False
                     for language, language_projects in app.languages.items():
-                        for project_name, project_url in language_projects.items():
-                            if target_domain == project_url:
-                                a["href"] = (
-                                    f"/{project_name}/{target_lang}/wiki/{target_title}"
+                        for project_name, project_url in language_projects["projects"].items():
+                            if project_url == target_domain:
+                                a["href"] = url_for(
+                                    "wiki_article",
+                                    project=project_name,
+                                    lang=language,
+                                    title=target_title,
+                                )
                                 found = True
                                 break
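For illustration, a self-contained sketch of the rewritten lookup, under the assumption (taken from the diff) that `app.languages` maps language codes to `{"projects": {project_name: base_url}}` and that the `wiki_article` route takes `project`, `lang`, and `title` parameters. This is not the project's actual module; Flask is set up here only so `url_for()` can be demonstrated.

```python
# Hedged sketch: route shape, app.languages layout, and sample data are assumptions
# based on the diff above, not copied from the wikimore codebase.
from urllib.parse import urlparse

from flask import Flask, url_for

app = Flask(__name__)
app.languages = {
    "en": {"projects": {"wikipedia": "https://en.wikipedia.org"}},
    "de": {"projects": {"wikipedia": "https://de.wikipedia.org"}},
}


@app.route("/<project>/<lang>/wiki/<path:title>")
def wiki_article(project, lang, title):
    return f"{project}/{lang}/{title}"


def rewrite_external_href(href):
    """Map an external interwiki href onto an internal route, if a project matches."""
    parts = urlparse(href)
    target_domain = f"https://{parts.netloc}"
    path_parts = parts.path.split("/")

    if len(path_parts) >= 3:  # e.g. ['', 'wiki', 'Title', ...]
        target_title = "/".join(path_parts[2:])
        for language, language_projects in app.languages.items():
            for project_name, project_url in language_projects["projects"].items():
                if project_url == target_domain:
                    return url_for(
                        "wiki_article",
                        project=project_name,
                        lang=language,
                        title=target_title,
                    )
    return href  # unknown domains are left untouched


with app.test_request_context():
    print(rewrite_external_href("https://de.wikipedia.org/wiki/Kaffee"))
    # -> /wikipedia/de/wiki/Kaffee
```

Generating the link through the named route rather than hand-building the path keeps rewritten hrefs in sync with the route definition, which matches the commit message's stated reason for switching to `url_for`.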