src/gui/mod.rs: 6 changes (5 additions & 1 deletion)
@@ -36,7 +36,11 @@ pub use fav_dialog::FavDialog;
 pub use image_panel::ImagePanel;
 pub use macro_dialog::MacroDialog;
 pub use note_panel::{
-    build_nvim_command, build_wezterm_command, extract_links, show_wiki_link, spawn_external,
+    build_nvim_command,
+    build_wezterm_command,
+    extract_links,
+    show_wiki_link,
+    spawn_external,
     NotePanel,
 };
 pub use notes_dialog::NotesDialog;
src/gui/note_panel.rs: 84 changes (61 additions & 23 deletions)
@@ -237,11 +237,15 @@ impl NotePanel {
         let links = extract_links(&self.note.content);
         enum LinkKind {
             Wiki(String),
-            Url(String),
+            Url(String, String),
         }
         let mut all_links: Vec<LinkKind> = Vec::new();
         all_links.extend(wiki.into_iter().map(LinkKind::Wiki));
-        all_links.extend(links.into_iter().map(LinkKind::Url));
+        all_links.extend(
+            links
+                .into_iter()
+                .map(|(label, url)| LinkKind::Url(label, url)),
+        );
         if !all_links.is_empty() {
             let was_focused = ui.ctx().memory(|m| m.has_focus(content_id));
             ui.horizontal_wrapped(|ui| {
@@ -255,8 +259,12 @@
                     LinkKind::Wiki(s) => {
                         let _ = show_wiki_link(ui, app, s);
                     }
-                    LinkKind::Url(s) => {
-                        let _ = ui.hyperlink(s);
+                    LinkKind::Url(label, url) => {
+                        if url.starts_with("https://") {
+                            let _ = ui.hyperlink_to(label, url);
+                        } else {
+                            let _ = show_wiki_link(ui, app, url);
+                        }
                     }
                 }
             }
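The rewritten LinkKind::Url arm keys entirely off the "https://" prefix that extract_links now guarantees for external links. A minimal standalone sketch of that routing rule with the egui and app plumbing stripped out; the LinkAction and route_link names are illustrative and not part of this PR:

// Sketch only: mirrors the branch above. Fully-qualified https URLs
// become external hyperlinks (ui.hyperlink_to); everything else goes
// through the wiki-link path.
#[derive(Debug, PartialEq)]
enum LinkAction {
    External { label: String, url: String },
    Wiki(String),
}

fn route_link(label: &str, url: &str) -> LinkAction {
    if url.starts_with("https://") {
        LinkAction::External {
            label: label.to_string(),
            url: url.to_string(),
        }
    } else {
        LinkAction::Wiki(url.to_string())
    }
}

With this rule, route_link("Rust", "https://rust-lang.org") routes to an external hyperlink, while route_link("todo", "todo") falls back to the wiki path.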
@@ -1151,24 +1159,46 @@ fn extract_tags(content: &str) -> Vec<String> {
     tags
 }
 
-pub fn extract_links(content: &str) -> Vec<String> {
+pub fn extract_links(content: &str) -> Vec<(String, String)> {
+    static MARKDOWN_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap());
     static LINK_RE: Lazy<Regex> =
         Lazy::new(|| Regex::new(r"([a-zA-Z][a-zA-Z0-9+.-]*://\S+|www\.\S+)").unwrap());
-    let mut links: Vec<String> = LINK_RE
-        .find_iter(content)
-        .filter_map(|m| {
-            let raw = m.as_str();
-            let url = if raw.starts_with("www.") {
-                format!("https://{raw}")
-            } else {
-                raw.to_string()
-            };
-            Url::parse(&url)
-                .ok()
-                .filter(|u| u.scheme() == "https")
-                .map(|_| raw.to_string())
-        })
-        .collect();
+
+    let mut links: Vec<(String, String)> = Vec::new();
+
+    for cap in MARKDOWN_RE.captures_iter(content) {
+        let label = cap[1].to_string();
+        let raw = cap[2].to_string();
+        let url = if raw.starts_with("www.") {
+            format!("https://{raw}")
+        } else {
+            raw.clone()
+        };
+        if Url::parse(&url)
+            .ok()
+            .filter(|u| u.scheme() == "https")
+            .is_some()
+        {
+            // store the fully-qualified URL so hyperlinks open externally
+            links.push((label, url));
+        }
+    }
+
+    let stripped = MARKDOWN_RE.replace_all(content, "");
+    links.extend(LINK_RE.find_iter(&stripped).filter_map(|m| {
+        let raw = m.as_str();
+        let url = if raw.starts_with("www.") {
+            format!("https://{raw}")
+        } else {
+            raw.to_string()
+        };
+        Url::parse(&url)
+            .ok()
+            .filter(|u| u.scheme() == "https")
+            // use the original text as the label but link to the canonical URL
+            .map(|_| (raw.to_string(), url))
+    }));
+
     links.sort();
     links.dedup();
     links
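For callers, the visible change is the return type: extract_links now yields (label, url) pairs instead of bare URL strings. A quick sketch of the behaviour implied by the code above, written as a test-shaped fragment; the test name is hypothetical and it is not one of the tests added by this PR (it assumes extract_links is in scope, e.g. via the re-export added in src/gui/mod.rs):

#[test]
fn extract_links_returns_label_url_pairs() {
    // Markdown links keep their label; bare "www." matches keep the
    // original text as the label but link to the https-prefixed URL.
    let content = "see [Rust](https://www.rust-lang.org) or www.example.com";
    assert_eq!(
        extract_links(content),
        vec![
            ("Rust".to_string(), "https://www.rust-lang.org".to_string()),
            (
                "www.example.com".to_string(),
                "https://www.example.com".to_string(),
            ),
        ]
    );
}

Because every returned URL is fully qualified, the rendering branch earlier in this file can rely on a plain "https://" prefix check to decide between an external hyperlink and the wiki-link handler.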
@@ -1411,13 +1441,21 @@ mod tests {
 
     #[test]
     fn extract_links_filters_invalid() {
-        let content = "visit http://example.com and http://exa%mple.com also https://rust-lang.org and https://rust-lang.org and www.example.com and www.example.com and www.exa%mple.com";
+        let content = "visit http://example.com and http://exa%mple.com also [Rust](https://rust-lang.org) and https://rust-lang.org and https://rust-lang.org and www.example.com and www.example.com and www.exa%mple.com and [Google](www.google.com)";
         let links = extract_links(content);
         assert_eq!(
             links,
             vec![
-                "https://rust-lang.org".to_string(),
-                "www.example.com".to_string(),
+                ("Google".to_string(), "https://www.google.com".to_string()),
+                ("Rust".to_string(), "https://rust-lang.org".to_string()),
+                (
+                    "https://rust-lang.org".to_string(),
+                    "https://rust-lang.org".to_string(),
+                ),
+                (
+                    "www.example.com".to_string(),
+                    "https://www.example.com".to_string(),
+                ),
             ]
         );
     }
tests/notes_plugin.rs: 14 changes (11 additions & 3 deletions)
@@ -292,13 +292,21 @@ fn missing_link_colored_red() {
 
 #[test]
 fn link_validation_rejects_invalid_urls() {
-    let content = "visit http://example.com and http://exa%mple.com also https://rust-lang.org and https://rust-lang.org and www.example.com and www.example.com and www.exa%mple.com";
+    let content = "visit http://example.com and http://exa%mple.com also [Rust](https://rust-lang.org) and https://rust-lang.org and https://rust-lang.org and www.example.com and www.example.com and www.exa%mple.com and [Google](www.google.com)";
     let links = extract_links(content);
     assert_eq!(
         links,
         vec![
-            "https://rust-lang.org".to_string(),
-            "www.example.com".to_string(),
+            ("Google".to_string(), "https://www.google.com".to_string()),
+            ("Rust".to_string(), "https://rust-lang.org".to_string()),
+            (
+                "https://rust-lang.org".to_string(),
+                "https://rust-lang.org".to_string(),
+            ),
+            (
+                "www.example.com".to_string(),
+                "https://www.example.com".to_string(),
+            ),
         ]
     );
 }