Compare commits
24 Commits
a433ad41b5...gemini
SHA1:
ca9d4cfaf9
1321e0dcaf
a7468d3f40
845866ef9d
5f856f35fe
7b9d2d6fd3
f46d3bac42
ced5648c01
7b07b6f051
e4b08b45bc
1b8614b956
d55d70ec45
67a5096267
0da9e95b2a
2aa4a82af9
88200eb354
bb75241a3c
4e7422c478
3d12793550
859ec9fcfc
a1dfc6bd04
b72d43b250
8e860b89e7
590cd001be
.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
Cargo.lock
target
*.swp
Cargo.toml
@@ -6,5 +6,6 @@ edition = "2021"
[dependencies]
colored = "2.2.0"
regex = "1.11.1"
url = "2.5.4"
termion = "4.0.3"
open = "5.3.2"
url = "2.5.4"
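The dependency added here, `open = "5.3.2"`, lines up with the new fallback in src/main.rs that prints "Attempting to open url in web browser" for input it cannot parse as a gemini URL; presumably the `open` crate is what hands such URLs to the system's default handler. A minimal sketch of that crate's entry point, with an illustrative URL:

```rust
// Hedged sketch: open::that passes a URL (or file path) to the OS default
// handler and returns std::io::Result<()>. The URL below is only an example.
fn main() -> std::io::Result<()> {
    open::that("https://example.com")?;
    Ok(())
}
```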
README.md (new file, 48 lines)
@@ -0,0 +1,48 @@
# Markdown web browser
A web browser that lets you browse gemini capsules that use gemtext as a superior standard to HTML.

# Requirements
- gemget

# Installing
## Build from source:
- Clone the repo
```cargo run -r```

# Help
Type h in the program to see this text:
```
Source code: https://git.javalsai.dynv6.net/deadvey/markdown-webbrowser
q: quit
d: debug info
h: help
r: reload
s: view source code of page
i: visit root index of this host eg: root index of gemini://deadvey.com/blog/4.md is just gemini://deadvey.com
b: go back in history
f: go forward in history
ox: print the hyperlink of reference x eg: o5 or o24
[url]: follow the inputted url
```

# Example:


# TO DO
- Make pages scrollable
- Bookmarks
- Properly comment it because, I can't lie, I can't even follow it.
- Make it memory safe, it crashes if the input is unexpected.
- Use treesitter instead of Regex, because, reasons.
- "wtf deadvey" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "Don't use Regex to parse **ANYTHING**" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "use treesitter" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "yeah, definitley use treesitter" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "use treesitter" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "or glow" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "I found another markdown to terminal converter" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "ban [for using regex]" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- "use treesitter" - [error](https://git.javalsai.dynv6.net/ErrorNoInternet)
- Get a catchier name, 'markdown web browser' sounds kind of lame.


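The gemget requirement above reflects how this branch fetches pages: src/main.rs shells out to gemget, lets it write the response to /tmp/page, and reads the file back before parsing. A self-contained sketch of that flow under the same assumptions (the capsule URL is illustrative and error handling is simplified):

```rust
use std::fs;
use std::process::Command;

// Hedged sketch of the fetch path used in src/main.rs: invoke gemget,
// let it write the response to a temporary file, then read the file back.
// The URL passed in main() is only an example.
fn fetch(url: &str) -> Option<String> {
    let output = Command::new("gemget")
        .args([url, "-o", "/tmp/page"])
        .output()
        .expect("Failed to execute gemget command");
    if !output.status.success() {
        eprintln!("Failed to fetch page: {:?}", output);
        return None;
    }
    fs::read_to_string("/tmp/page").ok()
}

fn main() {
    if let Some(page) = fetch("gemini://geminiprotocol.net/") {
        println!("{}", page);
    }
}
```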
images/error_yapping.gif (new binary file, not shown; 2.3 MiB)
images/screenshot.png (new binary file, not shown; 880 KiB)
src/main.rs (357 lines changed)
@@ -1,150 +1,71 @@
use std::process::{Command};
use std::io::{stdin,stdout,Write};
use colored::Colorize;
use regex::Regex;
use url::{Url, ParseError};
use std::fs;

const DEBUG_MODE: bool = false;

// Import other files
mod parse_gemtext;

fn clear_screen() {
    if DEBUG_MODE == false
    {
        println!("clearing");
        Command::new("clear")
            .status()
            .expect("Failed to clear screen");
        //println!("clearing");
    }
}

fn parse_markdown(page_content: String) -> (String, Vec<String>) {
    let mut parsed_page_content: String = "".to_string();
    let mut hyperlink_number_counter: u64 = 0;
    let mut links: Vec<String> = Vec::new();
    let (screen_width, screen_height) = termion::terminal_size().unwrap(); // So the horizontal line (<hr/>) spans the whole console
fn fetch_page(url: &Url) {
    let full_url_formatted = format!("{}", url);

    for line in page_content.lines() {
        let mut parsed_line: String = line.to_string();
        // Bold
        let bold_regex = Regex::new(r"\*\*(.*?)\*\*").unwrap();
        parsed_line = bold_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].bold().to_string()
        }).to_string();
        let bold_regex = Regex::new(r"__(.*?)__").unwrap();
        parsed_line = bold_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].bold().to_string()
        }).to_string();

        // Horizontal lines
        let hr_regex = Regex::new(r"^(\*\*\*)|(---)|(___)$").unwrap();
        parsed_line = hr_regex.replace_all(&parsed_line, |_caps: &regex::Captures| {
            let mut result: String = "\n".to_string();
            for _x in 0..screen_width/2 {
                result += "- ";
            }
            result += "\n";
            result
        }).to_string();

        // Italics
        let italic_regex = Regex::new(r"\*(.*?)\*").unwrap();
        parsed_line = italic_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].italic().to_string()
        }).to_string();
        let italic_regex = Regex::new(r"_(.*?)_").unwrap();
        parsed_line = italic_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].italic().to_string()
        }).to_string();

        // Block quotes
        let block_quotes_regex = Regex::new(r"^>(.*)").unwrap();
        parsed_line = block_quotes_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!(" | {}", &caps[1].on_black())
        }).to_string();

        // Ordered list
        let ordered_list_regex = Regex::new(r"^([0-9]+)\.(.*)").unwrap();
        parsed_line = ordered_list_regex.replace_all(&parsed_line, " $1. $2").to_string();

        // Unordered list
        let unordered_list_regex = Regex::new(r"^(-|\+|\*).(.*)").unwrap();
        parsed_line = unordered_list_regex.replace_all(&parsed_line, " • $2").to_string();

        // Inline code
        let inline_code_regex = Regex::new(r"`(.*)`").unwrap();
        parsed_line = inline_code_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!("{}", &caps[1].magenta())
        }).to_string();

        // HyperLink
        let hyperlink_regex = Regex::new(r"(.*?)\[(.*?)\]\((.*?)\)").unwrap();
        parsed_line = hyperlink_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            // Check if the character before the link is not '!'
            if !caps[1].ends_with('!') { // caps[1] is everything before the link
                let result = format!("{}{}[{}]", &caps[1], &caps[2].blue().underline(), hyperlink_number_counter);
                let url = caps[3].to_string();
                links.push(url);
                hyperlink_number_counter += 1;
                result
            } else {
                // If it's an image (starts with !), return the link as is
                format!("[{}]", &caps[2].green())
            }
        }).to_string();


        parsed_page_content+=&(parsed_line + "\n");
    }

    // multiline code
    let multiline_code_regex = Regex::new(r"(?ms)%%%((.*?\n)+?)%%%").unwrap();
    parsed_page_content = multiline_code_regex.replace_all(&parsed_page_content, |caps: &regex::Captures| {
        // Capture the code inside the %% blocks
        let code_block = &caps[1];

        // Add a tab to each line in the block
        let indented_code = code_block
            .lines()
            .map(|line| format!("\t{}", line)) // Insert tab at the start of each line
            .collect::<Vec<String>>()
            .join("\n");

        // Return the formatted block with magenta color
        format!("{}", indented_code.magenta())
    }).to_string();

    return (parsed_page_content, links);
}

fn fetch_page(host: &String, port: &String, path: &String) -> String {
    let full_url_formatted = format!("{}:{}/{}", host, port, path);

    // Call curl using Command
    let output = Command::new("curl")
        .arg(full_url_formatted)
    let output = Command::new("gemget")
        .args([full_url_formatted, "-o".to_string(), "/tmp/page".to_string()])
        .output()
        .expect("Failed to execute curl command");
        .expect("Failed to execute gemget command");

    // Check if the command was successful
    if output.status.success() {
        let page: String = String::from_utf8_lossy(&output.stdout).to_string();
        return page
    } else {
        eprintln!("Error:\n{}", String::from_utf8_lossy(&output.stderr));
        let result: String = "error".to_string();
        return result
    if ! output.status.success() {
        println!("{}\n{:?}\n", "Failed to fetch page:".red(), output);
    }
}

fn render_page(host: String, port: String, path: String) -> Vec<String> {
fn render_page(url: Url, source: bool) -> Vec<String> {
    clear_screen();
    let mut content = fetch_page(&host, &port, &path);
    fetch_page(&url);
    let mut links = Vec::new();
    let (screen_width, screen_height) = termion::terminal_size().unwrap();
    if let Ok(mut content) = fs::read_to_string::<String>("/tmp/page".to_string()) {
        Command::new("rm")
            .arg("/tmp/page")
            .output()
            .expect("Failed to delete tmp page");
        let (screen_width, _screen_height) = termion::terminal_size().unwrap();

        if source == true {
            content += &format!("{}", &"Viewing source code".yellow());
        }
        else {
            (content, links) = parse_gemtext::parse_gemtext(content);
        }

    (content, links) = parse_markdown(content);
    print!("{}: {}\n", host, path);
    for _i in 0..screen_width {
        print!("—");
        print!("-");
    }
        print!("{}\n", url);
        for _i in 0..screen_width {
            print!("-");
        }
        println!("\n\n{}", content);
    for _i in 0..screen_width {
        print!("—");
        print!("-");
    }
        println!();

        // Return links (you can add link parsing logic)
    }
    return links;
}

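Both parsers are built on the same building block: a regex with a capture group, passed to `Regex::replace_all` together with a closure that re-emits the captured text with ANSI styling from the `colored` crate. A self-contained sketch of that pattern (the input line is illustrative):

```rust
use colored::Colorize;
use regex::Regex;

// Hedged sketch of the regex-plus-colored styling pattern that
// parse_markdown and parse_gemtext use for every construct they render.
fn main() {
    let bold_regex = Regex::new(r"\*\*(.*?)\*\*").unwrap();
    let line = "some **bold** text";
    let styled = bold_regex
        .replace_all(line, |caps: &regex::Captures| caps[1].bold().to_string())
        .to_string();
    println!("{}", styled);
}
```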
@@ -161,84 +82,160 @@ fn input() -> String{
    return s;
}

fn parse_url(url: String, previous_host: &String) -> (String, String, String) {
    let mut host: String = previous_host.to_string();
    let mut port: String = "3477".to_string();
    let mut path: String = "/".to_string();
fn parse_url(user_input: String, previous_url: &Url) -> Result<Url, ParseError> {
    println!("user input: {}",user_input);
    println!("previous url: {:?}",previous_url);
    let to_parse = if user_input.contains("://") {
        println!("Contains different scheme or is a path");
        user_input
    }
    else {
        format!("gemini://{}/{}",Url::host_str(previous_url).expect("invalid").to_string(), user_input)
    };

    let mttp_regex = Regex::new(r"^mttp:\/\/(.*?)\/(.*?)$").unwrap();
    let mttp_regex_no_path = Regex::new(r"^mttp:\/\/(.*?)$").unwrap();
    let accept_this_as_mttp = Regex::new(r"^(.*?)$").unwrap();
    let path_change = Regex::new(r"^/(.*?)$").unwrap();
    println!("Parsing: {}", to_parse);
    if let Ok(mut url) = Url::parse(&to_parse) {
        if url.port() == None {
            let _ = url.set_port(Some(1965));
        }
        println!("{:?}",url);
        println!("{}",url.as_str());
        println!("parsed successfully");
        return Ok(url)
    }
    else {
        return Err(ParseError::InvalidDomainCharacter)
    }

    if let Some(caps) = mttp_regex.captures(&url) {
        host = caps[1].to_string();
        port = "3477".to_string();
        path = caps[2].to_string();
    }
    else if let Some(caps) = mttp_regex_no_path.captures(&url) {
        host = caps[1].to_string();
        port = "3477".to_string();
        path = "/".to_string();
    }
    else if let Some(caps) = path_change.captures(&url) {
        println!("path change");
        host = previous_host.to_string();
        port = "3477".to_string();
        path = caps[1].to_string();
    }
    else if let Some(caps) = accept_this_as_mttp.captures(&url) {
        host = caps[1].to_string();
        port = "3477".to_string();
        path = "/".to_string();
    }
    println!("{}:{}/{}",host,port,path);

    return (host, port, path);
}

fn main() {
    clear_screen();
    println!("Enter a url: ");
    let url = input();
    let user_input = input();

    if url == "q" {
    if user_input == "q" || user_input == "quit" || user_input == "exit" {
        std::process::exit(0);
    }

    let (mut host, mut port, mut path) = parse_url(url, &"example.com".to_string()); // Must pass
                                                                                     // 'previous
                                                                                     // host'

    let mut load_page: bool = true;
    let mut history: Vec<Url> = Vec::new();
    let mut historical_position: usize = 0;
    let mut links: Vec<String> = Vec::new();
    let mut source: bool = false; // Whether to view source of markdown page or rendered version
    if let Ok(mut url) = parse_url(user_input, &Url::parse(&"gemini://geminiprotocol.net").unwrap()) { // Change this and make internal pages ;)
        history.push(url.clone());
        'mainloop: loop {
            let links = render_page(host.clone(), port.clone(), path.clone());

            println!("{}:{}/{}", host, port, path);

            if load_page {
                links = render_page(history[historical_position].clone(), source);
            println!("Enter reference number to follow, h for help, or q to quit");
            let link_to_follow = input();
            if link_to_follow == "q" {
            }
            url = history[historical_position].clone();
            load_page = false;

            let user_input = input();
            if user_input == "q" {
                break 'mainloop;
            }
            else if link_to_follow == "r" {
            else if user_input == "d" {
                println!(
                    "load_page: {}\nhistory: {:?}\nhistorical_position: {}\nlinks: {:?}\nsource: {}",
                    load_page,
                    history,
                    historical_position,
                    links,
                    source
                );
            }
            else if user_input == "r" {
                load_page = true;
                continue;
            }
            else if link_to_follow == "h" {
                println!("
Source code: https://git.javalsai.dynv6.net/deadvey/markdown-webbrowser\n
q: quit
h: help
r: reload
i: visit root index of this host eg: root index of mttp://deadvey.com/blog/4.md is just deadvey.com
b: go back in history
");
            else if user_input == "s" {
                source = ! source; // Flip the boolean to toggle source mode
                load_page = true;
            }
            else if user_input == "i" {
                let _ = url.set_path("/");
                for _i in historical_position+1..history.len() {
                    history.remove(historical_position+1);
                }
                history.push(url.clone());
                historical_position += 1;
                load_page = true;
            }
            else if user_input == "b" {
                if historical_position > 0 {
                    historical_position -= 1;
                    load_page = true;
                }
                else {
                    let number: usize = link_to_follow.parse::<usize>().unwrap();

                    (host, port, path) = parse_url(links[number].clone(), &host);
                    println!("{}:{}/{}", host, port, path);
                    println!("At start of history");
                }
            }
            else if user_input == "f" {
                if historical_position < history.len()-1 {
                    historical_position += 1;
                    load_page = true;
                }
                else {
                    println!("At end of history");
                }
            }
            else if user_input == "h" {
                println!("Source code: https://git.javalsai.dynv6.net/deadvey/markdown-webbrowser
q: quit
d: debug info
h: help
r: reload
s: view source code of page
i: visit root index of this host eg: root index of gemini://deadvey.com/blog/4.md is just gemini://deadvey.com
b: go back in history
f: go forward in history
ox: print the hyperlink of reference x eg: o5 or o24
[url]: follow the inputted url");
            }
            else if user_input.chars().nth(0).unwrap() == 'o' {
                let number_str = &user_input[1..];
                if let Ok(number) = number_str.parse::<usize>() {
                    println!("{}", links[number]);
                } else {
                    println!("error");
                }
            }
            else if let Ok(number) = user_input.parse::<usize>() {
                if number < links.len() {
                    if let Ok(parsed_value) = parse_url(links[number].clone(), &url.clone()) {
                        url = parsed_value;
                        for _i in historical_position+1..history.len() {
                            history.remove(historical_position+1);
                        }
                        history.push(url.clone());
                        historical_position += 1;
                        load_page = true;
                    }
                    else {
                        println!("Invalid url\nAttempting to open url in web browser");
                    }
                } else {
                    println!("Invalid reference id");
                }
            }
            else if let Ok(parsed_value) = parse_url(user_input, &url.clone()) {
                url = parsed_value;
                for _i in historical_position+1..history.len() {
                    history.remove(historical_position+1);
                }
                history.push(url.clone());
                historical_position += 1;
                load_page = true;
            }
            else {
                println!("Invalid input");
            }
        }
    }
    else {
        println!("Invalid mttp url, try mttp:// at the start of your input.");
    }
}

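The rewritten main loop drops the old host/port/path triple and mttp regexes in favour of `url::Url` plus a history model: a `Vec<Url>` and an index, where following a link truncates any forward entries before pushing the new URL, and the b/f commands just move the index. A stripped-down sketch of that model (the type and method names here are illustrative, not the ones in main.rs):

```rust
use url::Url;

// Hedged sketch of the back/forward model used in the new main loop:
// history is a Vec<Url> plus a cursor; visiting truncates forward
// entries before pushing, back/forward only move the cursor.
struct History {
    entries: Vec<Url>,
    position: usize,
}

impl History {
    fn visit(&mut self, url: Url) {
        // Drop anything "ahead" of the cursor, as main.rs does before pushing.
        self.entries.truncate(self.position + 1);
        self.entries.push(url);
        self.position = self.entries.len() - 1;
    }

    fn back(&mut self) -> Option<&Url> {
        if self.position > 0 {
            self.position -= 1;
            Some(&self.entries[self.position])
        } else {
            None // at start of history
        }
    }

    fn forward(&mut self) -> Option<&Url> {
        if self.position + 1 < self.entries.len() {
            self.position += 1;
            Some(&self.entries[self.position])
        } else {
            None // at end of history
        }
    }
}

fn main() {
    let start = Url::parse("gemini://geminiprotocol.net").unwrap();
    let mut history = History { entries: vec![start], position: 0 };
    history.visit(Url::parse("gemini://example.org/index.gmi").unwrap());
    assert!(history.back().is_some());
    assert!(history.forward().is_some());
}
```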
src/parse_gemtext.rs (new file, 91 lines)
@@ -0,0 +1,91 @@
use colored::Colorize;
use regex::Regex;

pub fn parse_gemtext(page_content: String) -> (String, Vec<String>) {
    let mut parsed_page_content: String = "".to_string();
    let mut hyperlink_number_counter: u64 = 0;
    let mut links: Vec<String> = Vec::new();
    let mut preformatted_code_toggle = false;

    // Regex patterns
    let preformatted_text_regex = Regex::new(r"^```(.*)").unwrap();
    let header1_regex = Regex::new(r"^# (.*)").unwrap();
    let header2_regex = Regex::new(r"^## (.*)").unwrap();
    let header3_regex = Regex::new(r"^### (.*)").unwrap();
    let block_quotes_regex = Regex::new(r"^>(.*)").unwrap();
    let unordered_list_regex = Regex::new(r"^([ \t]+|^)(\*).(.*)").unwrap();
    let hyperlink_regex = Regex::new(r"=>\s(\S*)\s(.*)").unwrap();
    let quick_hyperlink_regex = Regex::new(r"=>\s(.*)").unwrap();

    for line in page_content.lines() {
        let mut parsed_line: String = line.to_string();
        let mut remove_line = false;

        // preformatted text
        parsed_line = preformatted_text_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            // Flip the toggle
            preformatted_code_toggle = ! preformatted_code_toggle;

            if caps[1] == *""
            {
                remove_line = true;
            }

            // Remove the ```
            format!("{}", &caps[1].magenta())
        }).to_string();

        if preformatted_code_toggle == false
        {
            // Headers
            parsed_line = header1_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                format!("{}", &caps[1].blue().bold().underline())
            }).to_string();
            parsed_line = header2_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                format!("{}", &caps[1].blue().bold())
            }).to_string();
            parsed_line = header3_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                format!("{}", &caps[1].bold())
            }).to_string();

            // Block quotes
            parsed_line = block_quotes_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                format!(" | {}", &caps[1].red())
            }).to_string();

            // Unordered list ([ ]+|^)- (.*)
            parsed_line = unordered_list_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                format!("{} • {}", &caps[1], &caps[3])
            }).to_string();

            // HyperLink
            parsed_line = hyperlink_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                // Check if the character before the link is not '!'
                let result = format!("[{}] {}", hyperlink_number_counter, &caps[2].blue().underline());
                let url = caps[1].to_string();
                links.push(url);
                hyperlink_number_counter += 1;
                result
            }).to_string();

            parsed_line = quick_hyperlink_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
                hyperlink_number_counter += 1;
                let url = caps[1].to_string();
                links.push(url);
                format!("[{}] {}", hyperlink_number_counter, &caps[1].blue().underline())
            }).to_string();
        }
        else if preformatted_code_toggle == true
        {
            parsed_line = parsed_line.magenta().to_string();
        }

        if remove_line == false
        {
            parsed_page_content+=&(parsed_line + "\n");
        }
    }

    return (parsed_page_content, links);
}
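parse_gemtext.rs turns gemtext link lines (`=> URL optional-label`) into numbered references while collecting the URLs for the main loop to follow. A small self-contained sketch of just that piece, reusing the same two regexes (the sample lines are illustrative, and the numbering here is normalised to start at 0):

```rust
use regex::Regex;

// Hedged sketch: number gemtext link lines and collect their URLs,
// mirroring the two-regex approach in parse_gemtext.rs.
fn main() {
    let hyperlink_regex = Regex::new(r"=>\s(\S*)\s(.*)").unwrap();
    let quick_hyperlink_regex = Regex::new(r"=>\s(.*)").unwrap();

    let mut links: Vec<String> = Vec::new();
    let mut counter: u64 = 0;

    for line in ["=> gemini://example.org/ An example capsule", "=> gemini://example.org/about.gmi"] {
        // Labelled link: "=> URL label" becomes "[n] label"
        let rendered = hyperlink_regex.replace_all(line, |caps: &regex::Captures| {
            links.push(caps[1].to_string());
            let out = format!("[{}] {}", counter, &caps[2]);
            counter += 1;
            out
        });
        // Bare link: "=> URL" becomes "[n] URL"
        let rendered = quick_hyperlink_regex.replace_all(&rendered, |caps: &regex::Captures| {
            links.push(caps[1].to_string());
            let out = format!("[{}] {}", counter, &caps[1]);
            counter += 1;
            out
        });
        println!("{}", rendered);
    }
    println!("collected: {:?}", links);
}
```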