use std::process::Command;
use std::io::{stdin, stdout, Write};
use colored::Colorize;
use regex::Regex;
use url::{Url, ParseError};

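/// Clear the terminal by shelling out to the `clear` command.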
fn clear_screen() {
    Command::new("clear")
        .status()
        .expect("Failed to clear screen");
}

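/// Render a markdown page into styled terminal text.
/// Returns the rendered text and the list of hyperlink URLs found,
/// indexed by the reference numbers printed next to each link.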
fn parse_markdown(page_content: String) -> (String, Vec<String>) {
    let mut parsed_page_content: String = "".to_string();
    let mut hyperlink_number_counter: u64 = 0;
    let mut links: Vec<String> = Vec::new();
    let (screen_width, _screen_height) = termion::terminal_size().unwrap(); // So the horizontal line (<hr/>) spans the whole console

    for line in page_content.lines() {
        let mut parsed_line: String = line.to_string();

        // Bold
        let bold_regex = Regex::new(r"((\*\*)|(__))(.*?)((\*\*)|(__))").unwrap();
        parsed_line = bold_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[4].bold().to_string()
        }).to_string();

        // Strikethrough
        let strikethrough_regex = Regex::new(r"~~(.*?)~~").unwrap();
        parsed_line = strikethrough_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].strikethrough().to_string()
        }).to_string();

        // Horizontal lines
        // Anchor the whole alternation so "---" etc. only match when they make up the entire line
        let hr_regex = Regex::new(r"^(\*\*\*|---|___)$").unwrap();
        parsed_line = hr_regex.replace_all(&parsed_line, |_caps: &regex::Captures| {
            let mut result: String = "\n".to_string();
            for _x in 0..screen_width / 2 {
                result += "- ";
            }
            result += "\n";
            result
        }).to_string();

        // html br tag support
        let br_regex = Regex::new(r"(.*?)<br/>(.*?)").unwrap();
        parsed_line = br_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!("{}{}{}", &caps[1], "\n", &caps[2])
        }).to_string();

        // Italics
        let italic_regex = Regex::new(r"\*(.*?)\*").unwrap();
        parsed_line = italic_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].italic().to_string()
        }).to_string();
        let italic_regex = Regex::new(r"_(.*?)_").unwrap();
        parsed_line = italic_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            caps[1].italic().to_string()
        }).to_string();

        // Block quotes
        let block_quotes_regex = Regex::new(r"^>(.*)").unwrap();
        parsed_line = block_quotes_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!(" | {}", &caps[1])
        }).to_string();

        // Ordered list
        let ordered_list_regex = Regex::new(r"^([ \t]+|^)([0-9]+)\. (.*)").unwrap();
        parsed_line = ordered_list_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!("{} {}. {}", &caps[1], &caps[2], &caps[3])
        }).to_string();

        // Unordered list
        let unordered_list_regex = Regex::new(r"^([ \t]+|^)(-|\+|\*).(.*)").unwrap();
        parsed_line = unordered_list_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!("{} • {}", &caps[1], &caps[3])
        }).to_string();

        // Inline code
        let inline_code_regex = Regex::new(r"`([^`]+?)`").unwrap();
        parsed_line = inline_code_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            format!("{}", &caps[1].magenta())
        }).to_string();

        // HyperLink
        let hyperlink_regex = Regex::new(r"(.*?)\[(.*?)\]\((.*?)\)").unwrap();
        parsed_line = hyperlink_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            // Check if the character before the link is not '!'
            if !caps[1].ends_with('!') { // caps[1] is everything before the link
                let result = format!("{}{}[{}]", &caps[1], &caps[2].blue().underline(), hyperlink_number_counter);
                let url = caps[3].to_string();
                links.push(url);
                hyperlink_number_counter += 1;
                result
            } else {
                // If it's an image (preceded by '!'), show the alt text in green.
                // Keep the text before the '!' and number the reference before
                // incrementing so it matches the index in `links`.
                let url = caps[3].to_string();
                links.push(url);
                let result = format!("{}({})[{}]", &caps[1][..caps[1].len() - 1], &caps[2].green(), hyperlink_number_counter);
                hyperlink_number_counter += 1;
                result
            }
        }).to_string();

        // Quick hyperlink: <url>
        let quick_hyperlink_regex = Regex::new(r"<(.*?)>").unwrap();
        parsed_line = quick_hyperlink_regex.replace_all(&parsed_line, |caps: &regex::Captures| {
            let url = caps[1].to_string();
            links.push(url);
            // Use the counter before incrementing so the printed number matches the index in `links`
            let result = format!("{}[{}]", &caps[1].blue().underline(), hyperlink_number_counter);
            hyperlink_number_counter += 1;
            result
        }).to_string();

        parsed_page_content += &(parsed_line + "\n");
    }

    // multiline code
    let multiline_code_regex = Regex::new(r"(?ms)```((.*?\n)+?)```").unwrap();
    parsed_page_content = multiline_code_regex.replace_all(&parsed_page_content, |caps: &regex::Captures| {
        // Capture the code inside the ``` fences
        let code_block = &caps[1];

        // Add a tab to each line in the block
        let indented_code = code_block
            .lines()
            .map(|line| format!("\t{}", line)) // Insert tab at the start of each line
            .collect::<Vec<String>>()
            .join("\n");

        // Return the formatted block with magenta color
        format!("{}", indented_code.magenta())
    }).to_string();

    return (parsed_page_content, links);
}

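/// Fetch a page by shelling out to `curl`; returns the response body on
/// success, or the string "error" if curl fails.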
fn fetch_page(url: &Url) -> String {
    let full_url_formatted = format!("{}", url);

    // Call curl using Command
    let output = Command::new("curl")
        .arg(full_url_formatted)
        .output()
        .expect("Failed to execute curl command");

    // Check if the command was successful
    if output.status.success() {
        let page: String = String::from_utf8_lossy(&output.stdout).to_string();
        return page
    } else {
        eprintln!("Error:\n{}", String::from_utf8_lossy(&output.stderr));
        let result: String = "error".to_string();
        return result
    }
}

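/// Clear the screen, fetch the page at `url`, render it (parsing markdown if
/// it starts with <!DOCTYPE md>) and return the hyperlinks it contains.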
fn render_page(url: Url) -> Vec<String> {
    clear_screen();
    let mut content = fetch_page(&url);
    let mut links = Vec::new();
    let (screen_width, _screen_height) = termion::terminal_size().unwrap();

    // starts_with avoids a panic when the response is shorter than 13 bytes
    if content.starts_with("<!DOCTYPE md>") {
        (content, links) = parse_markdown((&content[13..]).to_string());
    }
    else {
        content += &format!("{}", &"Warning: this page is not valid markdown (it should start with <!DOCTYPE md>), showing raw text".yellow());
    }

    for _i in 0..screen_width {
        print!("—");
    }
    print!("{}\n", url);
    for _i in 0..screen_width {
        print!("—");
    }
    println!("\n\n{}", content);
    for _i in 0..screen_width {
        print!("—");
    }
    println!();

    // Return links (you can add link parsing logic)
    return links;
}

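/// Read a line from stdin and strip the trailing newline / carriage return.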
fn input() -> String {
    let mut s = String::new();
    let _ = stdout().flush();
    stdin().read_line(&mut s).expect("Did not enter a correct string");
    if let Some('\n') = s.chars().next_back() {
        s.pop();
    }
    if let Some('\r') = s.chars().next_back() {
        s.pop();
    }
    return s;
}

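/// Turn user input into a `Url`: http(s) URLs are opened in the system web
/// browser (and the previous URL is kept), other schemes are used as-is,
/// and bare addresses get a scheme prepended and the default port 3477.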
fn parse_url(user_input: String, previous_url: String) -> Result<Url, ParseError> {
    let user_input = if user_input.contains("http://") || user_input.contains("https://") {
        println!("Opening http page in web browser");
        let _ = open::that(&user_input);
        previous_url
    }
    else if user_input.contains("://") {
        println!("Contains different scheme");
        user_input
    }
    else {
        format!("http://{}", user_input) // Prepend 'http://' if no scheme is found
    };

    let mut url: Url = Url::parse(&user_input)?;
    if url.port().is_none() {
        let _ = url.set_port(Some(3477)); // Fall back to the default port 3477
    }
    println!("{:?}", url);
    println!("{}", url.as_str());
    return Ok(url);
}

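/// Entry point: prompt for a URL, then loop rendering pages and handling
/// navigation commands (reference numbers, h, r, o<x>, q).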
fn main() {
    clear_screen();
    println!("Enter a url: ");
    let user_input = input();

    if user_input == "q" {
        std::process::exit(0);
    }
    let mut load_page: bool = true;
    let mut history: Vec<Url> = Vec::new();
    let mut historical_position: usize = 0;
    let mut links: Vec<String> = Vec::new();
    if let Ok(mut url) = parse_url(user_input, "http://deadvey.com".to_string()) { // Change this and make internal pages ;)
        'mainloop: loop {
            if load_page {
                links = render_page(url.clone());
                println!("Enter reference number to follow, h for help, or q to quit");
            }
            load_page = false;

            let user_input = input();
            if user_input == "q" {
                break 'mainloop;
            }
            else if user_input == "r" {
                load_page = true;
                continue;
            }/*
            else if user_input == "i" {
                url.path = "/".to_string();
                load_page = true;
            }
            else if user_input == "b" {
                if historical_position >= 1 {
                    historical_position -= 1;
                    if let Ok(parsed_value) = parse_url(format!("{}://{}/{}", history[historical_position].protocol.clone(), history[historical_position].hostname.clone(), history[historical_position].path.clone()), &url.hostname) {
                        url = parsed_value;
                        load_page = true;
                    }
                    else {
                        println!("Invalid url");
                    }
                }
            }*/
            else if user_input == "h" {
                println!("Source code: https://git.javalsai.dynv6.net/deadvey/markdown-webbrowser\nq: quit\nh: help\nr: reload\ni: visit root index of this host eg: root index of mttp://deadvey.com/blog/4.md is just deadvey.com\nb: go back in history\nox: print the hyperlink of reference x eg: o5 or o24");
            }
            else if user_input.starts_with('o') {
                // starts_with avoids a panic on empty input
                let number_str = &user_input[1..];
                if let Ok(number) = number_str.parse::<usize>() {
                    // Bounds-checked lookup so an out-of-range reference doesn't panic
                    match links.get(number) {
                        Some(link) => println!("{}", link),
                        None => println!("Invalid reference id"),
                    }
                } else {
                    println!("error");
                }
            }
            else if let Ok(number) = user_input.parse::<usize>() {
                if number < links.len() {
                    if let Ok(parsed_value) = parse_url(links[number].clone(), url.as_str().to_string()) {
                        url = parsed_value;
                        /*
                        for i in historical_position+1..history.len()-1 {
                            history.remove(i);
                        }
                        historical_position += 1;*/
                        load_page = true;
                    }
                    else {
                        println!("Invalid url\nAttempting to open url in web browser");
                    }
                } else {
                    println!("Invalid reference id");
                }
            }
            /*
            else if let Ok(parsed_value) = parse_url(user_input, &url.hostname) {
                url = parsed_value;
                load_page = true;
            }*/
            else {
                println!("Invalid input");
            }
        }
    }
    else {
        println!("Invalid mttp url, try mttp:// at the start of your input.");
    }
}