Compare commits

..

1 Commits

Author SHA1 Message Date
Nicolas Sanchez 4055d3843a evorust
3 years ago

636
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -20,15 +20,14 @@ askama_actix = "0.14.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
git2 = "0.16.0"
git2 = "0.17.0"
regex = "1"
r2d2 = "0.8"
r2d2_sqlite = "0.21.0"
chrono = "0.4.24"
rusqlite = {version = "0.28.0", features=["bundled", "chrono"]}
rusqlite = {version = "0.29.0", features=["bundled", "chrono"]}
lazy_static = "1.4.0"
mongodb = "2.3.1"
unidecode = "0.3.0"

Binary file not shown.

@ -1,26 +1,6 @@
use clap::Parser;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use regex::Regex;
use unidecode::unidecode;
/// Normalization of user-supplied names into slug form:
/// ASCII-transliterated, lowercase, snake_case, `[a-z0-9_]` only.
pub trait NormalizeName {
    /// Returns the normalized (slug) form of the name.
    fn normalize_name(&self) -> String;
}

impl NormalizeName for String {
    fn normalize_name(&self) -> String {
        // Collapse runs of whitespace/underscores into a single `_`.
        let space_re = Regex::new(r"[\s_]+").unwrap();
        // Strip everything that is not a lowercase alphanumeric or `_`.
        let remove_re = Regex::new(r"[^a-z0-9_]").unwrap();
        // BUG FIX: lowercase BEFORE stripping. The previous order removed
        // uppercase letters instead of lowercasing them, because `[^a-z0-9_]`
        // matched them ("Hello World" came out as "ello_orld").
        let mut s = unidecode(self).to_lowercase();
        s = space_re.replace_all(&s, "_").to_string();
        s = remove_re.replace_all(&s, "").to_string();
        // Collapse `__` runs created by character removal ("a - b" would
        // otherwise yield "a__b") — matches the JS `normalizeName` helper,
        // which ends with a `_+` collapse.
        space_re.replace_all(&s, "_").to_string()
    }
}
#[derive(Parser)]
pub struct Arguments {
@ -39,10 +19,12 @@ pub struct Arguments {
}
impl Arguments {
    /// Parses the process command line into an `Arguments` value.
    ///
    /// Thin wrapper over clap's derived `Parser::parse`, kept so call
    /// sites don't need to import the `Parser` trait themselves.
    pub fn parse_args() -> Arguments {
        // clippy::let_and_return — return the expression directly
        // instead of binding it to a temporary first.
        Arguments::parse()
    }
}
#[derive(Clone)]

@ -1,7 +1,10 @@
use actix_web::{error, web, Error};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub mod page;
use page::Page as Page;
pub type Pool = r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>;
pub type Connection = r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>;
@ -11,14 +14,6 @@ fn uuid4() -> String {
id.to_string()
}
/// A wiki page row as read from the `pages` table.
#[derive(Debug, Serialize, Deserialize)]
pub struct Page {
// Normalized (underscore-form) domain key.
pub domain: String,
// Display form of the domain — populated from REPLACE(domain, '_', ' ') in the SELECTs; confirm all call sites do the same.
pub domain_name: String,
// Normalized page identifier within the domain.
pub page_name: String,
// Raw page body; some queries leave this empty — TODO confirm callers tolerate "".
pub page_text: String,
}
pub struct Domain {
pub domain: String,
pub domain_name: String,
@ -58,13 +53,16 @@ pub async fn get_pages_by_domain(
web::block(move || {
let mut stmt = conn
.prepare("SELECT domain, REPLACE(domain, '_', ' '), page_name from pages WHERE active=true and domain=?")?;
.prepare("SELECT domain, REPLACE(domain, '_', ' '), page_name, REPLACE(page_name, '_', ' ') from pages WHERE active=true and domain=?")?;
stmt.query_map([domain], |row| {
Ok(Page {
domain: row.get(0)?,
domain_name: row.get(1)?,
page_name: row.get(2)?,
page: row.get(2)?,
page_name: row.get(3)?,
parent_domain: String::from(""),
parent_page: String::from(""),
page_text: String::from(""),
})
})
@ -87,14 +85,17 @@ pub async fn get_page_by_name(
web::block(move || {
let mut stmt = conn
.prepare("SELECT domain, REPLACE(domain, '_', ' '), page_name, page_text from pages WHERE active=true and domain=? and page_name=?")?;
.prepare("SELECT domain, REPLACE(domain, '_', ' '), page_name, REPLACE(page_name, '_', ''), page_text from pages WHERE active=true and domain=? and page_name=?")?;
stmt.query_map([domain, pagename], |row| {
Ok(Page {
domain: row.get(0)?,
domain_name: row.get(1)?,
page_name: row.get(2)?,
page_text: row.get(3)?,
page: row.get(2)?,
page_name: row.get(3)?,
page_text: row.get(4)?,
parent_domain: String::from(""),
parent_page: String::from(""),
})
})
.and_then(Iterator::collect)

@ -0,0 +1,25 @@
use actix_web::{body::BoxBody, Responder, HttpRequest, http::header::ContentType, HttpResponse};
use serde::{Deserialize, Serialize};
/// JSON-serializable page payload returned to the frontend.
#[derive(Serialize, Deserialize)]
pub struct Page {
// Normalized (underscore-form) domain key.
pub domain: String,
// Display form of the domain (underscores replaced by spaces in the SQL).
pub domain_name: String,
// Normalized (underscore-form) page key — presumably the `page_name` DB column; verify against the SELECT column order.
pub page: String,
// Display form of the page name.
pub page_name: String,
// Raw page body; filled by some queries, left "" by others.
pub page_text: String,
// Parent linkage — set to "" by the DB queries; NOTE(review): looks like it is filled in elsewhere — confirm.
pub parent_domain: String,
pub parent_page: String,
}
impl Responder for Page {
    type Body = BoxBody;

    /// Serializes the page to JSON and wraps it in a `200 OK` response
    /// with a `Content-Type: application/json` header.
    fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
        // A struct made only of `String` fields cannot fail to serialize.
        let json = serde_json::to_string(&self).unwrap();
        let mut builder = HttpResponse::Ok();
        builder.content_type(ContentType::json());
        builder.body(json)
    }
}

@ -6,7 +6,7 @@ use askama_actix::TemplateToResponse;
use crate::commons::AppData;
use crate::db;
use crate::db::Page;
use crate::db::page::Page;
#[derive(Template)]
#[template(path = "domain.html")]

@ -1,9 +1,8 @@
use actix_web::body::BoxBody;
use actix_web::http::header::ContentType;
use actix_web::web::Query;
use actix_web::{get, put, Responder, HttpRequest};
use actix_web::{get, put, Responder};
use actix_web::{web, HttpResponse};
use serde::{Deserialize, Serialize};
use serde::{Deserialize};
use askama_actix::Template;
use askama_actix::TemplateToResponse;
@ -28,27 +27,6 @@ struct QueryParams {
pub fromPage: Option<String>,
}
/// JSON payload for a page, as exchanged with the frontend.
#[derive(Serialize, Deserialize)]
pub struct Page {
// Normalized (underscore-form) domain key.
pub domain: String,
// Normalized page identifier within the domain.
pub page: String,
// Raw page body text.
pub page_text: String,
// Parent linkage — NOTE(review): semantics not visible here; presumably the page this one was created from — confirm against callers.
pub parent_domain: String,
pub parent_page: String,
}
impl Responder for Page {
    type Body = BoxBody;

    /// Renders the page as a JSON `200 OK` response.
    fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
        // Serializing a struct of plain `String`s cannot fail in practice.
        let payload = serde_json::to_string(&self).unwrap();
        // Build the response with an explicit JSON content type.
        let mut response = HttpResponse::Ok();
        response.content_type(ContentType::json());
        response.body(payload)
    }
}
fn new_page_text(
page_name: String,
domain_from: &Option<String>,
@ -99,7 +77,7 @@ async fn page(
let app_name = data.app_name.to_owned() + " - " + page_name.as_str();
let base_url = data.base_url.to_owned();
//let init = String::from("init();");
let init = format!("init({:?}, {:?});", domain, page_name);
let init = format!("init({:?}, {:?}, {:?});", domain, page_name, base_url);
PageTemplate {
app_name,
base_url,

@ -360,10 +360,11 @@ function onparentbutton(e) {
}
function init(domain, page) {
function init(domain, page, baseUrl) {
let cheezenotes = document.getElementById('cheezenotes');
window.domain = domain;
window.page = page;
window.baseUrl = baseUrl;
dpwidth(cheezenotes);

@ -1,5 +1,4 @@
import { saveSelection, loadSelection } from "./caret.js";
import { normalizePagename } from "./tools.js";
function setEditable() {
let editModeButton = document.getElementById('editModeButton');
@ -584,17 +583,26 @@ function formatLink(link) {
if (href.match(/^[^\:\/]+\/.*$/)) {
href = '/' + href;
}
let datahref = href;
href = formatUrl(href);
if (libelle == '') {
return '<span class="token">[](</span><a class="link" data-href="' + href + '" href="' + href + '">' + url + '</a><span class="token">)</span>';
return '<span class="token">[](</span><a class="link" data-href="' + datahref + '" href="' + href + '">' + url + '</a><span class="token">)</span>';
} else {
if (libelle.startsWith('!')) {
return '<span class="token">[' + libelle + '</span><img style="vertical-align: top; max-height: 1rem;" class="image" src="' + href + '" title="' + libelle.substring(1) + '" /><span class="token">](' + url + ')</span>';
} else {
return '<span class="token">[</span><a class="link" data-href="' + href + '" href="' + href + '">' + libelle + '</a><span class="token">](' + url + ')</span>';
return '<span class="token">[</span><a class="link" data-href="' + datahref + '" href="' + href + '">' + libelle + '</a><span class="token">](' + url + ')</span>';
}
}
}
// Turns a relative page reference into an absolute URL.
// Absolute URLs (anything with a "scheme://" prefix) pass through untouched;
// relative ones are prefixed with the app's base URL (window.baseUrl, set by
// init) and have their spaces converted to underscores to match normalized
// page names.
function formatUrl(url) {
    if (url.match(/\w+:\/\//)) {
        return url;
    }
    // BUG FIX: String#replace with a *string* pattern replaces only the
    // FIRST occurrence; use a global regex so every space becomes "_".
    return window.baseUrl + url.replace(/ /g, '_');
}
function addBold(line) {
line = line.replace(/\*\*([^\s].*?)\*\*/ig, '<b><span class="token">**</span>$1<span class="token">**</span></b>');
line = line.replace(/__([^\s].*?)__/ig, '<b><span class="token">__</span>$1<span class="token">__</span></b>');

@ -1,6 +0,0 @@
// Normalizes a page name to slug form: strips diacritics (via NFKD
// decomposition), lowercases, expands the œ/æ ligatures, converts whitespace
// runs to underscores, drops any remaining non-[a-z0-9_] characters and
// collapses repeated underscores.
function normalizeName(pagename) {
    // BUG FIX: the body referenced an undefined `str` instead of the
    // `pagename` parameter, throwing a ReferenceError on every call.
    return pagename.normalize("NFKD").replace(/\p{Diacritic}/gu, "").toLowerCase().replace(/œ/g, "oe").replace(/æ/g, "ae").replace(/\s+/g, "_").replace(/[^a-z0-9_]/g, "").replace(/_+/g, "_");
}
export { normalizeName };
Loading…
Cancel
Save