added sqlite database, enhanced charts, bumped version

This commit is contained in:
aglkm
2024-12-15 13:40:35 +03:00
parent 1f0cc49e23
commit 9a0ab8ba00
13 changed files with 579 additions and 136 deletions

View File

@@ -191,6 +191,7 @@ pub struct ExplorerConfig {
pub coingecko_api: String,
pub public_api: String,
pub external_nodes: Vec<String>,
pub database: String,
}
impl ExplorerConfig {
@@ -208,6 +209,7 @@ impl ExplorerConfig {
coingecko_api: String::new(),
public_api: String::new(),
external_nodes: Vec::new(),
database: String::new(),
}
}
}
@@ -250,7 +252,7 @@ pub struct Statistics {
pub txns: Vec<String>,
pub fees: Vec<String>,
// UTXOs
pub utxo_count: Vec<String>,
pub utxos: Vec<String>,
// Kernels
pub kernels: Vec<String>,
}
@@ -265,7 +267,7 @@ impl Statistics {
hashrate: Vec::new(),
txns: Vec::new(),
fees: Vec::new(),
utxo_count: Vec::new(),
utxos: Vec::new(),
kernels: Vec::new(),
}
}

40
src/database.rs Normal file
View File

@@ -0,0 +1,40 @@
use rusqlite::{Connection, Result};
/// Opens (or creates) the SQLite database file at `db_name` and returns the connection.
///
/// # Errors
/// Returns the underlying `rusqlite::Error` if the file cannot be opened/created.
pub fn open_db_connection(db_name: &str) -> Result<Connection> {
    // Connection::open already returns Result<Connection>; no need to
    // `?`-unwrap and re-wrap in Ok (clippy: let_and_return).
    Connection::open(db_name)
}
/// Creates the `statistics` table if it does not already exist.
///
/// One row per day of collected stats; `date` is UNIQUE so a repeated
/// `INSERT OR IGNORE` for the same day is a no-op.
///
/// # Errors
/// Returns the underlying `rusqlite::Error` if the DDL statement fails.
pub fn create_statistics_table(conn: &Connection) -> Result<()> {
    const CREATE_TABLE_SQL: &str = "CREATE TABLE IF NOT EXISTS statistics (
id INTEGER PRIMARY KEY,
date TEXT NOT NULL UNIQUE,
hashrate TEXT NOT NULL,
txns TEXT NOT NULL,
fees TEXT NOT NULL,
utxos TEXT NOT NULL,
kernels TEXT NOT NULL
)";
    // DDL takes no bind parameters; discard the affected-row count.
    conn.execute(CREATE_TABLE_SQL, ()).map(|_| ())
}
/// Reads an entire column (`row_name`) from the `statistics` table,
/// ordered by insertion id, as a vector of strings.
///
/// NOTE: SQL cannot bind identifiers, so `row_name` is interpolated into
/// the query text. Callers must pass only trusted, hard-coded column
/// names (date/hashrate/txns/fees/utxos/kernels) — never user input.
///
/// # Errors
/// Propagates any `rusqlite::Error` from preparing or running the query
/// (previously these were `unwrap()`ed, panicking instead of returning Err).
pub fn read_row(conn: &Connection, row_name: &str) -> Result<Vec<String>> {
    let sql = format!("SELECT {} FROM statistics ORDER BY id", row_name);
    let mut stmt = conn.prepare(&sql)?;
    // query_map yields Result<String> per row; collecting into
    // Result<Vec<String>> short-circuits on the first error.
    let rows = stmt.query_map([], |row| row.get(0))?;
    rows.collect()
}

75
src/exconfig.rs Normal file
View File

@@ -0,0 +1,75 @@
use config::Config;
use std::fs;
use lazy_static::lazy_static;
use crate::data::ExplorerConfig;
// Static explorer config, read once from Explorer.toml on first access.
// A broken or incomplete config is fatal: the explorer cannot run without it,
// so mandatory keys panic with an actionable message.
lazy_static! {
    pub static ref CONFIG: ExplorerConfig = {
        let mut cfg = ExplorerConfig::new();
        let toml = Config::builder()
            .add_source(config::File::with_name("Explorer"))
            .build()
            .expect("config: failed to load Explorer.toml");
        // Mandatory settings: a missing key is a fatal misconfiguration.
        cfg.host = toml.get_string("host").expect("config: 'host' is required");
        cfg.proto = toml.get_string("proto").expect("config: 'proto' is required");
        cfg.coingecko_api = toml.get_string("coingecko_api").expect("config: 'coingecko_api' is required");
        cfg.public_api = toml.get_string("public_api").expect("config: 'public_api' is required");
        // Optional settings: keep the ExplorerConfig::new() default when absent.
        if let Ok(v) = toml.get_string("port") {
            cfg.port = v;
        }
        if let Ok(v) = toml.get_string("user") {
            cfg.user = v;
        }
        if let Ok(v) = toml.get_string("api_secret_path") {
            cfg.api_secret_path = v;
        }
        if let Ok(v) = toml.get_string("foreign_api_secret_path") {
            cfg.foreign_api_secret_path = v;
        }
        if let Ok(v) = toml.get_string("grin_dir") {
            cfg.grin_dir = v;
        }
        // Optional list of external node endpoints (no clone needed: the
        // array returned by get_array is already owned).
        if let Ok(nodes) = toml.get_array("external_nodes") {
            for endpoint in nodes {
                cfg.external_nodes.push(
                    endpoint
                        .into_string()
                        .expect("config: 'external_nodes' entries must be strings"),
                );
            }
        }
        if let Ok(v) = toml.get_string("database") {
            cfg.database = v;
        }
        // Expand '~' in configured paths and load API secrets from disk.
        // NOTE(review): secrets are stored verbatim, including any trailing
        // newline from the file — confirm downstream consumers trim if needed.
        if !cfg.api_secret_path.is_empty() {
            let path = shellexpand::tilde(&cfg.api_secret_path).into_owned();
            cfg.api_secret = fs::read_to_string(&path)
                .expect("config: failed to read api_secret file");
        }
        if !cfg.foreign_api_secret_path.is_empty() {
            let path = shellexpand::tilde(&cfg.foreign_api_secret_path).into_owned();
            cfg.foreign_api_secret = fs::read_to_string(&path)
                .expect("config: failed to read foreign_api_secret file");
        }
        if !cfg.grin_dir.is_empty() {
            cfg.grin_dir = shellexpand::tilde(&cfg.grin_dir).into_owned();
        }
        cfg
    };
}

View File

@@ -13,9 +13,11 @@ use serde_json::Value;
use tera_thousands::separate_with_commas;
use crate::data::{Block, Dashboard, Kernel, Output, Statistics, Transactions, OUTPUT_SIZE, KERNEL_SIZE};
use crate::requests::CONFIG;
use crate::exconfig::CONFIG;
mod data;
mod database;
mod exconfig;
mod requests;
mod worker;
@@ -219,24 +221,112 @@ pub async fn search(input: Option<&str>) -> Either<Template, Redirect> {
fn stats(statistics: &State<Arc<Mutex<Statistics>>>) -> Template {
let data = statistics.lock().unwrap();
// Get the length of our data vectors (all vectors are the same size)
let len = data.date.len();
// Construct chart periods
let mut month = 0;
let mut six_months = 0;
let mut year = 0;
// usize can't be negative, so check the length of the vector before subtracting
if len > 30 {
month = len - 30;
}
if len > (30 * 6) {
six_months = len - (30 * 6);
}
if len > 365 {
year = len - 365;
}
let mut m_date = data.date.clone();
let mut m_hashrate = data.hashrate.clone();
let mut m_txns = data.txns.clone();
let mut m_fees = data.fees.clone();
let mut m_utxos = data.utxos.clone();
let mut m_kernels = data.kernels.clone();
// Get stats for a month period
if month > 0 {
m_date = data.date.get(month..).unwrap().to_vec();
m_hashrate = data.hashrate.get(month..).unwrap().to_vec();
m_txns = data.txns.get(month..).unwrap().to_vec();
m_fees = data.fees.get(month..).unwrap().to_vec();
m_utxos = data.utxos.get(month..).unwrap().to_vec();
m_kernels = data.kernels.get(month..).unwrap().to_vec();
}
let mut sm_date = data.date.clone();
let mut sm_hashrate = data.hashrate.clone();
let mut sm_txns = data.txns.clone();
let mut sm_fees = data.fees.clone();
let mut sm_utxos = data.utxos.clone();
let mut sm_kernels = data.kernels.clone();
// Get stats for six months period
if six_months > 0 {
sm_date = data.date.get(six_months..).unwrap().to_vec();
sm_hashrate = data.hashrate.get(six_months..).unwrap().to_vec();
sm_txns = data.txns.get(six_months..).unwrap().to_vec();
sm_fees = data.fees.get(six_months..).unwrap().to_vec();
sm_utxos = data.utxos.get(six_months..).unwrap().to_vec();
sm_kernels = data.kernels.get(six_months..).unwrap().to_vec();
}
let mut y_date = data.date.clone();
let mut y_hashrate = data.hashrate.clone();
let mut y_txns = data.txns.clone();
let mut y_fees = data.fees.clone();
let mut y_utxos = data.utxos.clone();
let mut y_kernels = data.kernels.clone();
// Get stats for a year period
if year > 0 {
y_date = data.date.get(year..).unwrap().to_vec();
y_hashrate = data.hashrate.get(year..).unwrap().to_vec();
y_txns = data.txns.get(year..).unwrap().to_vec();
y_fees = data.fees.get(year..).unwrap().to_vec();
y_utxos = data.utxos.get(year..).unwrap().to_vec();
y_kernels = data.kernels.get(year..).unwrap().to_vec();
}
Template::render("stats", context! {
route: "stats",
date: data.date.clone(),
user_agent: data.user_agent.clone(),
count: data.count.clone(),
total: data.total,
hashrate: data.hashrate.clone(),
txns: data.txns.clone(),
fees: data.fees.clone(),
utxo_count: data.utxo_count.clone(),
kernels: data.kernels.clone(),
route: "stats",
user_agent: data.user_agent.clone(),
count: data.count.clone(),
total: data.total,
date: data.date.clone(),
hashrate: data.hashrate.clone(),
txns: data.txns.clone(),
fees: data.fees.clone(),
utxos: data.utxos.clone(),
kernels: data.kernels.clone(),
m_date,
m_hashrate,
m_txns,
m_fees,
m_utxos,
m_kernels,
sm_date,
sm_hashrate,
sm_txns,
sm_fees,
sm_utxos,
sm_kernels,
y_date,
y_hashrate,
y_txns,
y_fees,
y_utxos,
y_kernels,
output_size: OUTPUT_SIZE,
kernel_size: KERNEL_SIZE,
})
}
// Rendering Grinflation page.
// Rendering Emission page.
#[get("/emission")]
fn emission(dashboard: &State<Arc<Mutex<Dashboard>>>) -> Template {
let data = dashboard.lock().unwrap();
@@ -752,7 +842,36 @@ async fn main() {
let mut ready_data = false;
let mut ready_stats = false;
let mut ready_db = false;
let mut date = "".to_string();
// Initializing db and table
if CONFIG.database.is_empty() == false {
info!("initializing db.");
let conn = database::open_db_connection(&CONFIG.database).expect("failed to open database");
database::create_statistics_table(&conn).expect("failed to create statistics table");
let mut s = stats.lock().unwrap();
let mut d = dash.lock().unwrap();
// Reading the database
s.date = database::read_row(&conn, "date").unwrap();
s.hashrate = database::read_row(&conn, "hashrate").unwrap();
s.txns = database::read_row(&conn, "txns").unwrap();
s.fees = database::read_row(&conn, "fees").unwrap();
s.utxos = database::read_row(&conn, "utxos").unwrap();
s.kernels = database::read_row(&conn, "kernels").unwrap();
// Read the latest utxo count here, because the worker::stats thread only refreshes it on the next day's run
if s.utxos.is_empty() == false {
d.utxo_count = s.utxos.get(s.utxos.len() - 1).unwrap().to_string();
}
// Get the latest date
if s.date.is_empty() == false {
date = s.date.get(s.date.len() - 1).unwrap().to_string();
}
}
// Collecting main data
tokio::spawn(async move {
@@ -775,7 +894,7 @@ async fn main() {
let date_now = format!("\"{}\"", Utc::now().format("%d-%m-%Y"));
if date.is_empty() || date != date_now {
if date != date_now {
date = date_now;
let result = worker::stats(dash_clone.clone(), txns_clone.clone(),
stats_clone.clone()).await;
@@ -784,6 +903,7 @@ async fn main() {
Ok(_v) => {
if ready_stats == false {
ready_stats = true;
ready_db = true;
info!("worker::stats ready.");
}
},
@@ -792,6 +912,10 @@ async fn main() {
error!("{}", e);
},
}
// Got stats from DB, indicate ready state
} else if ready_db == false && CONFIG.database.is_empty() == false {
info!("worker::stats ready.");
ready_db = true;
}
tokio::time::sleep(Duration::from_secs(15)).await;

View File

@@ -6,77 +6,11 @@ use fs_extra::dir::get_size;
use humantime::format_duration;
use std::time::Duration;
use chrono::{Utc, DateTime};
use config::Config;
use std::collections::HashMap;
use std::fs;
use lazy_static::lazy_static;
use crate::data::{Block, Dashboard, ExplorerConfig, Kernel, Output, Statistics, Transactions};
use crate::data::{Block, Dashboard, Kernel, Output, Statistics, Transactions};
use crate::data::{KERNEL_WEIGHT, INPUT_WEIGHT, OUTPUT_WEIGHT, KERNEL_SIZE, INPUT_SIZE, OUTPUT_SIZE};
// Static explorer config structure
lazy_static! {
pub static ref CONFIG: ExplorerConfig = {
let mut cfg = ExplorerConfig::new();
let toml = Config::builder().add_source(config::File::with_name("Explorer")).build().unwrap();
// Mandatory settings
cfg.host = toml.get_string("host").unwrap();
cfg.proto = toml.get_string("proto").unwrap();
cfg.coingecko_api = toml.get_string("coingecko_api").unwrap();
cfg.public_api = toml.get_string("public_api").unwrap();
// Optional settings
match toml.get_string("port") {
Ok(v) => cfg.port = v,
Err(_e) => {},
}
match toml.get_string("user") {
Ok(v) => cfg.user = v,
Err(_e) => {},
}
match toml.get_string("api_secret_path") {
Ok(v) => cfg.api_secret_path = v,
Err(_e) => {},
}
match toml.get_string("foreign_api_secret_path") {
Ok(v) => cfg.foreign_api_secret_path = v,
Err(_e) => {},
}
match toml.get_string("grin_dir") {
Ok(v) => cfg.grin_dir = v,
Err(_e) => {},
}
match toml.get_array("external_nodes") {
Ok(nodes) => {
for endpoint in nodes.clone() {
cfg.external_nodes.push(endpoint.into_string().unwrap());
}
},
Err(_e) => {},
}
if cfg.api_secret_path.is_empty() == false {
cfg.api_secret = fs::read_to_string(format!("{}", shellexpand::tilde(&cfg.api_secret_path))).unwrap();
}
if cfg.foreign_api_secret_path.is_empty() == false {
cfg.foreign_api_secret = fs::read_to_string(format!("{}", shellexpand::tilde(&cfg.foreign_api_secret_path))).unwrap();
}
if cfg.grin_dir.is_empty() == false {
cfg.grin_dir = format!("{}", shellexpand::tilde(&cfg.grin_dir));
}
cfg
};
}
use crate::exconfig::CONFIG;
// RPC requests to grin node.

View File

@@ -5,7 +5,8 @@ use crate::data::Block;
use crate::data::Dashboard;
use crate::data::Statistics;
use crate::data::Transactions;
use crate::database;
use crate::exconfig::CONFIG;
use crate::requests;
@@ -16,7 +17,7 @@ pub async fn data(dash: Arc<Mutex<Dashboard>>, blocks: Arc<Mutex<Vec<Block>>>,
let _ = requests::get_mempool(dash.clone()).await?;
let _ = requests::get_connected_peers(dash.clone(), stats.clone()).await?;
let _ = requests::get_market(dash.clone()).await?;
requests::get_disk_usage(dash.clone())?;
let _ = requests::get_disk_usage(dash.clone())?;
let _ = requests::get_mining_stats(dash.clone()).await?;
let _ = requests::get_recent_blocks(dash.clone(), blocks.clone()).await?;
let _ = requests::get_txn_stats(dash.clone(), txns.clone()).await?;
@@ -26,32 +27,37 @@ pub async fn data(dash: Arc<Mutex<Dashboard>>, blocks: Arc<Mutex<Vec<Block>>>,
// Collecting statistics.
pub async fn stats(dash: Arc<Mutex<Dashboard>>, txns: Arc<Mutex<Transactions>>, stats: Arc<Mutex<Statistics>>) -> Result<(), anyhow::Error> {
let _ = requests::get_unspent_outputs(dash.clone()).await?;
let mut stats = stats.lock().unwrap();
let dash = dash.lock().unwrap();
let txns = txns.lock().unwrap();
if stats.date.len() == 30 {
stats.date.remove(0);
stats.hashrate.remove(0);
stats.txns.remove(0);
stats.fees.remove(0);
stats.utxo_count.remove(0);
stats.kernels.remove(0);
}
stats.date.push(format!("\"{}\"", Utc::now().format("%d-%m-%Y")));
stats.hashrate.push(dash.hashrate_kgs.clone());
stats.txns.push(txns.period_24h.clone());
stats.fees.push(txns.fees_24h.clone());
stats.utxo_count.push(dash.utxo_count.clone());
stats.utxos.push(dash.utxo_count.clone());
let mut kernel_count = 0;
if dash.kernel_mmr_size.is_empty() == false {
let kernel_count = dash.kernel_mmr_size.parse::<u64>().unwrap() / 2;
kernel_count = dash.kernel_mmr_size.parse::<u64>().unwrap() / 2;
stats.kernels.push(kernel_count.to_string());
}
if CONFIG.database.is_empty() == false {
// Open the database
let conn = database::open_db_connection(&CONFIG.database).expect("failed to open database");
// Insert new data into the database
conn.execute(
"INSERT OR IGNORE INTO statistics (date, hashrate, txns, fees, utxos, kernels) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
(&format!("\"{}\"", Utc::now().format("%d-%m-%Y")), &dash.hashrate_kgs.clone(), &txns.period_24h.clone(), &txns.fees_24h.clone(), &dash.utxo_count.clone(), &kernel_count.to_string()),
)?;
}
Ok(())
}