diff --git a/.gitignore b/.gitignore index 1734fcb..94cc732 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ output/ *.swp *.lock /input +config.toml diff --git a/Cargo.toml b/Cargo.toml index 6c6e527..331e0c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,11 @@ edition = "2021" [dependencies] askama = "0.15.5" -chrono = "0.4.44" +chrono = { version = "0.4.44", features = ["serde"] } csv = "1.4.0" -serde = "1.0.228" +serde = { version = "1.0.228", features = ["derive"] } +sqlx = { version = "0.8", features = ["runtime-tokio", "mysql", "chrono", "bigdecimal"] } +tokio = { version = "1", features = ["full"] } +toml = "0.8" +anyhow = "1" +bigdecimal = { version = "0.4", features = ["serde"] } diff --git a/config.example.toml b/config.example.toml new file mode 100644 index 0000000..235b876 --- /dev/null +++ b/config.example.toml @@ -0,0 +1,6 @@ +[database] +host = "localhost" +port = 3306 +user = "root" +password = "" +name = "rusty_petroleum" diff --git a/migrations/001_initial_schema.sql b/migrations/001_initial_schema.sql new file mode 100644 index 0000000..c1d74ba --- /dev/null +++ b/migrations/001_initial_schema.sql @@ -0,0 +1,52 @@ +-- Initial schema for rusty-petroleum +-- Run this against your MariaDB database before importing data + +CREATE DATABASE IF NOT EXISTS rusty_petroleum; +USE rusty_petroleum; + +CREATE TABLE IF NOT EXISTS customers ( + id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY, + customer_number VARCHAR(50) NOT NULL UNIQUE, + card_report_group TINYINT UNSIGNED NOT NULL DEFAULT 0, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + INDEX idx_customer_number (customer_number) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; + +CREATE TABLE IF NOT EXISTS cards ( + id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY, + card_number VARCHAR(50) NOT NULL UNIQUE, + card_type VARCHAR(50), + customer_id INT UNSIGNED NULL, + created_at TIMESTAMP DEFAULT 
CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + INDEX idx_card_number (card_number), + INDEX idx_customer_id (customer_id), + FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE SET NULL +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; + +CREATE TABLE IF NOT EXISTS transactions ( + id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, + transaction_date DATETIME NOT NULL, + batch_number VARCHAR(20) NOT NULL, + amount DECIMAL(10,2) NOT NULL, + volume DECIMAL(10,3) NOT NULL, + price DECIMAL(8,4) NOT NULL, + quality_code INT NOT NULL, + quality_name VARCHAR(50) NOT NULL, + station VARCHAR(20) NOT NULL, + terminal VARCHAR(10) NOT NULL, + pump VARCHAR(10) NOT NULL, + receipt VARCHAR(20) NOT NULL, + control_number VARCHAR(20), + card_id INT UNSIGNED NOT NULL, + customer_id INT UNSIGNED NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + INDEX idx_transaction_date (transaction_date), + INDEX idx_batch_number (batch_number), + INDEX idx_card_id (card_id), + INDEX idx_customer_id (customer_id), + INDEX idx_station (station), + FOREIGN KEY (card_id) REFERENCES cards(id), + FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE SET NULL +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; diff --git a/src/commands/import.rs b/src/commands/import.rs new file mode 100644 index 0000000..a24d11c --- /dev/null +++ b/src/commands/import.rs @@ -0,0 +1,172 @@ +use crate::db::models::{NewCard, NewCustomer, NewTransaction}; +use crate::db::Repository; +use chrono::NaiveDateTime; +use csv::ReaderBuilder; +use std::collections::HashMap; +use std::fs::File; +use std::path::Path; + +pub async fn run_import(csv_path: &Path, repo: &Repository) -> anyhow::Result<()> { + println!("Läser CSV-fil: {:?}", csv_path); + + let file = File::open(csv_path)?; + let mut rdr = ReaderBuilder::new() + .delimiter(b'\t') + .has_headers(true) + .flexible(true) + .from_reader(file); + + let mut transactions = 
Vec::new(); + let mut seen_customers: HashMap = HashMap::new(); + let mut seen_cards: HashMap, Option)> = HashMap::new(); + + for result in rdr.records() { + let record = result?; + if let Some(tx) = parse_record(&record)? { + let card_report_group: u8 = tx.card_report_group_number.parse().unwrap_or(0); + if !seen_customers.contains_key(&tx.customer_number) && !tx.customer_number.is_empty() { + seen_customers.insert(tx.customer_number.clone(), card_report_group); + } + if !seen_cards.contains_key(&tx.card_number) { + seen_cards.insert(tx.card_number.clone(), (Some(tx.card_type.clone()), None)); + } + transactions.push(tx); + } + } + + println!("Hittade {} transaktioner", transactions.len()); + println!("Unika kunder: {}", seen_customers.len()); + println!("Unika kort: {}", seen_cards.len()); + + println!("\nImporterar kunder..."); + let mut customer_ids: HashMap = HashMap::new(); + for (customer_number, card_report_group) in &seen_customers { + let new_customer = NewCustomer { + customer_number: customer_number.clone(), + card_report_group: *card_report_group, + }; + let id = repo.upsert_customer(&new_customer).await?; + customer_ids.insert(customer_number.clone(), id); + println!(" Kund {} -> id {}", customer_number, id); + } + + println!("\nImporterar kort..."); + let mut card_ids: HashMap = HashMap::new(); + for (card_number, (card_type, _)) in &mut seen_cards { + let customer_id = customer_ids.get(card_number).copied(); + let new_card = NewCard { + card_number: card_number.clone(), + card_type: card_type.clone(), + customer_id, + }; + let id = repo.upsert_card(&new_card).await?; + card_ids.insert(card_number.clone(), id); + *card_type = None; + } + + println!("\nImporterar transaktioner..."); + let batch_size = 500; + let mut total_inserted = 0u64; + let mut batch: Vec = Vec::with_capacity(batch_size); + + for tx in transactions { + let card_id = *card_ids.get(&tx.card_number).unwrap_or(&0); + let customer_id = customer_ids.get(&tx.customer_number).copied(); + 
+        let new_tx = NewTransaction {
+            transaction_date: tx.date,
+            batch_number: tx.batch_number,
+            amount: tx.amount,
+            volume: tx.volume,
+            price: tx.price,
+            quality_code: tx.quality,
+            quality_name: tx.quality_name,
+            station: tx.station,
+            terminal: tx.terminal,
+            pump: tx.pump,
+            receipt: tx.receipt,
+            control_number: if tx.control_number.is_empty() { None } else { Some(tx.control_number) },
+            card_id,
+            customer_id,
+        };
+
+        batch.push(new_tx);
+
+        // Flush in chunks of `batch_size` to keep each multi-row INSERT bounded.
+        if batch.len() >= batch_size {
+            let inserted = repo.insert_transactions_batch(&batch).await?;
+            total_inserted += inserted;
+            println!(" Inlagda {} transaktioner (totalt: {})", inserted, total_inserted);
+            batch.clear();
+        }
+    }
+
+    if !batch.is_empty() {
+        let inserted = repo.insert_transactions_batch(&batch).await?;
+        total_inserted += inserted;
+        println!(" Inlagda {} transaktioner (totalt: {})", inserted, total_inserted);
+    }
+
+    // FIX: missing space in the user-facing message ("Klart!Importerade").
+    println!("\nKlart! Importerade {} transaktioner", total_inserted);
+
+    Ok(())
+}
+
+/// One parsed row of the tab-separated export file.
+struct CsvTransaction {
+    date: NaiveDateTime,
+    batch_number: String,
+    amount: f64,
+    volume: f64,
+    price: f64,
+    quality: i32,
+    quality_name: String,
+    card_number: String,
+    card_type: String,
+    customer_number: String,
+    station: String,
+    terminal: String,
+    pump: String,
+    receipt: String,
+    card_report_group_number: String,
+    control_number: String,
+}
+
+/// Column accessor that treats missing columns as empty strings
+/// (the reader is configured with `.flexible(true)`).
+fn get_field(record: &csv::StringRecord, index: usize) -> &str {
+    record.get(index).unwrap_or("")
+}
+
+/// Parse one CSV record into a `CsvTransaction`.
+///
+/// Returns `Ok(None)` for rows that should be skipped (non-positive amount,
+/// or — below — a missing customer number); `Err` only for an unparseable date.
+fn parse_record(record: &csv::StringRecord) -> anyhow::Result<Option<CsvTransaction>> {
+    let date_str = get_field(record, 0);
+    // Two accepted date layouts: ISO and US 12-hour clock.
+    let date = NaiveDateTime::parse_from_str(date_str, "%Y-%m-%d %H:%M:%S")
+        .or_else(|_| NaiveDateTime::parse_from_str(date_str, "%m/%d/%Y %I:%M:%S %p"))
+        .map_err(|e| anyhow::anyhow!("Failed to parse date '{}': {}", date_str, e))?;
+
+    let amount: f64 = get_field(record, 2).parse().unwrap_or(0.0);
+
+    // Credit/zero rows are not imported.
+    if amount <= 0.0 {
+        return Ok(None);
+    }
+
+    let customer_number = get_field(record, 9).to_string();
+    if customer_number.is_empty() {
return Ok(None); + } + + Ok(Some(CsvTransaction { + date, + batch_number: get_field(record, 1).to_string(), + amount, + volume: get_field(record, 3).parse().unwrap_or(0.0), + price: get_field(record, 4).parse().unwrap_or(0.0), + quality: get_field(record, 5).parse().unwrap_or(0), + quality_name: get_field(record, 6).to_string(), + card_number: get_field(record, 7).to_string(), + card_type: get_field(record, 8).to_string(), + customer_number, + station: get_field(record, 10).to_string(), + terminal: get_field(record, 11).to_string(), + pump: get_field(record, 12).to_string(), + receipt: get_field(record, 13).to_string(), + card_report_group_number: get_field(record, 14).to_string(), + control_number: get_field(record, 15).to_string(), + })) +} diff --git a/src/commands/mod.rs b/src/commands/mod.rs new file mode 100644 index 0000000..657d32b --- /dev/null +++ b/src/commands/mod.rs @@ -0,0 +1,3 @@ +pub mod import; + +pub use import::run_import; diff --git a/src/config.rs b/src/config.rs new file mode 100644 index 0000000..752bbfd --- /dev/null +++ b/src/config.rs @@ -0,0 +1,69 @@ +use std::fs; +use std::path::Path; + +#[derive(Debug, Clone)] +pub struct Config { + pub database: DatabaseConfig, +} + +#[derive(Debug, Clone)] +pub struct DatabaseConfig { + pub host: String, + pub port: u16, + pub user: String, + pub password: String, + pub name: String, +} + +impl DatabaseConfig { + pub fn connection_url(&self) -> String { + format!( + "mysql://{}:{}@{}:{}/{}", + self.user, self.password, self.host, self.port, self.name + ) + } +} + +impl Config { + pub fn load() -> anyhow::Result { + Self::load_from_path(Path::new("config.toml")) + } + + pub fn load_from_path(path: &Path) -> anyhow::Result { + let contents = fs::read_to_string(path) + .map_err(|e| anyhow::anyhow!("Failed to read config file: {}", e))?; + + let config: TomlConfig = toml::from_str(&contents) + .map_err(|e| anyhow::anyhow!("Failed to parse config file: {}", e))?; + + Ok(config.into()) + } +} + 
+#[derive(serde::Deserialize)] +struct TomlConfig { + database: TomlDatabaseConfig, +} + +#[derive(serde::Deserialize)] +struct TomlDatabaseConfig { + host: String, + port: u16, + user: String, + password: String, + name: String, +} + +impl From for Config { + fn from(toml: TomlConfig) -> Self { + Config { + database: DatabaseConfig { + host: toml.database.host, + port: toml.database.port, + user: toml.database.user, + password: toml.database.password, + name: toml.database.name, + }, + } + } +} diff --git a/src/db/connection.rs b/src/db/connection.rs new file mode 100644 index 0000000..0a7fa0a --- /dev/null +++ b/src/db/connection.rs @@ -0,0 +1,11 @@ +use sqlx::mysql::MySqlPoolOptions; +use sqlx::MySqlPool; + +pub async fn create_pool(database_url: &str) -> anyhow::Result { + let pool = MySqlPoolOptions::new() + .max_connections(10) + .connect(database_url) + .await?; + + Ok(pool) +} diff --git a/src/db/mod.rs b/src/db/mod.rs new file mode 100644 index 0000000..16b1e09 --- /dev/null +++ b/src/db/mod.rs @@ -0,0 +1,7 @@ +pub mod connection; +pub mod models; +pub mod repository; + +pub use connection::create_pool; +pub use models::{Card, Customer, NewCard, NewCustomer, Transaction}; +pub use repository::Repository; diff --git a/src/db/models.rs b/src/db/models.rs new file mode 100644 index 0000000..fe6579b --- /dev/null +++ b/src/db/models.rs @@ -0,0 +1,74 @@ +use bigdecimal::BigDecimal; +use chrono::{DateTime, NaiveDateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Customer { + pub id: u32, + pub customer_number: String, + pub card_report_group: u8, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone)] +pub struct NewCustomer { + pub customer_number: String, + pub card_report_group: u8, +} + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Card { + pub id: u32, + pub card_number: String, + pub card_type: Option, + pub 
customer_id: Option, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Clone)] +pub struct NewCard { + pub card_number: String, + pub card_type: Option, + pub customer_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow)] +pub struct Transaction { + pub id: u64, + pub transaction_date: NaiveDateTime, + pub batch_number: String, + pub amount: BigDecimal, + pub volume: BigDecimal, + pub price: BigDecimal, + pub quality_code: i32, + pub quality_name: String, + pub station: String, + pub terminal: String, + pub pump: String, + pub receipt: String, + pub control_number: Option, + pub card_id: u32, + pub customer_id: Option, + pub created_at: DateTime, +} + +#[derive(Debug, Clone)] +pub struct NewTransaction { + pub transaction_date: NaiveDateTime, + pub batch_number: String, + pub amount: f64, + pub volume: f64, + pub price: f64, + pub quality_code: i32, + pub quality_name: String, + pub station: String, + pub terminal: String, + pub pump: String, + pub receipt: String, + pub control_number: Option, + pub card_id: u32, + pub customer_id: Option, +} diff --git a/src/db/repository.rs b/src/db/repository.rs new file mode 100644 index 0000000..40a5979 --- /dev/null +++ b/src/db/repository.rs @@ -0,0 +1,222 @@ +use crate::db::models::{Card, Customer, NewCard, NewCustomer, NewTransaction, Transaction}; +use bigdecimal::BigDecimal; +use sqlx::MySqlPool; + +pub struct Repository { + pool: MySqlPool, +} + +impl Repository { + pub fn new(pool: MySqlPool) -> Self { + Self { pool } + } + + pub async fn upsert_customer(&self, customer: &NewCustomer) -> anyhow::Result { + sqlx::query( + r#" + INSERT INTO customers (customer_number, card_report_group) + VALUES (?, ?) 
+ ON DUPLICATE KEY UPDATE + card_report_group = VALUES(card_report_group), + updated_at = CURRENT_TIMESTAMP + "#, + ) + .bind(&customer.customer_number) + .bind(customer.card_report_group) + .execute(&self.pool) + .await?; + + let row: (u32,) = sqlx::query_as( + "SELECT id FROM customers WHERE customer_number = ?", + ) + .bind(&customer.customer_number) + .fetch_one(&self.pool) + .await?; + + Ok(row.0) + } + + pub async fn find_customer_by_number( + &self, + customer_number: &str, + ) -> anyhow::Result> { + let result = sqlx::query_as( + "SELECT id, customer_number, card_report_group, created_at, updated_at + FROM customers + WHERE customer_number = ?", + ) + .bind(customer_number) + .fetch_optional(&self.pool) + .await?; + + Ok(result) + } + + pub async fn upsert_card(&self, card: &NewCard) -> anyhow::Result { + sqlx::query( + r#" + INSERT INTO cards (card_number, card_type, customer_id) + VALUES (?, ?, ?) + ON DUPLICATE KEY UPDATE + card_type = COALESCE(VALUES(card_type), card_type), + customer_id = COALESCE(VALUES(customer_id), customer_id), + updated_at = CURRENT_TIMESTAMP + "#, + ) + .bind(&card.card_number) + .bind(&card.card_type) + .bind(card.customer_id) + .execute(&self.pool) + .await?; + + let row: (u32,) = sqlx::query_as( + "SELECT id FROM cards WHERE card_number = ?", + ) + .bind(&card.card_number) + .fetch_one(&self.pool) + .await?; + + Ok(row.0) + } + + pub async fn find_card_by_number(&self, card_number: &str) -> anyhow::Result> { + let result = sqlx::query_as( + "SELECT id, card_number, card_type, customer_id, created_at, updated_at + FROM cards + WHERE card_number = ?", + ) + .bind(card_number) + .fetch_optional(&self.pool) + .await?; + + Ok(result) + } + + pub async fn insert_transactions_batch( + &self, + transactions: &[NewTransaction], + ) -> anyhow::Result { + if transactions.is_empty() { + return Ok(0); + } + + let mut query = String::from( + "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, 
quality_name, station, terminal, pump, receipt, control_number, card_id, customer_id) VALUES ", + ); + + let mut values = Vec::new(); + for tx in transactions { + values.push(format!( + "('{}', '{}', {}, {}, {}, {}, '{}', '{}', '{}', '{}', '{}', {}, {}, {})", + tx.transaction_date.format("%Y-%m-%d %H:%M:%S"), + tx.batch_number, + tx.amount, + tx.volume, + tx.price, + tx.quality_code, + tx.quality_name.replace("'", "''"), + tx.station, + tx.terminal, + tx.pump, + tx.receipt, + tx.control_number.as_ref().map(|s| format!("'{}'", s.replace("'", "''"))).unwrap_or_else(|| "NULL".to_string()), + tx.card_id, + tx.customer_id.map(|id| id.to_string()).unwrap_or_else(|| "NULL".to_string()), + )); + } + + query.push_str(&values.join(", ")); + + let result = sqlx::query(&query).execute(&self.pool).await?; + + Ok(result.rows_affected()) + } + + pub async fn get_customer_invoice( + &self, + customer_number: &str, + start_date: &str, + end_date: &str, + ) -> anyhow::Result> { + let result = sqlx::query_as( + r#" + SELECT t.id, t.transaction_date, t.batch_number, t.amount, t.volume, t.price, + t.quality_code, t.quality_name, t.station, t.terminal, t.pump, + t.receipt, t.control_number, t.card_id, t.customer_id, t.created_at + FROM transactions t + JOIN customers c ON t.customer_id = c.id + WHERE c.customer_number = ? + AND t.transaction_date >= ? + AND t.transaction_date <= ? + ORDER BY t.transaction_date + "#, + ) + .bind(customer_number) + .bind(start_date) + .bind(end_date) + .fetch_all(&self.pool) + .await?; + + Ok(result) + } + + pub async fn get_sales_summary_by_product( + &self, + start_date: &str, + end_date: &str, + ) -> anyhow::Result> { + let result = sqlx::query_as( + r#" + SELECT quality_name, COUNT(*) as tx_count, SUM(amount) as total_amount, SUM(volume) as total_volume + FROM transactions + WHERE transaction_date >= ? AND transaction_date <= ? 
+ GROUP BY quality_name + "#, + ) + .bind(start_date) + .bind(end_date) + .fetch_all(&self.pool) + .await?; + + Ok(result) + } + + pub async fn get_sales_summary_by_customer( + &self, + start_date: &str, + end_date: &str, + ) -> anyhow::Result> { + let result = sqlx::query_as( + r#" + SELECT c.customer_number, COUNT(*) as tx_count, SUM(t.amount) as total_amount, SUM(t.volume) as total_volume + FROM transactions t + JOIN customers c ON t.customer_id = c.id + WHERE t.transaction_date >= ? AND t.transaction_date <= ? + GROUP BY c.customer_number + ORDER BY total_amount DESC + "#, + ) + .bind(start_date) + .bind(end_date) + .fetch_all(&self.pool) + .await?; + + Ok(result) + } +} + +#[derive(Debug, sqlx::FromRow)] +pub struct ProductSummary { + pub quality_name: String, + pub tx_count: i64, + pub total_amount: BigDecimal, + pub total_volume: BigDecimal, +} + +#[derive(Debug, sqlx::FromRow)] +pub struct CustomerSummary { + pub customer_number: String, + pub tx_count: i64, + pub total_amount: BigDecimal, + pub total_volume: BigDecimal, +} diff --git a/src/invoice_generator.rs b/src/invoice_generator.rs index c782e9c..fd33e74 100644 --- a/src/invoice_generator.rs +++ b/src/invoice_generator.rs @@ -68,7 +68,7 @@ impl Transaction { } } -pub fn read_csv_file(path: &Path) -> Result> { +pub fn read_csv_file(path: &Path) -> anyhow::Result { let filename = path .file_name() .and_then(|n| n.to_str()) diff --git a/src/main.rs b/src/main.rs index 93290b5..bdeaafa 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,14 +1,18 @@ +mod commands; +mod config; +mod db; +mod invoice_generator; + use askama::Template; use chrono::{NaiveDateTime, Utc}; +use config::Config; use csv::ReaderBuilder; +use db::{create_pool, Repository}; +use invoice_generator::{group_by_customer, read_csv_file, Customer}; use std::collections::HashMap; use std::env; use std::fs; -use std::path::Path; - -mod invoice_generator; - -use invoice_generator::{group_by_customer, read_csv_file, Customer}; +use 
std::path::{Path, PathBuf}; fn fmt(v: f64) -> String { format!("{:.2}", v) @@ -17,7 +21,7 @@ fn fmt(v: f64) -> String { fn clean_csv_file( input_path: &Path, output_path: &Path, -) -> Result> { +) -> anyhow::Result { let file = fs::File::open(input_path)?; let mut rdr = ReaderBuilder::new() .delimiter(b'\t') @@ -229,99 +233,142 @@ struct CustomerTemplate { generated_date: String, } -fn main() -> Result<(), Box> { +#[tokio::main] +async fn main() -> anyhow::Result<()> { let args: Vec = env::args().collect(); - if args.len() != 3 { - eprintln!("Användning: {} ", args[0]); + if args.len() < 2 { + print_usage(&args[0]); std::process::exit(1); } - let input_path = Path::new(&args[1]); - let base_output_dir = Path::new(&args[2]); + match args[1].as_str() { + "import" => { + if args.len() != 3 { + eprintln!("Användning: {} import ", args[0]); + std::process::exit(1); + } + let csv_path = PathBuf::from(&args[2]); + if !csv_path.exists() { + eprintln!("Fel: Filen hittades inte: {:?}", csv_path); + std::process::exit(1); + } - if !input_path.exists() { - eprintln!("Fel: Filen hittades inte: {:?}", input_path); - std::process::exit(1); - } + let config = Config::load()?; + let pool = create_pool(&config.database.connection_url()).await?; + let repo = Repository::new(pool); - let filename = input_path - .file_name() - .and_then(|n| n.to_str()) - .unwrap_or("unknown") - .to_string(); - - println!("Konverterar {} till rensat format...", filename); - - let temp_cleaned_path = - base_output_dir.join(format!("{}.temp.csv", filename.trim_end_matches(".txt"))); - let batch_number = clean_csv_file(input_path, &temp_cleaned_path)?; - - let output_dir = base_output_dir.join(&batch_number); - fs::create_dir_all(&output_dir)?; - - fs::copy(input_path, output_dir.join(format!("{}.txt", batch_number)))?; - fs::rename( - &temp_cleaned_path, - output_dir.join(format!("{}.csv", batch_number)), - )?; - - println!( - "Konverterade {} transaktioner", - 
fs::read_to_string(output_dir.join(format!("{}.csv", batch_number)))?
-            .lines()
-            .count()
-            - 1
-    );
-
-    let batch = read_csv_file(&output_dir.join(format!("{}.csv", batch_number)))?;
-    println!("Laddade {} transaktioner", batch.transactions.len());
-
-    let first_date = batch.transactions.first().map(|t| t.date).unwrap();
-    let last_date = batch.transactions.last().map(|t| t.date).unwrap();
-    let period = format!(
-        "{} - {}",
-        first_date.format("%Y-%m-%d"),
-        last_date.format("%Y-%m-%d")
-    );
-
-    let customers = group_by_customer(&[batch]);
-
-    let index_customers: Vec<(String, usize)> = customers
-        .iter()
-        .map(|(num, c)| (num.clone(), c.cards.len()))
-        .collect();
-
-    let html = IndexTemplate {
-        customers: index_customers.clone(),
-        period: period.clone(),
-    }
-    .render()
-    .unwrap();
-    fs::write(output_dir.join("index.html"), html)?;
-
-    let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string();
-
-    let customer_count = customers.len();
-    for (customer_num, customer) in customers {
-        let prepared = PreparedCustomer::from_customer(customer);
-        let customer_html = CustomerTemplate {
-            customer: prepared,
-            batch_number: batch_number.clone(),
-            period: period.clone(),
-            generated_date: generated_date.clone(),
+            commands::run_import(&csv_path, &repo).await?;
         }
-    .render()
-    .unwrap();
-        let filename = format!("customer_{}.html", customer_num);
-        fs::write(output_dir.join(&filename), customer_html)?;
-        println!("Genererade {}", filename);
-    }
+        "generate" => {
+            // FIX: `generate` takes two arguments, so together with the
+            // program name and the subcommand args.len() must be 4. The
+            // original checked for 3 and then indexed args[3], which would
+            // panic on any accepted invocation.
+            if args.len() != 4 {
+                eprintln!("Användning: {} generate <csv-fil> <output-katalog>", args[0]);
+                std::process::exit(1);
+            }
+            let input_path = Path::new(&args[2]);
+            let base_output_dir = Path::new(&args[3]);

-    println!(
-        "\nGenererade {} kundfakturor i {:?}",
-        customer_count, output_dir
-    );
+            if !input_path.exists() {
+                eprintln!("Fel: Filen hittades inte: {:?}", input_path);
+                std::process::exit(1);
+            }
+
+            let filename = input_path
+                .file_name()
+                .and_then(|n| n.to_str())
+                .unwrap_or("unknown")
.to_string(); + + println!("Konverterar {} till rensat format...", filename); + + let temp_cleaned_path = + base_output_dir.join(format!("{}.temp.csv", filename.trim_end_matches(".txt"))); + let batch_number = clean_csv_file(input_path, &temp_cleaned_path)?; + + let output_dir = base_output_dir.join(&batch_number); + fs::create_dir_all(&output_dir)?; + + fs::copy(input_path, output_dir.join(format!("{}.txt", batch_number)))?; + fs::rename( + &temp_cleaned_path, + output_dir.join(format!("{}.csv", batch_number)), + )?; + + println!( + "Konverterade {} transaktioner", + fs::read_to_string(output_dir.join(format!("{}.csv", batch_number)))? + .lines() + .count() + - 1 + ); + + let batch = read_csv_file(&output_dir.join(format!("{}.csv", batch_number)))?; + println!("Laddade {} transaktioner", batch.transactions.len()); + + let first_date = batch.transactions.first().map(|t| t.date).unwrap(); + let last_date = batch.transactions.last().map(|t| t.date).unwrap(); + let period = format!( + "{} - {}", + first_date.format("%Y-%m-%d"), + last_date.format("%Y-%m-%d") + ); + + let customers = group_by_customer(&[batch]); + + let index_customers: Vec<(String, usize)> = customers + .iter() + .map(|(num, c)| (num.clone(), c.cards.len())) + .collect(); + + let html = IndexTemplate { + customers: index_customers.clone(), + period: period.clone(), + } + .render() + .unwrap(); + fs::write(output_dir.join("index.html"), html)?; + + let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string(); + + let customer_count = customers.len(); + for (customer_num, customer) in customers { + let prepared = PreparedCustomer::from_customer(customer); + let customer_html = CustomerTemplate { + customer: prepared, + batch_number: batch_number.clone(), + period: period.clone(), + generated_date: generated_date.clone(), + } + .render() + .unwrap(); + let filename = format!("customer_{}.html", customer_num); + fs::write(output_dir.join(&filename), customer_html)?; + println!("Genererade {}", 
filename); + } + + println!( + "\nGenererade {} kundfakturor i {:?}", + customer_count, output_dir + ); + } + "help" | "--help" | "-h" => { + print_usage(&args[0]); + } + _ => { + eprintln!("Okänt kommando: {}", args[1]); + print_usage(&args[0]); + std::process::exit(1); + } + } Ok(()) } + +fn print_usage(program: &str) { + eprintln!("Användning: {} [argument]", program); + eprintln!(); + eprintln!("Kommandon:"); + eprintln!(" import Importera CSV-data till databasen"); + eprintln!(" generate Generera HTML-fakturor från CSV"); + eprintln!(" help Visa denna hjälptext"); +}