Add MariaDB database support for storing transaction data

Introduces a new database layer to persist CSV transaction data in MariaDB,
enabling both invoicing and sales reporting queries. This replaces the
previous file-to-file-only processing.

Changes:
- Add sqlx, tokio, toml, anyhow, bigdecimal dependencies to Cargo.toml
- Create config module for TOML-based configuration (database credentials)
- Create db module with connection pool, models, and repository
- Create commands module with 'import' subcommand for CSV ingestion
- Refactor main.rs to use subcommand architecture (import/generate)
- Add migration SQL file for manual database schema creation

Schema (3 tables):
- customers: customer_number, card_report_group (1=fleet, 3/4=retail)
- cards: card_number, card_type, customer_id (nullable for anonymous)
- transactions: full transaction data with FK to cards/customers

Usage:
  cargo run -- import <csv-file>   # Import to database
  cargo run -- generate <csv> <dir>  # Generate HTML invoices (unchanged)

Configuration:
  cp config.example.toml config.toml  # Edit with database credentials
  mysql < migrations/001_initial_schema.sql  # Create database first
This commit is contained in:
2026-04-02 06:33:38 +02:00
parent 39b62014b0
commit 9daa186ff6
13 changed files with 762 additions and 93 deletions

1
.gitignore vendored
View File

@@ -5,3 +5,4 @@ output/
*.swp *.swp
*.lock *.lock
/input /input
config.toml

View File

@@ -5,6 +5,11 @@ edition = "2021"
[dependencies] [dependencies]
askama = "0.15.5" askama = "0.15.5"
chrono = "0.4.44" chrono = { version = "0.4.44", features = ["serde"] }
csv = "1.4.0" csv = "1.4.0"
serde = "1.0.228" serde = { version = "1.0.228", features = ["derive"] }
sqlx = { version = "0.8", features = ["runtime-tokio", "mysql", "chrono", "bigdecimal"] }
tokio = { version = "1", features = ["full"] }
toml = "0.8"
anyhow = "1"
bigdecimal = { version = "0.4", features = ["serde"] }

6
config.example.toml Normal file
View File

@@ -0,0 +1,6 @@
# Example configuration. Copy to config.toml and fill in real credentials
# (config.toml is gitignored, so secrets stay out of version control).
[database]
host = "localhost"
port = 3306
user = "root"
password = ""
# Must match the database created by migrations/001_initial_schema.sql
name = "rusty_petroleum"

View File

@@ -0,0 +1,52 @@
-- Initial schema for rusty-petroleum
-- Run this against your MariaDB database before importing data
CREATE DATABASE IF NOT EXISTS rusty_petroleum;
USE rusty_petroleum;
-- Customers identified by their external customer number from the CSV feed.
-- card_report_group: 1 = fleet, 3/4 = retail (0 = unknown/default).
CREATE TABLE IF NOT EXISTS customers (
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
customer_number VARCHAR(50) NOT NULL UNIQUE,
card_report_group TINYINT UNSIGNED NOT NULL DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_customer_number (customer_number)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Fuel cards. customer_id is nullable to allow anonymous/unassigned cards;
-- ON DELETE SET NULL detaches cards instead of cascading deletes.
CREATE TABLE IF NOT EXISTS cards (
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
card_number VARCHAR(50) NOT NULL UNIQUE,
card_type VARCHAR(50),
customer_id INT UNSIGNED NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
INDEX idx_card_number (card_number),
INDEX idx_customer_id (customer_id),
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- One row per fuel transaction. Every transaction must reference a card;
-- the customer link is denormalized here (also reachable via cards) to
-- make invoice/report queries a single join.
CREATE TABLE IF NOT EXISTS transactions (
id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
transaction_date DATETIME NOT NULL,
batch_number VARCHAR(20) NOT NULL,
amount DECIMAL(10,2) NOT NULL,
volume DECIMAL(10,3) NOT NULL,
price DECIMAL(8,4) NOT NULL,
quality_code INT NOT NULL,
quality_name VARCHAR(50) NOT NULL,
station VARCHAR(20) NOT NULL,
terminal VARCHAR(10) NOT NULL,
pump VARCHAR(10) NOT NULL,
receipt VARCHAR(20) NOT NULL,
control_number VARCHAR(20),
card_id INT UNSIGNED NOT NULL,
customer_id INT UNSIGNED NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_transaction_date (transaction_date),
INDEX idx_batch_number (batch_number),
INDEX idx_card_id (card_id),
INDEX idx_customer_id (customer_id),
INDEX idx_station (station),
FOREIGN KEY (card_id) REFERENCES cards(id),
FOREIGN KEY (customer_id) REFERENCES customers(id) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

172
src/commands/import.rs Normal file
View File

@@ -0,0 +1,172 @@
use crate::db::models::{NewCard, NewCustomer, NewTransaction};
use crate::db::Repository;
use chrono::NaiveDateTime;
use csv::ReaderBuilder;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
/// Import a tab-separated transaction CSV into the database.
///
/// Pass 1 parses the file and collects unique customers and cards;
/// pass 2 upserts customers, then cards (linked to their owning
/// customer), then inserts the transactions in batches of 500.
pub async fn run_import(csv_path: &Path, repo: &Repository) -> anyhow::Result<()> {
    println!("Läser CSV-fil: {:?}", csv_path);
    let file = File::open(csv_path)?;
    let mut rdr = ReaderBuilder::new()
        .delimiter(b'\t')
        .has_headers(true)
        .flexible(true)
        .from_reader(file);

    let mut transactions = Vec::new();
    // customer_number -> card_report_group (first value seen wins)
    let mut seen_customers: HashMap<String, u8> = HashMap::new();
    // card_number -> (card_type, owning customer_number)
    let mut seen_cards: HashMap<String, (Option<String>, Option<String>)> = HashMap::new();

    for result in rdr.records() {
        let record = result?;
        if let Some(tx) = parse_record(&record)? {
            let card_report_group: u8 = tx.card_report_group_number.parse().unwrap_or(0);
            if !tx.customer_number.is_empty() {
                seen_customers
                    .entry(tx.customer_number.clone())
                    .or_insert(card_report_group);
            }
            seen_cards.entry(tx.card_number.clone()).or_insert_with(|| {
                let owner = if tx.customer_number.is_empty() {
                    None
                } else {
                    Some(tx.customer_number.clone())
                };
                (Some(tx.card_type.clone()), owner)
            });
            transactions.push(tx);
        }
    }

    println!("Hittade {} transaktioner", transactions.len());
    println!("Unika kunder: {}", seen_customers.len());
    println!("Unika kort: {}", seen_cards.len());

    println!("\nImporterar kunder...");
    let mut customer_ids: HashMap<String, u32> = HashMap::new();
    for (customer_number, card_report_group) in &seen_customers {
        let new_customer = NewCustomer {
            customer_number: customer_number.clone(),
            card_report_group: *card_report_group,
        };
        let id = repo.upsert_customer(&new_customer).await?;
        customer_ids.insert(customer_number.clone(), id);
        println!(" Kund {} -> id {}", customer_number, id);
    }

    println!("\nImporterar kort...");
    let mut card_ids: HashMap<String, u32> = HashMap::new();
    for (card_number, (card_type, owner_number)) in &seen_cards {
        // BUG FIX: the original resolved the owning customer by looking up
        // customer_ids with the *card* number, which never matches, so every
        // card was stored without its customer link.
        let customer_id = owner_number
            .as_ref()
            .and_then(|n| customer_ids.get(n))
            .copied();
        let new_card = NewCard {
            card_number: card_number.clone(),
            card_type: card_type.clone(),
            customer_id,
        };
        let id = repo.upsert_card(&new_card).await?;
        card_ids.insert(card_number.clone(), id);
    }

    println!("\nImporterar transaktioner...");
    let batch_size = 500;
    let mut total_inserted = 0u64;
    let mut batch: Vec<NewTransaction> = Vec::with_capacity(batch_size);
    for tx in transactions {
        // Every parsed row registered its card above, so a miss here is a
        // programming error; fail loudly instead of inserting card_id 0,
        // which would violate the transactions.card_id FK constraint.
        let card_id = *card_ids
            .get(&tx.card_number)
            .ok_or_else(|| anyhow::anyhow!("Okänt kort: {}", tx.card_number))?;
        let customer_id = customer_ids.get(&tx.customer_number).copied();
        let new_tx = NewTransaction {
            transaction_date: tx.date,
            batch_number: tx.batch_number,
            amount: tx.amount,
            volume: tx.volume,
            price: tx.price,
            quality_code: tx.quality,
            quality_name: tx.quality_name,
            station: tx.station,
            terminal: tx.terminal,
            pump: tx.pump,
            receipt: tx.receipt,
            control_number: if tx.control_number.is_empty() {
                None
            } else {
                Some(tx.control_number)
            },
            card_id,
            customer_id,
        };
        batch.push(new_tx);
        if batch.len() >= batch_size {
            let inserted = repo.insert_transactions_batch(&batch).await?;
            total_inserted += inserted;
            println!(" Inlagda {} transaktioner (totalt: {})", inserted, total_inserted);
            batch.clear();
        }
    }
    if !batch.is_empty() {
        let inserted = repo.insert_transactions_batch(&batch).await?;
        total_inserted += inserted;
        println!(" Inlagda {} transaktioner (totalt: {})", inserted, total_inserted);
    }

    // Typo fix: original printed "Klart!Importerade" without a space.
    println!("\nKlart! Importerade {} transaktioner", total_inserted);
    Ok(())
}
// Intermediate in-memory representation of one CSV row, built by
// parse_record before the data is split into customer/card/transaction
// database rows. Field order mirrors the tab-separated input columns.
struct CsvTransaction {
// Column 0: transaction timestamp.
date: NaiveDateTime,
// Column 1: batch identifier from the source system.
batch_number: String,
// Columns 2-4: monetary amount, fuel volume, and unit price.
// NOTE(review): parsed as f64 here but stored in DECIMAL columns —
// confirm the precision loss is acceptable for monetary values.
amount: f64,
volume: f64,
price: f64,
// Columns 5-6: fuel quality code and human-readable name.
quality: i32,
quality_name: String,
// Columns 7-8: card identifier and type.
card_number: String,
card_type: String,
// Column 9: owning customer; rows with an empty value are skipped.
customer_number: String,
// Columns 10-13: where the transaction happened.
station: String,
terminal: String,
pump: String,
receipt: String,
// Column 14: raw reporting-group string (1 = fleet, 3/4 = retail).
card_report_group_number: String,
// Column 15: optional control number; empty string means absent.
control_number: String,
}
/// Return column `index` of the record, or an empty string when the
/// (flexible-width) record has fewer columns than expected.
fn get_field(record: &csv::StringRecord, index: usize) -> &str {
    match record.get(index) {
        Some(value) => value,
        None => "",
    }
}
/// Parse one tab-separated record into a `CsvTransaction`.
///
/// Returns `Ok(None)` for rows that are deliberately skipped:
/// - amount <= 0 or unparseable (column 2);
/// - empty customer number (column 9).
///   NOTE(review): the commit description mentions anonymous cards, but
///   anonymous rows are dropped here — confirm this is intended.
/// Returns `Err` only when the date column matches neither supported format.
fn parse_record(record: &csv::StringRecord) -> anyhow::Result<Option<CsvTransaction>> {
let date_str = get_field(record, 0);
// Try ISO-style "YYYY-MM-DD HH:MM:SS" first, then US 12-hour format.
let date = NaiveDateTime::parse_from_str(date_str, "%Y-%m-%d %H:%M:%S")
.or_else(|_| NaiveDateTime::parse_from_str(date_str, "%m/%d/%Y %I:%M:%S %p"))
.map_err(|e| anyhow::anyhow!("Failed to parse date '{}': {}", date_str, e))?;
// Unparseable amounts fall back to 0.0 and are filtered out below.
let amount: f64 = get_field(record, 2).parse().unwrap_or(0.0);
if amount <= 0.0 {
return Ok(None);
}
let customer_number = get_field(record, 9).to_string();
if customer_number.is_empty() {
return Ok(None);
}
// Remaining numeric columns default to 0 on parse failure rather than
// aborting the whole import.
Ok(Some(CsvTransaction {
date,
batch_number: get_field(record, 1).to_string(),
amount,
volume: get_field(record, 3).parse().unwrap_or(0.0),
price: get_field(record, 4).parse().unwrap_or(0.0),
quality: get_field(record, 5).parse().unwrap_or(0),
quality_name: get_field(record, 6).to_string(),
card_number: get_field(record, 7).to_string(),
card_type: get_field(record, 8).to_string(),
customer_number,
station: get_field(record, 10).to_string(),
terminal: get_field(record, 11).to_string(),
pump: get_field(record, 12).to_string(),
receipt: get_field(record, 13).to_string(),
card_report_group_number: get_field(record, 14).to_string(),
control_number: get_field(record, 15).to_string(),
}))
}

3
src/commands/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
//! CLI subcommand implementations.
pub mod import;
// Re-exported so main.rs can call `commands::run_import` directly.
pub use import::run_import;

69
src/config.rs Normal file
View File

@@ -0,0 +1,69 @@
use std::fs;
use std::path::Path;
/// Top-level application configuration, loaded from `config.toml`.
#[derive(Debug, Clone)]
pub struct Config {
pub database: DatabaseConfig,
}
/// MariaDB/MySQL connection settings from the `[database]` table of
/// `config.toml`.
#[derive(Debug, Clone)]
pub struct DatabaseConfig {
    pub host: String,
    pub port: u16,
    pub user: String,
    pub password: String,
    pub name: String,
}

impl DatabaseConfig {
    /// Build a `mysql://user:password@host:port/name` connection URL.
    ///
    /// FIX: user and password are percent-encoded so credentials containing
    /// URL-reserved characters (`@`, `:`, `/`, `%`, ...) no longer corrupt
    /// the URL. Plain alphanumeric credentials are emitted unchanged, so
    /// existing configs keep working.
    pub fn connection_url(&self) -> String {
        format!(
            "mysql://{}:{}@{}:{}/{}",
            percent_encode(&self.user),
            percent_encode(&self.password),
            self.host,
            self.port,
            self.name
        )
    }
}

/// Percent-encode every byte outside the RFC 3986 "unreserved" set
/// (ALPHA / DIGIT / "-" / "." / "_" / "~").
fn percent_encode(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for b in input.bytes() {
        match b {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                out.push(b as char)
            }
            _ => out.push_str(&format!("%{:02X}", b)),
        }
    }
    out
}
impl Config {
    /// Load configuration from `config.toml` in the current working directory.
    pub fn load() -> anyhow::Result<Self> {
        Self::load_from_path(Path::new("config.toml"))
    }

    /// Read and parse a TOML configuration file from an explicit path.
    ///
    /// Errors are wrapped with a message distinguishing "file unreadable"
    /// from "file unparseable".
    pub fn load_from_path(path: &Path) -> anyhow::Result<Self> {
        let contents = match fs::read_to_string(path) {
            Ok(text) => text,
            Err(e) => return Err(anyhow::anyhow!("Failed to read config file: {}", e)),
        };
        match toml::from_str::<TomlConfig>(&contents) {
            Ok(parsed) => Ok(parsed.into()),
            Err(e) => Err(anyhow::anyhow!("Failed to parse config file: {}", e)),
        }
    }
}
// Raw deserialization targets mirroring the TOML file layout. Kept
// separate from the public `Config`/`DatabaseConfig` types so the serde
// derive and file format stay an internal detail of this module.
#[derive(serde::Deserialize)]
struct TomlConfig {
database: TomlDatabaseConfig,
}
#[derive(serde::Deserialize)]
struct TomlDatabaseConfig {
host: String,
port: u16,
user: String,
password: String,
name: String,
}
impl From<TomlConfig> for Config {
fn from(toml: TomlConfig) -> Self {
Config {
database: DatabaseConfig {
host: toml.database.host,
port: toml.database.port,
user: toml.database.user,
password: toml.database.password,
name: toml.database.name,
},
}
}
}

11
src/db/connection.rs Normal file
View File

@@ -0,0 +1,11 @@
use sqlx::mysql::MySqlPoolOptions;
use sqlx::MySqlPool;
/// Open a MySQL/MariaDB connection pool for the given connection URL.
///
/// The pool is capped at 10 concurrent connections.
pub async fn create_pool(database_url: &str) -> anyhow::Result<MySqlPool> {
    let options = MySqlPoolOptions::new().max_connections(10);
    let pool = options.connect(database_url).await?;
    Ok(pool)
}

7
src/db/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
//! Database layer: connection pooling, row models, and the repository
//! that encapsulates all SQL.
pub mod connection;
pub mod models;
pub mod repository;
// Convenience re-exports so callers can write `db::Repository` etc.
// (NewTransaction is intentionally reached via `db::models::` by callers.)
pub use connection::create_pool;
pub use models::{Card, Customer, NewCard, NewCustomer, Transaction};
pub use repository::Repository;

74
src/db/models.rs Normal file
View File

@@ -0,0 +1,74 @@
use bigdecimal::BigDecimal;
use chrono::{DateTime, NaiveDateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
/// A row of the `customers` table.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Customer {
pub id: u32,
pub customer_number: String,
// Reporting group from the CSV feed: 1 = fleet, 3/4 = retail, 0 = default.
pub card_report_group: u8,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
/// Insert payload for a customer; id and timestamps are database-generated.
#[derive(Debug, Clone)]
pub struct NewCustomer {
pub customer_number: String,
pub card_report_group: u8,
}
/// A row of the `cards` table.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Card {
pub id: u32,
pub card_number: String,
pub card_type: Option<String>,
// None for cards not (yet) linked to a customer.
pub customer_id: Option<u32>,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
/// Insert payload for a card; id and timestamps are database-generated.
#[derive(Debug, Clone)]
pub struct NewCard {
pub card_number: String,
pub card_type: Option<String>,
pub customer_id: Option<u32>,
}
/// A row of the `transactions` table as read back from the database.
/// Monetary/volume columns map to DECIMAL, hence `BigDecimal` here.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Transaction {
pub id: u64,
pub transaction_date: NaiveDateTime,
pub batch_number: String,
pub amount: BigDecimal,
pub volume: BigDecimal,
pub price: BigDecimal,
pub quality_code: i32,
pub quality_name: String,
pub station: String,
pub terminal: String,
pub pump: String,
pub receipt: String,
pub control_number: Option<String>,
pub card_id: u32,
pub customer_id: Option<u32>,
pub created_at: DateTime<Utc>,
}
/// Insert payload for a transaction.
// NOTE(review): amount/volume/price are f64 here but BigDecimal on read —
// confirm that f64 precision is acceptable for the DECIMAL columns.
#[derive(Debug, Clone)]
pub struct NewTransaction {
pub transaction_date: NaiveDateTime,
pub batch_number: String,
pub amount: f64,
pub volume: f64,
pub price: f64,
pub quality_code: i32,
pub quality_name: String,
pub station: String,
pub terminal: String,
pub pump: String,
pub receipt: String,
pub control_number: Option<String>,
pub card_id: u32,
pub customer_id: Option<u32>,
}

222
src/db/repository.rs Normal file
View File

@@ -0,0 +1,222 @@
use crate::db::models::{Card, Customer, NewCard, NewCustomer, NewTransaction, Transaction};
use bigdecimal::BigDecimal;
use sqlx::MySqlPool;
/// Data-access layer for customers, cards, and transactions.
/// Holds the connection pool; all queries go through its methods.
pub struct Repository {
pool: MySqlPool,
}
impl Repository {
    /// Wrap an existing MySQL/MariaDB connection pool.
    pub fn new(pool: MySqlPool) -> Self {
        Self { pool }
    }

    /// Insert a customer or refresh its `card_report_group`, returning
    /// the row id.
    pub async fn upsert_customer(&self, customer: &NewCustomer) -> anyhow::Result<u32> {
        sqlx::query(
            r#"
            INSERT INTO customers (customer_number, card_report_group)
            VALUES (?, ?)
            ON DUPLICATE KEY UPDATE
            card_report_group = VALUES(card_report_group),
            updated_at = CURRENT_TIMESTAMP
            "#,
        )
        .bind(&customer.customer_number)
        .bind(customer.card_report_group)
        .execute(&self.pool)
        .await?;
        // Re-select the id by the unique key: last_insert_id() is not
        // dependable on the ON DUPLICATE KEY UPDATE path.
        let row: (u32,) = sqlx::query_as("SELECT id FROM customers WHERE customer_number = ?")
            .bind(&customer.customer_number)
            .fetch_one(&self.pool)
            .await?;
        Ok(row.0)
    }

    /// Look up a customer by its external customer number.
    pub async fn find_customer_by_number(
        &self,
        customer_number: &str,
    ) -> anyhow::Result<Option<Customer>> {
        let result = sqlx::query_as(
            "SELECT id, customer_number, card_report_group, created_at, updated_at
             FROM customers
             WHERE customer_number = ?",
        )
        .bind(customer_number)
        .fetch_optional(&self.pool)
        .await?;
        Ok(result)
    }

    /// Insert a card or merge new details into an existing one, returning
    /// the row id. COALESCE keeps the stored card_type/customer_id when
    /// the incoming values are NULL, so partial updates never erase data.
    pub async fn upsert_card(&self, card: &NewCard) -> anyhow::Result<u32> {
        sqlx::query(
            r#"
            INSERT INTO cards (card_number, card_type, customer_id)
            VALUES (?, ?, ?)
            ON DUPLICATE KEY UPDATE
            card_type = COALESCE(VALUES(card_type), card_type),
            customer_id = COALESCE(VALUES(customer_id), customer_id),
            updated_at = CURRENT_TIMESTAMP
            "#,
        )
        .bind(&card.card_number)
        .bind(&card.card_type)
        .bind(card.customer_id)
        .execute(&self.pool)
        .await?;
        let row: (u32,) = sqlx::query_as("SELECT id FROM cards WHERE card_number = ?")
            .bind(&card.card_number)
            .fetch_one(&self.pool)
            .await?;
        Ok(row.0)
    }

    /// Look up a card by its card number.
    pub async fn find_card_by_number(&self, card_number: &str) -> anyhow::Result<Option<Card>> {
        let result = sqlx::query_as(
            "SELECT id, card_number, card_type, customer_id, created_at, updated_at
             FROM cards
             WHERE card_number = ?",
        )
        .bind(card_number)
        .fetch_optional(&self.pool)
        .await?;
        Ok(result)
    }

    /// Bulk-insert transactions in a single multi-row statement; returns
    /// the number of rows inserted.
    ///
    /// SECURITY FIX: the original built the VALUES list with `format!`,
    /// splicing CSV-derived fields (batch_number, station, terminal, pump,
    /// receipt) unescaped into the SQL string — an injection vector and a
    /// guaranteed syntax error for any value containing a quote. All
    /// values are now bound as parameters.
    pub async fn insert_transactions_batch(
        &self,
        transactions: &[NewTransaction],
    ) -> anyhow::Result<u64> {
        if transactions.is_empty() {
            return Ok(0);
        }
        // 14 placeholders per row; at the 500-row batch size used by the
        // importer this stays far below MySQL's placeholder limit.
        let row_placeholder = "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
        let placeholders = vec![row_placeholder; transactions.len()].join(", ");
        let sql = format!(
            "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, station, terminal, pump, receipt, control_number, card_id, customer_id) VALUES {}",
            placeholders
        );
        let mut query = sqlx::query(&sql);
        for tx in transactions {
            query = query
                .bind(tx.transaction_date)
                .bind(&tx.batch_number)
                .bind(tx.amount)
                .bind(tx.volume)
                .bind(tx.price)
                .bind(tx.quality_code)
                .bind(&tx.quality_name)
                .bind(&tx.station)
                .bind(&tx.terminal)
                .bind(&tx.pump)
                .bind(&tx.receipt)
                .bind(tx.control_number.as_deref())
                .bind(tx.card_id)
                .bind(tx.customer_id);
        }
        let result = query.execute(&self.pool).await?;
        Ok(result.rows_affected())
    }

    /// All transactions for one customer within [start_date, end_date]
    /// (inclusive), oldest first — the data backing an invoice.
    pub async fn get_customer_invoice(
        &self,
        customer_number: &str,
        start_date: &str,
        end_date: &str,
    ) -> anyhow::Result<Vec<Transaction>> {
        let result = sqlx::query_as(
            r#"
            SELECT t.id, t.transaction_date, t.batch_number, t.amount, t.volume, t.price,
            t.quality_code, t.quality_name, t.station, t.terminal, t.pump,
            t.receipt, t.control_number, t.card_id, t.customer_id, t.created_at
            FROM transactions t
            JOIN customers c ON t.customer_id = c.id
            WHERE c.customer_number = ?
            AND t.transaction_date >= ?
            AND t.transaction_date <= ?
            ORDER BY t.transaction_date
            "#,
        )
        .bind(customer_number)
        .bind(start_date)
        .bind(end_date)
        .fetch_all(&self.pool)
        .await?;
        Ok(result)
    }

    /// Per-product totals (transaction count, amount, volume) for the
    /// given inclusive date range.
    pub async fn get_sales_summary_by_product(
        &self,
        start_date: &str,
        end_date: &str,
    ) -> anyhow::Result<Vec<ProductSummary>> {
        let result = sqlx::query_as(
            r#"
            SELECT quality_name, COUNT(*) as tx_count, SUM(amount) as total_amount, SUM(volume) as total_volume
            FROM transactions
            WHERE transaction_date >= ? AND transaction_date <= ?
            GROUP BY quality_name
            "#,
        )
        .bind(start_date)
        .bind(end_date)
        .fetch_all(&self.pool)
        .await?;
        Ok(result)
    }

    /// Per-customer totals for the given inclusive date range, largest
    /// revenue first. Transactions without a customer link are excluded
    /// by the inner join.
    pub async fn get_sales_summary_by_customer(
        &self,
        start_date: &str,
        end_date: &str,
    ) -> anyhow::Result<Vec<CustomerSummary>> {
        let result = sqlx::query_as(
            r#"
            SELECT c.customer_number, COUNT(*) as tx_count, SUM(t.amount) as total_amount, SUM(t.volume) as total_volume
            FROM transactions t
            JOIN customers c ON t.customer_id = c.id
            WHERE t.transaction_date >= ? AND t.transaction_date <= ?
            GROUP BY c.customer_number
            ORDER BY total_amount DESC
            "#,
        )
        .bind(start_date)
        .bind(end_date)
        .fetch_all(&self.pool)
        .await?;
        Ok(result)
    }
}
/// Aggregated sales figures for one product (quality_name group).
#[derive(Debug, sqlx::FromRow)]
pub struct ProductSummary {
pub quality_name: String,
pub tx_count: i64,
pub total_amount: BigDecimal,
pub total_volume: BigDecimal,
}
/// Aggregated sales figures for one customer.
#[derive(Debug, sqlx::FromRow)]
pub struct CustomerSummary {
pub customer_number: String,
pub tx_count: i64,
pub total_amount: BigDecimal,
pub total_volume: BigDecimal,
}

View File

@@ -68,7 +68,7 @@ impl Transaction {
} }
} }
pub fn read_csv_file(path: &Path) -> Result<Batch, Box<dyn std::error::Error>> { pub fn read_csv_file(path: &Path) -> anyhow::Result<Batch> {
let filename = path let filename = path
.file_name() .file_name()
.and_then(|n| n.to_str()) .and_then(|n| n.to_str())

View File

@@ -1,14 +1,18 @@
mod commands;
mod config;
mod db;
mod invoice_generator;
use askama::Template; use askama::Template;
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use config::Config;
use csv::ReaderBuilder; use csv::ReaderBuilder;
use db::{create_pool, Repository};
use invoice_generator::{group_by_customer, read_csv_file, Customer};
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::fs; use std::fs;
use std::path::Path; use std::path::{Path, PathBuf};
mod invoice_generator;
use invoice_generator::{group_by_customer, read_csv_file, Customer};
fn fmt(v: f64) -> String { fn fmt(v: f64) -> String {
format!("{:.2}", v) format!("{:.2}", v)
@@ -17,7 +21,7 @@ fn fmt(v: f64) -> String {
fn clean_csv_file( fn clean_csv_file(
input_path: &Path, input_path: &Path,
output_path: &Path, output_path: &Path,
) -> Result<String, Box<dyn std::error::Error>> { ) -> anyhow::Result<String> {
let file = fs::File::open(input_path)?; let file = fs::File::open(input_path)?;
let mut rdr = ReaderBuilder::new() let mut rdr = ReaderBuilder::new()
.delimiter(b'\t') .delimiter(b'\t')
@@ -229,99 +233,142 @@ struct CustomerTemplate {
generated_date: String, generated_date: String,
} }
fn main() -> Result<(), Box<dyn std::error::Error>> { #[tokio::main]
async fn main() -> anyhow::Result<()> {
let args: Vec<String> = env::args().collect(); let args: Vec<String> = env::args().collect();
if args.len() != 3 { if args.len() < 2 {
eprintln!("Användning: {} <csv-fil> <utdatakatalog>", args[0]); print_usage(&args[0]);
std::process::exit(1); std::process::exit(1);
} }
let input_path = Path::new(&args[1]); match args[1].as_str() {
let base_output_dir = Path::new(&args[2]); "import" => {
if args.len() != 3 {
eprintln!("Användning: {} import <csv-fil>", args[0]);
std::process::exit(1);
}
let csv_path = PathBuf::from(&args[2]);
if !csv_path.exists() {
eprintln!("Fel: Filen hittades inte: {:?}", csv_path);
std::process::exit(1);
}
if !input_path.exists() { let config = Config::load()?;
eprintln!("Fel: Filen hittades inte: {:?}", input_path); let pool = create_pool(&config.database.connection_url()).await?;
std::process::exit(1); let repo = Repository::new(pool);
}
let filename = input_path commands::run_import(&csv_path, &repo).await?;
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
.to_string();
println!("Konverterar {} till rensat format...", filename);
let temp_cleaned_path =
base_output_dir.join(format!("{}.temp.csv", filename.trim_end_matches(".txt")));
let batch_number = clean_csv_file(input_path, &temp_cleaned_path)?;
let output_dir = base_output_dir.join(&batch_number);
fs::create_dir_all(&output_dir)?;
fs::copy(input_path, output_dir.join(format!("{}.txt", batch_number)))?;
fs::rename(
&temp_cleaned_path,
output_dir.join(format!("{}.csv", batch_number)),
)?;
println!(
"Konverterade {} transaktioner",
fs::read_to_string(output_dir.join(format!("{}.csv", batch_number)))?
.lines()
.count()
- 1
);
let batch = read_csv_file(&output_dir.join(format!("{}.csv", batch_number)))?;
println!("Laddade {} transaktioner", batch.transactions.len());
let first_date = batch.transactions.first().map(|t| t.date).unwrap();
let last_date = batch.transactions.last().map(|t| t.date).unwrap();
let period = format!(
"{} - {}",
first_date.format("%Y-%m-%d"),
last_date.format("%Y-%m-%d")
);
let customers = group_by_customer(&[batch]);
let index_customers: Vec<(String, usize)> = customers
.iter()
.map(|(num, c)| (num.clone(), c.cards.len()))
.collect();
let html = IndexTemplate {
customers: index_customers.clone(),
period: period.clone(),
}
.render()
.unwrap();
fs::write(output_dir.join("index.html"), html)?;
let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string();
let customer_count = customers.len();
for (customer_num, customer) in customers {
let prepared = PreparedCustomer::from_customer(customer);
let customer_html = CustomerTemplate {
customer: prepared,
batch_number: batch_number.clone(),
period: period.clone(),
generated_date: generated_date.clone(),
} }
.render() "generate" => {
.unwrap(); if args.len() != 3 {
let filename = format!("customer_{}.html", customer_num); eprintln!("Användning: {} generate <csv-fil> <utdatakatalog>", args[0]);
fs::write(output_dir.join(&filename), customer_html)?; std::process::exit(1);
println!("Genererade {}", filename); }
} let input_path = Path::new(&args[2]);
let base_output_dir = Path::new(&args[3]);
println!( if !input_path.exists() {
"\nGenererade {} kundfakturor i {:?}", eprintln!("Fel: Filen hittades inte: {:?}", input_path);
customer_count, output_dir std::process::exit(1);
); }
let filename = input_path
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("unknown")
.to_string();
println!("Konverterar {} till rensat format...", filename);
let temp_cleaned_path =
base_output_dir.join(format!("{}.temp.csv", filename.trim_end_matches(".txt")));
let batch_number = clean_csv_file(input_path, &temp_cleaned_path)?;
let output_dir = base_output_dir.join(&batch_number);
fs::create_dir_all(&output_dir)?;
fs::copy(input_path, output_dir.join(format!("{}.txt", batch_number)))?;
fs::rename(
&temp_cleaned_path,
output_dir.join(format!("{}.csv", batch_number)),
)?;
println!(
"Konverterade {} transaktioner",
fs::read_to_string(output_dir.join(format!("{}.csv", batch_number)))?
.lines()
.count()
- 1
);
let batch = read_csv_file(&output_dir.join(format!("{}.csv", batch_number)))?;
println!("Laddade {} transaktioner", batch.transactions.len());
let first_date = batch.transactions.first().map(|t| t.date).unwrap();
let last_date = batch.transactions.last().map(|t| t.date).unwrap();
let period = format!(
"{} - {}",
first_date.format("%Y-%m-%d"),
last_date.format("%Y-%m-%d")
);
let customers = group_by_customer(&[batch]);
let index_customers: Vec<(String, usize)> = customers
.iter()
.map(|(num, c)| (num.clone(), c.cards.len()))
.collect();
let html = IndexTemplate {
customers: index_customers.clone(),
period: period.clone(),
}
.render()
.unwrap();
fs::write(output_dir.join("index.html"), html)?;
let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string();
let customer_count = customers.len();
for (customer_num, customer) in customers {
let prepared = PreparedCustomer::from_customer(customer);
let customer_html = CustomerTemplate {
customer: prepared,
batch_number: batch_number.clone(),
period: period.clone(),
generated_date: generated_date.clone(),
}
.render()
.unwrap();
let filename = format!("customer_{}.html", customer_num);
fs::write(output_dir.join(&filename), customer_html)?;
println!("Genererade {}", filename);
}
println!(
"\nGenererade {} kundfakturor i {:?}",
customer_count, output_dir
);
}
"help" | "--help" | "-h" => {
print_usage(&args[0]);
}
_ => {
eprintln!("Okänt kommando: {}", args[1]);
print_usage(&args[0]);
std::process::exit(1);
}
}
Ok(()) Ok(())
} }
fn print_usage(program: &str) {
eprintln!("Användning: {} <kommando> [argument]", program);
eprintln!();
eprintln!("Kommandon:");
eprintln!(" import <csv-fil> Importera CSV-data till databasen");
eprintln!(" generate <csv-fil> <dir> Generera HTML-fakturor från CSV");
eprintln!(" help Visa denna hjälptext");
}