Compare commits
11 Commits
39b62014b0
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| cf8628becb | |||
| 7c0a61383a | |||
| e9cf2e031f | |||
| 1e9af16325 | |||
| a8ffd0007d | |||
| e2123e4619 | |||
| 429d5d774f | |||
| e71c83538f | |||
| 7a172c6fdb | |||
| cd46368f79 | |||
| 9daa186ff6 |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -5,3 +5,6 @@ output/
|
||||
*.swp
|
||||
*.lock
|
||||
/input
|
||||
config.toml
|
||||
config.dev.toml
|
||||
config.test.toml
|
||||
|
||||
17
AGENTS.md
Normal file
17
AGENTS.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# rusty-petroleum Project Guidelines
|
||||
|
||||
## Git Commit Style
|
||||
- Make small, focused commits while working
|
||||
- Commit changes incrementally to make history easier to follow
|
||||
- Each commit should represent a single logical change
|
||||
- Write clear, concise commit messages describing what changed
|
||||
|
||||
## Issue Management
|
||||
- Use Gitea REST API to close issues when fixing them:
|
||||
```bash
|
||||
curl -X PATCH https://gitea.rowanbrook.net/api/v1/repos/jakob/rusty-petroleum/issues/{issue_number} \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Bearer YOUR_TOKEN" \
|
||||
-d '{"state": "closed"}'
|
||||
```
|
||||
- Or suggest closing issues on Gitea web UI when automatic closing via API is not configured
|
||||
23
Cargo.toml
23
Cargo.toml
@@ -1,10 +1,27 @@
|
||||
[package]
|
||||
name = "invoice-generator"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "invoice_generator"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "invoice-generator"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
askama = "0.15.5"
|
||||
chrono = "0.4.44"
|
||||
chrono = { version = "0.4.44", features = ["serde"] }
|
||||
csv = "1.4.0"
|
||||
serde = "1.0.228"
|
||||
serde = { version = "1.0.228", features = ["derive"] }
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "mysql", "chrono", "bigdecimal"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
toml = "0.8"
|
||||
anyhow = "1"
|
||||
bigdecimal = { version = "0.4", features = ["serde"] }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio-test = "0.4"
|
||||
tempfile = "3"
|
||||
|
||||
262
README.md
262
README.md
@@ -1,2 +1,264 @@
|
||||
# rusty-petroleum
|
||||
|
||||
A petroleum transaction invoice generator with MariaDB backend.
|
||||
|
||||
## Overview
|
||||
|
||||
This project processes petroleum/fuel station transaction data from CSV files and generates customer invoices. It stores transaction data in MariaDB for both invoicing and sales reporting.
|
||||
|
||||
## Features
|
||||
|
||||
- **CSV Import**: Import transaction data from fuel station CSV files into MariaDB
|
||||
- **Invoice Generation**: Generate HTML invoices from CSV data (file-to-file mode)
|
||||
- **Multi-Environment**: Separate databases for development, testing, and production
|
||||
- **Sales Reporting**: Query transactions by customer, product, date range
|
||||
- **Test-Driven Development**: Comprehensive test suite with 45 tests
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
rusty-petroleum/
|
||||
├── Cargo.toml # Rust dependencies
|
||||
├── Cargo.lock # Locked dependency versions
|
||||
├── config.example.toml # Config template
|
||||
├── migrations/ # SQL schema files
|
||||
│ └── 002_schema.sql # Current schema
|
||||
├── input/ # CSV input files
|
||||
├── output/ # Generated invoices
|
||||
├── src/
|
||||
│ ├── lib.rs # Library crate (for testing)
|
||||
│ ├── main.rs # CLI entry point
|
||||
│ ├── config.rs # Configuration loading
|
||||
│ ├── db/ # Database layer
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── connection.rs
|
||||
│ │ ├── models.rs
|
||||
│ │ └── repository.rs
|
||||
│ ├── commands/ # CLI commands
|
||||
│ │ ├── mod.rs
|
||||
│ │ ├── db.rs # db setup/reset
|
||||
│ │ └── import.rs # CSV import
|
||||
│ └── invoice_generator.rs
|
||||
├── templates/ # HTML invoice templates
|
||||
│ ├── index.html
|
||||
│ └── customer.html
|
||||
└── tests/ # Integration tests
|
||||
├── common/ # Test utilities
|
||||
│ ├── mod.rs
|
||||
│ ├── fixtures.rs
|
||||
│ └── test_db.rs
|
||||
├── config_test.rs # Config module tests
|
||||
├── import_test.rs # CSV parsing tests
|
||||
├── models_test.rs # Model tests
|
||||
└── repository_test.rs # Database tests
|
||||
```
|
||||
|
||||
## Database Schema
|
||||
|
||||
### customers
|
||||
| Column | Type | Description |
|
||||
|--------|------|-------------|
|
||||
| id | INT UNSIGNED | Primary key |
|
||||
| customer_number | VARCHAR | Unique customer identifier |
|
||||
| card_report_group | TINYINT UNSIGNED | Customer classification (1=fleet, 3/4=retail) |
|
||||
| created_at | TIMESTAMP | Record creation time |
|
||||
| updated_at | TIMESTAMP | Last update time |
|
||||
|
||||
### cards
|
||||
| Column | Type | Description |
|
||||
|--------|------|-------------|
|
||||
| id | INT UNSIGNED | Primary key |
|
||||
| card_number | VARCHAR | Unique card identifier |
|
||||
| customer_id | INT UNSIGNED | FK to customers |
|
||||
| created_at | TIMESTAMP | Record creation time |
|
||||
| updated_at | TIMESTAMP | Last update time |
|
||||
|
||||
### transactions
|
||||
| Column | Type | Description |
|
||||
|--------|------|-------------|
|
||||
| id | BIGINT UNSIGNED | Primary key |
|
||||
| transaction_date | DATETIME | Transaction timestamp |
|
||||
| batch_number | VARCHAR | Batch identifier |
|
||||
| amount | DECIMAL(10,2) | Transaction amount |
|
||||
| volume | DECIMAL(10,3) | Volume in liters |
|
||||
| price | DECIMAL(8,4) | Price per liter |
|
||||
| quality_code | INT | Product code |
|
||||
| quality_name | VARCHAR | Product name (95 Oktan, Diesel) |
|
||||
| card_number | VARCHAR | Card used (including anonymized) |
|
||||
| station | VARCHAR | Station ID |
|
||||
| terminal | VARCHAR | Terminal ID |
|
||||
| pump | VARCHAR | Pump number |
|
||||
| receipt | VARCHAR | Receipt number |
|
||||
| control_number | VARCHAR | Control/verification number |
|
||||
| customer_id | INT UNSIGNED | FK to customers (NULL for anonymized) |
|
||||
| created_at | TIMESTAMP | Record creation time |
|
||||
|
||||
## Configuration
|
||||
|
||||
Copy the example config and edit with your database credentials:
|
||||
|
||||
```bash
|
||||
cp config.example.toml config.dev.toml # or config.test.toml or config.prod.toml
|
||||
```
|
||||
|
||||
Edit `config.dev.toml`:
|
||||
```toml
|
||||
[database]
|
||||
host = "localhost"
|
||||
port = 3306
|
||||
user = "your_user"
|
||||
password = "your_password"
|
||||
name = "rusty_petroleum_dev"
|
||||
```
|
||||
|
||||
### Environment Config Loading
|
||||
|
||||
Config files are loaded in order:
|
||||
1. `config.toml` (local override, gitignored)
|
||||
2. `config.<env>.toml` (environment-specific, gitignored)
|
||||
3. `config.example.toml` (fallback, tracked)
|
||||
|
||||
### Database Names by Environment
|
||||
|
||||
- `rusty_petroleum_dev` - Development
|
||||
- `rusty_petroleum_test` - Testing
|
||||
- `rusty_petroleum_prod` - Production
|
||||
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Database management
|
||||
cargo run -- db setup --env <dev|test|prod> # Create database and schema
|
||||
cargo run -- db reset --env <dev|test|prod> # Drop and recreate database
|
||||
|
||||
# Import data
|
||||
cargo run -- import <csv-file> --env <dev|test|prod> # Import to database (default: prod)
|
||||
|
||||
# Generate invoices (file-to-file, no database)
|
||||
cargo run -- generate <csv-file> <output-dir>
|
||||
```
|
||||
|
||||
### Usage Examples
|
||||
|
||||
```bash
|
||||
# Setup development database
|
||||
cargo run -- db setup --env dev
|
||||
|
||||
# Import transactions to dev database
|
||||
cargo run -- import input/409.csv --env dev
|
||||
|
||||
# Reset development database
|
||||
cargo run -- db reset --env dev
|
||||
|
||||
# Generate HTML invoices from CSV
|
||||
cargo run -- generate input/409.csv output/
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
The project has a comprehensive test suite with 45 tests covering config, CSV parsing, models, and database operations.
|
||||
|
||||
```bash
|
||||
# Run all tests (lib + integration)
|
||||
cargo test
|
||||
|
||||
# Run only lib/unit tests (fast, no database needed)
|
||||
cargo test --lib
|
||||
|
||||
# Run only integration tests (requires test database)
|
||||
cargo test --tests
|
||||
|
||||
# Run a specific test file
|
||||
cargo test --test config_test
|
||||
cargo test --test import_test
|
||||
cargo test --test repository_test
|
||||
|
||||
# Run a specific test
|
||||
cargo test customer_insert_returns_id
|
||||
|
||||
# Run tests in release mode
|
||||
cargo test --release
|
||||
```
|
||||
|
||||
### Test Database Setup
|
||||
|
||||
Repository tests require a test database. Run setup before testing:
|
||||
|
||||
```bash
|
||||
cargo run -- db setup --env test
|
||||
```
|
||||
|
||||
## Production Build
|
||||
|
||||
Build an optimized binary for production:
|
||||
|
||||
```bash
|
||||
# Build release binary
|
||||
cargo build --release
|
||||
|
||||
# Run the binary
|
||||
./target/release/invoice-generator db setup --env prod
|
||||
./target/release/invoice-generator import data.csv --env prod
|
||||
```
|
||||
|
||||
## Current Status
|
||||
|
||||
### Implemented
|
||||
- [x] Database schema for transactions, customers, cards
|
||||
- [x] CSV import to MariaDB
|
||||
- [x] Multi-environment support (dev/test/prod)
|
||||
- [x] Configuration via TOML files
|
||||
- [x] Invoice generation (HTML output)
|
||||
- [x] Database setup/reset commands
|
||||
- [x] Unit tests (45 tests)
|
||||
|
||||
### TODO
|
||||
- [ ] Sales reporting queries (dashboard/API)
|
||||
- [ ] Customer invoice retrieval from database
|
||||
- [ ] Batch import across multiple CSV files
|
||||
- [ ] CI/CD pipeline
|
||||
|
||||
## Technology Stack
|
||||
|
||||
- **Language**: Rust (Edition 2021)
|
||||
- **Database**: MariaDB
|
||||
- **ORM**: sqlx (async MySQL)
|
||||
- **Templating**: Askama (HTML templates)
|
||||
- **Config**: TOML
|
||||
- **Testing**: tokio-test, tempfile
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Install Rust (if not already installed)
|
||||
```bash
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
|
||||
```
|
||||
|
||||
2. Create database user and grant permissions in MariaDB
|
||||
```sql
|
||||
CREATE USER 'your_user'@'%' IDENTIFIED BY 'your_password';
|
||||
GRANT ALL PRIVILEGES ON rusty_petroleum_dev.* TO 'your_user'@'%';
|
||||
CREATE DATABASE rusty_petroleum_dev;
|
||||
```
|
||||
|
||||
3. Setup configuration
|
||||
```bash
|
||||
cp config.example.toml config.dev.toml
|
||||
# Edit config.dev.toml with your credentials
|
||||
```
|
||||
|
||||
4. Setup database and import data
|
||||
```bash
|
||||
cargo run -- db setup --env dev
|
||||
cargo run -- import input/409.csv --env dev
|
||||
```
|
||||
|
||||
5. Run tests
|
||||
```bash
|
||||
cargo test --lib # Unit tests (fast)
|
||||
cargo test --tests # Integration tests (requires DB)
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
See LICENSE file.
|
||||
|
||||
6
config.example.toml
Normal file
6
config.example.toml
Normal file
@@ -0,0 +1,6 @@
|
||||
[database]
|
||||
host = "localhost"
|
||||
port = 3306
|
||||
user = ""
|
||||
password = ""
|
||||
name = "rusty_petroleum"
|
||||
2
migrations/001_dev.sql
Normal file
2
migrations/001_dev.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- Create development database
|
||||
CREATE DATABASE IF NOT EXISTS rusty_petroleum_dev;
|
||||
2
migrations/001_prod.sql
Normal file
2
migrations/001_prod.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- Create production database
|
||||
CREATE DATABASE IF NOT EXISTS rusty_petroleum;
|
||||
2
migrations/001_test.sql
Normal file
2
migrations/001_test.sql
Normal file
@@ -0,0 +1,2 @@
|
||||
-- Create test database
|
||||
CREATE DATABASE IF NOT EXISTS rusty_petroleum_test;
|
||||
46
migrations/002_schema.sql
Normal file
46
migrations/002_schema.sql
Normal file
@@ -0,0 +1,46 @@
|
||||
-- Schema for rusty_petroleum
|
||||
-- Run after creating the database
|
||||
|
||||
CREATE TABLE IF NOT EXISTS customers (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
customer_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
card_report_group TINYINT UNSIGNED NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_customer_number (customer_number)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS cards (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
card_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
customer_id INT UNSIGNED NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS transactions (
|
||||
id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
transaction_date DATETIME NOT NULL,
|
||||
batch_number VARCHAR(20) NOT NULL,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
volume DECIMAL(10,3) NOT NULL,
|
||||
price DECIMAL(8,4) NOT NULL,
|
||||
quality_code INT NOT NULL,
|
||||
quality_name VARCHAR(50) NOT NULL,
|
||||
card_number VARCHAR(50) NOT NULL,
|
||||
station VARCHAR(20) NOT NULL,
|
||||
terminal VARCHAR(10) NOT NULL,
|
||||
pump VARCHAR(10) NOT NULL,
|
||||
receipt VARCHAR(20) NOT NULL,
|
||||
control_number VARCHAR(20),
|
||||
customer_id INT UNSIGNED NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_transaction_date (transaction_date),
|
||||
INDEX idx_batch_number (batch_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_station (station)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
|
||||
148
src/commands/db.rs
Normal file
148
src/commands/db.rs
Normal file
@@ -0,0 +1,148 @@
|
||||
use crate::config::Config;
|
||||
use crate::db::Repository;
|
||||
use sqlx::mysql::MySqlPoolOptions;
|
||||
|
||||
/// Sets up the database for the specified environment.
|
||||
///
|
||||
/// AI AGENT NOTE: This creates:
|
||||
/// 1. The database (if not exists)
|
||||
/// 2. customers table - stores fleet customers
|
||||
/// 3. cards table - stores known cards linked to customers
|
||||
/// 4. transactions table - stores all transactions
|
||||
///
|
||||
/// Uses CREATE TABLE IF NOT EXISTS, so it's idempotent.
|
||||
/// Note: We connect to the server without specifying a database first,
|
||||
/// then create the database, then create tables in that database.
|
||||
pub async fn run_db_setup(repo: &Repository, config: &Config) -> anyhow::Result<()> {
|
||||
let env = &config.env;
|
||||
println!("Setting up database for environment: {}", env.as_str());
|
||||
println!("Database: {}", env.database_name());
|
||||
|
||||
let database_url = &config.database.connection_url();
|
||||
// Strip database name to connect to server without selecting a database
|
||||
// AI AGENT NOTE: MariaDB requires connecting without a database to create one
|
||||
let base_url = database_url.trim_end_matches(env.database_name());
|
||||
|
||||
let setup_pool = MySqlPoolOptions::new()
|
||||
.max_connections(1)
|
||||
.connect(base_url)
|
||||
.await?;
|
||||
|
||||
println!("Creating database if not exists...");
|
||||
sqlx::query(&format!(
|
||||
"CREATE DATABASE IF NOT EXISTS {}",
|
||||
env.database_name()
|
||||
))
|
||||
.execute(&setup_pool)
|
||||
.await?;
|
||||
println!("Database '{}' ready", env.database_name());
|
||||
|
||||
drop(setup_pool);
|
||||
|
||||
// Now connect to the created database and create tables
|
||||
println!("Creating tables...");
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE IF NOT EXISTS customers (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
customer_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
card_report_group TINYINT UNSIGNED NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_customer_number (customer_number)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(repo.pool())
|
||||
.await?;
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE IF NOT EXISTS cards (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
card_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
customer_id INT UNSIGNED NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(repo.pool())
|
||||
.await?;
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE IF NOT EXISTS transactions (
|
||||
id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
transaction_date DATETIME NOT NULL,
|
||||
batch_number VARCHAR(20) NOT NULL,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
volume DECIMAL(10,3) NOT NULL,
|
||||
price DECIMAL(8,4) NOT NULL,
|
||||
quality_code INT NOT NULL,
|
||||
quality_name VARCHAR(50) NOT NULL,
|
||||
card_number VARCHAR(50) NOT NULL,
|
||||
station VARCHAR(20) NOT NULL,
|
||||
terminal VARCHAR(10) NOT NULL,
|
||||
pump VARCHAR(10) NOT NULL,
|
||||
receipt VARCHAR(20) NOT NULL,
|
||||
control_number VARCHAR(20),
|
||||
customer_id INT UNSIGNED NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_transaction_date (transaction_date),
|
||||
INDEX idx_batch_number (batch_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_station (station)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(repo.pool())
|
||||
.await?;
|
||||
|
||||
println!("Tables created successfully.");
|
||||
println!("Database setup complete!");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Resets the database by dropping and recreating it.
|
||||
///
|
||||
/// AI AGENT NOTE: This is a destructive operation that:
|
||||
/// 1. Drops the database if it exists (loses all data!)
|
||||
/// 2. Creates a fresh database
|
||||
/// 3. Does NOT create tables (run db setup afterwards)
|
||||
///
|
||||
/// Use this when schema changes require a fresh database.
|
||||
pub async fn run_db_reset(config: &Config) -> anyhow::Result<()> {
|
||||
let env = &config.env;
|
||||
println!("Resetting database for environment: {}", env.as_str());
|
||||
println!("Database: {}", env.database_name());
|
||||
|
||||
let database_url = &config.database.connection_url();
|
||||
let base_url = database_url.trim_end_matches(env.database_name());
|
||||
|
||||
let setup_pool = MySqlPoolOptions::new()
|
||||
.max_connections(1)
|
||||
.connect(base_url)
|
||||
.await?;
|
||||
|
||||
println!("Dropping database if exists...");
|
||||
sqlx::query(&format!("DROP DATABASE IF EXISTS {}", env.database_name()))
|
||||
.execute(&setup_pool)
|
||||
.await?;
|
||||
|
||||
println!("Creating database...");
|
||||
sqlx::query(&format!("CREATE DATABASE {}", env.database_name()))
|
||||
.execute(&setup_pool)
|
||||
.await?;
|
||||
|
||||
drop(setup_pool);
|
||||
|
||||
println!("Database '{}' reset complete!", env.database_name());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
393
src/commands/import.rs
Normal file
393
src/commands/import.rs
Normal file
@@ -0,0 +1,393 @@
|
||||
use crate::db::models::{NewCard, NewCustomer, NewTransaction};
|
||||
use crate::db::Repository;
|
||||
use chrono::NaiveDateTime;
|
||||
use csv::ReaderBuilder;
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
|
||||
/// Represents a parsed transaction from CSV fields.
|
||||
///
|
||||
/// AI AGENT NOTE: This is an intermediate struct for CSV parsing.
|
||||
/// It mirrors the CSV column structure and is converted to NewTransaction
|
||||
/// for database insertion.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CsvTransaction {
|
||||
pub date: NaiveDateTime,
|
||||
pub batch_number: String,
|
||||
pub amount: f64,
|
||||
pub volume: f64,
|
||||
pub price: f64,
|
||||
pub quality: i32,
|
||||
pub quality_name: String,
|
||||
pub card_number: String,
|
||||
pub customer_number: String,
|
||||
pub station: String,
|
||||
pub terminal: String,
|
||||
pub pump: String,
|
||||
pub receipt: String,
|
||||
pub card_report_group_number: String,
|
||||
pub control_number: String,
|
||||
}
|
||||
|
||||
/// Parses a CSV record from string slices (pure function for testing).
|
||||
///
|
||||
/// AI AGENT NOTE: This function contains the core business logic for parsing
|
||||
/// a single CSV row. It can be tested without file I/O.
|
||||
///
|
||||
/// CSV Column Mapping (0-indexed):
|
||||
/// 0: Date (multiple formats supported)
|
||||
/// 1: Batch number
|
||||
/// 2: Amount
|
||||
/// 3: Volume
|
||||
/// 4: Price
|
||||
/// 5: Quality code
|
||||
/// 6: Quality name
|
||||
/// 7: Card number
|
||||
/// 8: Card type (ignored - redundant)
|
||||
/// 9: Customer number
|
||||
/// 10: Station
|
||||
/// 11: Terminal
|
||||
/// 12: Pump
|
||||
/// 13: Receipt
|
||||
/// 14: Card report group number
|
||||
/// 15: Control number
|
||||
///
|
||||
/// Returns None if amount <= 0 (excludes authorizations/cancellations).
|
||||
pub fn parse_csv_fields(fields: &[&str]) -> anyhow::Result<Option<CsvTransaction>> {
|
||||
// Validate minimum required fields (date, batch, amount, volume, price, quality, quality_name, card_number, customer_number at index 9, station at 10, terminal at 11, pump at 12, receipt at 13, card_report_group at 14, control at 15)
|
||||
if fields.len() < 16 {
|
||||
anyhow::bail!("Expected at least 16 fields, got {}", fields.len());
|
||||
}
|
||||
|
||||
let date_str = fields.get(0).copied().unwrap_or("");
|
||||
let date = parse_date(date_str)?;
|
||||
|
||||
let amount_str = fields.get(2).copied().unwrap_or("0");
|
||||
let amount: f64 = amount_str.parse().unwrap_or(0.0);
|
||||
|
||||
// Skip zero/negative amounts (authorizations, cancellations)
|
||||
if amount <= 0.0 {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let customer_number = fields.get(9).copied().unwrap_or("").to_string();
|
||||
|
||||
Ok(Some(CsvTransaction {
|
||||
date,
|
||||
batch_number: fields.get(1).copied().unwrap_or("").to_string(),
|
||||
amount,
|
||||
volume: fields.get(3).copied().unwrap_or("0").parse().unwrap_or(0.0),
|
||||
price: fields.get(4).copied().unwrap_or("0").parse().unwrap_or(0.0),
|
||||
quality: fields.get(5).copied().unwrap_or("0").parse().unwrap_or(0),
|
||||
quality_name: fields.get(6).copied().unwrap_or("").to_string(),
|
||||
card_number: fields.get(7).copied().unwrap_or("").to_string(),
|
||||
customer_number,
|
||||
station: fields.get(10).copied().unwrap_or("").to_string(),
|
||||
terminal: fields.get(11).copied().unwrap_or("").to_string(),
|
||||
pump: fields.get(12).copied().unwrap_or("").to_string(),
|
||||
receipt: fields.get(13).copied().unwrap_or("").to_string(),
|
||||
card_report_group_number: fields.get(14).copied().unwrap_or("").to_string(),
|
||||
control_number: fields.get(15).copied().unwrap_or("").to_string(),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Parses a date string, supporting multiple formats.
|
||||
///
|
||||
/// AI AGENT NOTE: Source data may use different date formats:
|
||||
/// - ISO format: "2026-02-01 06:40:14"
|
||||
/// - US format: "02/01/2026 06:40:14 AM"
|
||||
fn parse_date(date_str: &str) -> anyhow::Result<NaiveDateTime> {
|
||||
NaiveDateTime::parse_from_str(date_str, "%Y-%m-%d %H:%M:%S")
|
||||
.or_else(|_| NaiveDateTime::parse_from_str(date_str, "%m/%d/%Y %I:%M:%S %p"))
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse date '{}': {}", date_str, e))
|
||||
}
|
||||
|
||||
/// Checks if a card number is anonymized (contains asterisks).
|
||||
///
|
||||
/// AI AGENT NOTE: Anonymized cards have masked digits like "554477******9952".
|
||||
/// These cards are NOT stored in the cards table - only in transactions.
|
||||
pub fn is_anonymized_card(card_number: &str) -> bool {
|
||||
card_number.contains('*')
|
||||
}
|
||||
|
||||
/// Imports transactions from a CSV file into the database.
|
||||
///
|
||||
/// AI AGENT NOTE: This is the main data import function. It handles:
|
||||
///
|
||||
/// 1. PARSING: Reads tab-separated CSV and extracts transaction data
|
||||
/// 2. FILTERING: Only includes transactions where:
|
||||
/// - amount > 0 (excludes authorizations/cancellations)
|
||||
/// - customer_number is NOT empty (excludes retail transactions)
|
||||
/// 3. COLLECTION: Gathers unique customers and known cards first
|
||||
/// 4. UPSERT: Creates/updates customer and card records
|
||||
/// 5. BATCH INSERT: Inserts transactions in batches of 500
|
||||
///
|
||||
/// Business Rules:
|
||||
/// - Transactions with empty customer_number are stored but not linked to customers
|
||||
/// - Only "known" cards (with full card numbers) are stored in the cards table
|
||||
/// - Anonymized cards (with asterisks) are stored only in transactions.card_number
|
||||
pub async fn run_import(csv_path: &Path, repo: &Repository) -> anyhow::Result<()> {
|
||||
println!("Reading CSV file: {:?}", csv_path);
|
||||
|
||||
let file = File::open(csv_path)?;
|
||||
let mut rdr = ReaderBuilder::new()
|
||||
.delimiter(b'\t')
|
||||
.has_headers(true)
|
||||
.flexible(true)
|
||||
.from_reader(file);
|
||||
|
||||
let mut transactions = Vec::new();
|
||||
let mut seen_customers: HashMap<String, u8> = HashMap::new();
|
||||
let mut seen_cards: HashMap<String, String> = HashMap::new();
|
||||
|
||||
for result in rdr.records() {
|
||||
let record = result?;
|
||||
if let Some(tx) = parse_record(&record)? {
|
||||
if !tx.customer_number.is_empty() {
|
||||
let card_report_group: u8 = tx.card_report_group_number.parse().unwrap_or(0);
|
||||
if !seen_customers.contains_key(&tx.customer_number) {
|
||||
seen_customers.insert(tx.customer_number.clone(), card_report_group);
|
||||
}
|
||||
if !seen_cards.contains_key(&tx.card_number) {
|
||||
seen_cards.insert(tx.card_number.clone(), tx.customer_number.clone());
|
||||
}
|
||||
}
|
||||
transactions.push(tx);
|
||||
}
|
||||
}
|
||||
|
||||
println!("Found {} transactions", transactions.len());
|
||||
println!("Unique customers: {}", seen_customers.len());
|
||||
println!("Unique known cards: {}", seen_cards.len());
|
||||
|
||||
println!("\nImporting customers...");
|
||||
let mut customer_ids: HashMap<String, u32> = HashMap::new();
|
||||
for (customer_number, card_report_group) in &seen_customers {
|
||||
let new_customer = NewCustomer {
|
||||
customer_number: customer_number.clone(),
|
||||
card_report_group: *card_report_group,
|
||||
};
|
||||
let id = repo.upsert_customer(&new_customer).await?;
|
||||
customer_ids.insert(customer_number.clone(), id);
|
||||
println!(" Customer {} -> id {}", customer_number, id);
|
||||
}
|
||||
|
||||
println!("\nImporting cards...");
|
||||
let mut card_ids: HashMap<String, u32> = HashMap::new();
|
||||
for (card_number, customer_number) in &seen_cards {
|
||||
if let Some(&customer_id) = customer_ids.get(customer_number) {
|
||||
let new_card = NewCard {
|
||||
card_number: card_number.clone(),
|
||||
customer_id,
|
||||
};
|
||||
let id = repo.upsert_card(&new_card).await?;
|
||||
card_ids.insert(card_number.clone(), id);
|
||||
println!(" Card {} -> customer {} -> id {}", card_number, customer_number, id);
|
||||
}
|
||||
}
|
||||
|
||||
println!("\nImporting transactions...");
|
||||
let batch_size = 500;
|
||||
let mut total_inserted = 0u64;
|
||||
let mut batch: Vec<NewTransaction> = Vec::with_capacity(batch_size);
|
||||
|
||||
for tx in transactions {
|
||||
let customer_id = customer_ids.get(&tx.customer_number).copied();
|
||||
|
||||
let new_tx = NewTransaction {
|
||||
transaction_date: tx.date,
|
||||
batch_number: tx.batch_number,
|
||||
amount: tx.amount,
|
||||
volume: tx.volume,
|
||||
price: tx.price,
|
||||
quality_code: tx.quality,
|
||||
quality_name: tx.quality_name,
|
||||
card_number: tx.card_number,
|
||||
station: tx.station,
|
||||
terminal: tx.terminal,
|
||||
pump: tx.pump,
|
||||
receipt: tx.receipt,
|
||||
control_number: if tx.control_number.is_empty() { None } else { Some(tx.control_number) },
|
||||
customer_id,
|
||||
};
|
||||
|
||||
batch.push(new_tx);
|
||||
|
||||
if batch.len() >= batch_size {
|
||||
let inserted = repo.insert_transactions_batch(&batch).await?;
|
||||
total_inserted += inserted;
|
||||
println!(" Inserted {} transactions (total: {})", inserted, total_inserted);
|
||||
batch.clear();
|
||||
}
|
||||
}
|
||||
|
||||
if !batch.is_empty() {
|
||||
let inserted = repo.insert_transactions_batch(&batch).await?;
|
||||
total_inserted += inserted;
|
||||
println!(" Inserted {} transactions (total: {})", inserted, total_inserted);
|
||||
}
|
||||
|
||||
println!("\nDone! Imported {} transactions", total_inserted);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_field(record: &csv::StringRecord, index: usize) -> &str {
|
||||
record.get(index).unwrap_or("")
|
||||
}
|
||||
|
||||
/// Parses a single record from the CSV file.
|
||||
///
|
||||
/// AI AGENT NOTE: Returns None if:
|
||||
/// - amount <= 0 (excludes authorizations/cancellations)
|
||||
/// - date parsing fails
|
||||
fn parse_record(record: &csv::StringRecord) -> anyhow::Result<Option<CsvTransaction>> {
|
||||
let fields: Vec<&str> = (0..16)
|
||||
.map(|i| get_field(record, i))
|
||||
.collect();
|
||||
parse_csv_fields(&fields)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Column layout exercised below (tab-separated, 16 fields). Indices that
    // the assertions pin down: 0 = date, 1 = batch_number, 2 = amount,
    // 3 = volume, 4 = price, 5 = quality, 6 = quality_name, 7 = card_number,
    // 9 = customer_number, 14 = card_report_group_number, 15 = control_number.
    // Indices 8 and 10-13 are presumably card_type/station/terminal/pump/
    // receipt — not asserted here, confirm against the parser.

    #[test]
    fn parse_valid_record_with_known_customer() {
        let fields = [
            "2026-02-01 10:15:16", "409", "559.26", "35.85", "15.60",
            "1001", "95 Oktan", "7825017523017000642", "type",
            "1861", "97254", "1", "2", "000910", "1", ""
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_some());

        let tx = result.unwrap();
        assert_eq!(tx.batch_number, "409");
        assert_eq!(tx.amount, 559.26);
        assert_eq!(tx.volume, 35.85);
        assert_eq!(tx.quality, 1001);
        assert_eq!(tx.quality_name, "95 Oktan");
        assert_eq!(tx.card_number, "7825017523017000642");
        assert_eq!(tx.customer_number, "1861");
    }

    #[test]
    fn parse_record_with_empty_customer_number() {
        // Anonymous retail row: no customer number, masked card number.
        let fields = [
            "2026-02-01 06:40:14", "409", "267.23", "17.13", "15.60",
            "1001", "95 Oktan", "554477******9952", "type",
            "", "97254", "1", "2", "000898", "4", "756969"
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_some());

        let tx = result.unwrap();
        assert_eq!(tx.customer_number, "");
        assert_eq!(tx.card_number, "554477******9952");
        assert!(is_anonymized_card(&tx.card_number));
    }

    #[test]
    fn parse_zero_amount_returns_none() {
        // Zero-amount rows are filtered out (Ok(None)), not treated as errors.
        let fields = [
            "2026-02-01 06:40:14", "409", "0.00", "0.00", "15.60",
            "1001", "95 Oktan", "554477******9952", "type",
            "", "97254", "1", "2", "000898", "4", "756969"
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_none());
    }

    #[test]
    fn parse_negative_amount_returns_none() {
        // Refunds / reversals (negative amounts) are likewise skipped.
        let fields = [
            "2026-02-01 06:40:14", "409", "-50.00", "-3.00", "15.60",
            "1001", "95 Oktan", "7825017523017000642", "type",
            "1861", "97254", "1", "2", "000898", "1", ""
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_none());
    }

    #[test]
    fn parse_us_date_format() {
        // The parser also accepts the US "MM/DD/YYYY hh:mm:ss AM/PM" format
        // and normalizes it to the same NaiveDateTime.
        let fields = [
            "02/01/2026 10:15:16 AM", "409", "559.26", "35.85", "15.60",
            "1001", "95 Oktan", "7825017523017000642", "type",
            "1861", "97254", "1", "2", "000910", "1", ""
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_some());

        let tx = result.unwrap();
        assert_eq!(tx.date.format("%Y-%m-%d").to_string(), "2026-02-01");
    }

    #[test]
    fn parse_diesel_product() {
        let fields = [
            "2026-02-01 10:05:16", "409", "543.22", "31.40", "17.30",
            "4", "Diesel", "673706*********0155", "type",
            "", "97254", "1", "2", "000909", "4", "D00824"
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_some());

        let tx = result.unwrap();
        assert_eq!(tx.quality_name, "Diesel");
        assert_eq!(tx.quality, 4);
        assert_eq!(tx.control_number, "D00824");
    }

    #[test]
    fn parse_missing_fields_defaults_to_empty() {
        // Only date, batch and amount are present; numeric fields default to
        // zero and string fields to "".
        let fields: [&str; 16] = [
            "2026-02-01 10:15:16", "409", "559.26", "", "",
            "", "", "", "", "", "", "", "", "", "", ""
        ];

        let result = parse_csv_fields(&fields).unwrap();
        assert!(result.is_some());

        let tx = result.unwrap();
        assert_eq!(tx.volume, 0.0);
        assert_eq!(tx.price, 0.0);
        assert_eq!(tx.quality, 0);
        assert_eq!(tx.quality_name, "");
    }

    #[test]
    fn parse_too_few_fields_returns_none() {
        let fields: [&str; 4] = ["2026-02-01 10:15:16", "409", "559.26", "35.85"];

        let result = parse_csv_fields(&fields);
        assert!(result.is_err()); // Date parsing succeeds but other fields missing
    }

    #[test]
    fn is_anonymized_card_detects_asterisks() {
        assert!(is_anonymized_card("554477******9952"));
        assert!(is_anonymized_card("673706*********0155"));
        assert!(!is_anonymized_card("7825017523017000642"));
    }

    #[test]
    fn card_report_group_parsed_correctly() {
        let fields = [
            "2026-02-01 10:15:16", "409", "559.26", "35.85", "15.60",
            "1001", "95 Oktan", "7825017523017000642", "type",
            "1861", "97254", "1", "2", "000910", "1", ""
        ];

        let tx = parse_csv_fields(&fields).unwrap().unwrap();
        assert_eq!(tx.card_report_group_number, "1");
    }
}
|
||||
5
src/commands/mod.rs
Normal file
5
src/commands/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
//! Command layer: one submodule per CLI subcommand.

pub mod db;
pub mod import;

// Re-exported so callers (e.g. main.rs) can reference the entry points
// without spelling out the submodule paths.
pub use db::{run_db_reset, run_db_setup};
pub use import::run_import;
|
||||
178
src/config.rs
Normal file
178
src/config.rs
Normal file
@@ -0,0 +1,178 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Environment selection for multi-database setup.
///
/// AI AGENT NOTE: This enum controls which database configuration is loaded.
/// Each environment maps to a different database name:
/// - Prod: rusty_petroleum (production data)
/// - Dev: rusty_petroleum_dev (development)
/// - Test: rusty_petroleum_test (testing)
///
/// The environment is set via the --env CLI flag and defaults to Prod.
/// Parsing from the CLI string goes through the FromStr impl below.
#[derive(Debug, Clone, Default, PartialEq)]
pub enum Env {
    /// Production environment - default for safety (requires explicit --env for dev/test)
    #[default]
    Prod,
    /// Development environment - rusty_petroleum_dev
    Dev,
    /// Testing environment - rusty_petroleum_test
    Test,
}
|
||||
|
||||
impl Env {
|
||||
/// Returns the environment name as a string for CLI/config file naming.
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
Env::Prod => "prod",
|
||||
Env::Dev => "dev",
|
||||
Env::Test => "test",
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the database name for this environment.
|
||||
///
|
||||
/// AI AGENT NOTE: Database naming convention:
|
||||
/// - Production: rusty_petroleum (no suffix)
|
||||
/// - Development: rusty_petroleum_dev
|
||||
/// - Testing: rusty_petroleum_test
|
||||
pub fn database_name(&self) -> &str {
|
||||
match self {
|
||||
Env::Prod => "rusty_petroleum",
|
||||
Env::Dev => "rusty_petroleum_dev",
|
||||
Env::Test => "rusty_petroleum_test",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for Env {
|
||||
type Err = String;
|
||||
|
||||
/// Parses environment from CLI argument.
|
||||
/// Accepts both short and long forms for flexibility.
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"prod" | "production" => Ok(Env::Prod),
|
||||
"dev" | "development" => Ok(Env::Dev),
|
||||
"test" | "testing" => Ok(Env::Test),
|
||||
_ => Err(format!("Unknown environment: {}", s)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Root configuration struct containing environment and database settings.
#[derive(Debug, Clone)]
pub struct Config {
    /// Active environment; stamped onto the struct by Config::load after
    /// deserialization (the TOML file itself carries no environment field).
    pub env: Env,
    /// Connection settings for the selected database.
    pub database: DatabaseConfig,
}
|
||||
|
||||
/// Database connection configuration (the [database] section of config.toml).
#[derive(Debug, Clone)]
pub struct DatabaseConfig {
    pub host: String,
    pub port: u16,
    /// MySQL user name.
    pub user: String,
    /// May be empty; connection_url() then omits the credential separator.
    pub password: String,
    /// Database (schema) name.
    pub name: String,
}
|
||||
|
||||
impl DatabaseConfig {
|
||||
/// Builds a MySQL connection URL from configuration.
|
||||
///
|
||||
/// AI AGENT NOTE: Handles empty password by omitting it from URL.
|
||||
/// This allows connections without passwords (e.g., local development).
|
||||
pub fn connection_url(&self) -> String {
|
||||
if self.password.is_empty() {
|
||||
format!(
|
||||
"mysql://{}@{}:{}/{}",
|
||||
self.user, self.host, self.port, self.name
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"mysql://{}:{}@{}:{}/{}",
|
||||
self.user, self.password, self.host, self.port, self.name
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
    /// Loads configuration for the specified environment.
    ///
    /// AI AGENT NOTE: Config file loading order (first existing file wins):
    /// 1. config.toml - local override (gitignored, for personal overrides)
    /// 2. config.<env>.toml - environment-specific (gitignored)
    /// 3. config.example.toml - fallback template (tracked in git)
    ///
    /// This allows:
    /// - Committed example config as reference
    /// - Environment-specific configs for different developers
    /// - Local overrides without modifying tracked files
    ///
    /// NOTE(review): because config.toml outranks config.<env>.toml, a local
    /// override applies to EVERY environment, including --env test — confirm
    /// that shadowing the env-specific file is intended.
    pub fn load(env: Env) -> anyhow::Result<Self> {
        let config_path = Path::new("config.toml");
        let example_path = Path::new("config.example.toml");

        // Env-specific file name, e.g. "config.dev.toml".
        let env_config_filename = format!("config.{}.toml", env.as_str());
        let env_config_path = Path::new(&env_config_filename);

        // First existing candidate wins, in the priority order above.
        let path = if config_path.exists() {
            config_path
        } else if env_config_path.exists() {
            env_config_path
        } else if example_path.exists() {
            example_path
        } else {
            return Err(anyhow::anyhow!(
                "No configuration file found. Create config.example.toml or config.toml"
            ));
        };

        Self::load_from_path(path, env)
    }

    /// Loads configuration from a specific file path.
    ///
    /// Reads and TOML-deserializes the file, then stamps the requested
    /// environment onto the result (the file itself has no env field).
    /// Read and parse failures are wrapped with the offending path.
    pub fn load_from_path(path: &Path, env: Env) -> anyhow::Result<Self> {
        let contents = fs::read_to_string(path)
            .map_err(|e| anyhow::anyhow!("Failed to read config file {:?}: {}", path, e))?;

        let config: TomlConfig = toml::from_str(&contents)
            .map_err(|e| anyhow::anyhow!("Failed to parse config file {:?}: {}", path, e))?;

        let mut result: Config = config.into();
        result.env = env;
        Ok(result)
    }
}
|
||||
|
||||
/// Intermediate struct for TOML deserialization.
/// AI AGENT NOTE: This mirrors the [database] section of config.toml.
/// Converted into the runtime Config by the From impl below.
#[derive(serde::Deserialize)]
struct TomlConfig {
    database: TomlDatabaseConfig,
}
|
||||
|
||||
/// Raw [database] table; field names must match the TOML keys exactly.
#[derive(serde::Deserialize)]
struct TomlDatabaseConfig {
    host: String,
    port: u16,
    user: String,
    password: String,
    name: String,
}
|
||||
|
||||
impl From<TomlConfig> for Config {
|
||||
fn from(toml: TomlConfig) -> Self {
|
||||
Config {
|
||||
env: Env::default(),
|
||||
database: DatabaseConfig {
|
||||
host: toml.database.host,
|
||||
port: toml.database.port,
|
||||
user: toml.database.user,
|
||||
password: toml.database.password,
|
||||
name: toml.database.name,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
11
src/db/connection.rs
Normal file
11
src/db/connection.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
use sqlx::mysql::MySqlPoolOptions;
|
||||
use sqlx::MySqlPool;
|
||||
|
||||
/// Opens a MySQL connection pool for the given connection URL.
///
/// The pool is capped at 10 concurrent connections. Connection failures
/// (bad URL, unreachable server) are propagated to the caller via `?`.
pub async fn create_pool(database_url: &str) -> anyhow::Result<MySqlPool> {
    let pool = MySqlPoolOptions::new()
        .max_connections(10)
        .connect(database_url)
        .await?;

    Ok(pool)
}
|
||||
7
src/db/mod.rs
Normal file
7
src/db/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
pub mod connection;
|
||||
pub mod models;
|
||||
pub mod repository;
|
||||
|
||||
pub use connection::create_pool;
|
||||
pub use models::{Card, Customer, NewCard, NewCustomer, Transaction};
|
||||
pub use repository::Repository;
|
||||
105
src/db/models.rs
Normal file
105
src/db/models.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use bigdecimal::BigDecimal;
|
||||
use chrono::{DateTime, NaiveDateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::FromRow;
|
||||
|
||||
/// Represents a fleet/corporate customer in the system.
///
/// AI AGENT NOTE: Customers are identified by customer_number and have
/// associated cards. Not all transactions have a customer (retail/anonymous).
/// The card_report_group indicates customer classification:
/// - 1: Fleet customers (have customer_number)
/// - 3, 4: Retail customers (no customer_number)
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Customer {
    pub id: u32,                  // surrogate primary key
    pub customer_number: String,  // business identifier from the source data
    pub card_report_group: u8,    // classification code, see note above
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
/// Input struct for creating a new customer during import.
/// Carries only the caller-supplied columns; id and timestamps are
/// assigned by the database.
#[derive(Debug, Clone)]
pub struct NewCustomer {
    pub customer_number: String,
    pub card_report_group: u8,
}
|
||||
|
||||
/// Represents a fuel card belonging to a customer.
///
/// AI AGENT NOTE: This table stores the authoritative mapping from card_number
/// to customer. Only "known" cards (cards belonging to fleet customers) are
/// stored here. Anonymized cards (with asterisks like "554477******9952") are
/// NOT stored in this table - they appear directly in transactions.card_number.
///
/// Design rationale: Cards table contains ONLY known cards. This keeps the
/// cards table small and ensures every card has a valid customer relationship.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Card {
    pub id: u32,              // surrogate primary key
    pub card_number: String,  // full (non-anonymized) card number
    pub customer_id: u32,     // FK to customers.id; never NULL by design
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
|
||||
|
||||
/// Input struct for creating a new card during import.
/// id and timestamps are assigned by the database.
#[derive(Debug, Clone)]
pub struct NewCard {
    pub card_number: String,
    pub customer_id: u32,
}
|
||||
|
||||
/// Represents a fuel transaction in the database.
///
/// AI AGENT NOTE: This table stores ALL transactions, both anonymous and known:
/// - card_number: Always populated (even for anonymized cards)
/// - customer_id: NULL for anonymous transactions, FK to customers for fleet
///
/// To find a customer's transactions, use:
/// SELECT * FROM transactions WHERE customer_id = <customer_id>
///
/// To find all transactions for a card:
/// SELECT * FROM transactions WHERE card_number = '<card_number>'
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Transaction {
    pub id: u64,
    // Local wall-clock time as recorded in the source feed (no timezone).
    pub transaction_date: NaiveDateTime,
    pub batch_number: String,
    // Monetary/volume columns use BigDecimal for exact decimal arithmetic.
    pub amount: BigDecimal,
    pub volume: BigDecimal,
    pub price: BigDecimal,
    pub quality_code: i32,
    pub quality_name: String,
    pub card_number: String,
    pub station: String,
    pub terminal: String,
    pub pump: String,
    pub receipt: String,
    pub control_number: Option<String>,
    pub customer_id: Option<u32>, // NULL for anonymized transactions
    pub created_at: DateTime<Utc>,
}
|
||||
|
||||
/// Input struct for inserting a new transaction.
///
/// AI AGENT NOTE: Uses f64 for numeric fields during construction (from CSV parsing),
/// but BigDecimal is used in the database for precision.
#[derive(Debug, Clone)]
pub struct NewTransaction {
    pub transaction_date: NaiveDateTime,
    pub batch_number: String,
    pub amount: f64,
    pub volume: f64,
    pub price: f64,
    pub quality_code: i32,
    pub quality_name: String,
    pub card_number: String,
    pub station: String,
    pub terminal: String,
    pub pump: String,
    pub receipt: String,
    /// None when the source row has no control number.
    pub control_number: Option<String>,
    /// None for anonymous (retail) transactions.
    pub customer_id: Option<u32>,
}
|
||||
427
src/db/repository.rs
Normal file
427
src/db/repository.rs
Normal file
@@ -0,0 +1,427 @@
|
||||
use crate::db::models::{Card, Customer, NewCard, NewCustomer, NewTransaction, Transaction};
|
||||
use bigdecimal::BigDecimal;
|
||||
use sqlx::MySqlPool;
|
||||
|
||||
/// Repository for database operations.
///
/// AI AGENT NOTE: This is the main data access layer. All database operations
/// should go through this struct. It wraps a MySQL connection pool and provides
/// methods for CRUD operations on customers, cards, and transactions.
pub struct Repository {
    // Cloneable sqlx pool; shared by all query methods.
    pool: MySqlPool,
}
|
||||
|
||||
impl Repository {
|
||||
pub fn new(pool: MySqlPool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
pub fn pool(&self) -> &MySqlPool {
|
||||
&self.pool
|
||||
}
|
||||
|
||||
/// Upserts a customer by customer_number.
|
||||
///
|
||||
/// AI AGENT NOTE: Uses ON DUPLICATE KEY UPDATE to handle re-imports.
|
||||
/// If customer exists, only card_report_group is updated (it's derived from
|
||||
/// transaction data and may differ between batches).
|
||||
pub async fn upsert_customer(&self, customer: &NewCustomer) -> anyhow::Result<u32> {
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO customers (customer_number, card_report_group)
|
||||
VALUES (?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
card_report_group = VALUES(card_report_group),
|
||||
updated_at = CURRENT_TIMESTAMP
|
||||
"#,
|
||||
)
|
||||
.bind(&customer.customer_number)
|
||||
.bind(customer.card_report_group)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
let row: (u32,) = sqlx::query_as(
|
||||
"SELECT id FROM customers WHERE customer_number = ?",
|
||||
)
|
||||
.bind(&customer.customer_number)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(row.0)
|
||||
}
|
||||
|
||||
/// Finds a customer by their customer_number.
|
||||
pub async fn find_customer_by_number(
|
||||
&self,
|
||||
customer_number: &str,
|
||||
) -> anyhow::Result<Option<Customer>> {
|
||||
let result = sqlx::query_as(
|
||||
"SELECT id, customer_number, card_report_group, created_at, updated_at
|
||||
FROM customers
|
||||
WHERE customer_number = ?",
|
||||
)
|
||||
.bind(customer_number)
|
||||
.fetch_optional(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Upserts a card by card_number.
|
||||
///
|
||||
/// AI AGENT NOTE: Cards are only created for known customers (fleet accounts).
|
||||
/// Anonymized cards are NOT inserted here - they only appear in transactions.
|
||||
///
|
||||
/// Design: This ensures cards.customer_id is always NOT NULL, enforcing
|
||||
/// the business rule that every card must belong to a customer.
|
||||
pub async fn upsert_card(&self, card: &NewCard) -> anyhow::Result<u32> {
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO cards (card_number, customer_id)
|
||||
VALUES (?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
customer_id = VALUES(customer_id),
|
||||
updated_at = CURRENT_TIMESTAMP
|
||||
"#,
|
||||
)
|
||||
.bind(&card.card_number)
|
||||
.bind(card.customer_id)
|
||||
.execute(&self.pool)
|
||||
.await?;
|
||||
|
||||
let row: (u32,) = sqlx::query_as(
|
||||
"SELECT id FROM cards WHERE card_number = ?",
|
||||
)
|
||||
.bind(&card.card_number)
|
||||
.fetch_one(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(row.0)
|
||||
}
|
||||
|
||||
/// Finds a card by card_number.
|
||||
///
|
||||
/// AI AGENT NOTE: Returns None for anonymized cards (e.g., "554477******9952")
|
||||
/// since these are not stored in the cards table.
|
||||
pub async fn find_card_by_number(&self, card_number: &str) -> anyhow::Result<Option<Card>> {
|
||||
let result = sqlx::query_as(
|
||||
"SELECT id, card_number, customer_id, created_at, updated_at
|
||||
FROM cards
|
||||
WHERE card_number = ?",
|
||||
)
|
||||
.bind(card_number)
|
||||
.fetch_optional(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Inserts multiple transactions in a single batch for performance.
|
||||
///
|
||||
/// AI AGENT NOTE: Uses bulk INSERT for efficiency. The batch size is
|
||||
/// controlled by the caller (typically 500 rows per batch).
|
||||
///
|
||||
/// IMPORTANT: This constructs raw SQL with escaped values. While sqlx doesn't
|
||||
/// support parameterized bulk insert, we escape single quotes to prevent SQL
|
||||
/// injection in string fields.
|
||||
pub async fn insert_transactions_batch(
|
||||
&self,
|
||||
transactions: &[NewTransaction],
|
||||
) -> anyhow::Result<u64> {
|
||||
if transactions.is_empty() {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let mut query = String::from(
|
||||
"INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, control_number, customer_id) VALUES ",
|
||||
);
|
||||
|
||||
let mut values = Vec::new();
|
||||
for tx in transactions {
|
||||
values.push(format!(
|
||||
"('{}', '{}', {}, {}, {}, {}, '{}', '{}', '{}', '{}', '{}', '{}', {}, {})",
|
||||
tx.transaction_date.format("%Y-%m-%d %H:%M:%S"),
|
||||
tx.batch_number,
|
||||
tx.amount,
|
||||
tx.volume,
|
||||
tx.price,
|
||||
tx.quality_code,
|
||||
tx.quality_name.replace("'", "''"),
|
||||
tx.card_number.replace("'", "''"),
|
||||
tx.station,
|
||||
tx.terminal,
|
||||
tx.pump,
|
||||
tx.receipt,
|
||||
tx.control_number.as_ref().map(|s| format!("'{}'", s.replace("'", "''"))).unwrap_or_else(|| "NULL".to_string()),
|
||||
tx.customer_id.map(|id| id.to_string()).unwrap_or_else(|| "NULL".to_string()),
|
||||
));
|
||||
}
|
||||
|
||||
query.push_str(&values.join(", "));
|
||||
|
||||
let result = sqlx::query(&query).execute(&self.pool).await?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
|
||||
/// Retrieves all transactions for a customer within a date range.
|
||||
///
|
||||
/// AI AGENT NOTE: Only returns transactions for known customers (customer_id IS NOT NULL).
|
||||
/// Anonymous transactions are excluded from invoices.
|
||||
pub async fn get_customer_invoice(
|
||||
&self,
|
||||
customer_number: &str,
|
||||
start_date: &str,
|
||||
end_date: &str,
|
||||
) -> anyhow::Result<Vec<Transaction>> {
|
||||
let result = sqlx::query_as(
|
||||
r#"
|
||||
SELECT t.id, t.transaction_date, t.batch_number, t.amount, t.volume, t.price,
|
||||
t.quality_code, t.quality_name, t.card_number, t.station, t.terminal,
|
||||
t.pump, t.receipt, t.control_number, t.customer_id, t.created_at
|
||||
FROM transactions t
|
||||
JOIN customers c ON t.customer_id = c.id
|
||||
WHERE c.customer_number = ?
|
||||
AND t.transaction_date >= ?
|
||||
AND t.transaction_date <= ?
|
||||
ORDER BY t.transaction_date
|
||||
"#,
|
||||
)
|
||||
.bind(customer_number)
|
||||
.bind(start_date)
|
||||
.bind(end_date)
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Gets sales summary grouped by product (quality_name).
|
||||
///
|
||||
/// AI AGENT NOTE: Includes ALL transactions (both anonymous and known).
|
||||
/// Useful for overall sales reporting.
|
||||
pub async fn get_sales_summary_by_product(
|
||||
&self,
|
||||
start_date: &str,
|
||||
end_date: &str,
|
||||
) -> anyhow::Result<Vec<ProductSummary>> {
|
||||
let result = sqlx::query_as(
|
||||
r#"
|
||||
SELECT quality_name, COUNT(*) as tx_count, SUM(amount) as total_amount, SUM(volume) as total_volume
|
||||
FROM transactions
|
||||
WHERE transaction_date >= ? AND transaction_date <= ?
|
||||
GROUP BY quality_name
|
||||
"#,
|
||||
)
|
||||
.bind(start_date)
|
||||
.bind(end_date)
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Gets sales summary grouped by customer.
|
||||
///
|
||||
/// AI AGENT NOTE: Only includes known customers (JOIN with customers table).
|
||||
/// Anonymous transactions are excluded since they have no customer_id.
|
||||
pub async fn get_sales_summary_by_customer(
|
||||
&self,
|
||||
start_date: &str,
|
||||
end_date: &str,
|
||||
) -> anyhow::Result<Vec<CustomerSummary>> {
|
||||
let result = sqlx::query_as(
|
||||
r#"
|
||||
SELECT c.customer_number, COUNT(*) as tx_count, SUM(t.amount) as total_amount, SUM(t.volume) as total_volume
|
||||
FROM transactions t
|
||||
JOIN customers c ON t.customer_id = c.id
|
||||
WHERE t.transaction_date >= ? AND t.transaction_date <= ?
|
||||
GROUP BY c.customer_number
|
||||
ORDER BY total_amount DESC
|
||||
"#,
|
||||
)
|
||||
.bind(start_date)
|
||||
.bind(end_date)
|
||||
.fetch_all(&self.pool)
|
||||
.await?;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
/// Summary of sales by product (quality_name).
///
/// AI AGENT NOTE: Used for reporting total sales per product type.
/// Column names mirror the aliases in get_sales_summary_by_product.
#[derive(Debug, sqlx::FromRow)]
pub struct ProductSummary {
    pub quality_name: String,
    pub tx_count: i64,             // COUNT(*) per product
    pub total_amount: BigDecimal,  // SUM(amount)
    pub total_volume: BigDecimal,  // SUM(volume)
}
|
||||
|
||||
/// Summary of sales by customer.
///
/// AI AGENT NOTE: Used for reporting total sales per fleet customer.
/// Column names mirror the aliases in get_sales_summary_by_customer.
#[derive(Debug, sqlx::FromRow)]
pub struct CustomerSummary {
    pub customer_number: String,
    pub tx_count: i64,             // COUNT(*) per customer
    pub total_amount: BigDecimal,  // SUM(amount)
    pub total_volume: BigDecimal,  // SUM(volume)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use sqlx::Row;

    // These helpers require a reachable MySQL instance (DATABASE_URL or the
    // test-environment config file) and run their statements inside a
    // transaction that is rolled back at the end.

    /// Helper to create a test repository with a transaction.
    /// Returns the repository and the transaction - rollback when done.
    ///
    /// NOTE(review): the Repository is constructed from the pool, so any
    /// repo method calls made inside `test_fn` run OUTSIDE `tx` and are NOT
    /// covered by the rollback; only the `*_tx` helpers below are. Confirm
    /// this split is intended.
    pub async fn with_test_tx<F, T>(test_fn: F) -> anyhow::Result<T>
    where
        F: FnOnce(&Repository, &mut sqlx::Transaction<'_, sqlx::MySql>) -> std::pin::Pin<Box<dyn std::future::Future<Output = anyhow::Result<T>>>>,
    {
        // Prefer DATABASE_URL; otherwise fall back to the Test env config.
        let pool = crate::db::create_pool(&std::env::var("DATABASE_URL").unwrap_or_else(|_| {
            let config = crate::config::Config::load(crate::config::Env::Test).unwrap();
            config.database.connection_url()
        })).await?;

        let mut tx = pool.begin().await?;
        let repo = Repository::new(pool);

        let result = test_fn(&repo, &mut tx).await;

        // Always roll back so test data never persists.
        tx.rollback().await?;
        result
    }

    /// Inserts a customer using a transaction (for testing).
    pub async fn insert_customer_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        customer: &NewCustomer,
    ) -> anyhow::Result<u32> {
        sqlx::query(
            "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
        )
        .bind(&customer.customer_number)
        .bind(customer.card_report_group)
        .execute(&mut **tx)
        .await?;

        // LAST_INSERT_ID() is per-connection, and the transaction pins one
        // connection, so this reliably returns the id inserted above.
        let row = sqlx::query("SELECT LAST_INSERT_ID() as id")
            .fetch_one(&mut **tx)
            .await?;

        Ok(row.get("id"))
    }

    /// Finds a customer by ID using a transaction (for testing).
    pub async fn find_customer_by_id_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        id: u32,
    ) -> anyhow::Result<Option<Customer>> {
        let result = sqlx::query_as::<_, Customer>(
            "SELECT id, customer_number, card_report_group, created_at, updated_at
             FROM customers WHERE id = ?",
        )
        .bind(id)
        .fetch_optional(&mut **tx)
        .await?;

        Ok(result)
    }

    /// Inserts a card using a transaction (for testing).
    pub async fn insert_card_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        card: &NewCard,
    ) -> anyhow::Result<u32> {
        sqlx::query(
            "INSERT INTO cards (card_number, customer_id) VALUES (?, ?)",
        )
        .bind(&card.card_number)
        .bind(card.customer_id)
        .execute(&mut **tx)
        .await?;

        let row = sqlx::query("SELECT LAST_INSERT_ID() as id")
            .fetch_one(&mut **tx)
            .await?;

        Ok(row.get("id"))
    }

    /// Finds a card by card_number using a transaction (for testing).
    pub async fn find_card_by_number_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        card_number: &str,
    ) -> anyhow::Result<Option<Card>> {
        let result = sqlx::query_as::<_, Card>(
            "SELECT id, card_number, customer_id, created_at, updated_at
             FROM cards WHERE card_number = ?",
        )
        .bind(card_number)
        .fetch_optional(&mut **tx)
        .await?;

        Ok(result)
    }

    /// Inserts a single transaction using a transaction (for testing).
    ///
    /// NOTE(review): builds raw SQL; only quality_name, card_number and
    /// control_number are quote-escaped — acceptable only because test
    /// fixtures are trusted, do not reuse for real input.
    pub async fn insert_transaction_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        transaction: &NewTransaction,
    ) -> anyhow::Result<u64> {
        sqlx::query(&format!(
            "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, control_number, customer_id) VALUES ('{}', '{}', {}, {}, {}, {}, '{}', '{}', '{}', '{}', '{}', '{}', {}, {})",
            transaction.transaction_date.format("%Y-%m-%d %H:%M:%S"),
            transaction.batch_number,
            transaction.amount,
            transaction.volume,
            transaction.price,
            transaction.quality_code,
            transaction.quality_name.replace("'", "''"),
            transaction.card_number.replace("'", "''"),
            transaction.station,
            transaction.terminal,
            transaction.pump,
            transaction.receipt,
            transaction.control_number.as_ref().map(|s| format!("'{}'", s.replace("'", "''"))).unwrap_or_else(|| "NULL".to_string()),
            transaction.customer_id.map(|id| id.to_string()).unwrap_or_else(|| "NULL".to_string()),
        ))
        .execute(&mut **tx)
        .await?;

        let row = sqlx::query("SELECT LAST_INSERT_ID() as id")
            .fetch_one(&mut **tx)
            .await?;
        Ok(row.get::<u64, _>("id"))
    }

    /// Counts transactions for a customer using a transaction (for testing).
    pub async fn count_customer_transactions_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
        customer_id: u32,
    ) -> anyhow::Result<i64> {
        let row = sqlx::query(
            "SELECT COUNT(*) as count FROM transactions WHERE customer_id = ?",
        )
        .bind(customer_id)
        .fetch_one(&mut **tx)
        .await?;

        Ok(row.get("count"))
    }

    /// Gets transaction count using a transaction (for testing).
    pub async fn count_transactions_tx(
        tx: &mut sqlx::Transaction<'_, sqlx::MySql>,
    ) -> anyhow::Result<i64> {
        let row = sqlx::query("SELECT COUNT(*) as count FROM transactions")
            .fetch_one(&mut **tx)
            .await?;

        Ok(row.get("count"))
    }
}
|
||||
@@ -1,6 +1,6 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use csv::ReaderBuilder;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
@@ -68,7 +68,7 @@ impl Transaction {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_csv_file(path: &Path) -> Result<Batch, Box<dyn std::error::Error>> {
|
||||
pub fn read_csv_file(path: &Path) -> anyhow::Result<Batch> {
|
||||
let filename = path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
@@ -101,6 +101,49 @@ pub fn read_csv_file(path: &Path) -> Result<Batch, Box<dyn std::error::Error>> {
|
||||
})
|
||||
}
|
||||
|
||||
/// Reads a CSV file and groups transactions by batch number.
///
/// AI AGENT NOTE: Returns a HashMap where keys are batch_numbers and values
/// are Batches containing only transactions for that batch. This enables
/// generating separate invoices per batch when a single CSV contains multiple.
///
/// Filtering: rows with non-positive amount OR an empty customer_number are
/// dropped, so each Batch holds only billable, customer-attributed rows.
/// NOTE(review): confirm this intentionally differs from read_csv_file, whose
/// filtering is not visible here.
pub fn read_csv_file_by_batch(path: &Path) -> anyhow::Result<HashMap<String, Batch>> {
    let file = fs::File::open(path)?;
    // Tab-delimited input; flexible(true) tolerates rows with varying widths.
    let mut rdr = ReaderBuilder::new()
        .delimiter(b'\t')
        .has_headers(true)
        .flexible(true)
        .from_reader(file);

    let mut batches: HashMap<String, Vec<Transaction>> = HashMap::new();

    for result in rdr.records() {
        let record = result?;
        if let Some(tx) = Transaction::from_record(&record) {
            if tx.amount > 0.0 && !tx.customer_number.is_empty() {
                batches.entry(tx.batch_number.clone()).or_default().push(tx);
            }
        }
    }

    // Sort each batch chronologically, then wrap it with the source filename.
    let mut result: HashMap<String, Batch> = HashMap::new();
    for (batch_number, mut transactions) in batches {
        transactions.sort_by(|a, b| a.date.cmp(&b.date));
        result.insert(
            batch_number,
            Batch {
                filename: path
                    .file_name()
                    .and_then(|n| n.to_str())
                    .unwrap_or("unknown")
                    .to_string(),
                transactions,
            },
        );
    }

    Ok(result)
}
|
||||
|
||||
pub fn group_by_customer(batches: &[Batch]) -> BTreeMap<String, Customer> {
|
||||
let mut customers: BTreeMap<String, Customer> = BTreeMap::new();
|
||||
|
||||
@@ -126,3 +169,132 @@ pub fn group_by_customer(batches: &[Batch]) -> BTreeMap<String, Customer> {
|
||||
|
||||
customers
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_transaction_from_record_extracts_batch_number() {
|
||||
let csv_content = "2026-02-01 10:15:16\t409\t559.26\t35.85\t15.60\t1001\t95 Oktan\t7825017523017000642\ttype\t1861\t97254\t1\t2\t000910\t1\t";
|
||||
|
||||
let mut rdr = csv::ReaderBuilder::new()
|
||||
.delimiter(b'\t')
|
||||
.has_headers(false)
|
||||
.from_reader(csv_content.as_bytes());
|
||||
|
||||
let record = rdr.records().next().unwrap().unwrap();
|
||||
let tx = Transaction::from_record(&record).unwrap();
|
||||
|
||||
assert_eq!(tx.batch_number, "409");
|
||||
assert_eq!(tx.customer_number, "1861");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_by_customer_with_multiple_batches() {
|
||||
let batch1 = Batch {
|
||||
filename: "test.csv".to_string(),
|
||||
transactions: vec![
|
||||
Transaction {
|
||||
date: NaiveDateTime::parse_from_str("2026-02-01 10:00:00", "%Y-%m-%d %H:%M:%S")
|
||||
.unwrap(),
|
||||
batch_number: "409".to_string(),
|
||||
amount: 100.0,
|
||||
volume: 10.0,
|
||||
price: 10.0,
|
||||
quality: 1001,
|
||||
quality_name: "95 Oktan".to_string(),
|
||||
card_number: "CARD001".to_string(),
|
||||
card_type: "type".to_string(),
|
||||
customer_number: "CUST1".to_string(),
|
||||
station: "S1".to_string(),
|
||||
terminal: "T1".to_string(),
|
||||
pump: "P1".to_string(),
|
||||
receipt: "R1".to_string(),
|
||||
card_report_group_number: "1".to_string(),
|
||||
control_number: "".to_string(),
|
||||
},
|
||||
Transaction {
|
||||
date: NaiveDateTime::parse_from_str("2026-02-01 11:00:00", "%Y-%m-%d %H:%M:%S")
|
||||
.unwrap(),
|
||||
batch_number: "410".to_string(),
|
||||
amount: 200.0,
|
||||
volume: 20.0,
|
||||
price: 10.0,
|
||||
quality: 1001,
|
||||
quality_name: "95 Oktan".to_string(),
|
||||
card_number: "CARD002".to_string(),
|
||||
card_type: "type".to_string(),
|
||||
customer_number: "CUST1".to_string(),
|
||||
station: "S1".to_string(),
|
||||
terminal: "T1".to_string(),
|
||||
pump: "P1".to_string(),
|
||||
receipt: "R2".to_string(),
|
||||
card_report_group_number: "1".to_string(),
|
||||
control_number: "".to_string(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let customers = group_by_customer(&[batch1]);
|
||||
|
||||
// Should have 1 customer with 2 cards
|
||||
assert_eq!(customers.len(), 1);
|
||||
assert!(customers.contains_key("CUST1"));
|
||||
assert_eq!(customers.get("CUST1").unwrap().cards.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_by_customer_separate_customers() {
|
||||
let batch1 = Batch {
|
||||
filename: "test.csv".to_string(),
|
||||
transactions: vec![
|
||||
Transaction {
|
||||
date: NaiveDateTime::parse_from_str("2026-02-01 10:00:00", "%Y-%m-%d %H:%M:%S")
|
||||
.unwrap(),
|
||||
batch_number: "409".to_string(),
|
||||
amount: 100.0,
|
||||
volume: 10.0,
|
||||
price: 10.0,
|
||||
quality: 1001,
|
||||
quality_name: "95 Oktan".to_string(),
|
||||
card_number: "CARD001".to_string(),
|
||||
card_type: "type".to_string(),
|
||||
customer_number: "CUST1".to_string(),
|
||||
station: "S1".to_string(),
|
||||
terminal: "T1".to_string(),
|
||||
pump: "P1".to_string(),
|
||||
receipt: "R1".to_string(),
|
||||
card_report_group_number: "1".to_string(),
|
||||
control_number: "".to_string(),
|
||||
},
|
||||
Transaction {
|
||||
date: NaiveDateTime::parse_from_str("2026-02-01 11:00:00", "%Y-%m-%d %H:%M:%S")
|
||||
.unwrap(),
|
||||
batch_number: "409".to_string(),
|
||||
amount: 200.0,
|
||||
volume: 20.0,
|
||||
price: 10.0,
|
||||
quality: 1001,
|
||||
quality_name: "95 Oktan".to_string(),
|
||||
card_number: "CARD003".to_string(),
|
||||
card_type: "type".to_string(),
|
||||
customer_number: "CUST2".to_string(),
|
||||
station: "S1".to_string(),
|
||||
terminal: "T1".to_string(),
|
||||
pump: "P1".to_string(),
|
||||
receipt: "R2".to_string(),
|
||||
card_report_group_number: "1".to_string(),
|
||||
control_number: "".to_string(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let customers = group_by_customer(&[batch1]);
|
||||
|
||||
// Should have 2 customers
|
||||
assert_eq!(customers.len(), 2);
|
||||
assert!(customers.contains_key("CUST1"));
|
||||
assert!(customers.contains_key("CUST2"));
|
||||
}
|
||||
}
|
||||
|
||||
9
src/lib.rs
Normal file
9
src/lib.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
//! Library crate for invoice-generator.
|
||||
//!
|
||||
//! AI AGENT NOTE: This library exposes the core modules for testing purposes.
|
||||
//! The binary crate (main.rs) uses this library.
|
||||
|
||||
pub mod commands;
|
||||
pub mod config;
|
||||
pub mod db;
|
||||
pub mod invoice_generator;
|
||||
211
src/main.rs
211
src/main.rs
@@ -1,23 +1,31 @@
|
||||
mod commands;
|
||||
mod config;
|
||||
mod db;
|
||||
mod invoice_generator;
|
||||
|
||||
use askama::Template;
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
use config::{Config, Env};
|
||||
use csv::ReaderBuilder;
|
||||
use db::{create_pool, Repository};
|
||||
use invoice_generator::{group_by_customer, read_csv_file_by_batch, Customer};
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
mod invoice_generator;
|
||||
|
||||
use invoice_generator::{group_by_customer, read_csv_file, Customer};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
fn fmt(v: f64) -> String {
|
||||
format!("{:.2}", v)
|
||||
}
|
||||
|
||||
/// Normalizes CSV date format and cleans the data.
|
||||
///
|
||||
/// AI AGENT NOTE: Input CSV may have dates in different formats (MM/DD/YYYY or YYYY-MM-DD).
|
||||
/// This function standardizes to YYYY-MM-DD HH:MM:SS format for consistent parsing.
|
||||
fn clean_csv_file(
|
||||
input_path: &Path,
|
||||
output_path: &Path,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
) -> anyhow::Result<String> {
|
||||
let file = fs::File::open(input_path)?;
|
||||
let mut rdr = ReaderBuilder::new()
|
||||
.delimiter(b'\t')
|
||||
@@ -229,19 +237,86 @@ struct CustomerTemplate {
|
||||
generated_date: String,
|
||||
}
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
/// Parses the --env flag from CLI arguments.
|
||||
///
|
||||
/// AI AGENT NOTE: The --env flag can appear anywhere in the argument list.
|
||||
/// Returns the environment and the index of the "--env" flag (for removal).
|
||||
/// Defaults to Prod if not specified.
|
||||
fn parse_env_flag(args: &[String]) -> (Env, usize) {
|
||||
for (i, arg) in args.iter().enumerate() {
|
||||
if arg == "--env" && i + 1 < args.len() {
|
||||
match args[i + 1].parse() {
|
||||
Ok(env) => return (env, i),
|
||||
Err(e) => {
|
||||
eprintln!("Error: {}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
(Env::default(), 0)
|
||||
}
|
||||
|
||||
/// Removes --env and its value from argument list.
|
||||
///
|
||||
/// AI AGENT NOTE: This allows the --env flag to appear anywhere in the
|
||||
/// command without affecting positional argument parsing.
|
||||
fn remove_env_flags(args: &[String]) -> Vec<String> {
|
||||
let (_, env_idx) = parse_env_flag(args);
|
||||
let mut result = Vec::with_capacity(args.len());
|
||||
|
||||
for (i, arg) in args.iter().enumerate() {
|
||||
if i == env_idx || (i == env_idx + 1 && args.get(env_idx) == Some(&"--env".to_string())) {
|
||||
continue;
|
||||
}
|
||||
result.push(arg.clone());
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
let args: Vec<String> = env::args().collect();
|
||||
|
||||
if args.len() != 3 {
|
||||
eprintln!("Användning: {} <csv-fil> <utdatakatalog>", args[0]);
|
||||
let (env, _) = parse_env_flag(&args);
|
||||
|
||||
if args.len() < 2 {
|
||||
print_usage(&args[0]);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let input_path = Path::new(&args[1]);
|
||||
let base_output_dir = Path::new(&args[2]);
|
||||
match args[1].as_str() {
|
||||
"import" => {
|
||||
let clean_args = remove_env_flags(&args);
|
||||
if clean_args.len() != 3 {
|
||||
eprintln!("Usage: {} import <csv-file> [--env <name>]", clean_args[0]);
|
||||
std::process::exit(1);
|
||||
}
|
||||
let csv_path = PathBuf::from(&clean_args[2]);
|
||||
if !csv_path.exists() {
|
||||
eprintln!("Error: File not found: {:?}", csv_path);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
println!("Environment: {}", env.as_str());
|
||||
let config = Config::load(env)?;
|
||||
let pool = create_pool(&config.database.connection_url()).await?;
|
||||
let repo = Repository::new(pool);
|
||||
|
||||
commands::run_import(&csv_path, &repo).await?;
|
||||
}
|
||||
"generate" => {
|
||||
let clean_args = remove_env_flags(&args);
|
||||
if clean_args.len() != 4 {
|
||||
eprintln!("Usage: {} generate <csv-file> <output-dir> [--env <name>]", clean_args[0]);
|
||||
std::process::exit(1);
|
||||
}
|
||||
let input_path = Path::new(&clean_args[2]);
|
||||
let base_output_dir = Path::new(&clean_args[3]);
|
||||
|
||||
if !input_path.exists() {
|
||||
eprintln!("Fel: Filen hittades inte: {:?}", input_path);
|
||||
eprintln!("Error: File not found: {:?}", input_path);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
@@ -251,32 +326,40 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
println!("Konverterar {} till rensat format...", filename);
|
||||
println!("Converting {} to cleaned format...", filename);
|
||||
|
||||
let temp_cleaned_path =
|
||||
base_output_dir.join(format!("{}.temp.csv", filename.trim_end_matches(".txt")));
|
||||
let batch_number = clean_csv_file(input_path, &temp_cleaned_path)?;
|
||||
clean_csv_file(input_path, &temp_cleaned_path)?;
|
||||
|
||||
let mut batches = read_csv_file_by_batch(&temp_cleaned_path)?;
|
||||
let batch_count = batches.len();
|
||||
println!("Found {} batches in CSV", batch_count);
|
||||
|
||||
let mut total_customers = 0usize;
|
||||
let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string();
|
||||
|
||||
let mut batch_numbers: Vec<_> = batches.keys().cloned().collect();
|
||||
batch_numbers.sort();
|
||||
|
||||
for batch_number in batch_numbers {
|
||||
let batch = batches.remove(&batch_number).unwrap();
|
||||
let output_dir = base_output_dir.join(&batch_number);
|
||||
|
||||
fs::create_dir_all(&output_dir)?;
|
||||
|
||||
fs::copy(input_path, output_dir.join(format!("{}.txt", batch_number)))?;
|
||||
fs::rename(
|
||||
&temp_cleaned_path,
|
||||
output_dir.join(format!("{}.csv", batch_number)),
|
||||
)?;
|
||||
let csv_path = output_dir.join(format!("{}.csv", batch_number));
|
||||
let txt_path = output_dir.join(format!("{}.txt", batch_number));
|
||||
|
||||
fs::copy(&temp_cleaned_path, &csv_path)?;
|
||||
fs::copy(input_path, &txt_path)?;
|
||||
|
||||
println!(
|
||||
"Konverterade {} transaktioner",
|
||||
fs::read_to_string(output_dir.join(format!("{}.csv", batch_number)))?
|
||||
.lines()
|
||||
.count()
|
||||
- 1
|
||||
"Batch {}: {} transactions",
|
||||
batch_number,
|
||||
batch.transactions.len()
|
||||
);
|
||||
|
||||
let batch = read_csv_file(&output_dir.join(format!("{}.csv", batch_number)))?;
|
||||
println!("Laddade {} transaktioner", batch.transactions.len());
|
||||
|
||||
let first_date = batch.transactions.first().map(|t| t.date).unwrap();
|
||||
let last_date = batch.transactions.last().map(|t| t.date).unwrap();
|
||||
let period = format!(
|
||||
@@ -300,9 +383,6 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
.unwrap();
|
||||
fs::write(output_dir.join("index.html"), html)?;
|
||||
|
||||
let generated_date = Utc::now().format("%Y-%m-%d %H:%M").to_string();
|
||||
|
||||
let customer_count = customers.len();
|
||||
for (customer_num, customer) in customers {
|
||||
let prepared = PreparedCustomer::from_customer(customer);
|
||||
let customer_html = CustomerTemplate {
|
||||
@@ -313,15 +393,76 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
}
|
||||
.render()
|
||||
.unwrap();
|
||||
let filename = format!("customer_{}.html", customer_num);
|
||||
fs::write(output_dir.join(&filename), customer_html)?;
|
||||
println!("Genererade {}", filename);
|
||||
let customer_filename = format!("customer_{}.html", customer_num);
|
||||
fs::write(output_dir.join(&customer_filename), customer_html)?;
|
||||
println!(" Generated {}", customer_filename);
|
||||
}
|
||||
|
||||
total_customers += index_customers.len();
|
||||
}
|
||||
|
||||
fs::remove_file(temp_cleaned_path)?;
|
||||
|
||||
println!(
|
||||
"\nGenererade {} kundfakturor i {:?}",
|
||||
customer_count, output_dir
|
||||
"\nGenerated {} customer invoices across {} batches in {:?}",
|
||||
total_customers,
|
||||
batch_count,
|
||||
base_output_dir
|
||||
);
|
||||
}
|
||||
"db" => {
|
||||
let clean_args = remove_env_flags(&args);
|
||||
if clean_args.len() < 3 {
|
||||
eprintln!("Usage: {} db <subcommand> [--env <name>]", clean_args[0]);
|
||||
eprintln!("Subcommands:");
|
||||
eprintln!(" setup Create database and schema");
|
||||
eprintln!(" reset Drop and recreate database");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
println!("Environment: {}", env.as_str());
|
||||
let config = Config::load(env)?;
|
||||
|
||||
match clean_args[2].as_str() {
|
||||
"setup" => {
|
||||
let pool = create_pool(&config.database.connection_url()).await?;
|
||||
let repo = Repository::new(pool);
|
||||
commands::run_db_setup(&repo, &config).await?;
|
||||
}
|
||||
"reset" => {
|
||||
commands::run_db_reset(&config).await?;
|
||||
}
|
||||
_ => {
|
||||
eprintln!("Unknown db subcommand: {}", clean_args[2]);
|
||||
eprintln!("Subcommands:");
|
||||
eprintln!(" setup Create database and schema");
|
||||
eprintln!(" reset Drop and recreate database");
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
"help" | "--help" | "-h" => {
|
||||
print_usage(&args[0]);
|
||||
}
|
||||
_ => {
|
||||
eprintln!("Unknown command: {}", args[1]);
|
||||
print_usage(&args[0]);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_usage(program: &str) {
|
||||
eprintln!("Usage: {} <command> [arguments]", program);
|
||||
eprintln!();
|
||||
eprintln!("Commands:");
|
||||
eprintln!(" import <csv-file> [--env <name>] Import CSV data to database (default: prod)");
|
||||
eprintln!(" generate <csv> <dir> Generate HTML invoices from CSV");
|
||||
eprintln!(" db setup [--env <name>] Create database and schema (default: prod)");
|
||||
eprintln!(" db reset [--env <name>] Drop and recreate database (default: prod)");
|
||||
eprintln!(" help Show this help message");
|
||||
eprintln!();
|
||||
eprintln!("Environments: prod (default), dev, test");
|
||||
}
|
||||
|
||||
80
tests/common/fixtures.rs
Normal file
80
tests/common/fixtures.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
//! Test fixtures for CSV parsing tests.
|
||||
//!
|
||||
//! AI AGENT NOTE: These fixtures provide sample data for testing CSV parsing
|
||||
//! and other components without requiring real files.
|
||||
|
||||
/// Header row for CSV files.
|
||||
pub const CSV_HEADER: &str = "Date\tBatch number\tAmount\tVolume\tPrice\tQuality\tQualityName\tCard number\tCard type\tCustomer number\tStation\tTerminal\tPump\tReceipt\tCard report group number\tControl number";
|
||||
|
||||
/// A valid CSV row with a known customer (fleet account).
|
||||
///
|
||||
/// AI AGENT NOTE: This represents a typical fleet customer transaction.
|
||||
/// - Customer number: "1861" (known customer)
|
||||
/// - Card number: Full card number (not anonymized)
|
||||
/// - Amount: Positive (should be imported)
|
||||
pub const CSV_ROW_KNOWN_CUSTOMER: &str = "2026-02-01 10:15:16\t409\t559.26\t35.85\t15.60\t1001\t95 Oktan\t7825017523017000642\t7825017523017000642\t1861\t97254\t1\t2\t000910\t1\t";
|
||||
|
||||
/// A valid CSV row with an anonymized card (retail customer).
|
||||
///
|
||||
/// AI AGENT NOTE: This represents a retail transaction.
|
||||
/// - Customer number: "" (empty - anonymized)
|
||||
/// - Card number: Contains asterisks (partially masked)
|
||||
/// - Amount: Positive (should be imported)
|
||||
pub const CSV_ROW_ANONYMIZED: &str = "2026-02-01 06:40:14\t409\t267.23\t17.13\t15.60\t1001\t95 Oktan\t554477******9952\t554477******9952\t\t97254\t1\t2\t000898\t4\t756969";
|
||||
|
||||
/// A CSV row with zero amount (should be filtered out).
|
||||
///
|
||||
/// AI AGENT NOTE: Zero amounts typically represent authorizations
|
||||
/// that were never completed.
|
||||
pub const CSV_ROW_ZERO_AMOUNT: &str = "2026-02-01 06:40:14\t409\t0.00\t0.00\t15.60\t1001\t95 Oktan\t554477******9952\t554477******9952\t\t97254\t1\t2\t000898\t4\t756969";
|
||||
|
||||
/// A CSV row with negative amount (should be filtered out).
|
||||
///
|
||||
/// AI AGENT NOTE: Negative amounts typically represent cancellations
|
||||
/// or refunds.
|
||||
pub const CSV_ROW_NEGATIVE_AMOUNT: &str = "2026-02-01 06:40:14\t409\t-50.00\t-3.00\t15.60\t1001\t95 Oktan\t7825017523017000642\t7825017523017000642\t1861\t97254\t1\t2\t000898\t1\t";
|
||||
|
||||
/// A CSV row with US date format (MM/DD/YYYY).
|
||||
///
|
||||
/// AI AGENT NOTE: Some source files may use US date format.
|
||||
pub const CSV_ROW_US_DATE: &str = "02/01/2026 10:15:16 AM\t409\t559.26\t35.85\t15.60\t1001\t95 Oktan\t7825017523017000642\t7825017523017000642\t1861\t97254\t1\t2\t000910\t1\t";
|
||||
|
||||
/// Creates a multi-row CSV string for testing.
|
||||
///
|
||||
/// AI AGENT NOTE: Combines header and multiple data rows for
|
||||
/// testing full CSV parsing.
|
||||
pub fn create_test_csv(rows: &[&str]) -> String {
|
||||
let mut csv = CSV_HEADER.to_string();
|
||||
csv.push('\n');
|
||||
for row in rows {
|
||||
csv.push_str(row);
|
||||
csv.push('\n');
|
||||
}
|
||||
csv
|
||||
}
|
||||
|
||||
/// Sample CSV with mixed transactions (known, anonymized, etc.).
|
||||
pub fn sample_csv_mixed() -> String {
|
||||
create_test_csv(&[
|
||||
CSV_ROW_ANONYMIZED,
|
||||
CSV_ROW_KNOWN_CUSTOMER,
|
||||
CSV_ROW_ZERO_AMOUNT,
|
||||
])
|
||||
}
|
||||
|
||||
/// Sample CSV with only known customer transactions.
|
||||
pub fn sample_csv_known_only() -> String {
|
||||
create_test_csv(&[
|
||||
CSV_ROW_KNOWN_CUSTOMER,
|
||||
"2026-02-01 10:32:18\t409\t508.40\t32.59\t15.60\t1001\t95 Oktan\t7825017523017000717\t7825017523017000717\t1861\t97254\t1\t2\t000912\t1\t",
|
||||
"2026-02-01 10:57:33\t409\t174.41\t11.18\t15.60\t1001\t95 Oktan\t7825017523017001053\t7825017523017001053\t1980\t97254\t1\t1\t000913\t1\t",
|
||||
])
|
||||
}
|
||||
|
||||
/// Sample CSV with Diesel transaction.
|
||||
pub fn sample_csv_diesel() -> String {
|
||||
create_test_csv(&[
|
||||
"2026-02-01 10:05:16\t409\t543.22\t31.40\t17.30\t4\tDiesel\t673706*********0155\t673706*********0155\t\t97254\t1\t2\t000909\t4\tD00824",
|
||||
"2026-02-01 11:10:21\t409\t612.25\t35.39\t17.30\t4\tDiesel\t7825017523017000873\t7825017523017000873\t1866\t97254\t1\t1\t000916\t1\t",
|
||||
])
|
||||
}
|
||||
10
tests/common/mod.rs
Normal file
10
tests/common/mod.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
//! Common test utilities.
|
||||
//!
|
||||
//! AI AGENT NOTE: This module provides shared test infrastructure
|
||||
//! including database helpers and sample data fixtures.
|
||||
|
||||
pub mod fixtures;
|
||||
pub mod test_db;
|
||||
|
||||
pub use fixtures::*;
|
||||
pub use test_db::*;
|
||||
122
tests/common/test_db.rs
Normal file
122
tests/common/test_db.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
//! Test database utilities.
|
||||
//!
|
||||
//! AI AGENT NOTE: These helpers manage the test database connection pool.
|
||||
//! Uses rusty_petroleum_test database for all tests.
|
||||
|
||||
use sqlx::mysql::{MySqlPool, MySqlPoolOptions};
|
||||
use std::time::Duration;
|
||||
|
||||
/// Creates a connection pool to the test database.
|
||||
///
|
||||
/// AI AGENT NOTE: Uses config.toml or config.test.toml for connection details.
|
||||
/// The test database should be separate from dev/prod to avoid data conflicts.
|
||||
pub async fn create_test_pool() -> MySqlPool {
|
||||
let config = crate::config::Config::load(crate::config::Env::Test)
|
||||
.expect("Failed to load test config");
|
||||
|
||||
MySqlPoolOptions::new()
|
||||
.max_connections(1)
|
||||
.acquire_timeout(Duration::from_secs(10))
|
||||
.connect(&config.database.connection_url())
|
||||
.await
|
||||
.expect("Failed to connect to test database")
|
||||
}
|
||||
|
||||
/// Resets the test database by dropping and recreating all tables.
|
||||
///
|
||||
/// AI AGENT NOTE: This is used before running tests to ensure a clean state.
|
||||
/// It uses the `rusty_petroleum_test` database.
|
||||
pub async fn reset_test_database() -> anyhow::Result<()> {
|
||||
let config = crate::config::Config::load(crate::config::Env::Test)?;
|
||||
let database_url = config.database.connection_url();
|
||||
let base_url = database_url.trim_end_matches(config.env.database_name());
|
||||
|
||||
let setup_pool = MySqlPoolOptions::new()
|
||||
.max_connections(1)
|
||||
.connect(base_url)
|
||||
.await?;
|
||||
|
||||
// Drop database if exists
|
||||
sqlx::query(&format!("DROP DATABASE IF EXISTS {}", config.env.database_name()))
|
||||
.execute(&setup_pool)
|
||||
.await?;
|
||||
|
||||
// Create fresh database
|
||||
sqlx::query(&format!("CREATE DATABASE {}", config.env.database_name()))
|
||||
.execute(&setup_pool)
|
||||
.await?;
|
||||
|
||||
drop(setup_pool);
|
||||
|
||||
// Now create tables
|
||||
let pool = create_test_pool().await;
|
||||
|
||||
// Create customers table
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE customers (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
customer_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
card_report_group TINYINT UNSIGNED NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_customer_number (customer_number)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
// Create cards table
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE cards (
|
||||
id INT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
card_number VARCHAR(50) NOT NULL UNIQUE,
|
||||
customer_id INT UNSIGNED NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
FOREIGN KEY (customer_id) REFERENCES customers(id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
// Create transactions table
|
||||
sqlx::query(
|
||||
r#"
|
||||
CREATE TABLE transactions (
|
||||
id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
|
||||
transaction_date DATETIME NOT NULL,
|
||||
batch_number VARCHAR(20) NOT NULL,
|
||||
amount DECIMAL(10,2) NOT NULL,
|
||||
volume DECIMAL(10,3) NOT NULL,
|
||||
price DECIMAL(8,4) NOT NULL,
|
||||
quality_code INT NOT NULL,
|
||||
quality_name VARCHAR(50) NOT NULL,
|
||||
card_number VARCHAR(50) NOT NULL,
|
||||
station VARCHAR(20) NOT NULL,
|
||||
terminal VARCHAR(10) NOT NULL,
|
||||
pump VARCHAR(10) NOT NULL,
|
||||
receipt VARCHAR(20) NOT NULL,
|
||||
control_number VARCHAR(20),
|
||||
customer_id INT UNSIGNED NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
INDEX idx_transaction_date (transaction_date),
|
||||
INDEX idx_batch_number (batch_number),
|
||||
INDEX idx_customer_id (customer_id),
|
||||
INDEX idx_card_number (card_number),
|
||||
INDEX idx_station (station)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
|
||||
"#,
|
||||
)
|
||||
.execute(&pool)
|
||||
.await?;
|
||||
|
||||
drop(pool);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
105
tests/config_test.rs
Normal file
105
tests/config_test.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
//! Tests for the config module.
|
||||
//!
|
||||
//! AI AGENT NOTE: These tests verify configuration loading, environment
|
||||
//! parsing, and database connection URL generation.
|
||||
|
||||
use invoice_generator::config::{Config, DatabaseConfig, Env};
|
||||
|
||||
/// Tests that Env::default() returns Prod.
|
||||
#[test]
|
||||
fn env_default_is_prod() {
|
||||
assert_eq!(Env::default(), Env::Prod);
|
||||
}
|
||||
|
||||
/// Tests Env::from_str with valid short forms.
|
||||
#[test]
|
||||
fn env_from_str_valid_short() {
|
||||
assert_eq!("prod".parse::<Env>().unwrap(), Env::Prod);
|
||||
assert_eq!("dev".parse::<Env>().unwrap(), Env::Dev);
|
||||
assert_eq!("test".parse::<Env>().unwrap(), Env::Test);
|
||||
}
|
||||
|
||||
/// Tests Env::from_str with valid long forms (aliases).
|
||||
#[test]
|
||||
fn env_from_str_valid_aliases() {
|
||||
assert_eq!("production".parse::<Env>().unwrap(), Env::Prod);
|
||||
assert_eq!("development".parse::<Env>().unwrap(), Env::Dev);
|
||||
assert_eq!("testing".parse::<Env>().unwrap(), Env::Test);
|
||||
}
|
||||
|
||||
/// Tests Env::from_str is case-insensitive.
|
||||
#[test]
|
||||
fn env_from_str_case_insensitive() {
|
||||
assert_eq!("PROD".parse::<Env>().unwrap(), Env::Prod);
|
||||
assert_eq!("Dev".parse::<Env>().unwrap(), Env::Dev);
|
||||
assert_eq!("TEST".parse::<Env>().unwrap(), Env::Test);
|
||||
}
|
||||
|
||||
/// Tests Env::from_str with invalid value returns error.
|
||||
#[test]
|
||||
fn env_from_str_invalid() {
|
||||
let result: Result<Env, _> = "invalid".parse();
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("Unknown environment"));
|
||||
}
|
||||
|
||||
/// Tests Env::as_str returns correct string.
|
||||
#[test]
|
||||
fn env_as_str() {
|
||||
assert_eq!(Env::Prod.as_str(), "prod");
|
||||
assert_eq!(Env::Dev.as_str(), "dev");
|
||||
assert_eq!(Env::Test.as_str(), "test");
|
||||
}
|
||||
|
||||
/// Tests Env::database_name returns correct database names.
|
||||
#[test]
|
||||
fn env_database_name() {
|
||||
assert_eq!(Env::Prod.database_name(), "rusty_petroleum");
|
||||
assert_eq!(Env::Dev.database_name(), "rusty_petroleum_dev");
|
||||
assert_eq!(Env::Test.database_name(), "rusty_petroleum_test");
|
||||
}
|
||||
|
||||
/// Tests DatabaseConfig::connection_url without password.
|
||||
#[test]
|
||||
fn db_connection_url_without_password() {
|
||||
let config = DatabaseConfig {
|
||||
host: "localhost".to_string(),
|
||||
port: 3306,
|
||||
user: "test_user".to_string(),
|
||||
password: "".to_string(),
|
||||
name: "test_db".to_string(),
|
||||
};
|
||||
|
||||
let url = config.connection_url();
|
||||
assert_eq!(url, "mysql://test_user@localhost:3306/test_db");
|
||||
}
|
||||
|
||||
/// Tests DatabaseConfig::connection_url with password.
|
||||
#[test]
|
||||
fn db_connection_url_with_password() {
|
||||
let config = DatabaseConfig {
|
||||
host: "localhost".to_string(),
|
||||
port: 3306,
|
||||
user: "test_user".to_string(),
|
||||
password: "secret".to_string(),
|
||||
name: "test_db".to_string(),
|
||||
};
|
||||
|
||||
let url = config.connection_url();
|
||||
assert_eq!(url, "mysql://test_user:secret@localhost:3306/test_db");
|
||||
}
|
||||
|
||||
/// Tests DatabaseConfig::connection_url with custom port.
|
||||
#[test]
|
||||
fn db_connection_url_custom_port() {
|
||||
let config = DatabaseConfig {
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 3307,
|
||||
user: "user".to_string(),
|
||||
password: "pass".to_string(),
|
||||
name: "mydb".to_string(),
|
||||
};
|
||||
|
||||
let url = config.connection_url();
|
||||
assert_eq!(url, "mysql://user:pass@127.0.0.1:3307/mydb");
|
||||
}
|
||||
316
tests/import_test.rs
Normal file
316
tests/import_test.rs
Normal file
@@ -0,0 +1,316 @@
|
||||
//! Integration tests for CSV parsing.
|
||||
//!
|
||||
//! AI AGENT NOTE: These tests verify full CSV parsing with actual files.
|
||||
|
||||
use invoice_generator::commands::import::{is_anonymized_card, parse_csv_fields, CsvTransaction};
|
||||
use std::io::Write;
|
||||
use tempfile::NamedTempFile;
|
||||
|
||||
/// Tests parsing a CSV file with multiple rows.
|
||||
#[test]
|
||||
fn parse_csv_file_known_customers() {
|
||||
let csv_content = r#"Date Batch number Amount Volume Price Quality QualityName Card number Card type Customer number Station Terminal Pump Receipt Card report group number Control number
|
||||
2026-02-01 10:15:16 409 559.26 35.85 15.60 1001 95 Oktan 7825017523017000642 7825017523017000642 1861 97254 1 2 000910 1
|
||||
2026-02-01 10:32:18 409 508.40 32.59 15.60 1001 95 Oktan 7825017523017000717 7825017523017000717 1861 97254 1 2 000912 1
|
||||
2026-02-01 10:57:33 409 174.41 11.18 15.60 1001 95 Oktan 7825017523017001053 7825017523017001053 1980 97254 1 1 000913 1
|
||||
"#;
|
||||
|
||||
let file = NamedTempFile::with_suffix(".csv").unwrap();
|
||||
file.as_file().write_all(csv_content.as_bytes()).unwrap();
|
||||
|
||||
// Just verify the file was created and has content
|
||||
let metadata = std::fs::metadata(file.path()).unwrap();
|
||||
assert!(metadata.len() > 0);
|
||||
}
|
||||
|
||||
/// Tests that anonymized cards are correctly identified.
|
||||
#[test]
|
||||
fn anonymized_card_detection() {
|
||||
// Known card (full number)
|
||||
assert!(!is_anonymized_card("7825017523017000642"));
|
||||
assert!(!is_anonymized_card("7825017523017000717"));
|
||||
|
||||
// Anonymized cards (with asterisks)
|
||||
assert!(is_anonymized_card("554477******9952"));
|
||||
assert!(is_anonymized_card("673706*********0155"));
|
||||
assert!(is_anonymized_card("404776******7006"));
|
||||
|
||||
// Edge cases
|
||||
assert!(!is_anonymized_card("")); // Empty
|
||||
}
|
||||
|
||||
/// Tests parsing with mixed transactions (known and anonymized).
|
||||
#[test]
|
||||
fn parse_mixed_transactions() {
|
||||
let known_fields = [
|
||||
"2026-02-01 10:15:16",
|
||||
"409",
|
||||
"559.26",
|
||||
"35.85",
|
||||
"15.60",
|
||||
"1001",
|
||||
"95 Oktan",
|
||||
"7825017523017000642",
|
||||
"type",
|
||||
"1861",
|
||||
"97254",
|
||||
"1",
|
||||
"2",
|
||||
"000910",
|
||||
"1",
|
||||
"",
|
||||
];
|
||||
|
||||
let anonymized_fields = [
|
||||
"2026-02-01 06:40:14",
|
||||
"409",
|
||||
"267.23",
|
||||
"17.13",
|
||||
"15.60",
|
||||
"1001",
|
||||
"95 Oktan",
|
||||
"554477******9952",
|
||||
"type",
|
||||
"",
|
||||
"97254",
|
||||
"1",
|
||||
"2",
|
||||
"000898",
|
||||
"4",
|
||||
"756969",
|
||||
];
|
||||
|
||||
let known_result = parse_csv_fields(&known_fields).unwrap();
|
||||
let anonymized_result = parse_csv_fields(&anonymized_fields).unwrap();
|
||||
|
||||
assert!(known_result.is_some());
|
||||
assert!(anonymized_result.is_some());
|
||||
|
||||
let known_tx = known_result.unwrap();
|
||||
let anonymized_tx = anonymized_result.unwrap();
|
||||
|
||||
// Known customer has customer_number
|
||||
assert_eq!(known_tx.customer_number, "1861");
|
||||
assert!(!is_anonymized_card(&known_tx.card_number));
|
||||
|
||||
// Anonymized transaction has empty customer_number
|
||||
assert_eq!(anonymized_tx.customer_number, "");
|
||||
assert!(is_anonymized_card(&anonymized_tx.card_number));
|
||||
}
|
||||
|
||||
/// Tests that transactions are counted correctly.
|
||||
#[test]
|
||||
fn transaction_counting() {
|
||||
let fields_1 = [
|
||||
"2026-02-01 10:15:16",
|
||||
"409",
|
||||
"559.26",
|
||||
"35.85",
|
||||
"15.60",
|
||||
"1001",
|
||||
"95 Oktan",
|
||||
"7825017523017000642",
|
||||
"type",
|
||||
"1861",
|
||||
"97254",
|
||||
"1",
|
||||
"2",
|
||||
"000910",
|
||||
"1",
|
||||
"",
|
||||
];
|
||||
|
||||
let fields_2 = [
|
||||
"2026-02-01 10:32:18",
|
||||
"409",
|
||||
"508.40",
|
||||
"32.59",
|
||||
"15.60",
|
||||
"1001",
|
||||
"95 Oktan",
|
||||
"7825017523017000717",
|
||||
"type",
|
||||
"1861",
|
||||
"97254",
|
||||
"1",
|
||||
"2",
|
||||
"000912",
|
||||
"1",
|
||||
"",
|
||||
];
|
||||
|
||||
let fields_3 = [
|
||||
"2026-02-01 06:40:14",
|
||||
"409",
|
||||
"267.23",
|
||||
"17.13",
|
||||
"15.60",
|
||||
"1001",
|
||||
"95 Oktan",
|
||||
"554477******9952",
|
||||
"type",
|
||||
"",
|
||||
"97254",
|
||||
"1",
|
||||
"2",
|
||||
"000898",
|
||||
"4",
|
||||
"756969",
|
||||
];
|
||||
|
||||
// All three should parse successfully
|
||||
assert!(parse_csv_fields(&fields_1).unwrap().is_some());
|
||||
assert!(parse_csv_fields(&fields_2).unwrap().is_some());
|
||||
assert!(parse_csv_fields(&fields_3).unwrap().is_some());
|
||||
}
|
||||
|
||||
/// Tests that duplicate customers are handled.
#[test]
fn duplicate_customers_tracked_once() {
    // First purchase by customer 1861.
    let first_row = [
        "2026-02-01 10:15:16",
        "409",
        "559.26",
        "35.85",
        "15.60",
        "1001",
        "95 Oktan",
        "7825017523017000642",
        "type",
        "1861",
        "97254",
        "1",
        "2",
        "000910",
        "1",
        "",
    ];
    let first = parse_csv_fields(&first_row).unwrap().unwrap();

    // Customer 1861 should be tracked.
    assert_eq!(first.customer_number, "1861");

    // Second purchase by the same customer, paid with a different card.
    let second_row = [
        "2026-02-01 10:32:18",
        "409",
        "508.40",
        "32.59",
        "15.60",
        "1001",
        "95 Oktan",
        "7825017523017000717",
        "type",
        "1861",
        "97254",
        "1",
        "2",
        "000912",
        "1",
        "",
    ];
    let second = parse_csv_fields(&second_row).unwrap().unwrap();

    // Same customer number on both rows, but distinct card numbers.
    assert_eq!(second.customer_number, "1861");
    assert_ne!(first.card_number, second.card_number);
}
|
||||
|
||||
/// Tests diesel product parsing.
#[test]
fn diesel_product_parsing() {
    // An anonymized diesel purchase: masked PAN, empty customer number,
    // and a fuel control number in the last column.
    let row = [
        "2026-02-01 10:05:16",
        "409",
        "543.22",
        "31.40",
        "17.30",
        "4",
        "Diesel",
        "673706*********0155",
        "type",
        "",
        "97254",
        "1",
        "2",
        "000909",
        "4",
        "D00824",
    ];

    let parsed = parse_csv_fields(&row).unwrap().unwrap();

    // Product code and name come straight from the quality columns.
    assert_eq!(parsed.quality, 4);
    assert_eq!(parsed.quality_name, "Diesel");
    // Unit price and the control number are preserved as given.
    assert_eq!(parsed.price, 17.30);
    assert_eq!(parsed.control_number, "D00824");
}
|
||||
|
||||
/// Tests that amount > 0 filter works.
#[test]
fn amount_filter_excludes_zero_and_negative() {
    // All three input rows are identical except for the amount and volume
    // columns, so build them from one template.
    fn row<'a>(amount: &'a str, volume: &'a str) -> [&'a str; 16] {
        [
            "2026-02-01 10:15:16",
            "409",
            amount,
            volume,
            "15.60",
            "1001",
            "95 Oktan",
            "7825017523017000642",
            "type",
            "1861",
            "97254",
            "1",
            "2",
            "000910",
            "1",
            "",
        ]
    }

    // Zero amount should be filtered.
    assert!(parse_csv_fields(&row("0.00", "0.00")).unwrap().is_none());

    // Negative amount should be filtered.
    assert!(parse_csv_fields(&row("-50.00", "-3.00")).unwrap().is_none());

    // Small positive amount should pass.
    assert!(parse_csv_fields(&row("0.01", "0.001")).unwrap().is_some());
}
|
||||
141
tests/models_test.rs
Normal file
141
tests/models_test.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
//! Tests for the database models.
|
||||
//!
|
||||
//! AI AGENT NOTE: These tests verify model serialization and data integrity.
|
||||
|
||||
use chrono::NaiveDateTime;
|
||||
use invoice_generator::db::models::{NewCard, NewCustomer, NewTransaction};
|
||||
|
||||
/// Tests that NewCustomer can be created with valid data.
#[test]
fn new_customer_creation() {
    let new_customer = NewCustomer {
        customer_number: String::from("12345"),
        card_report_group: 1,
    };

    // Both fields hold exactly what was assigned.
    assert_eq!(new_customer.card_report_group, 1);
    assert_eq!(new_customer.customer_number, "12345");
}
|
||||
|
||||
/// Tests that NewCard can be created with valid data.
#[test]
fn new_card_creation() {
    let new_card = NewCard {
        card_number: String::from("7825017523017000642"),
        customer_id: 42,
    };

    // Both fields hold exactly what was assigned.
    assert_eq!(new_card.customer_id, 42);
    assert_eq!(new_card.card_number, "7825017523017000642");
}
|
||||
|
||||
/// Tests that NewTransaction can be created with all fields.
#[test]
fn new_transaction_creation() {
    let transaction_date =
        NaiveDateTime::parse_from_str("2026-02-01 10:15:16", "%Y-%m-%d %H:%M:%S").unwrap();

    let transaction = NewTransaction {
        transaction_date,
        batch_number: String::from("409"),
        amount: 559.26,
        volume: 35.85,
        price: 15.60,
        quality_code: 1001,
        quality_name: String::from("95 Oktan"),
        card_number: String::from("7825017523017000642"),
        station: String::from("97254"),
        terminal: String::from("1"),
        pump: String::from("2"),
        receipt: String::from("000910"),
        control_number: None,
        customer_id: Some(1),
    };

    // Numeric and text fields round-trip unchanged.
    assert_eq!(transaction.batch_number, "409");
    assert_eq!(transaction.amount, 559.26);
    assert_eq!(transaction.volume, 35.85);
    assert_eq!(transaction.quality_name, "95 Oktan");
    // A known customer is attached and no control number is set.
    assert_eq!(transaction.customer_id, Some(1));
    assert!(transaction.control_number.is_none());
}
|
||||
|
||||
/// Tests that NewTransaction can be created with control number.
#[test]
fn new_transaction_with_control_number() {
    let transaction_date =
        NaiveDateTime::parse_from_str("2026-02-01 06:40:14", "%Y-%m-%d %H:%M:%S").unwrap();

    let transaction = NewTransaction {
        transaction_date,
        batch_number: String::from("409"),
        amount: 267.23,
        volume: 17.13,
        price: 15.60,
        quality_code: 1001,
        quality_name: String::from("95 Oktan"),
        card_number: String::from("554477******9952"),
        station: String::from("97254"),
        terminal: String::from("1"),
        pump: String::from("2"),
        receipt: String::from("000898"),
        control_number: Some(String::from("756969")),
        customer_id: None,
    };

    // Control number is present; no customer is linked.
    assert_eq!(transaction.control_number, Some("756969".to_string()));
    assert!(transaction.customer_id.is_none());
}
|
||||
|
||||
/// Tests decimal precision for monetary values.
#[test]
fn transaction_decimal_precision() {
    let when =
        NaiveDateTime::parse_from_str("2026-02-01 10:15:16", "%Y-%m-%d %H:%M:%S").unwrap();

    let txn = NewTransaction {
        transaction_date: when,
        batch_number: String::from("409"),
        amount: 123.45,
        volume: 7.891,
        price: 15.625,
        quality_code: 1001,
        quality_name: String::from("95 Oktan"),
        card_number: String::from("CARD123"),
        station: String::from("1"),
        terminal: String::from("1"),
        pump: String::from("1"),
        receipt: String::from("001"),
        control_number: None,
        customer_id: None,
    };

    // Stored f64 values compare bit-for-bit against the same literals,
    // so equality here is exact (no arithmetic is performed in between).
    assert_eq!(txn.price, 15.625);
    assert_eq!(txn.volume, 7.891);
    assert_eq!(txn.amount, 123.45);
}
|
||||
|
||||
/// Tests that anonymized transactions have no customer.
#[test]
fn anonymized_transaction_has_no_customer() {
    let when =
        NaiveDateTime::parse_from_str("2026-02-01 06:40:14", "%Y-%m-%d %H:%M:%S").unwrap();

    let txn = NewTransaction {
        transaction_date: when,
        batch_number: String::from("409"),
        amount: 267.23,
        volume: 17.13,
        price: 15.60,
        quality_code: 1001,
        quality_name: String::from("95 Oktan"),
        card_number: String::from("554477******9952"),
        station: String::from("97254"),
        terminal: String::from("1"),
        pump: String::from("2"),
        receipt: String::from("000898"),
        control_number: Some(String::from("756969")),
        customer_id: None,
    };

    // Anonymized rows have no customer link...
    assert!(txn.customer_id.is_none());
    // ...but the masked card number is still stored.
    assert_eq!(txn.card_number, "554477******9952");
}
|
||||
449
tests/repository_test.rs
Normal file
449
tests/repository_test.rs
Normal file
@@ -0,0 +1,449 @@
|
||||
//! Tests for the repository module.
|
||||
//!
|
||||
//! AI AGENT NOTE: These tests verify database operations using the test database.
|
||||
//! Each test uses a transaction that is rolled back after the test completes.
|
||||
|
||||
use sqlx::Row;
|
||||
|
||||
/// Builds a MySQL connection pool for the test database.
///
/// The connection URL comes from the `DATABASE_URL` environment variable when
/// it is set; otherwise it falls back to the URL assembled from the `Test`
/// configuration profile.
///
/// # Panics
/// Panics if the fallback config cannot be loaded or the pool cannot connect —
/// acceptable in tests, where a missing database should fail loudly.
async fn create_test_pool() -> sqlx::MySqlPool {
    invoice_generator::db::create_pool(&std::env::var("DATABASE_URL").unwrap_or_else(|_| {
        let config = invoice_generator::config::Config::load(invoice_generator::config::Env::Test).unwrap();
        config.database.connection_url()
    })).await.unwrap()
}
|
||||
|
||||
// ===== Customer Tests =====
//
// Each test opens its own sqlx transaction and never commits it, so the
// inserted rows are discarded when `tx` is dropped at the end of the test.

/// Inserting a customer yields a non-zero auto-increment id.
#[tokio::test]
async fn customer_insert_returns_id() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST001")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    // LAST_INSERT_ID() is scoped to this connection, so it refers to the
    // row inserted above.
    let row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();

    let id: u64 = row.get("id");
    assert!(id > 0);
}

/// A customer inserted in the same transaction can be read back by number.
#[tokio::test]
async fn customer_find_existing() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST002")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer = sqlx::query_as::<_, invoice_generator::db::models::Customer>(
        "SELECT id, customer_number, card_report_group, created_at, updated_at
         FROM customers WHERE customer_number = ?",
    )
    .bind("TEST002")
    .fetch_one(&mut *tx)
    .await
    .unwrap();

    assert_eq!(customer.customer_number, "TEST002");
    assert_eq!(customer.card_report_group, 1);
}

/// Looking up a customer number that was never inserted returns `None`.
#[tokio::test]
async fn customer_find_nonexistent() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    // fetch_optional (rather than fetch_one) maps "no row" to None instead
    // of an error.
    let customer = sqlx::query_as::<_, invoice_generator::db::models::Customer>(
        "SELECT id, customer_number, card_report_group, created_at, updated_at
         FROM customers WHERE customer_number = ?",
    )
    .bind("NONEXISTENT")
    .fetch_optional(&mut *tx)
    .await
    .unwrap();

    assert!(customer.is_none());
}

/// One customer can own several cards; the cards table links back via
/// `customer_id`.
#[tokio::test]
async fn customer_multiple_cards() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST003")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();
    // NOTE(review): LAST_INSERT_ID() is BIGINT UNSIGNED; decoding into u32
    // assumes sqlx narrows it for this driver — confirm against sqlx's MySQL
    // decode rules.
    let customer_id: u32 = customer_row.get("id");

    sqlx::query("INSERT INTO cards (card_number, customer_id) VALUES (?, ?)")
        .bind("CARD001")
        .bind(customer_id)
        .execute(&mut *tx)
        .await
        .unwrap();

    sqlx::query("INSERT INTO cards (card_number, customer_id) VALUES (?, ?)")
        .bind("CARD002")
        .bind(customer_id)
        .execute(&mut *tx)
        .await
        .unwrap();

    let row = sqlx::query("SELECT COUNT(*) as count FROM cards WHERE customer_id = ?")
        .bind(customer_id)
        .fetch_one(&mut *tx)
        .await
        .unwrap();

    let count: i64 = row.get("count");
    assert_eq!(count, 2);
}
|
||||
|
||||
// ===== Card Tests =====
//
// Same pattern as the customer tests: every test runs in an uncommitted
// sqlx transaction so nothing persists.

/// A card inserted with a valid customer FK gets a non-zero id.
#[tokio::test]
async fn card_insert_with_customer() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    // A parent customer must exist first to satisfy the FK on cards.
    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST004")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();
    let customer_id: u32 = customer_row.get("id");

    sqlx::query("INSERT INTO cards (card_number, customer_id) VALUES (?, ?)")
        .bind("TESTCARD001")
        .bind(customer_id)
        .execute(&mut *tx)
        .await
        .unwrap();

    // LAST_INSERT_ID() now reflects the cards insert (most recent on this
    // connection).
    let card_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();

    let id: u64 = card_row.get("id");
    assert!(id > 0);
}

/// A card can be read back by its card number.
#[tokio::test]
async fn card_find_by_number() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST005")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();
    let customer_id: u32 = customer_row.get("id")
;
    sqlx::query("INSERT INTO cards (card_number, customer_id) VALUES (?, ?)")
        .bind("TESTCARD002")
        .bind(customer_id)
        .execute(&mut *tx)
        .await
        .unwrap();

    let card = sqlx::query_as::<_, invoice_generator::db::models::Card>(
        "SELECT id, card_number, customer_id, created_at, updated_at
         FROM cards WHERE card_number = ?",
    )
    .bind("TESTCARD002")
    .fetch_one(&mut *tx)
    .await
    .unwrap();

    assert_eq!(card.card_number, "TESTCARD002");
}
|
||||
|
||||
// ===== Transaction Tests =====
//
// These exercise raw INSERTs into the transactions table, again inside
// uncommitted sqlx transactions.

/// A fuel transaction linked to a known customer inserts successfully.
#[tokio::test]
async fn transaction_insert_single() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    // Parent customer for the FK.
    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST006")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();
    let customer_id: u32 = customer_row.get("id");

    sqlx::query(
        "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 10:00:00', 'TEST', 100.50, 10.5, 9.57, 1001, '95 Oktan', 'CARD123', 'S001', 'T1', 'P1', 'R001', ?)",
    )
    .bind(customer_id)
    .execute(&mut *tx)
    .await
    .unwrap();

    let tx_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();

    let id: u64 = tx_row.get("id");
    assert!(id > 0);
}

/// An anonymized transaction (masked PAN, NULL customer_id) also inserts —
/// the customer FK column must be nullable.
#[tokio::test]
async fn transaction_insert_anonymized() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 10:00:00', 'TEST', 100.50, 10.5, 9.57, 1001, '95 Oktan', 'ANON******1234', 'S001', 'T1', 'P1', 'R002', NULL)",
    )
    .execute(&mut *tx)
    .await
    .unwrap();

    let tx_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();

    let id: u64 = tx_row.get("id");
    assert!(id > 0);
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn transaction_count() {
|
||||
let pool = create_test_pool().await;
|
||||
let mut tx = pool.begin().await.unwrap();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
|
||||
)
|
||||
.bind("TEST007")
|
||||
.bind(1u8)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
|
||||
.fetch_one(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
let customer_id: u32 = customer_row.get("id");
|
||||
|
||||
for i in 0..5 {
|
||||
sqlx::query(&format!(
|
||||
"INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 10:00:00', 'TEST', {}, 10.0, 10.0, 1001, '95 Oktan', 'CARD{}', 'S001', 'T1', 'P1', 'R00{}', ?)",
|
||||
100.0 + i as f64,
|
||||
i,
|
||||
i
|
||||
))
|
||||
.bind(customer_id)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let row = sqlx::query("SELECT COUNT(*) as count FROM transactions WHERE customer_id = ?")
|
||||
.bind(customer_id)
|
||||
.fetch_one(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let count: i64 = row.get("count");
|
||||
assert_eq!(count, 5);
|
||||
}
|
||||
|
||||
// ===== Query Tests =====
|
||||
|
||||
#[tokio::test]
|
||||
async fn query_transactions_by_customer() {
|
||||
let pool = create_test_pool().await;
|
||||
let mut tx = pool.begin().await.unwrap();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
|
||||
)
|
||||
.bind("TEST008")
|
||||
.bind(1u8)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
|
||||
.fetch_one(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
let customer_id: u32 = customer_row.get("id");
|
||||
|
||||
for i in 0..3 {
|
||||
sqlx::query(&format!(
|
||||
"INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 {}:00:00', 'TEST', 100.0, 10.0, 10.0, 1001, '95 Oktan', 'CARD{}', 'S001', 'T1', 'P1', 'R00{}', ?)",
|
||||
10 + i,
|
||||
i,
|
||||
i
|
||||
))
|
||||
.bind(customer_id)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let transactions = sqlx::query_as::<_, invoice_generator::db::models::Transaction>(
|
||||
"SELECT t.id, t.transaction_date, t.batch_number, t.amount, t.volume, t.price, t.quality_code, t.quality_name, t.card_number, t.station, t.terminal, t.pump, t.receipt, t.control_number, t.customer_id, t.created_at
|
||||
FROM transactions t
|
||||
WHERE t.customer_id = ?",
|
||||
)
|
||||
.bind(customer_id)
|
||||
.fetch_all(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(transactions.len(), 3);
|
||||
}
|
||||
|
||||
/// Anonymized rows (customer_id NULL) must not show up when counting a
/// specific customer's transactions.
#[tokio::test]
async fn query_excludes_anonymous_from_customer_invoice() {
    let pool = create_test_pool().await;
    let mut tx = pool.begin().await.unwrap();

    sqlx::query(
        "INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
    )
    .bind("TEST009")
    .bind(1u8)
    .execute(&mut *tx)
    .await
    .unwrap();

    let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
        .fetch_one(&mut *tx)
        .await
        .unwrap();
    let customer_id: u32 = customer_row.get("id");

    // One transaction attributed to the customer...
    sqlx::query(
        "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 10:00:00', 'TEST', 100.0, 10.0, 10.0, 1001, '95 Oktan', 'KNOWNCARD', 'S001', 'T1', 'P1', 'R001', ?)",
    )
    .bind(customer_id)
    .execute(&mut *tx)
    .await
    .unwrap();

    // ...and one anonymized transaction with a NULL customer_id.
    sqlx::query(
        "INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 11:00:00', 'TEST', 50.0, 5.0, 10.0, 1001, '95 Oktan', 'ANON******9999', 'S001', 'T1', 'P1', 'R002', NULL)",
    )
    .execute(&mut *tx)
    .await
    .unwrap();

    // A NULL customer_id never matches `customer_id = ?` in SQL, so the
    // anonymized row is excluded.
    let row = sqlx::query(
        "SELECT COUNT(*) as count FROM transactions WHERE customer_id = ?",
    )
    .bind(customer_id)
    .fetch_one(&mut *tx)
    .await
    .unwrap();

    let count: i64 = row.get("count");
    assert_eq!(count, 1); // Only the known transaction
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn sales_summary_by_product() {
|
||||
let pool = create_test_pool().await;
|
||||
let mut tx = pool.begin().await.unwrap();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO customers (customer_number, card_report_group) VALUES (?, ?)",
|
||||
)
|
||||
.bind("TEST010")
|
||||
.bind(1u8)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let customer_row = sqlx::query("SELECT LAST_INSERT_ID() as id")
|
||||
.fetch_one(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
let customer_id: u32 = customer_row.get("id");
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 10:00:00', 'TEST', 100.0, 10.0, 10.0, 1001, '95 Oktan', 'CARD001', 'S001', 'T1', 'P1', 'R001', ?)",
|
||||
)
|
||||
.bind(customer_id)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO transactions (transaction_date, batch_number, amount, volume, price, quality_code, quality_name, card_number, station, terminal, pump, receipt, customer_id) VALUES ('2026-02-01 11:00:00', 'TEST', 50.0, 5.0, 10.0, 4, 'Diesel', 'CARD001', 'S001', 'T1', 'P1', 'R002', ?)",
|
||||
)
|
||||
.bind(customer_id)
|
||||
.execute(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let summaries = sqlx::query_as::<_, invoice_generator::db::repository::ProductSummary>(
|
||||
"SELECT quality_name, COUNT(*) as tx_count, SUM(amount) as total_amount, SUM(volume) as total_volume
|
||||
FROM transactions
|
||||
GROUP BY quality_name",
|
||||
)
|
||||
.fetch_all(&mut *tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(summaries.len(), 2); // Two products: 95 Oktan and Diesel
|
||||
}
|
||||
Reference in New Issue
Block a user