From d11e4fd0c4c3f90d52380a82915ece257a2fafae Mon Sep 17 00:00:00 2001
From: Joshua Coles
Date: Sun, 28 Dec 2025 11:57:12 +0000
Subject: [PATCH] Allow specifying the account ID and fix bug when delivering
 batched rows

---
 src/ingestion/db.rs              |  2 +-
 src/ingestion/ingestion_logic.rs |  4 +--
 src/ingestion/routes.rs          | 47 ++++++++++++++++++++++++++------
 3 files changed, 41 insertions(+), 12 deletions(-)

diff --git a/src/ingestion/db.rs b/src/ingestion/db.rs
index 60b393d..e1980e3 100644
--- a/src/ingestion/db.rs
+++ b/src/ingestion/db.rs
@@ -268,7 +268,7 @@ mod tests {
         let json: Vec<Vec<Value>> = serde_json::from_str(json).unwrap();
         let data = json
             .iter()
-            .map(|row| from_json_row(row.clone()))
+            .map(|row| from_json_row(row))
             .collect::<Result<Vec<_>, anyhow::Error>>()?;
 
         insert(&dbi.db, data.clone(), account_id).await?;

diff --git a/src/ingestion/ingestion_logic.rs b/src/ingestion/ingestion_logic.rs
index 2594502..6f0006f 100644
--- a/src/ingestion/ingestion_logic.rs
+++ b/src/ingestion/ingestion_logic.rs
@@ -140,7 +140,7 @@ fn parse_timestamp(date: &str, time: &str) -> anyhow::Result<NaiveDateTime> {
     Ok(date.and_time(time))
 }
 
-pub fn from_json_row(row: Vec<Value>) -> anyhow::Result {
+pub fn from_json_row(row: &[Value]) -> anyhow::Result {
     let date = DateTime::parse_from_rfc3339(row[headings::DATE].as_str().context("No date")?)
         .context("Failed to parse date")?;

@@ -178,7 +178,7 @@ fn test_json() {
     let json_rows = json
         .iter()
-        .map(|row| from_json_row(row.clone()))
+        .map(|row| from_json_row(&row))
         .collect::<Result<Vec<_>, anyhow::Error>>()
         .unwrap();

diff --git a/src/ingestion/routes.rs b/src/ingestion/routes.rs
index d0b1008..b28ce68 100644
--- a/src/ingestion/routes.rs
+++ b/src/ingestion/routes.rs
@@ -9,24 +9,53 @@ use sea_orm::DatabaseConnection;
 use serde_json::Value;
 use std::io::Cursor;
 
+#[derive(serde::Deserialize, Debug)]
+#[serde(untagged)]
+pub enum MonzoBatchedJsonInput {
+    Legacy(Vec<Vec<Value>>),
+    New {
+        account_id: Option,
+        rows: Vec<Vec<Value>>,
+    },
+}
+
+impl MonzoBatchedJsonInput {
+    fn account_id(&self) -> Option {
+        match self {
+            MonzoBatchedJsonInput::Legacy(_) => None,
+            MonzoBatchedJsonInput::New { account_id, .. } => *account_id,
+        }
+    }
+
+    fn rows(&self) -> &[Vec<Value>] {
+        match self {
+            MonzoBatchedJsonInput::Legacy(rows) => rows,
+            MonzoBatchedJsonInput::New { rows, .. } => rows,
+        }
+    }
+}
+
 pub async fn monzo_batched_json(
     Extension(db): Extension<DatabaseConnection>,
-    Json(data): Json<Vec<Vec<Value>>>,
+    Json(data): Json<MonzoBatchedJsonInput>,
 ) -> Result<&'static str, AppError> {
-    let data = data
-        .into_iter()
-        .skip(1) // Skip the header row.
-        .map(from_json_row)
+    let rows = data
+        .rows()
+        .iter()
+        .skip_while(|row| row[0] == Value::String("Transaction ID".to_string()))
+        .map(|row| from_json_row(row.as_ref()))
         .collect::<Result<Vec<_>, anyhow::Error>>()?;
 
     // We default to the main account for JSON ingestion for now.
-    let account_id = db::get_account_id(&db, None).await?;
-    db::insert(&db, data, account_id).await?;
+    let account_id = db::get_account_id(&db, data.account_id().map(|id| id.to_string())).await?;
+    db::insert(&db, rows, account_id).await?;
 
     Ok("Ok")
 }
 
-async fn extract_csv_and_account_name(mut multipart: Multipart) -> Result<(Option, Option), MultipartError> {
+async fn extract_csv_and_account_name(
+    mut multipart: Multipart,
+) -> Result<(Option, Option), MultipartError> {
     let mut csv = None;
     let mut account_name = None;

@@ -59,7 +88,7 @@ pub struct ShortcutBody {
 
 pub async fn shortcuts_csv(
     Extension(db): Extension<DatabaseConnection>,
-    Json(shortcut_body): Json<ShortcutBody>
+    Json(shortcut_body): Json<ShortcutBody>,
 ) -> Result<&'static str, AppError> {
     let account_id = db::get_account_id(&db, Some(shortcut_body.account_name)).await?;
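
As a rough illustration of what this change means for callers of monzo_batched_json: because MonzoBatchedJsonInput is an untagged serde enum, the endpoint keeps accepting the legacy bare-array body while also accepting an object that carries an optional account_id alongside the rows. The standalone sketch below shows serde selecting the variant for each shape; the concrete account_id type is not shown in the patch, so i64 is assumed here, and the row contents are invented sample data.

use serde_json::Value;

#[derive(serde::Deserialize, Debug)]
#[serde(untagged)]
pub enum MonzoBatchedJsonInput {
    Legacy(Vec<Vec<Value>>),
    New {
        // The concrete ID type is not shown in the patch; i64 is assumed here.
        account_id: Option<i64>,
        rows: Vec<Vec<Value>>,
    },
}

fn main() {
    // Legacy shape: a bare array of rows, possibly starting with the header row
    // that the handler now skips via skip_while on "Transaction ID".
    let legacy = r#"[["Transaction ID", "Date"], ["tx_0001", "2025-12-28T11:57:12Z"]]"#;

    // New shape: an object with an optional account_id alongside the rows.
    let new = r#"{"account_id": 2, "rows": [["tx_0001", "2025-12-28T11:57:12Z"]]}"#;

    // serde tries untagged variants in declaration order: a JSON array can only
    // match Legacy and a JSON object can only match New, so the shapes never collide.
    let a: MonzoBatchedJsonInput = serde_json::from_str(legacy).unwrap();
    let b: MonzoBatchedJsonInput = serde_json::from_str(new).unwrap();
    println!("{a:?}");
    println!("{b:?}");
}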