Allow specifying the account ID and fix bug when delivering batched rows
All checks were successful
Build and Publish / Build and Test (push) Successful in 9m12s

Joshua Coles 2025-12-28 11:57:12 +00:00
parent a2ba83e6f8
commit d11e4fd0c4
3 changed files with 41 additions and 12 deletions

@@ -268,7 +268,7 @@ mod tests {
         let json: Vec<Vec<Value>> = serde_json::from_str(json).unwrap();
         let data = json
             .iter()
-            .map(|row| from_json_row(row.clone()))
+            .map(|row| from_json_row(row))
             .collect::<Result<Vec<_>, anyhow::Error>>()?;
         insert(&dbi.db, data.clone(), account_id).await?;

@@ -140,7 +140,7 @@ fn parse_timestamp(date: &str, time: &str) -> anyhow::Result<NaiveDateTime> {
     Ok(date.and_time(time))
 }
-pub fn from_json_row(row: Vec<Value>) -> anyhow::Result<MonzoRow> {
+pub fn from_json_row(row: &[Value]) -> anyhow::Result<MonzoRow> {
     let date = DateTime::parse_from_rfc3339(row[headings::DATE].as_str().context("No date")?)
         .context("Failed to parse date")?;
@@ -178,7 +178,7 @@ fn test_json() {
     let json_rows = json
         .iter()
-        .map(|row| from_json_row(row.clone()))
+        .map(|row| from_json_row(&row))
        .collect::<Result<Vec<_>, anyhow::Error>>()
        .unwrap();
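The signature change above lets callers hand `from_json_row` a borrowed slice instead of a cloned, owned row. A minimal sketch of why the clones can go away, assuming only the serde_json and anyhow crates; `from_json_row_stub` is a hypothetical stand-in, not the repository's parser:

use serde_json::Value;

// Hypothetical stand-in for the real parser: only the `&[Value]` parameter
// mirrors the commit; the body just reports the number of cells.
fn from_json_row_stub(row: &[Value]) -> anyhow::Result<usize> {
    Ok(row.len())
}

fn main() -> anyhow::Result<()> {
    let rows: Vec<Vec<Value>> = vec![vec![Value::String("tx_0000001".into())]];
    // `.iter()` yields `&Vec<Value>`, which deref-coerces to `&[Value]`,
    // so no `row.clone()` is needed at the call site.
    let cell_counts = rows
        .iter()
        .map(|row| from_json_row_stub(row))
        .collect::<Result<Vec<_>, anyhow::Error>>()?;
    assert_eq!(cell_counts, vec![1]);
    Ok(())
}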

@@ -9,24 +9,53 @@ use sea_orm::DatabaseConnection;
 use serde_json::Value;
 use std::io::Cursor;
+#[derive(serde::Deserialize, Debug)]
+#[serde(untagged)]
+pub enum MonzoBatchedJsonInput {
+    Legacy(Vec<Vec<Value>>),
+    New {
+        account_id: Option<u8>,
+        rows: Vec<Vec<Value>>,
+    },
+}
+impl MonzoBatchedJsonInput {
+    fn account_id(&self) -> Option<u8> {
+        match self {
+            MonzoBatchedJsonInput::Legacy(_) => None,
+            MonzoBatchedJsonInput::New { account_id, .. } => *account_id,
+        }
+    }
+    fn rows(&self) -> &[Vec<Value>] {
+        match self {
+            MonzoBatchedJsonInput::Legacy(rows) => rows,
+            MonzoBatchedJsonInput::New { rows, .. } => rows,
+        }
+    }
+}
 pub async fn monzo_batched_json(
     Extension(db): Extension<DatabaseConnection>,
-    Json(data): Json<Vec<Vec<Value>>>,
+    Json(data): Json<MonzoBatchedJsonInput>,
 ) -> Result<&'static str, AppError> {
-    let data = data
-        .into_iter()
-        .skip(1) // Skip the header row.
-        .map(from_json_row)
+    let rows = data
+        .rows()
+        .iter()
+        .skip_while(|row| row[0] == Value::String("Transaction ID".to_string()))
+        .map(|row| from_json_row(row.as_ref()))
         .collect::<Result<_, _>>()?;
-    // We default to the main account for JSON ingestion for now.
-    let account_id = db::get_account_id(&db, None).await?;
-    db::insert(&db, data, account_id).await?;
+    let account_id = db::get_account_id(&db, data.account_id().map(|id| id.to_string())).await?;
+    db::insert(&db, rows, account_id).await?;
     Ok("Ok")
 }
-async fn extract_csv_and_account_name(mut multipart: Multipart) -> Result<(Option<Bytes>, Option<String>), MultipartError> {
+async fn extract_csv_and_account_name(
+    mut multipart: Multipart,
+) -> Result<(Option<Bytes>, Option<String>), MultipartError> {
     let mut csv = None;
     let mut account_name = None;
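The new request body stays backward compatible: with #[serde(untagged)], serde tries each variant in order, so a bare JSON array still deserializes as Legacy while an object carrying rows (and optionally account_id) becomes New. A small sketch of both payload shapes, assuming only the serde and serde_json crates; the sample JSON is illustrative, not taken from the repository:

use serde_json::Value;

#[derive(serde::Deserialize, Debug)]
#[serde(untagged)]
pub enum MonzoBatchedJsonInput {
    Legacy(Vec<Vec<Value>>),
    New {
        account_id: Option<u8>,
        rows: Vec<Vec<Value>>,
    },
}

fn main() -> Result<(), serde_json::Error> {
    // Old clients keep posting a bare array of rows (header row included).
    let legacy: MonzoBatchedJsonInput =
        serde_json::from_str(r#"[["Transaction ID"], ["tx_0000001"]]"#)?;
    assert!(matches!(legacy, MonzoBatchedJsonInput::Legacy(_)));

    // New clients wrap the rows and may name the account to ingest into.
    let new: MonzoBatchedJsonInput =
        serde_json::from_str(r#"{"account_id": 2, "rows": [["tx_0000001"]]}"#)?;
    assert!(matches!(new, MonzoBatchedJsonInput::New { account_id: Some(2), .. }));
    Ok(())
}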
@@ -59,7 +88,7 @@ pub struct ShortcutBody {
 pub async fn shortcuts_csv(
     Extension(db): Extension<DatabaseConnection>,
-    Json(shortcut_body): Json<ShortcutBody>
+    Json(shortcut_body): Json<ShortcutBody>,
 ) -> Result<&'static str, AppError> {
     let account_id = db::get_account_id(&db, Some(shortcut_body.account_name)).await?;
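On the bug-fix side, the handler previously dropped the first row unconditionally with .skip(1); the new .skip_while only discards leading rows whose first cell is the "Transaction ID" heading, so a payload that arrives without a header row (presumably the failure mode behind this commit) no longer loses its first transaction. A small sketch of the difference, using serde_json and made-up row data:

use serde_json::{json, Value};

fn main() {
    // Same predicate shape as the handler: a header row starts with "Transaction ID".
    let header = |row: &&Vec<Value>| row[0] == Value::String("Transaction ID".to_string());

    let with_header: Vec<Vec<Value>> = vec![
        vec![json!("Transaction ID"), json!("Date")],
        vec![json!("tx_0000001"), json!("2025-12-28T11:57:12Z")],
    ];
    let without_header: Vec<Vec<Value>> = vec![
        vec![json!("tx_0000001"), json!("2025-12-28T11:57:12Z")],
    ];

    // skip_while drops the header when present and nothing otherwise...
    assert_eq!(with_header.iter().skip_while(header).count(), 1);
    assert_eq!(without_header.iter().skip_while(header).count(), 1);

    // ...whereas skip(1) silently discards the only data row here.
    assert_eq!(without_header.iter().skip(1).count(), 0);
}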