From e73123b7f92f407da96a29754dae34c7dba195ac Mon Sep 17 00:00:00 2001
From: Joshua Coles
Date: Fri, 26 Apr 2024 21:44:04 +0100
Subject: [PATCH] Add SQL mode and make it use the `query!` macro for type
 checking

---
 .github/workflows/build.yml                   |  2 ++
 ...8ec47fd7277be74c47a59484e4fc993671d43.json | 22 ++++++++++++++++++
 ...58a7655f178080465f7abd7f7702d42474157.json | 14 +++++++++++
 build.rs                                      |  1 +
 functions/update_places.sql                   |  3 ++-
 functions/update_timeline_items.sql           |  3 ++-
 src/main.rs                                   | 23 +++++++++++++++----
 7 files changed, 61 insertions(+), 7 deletions(-)
 create mode 100644 .sqlx/query-ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43.json
 create mode 100644 .sqlx/query-f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157.json

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3fd2dd8..2ffc34a 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -45,6 +45,8 @@ jobs:
 
       - name: Build release binary
         run: cargo build --release
+        env:
+          SQLX_OFFLINE: true
 
       - name: Upload binary
         uses: actions/upload-artifact@v3
diff --git a/.sqlx/query-ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43.json b/.sqlx/query-ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43.json
new file mode 100644
index 0000000..7a059d0
--- /dev/null
+++ b/.sqlx/query-ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43.json
@@ -0,0 +1,22 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem\n from raw_files\n where date = ANY ($1)),\n max_last_saved as (select timelineItem ->> 'itemId' as itemId,\n max((timelineItem ->> 'lastSaved') :: timestamptz) as latest_last_saved\n from timelineItems\n group by timelineItem ->> 'itemId'),\n unique_timline_items as (select distinct on (max_last_saved.itemId) *\n from max_last_saved\n inner join timelineItems\n on timelineItems.timelineItem ->> 'itemId' = max_last_saved.itemId\n and (timelineItems.timelineItem ->> 'lastSaved') :: timestamptz =\n max_last_saved.latest_last_saved)\ninsert\ninto public.timeline_item (item_id, json, place_id, end_date, last_saved, server_last_updated)\nselect unique_timline_items.itemId :: uuid as item_id,\n unique_timline_items.timelineItem as json,\n (unique_timline_items.timelineItem -> 'place' ->> 'placeId') :: uuid as place_id,\n (unique_timline_items.timelineItem ->> 'endDate') :: timestamptz as end_date,\n unique_timline_items.latest_last_saved :: timestamptz as last_saved,\n now() as server_last_updated\nfrom unique_timline_items\non conflict (item_id) do update set json = excluded.json,\n place_id = excluded.place_id,\n end_date = excluded.end_date,\n last_saved = excluded.last_saved,\n server_last_updated = excluded.server_last_updated\nwhere excluded.last_saved > public.timeline_item.last_saved\nreturning item_id;\n",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "item_id",
+        "type_info": "Uuid"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "TextArray"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43"
+}
diff --git a/.sqlx/query-f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157.json b/.sqlx/query-f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157.json
new file mode 100644
index 0000000..5cc43bb
--- /dev/null
+++ b/.sqlx/query-f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem\n from raw_files\n where date = ANY ($1)),\n places as (select distinct on (md5(timelineItem ->> 'place' :: text)) timelineItem -> 'place' as place,\n timelineItem -> 'place' ->> 'placeId' as placeId,\n (timelineItem -> 'place' ->> 'lastSaved') :: timestamptz as lastSaved\n from timelineItems\n where timelineItem ->> 'place' is not null),\n places_with_max_last_saved as (select place -> 'placeId' as placeId,\n max((place ->> 'lastSaved') :: timestamptz) as latest_last_saved\n from places\n group by place -> 'placeId'),\n latest_places as (select places.*\n from places_with_max_last_saved\n inner join places on places.place -> 'placeId' = places_with_max_last_saved.placeId and\n places.lastSaved =\n places_with_max_last_saved.latest_last_saved)\ninsert\ninto public.place (place_id, json, last_saved, server_last_updated)\nselect (placeId :: uuid) as place_id, place as json, lastSaved as last_saved, now() as server_last_updated\nfrom latest_places\non conflict (place_id) do update set json = excluded.json,\n last_saved = excluded.last_saved,\n server_last_updated = excluded.server_last_updated\nwhere excluded.last_saved > public.place.last_saved;\n",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "TextArray"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157"
+}
diff --git a/build.rs b/build.rs
index d506869..ebb11d1 100644
--- a/build.rs
+++ b/build.rs
@@ -2,4 +2,5 @@ fn main() {
 
     // trigger recompilation when a new migration is added
     println!("cargo:rerun-if-changed=migrations");
+    println!("cargo:rustc-env=DATABASE_URL=postgres://joshuacoles@localhost/arc_test");
 }
diff --git a/functions/update_places.sql b/functions/update_places.sql
index 9d339ec..06a6dc6 100644
--- a/functions/update_places.sql
+++ b/functions/update_places.sql
@@ -1,5 +1,6 @@
 with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem
-                       from raw_files),
+                       from raw_files
+                       where date = ANY ($1)),
      places as (select distinct on (md5(timelineItem ->> 'place' :: text)) timelineItem -> 'place' as place,
                 timelineItem -> 'place' ->> 'placeId' as placeId,
                 (timelineItem -> 'place' ->> 'lastSaved') :: timestamptz as lastSaved
diff --git a/functions/update_timeline_items.sql b/functions/update_timeline_items.sql
index 5265475..0e82ac4 100644
--- a/functions/update_timeline_items.sql
+++ b/functions/update_timeline_items.sql
@@ -1,5 +1,6 @@
 with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem
-                       from raw_files),
+                       from raw_files
+                       where date = ANY ($1)),
      max_last_saved as (select timelineItem ->> 'itemId' as itemId,
                         max((timelineItem ->> 'lastSaved') :: timestamptz) as latest_last_saved
                         from timelineItems
diff --git a/src/main.rs b/src/main.rs
index 29c053c..2c95c84 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -161,7 +161,7 @@ async fn main() -> anyhow::Result<()> {
         .await?;
 
     if cli.sql_only {
-        update_data_sql(&db)
+        update_data_sql(&db, need_refresh)
             .await?;
     } else {
         update_data(&db, need_refresh)
@@ -171,10 +171,23 @@
     Ok(())
 }
 
-#[instrument(skip(db))]
-async fn update_data_sql(db: &PgPool) -> anyhow::Result<()> {
-    db.execute(include_str!("../functions/update_places.sql")).await?;
-    db.execute(include_str!("../functions/update_timeline_items.sql")).await?;
+#[instrument(skip(db, need_refresh))]
+async fn update_data_sql(db: &PgPool, need_refresh: Vec) -> anyhow::Result<()> {
+    let vec = need_refresh.iter()
+        .map(|d| d.date.clone())
+        .collect_vec();
+
+    let result = sqlx::query_file!("functions/update_places.sql", &vec)
+        .execute(db)
+        .await?;
+
+    tracing::info!("Updated {} places", result.rows_affected());
+
+    let updated = sqlx::query_file!("functions/update_timeline_items.sql", &vec)
+        .fetch_all(db)
+        .await?;
+
+    tracing::info!("Updated {} timeline items", updated.len());
 
     Ok(())
 }
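
Note (illustrative, not part of the patch): the `.sqlx/query-*.json` files added above are the metadata that `cargo sqlx prepare` writes for each `query!`/`query_file!` call site; with `SQLX_OFFLINE: true` set in the workflow, the macros type-check against that cache instead of a live `DATABASE_URL`. A minimal sketch of the call shape, assuming the refreshed dates are collected into a `Vec<String>` bound to the `text[]` parameter (the `refresh_places` name is hypothetical):

    use sqlx::PgPool;

    // Sketch only: mirrors how the patch binds the refreshed dates to $1 in
    // functions/update_places.sql. The SQL is checked at compile time against
    // .sqlx/ when SQLX_OFFLINE=true, or against DATABASE_URL when online.
    async fn refresh_places(db: &PgPool, dates: Vec<String>) -> anyhow::Result<u64> {
        let result = sqlx::query_file!("functions/update_places.sql", &dates)
            .execute(db)
            .await?;
        Ok(result.rows_affected())
    }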