Add SQL mode and make it use the query! macro for type checking

Joshua Coles 2024-04-26 21:44:04 +01:00
parent ef15a630d6
commit e73123b7f9
7 changed files with 61 additions and 7 deletions

View File

@@ -45,6 +45,8 @@ jobs:
       - name: Build release binary
         run: cargo build --release
+        env:
+          SQLX_OFFLINE: true
       - name: Upload binary
         uses: actions/upload-artifact@v3

.sqlx/query-ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43.json View File

@@ -0,0 +1,22 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem\n from raw_files\n where date = ANY ($1)),\n max_last_saved as (select timelineItem ->> 'itemId' as itemId,\n max((timelineItem ->> 'lastSaved') :: timestamptz) as latest_last_saved\n from timelineItems\n group by timelineItem ->> 'itemId'),\n unique_timline_items as (select distinct on (max_last_saved.itemId) *\n from max_last_saved\n inner join timelineItems\n on timelineItems.timelineItem ->> 'itemId' = max_last_saved.itemId\n and (timelineItems.timelineItem ->> 'lastSaved') :: timestamptz =\n max_last_saved.latest_last_saved)\ninsert\ninto public.timeline_item (item_id, json, place_id, end_date, last_saved, server_last_updated)\nselect unique_timline_items.itemId :: uuid as item_id,\n unique_timline_items.timelineItem as json,\n (unique_timline_items.timelineItem -> 'place' ->> 'placeId') :: uuid as place_id,\n (unique_timline_items.timelineItem ->> 'endDate') :: timestamptz as end_date,\n unique_timline_items.latest_last_saved :: timestamptz as last_saved,\n now() as server_last_updated\nfrom unique_timline_items\non conflict (item_id) do update set json = excluded.json,\n place_id = excluded.place_id,\n end_date = excluded.end_date,\n last_saved = excluded.last_saved,\n server_last_updated = excluded.server_last_updated\nwhere excluded.last_saved > public.timeline_item.last_saved\nreturning item_id;\n",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "item_id",
+        "type_info": "Uuid"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "TextArray"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "ac15bfcd1737751e27388ffddfe8ec47fd7277be74c47a59484e4fc993671d43"
+}
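This file is sqlx's offline query cache: cargo sqlx prepare runs the query against a development database and stores the resulting describe metadata under .sqlx/, keyed by a hash of the SQL. With SQLX_OFFLINE: true set in the workflow above, the macros type-check against this cache instead of connecting to DATABASE_URL, which is what lets CI build without a Postgres instance. Roughly, the metadata tells the macro to emit a record shaped like this (name hypothetical; assumes sqlx's "uuid" feature):

// Hypothetical sketch of the anonymous record query_file! derives from
// the cached metadata above: one column at ordinal 0, named item_id,
// Postgres type Uuid, not nullable, so it maps to Uuid, not Option<Uuid>.
struct TimelineItemRow {
    item_id: sqlx::types::Uuid,
}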

.sqlx/query-f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157.json View File

@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem\n from raw_files\n where date = ANY ($1)),\n places as (select distinct on (md5(timelineItem ->> 'place' :: text)) timelineItem -> 'place' as place,\n timelineItem -> 'place' ->> 'placeId' as placeId,\n (timelineItem -> 'place' ->> 'lastSaved') :: timestamptz as lastSaved\n from timelineItems\n where timelineItem ->> 'place' is not null),\n places_with_max_last_saved as (select place -> 'placeId' as placeId,\n max((place ->> 'lastSaved') :: timestamptz) as latest_last_saved\n from places\n group by place -> 'placeId'),\n latest_places as (select places.*\n from places_with_max_last_saved\n inner join places on places.place -> 'placeId' = places_with_max_last_saved.placeId and\n places.lastSaved =\n places_with_max_last_saved.latest_last_saved)\ninsert\ninto public.place (place_id, json, last_saved, server_last_updated)\nselect (placeId :: uuid) as place_id, place as json, lastSaved as last_saved, now() as server_last_updated\nfrom latest_places\non conflict (place_id) do update set json = excluded.json,\n last_saved = excluded.last_saved,\n server_last_updated = excluded.server_last_updated\nwhere excluded.last_saved > public.place.last_saved;\n",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "TextArray"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "f0402ce4a5c93837f39559bfb3358a7655f178080465f7abd7f7702d42474157"
+}

build.rs View File

@@ -2,4 +2,5 @@
 fn main() {
     // trigger recompilation when a new migration is added
     println!("cargo:rerun-if-changed=migrations");
+    println!("cargo:rustc-env=DATABASE_URL=postgres://joshuacoles@localhost/arc_test");
 }
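The cargo:rustc-env line injects DATABASE_URL into the compiler's environment, so the sqlx macros can reach the local development database at compile time without a .env file; CI sidesteps the connection entirely via SQLX_OFFLINE. Hard-coding the URL does tie the build script to one machine; a variant that defers to an externally set DATABASE_URL might look like this (a sketch, not part of this commit):

// Hypothetical build.rs variant: prefer an externally supplied
// DATABASE_URL, falling back to the hard-coded local dev database.
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");

    let url = std::env::var("DATABASE_URL")
        .unwrap_or_else(|_| "postgres://joshuacoles@localhost/arc_test".to_string());
    println!("cargo:rustc-env=DATABASE_URL={url}");
}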

functions/update_places.sql View File

@@ -1,5 +1,6 @@
 with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem
-                       from raw_files),
+                       from raw_files
+                       where date = ANY ($1)),
      places as (select distinct on (md5(timelineItem ->> 'place' :: text)) timelineItem -> 'place' as place,
                 timelineItem -> 'place' ->> 'placeId' as placeId,
                 (timelineItem -> 'place' ->> 'lastSaved') :: timestamptz as lastSaved

functions/update_timeline_items.sql View File

@@ -1,5 +1,6 @@
 with timelineItems as (select jsonb_array_elements(raw_files.json -> 'timelineItems') as timelineItem
-                       from raw_files),
+                       from raw_files
+                       where date = ANY ($1)),
      max_last_saved as (select timelineItem ->> 'itemId' as itemId,
                         max((timelineItem ->> 'lastSaved') :: timestamptz) as latest_last_saved
                 from timelineItems
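Both SQL files gain the same where date = ANY ($1) guard, so a run re-scans only the dates whose raw files changed rather than the whole raw_files table; the parameter binds as the Postgres text[] recorded as TextArray in the cache files above. A minimal runtime-checked sketch of the same binding (helper name hypothetical):

use sqlx::PgPool;

// Hypothetical helper showing how a Rust slice binds to the text[]
// behind `= ANY ($1)`; query_scalar checks types at runtime, unlike
// the compile-time-checked query_file! calls in main.rs below.
async fn count_changed_files(db: &PgPool, dates: &[String]) -> anyhow::Result<i64> {
    let n: i64 = sqlx::query_scalar("select count(*) from raw_files where date = ANY($1)")
        .bind(dates)
        .fetch_one(db)
        .await?;
    Ok(n)
}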

src/main.rs View File

@@ -161,7 +161,7 @@ async fn main() -> anyhow::Result<()> {
         .await?;
 
     if cli.sql_only {
-        update_data_sql(&db)
+        update_data_sql(&db, need_refresh)
             .await?;
     } else {
         update_data(&db, need_refresh)
@@ -171,10 +171,23 @@ async fn main() -> anyhow::Result<()> {
     Ok(())
 }
 
-#[instrument(skip(db))]
-async fn update_data_sql(db: &PgPool) -> anyhow::Result<()> {
-    db.execute(include_str!("../functions/update_places.sql")).await?;
-    db.execute(include_str!("../functions/update_timeline_items.sql")).await?;
-
+#[instrument(skip(db, need_refresh))]
+async fn update_data_sql(db: &PgPool, need_refresh: Vec<UpdatedFile>) -> anyhow::Result<()> {
+    let vec = need_refresh.iter()
+        .map(|d| d.date.clone())
+        .collect_vec();
+
+    let result = sqlx::query_file!("functions/update_places.sql", &vec)
+        .execute(db)
+        .await?;
+
+    tracing::info!("Updated {} places", result.rows_affected());
+
+    let updated = sqlx::query_file!("functions/update_timeline_items.sql", &vec)
+        .fetch_all(db)
+        .await?;
+
+    tracing::info!("Updated {} timeline items", updated.len());
+
     Ok(())
 }
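Swapping db.execute(include_str!(...)) for sqlx::query_file! is what delivers the type checking named in the commit message: the macro loads the SQL relative to the crate root, matches it against the describe metadata (from a live DATABASE_URL or the .sqlx cache), and turns a wrong $1 type or a misspelled output column into a compile error. A sketch of consuming the typed rows (helper name hypothetical; assumes sqlx's "uuid" feature):

use sqlx::PgPool;

// Hypothetical illustration: query_file! derives an anonymous record
// from the cached metadata, so row.item_id below is a Uuid checked at
// compile time, not a runtime downcast.
async fn show_refreshed_items(db: &PgPool, dates: &[String]) -> anyhow::Result<()> {
    let rows = sqlx::query_file!("functions/update_timeline_items.sql", dates)
        .fetch_all(db)
        .await?;

    for row in rows {
        println!("refreshed timeline item {}", row.item_id);
    }

    Ok(())
}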