From b722b21a759fdb7fbdeb6ea4ccd712efb71a0731 Mon Sep 17 00:00:00 2001
From: Joshua Coles
Date: Sat, 2 Mar 2024 10:21:33 +0000
Subject: [PATCH] Add time entries handling functions in Toggl API client

---
 src/toggl_api/api_client.rs | 133 +++++++++++++++++++++++-------------
 1 file changed, 85 insertions(+), 48 deletions(-)

diff --git a/src/toggl_api/api_client.rs b/src/toggl_api/api_client.rs
index b775c3c..c1ec070 100644
--- a/src/toggl_api/api_client.rs
+++ b/src/toggl_api/api_client.rs
@@ -1,4 +1,4 @@
-use reqwest::{Client, RequestBuilder, Response};
+use reqwest::{Client, RequestBuilder, Response, Url};
 use serde_json::Value;
 use std::collections::HashMap;
 use std::time::Duration;
@@ -6,6 +6,7 @@ use anyhow::anyhow;
 use axum::http::StatusCode;
 use base64::Engine;
 use base64::engine::general_purpose::STANDARD;
+use chrono::{DateTime, Utc};
 use hyper::HeaderMap;
 use reqwest::header::HeaderValue;
 use tracing::instrument;
@@ -100,6 +101,39 @@ impl TogglApiClient {
         Ok(clients)
     }
 
+    pub async fn fetch_time_entries_modified_since(&self, date_time: DateTime<Utc>) -> crate::Result<Vec<TimeEntry>> {
+        let url = format!(
+            "{base_url}/me/time_entries",
+            base_url = self.base_url
+        );
+
+        Ok(
+            self.make_request(self.client.get(url)
+                .query(&[("since", date_time.timestamp())]))
+                .await?
+                .json::<Vec<TimeEntry>>()
+                .await?
+        )
+    }
+
+    pub async fn fetch_time_entries_in_range(&self, (start, end): (DateTime<Utc>, DateTime<Utc>)) -> crate::Result<Vec<TimeEntry>> {
+        let url = format!(
+            "{base_url}/me/time_entries",
+            base_url = self.base_url
+        );
+
+        Ok(
+            self.make_request(self.client.get(url)
+                .query(&[
+                    ("start_date", start.to_rfc3339()),
+                    ("end_date", end.to_rfc3339())
+                ]))
+                .await?
+                .json::<Vec<TimeEntry>>()
+                .await?
+        )
+    }
+
     pub async fn fetch_current_time_entry(&self) -> crate::Result<Option<TimeEntry>> {
         let url = format!(
             "{base_url}/me/time_entries/current",
@@ -116,53 +150,6 @@ impl TogglApiClient {
         Ok(res)
     }
 
-    fn paginate_filters(original_filters: &TogglReportQuery, last_row_id: u64) -> TogglReportQuery {
-        let mut filters: TogglReportQuery = original_filters.clone();
-        filters.first_row_number = Some(last_row_id + 1);
-        filters
-    }
-
-    #[instrument(skip(self, filters))]
-    pub async fn full_report(
-        &self,
-        filters: &TogglReportQuery,
-    ) -> anyhow::Result<Vec<ReportTimeEntry>> {
-        let url = format!(
-            "{base_url}/workspace/{workspace_id}/search/time_entries",
-            base_url = self.reports_base_url,
-            workspace_id = self.workspace_id
-        );
-
-        let mut last_row_number = Some(0);
-        let mut results = vec![];
-
-        while let Some(last_row_number_n) = last_row_number {
-            debug!("Fetching page starting with {}", last_row_number_n);
-            // If we are not on the first page, wait a bit to avoid rate limiting
-            if last_row_number_n != 0 {
-                tokio::time::sleep(Duration::from_secs(1)).await;
-            }
-
-            // TODO: Implement rate limiting
-            let response = self
-                .client
-                .post(&url)
-                .json(&Self::paginate_filters(&filters, last_row_number_n))
-                .send()
-                .await?;
-
-            let data = response
-                .json::<Vec<ReportTimeEntry>>()
-                .await?;
-
-            last_row_number = data.last().map(|e| e.row_number as u64);
-
-            data.into_iter().for_each(|e| results.push(e));
-        }
-
-        Ok(results)
-    }
-
     pub async fn start_time_entry(&self, mut body: HashMap<String, Value>) -> crate::Result<()> {
         let url = format!(
             "{base_url}/workspaces/{workspace_id}/time_entries",
@@ -181,4 +168,54 @@ impl TogglApiClient {
 
         Ok(())
     }
+
+    /////////////
+    // Reports //
+    /////////////
+
+    fn paginate_filters(original_filters: &TogglReportQuery, last_row_id: u64) -> TogglReportQuery {
+        let mut filters: TogglReportQuery = original_filters.clone();
+        filters.first_row_number = Some(last_row_id + 1);
+        filters
+    }
+
+    #[instrument(skip(self, filters))]
+    pub async fn full_report(
+        &self,
+        filters: &TogglReportQuery,
+    ) -> crate::Result<Vec<ReportTimeEntry>> {
+        let url = format!(
+            "{base_url}/workspace/{workspace_id}/search/time_entries",
+            base_url = self.reports_base_url,
+            workspace_id = self.workspace_id
+        );
+
+        let mut last_row_number = Some(0);
+        let mut results = vec![];
+
+        while let Some(last_row_number_n) = last_row_number {
+            debug!("Fetching page starting with {}", last_row_number_n);
+            // If we are not on the first page, wait a bit to avoid rate limiting
+            if last_row_number_n != 0 {
+                tokio::time::sleep(Duration::from_secs(1)).await;
+            }
+
+            // TODO: Implement rate limiting
+            let response = self.make_request(self
+                .client
+                .post(&url)
+                .json(&Self::paginate_filters(&filters, last_row_number_n)))
+                .await?;
+
+            let data = response
+                .json::<Vec<ReportTimeEntry>>()
+                .await?;
+
+            last_row_number = data.last().map(|e| e.row_number as u64);
+
+            data.into_iter().for_each(|e| results.push(e));
+        }
+
+        Ok(results)
+    }
 }
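
For context, a minimal sketch of how the new fetch methods might be called once this patch is applied. The caller below, the `TimeEntry` element type, and the assumption that `crate::Result` behaves like `anyhow::Result` are illustrative guesses based on the signatures in the diff, not part of the patch itself.

use chrono::{Duration, Utc};

// Hypothetical caller; assumes an already-configured `TogglApiClient`.
async fn print_recent_activity(client: &TogglApiClient) -> crate::Result<()> {
    let now = Utc::now();

    // `since` is sent as a Unix timestamp: entries modified in the last 24 hours.
    let modified = client
        .fetch_time_entries_modified_since(now - Duration::hours(24))
        .await?;

    // `start_date`/`end_date` are sent as RFC 3339 strings: entries from the last week.
    let last_week = client
        .fetch_time_entries_in_range((now - Duration::days(7), now))
        .await?;

    println!("{} modified, {} in the last week", modified.len(), last_week.len());
    Ok(())
}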