Add time entry handling functions to the Toggl API client
parent 90457155aa
commit b722b21a75
@@ -1,4 +1,4 @@
-use reqwest::{Client, RequestBuilder, Response};
+use reqwest::{Client, RequestBuilder, Response, Url};
 use serde_json::Value;
 use std::collections::HashMap;
 use std::time::Duration;
@@ -6,6 +6,7 @@ use anyhow::anyhow;
 use axum::http::StatusCode;
 use base64::Engine;
 use base64::engine::general_purpose::STANDARD;
+use chrono::{DateTime, Utc};
 use hyper::HeaderMap;
 use reqwest::header::HeaderValue;
 use tracing::instrument;
@@ -100,6 +101,39 @@ impl TogglApiClient {
         Ok(clients)
     }
 
+    pub async fn fetch_time_entries_modified_since(&self, date_time: DateTime<Utc>) -> crate::Result<Vec<TimeEntry>> {
+        let url = format!(
+            "{base_url}/me/time_entries",
+            base_url = self.base_url
+        );
+
+        Ok(
+            self.make_request(self.client.get(url)
+                .query(&[("since", date_time.timestamp())]))
+            .await?
+            .json::<Vec<TimeEntry>>()
+            .await?
+        )
+    }
+
+    pub async fn fetch_time_entries_in_range(&self, (start, end): (DateTime<Utc>, DateTime<Utc>)) -> crate::Result<Vec<TimeEntry>> {
+        let url = format!(
+            "{base_url}/me/time_entries",
+            base_url = self.base_url
+        );
+
+        Ok(
+            self.make_request(self.client.get(url)
+                .query(&[
+                    ("start_date", start.to_rfc3339()),
+                    ("end_date", end.to_rfc3339())
+                ]))
+            .await?
+            .json::<Vec<TimeEntry>>()
+            .await?
+        )
+    }
+
     pub async fn fetch_current_time_entry(&self) -> crate::Result<Option<TimeEntry>> {
         let url = format!(
             "{base_url}/me/time_entries/current",
@@ -116,53 +150,6 @@ impl TogglApiClient {
         Ok(res)
     }
 
-    fn paginate_filters(original_filters: &TogglReportQuery, last_row_id: u64) -> TogglReportQuery {
-        let mut filters: TogglReportQuery = original_filters.clone();
-        filters.first_row_number = Some(last_row_id + 1);
-        filters
-    }
-
-    #[instrument(skip(self, filters))]
-    pub async fn full_report(
-        &self,
-        filters: &TogglReportQuery,
-    ) -> anyhow::Result<Vec<ReportRow>> {
-        let url = format!(
-            "{base_url}/workspace/{workspace_id}/search/time_entries",
-            base_url = self.reports_base_url,
-            workspace_id = self.workspace_id
-        );
-
-        let mut last_row_number = Some(0);
-        let mut results = vec![];
-
-        while let Some(last_row_number_n) = last_row_number {
-            debug!("Fetching page starting with {}", last_row_number_n);
-            // If we are not on the first page, wait a bit to avoid rate limiting
-            if last_row_number_n != 0 {
-                tokio::time::sleep(Duration::from_secs(1)).await;
-            }
-
-            // TODO: Implement rate limiting
-            let response = self
-                .client
-                .post(&url)
-                .json(&Self::paginate_filters(&filters, last_row_number_n))
-                .send()
-                .await?;
-
-            let data = response
-                .json::<Vec<ReportRow>>()
-                .await?;
-
-            last_row_number = data.last().map(|e| e.row_number as u64);
-
-            data.into_iter().for_each(|e| results.push(e));
-        }
-
-        Ok(results)
-    }
-
     pub async fn start_time_entry(&self, mut body: HashMap<String, Value>) -> crate::Result<()> {
         let url = format!(
             "{base_url}/workspaces/{workspace_id}/time_entries",
@@ -181,4 +168,54 @@ impl TogglApiClient {
 
         Ok(())
     }
+
+    /////////////
+    // Reports //
+    /////////////
+
+    fn paginate_filters(original_filters: &TogglReportQuery, last_row_id: u64) -> TogglReportQuery {
+        let mut filters: TogglReportQuery = original_filters.clone();
+        filters.first_row_number = Some(last_row_id + 1);
+        filters
+    }
+
+    #[instrument(skip(self, filters))]
+    pub async fn full_report(
+        &self,
+        filters: &TogglReportQuery,
+    ) -> crate::Result<Vec<ReportRow>> {
+        let url = format!(
+            "{base_url}/workspace/{workspace_id}/search/time_entries",
+            base_url = self.reports_base_url,
+            workspace_id = self.workspace_id
+        );
+
+        let mut last_row_number = Some(0);
+        let mut results = vec![];
+
+        while let Some(last_row_number_n) = last_row_number {
+            debug!("Fetching page starting with {}", last_row_number_n);
+            // If we are not on the first page, wait a bit to avoid rate limiting
+            if last_row_number_n != 0 {
+                tokio::time::sleep(Duration::from_secs(1)).await;
+            }
+
+            // TODO: Implement rate limiting
+            let response = self.make_request(self
+                .client
+                .post(&url)
+                .json(&Self::paginate_filters(&filters, last_row_number_n)))
+                .await?;
+
+            let data = response
+                .json::<Vec<ReportRow>>()
+                .await?;
+
+            last_row_number = data.last().map(|e| e.row_number as u64);
+
+            data.into_iter().for_each(|e| results.push(e));
+        }
+
+        Ok(results)
+    }
 }
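
A minimal usage sketch for the two new time-entry helpers (not part of the commit): it assumes an already-configured TogglApiClient named `client` obtained elsewhere in the crate, and only counts the returned entries, since the fields of TimeEntry are not visible in this diff.

use chrono::{Duration, Utc};

// Illustration only: `client` is assumed to be a configured TogglApiClient from this crate.
async fn summarize_recent_entries(client: &TogglApiClient) -> crate::Result<()> {
    // Entries modified in the last 24 hours (sent as the `since` Unix timestamp).
    let modified = client
        .fetch_time_entries_modified_since(Utc::now() - Duration::hours(24))
        .await?;

    // Entries inside an explicit window, sent as RFC 3339 `start_date`/`end_date`.
    let window = (Utc::now() - Duration::days(7), Utc::now());
    let in_range = client.fetch_time_entries_in_range(window).await?;

    println!("{} modified, {} in the last week", modified.len(), in_range.len());
    Ok(())
}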
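On the relocated reports code: full_report pages through the Reports API by feeding each page's last row_number back in as first_row_number + 1, sleeping one second between pages, and stopping once a page comes back empty. A hedged call-site sketch, assuming a TogglReportQuery is constructed elsewhere (only its first_row_number field is visible in this diff):

// Illustration only: `filters` is assumed to be a TogglReportQuery built elsewhere;
// its construction is not shown in this commit.
async fn count_report_rows(
    client: &TogglApiClient,
    filters: &TogglReportQuery,
) -> crate::Result<usize> {
    // full_report follows the row-number cursor internally until a page is empty.
    let rows = client.full_report(filters).await?;
    Ok(rows.len())
}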