// SPDX-FileCopyrightText: 2022 Matteo Settenvini <matteo.settenvini@montecristosoftware.eu>
// SPDX-License-Identifier: AGPL-3.0-or-later

use {
    crate::api_client::ApiClient,
    crate::recipe::{Ingredient, Recipe},
    anyhow::{anyhow, Result},
    chrono::{Duration, Local, NaiveDate},
    icalendar::Component,
    regex::Regex,
    reqwest::{Method, StatusCode, Url},
    std::collections::HashSet,
    std::ops::Range,
};
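
/// Builds a grocery list for `days` days of meals scheduled in the calendar
/// `calendar_name`, starting at `start_date` (or now, when no date is given).
/// Every recipe is scaled to `required_yield` and the resulting Markdown
/// checklist is uploaded via WebDAV to `location`.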
pub async fn with(
    api_client: &ApiClient,
    calendar_name: &str,
    location: &str,
    start_date: Option<NaiveDate>,
    days: u32,
    required_yield: f64,
) -> Result<()> {
    use chrono::{NaiveDateTime, NaiveTime, TimeZone};
    let start = start_date
        .map(|d| {
            let day_start = NaiveDateTime::new(d, NaiveTime::from_hms(0, 0, 0));
            Local.from_local_datetime(&day_start).unwrap()
        })
        .unwrap_or_else(|| Local::now());

    let date_range = Range {
        start,
        end: start + Duration::days(days as i64),
    };

    let ids = map_events_to_recipe_ids(api_client, calendar_name, &date_range).await?;
    let ingredients = get_ingredients(api_client, ids, required_yield).await?;
    let ingredients = merge_ingredients(ingredients);
    let md = prepare_grocery_list(&date_range, &ingredients)?;
    log::debug!("Saving the following grocery list:\n\n{}", &md);
    save_grocery_list(api_client, location, &md).await?;
    Ok(())
}
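
/// Scans the calendar events in `date_range` and collects the Cookbook recipe
/// ids they reference: an event takes part in the grocery list when its
/// DESCRIPTION property matches `cookbook@<id>`.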
async fn map_events_to_recipe_ids<Tz>(
    api_client: &ApiClient,
    calendar_name: &str,
    date_range: &Range<chrono::DateTime<Tz>>,
) -> Result<HashSet<usize>>
where
    Tz: chrono::TimeZone,
    Tz::Offset: std::fmt::Display,
{
    let all_events = api_client.get_events(calendar_name, date_range).await?;

    let recipe_id_regex: Regex = Regex::new(r"^cookbook@(\d+)$").unwrap();
    let recipe_ids = all_events
        .iter()
        .flat_map(|event| event.property_value("DESCRIPTION"))
        .flat_map(|descr| recipe_id_regex.captures(descr))
        .flat_map(|c| c.get(1))
        .flat_map(|m| m.as_str().parse::<usize>())
        .collect::<HashSet<_>>();

    Ok(recipe_ids)
}
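
/// Fetches every recipe in `recipe_ids` concurrently and returns its
/// ingredients, each one scaled to `required_yield` and paired with the name
/// of the recipe it belongs to.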
async fn get_ingredients<RecipeIds>(
    api_client: &ApiClient,
    recipe_ids: RecipeIds,
    required_yield: f64,
) -> Result<Vec<(Ingredient, String)>>
where
    RecipeIds: IntoIterator<Item = usize>,
{
    let ingredients = recipe_ids.into_iter().map(|id: usize| async move {
        // TODO code duplicated with schedule_csv::get_all_recipes
        let recipe_url = format!("apps/cookbook/api/recipes/{id}");
        let response = api_client
            .rest(|client| async {
                Ok(client
                    .get(api_client.base_url().join(&recipe_url).unwrap())
                    .send()
                    .await?)
            })
            .await
            .expect(&format!("Cannot fetch recipe with id {}", id));

        response.json::<Recipe>().await.map(|r| {
            log::info!("Retrieved ingredients for '{}'", r.name);
            let recipe_name = r.name.clone();
            let scale = required_yield / r.recipe_yield as f64;
            r.ingredients
                .into_iter()
                .map(move |i| (i * scale, recipe_name.clone()))
        })
    });

    let ingredients = futures::future::try_join_all(ingredients).await?;
    Ok(ingredients.into_iter().flatten().collect())
}
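
/// Sums together ingredients that share both name and unit, remembering which
/// recipes each merged entry comes from.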
fn merge_ingredients(mut ingredients: Vec<(Ingredient, String)>) -> Vec<(Ingredient, Vec<String>)> {
    if ingredients.is_empty() {
        return vec![];
    }

    // Prime merged_ingredients with the first ingredient in sorted order.
    ingredients.sort_by(|(a, _), (b, _)| {
        a.name.cmp(&b.name).then_with(|| {
            // inefficient, but not so bad for now
            a.unit.to_string().cmp(&b.unit.to_string())
        })
    });
    let (mut merged_ingredients, ingredients): (Vec<(Ingredient, Vec<String>)>, _) = {
        let v = ingredients.split_off(1);
        (
            ingredients.into_iter().map(|(i, s)| (i, vec![s])).collect(),
            v,
        )
    };

    for (ingredient, recipe) in ingredients {
        // If it can be summed to the last item of merged_ingredients, do it;
        // else append it
        let (last_i, last_rs) = merged_ingredients.last_mut().unwrap();
        if last_i.name == ingredient.name && last_i.unit == ingredient.unit {
            last_i.amount += ingredient.amount;
            last_rs.push(recipe);
        } else {
            merged_ingredients.push((ingredient, vec![recipe]));
        }
    }

    merged_ingredients
}
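
/// Renders the merged ingredients as a Markdown checklist, headed by the date
/// range the list covers.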
fn prepare_grocery_list<Tz>(
    date_range: &Range<chrono::DateTime<Tz>>,
    ingredients: &Vec<(Ingredient, Vec<String>)>,
) -> Result<String>
where
    Tz: chrono::TimeZone,
    Tz::Offset: std::fmt::Display,
{
    let mut out = String::new();
    use std::fmt::Write;

    writeln!(
        out,
        "# Grocery list\n## From {} to {}",
        date_range.start.date_naive(),
        date_range.end.date_naive()
    )?;
    writeln!(out)?; // leave an empty line
    for (ingredient, recipes) in ingredients {
        writeln!(out, "- [ ] {} ({})", ingredient, recipes.join(", "))?;
    }

    Ok(out)
}
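
/// Uploads the grocery list to `filename`, a path relative to the WebDAV base
/// URL, creating any missing parent collections first.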
async fn save_grocery_list(api_client: &ApiClient, filename: &str, contents: &str) -> Result<()> {
    let filename_components = filename.split('/').collect::<Vec<_>>();
    filename_components
        .iter()
        .take(filename_components.len() - 1)
        .fold(Ok(api_client.webdav_base_url().clone()), |url, dir| {
            url.map(|u| u.join(&format!("{dir}/")).unwrap())
                .and_then(|url| ensure_collection_exist(api_client, url))
        })?;

    let file_url = api_client.webdav_base_url().join(filename).unwrap();
    log::info!("Saving grocery list to {}", &file_url);
    let response = api_client
        .rest(|client| async {
            let r = client
                .put(file_url.clone())
                .header("Content-Type", "text/markdown; charset=utf-8")
                .body(contents.to_owned())
                .send()
                .await;
            Ok(r?)
        })
        .await?;

    match response.status() {
        StatusCode::CREATED | StatusCode::NO_CONTENT => Ok(()),
        status => Err(anyhow!(
            "Cannot save grocery list at {}, server responded with status {}",
            file_url,
            status
        )),
    }
}
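
/// Ensures that the WebDAV collection at `url` exists by issuing a MKCOL
/// request, treating an already existing collection as success. Returns the
/// same `url` so that calls can be chained while walking down a path.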
fn ensure_collection_exist(api_client: &ApiClient, url: Url) -> Result<Url> {
    futures::executor::block_on(async {
        let response = api_client
            .rest(|client| async {
                let r = client
                    .request(Method::from_bytes(b"MKCOL").unwrap(), url.clone())
                    .send()
                    .await;
                Ok(r?)
            })
            .await;

        match response.map(|r| r.status()) {
            // A successful MKCOL answers 201 Created (RFC 4918);
            // 405 Method Not Allowed means the collection already exists.
            Ok(StatusCode::CREATED)
            | Ok(StatusCode::OK)
            | Ok(StatusCode::METHOD_NOT_ALLOWED) => Ok(url),
            Ok(status) => Err(anyhow!(
                "Could not create WebDAV collection {}, server responded with {}",
                &url,
                status
            )),
            Err(e) => Err(anyhow!(e)),
        }
    })
}