Update to NC Cookbook's API 0.1.0
parent 8930a2a5a1
commit 29b7ec7877
@@ -76,7 +76,7 @@ where
 {
     let ingredients = recipe_ids.into_iter().map(|id: usize| async move {
         // TODO code duplicated with schedule_csv::get_all_recipes
-        let recipe_url = format!("apps/cookbook/api/recipes/{id}");
+        let recipe_url = format!("apps/cookbook/api/v1/recipes/{id}");
         let response = api_client
             .rest(|client| async {
                 Ok(client
@@ -12,7 +12,7 @@ where
     let response = api_client
         .rest(|client| async {
             let r = client
-                .post(api_client.base_url().join("apps/cookbook/import")?)
+                .post(api_client.base_url().join("apps/cookbook/api/v1/import")?)
                 .json(&serde_json::json!({
                     "url": url.as_ref(),
                 }))
@@ -7,7 +7,7 @@ pub async fn with(api_client: &ApiClient) -> Result<()> {
     let recipes = api_client
         .rest(|client| async {
             let response = client
-                .get(api_client.base_url().join("apps/cookbook/api/recipes")?)
+                .get(api_client.base_url().join("apps/cookbook/api/v1/recipes")?)
                 .send()
                 .await;
             Ok(response?)
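The hunks above only move the endpoints under apps/cookbook/api/v1; how the relative paths are resolved is unchanged. A minimal standalone sketch, assuming base_url() yields the url crate's Url type (which reqwest re-exports) and using a made-up host, of how join() resolves these paths:

use url::Url;

fn main() -> Result<(), url::ParseError> {
    // With a trailing slash the relative path is appended under the base.
    let base = Url::parse("https://cloud.example.com/nextcloud/")?;
    assert_eq!(
        base.join("apps/cookbook/api/v1/recipes")?.as_str(),
        "https://cloud.example.com/nextcloud/apps/cookbook/api/v1/recipes"
    );

    // Without it, join() replaces the base's last path segment ("nextcloud"),
    // silently pointing the request somewhere else.
    let no_slash = Url::parse("https://cloud.example.com/nextcloud")?;
    assert_eq!(
        no_slash.join("apps/cookbook/api/v1/recipes")?.as_str(),
        "https://cloud.example.com/apps/cookbook/api/v1/recipes"
    );
    Ok(())
}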
@@ -80,22 +80,22 @@ where
 
 async fn get_all_recipes(api_client: &ApiClient) -> Result<HashMap<String, Rc<recipe::Recipe>>> {
     log::info!("Getting list of all recipes");
-    let metadata = api_client
+    let response = api_client
         .rest(|client| async {
             let response = client
-                .get(api_client.base_url().join("apps/cookbook/api/recipes")?)
+                .get(api_client.base_url().join("apps/cookbook/api/v1/recipes")?)
                 .send()
                 .await;
             Ok(response?)
         })
-        .await?
-        .json::<Vec<recipe::Metadata>>()
-        .await?;
+        .await?;
+
+    let metadata = response.json::<Vec<recipe::Metadata>>().await?;
 
     let recipes = metadata.iter().map(|rm| async {
         let recipe_url = api_client
             .base_url()
-            .join(&format!("apps/cookbook/api/recipes/{id}", id = rm.id))
+            .join(&format!("apps/cookbook/api/v1/recipes/{id}", id = rm.id))
             .unwrap();
         let response = api_client
             .rest(|client| async {
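get_all_recipes now binds the raw response before decoding it, and still maps every Metadata entry to a future that fetches the full recipe. A self-contained sketch of that map-then-await-together pattern, with stand-in Metadata and fetch_recipe names rather than this crate's ApiClient, and futures::future::join_all standing in for whatever combinator the surrounding code actually uses:

use futures::future::join_all;

#[derive(Debug)]
struct Metadata {
    id: u64,
    name: String,
}

async fn fetch_recipe(id: u64) -> Result<String, String> {
    // Stand-in for the HTTP call made through api_client in the real code.
    Ok(format!("recipe {id}"))
}

#[tokio::main]
async fn main() {
    let metadata = vec![
        Metadata { id: 1, name: "Soup".into() },
        Metadata { id: 2, name: "Bread".into() },
    ];

    // Every map item is a future; join_all polls them together and keeps order.
    let fetches = metadata.iter().map(|rm| async move { fetch_recipe(rm.id).await });
    let recipes: Vec<_> = join_all(fetches).await;

    for (rm, recipe) in metadata.iter().zip(recipes) {
        println!("{}: {:?}", rm.name, recipe);
    }
}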
@@ -10,10 +10,14 @@ use {
 #[serde(rename_all = "camelCase")]
 pub struct Metadata {
     #[serde(rename = "recipe_id")]
-    pub id: u32,
+    pub id: u64,
     pub name: String,
     pub keywords: Option<String>,
+
+    #[serde(with = "naive_date_format")]
     pub date_created: DateTime,
+
+    #[serde(with = "naive_date_format")]
     pub date_modified: DateTime,
 }
 
@@ -21,7 +25,7 @@ pub struct Metadata {
 #[derive(Deserialize, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct Recipe {
-    pub id: isize,
+    pub id: u64,
     pub name: String,
     pub description: String,
     pub url: Option<String>,
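Metadata.id goes from u32 to u64 and Recipe.id from isize to u64, so both sides now agree on one width. A side benefit, sketched below with an illustrative value rather than a real id: serde_json refuses integers that do not fit the declared type, so a single oversized id would fail the whole response.

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Narrow {
    id: u32,
}

#[derive(Deserialize, Debug)]
struct Wide {
    id: u64,
}

fn main() {
    // 4294967296 is u32::MAX + 1; any id this large breaks the u32 field.
    let json = r#"{ "id": 4294967296 }"#;
    assert!(serde_json::from_str::<Narrow>(json).is_err());
    assert!(serde_json::from_str::<Wide>(json).is_ok());
}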
@@ -35,15 +39,15 @@ pub struct Recipe {
 
     pub image_url: String,
 
-    #[serde(deserialize_with = "deserialize_duration")]
+    #[serde(with = "duration")]
     pub prep_time: Duration,
 
     #[serde(default)]
-    #[serde(deserialize_with = "deserialize_maybe_duration")]
+    #[serde(with = "maybe_duration")]
     pub cook_time: Option<Duration>,
 
     #[serde(default)]
-    #[serde(deserialize_with = "deserialize_maybe_duration")]
+    #[serde(with = "maybe_duration")]
     pub total_time: Option<Duration>,
 
     pub image: Option<String>,
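The attributes switch from deserialize_with, which names a single function, to with, which names a module and expands to both serialize_with = "m::serialize" and deserialize_with = "m::deserialize". Because these structs only derive Deserialize, the modules only have to provide deserialize. The sketch below, with a made-up minutes module and Step struct, shows how #[serde(default)] and a module-based deserializer divide the work between a missing key and an explicit null:

use serde::Deserialize;

mod minutes {
    use serde::{Deserialize, Deserializer};

    // Only deserialize is needed because the caller derives Deserialize alone.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error>
    where
        D: Deserializer<'de>,
    {
        // A present value (including an explicit null) arrives here.
        Option::<u64>::deserialize(deserializer)
    }
}

#[derive(Deserialize, Debug)]
struct Step {
    // default covers the key being absent entirely; the module handles
    // the value when the key is present.
    #[serde(default)]
    #[serde(with = "minutes")]
    cook_time: Option<u64>,
}

fn main() {
    let present: Step = serde_json::from_str(r#"{"cook_time": 15}"#).unwrap();
    let missing: Step = serde_json::from_str("{}").unwrap();
    let null: Step = serde_json::from_str(r#"{"cook_time": null}"#).unwrap();
    println!("{present:?} {missing:?} {null:?}"); // Some(15), None, None
}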
@@ -101,36 +105,62 @@ pub struct Nutrition {
 
 type DateTime = chrono::DateTime<chrono::Utc>;
 
-fn deserialize_maybe_duration<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    Ok(Some(deserialize_duration(deserializer)?))
-}
-
-fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    deserializer.deserialize_str(DurationVisitor)
-}
-
-struct DurationVisitor;
-
-impl<'de> serde::de::Visitor<'de> for DurationVisitor {
-    type Value = Duration;
-
-    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
-        formatter.write_str("a duration in ISO 8601 format")
-    }
-
-    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
-    where
-        E: serde::de::Error,
-    {
-        speedate::Duration::parse_str(value)
-            .map(|dt| Duration::seconds(dt.signed_total_seconds()))
-            .map_err(|e| E::custom(e.to_string()))
-    }
-}
+mod naive_date_format {
+    use chrono::{DateTime, TimeZone, Utc};
+    use serde::{self, Deserialize, Deserializer};
+
+    const FORMAT: &'static str = "%Y-%m-%d %H:%M:%S";
+
+    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s = String::deserialize(deserializer)?;
+        Utc.datetime_from_str(&s, FORMAT)
+            .map_err(serde::de::Error::custom)
+    }
+}
+
+mod duration {
+    use {
+        super::Duration,
+        serde::{self, de::Error, Deserialize, Deserializer},
+    };
+
+    pub fn deserialize<'de, D>(deserializer: D) -> Result<Duration, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let value = String::deserialize(deserializer)?;
+        parse_duration::<D>(&value)
+    }
+
+    pub fn parse_duration<'de, D>(value: &str) -> Result<Duration, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        speedate::Duration::parse_str(&value)
+            .map(|dt| Duration::seconds(dt.signed_total_seconds()))
+            .map_err(|e| D::Error::custom(e.to_string()))
+    }
+}
+
+mod maybe_duration {
+    use {
+        super::Duration,
+        serde::{self, Deserialize, Deserializer},
+    };
+
+    pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let s: Option<String> = Option::deserialize(deserializer)?;
+        Ok(if let Some(s) = s {
+            Some(super::duration::parse_duration::<D>(&s)?)
+        } else {
+            None
+        })
+    }
+}
 
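The custom Visitor is gone; the three new modules lean on two external parsers, chrono's datetime_from_str for the "%Y-%m-%d %H:%M:%S" timestamps and speedate for ISO 8601 durations. A standalone check of those two primitives, with example values only (assumed deps: chrono 0.4, speedate):

use chrono::{TimeZone, Utc};

fn main() {
    // naive_date_format path: a timestamp in the shape the FORMAT constant
    // above expects (example value, not taken from a real instance).
    let created = Utc
        .datetime_from_str("2023-01-15 18:30:00", "%Y-%m-%d %H:%M:%S")
        .unwrap();
    assert_eq!(created.to_rfc3339(), "2023-01-15T18:30:00+00:00");

    // duration / maybe_duration path: ISO 8601 durations such as "PT1H30M".
    let parsed = speedate::Duration::parse_str("PT1H30M").unwrap();
    assert_eq!(parsed.signed_total_seconds(), 5400);
}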