initial data store
This commit is contained in:
parent
93ec52f555
commit
f899a0a5a5
4 changed files with 46 additions and 14 deletions
22
src/data_store/mod.rs
Normal file
22
src/data_store/mod.rs
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
//! Local data store
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use camino::Utf8PathBuf;
|
||||||
|
|
||||||
|
use crate::{directories::data_cache_directory, malie::models::Index};
|
||||||
|
|
||||||
|
/// Local data store rooted at the on-disk data cache directory.
pub struct Store {
    // Root directory for cached data; populated from
    // `data_cache_directory()` in `Store::new`. Not yet read by any
    // method visible here — presumably used once persistence lands.
    data_cache_directory: Utf8PathBuf,
}
|
||||||
|
|
||||||
|
impl Store {
|
||||||
|
pub async fn new() -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
data_cache_directory: data_cache_directory().await?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn write_index(&self, index: Index) -> Result<()> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -3,6 +3,7 @@ use clap::Parser;
|
||||||
|
|
||||||
pub mod cli;
|
pub mod cli;
|
||||||
pub mod constants;
|
pub mod constants;
|
||||||
|
pub mod data_store;
|
||||||
pub mod directories;
|
pub mod directories;
|
||||||
pub mod editions;
|
pub mod editions;
|
||||||
pub mod lang;
|
pub mod lang;
|
||||||
|
|
|
||||||
|
|
@ -7,8 +7,10 @@ use tokio_stream::StreamExt;
|
||||||
use tokio_util::io::StreamReader;
|
use tokio_util::io::StreamReader;
|
||||||
use tracing::debug;
|
use tracing::debug;
|
||||||
|
|
||||||
use super::models::RawIndex;
|
use super::models::{Index, RawIndex};
|
||||||
|
use crate::data_store;
|
||||||
use crate::directories::data_cache_directory;
|
use crate::directories::data_cache_directory;
|
||||||
|
use crate::malie::models::filter_invalid_editions;
|
||||||
|
|
||||||
/// Client to download data from mallie.io
|
/// Client to download data from mallie.io
|
||||||
pub struct Client {
|
pub struct Client {
|
||||||
|
|
@ -28,9 +30,10 @@ impl Client {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Downloads all remote data and records it in the local data store.
///
/// Steps: open the local store, fetch the raw index JSON to disk,
/// load and filter it, then hand the result to the store.
pub async fn download_all_data(&self) -> Result<()> {
    // Store construction resolves the data cache directory.
    let data_store = data_store::Store::new().await?;
    // Fetch the raw index file before attempting to load it from disk.
    self.download_tcgl_index_json().await?;
    let index = self.load_tcgl_index().await?;
    // NOTE(review): `write_index` is currently a no-op stub — revisit
    // this call site once persistence is actually implemented.
    data_store.write_index(index).await?;
    Ok(())
}
|
||||||
|
|
||||||
|
|
@ -41,13 +44,14 @@ impl Client {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reads the previously downloaded `tcgl_index.json` from the data
/// cache directory, parses it as a `RawIndex`, and drops entries
/// without a valid edition code via `filter_invalid_editions`.
///
/// # Errors
///
/// Fails if the file cannot be read or does not parse as `RawIndex`
/// JSON; both errors carry the offending path as context.
async fn load_tcgl_index(&self) -> Result<Index> {
    let file_path = self.data_cache_directory.join("tcgl_index.json");
    let index = tokio::fs::read_to_string(&file_path)
        .await
        .with_context(|| format!("Failed to read {file_path}"))?;
    let index: RawIndex =
        serde_json::from_str(&index).with_context(|| format!("Couldn't parse {file_path}"))?;
    // Deliberate shadowing: raw string -> RawIndex -> filtered Index.
    let index = filter_invalid_editions(index);
    Ok(index)
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -10,9 +10,9 @@ use tracing::warn;
|
||||||
use crate::editions::EditionCode;
|
use crate::editions::EditionCode;
|
||||||
|
|
||||||
/// Index shape exactly as downloaded: language -> edition name -> raw edition.
pub type RawIndex = HashMap<Lang, HashMap<String, RawEdition>>;
/// Filtered, flattened index: every entry carries its own `Lang`.
pub type Index = Vec<Edition>;
||||||
|
|
||||||
#[derive(Debug, Deserialize, Eq, PartialEq, Hash)]
|
#[derive(Copy, Clone, Debug, Deserialize, Eq, PartialEq, Hash)]
|
||||||
pub enum Lang {
|
pub enum Lang {
|
||||||
#[serde(rename = "de-DE")]
|
#[serde(rename = "de-DE")]
|
||||||
De,
|
De,
|
||||||
|
|
@ -30,14 +30,16 @@ pub enum Lang {
|
||||||
Pt,
|
Pt,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct RawEdition {
|
pub struct RawEdition {
|
||||||
path: String,
|
path: String,
|
||||||
#[serde(deserialize_with = "deserialize_edition_code")]
|
#[serde(deserialize_with = "deserialize_edition_code")]
|
||||||
abbr: Option<EditionCode>,
|
abbr: Option<EditionCode>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A validated edition: its language, remote path, and parsed code.
#[derive(Debug)]
pub struct Edition {
    // Language this edition belongs to (the `RawIndex` key it came from).
    lang: Lang,
    // Remote path, copied verbatim from `RawEdition`.
    path: String,
    // Always present here: entries without a valid code are filtered out
    // by `filter_invalid_editions`.
    abbr: EditionCode,
}
|
||||||
|
|
@ -58,19 +60,22 @@ where
|
||||||
Ok(result.ok())
|
Ok(result.ok())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn filter_invalid_editions(index: RawIndex) -> Index {
|
pub fn filter_invalid_editions(index: RawIndex) -> Index {
|
||||||
index
|
index
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(k, v)| {
|
.map(|(lang, v)| {
|
||||||
let v = v
|
let lang = lang;
|
||||||
.into_values()
|
v.into_values()
|
||||||
.map(|e| match e.abbr {
|
.map(move |e| match e.abbr {
|
||||||
Some(abbr) => Some(Edition { path: e.path, abbr }),
|
Some(abbr) => Some(Edition {
|
||||||
|
path: e.path,
|
||||||
|
abbr,
|
||||||
|
lang,
|
||||||
|
}),
|
||||||
None => None,
|
None => None,
|
||||||
})
|
})
|
||||||
.flatten()
|
.flatten()
|
||||||
.collect();
|
|
||||||
(k, v)
|
|
||||||
})
|
})
|
||||||
|
.flatten()
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue