initial data store
parent 93ec52f555
commit f899a0a5a5
4 changed files with 46 additions and 14 deletions
src/data_store/mod.rs (new file, 22 additions)
@@ -0,0 +1,22 @@
+//! Local data store
+
+use anyhow::Result;
+use camino::Utf8PathBuf;
+
+use crate::{directories::data_cache_directory, malie::models::Index};
+
+pub struct Store {
+    data_cache_directory: Utf8PathBuf,
+}
+
+impl Store {
+    pub async fn new() -> Result<Self> {
+        Ok(Self {
+            data_cache_directory: data_cache_directory().await?,
+        })
+    }
+
+    pub async fn write_index(&self, index: Index) -> Result<()> {
+        Ok(())
+    }
+}
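
Note that write_index is a stub in this commit: it accepts the parsed Index and returns Ok(()) without touching disk. A minimal sketch of a persisting version, assuming Edition (and thus Index) implements serde::Serialize, which is not yet derived here, and an invented index.json file name:

    impl Store {
        // Sketch only, not part of the commit: serialize the index and
        // write it into the data cache directory.
        pub async fn write_index(&self, index: Index) -> Result<()> {
            let path = self.data_cache_directory.join("index.json");
            let json = serde_json::to_string_pretty(&index)?;
            tokio::fs::write(&path, json).await?;
            Ok(())
        }
    }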

@@ -3,6 +3,7 @@ use clap::Parser;
 
 pub mod cli;
 pub mod constants;
+pub mod data_store;
 pub mod directories;
 pub mod editions;
 pub mod lang;

@@ -7,8 +7,10 @@ use tokio_stream::StreamExt;
 use tokio_util::io::StreamReader;
 use tracing::debug;
 
-use super::models::RawIndex;
+use super::models::{Index, RawIndex};
+use crate::data_store;
 use crate::directories::data_cache_directory;
+use crate::malie::models::filter_invalid_editions;
 
 /// Client to download data from mallie.io
 pub struct Client {
@@ -28,9 +30,10 @@ impl Client {
     }
 
     pub async fn download_all_data(&self) -> Result<()> {
+        let data_store = data_store::Store::new().await?;
         self.download_tcgl_index_json().await?;
         let index = self.load_tcgl_index().await?;
-        println!("{index:?}");
+        data_store.write_index(index).await?;
         Ok(())
     }
 
@@ -41,13 +44,14 @@ impl Client {
         Ok(())
     }
 
-    async fn load_tcgl_index(&self) -> Result<RawIndex> {
+    async fn load_tcgl_index(&self) -> Result<Index> {
         let file_path = self.data_cache_directory.join("tcgl_index.json");
         let index = tokio::fs::read_to_string(&file_path)
             .await
             .with_context(|| format!("Failed to read {file_path}"))?;
         let index: RawIndex =
             serde_json::from_str(&index).with_context(|| format!("Couldn't parse {file_path}"))?;
+        let index = filter_invalid_editions(index);
         Ok(index)
     }
 
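
For orientation, the RawIndex type in the models module (diffed below) implies that tcgl_index.json is a two-level object: language code, then edition key, then an object with path and abbr. A hypothetical round-trip under that assumption; every key and value in the literal is invented for illustration:

    // Illustrative only: shape implied by
    // RawIndex = HashMap<Lang, HashMap<String, RawEdition>>.
    // An abbr that fails to parse becomes None and is later
    // dropped by filter_invalid_editions.
    let raw: RawIndex = serde_json::from_value(serde_json::json!({
        "de-DE": {
            "example-edition": { "path": "de/example", "abbr": "EX1" }
        }
    }))?;
    let index: Index = filter_invalid_editions(raw);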

@@ -10,9 +10,9 @@ use tracing::warn;
 use crate::editions::EditionCode;
 
 pub type RawIndex = HashMap<Lang, HashMap<String, RawEdition>>;
-pub type Index = HashMap<Lang, Vec<Edition>>;
+pub type Index = Vec<Edition>;
 
-#[derive(Debug, Deserialize, Eq, PartialEq, Hash)]
+#[derive(Copy, Clone, Debug, Deserialize, Eq, PartialEq, Hash)]
 pub enum Lang {
     #[serde(rename = "de-DE")]
     De,
@@ -30,14 +30,16 @@ pub enum Lang {
     Pt,
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Deserialize)]
 pub struct RawEdition {
     path: String,
     #[serde(deserialize_with = "deserialize_edition_code")]
     abbr: Option<EditionCode>,
 }
 
+#[derive(Debug)]
 pub struct Edition {
+    lang: Lang,
     path: String,
     abbr: EditionCode,
 }
@@ -58,19 +60,22 @@ where
     Ok(result.ok())
 }
 
-fn filter_invalid_editions(index: RawIndex) -> Index {
+pub fn filter_invalid_editions(index: RawIndex) -> Index {
     index
         .into_iter()
-        .map(|(k, v)| {
-            let v = v
-                .into_values()
-                .map(|e| match e.abbr {
-                    Some(abbr) => Some(Edition { path: e.path, abbr }),
+        .map(|(lang, v)| {
+            let lang = lang;
+            v.into_values()
+                .map(move |e| match e.abbr {
+                    Some(abbr) => Some(Edition {
+                        path: e.path,
+                        abbr,
+                        lang,
+                    }),
                     None => None,
                 })
                 .flatten()
-                .collect();
-            (k, v)
         })
+        .flatten()
        .collect()
 }
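
The map-plus-match-plus-flatten pattern above can be collapsed with flat_map and filter_map. A behavior-equivalent sketch, not part of the commit, which also drops the redundant let lang = lang; rebinding; Lang being Copy (added in this commit) is what allows lang to be used once per edition inside the inner closure:

    pub fn filter_invalid_editions(index: RawIndex) -> Index {
        index
            .into_iter()
            .flat_map(|(lang, editions)| {
                editions.into_values().filter_map(move |e| {
                    // Editions whose abbr failed to parse are dropped here.
                    let RawEdition { path, abbr } = e;
                    abbr.map(|abbr| Edition { lang, path, abbr })
                })
            })
            .collect()
    }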