use std::collections::HashMap;
use std::path::{Path, PathBuf};

use clap::Parser;
use color_eyre::eyre::Result;
use crossterm::style::{style, Stylize};
use futures::StreamExt;
use indicatif::{ProgressBar, ProgressStyle};
use itertools::Itertools;
use tokio::time::Instant;

use crate::config::types::ApplicationConfig;
use crate::constants::{DB_CF_OPTIONS, DB_OPTIONS};
use crate::crawler::{dlsite, DLSiteCrawler};
use crate::helpers;
use crate::helpers::db::RocksDB;
use crate::models::{DLSiteCategory, DLSiteGenre, DLSiteManiax, DLSiteTranslation};

#[derive(Parser, Debug)]
pub(super) struct DLSiteCommand {
    #[command(subcommand)]
    pub(super) subcommand: DLSiteSubCommand,
}

#[derive(Parser, Debug)]
pub(super) enum DLSiteSubCommand {
    #[command(name = "sync")]
    Sync(DLSiteSyncCommand),
}

#[derive(Parser, Debug)]
pub(super) struct DLSiteSyncCommand {
    // Only pick up works whose folders are not yet in the database.
    #[clap(long, short, action)]
    missing: bool,
    #[clap(long = "genre", default_value = "false")]
    do_sync_genre: bool,
    #[clap(long = "work", default_value = "true")]
    do_sync_work: bool,
}

impl DLSiteSubCommand {
    pub async fn handle(&self) -> Result<()> {
        match self {
            Self::Sync(cmd) => cmd.handle().await,
        }
    }
}

impl DLSiteSyncCommand {
    pub async fn handle(&self) -> Result<()> {
        let now = Instant::now();
        let app_conf = ApplicationConfig::get_config()?;
        let mut db = RocksDB::new(DB_OPTIONS.clone(), DB_CF_OPTIONS.clone())?;
        let crawler = DLSiteCrawler::new()?;

        if self.do_sync_genre {
            let genre_now = Instant::now();
            Self::sync_genres(&mut db, &app_conf, &crawler).await?;
            println!(
                "{} {} Done in {:.2?}",
                style("Genres").cyan(),
                style("Syncing").green(),
                genre_now.elapsed()
            );
        }

        if self.do_sync_work {
            let work_now = Instant::now();
            self.sync_works(&app_conf, &mut db, &crawler).await?;
            println!(
                "{} {} Done in {:.2?}",
                style("Works").cyan(),
                style("Syncing").green(),
                work_now.elapsed()
            );
        }

        println!("{} Done in {:.2?}", style("Syncing").green(), now.elapsed());

        Ok(())
    }

    // Fetch all genre categories for the configured locale, persist the categories,
    // and merge each genre's translated name into the existing genre records.
    async fn sync_genres(db: &mut RocksDB, app_conf: &ApplicationConfig, crawler: &DLSiteCrawler) -> Result<()> {
        let requested_categories = crawler.get_all_genres(&app_conf.basic_config.locale).await?;
        let categories: Vec<DLSiteCategory> = requested_categories.iter()
            .map(|g| g.clone().try_into())
            .filter_map(Result::ok)
            .collect();
        db.set_values(&categories)?;

        let genres = requested_categories.into_iter()
            .flat_map(|v| v.values)
            .collect_vec();
        let existing_genres = db.get_all_values::<DLSiteGenre>()?;
        let mut modified_genres: Vec<DLSiteGenre> = Vec::new();

        for genre in genres {
            // The genre id is parsed as an integer; u32 is assumed for DLSiteGenre::id.
            let id = genre.value.parse::<u32>()?;
            let existing_genre = existing_genres.iter().find(|v| v.id == id);

            if let Some(existing_genre) = existing_genre {
                let name = DLSiteTranslation::try_from(genre.name)?;
                if existing_genre.name.contains(&name) {
                    modified_genres.push(existing_genre.clone());
                    continue;
                }

                let mut modified_genre = existing_genre.clone();
                modified_genre.name.push(name);
                modified_genres.push(modified_genre);
            } else {
                modified_genres.push(DLSiteGenre {
                    id,
                    name: vec![DLSiteTranslation::try_from(genre.name)?]
                });
            }
        }

        db.set_values(&modified_genres)?;

        Ok(())
    }

    // Scan the configured DLSite folders, fetch metadata for every RJ number found,
    // and merge new titles into the stored DLSiteManiax records.
    async fn sync_works(&self, app_conf: &ApplicationConfig, db: &mut RocksDB, crawler: &DLSiteCrawler) -> Result<()> {
        let existing_works = db.get_all_values::<DLSiteManiax>()?;
        let work_list = self.get_work_list(app_conf, &existing_works).await?;
        let rj_nums = work_list.clone().into_keys().collect::<Vec<_>>();

        let mut game_infos = crawler.get_game_infos(rj_nums, &app_conf.basic_config.locale).await?;
        let existing_game_infos = db.get_all_values::<DLSiteManiax>()?;
        let mut modified_maniaxes: Vec<DLSiteManiax> = Vec::new();

        let progress = ProgressBar::new(game_infos.len() as u64)
            .with_style(ProgressStyle::default_bar());

        while let Some(info) = game_infos.next().await {
            let maniax = info?;
            let existing_maniax = existing_game_infos.iter()
                .find(|v| v.rj_num == maniax.rj_num);

            if let Some(existing_maniax) = existing_maniax {
                let name = DLSiteTranslation::try_from(maniax.title)?;
                if existing_maniax.name.contains(&name) {
                    modified_maniaxes.push(existing_maniax.clone());
                    continue;
                }

                let mut modified_maniax = existing_maniax.clone();
                modified_maniax.name.push(name);
                modified_maniaxes.push(modified_maniax);
            } else {
                // New work: record it together with the folder it was discovered in.
                let mut value: DLSiteManiax = maniax.into();
                let maniax_folder = work_list.get(&value.rj_num).unwrap().to_owned();
                value.folder_path = maniax_folder;
                modified_maniaxes.push(value);
            }

            progress.inc(1);
        }

        db.set_values(&modified_maniaxes)?;

        Ok(())
    }

    // Build a map of RJ number -> folder path from the configured DLSite directories,
    // skipping non-directories, folders that are neither valid RJ numbers nor already known,
    // and (when --missing is set) works that are already in the database.
    async fn get_work_list(&self, app_conf: &ApplicationConfig, existing_works: &[DLSiteManiax]) -> Result<HashMap<String, PathBuf>> {
        let existing_nums = existing_works.iter()
            .map(|x| x.rj_num.clone())
            .collect::<Vec<_>>();
        let existing_folders = existing_works.iter()
            .map(|x| x.folder_path.to_str().unwrap().to_string())
            .collect::<Vec<_>>();

        let mut works_list: HashMap<String, PathBuf> = HashMap::new();
        let config_paths = app_conf.path_config.dlsite_paths.iter()
            .map(|path| Path::new(path))
            .collect::<Vec<_>>();
        let dir_paths = helpers::get_all_folders(&config_paths).await?;

        for dir_path in dir_paths {
            if !dir_path.is_dir() {
                println!(
                    "{} {}",
                    style(dir_path.to_str().unwrap()).blue(),
                    style("is not a directory").red()
                );
                continue;
            }

            let dir_path_str = dir_path.to_str().unwrap().to_string();
            let dir_name = dir_path
                .file_name().unwrap()
                .to_str().unwrap()
                .to_string();

            if !dlsite::is_valid_rj_number(&dir_name) && !existing_folders.contains(&dir_path_str) {
                println!(
                    "{} {}",
                    style(dir_path.to_str().unwrap()).blue(),
                    style("is not a valid RJ number, please add it manually").red()
                );
                continue;
            }

            if self.missing && existing_nums.contains(&dir_name) {
                continue;
            }

            works_list.insert(dir_name, dir_path);
        }

        Ok(works_list)
    }
}