1use getset::{Getters, MutGetters};
98use itertools::{Either, Itertools};
99use log::{info, error};
100use rayon::prelude::*;
101use serde_derive::{Serialize, Deserialize};
102
103use std::borrow::Cow;
104use std::collections::{BTreeMap, HashMap, HashSet};
105use std::fs::{DirBuilder, File};
106use std::io::{BufReader, BufWriter, Read, Write};
107use std::sync::mpsc::channel;
108use std::path::{Path, PathBuf};
109use std::process::Command;
110use std::{thread, thread::{spawn, JoinHandle}};
111use std::time::Duration;
112
113use rpfm_lib::binary::WriteBytes;
114use rpfm_lib::error::{Result, RLibError};
115use rpfm_lib::files::{Container, ContainerPath, db::DB, DecodeableExtraData, FileType, pack::Pack, RFile, RFileDecoded, table::Table};
116use rpfm_lib::games::{GameInfo, supported_games::*};
117use rpfm_lib::integrations::assembly_kit::table_data::RawTable;
118use rpfm_lib::schema::{Definition, DefinitionPatch, Field, FieldType, Schema};
119use rpfm_lib::utils::{current_time, files_from_subdir, last_modified_time_from_files, starts_with_case_insensitive};
120
121use crate::optimizer::{OptimizableContainer, OptimizerOptions};
122use crate::START_POS_WORKAROUND_THREAD;
123use crate::VERSION;
124
/// Name of the special table used by the "key deletes" feature. It gets bespoke reference
/// data (the name of every table known to the schema) in `generate_references`.
pub const KEY_DELETES_TABLE_NAME: &str = "twad_key_deletes_tables";

/// File name of the user script file (presumably read by the games on boot — not shown here).
pub const USER_SCRIPT_FILE_NAME: &str = "user.script.txt";

/// Path, inside a Pack, of the victory objectives file.
pub const VICTORY_OBJECTIVES_FILE_NAME: &str = "db/victory_objectives.txt";

/// File name used for the victory objectives file once extracted to disk.
pub const VICTORY_OBJECTIVES_EXTRACTED_FILE_NAME: &str = "victory_objectives.txt";

/// Keys of the games that need the victory objectives file handled separately.
pub const GAMES_NEEDING_VICTORY_OBJECTIVES: [&str; 9] = [
    KEY_PHARAOH_DYNASTIES,
    KEY_PHARAOH,
    KEY_TROY,
    KEY_THREE_KINGDOMS,
    KEY_WARHAMMER_2,
    KEY_WARHAMMER,
    KEY_THRONES_OF_BRITANNIA,
    KEY_ATTILA,
    KEY_ROME_2
];
156
/// Holds the files and indexes of a game's data used to resolve dependencies of an open Pack.
///
/// Only part of this data is serialized to the on-disk cache; every field marked with
/// `#[serde(skip_serializing, skip_deserializing)]` is rebuilt at runtime (see `rebuild`).
#[derive(Default, Debug, Clone, Getters, Serialize, Deserialize)]
#[getset(get = "pub")]
pub struct Dependencies {

    /// Date this cache was generated (timestamp from `current_time()`), used to detect
    /// out-of-date caches in `needs_updating`.
    build_date: u64,

    /// Version of the program that generated this cache, compared against `VERSION`.
    version: String,

    /// Loose files (files under the game's data folder not inside a Pack), keyed by their
    /// path relative to the data folder. Rebuilt at runtime by `load_loose_files`.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_files: HashMap<String, RFile>,

    /// Files inside the game's own (CA) Packs, keyed by path inside the container.
    vanilla_files: HashMap<String, RFile>,

    /// Files inside the parent (dependency) Packs of the open Pack, keyed by path.
    /// Rebuilt at runtime by `load_parent_files`.
    #[serde(skip_serializing, skip_deserializing)]
    parent_files: HashMap<String, RFile>,

    /// DB file paths among the loose files, keyed by table name.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_tables: HashMap<String, Vec<String>>,

    /// DB file paths among the vanilla pack files, keyed by table name.
    vanilla_tables: HashMap<String, Vec<String>>,

    /// DB file paths among the parent pack files, keyed by table name.
    #[serde(skip_serializing, skip_deserializing)]
    parent_tables: HashMap<String, Vec<String>>,

    /// Paths of the Loc files among the loose files.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_locs: HashSet<String>,

    /// Paths of the Loc files among the vanilla pack files.
    vanilla_locs: HashSet<String>,

    /// Paths of the Loc files among the parent pack files.
    #[serde(skip_serializing, skip_deserializing)]
    parent_locs: HashSet<String>,

    /// Every folder path derivable from the loose file paths.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_folders: HashSet<String>,

    /// Every folder path derivable from the vanilla pack file paths.
    vanilla_folders: HashSet<String>,

    /// Every folder path derivable from the parent pack file paths.
    #[serde(skip_serializing, skip_deserializing)]
    parent_folders: HashSet<String>,

    /// Lowercased loose path -> real path(s), for case-insensitive lookups in `file`.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_paths: HashMap<String, Vec<String>>,

    /// Lowercased vanilla path -> real path(s), for case-insensitive lookups in `file`.
    vanilla_paths: HashMap<String, Vec<String>>,

    /// Lowercased parent path -> real path(s), for case-insensitive lookups in `file`.
    #[serde(skip_serializing, skip_deserializing)]
    parent_paths: HashMap<String, Vec<String>>,

    /// Cached reference data of the tables of the open Pack, as
    /// table name -> column index -> references. Regenerated on every `rebuild`.
    #[serde(skip_serializing, skip_deserializing)]
    local_tables_references: HashMap<String, HashMap<i32, TableReferences>>,

    /// Merged loc key -> text map, built in `rebuild` from all decoded Loc files.
    #[serde(skip_serializing, skip_deserializing)]
    localisation_data: HashMap<String, String>,

    /// DB tables generated from the Assembly Kit raw data, keyed by table name
    /// (see `generate_asskit_only_db_tables`).
    asskit_only_db_tables: HashMap<String, DB>,
}
284
/// Reference/lookup data of a single column of a table.
#[derive(Eq, PartialEq, Clone, Default, Debug, Getters, MutGetters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub")]
pub struct TableReferences {

    /// Name of the column these references belong to.
    field_name: String,

    /// True if the referenced table exists only in the Assembly Kit data,
    /// not in the game files (set in `generate_references`).
    referenced_table_is_ak_only: bool,

    /// True if the referenced column is one of the localised fields of the referenced table.
    referenced_column_is_localised: bool,

    /// The reference data itself: referenced key -> lookup text.
    data: HashMap<String, String>,
}
327
328impl Dependencies {
333
    /// Rebuilds the whole dependencies data (vanilla cache, loose files, parent Packs and
    /// merged localisation data) for the provided game and paths.
    ///
    /// If `file_path` is provided, the on-disk cache at that path is loaded and kept only
    /// when it's still up to date with the game's packs; otherwise we start from empty.
    pub fn rebuild(&mut self, schema: &Option<Schema>, parent_pack_names: &[String], file_path: Option<&Path>, game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {

        if let Some(file_path) = file_path {

            // Wipe the current state first, so a stale cache leaves us empty instead of
            // with a mix of old and new data.
            *self = Self::default();

            let stored_data = Self::load(file_path, schema)?;
            if !stored_data.needs_updating(game_info, game_path)? {
                *self = stored_data;
            }
        }

        // Local reference data depends on the open Pack, so it's always regenerated later.
        self.local_tables_references.clear();

        self.load_loose_files(schema, game_info, game_path)?;

        self.load_parent_files(schema, parent_pack_names, game_info, game_path, secondary_path)?;

        // Merge every decoded Loc table into a single key -> text map.
        let loc_files = self.loc_data(true, true).unwrap_or_default();
        let loc_decoded = loc_files.iter()
            .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
            .map(|file| file.data())
            .collect::<Vec<_>>();

        // NOTE: assumes every loc entry has at least two cells ([key, text]) — panics otherwise.
        self.localisation_data = loc_decoded.par_iter()
            .flat_map(|data| data.par_iter()
                .map(|entry| (entry[0].data_to_string().to_string(), entry[1].data_to_string().to_string()))
                .collect::<Vec<(_,_)>>()
            ).collect::<HashMap<_,_>>();

        Ok(())
    }
381
382 pub fn generate_dependencies_cache(schema: &Option<Schema>, game_info: &GameInfo, game_path: &Path, asskit_path: &Option<PathBuf>, ignore_game_files_in_ak: bool) -> Result<Self> {
384 let mut cache = Self::default();
385 cache.build_date = current_time()?;
386 cache.version = VERSION.to_owned();
387 cache.vanilla_files = Pack::read_and_merge_ca_packs(game_info, game_path)?.files().clone();
388
389 let cacheable = cache.vanilla_files.par_iter_mut()
390 .filter_map(|(_, file)| {
391 let _ = file.guess_file_type();
392
393 match file.file_type() {
394 FileType::DB |
395 FileType::Loc => Some(file),
396 _ => None,
397 }
398 })
399 .collect::<Vec<&mut RFile>>();
400
401 cacheable.iter()
402 .for_each(|file| {
403 match file.file_type() {
404 FileType::DB => {
405 if let Some(table_name) = file.db_table_name_from_path() {
406 match cache.vanilla_tables.get_mut(table_name) {
407 Some(table_paths) => table_paths.push(file.path_in_container_raw().to_owned()),
408 None => { cache.vanilla_tables.insert(table_name.to_owned(), vec![file.path_in_container_raw().to_owned()]); },
409 }
410 }
411 }
412 FileType::Loc => {
413 cache.vanilla_locs.insert(file.path_in_container_raw().to_owned());
414 }
415 _ => {}
416 }
417 }
418 );
419
420 cache.vanilla_folders = cache.vanilla_files.par_iter().filter_map(|(path, _)| {
421 let file_path_split = path.split('/').collect::<Vec<&str>>();
422 let folder_path_len = file_path_split.len() - 1;
423 if folder_path_len == 0 {
424 None
425 } else {
426
427 let mut paths = Vec::with_capacity(folder_path_len);
428
429 for (index, folder) in file_path_split.iter().enumerate() {
430 if index < path.len() - 1 && !folder.is_empty() {
431 paths.push(file_path_split[0..=index].join("/"))
432 }
433 }
434
435 Some(paths)
436 }
437 }).flatten().collect::<HashSet<String>>();
438
439 cache.vanilla_files.keys().for_each(|path| {
440 let lower = path.to_lowercase();
441 match cache.vanilla_paths.get_mut(&lower) {
442 Some(paths) => paths.push(path.to_owned()),
443 None => { cache.vanilla_paths.insert(lower, vec![path.to_owned()]); },
444 }
445 });
446
447 cache.load_loose_files(&None, game_info, game_path)?;
451
452 if let Some(path) = asskit_path {
454 let _ = cache.generate_asskit_only_db_tables(schema, path, *game_info.raw_db_version(), ignore_game_files_in_ak);
455 }
456
457 Ok(cache)
458 }
459
    /// Generates the "Assembly Kit only" DB tables from the raw Assembly Kit files at
    /// `raw_db_path`, stores them in `self.asskit_only_db_tables`, and runs a loc-key
    /// bruteforce pass over them.
    ///
    /// If `ignore_game_files` is set, tables already present in the vanilla files are skipped.
    fn generate_asskit_only_db_tables(&mut self, schema: &Option<Schema>, raw_db_path: &Path, version: i16, ignore_game_files: bool) -> Result<()> {
        // Vanilla table names carry a trailing "_tables" (7 chars); the raw files don't.
        let files_to_ignore = if ignore_game_files {
            self.vanilla_tables.keys().map(|table_name| &table_name[..table_name.len() - 7]).collect::<Vec<_>>()
        } else {
            vec![]
        };
        let raw_tables = RawTable::read_all(raw_db_path, version, &files_to_ignore)?;
        let asskit_only_db_tables = raw_tables.par_iter()
            .map(|x| match schema {
                Some(schema) => {
                    // NOTE(review): panics if a raw table lacks a definition or a name —
                    // presumably guaranteed by RawTable::read_all; confirm before relying on it.
                    let mut table_name = x.definition.clone().unwrap().name.unwrap().to_owned();
                    // Drop the last 4 characters (presumably a ".xml"-like extension — TODO
                    // confirm) and append "_tables" to form the db table name.
                    table_name.pop();
                    table_name.pop();
                    table_name.pop();
                    table_name.pop();

                    table_name = format!("{table_name}_tables");

                    // Use the first known definition of that table, if any.
                    let definition = schema.definitions().get(&table_name).and_then(|x| x.first());

                    x.to_db(definition)
                }
                None => x.to_db(None),
            })
            .collect::<Result<Vec<DB>>>()?;

        let mut asskit_only_db_tables = asskit_only_db_tables.par_iter().map(|table| (table.table_name().to_owned(), table.clone())).collect::<HashMap<String, DB>>();

        // Decode all Loc files (loose and packed) so the bruteforce pass below has decoded
        // loc data to work with. Each group is pulled out of its map, decoded in parallel,
        // then re-inserted; decode failures just leave the file undecoded.
        let decode_extra_data = DecodeableExtraData::default();
        let extra_data = Some(decode_extra_data);

        let mut files = self.vanilla_loose_locs.iter().filter_map(|path| {
            self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
        }).collect::<Vec<_>>();

        files.par_iter_mut().for_each(|(_, file)| {
            let _ = file.decode(&extra_data, true, false);
        });

        self.vanilla_loose_files.par_extend(files);

        let mut files = self.vanilla_locs.iter().filter_map(|path| {
            self.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
        }).collect::<Vec<_>>();

        files.par_iter_mut().for_each(|(_, file)| {
            let _ = file.decode(&extra_data, true, false);
        });

        self.vanilla_files.par_extend(files);

        self.bruteforce_loc_key_order(&mut Schema::default(), None, None, Some(&mut asskit_only_db_tables))?;
        self.asskit_only_db_tables = asskit_only_db_tables;

        Ok(())
    }
525
526 pub fn generate_local_db_references(&mut self, schema: &Schema, packs: &BTreeMap<String, Pack>, table_names: &[String]) {
532
533 let local_tables_references = packs.values()
534 .flat_map(|pack| pack.files_by_type(&[FileType::DB]))
535 .par_bridge()
536 .filter_map(|file| {
537 if let Ok(RFileDecoded::DB(db)) = file.decoded() {
538
539 if table_names.is_empty() || table_names.iter().any(|x| x == db.table_name()) {
541 Some((db.table_name().to_owned(), self.generate_references(schema, db.table_name(), db.definition())))
542 } else { None }
543 } else { None }
544 }).collect::<HashMap<_, _>>();
545
546 self.local_tables_references.extend(local_tables_references);
547 }
548
549 pub fn generate_local_definition_references(&mut self, schema: &Schema, table_name: &str, definition: &Definition) {
551 self.local_tables_references.insert(table_name.to_owned(), self.generate_references(schema, table_name, definition));
552 }
553
    /// Generates the reference/lookup data for each column of the provided table definition,
    /// keyed by column index.
    ///
    /// The special `KEY_DELETES_TABLE_NAME` table gets bespoke data: a single column (index 1)
    /// "referencing" the name of every table known to the schema.
    pub fn generate_references(&self, schema: &Schema, local_table_name: &str, definition: &Definition) -> HashMap<i32, TableReferences> {

        // Work on a copy of the definition, expanded with recursive lookups.
        let mut definition = definition.clone();
        self.add_recursive_lookups_to_definition(schema, &mut definition, local_table_name);

        let patches = Some(definition.patches());
        let fields_processed = definition.fields_processed();

        // Special case: key-deletes table.
        if local_table_name == KEY_DELETES_TABLE_NAME {
            let mut hashmap = HashMap::new();
            let mut references = TableReferences::default();
            *references.field_name_mut() = "table_name".to_owned();

            for key in schema.definitions().keys() {
                if key.len() > 7 {
                    // Keep everything before the trailing "_tables" suffix (7 chars).
                    let table_name = key.to_owned().drain(..key.len() - 7).collect::<String>();
                    references.data.insert(table_name, String::new());
                }
            }

            hashmap.insert(1, references);
            return hashmap;
        }

        fields_processed.par_iter().enumerate().filter_map(|(column, field)| {
            match field.is_reference(patches) {

                // Explicit reference to another table's column.
                Some((ref ref_table, ref ref_column)) => {
                    if !ref_table.is_empty() && !ref_column.is_empty() {
                        let ref_table = format!("{ref_table}_tables");

                        // Use the field's lookup data if it has any.
                        let lookup_data = if let Some(ref data) = field.lookup_no_patch() { data.to_vec() } else { Vec::with_capacity(0) };
                        let mut references = TableReferences::default();
                        *references.field_name_mut() = field.name().to_owned();

                        // Pull reference data from the Assembly Kit tables ("fake") and from
                        // the vanilla/modded tables ("real").
                        let fake_found = self.db_reference_data_from_asskit_tables(&mut references, (&ref_table, ref_column, &lookup_data));
                        let real_found = self.db_reference_data_from_vanilla_and_modded_tables(&mut references, (&ref_table, ref_column, &lookup_data));

                        // Only found in the Assembly Kit data: flag it as AK-only.
                        if fake_found && real_found.is_none() {
                            references.referenced_table_is_ak_only = true;
                        }

                        // Flag references pointing at a localised field of the target table.
                        if let Some(ref_definition) = real_found {
                            if ref_definition.localised_fields().iter().any(|x| x.name() == ref_column) {
                                references.referenced_column_is_localised = true;
                            }
                        }

                        Some((column as i32, references))
                    } else { None }
                },

                // Not a reference, but a column with lookup data that is the table's only key
                // is treated as a self-reference to this same table.
                None => {
                    if let Some(ref lookup_data) = field.lookup_no_patch() {

                        if field.is_key(patches) && fields_processed.iter().filter(|x| x.is_key(patches)).count() == 1 {
                            let ref_table = local_table_name;
                            let ref_column = field.name();

                            let mut references = TableReferences::default();
                            *references.field_name_mut() = field.name().to_owned();

                            let fake_found = self.db_reference_data_from_asskit_tables(&mut references, (ref_table, ref_column, lookup_data));
                            let real_found = self.db_reference_data_from_vanilla_and_modded_tables(&mut references, (ref_table, ref_column, lookup_data));

                            if fake_found && real_found.is_none() {
                                references.referenced_table_is_ak_only = true;
                            }

                            if let Some(ref_definition) = real_found {
                                if ref_definition.localised_fields().iter().any(|x| x.name() == ref_column) {
                                    references.referenced_column_is_localised = true;
                                }
                            }

                            Some((column as i32, references))
                        } else { None }
                    } else { None }
                },
            }
        }).collect::<HashMap<_, _>>()
    }
644
    /// Loads a dependencies cache from disk, from the three `.pak1`/`.pak2`/`.pak3` files
    /// derived from `file_path` (see `save` for the on-disk layout).
    ///
    /// If a schema is provided, all vanilla DB/Loc files are decoded after loading.
    pub fn load(file_path: &Path, schema: &Option<Schema>) -> Result<Self> {

        // Each pak file is read and deserialized in its own thread.
        // pak1: build date, version, and the first half of the vanilla files.
        let mut file_path_1 = file_path.to_path_buf();
        let handle_1: JoinHandle<Result<(u64, String, Vec<RFile>)>> = spawn(move || {
            file_path_1.set_extension("pak1");
            let mut file = BufReader::new(File::open(&file_path_1)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            bitcode::deserialize(&data).map_err(From::from)
        });

        // pak2: the second half of the vanilla files.
        let mut file_path_2 = file_path.to_path_buf();
        let handle_2: JoinHandle<Result<Vec<RFile>>> = spawn(move || {
            file_path_2.set_extension("pak2");
            let mut file = BufReader::new(File::open(&file_path_2)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            bitcode::deserialize(&data).map_err(From::from)
        });

        // pak3: the table/loc/folder/path indexes and the Assembly Kit tables.
        let mut file_path_3 = file_path.to_path_buf();
        let handle_3: JoinHandle<Result<(HashMap<String, Vec<String>>, HashSet<String>, HashSet<String>, HashMap<String, Vec<String>>, HashMap<String, DB>)>> = spawn(move || {
            file_path_3.set_extension("pak3");
            let mut file = BufReader::new(File::open(&file_path_3)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            bitcode::deserialize(&data).map_err(From::from)
        });

        let mut dependencies = Self::default();

        // `join().unwrap()` only panics if a worker thread panicked; I/O and decode errors
        // are propagated through the inner Result.
        let data_3 = handle_3.join().unwrap()?;
        let data_2 = handle_2.join().unwrap()?;
        let data_1 = handle_1.join().unwrap()?;

        // Re-key both halves of the vanilla files by their path in the container.
        let mut vanilla_files: HashMap<_,_> = data_1.2.into_par_iter().map(|file| (file.path_in_container_raw().to_owned(), file)).collect();
        vanilla_files.par_extend(data_2.into_par_iter().map(|file| (file.path_in_container_raw().to_owned(), file)));

        dependencies.build_date = data_1.0;
        dependencies.version = data_1.1;
        dependencies.vanilla_files = vanilla_files;
        dependencies.vanilla_tables = data_3.0;
        dependencies.vanilla_locs = data_3.1;
        dependencies.vanilla_folders = data_3.2;
        dependencies.vanilla_paths = data_3.3;
        dependencies.asskit_only_db_tables = data_3.4;

        // Decode all DB/Loc files in place if we got a schema. Files are pulled out of the
        // map, decoded in parallel, then re-inserted; failures leave the file undecoded.
        if let Some(schema) = schema {
            let mut decode_extra_data = DecodeableExtraData::default();
            decode_extra_data.set_schema(Some(schema));
            let extra_data = Some(decode_extra_data);

            let mut files = dependencies.vanilla_locs.iter().chain(dependencies.vanilla_tables.values().flatten()).filter_map(|path| {
                dependencies.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
            }).collect::<Vec<_>>();

            files.par_iter_mut().for_each(|(_, file)| {
                let _ = file.decode(&extra_data, true, false);
            });

            dependencies.vanilla_files.par_extend(files);
        }

        Ok(dependencies)
    }
723
724 pub fn save(&mut self, file_path: &Path) -> Result<()> {
726 let mut folder_path = file_path.to_owned();
727 folder_path.pop();
728 DirBuilder::new().recursive(true).create(&folder_path)?;
729
730 let mut file_path_1 = file_path.to_path_buf();
731 let mut file_path_2 = file_path.to_path_buf();
732 let mut file_path_3 = file_path.to_path_buf();
733
734 file_path_1.set_extension("pak1");
735 file_path_2.set_extension("pak2");
736 file_path_3.set_extension("pak3");
737
738 let mut file_1 = File::create(&file_path_1)?;
739 let mut file_2 = File::create(&file_path_2)?;
740 let mut file_3 = File::create(&file_path_3)?;
741
742 let mut vanilla_files_1 = self.vanilla_files.par_iter().map(|(_, b)| b.clone()).collect::<Vec<RFile>>();
746 let vanilla_files_2 = vanilla_files_1.split_off(self.vanilla_files.len() / 2);
747
748 let serialized_1: Vec<u8> = bitcode::serialize(&(&self.build_date, &self.version, &vanilla_files_1))?;
750 let serialized_2: Vec<u8> = bitcode::serialize(&vanilla_files_2)?;
751 let serialized_3: Vec<u8> = bitcode::serialize(&(&self.vanilla_tables, &self.vanilla_locs, &self.vanilla_folders, &self.vanilla_paths, &self.asskit_only_db_tables))?;
752
753 file_1.write_all(&serialized_1).map_err(RLibError::from)?;
754 file_2.write_all(&serialized_2).map_err(RLibError::from)?;
755 file_3.write_all(&serialized_3).map_err(From::from)
756 }
757
758 pub fn needs_updating(&self, game_info: &GameInfo, game_path: &Path) -> Result<bool> {
760 let ca_paths = game_info.ca_packs_paths(game_path)?;
761 let last_date = last_modified_time_from_files(&ca_paths)?;
762 Ok(last_date > self.build_date || self.version != VERSION)
763 }
764
765 fn load_loose_files(&mut self, schema: &Option<Schema>, game_info: &GameInfo, game_path: &Path) -> Result<()> {
767 self.vanilla_loose_files.clear();
768 self.vanilla_loose_tables.clear();
769 self.vanilla_loose_locs.clear();
770 self.vanilla_loose_folders.clear();
771 self.vanilla_loose_paths.clear();
772
773 let game_data_path = game_info.data_path(game_path)?;
774 let game_data_path_str = game_data_path.to_string_lossy().replace('\\', "/");
775
776 self.vanilla_loose_files = files_from_subdir(&game_data_path, true)?
777 .into_par_iter()
778 .filter_map(|path| {
779 let mut path = path.to_string_lossy().replace('\\', "/");
780 if !path.ends_with(".pack") {
781 if let Ok(mut rfile) = RFile::new_from_file(&path) {
782 let subpath = path.split_off(game_data_path_str.len() + 1);
783 rfile.set_path_in_container_raw(&subpath);
784 let _ = rfile.guess_file_type();
785 Some((subpath, rfile))
786 } else {
787 None
788 }
789 } else {
790 None
791 }
792 })
793 .collect::<HashMap<String, RFile>>();
794
795 let cacheable = self.vanilla_loose_files.par_iter_mut()
796 .filter_map(|(_, file)| {
797 let _ = file.guess_file_type();
798
799 match file.file_type() {
800 FileType::DB |
801 FileType::Loc => Some(file),
802 _ => None,
803 }
804 })
805 .collect::<Vec<&mut RFile>>();
806
807 cacheable.iter()
808 .for_each(|file| {
809 match file.file_type() {
810 FileType::DB => {
811 if let Some(table_name) = file.db_table_name_from_path() {
812 match self.vanilla_loose_tables.get_mut(table_name) {
813 Some(table_paths) => table_paths.push(file.path_in_container_raw().to_owned()),
814 None => { self.vanilla_loose_tables.insert(table_name.to_owned(), vec![file.path_in_container_raw().to_owned()]); },
815 }
816 }
817 }
818 FileType::Loc => {
819 self.vanilla_loose_locs.insert(file.path_in_container_raw().to_owned());
820 }
821 _ => {}
822 }
823 }
824 );
825
826 self.vanilla_loose_folders = self.vanilla_loose_files.par_iter().filter_map(|(path, _)| {
827 let file_path_split = path.split('/').collect::<Vec<&str>>();
828 let folder_path_len = file_path_split.len() - 1;
829 if folder_path_len == 0 {
830 None
831 } else {
832
833 let mut paths = Vec::with_capacity(folder_path_len);
834
835 for (index, folder) in file_path_split.iter().enumerate() {
836 if index < path.len() - 1 && !folder.is_empty() {
837 paths.push(file_path_split[0..=index].join("/"))
838 }
839 }
840
841 Some(paths)
842 }
843 }).flatten().collect::<HashSet<String>>();
844
845 self.vanilla_loose_files.keys().for_each(|path| {
846 let lower = path.to_lowercase();
847 match self.vanilla_loose_paths.get_mut(&lower) {
848 Some(paths) => paths.push(path.to_owned()),
849 None => { self.vanilla_loose_paths.insert(lower, vec![path.to_owned()]); },
850 }
851 });
852
853 if let Some(schema) = schema {
855 let mut decode_extra_data = DecodeableExtraData::default();
856 decode_extra_data.set_schema(Some(schema));
857 let extra_data = Some(decode_extra_data);
858
859 let mut files = self.vanilla_loose_locs.iter().chain(self.vanilla_loose_tables.values().flatten()).filter_map(|path| {
860 self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
861 }).collect::<Vec<_>>();
862
863 files.par_iter_mut().for_each(|(_, file)| {
864 let _ = file.decode(&extra_data, true, false);
865 });
866
867 self.vanilla_loose_files.par_extend(files);
868 }
869
870 Ok(())
871 }
872
873
874 fn load_parent_files(&mut self, schema: &Option<Schema>, parent_pack_names: &[String], game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {
876 self.parent_files.clear();
877 self.parent_tables.clear();
878 self.parent_locs.clear();
879 self.parent_folders.clear();
880 self.parent_paths.clear();
881
882 self.load_parent_packs(parent_pack_names, game_info, game_path, secondary_path)?;
884 self.parent_files.par_iter_mut().map(|(_, file)| file.guess_file_type()).collect::<Result<()>>()?;
885
886 self.parent_files.iter()
888 .for_each(|(path, file)| {
889 match file.file_type() {
890 FileType::DB => {
891 if let Some(table_name) = file.db_table_name_from_path() {
892 match self.parent_tables.get_mut(table_name) {
893 Some(table_paths) => table_paths.push(path.to_owned()),
894 None => { self.parent_tables.insert(table_name.to_owned(), vec![path.to_owned()]); },
895 }
896 }
897 }
898 FileType::Loc => {
899 self.parent_locs.insert(path.to_owned());
900 }
901 _ => {}
902 }
903 }
904 );
905
906 self.parent_folders = self.parent_files.par_iter().filter_map(|(path, _)| {
908 let file_path_split = path.split('/').collect::<Vec<&str>>();
909 let folder_path_len = file_path_split.len() - 1;
910 if folder_path_len == 0 {
911 None
912 } else {
913
914 let mut paths = Vec::with_capacity(folder_path_len);
915
916 for (index, folder) in file_path_split.iter().enumerate() {
917 if index < path.len() - 1 && !folder.is_empty() {
918 paths.push(file_path_split[0..=index].join("/"))
919 }
920 }
921
922 Some(paths)
923 }
924 }).flatten().collect::<HashSet<String>>();
925
926 self.parent_files.keys().for_each(|path| {
927 let lower = path.to_lowercase();
928 match self.parent_paths.get_mut(&lower) {
929 Some(paths) => paths.push(path.to_owned()),
930 None => { self.parent_paths.insert(lower, vec![path.to_owned()]); },
931 }
932 });
933
934 if let Some(schema) = schema {
936 let mut decode_extra_data = DecodeableExtraData::default();
937 decode_extra_data.set_schema(Some(schema));
938 let extra_data = Some(decode_extra_data);
939
940 let mut files = self.parent_tables.values().flatten().filter_map(|path| {
941 self.parent_files.remove(path).map(|file| (path.to_owned(), file))
942 }).collect::<Vec<_>>();
943
944 files.par_iter_mut().for_each(|(_, file)| {
945 let _ = file.decode(&extra_data, true, false);
946 });
947
948 self.parent_files.par_extend(files);
949 }
950
951 let mut files = self.parent_locs.iter().filter_map(|path| {
953 self.parent_files.remove(path).map(|file| (path.to_owned(), file))
954 }).collect::<Vec<_>>();
955
956 files.par_iter_mut().for_each(|(_, file)| {
957 let _ = file.decode(&None, true, false);
958 });
959
960 self.parent_files.par_extend(files);
961
962 Ok(())
963 }
964
965 fn load_parent_packs(&mut self, parent_pack_names: &[String], game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {
968 let data_packs_paths = game_info.data_packs_paths(game_path).unwrap_or_default();
969 let secondary_packs_paths = game_info.secondary_packs_paths(secondary_path);
970 let content_packs_paths = game_info.content_packs_paths(game_path);
971 let mut loaded_packfiles = vec![];
972
973 parent_pack_names.iter().for_each(|pack_name| self.load_parent_pack(pack_name, &mut loaded_packfiles, &data_packs_paths, &secondary_packs_paths, &content_packs_paths, game_info));
974
975 Ok(())
976 }
977
978 fn load_parent_pack(
981 &mut self,
982 pack_name: &str,
983 already_loaded: &mut Vec<String>,
984 data_paths: &[PathBuf],
985 secondary_paths: &Option<Vec<PathBuf>>,
986 content_paths: &Option<Vec<PathBuf>>,
987 game_info: &GameInfo
988 ) {
989 if !already_loaded.contains(&pack_name.to_owned()) {
991
992 if let Some(path) = data_paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
994 if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
995 already_loaded.push(pack_name.to_owned());
996 pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
997 self.parent_files.extend(pack.files().clone());
998
999 return;
1000 }
1001 }
1002
1003 if let Some(ref paths) = secondary_paths {
1005 if let Some(path) = paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
1006 if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
1007 already_loaded.push(pack_name.to_owned());
1008 pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
1009 self.parent_files.extend(pack.files().clone());
1010
1011 return;
1012 }
1013 }
1014 }
1015
1016 if let Some(ref paths) = content_paths {
1018 if let Some(path) = paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
1019 if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
1020 already_loaded.push(pack_name.to_owned());
1021 pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
1022 self.parent_files.extend(pack.files().clone());
1023 }
1024 }
1025 }
1026 }
1027 }
1028
    /// Decodes all the DB/Loc files of the dependencies (loose, vanilla and parent) with the
    /// provided schema. Does nothing when no schema is provided.
    ///
    /// Each group of files is temporarily removed from its map, decoded in parallel, then
    /// re-inserted; decode failures are ignored on purpose (the file just stays undecoded).
    pub fn decode_tables(&mut self, schema: &Option<Schema>) {
        if let Some(schema) = schema {

            let mut decode_extra_data = DecodeableExtraData::default();
            decode_extra_data.set_schema(Some(schema));
            let extra_data = Some(decode_extra_data);

            // Loose files.
            let mut files = self.vanilla_loose_locs.iter().chain(self.vanilla_loose_tables.values().flatten()).filter_map(|path| {
                self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
            }).collect::<Vec<_>>();

            files.par_iter_mut().for_each(|(_, file)| {
                let _ = file.decode(&extra_data, true, false);
            });

            self.vanilla_loose_files.par_extend(files);

            // Vanilla pack files.
            let mut files = self.vanilla_locs.iter().chain(self.vanilla_tables.values().flatten()).filter_map(|path| {
                self.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
            }).collect::<Vec<_>>();

            files.par_iter_mut().for_each(|(_, file)| {
                let _ = file.decode(&extra_data, true, false);
            });

            self.vanilla_files.par_extend(files);

            // Parent pack files.
            let mut files = self.parent_locs.iter().chain(self.parent_tables.values().flatten()).filter_map(|path| {
                self.parent_files.remove(path).map(|file| (path.to_owned(), file))
            }).collect::<Vec<_>>();

            files.par_iter_mut().for_each(|(_, file)| {
                let _ = file.decode(&extra_data, true, false);
            });

            self.parent_files.par_extend(files);
        }
    }
1073
1074 pub fn file(&self, file_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> Result<&RFile> {
1080 let file_path = if let Some(file_path) = file_path.strip_prefix('/') {
1081 file_path
1082 } else {
1083 file_path
1084 };
1085
1086 if include_parent {
1087
1088 if let Some(file) = self.parent_files.get(file_path) {
1090 return Ok(file);
1091 }
1092
1093 if case_insensitive {
1094 let lower = file_path.to_lowercase();
1095 if let Some(file) = self.parent_paths.get(&lower).and_then(|paths| self.parent_files.get(&paths[0])) {
1096 return Ok(file);
1097 }
1098 }
1099 }
1100
1101 if include_vanilla {
1102
1103 if let Some(file) = self.vanilla_files.get(file_path) {
1105 return Ok(file);
1106 }
1107
1108 if case_insensitive {
1109 let lower = file_path.to_lowercase();
1110 if let Some(file) = self.vanilla_paths.get(&lower).and_then(|paths| self.vanilla_files.get(&paths[0])) {
1111 return Ok(file);
1112 }
1113
1114 }
1115
1116 if let Some(file) = self.vanilla_loose_files.get(file_path) {
1118 return Ok(file);
1119 }
1120
1121 if case_insensitive {
1122 let lower = file_path.to_lowercase();
1123 if let Some(file) = self.vanilla_loose_paths.get(&lower).and_then(|paths| self.vanilla_loose_files.get(&paths[0])) {
1124 return Ok(file);
1125 }
1126 }
1127 }
1128
1129 Err(RLibError::DependenciesCacheFileNotFound(file_path.to_owned()))
1130 }
1131
    /// Returns a mutable reference to the cached file with the provided path, if it exists.
    ///
    /// Search order: parent files (if `include_parent`), then vanilla pack files, then vanilla
    /// loose files (if `include_vanilla`). Unlike [`Self::file`], there is no case-insensitive
    /// fallback here.
    ///
    /// # Errors
    ///
    /// Returns `RLibError::DependenciesCacheFileNotFound` if the file is not in the cache.
    pub fn file_mut(&mut self, file_path: &str, include_vanilla: bool, include_parent: bool) -> Result<&mut RFile> {
        if include_parent {
            if let Some(file) = self.parent_files.get_mut(file_path) {
                return Ok(file);
            }
        }

        if include_vanilla {
            if let Some(file) = self.vanilla_files.get_mut(file_path) {
                return Ok(file);
            }

            if let Some(file) = self.vanilla_loose_files.get_mut(file_path) {
                return Ok(file);
            }
        }

        Err(RLibError::DependenciesCacheFileNotFound(file_path.to_owned()))
    }
1152
1153 pub fn files_mut_by_paths(
1155 &mut self,
1156 paths: &HashSet<String>,
1157 include_vanilla: bool,
1158 include_parent: bool,
1159 ) -> HashMap<String, &mut RFile> {
1160 let mut result: HashMap<String, &mut RFile> = HashMap::with_capacity(paths.len());
1161
1162 if include_parent {
1163 for (k, v) in self.parent_files.iter_mut() {
1164 if paths.contains(k) {
1165 result.insert(k.clone(), v);
1166 }
1167 }
1168 }
1169
1170 if include_vanilla {
1171 for (k, v) in self.vanilla_files.iter_mut() {
1172 if paths.contains(k) && !result.contains_key(k) {
1173 result.insert(k.clone(), v);
1174 }
1175 }
1176
1177 for (k, v) in self.vanilla_loose_files.iter_mut() {
1178 if paths.contains(k) && !result.contains_key(k) {
1179 result.insert(k.clone(), v);
1180 }
1181 }
1182 }
1183
1184 result
1185 }
1186
    /// Returns references to all cached files matching the provided [`ContainerPath`]s.
    ///
    /// File paths are resolved through [`Self::file`]; folder paths are expanded to every
    /// cached file under them. A `Folder("")` path (which becomes `"/"` below) matches the
    /// whole container.
    pub fn files_by_path(&self, file_paths: &[ContainerPath], include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> HashMap<String, &RFile> {
        // Split the requested paths into individual files and folders.
        let (file_paths, folder_paths): (Vec<_>, Vec<_>) = file_paths.iter().partition_map(|file_path| match file_path {
            ContainerPath::File(file_path) => Either::Left(file_path.to_owned()),
            ContainerPath::Folder(file_path) => Either::Right(file_path.to_owned()),
        });

        let mut hashmap = HashMap::new();

        // Individual files: resolve each one through `file`, ignoring misses.
        if !file_paths.is_empty() {
            hashmap.extend(file_paths.par_iter()
                .filter_map(|file_path| self.file(file_path, include_vanilla, include_parent, case_insensitive)
                    .ok()
                    .map(|file| (file_path.to_owned(), file)))
                .collect::<Vec<(_,_)>>()
            );
        }

        // Folders: collect every cached file under each folder, per source.
        if !folder_paths.is_empty() {
            hashmap.extend(folder_paths.into_par_iter().flat_map(|folder_path| {
                let mut folder = vec![];
                // Append a trailing slash so prefix matches can't cross folder boundaries.
                let folder_path = folder_path.to_owned() + "/";
                if include_vanilla {

                    // "/" (empty folder) means "everything".
                    if folder_path == "/" {
                        folder.extend(self.vanilla_loose_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                        folder.extend(self.vanilla_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                    } else {
                        folder.extend(self.vanilla_loose_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                        folder.extend(self.vanilla_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());
                    }
                }

                if include_parent {
                    if folder_path == "/" {
                        folder.extend(self.parent_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                    } else {
                        folder.extend(self.parent_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());
                    }
                }
                folder
            }).collect::<Vec<(_,_)>>());
        }

        hashmap
    }
1272
1273 pub fn files_by_types(&self, file_types: &[FileType], include_vanilla: bool, include_parent: bool) -> HashMap<String, &RFile> {
1275 let mut files = HashMap::new();
1276
1277 if include_vanilla {
1279 files.extend(self.vanilla_loose_files.par_iter().chain(self.vanilla_files.par_iter())
1280 .filter(|(_, file)| file_types.contains(&file.file_type()))
1281 .map(|(path, file)| (path.to_owned(), file))
1282 .collect::<HashMap<_,_>>());
1283 }
1284
1285 if include_parent {
1286 files.extend(self.parent_files.par_iter()
1287 .filter(|(_, file)| file_types.contains(&file.file_type()))
1288 .map(|(path, file)| (path.to_owned(), file))
1289 .collect::<HashMap<_,_>>());
1290 }
1291
1292 files
1293 }
1294
1295 pub fn files_by_types_mut(&mut self, file_types: &[FileType], include_vanilla: bool, include_parent: bool) -> HashMap<String, &mut RFile> {
1297 let mut files = HashMap::new();
1298
1299 if include_vanilla {
1301 files.extend(self.vanilla_loose_files.par_iter_mut().chain(self.vanilla_files.par_iter_mut())
1302 .filter(|(_, file)| file_types.contains(&file.file_type()))
1303 .map(|(path, file)| (path.to_owned(), file))
1304 .collect::<HashMap<_,_>>());
1305 }
1306
1307 if include_parent {
1308 files.extend(self.parent_files.par_iter_mut()
1309 .filter(|(_, file)| file_types.contains(&file.file_type()))
1310 .map(|(path, file)| (path.to_owned(), file))
1311 .collect::<HashMap<_,_>>());
1312 }
1313
1314 files
1315 }
1316
1317 pub fn loc_data(&self, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1321 let mut cache = vec![];
1322
1323 if include_vanilla {
1324 let mut vanilla_loose_locs = self.vanilla_loose_locs.iter().collect::<Vec<_>>();
1325 vanilla_loose_locs.sort();
1326
1327 for path in &vanilla_loose_locs {
1328 if let Some(file) = self.vanilla_loose_files.get(*path) {
1329 cache.push(file);
1330 }
1331 }
1332
1333 let mut vanilla_locs = self.vanilla_locs.iter().collect::<Vec<_>>();
1334 vanilla_locs.sort();
1335
1336 for path in &vanilla_locs {
1337 if let Some(file) = self.vanilla_files.get(*path) {
1338 cache.push(file);
1339 }
1340 }
1341 }
1342
1343 if include_parent {
1344 let mut parent_locs = self.parent_locs.iter().collect::<Vec<_>>();
1345 parent_locs.sort();
1346
1347 for path in &parent_locs {
1348 if let Some(file) = self.parent_files.get(*path) {
1349 cache.push(file);
1350 }
1351 }
1352 }
1353
1354 Ok(cache)
1355 }
1356
1357 pub fn db_data(&self, table_name: &str, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1363 let mut cache = vec![];
1364
1365 if include_vanilla {
1366 if let Some(vanilla_loose_tables) = self.vanilla_loose_tables.get(table_name) {
1367 let mut vanilla_loose_tables = vanilla_loose_tables.to_vec();
1368 vanilla_loose_tables.sort();
1369
1370 for path in &vanilla_loose_tables {
1371 if let Some(file) = self.vanilla_loose_files.get(path) {
1372 cache.push(file);
1373 }
1374 }
1375 }
1376
1377 if let Some(vanilla_tables) = self.vanilla_tables.get(table_name) {
1378 let mut vanilla_tables = vanilla_tables.to_vec();
1379 vanilla_tables.sort();
1380
1381 for path in &vanilla_tables {
1382 if let Some(file) = self.vanilla_files.get(path) {
1383 cache.push(file);
1384 }
1385 }
1386 }
1387 }
1388
1389 if include_parent {
1390 if let Some(parent_tables) = self.parent_tables.get(table_name) {
1391 let mut parent_tables = parent_tables.to_vec();
1392 parent_tables.sort();
1393
1394 for path in &parent_tables {
1395 if let Some(file) = self.parent_files.get(path) {
1396 cache.push(file);
1397 }
1398 }
1399 }
1400 }
1401
1402 Ok(cache)
1403 }
1404
    /// Returns references to all cached DB files for `table_name`, replacing each one with the
    /// version found in `packs` (datacored copy) when a pack contains a file with the same
    /// container path.
    ///
    /// Collection order matches `db_data`: vanilla loose, vanilla packed, parent, each group
    /// sorted by path. The returned references borrow either from `self` or from `packs`,
    /// hence the shared `'a` lifetime.
    pub fn db_data_datacored<'a>(&'a self, table_name: &str, packs: &'a BTreeMap<String, Pack>, include_vanilla: bool, include_parent: bool) -> Result<Vec<&'a RFile>> {
        let mut cache = vec![];

        if include_vanilla {
            if let Some(vanilla_loose_tables) = self.vanilla_loose_tables.get(table_name) {
                let mut vanilla_loose_tables = vanilla_loose_tables.to_vec();
                vanilla_loose_tables.sort();

                for path in &vanilla_loose_tables {
                    if let Some(file) = self.vanilla_loose_files.get(path) {
                        cache.push(file);
                    }
                }
            }

            if let Some(vanilla_tables) = self.vanilla_tables.get(table_name) {
                let mut vanilla_tables = vanilla_tables.to_vec();
                vanilla_tables.sort();

                for path in &vanilla_tables {
                    if let Some(file) = self.vanilla_files.get(path) {
                        cache.push(file);
                    }
                }
            }
        }

        if include_parent {
            if let Some(parent_tables) = self.parent_tables.get(table_name) {
                let mut parent_tables = parent_tables.to_vec();
                parent_tables.sort();

                for path in &parent_tables {
                    if let Some(file) = self.parent_files.get(path) {
                        cache.push(file);
                    }
                }
            }
        }

        // Datacore pass: if any local pack carries a file with the same container path,
        // swap the cached reference for the pack's version.
        let paths = cache.iter()
            .map(|x| x.path_in_container())
            .collect::<Vec<_>>();

        for pack in packs.values() {
            for pack_file in pack.files_by_paths(&paths, true) {
                for cache_file in &mut cache {
                    if cache_file.path_in_container() == pack_file.path_in_container() {
                        *cache_file = pack_file;
                        break;
                    }
                }
            }
        }

        Ok(cache)
    }
1468
1469 pub fn db_and_loc_data(&self, include_db: bool, include_loc: bool, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1473 let mut cache = vec![];
1474
1475 if include_vanilla {
1476 if include_db {
1477 let mut vanilla_loose_tables = self.vanilla_loose_tables.values().flatten().collect::<Vec<_>>();
1478 vanilla_loose_tables.sort();
1479
1480 for path in &vanilla_loose_tables {
1481 if let Some(file) = self.vanilla_loose_files.get(*path) {
1482 cache.push(file);
1483 }
1484 }
1485
1486 let mut vanilla_tables = self.vanilla_tables.values().flatten().collect::<Vec<_>>();
1487 vanilla_tables.sort();
1488
1489 for path in &vanilla_tables {
1490 if let Some(file) = self.vanilla_files.get(*path) {
1491 cache.push(file);
1492 }
1493 }
1494 }
1495
1496 if include_loc {
1497 let mut vanilla_loose_locs = self.vanilla_loose_locs.iter().collect::<Vec<_>>();
1498 vanilla_loose_locs.sort();
1499
1500 for path in &vanilla_loose_locs {
1501 if let Some(file) = self.vanilla_loose_files.get(*path) {
1502 cache.push(file);
1503 }
1504 }
1505
1506 let mut vanilla_locs = self.vanilla_locs.iter().collect::<Vec<_>>();
1507 vanilla_locs.sort();
1508
1509 for path in &vanilla_locs {
1510 if let Some(file) = self.vanilla_files.get(*path) {
1511 cache.push(file);
1512 }
1513 }
1514 }
1515 }
1516
1517 if include_parent {
1518 if include_db {
1519 let mut parent_tables = self.parent_tables.values().flatten().collect::<Vec<_>>();
1520 parent_tables.sort();
1521
1522 for path in &parent_tables {
1523 if let Some(file) = self.parent_files.get(*path) {
1524 cache.push(file);
1525 }
1526 }
1527 }
1528
1529 if include_loc {
1530 let mut parent_locs = self.parent_locs.iter().collect::<Vec<_>>();
1531 parent_locs.sort();
1532
1533 for path in &parent_locs {
1534 if let Some(file) = self.parent_files.get(*path) {
1535 cache.push(file);
1536 }
1537 }
1538 }
1539 }
1540
1541 Ok(cache)
1542 }
1543
    /// Builds, per referencing column index, the reference/lookup data for a table definition.
    ///
    /// Starts from the cached vanilla references for `table_name`, then merges in references
    /// resolved against the local `packs`, and finally adds any hardcoded lookups defined in
    /// the schema patches. `loc_data` may carry pre-collected loc entries; when `None`, loc
    /// entries are decoded from the packs' loc files instead.
    pub fn db_reference_data(&self, schema: &Schema, packs: &BTreeMap<String, Pack>, table_name: &str, definition: &Definition, loc_data: &Option<HashMap<Cow<str>, Cow<str>>>) -> HashMap<i32, TableReferences> {

        // Cached vanilla reference data, if we have any for this table.
        let mut vanilla_references = match self.local_tables_references.get(table_name) {
            Some(cached_data) => cached_data.clone(),
            None => HashMap::new(),
        };

        // If the caller didn't supply loc data, decode it from the packs' loc files.
        // `_loc_files` must outlive `loc_decoded`, which borrows from it.
        let (_loc_files, loc_decoded) = if loc_data.is_some() {
            (vec![], vec![])
        } else {
            let loc_files: Vec<_> = packs.values().flat_map(|pack| pack.files_by_type(&[FileType::Loc])).collect();
            let loc_decoded = loc_files.iter()
                .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
                .map(|file| file.data())
                .collect::<Vec<_>>();
            (loc_files, loc_decoded)
        };

        // Normalize to a single `loc_data` map: either the caller's or one built from the
        // decoded loc entries (column 0 = key, column 1 = text).
        let mut _loc_data_dummy = HashMap::new();
        let loc_data = if let Some(ref loc_data) = loc_data {
            loc_data
        } else {
            _loc_data_dummy = loc_decoded.par_iter()
                .flat_map(|data| data.par_iter()
                    .map(|entry| (entry[0].data_to_string(), entry[1].data_to_string()))
                    .collect::<Vec<(_,_)>>()
                ).collect::<HashMap<_,_>>();
            &_loc_data_dummy
        };

        // Work on a private copy of the definition with recursive lookups expanded.
        let mut definition = definition.clone();
        self.add_recursive_lookups_to_definition(schema, &mut definition, table_name);

        let patches = Some(definition.patches());
        let fields_processed = definition.fields_processed();
        let local_references = fields_processed.par_iter().enumerate().filter_map(|(column, field)| {
            match field.is_reference(patches) {
                // Column explicitly references another table/column.
                Some((ref ref_table, ref ref_column)) => {
                    if !ref_table.is_empty() && !ref_column.is_empty() {

                        let lookup_data = if let Some(ref data) = field.lookup_no_patch() { data.to_vec() } else { Vec::with_capacity(0) };
                        let mut references = TableReferences::default();
                        *references.field_name_mut() = field.name().to_owned();

                        let _local_found = self.db_reference_data_from_local_pack(&mut references, (ref_table, ref_column, &lookup_data), packs, loc_data);

                        Some((column as i32, references))
                    } else { None }
                }

                // Not a reference: still build lookups for the single key column of this
                // very table, if the field defines them.
                None => {
                    if let Some(ref lookup_data) = field.lookup_no_patch() {

                        if field.is_key(patches) && fields_processed.iter().filter(|x| x.is_key(patches)).count() == 1 {

                            // Strip the "_tables" suffix (7 chars): draining the prefix of a
                            // clone and collecting it yields the short table name.
                            let ref_table = if table_name.ends_with("_tables") && table_name.len() > 7 {
                                table_name.to_owned().drain(..table_name.len() - 7).collect()
                            } else {
                                table_name.to_owned()
                            };

                            let ref_column = field.name();

                            let mut references = TableReferences::default();
                            *references.field_name_mut() = field.name().to_owned();

                            let _local_found = self.db_reference_data_from_local_pack(&mut references, (&ref_table, ref_column, lookup_data), packs, loc_data);

                            Some((column as i32, references))
                        } else { None }
                    } else { None }
                }
            }
        }).collect::<HashMap<_, _>>();

        // Merge the locally-resolved data into the cached vanilla data.
        vanilla_references.par_iter_mut().for_each(|(key, value)|
            if let Some(local_value) = local_references.get(key) {
                value.data.extend(local_value.data.iter().map(|(k, v)| (k.clone(), v.clone())));
            }
        );

        // Finally, add hardcoded lookups from the schema patches, creating entries for
        // columns that had no reference data yet.
        for (index, field) in fields_processed.iter().enumerate() {
            match vanilla_references.get_mut(&(index as i32)) {
                Some(references) => {
                    let hardcoded_lookup = field.lookup_hardcoded(patches);
                    if !hardcoded_lookup.is_empty() {
                        references.data.extend(hardcoded_lookup);
                    }
                },
                None => {
                    let mut references = TableReferences::default();
                    *references.field_name_mut() = field.name().to_owned();
                    let hardcoded_lookup = field.lookup_hardcoded(patches);
                    if !hardcoded_lookup.is_empty() {
                        references.data.extend(hardcoded_lookup);
                        vanilla_references.insert(index as i32, references);
                    }
                },
            }
        }

        vanilla_references
    }
1665
    /// Feeds `references` with reference/lookup data resolved only against the game and
    /// parent-mod files cached in the dependencies (no local packs, no extra loc data).
    ///
    /// `reference_info` is `(referenced table, referenced column, lookup columns)`.
    fn db_reference_data_from_vanilla_and_modded_tables(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String])) -> Option<Definition> {
        self.db_reference_data_generic(references, reference_info, None, &HashMap::new())
    }
1672
1673 fn db_reference_data_from_asskit_tables(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String])) -> bool {
1677 let ref_table = reference_info.0;
1678 let ref_column = reference_info.1;
1679 let ref_lookup_columns = reference_info.2;
1680
1681 match self.asskit_only_db_tables.get(ref_table) {
1682 Some(table) => {
1683 let fields_processed = table.definition().fields_processed();
1684 let ref_column_index = fields_processed.iter().position(|x| x.name() == ref_column);
1685 let ref_lookup_columns_index = ref_lookup_columns.iter().map(|column| fields_processed.iter().position(|x| x.name() == column)).collect::<Vec<_>>();
1686
1687 for row in &*table.data() {
1688 let mut reference_data = String::new();
1689 let mut lookup_data = vec![];
1690
1691 if let Some(index) = ref_column_index {
1693 reference_data = row[index].data_to_string().to_string();
1694 }
1695
1696 for column in ref_lookup_columns_index.iter().flatten() {
1698 lookup_data.push(row[*column].data_to_string());
1699 }
1700
1701 references.data.insert(reference_data, lookup_data.join(" "));
1702 }
1703 true
1704 },
1705 None => false,
1706 }
1707 }
1708
    /// Feeds `references` with reference/lookup data resolved against both the cached
    /// dependencies and the provided local `packs`, using `loc_data` for loc lookups.
    ///
    /// `reference_info` is `(referenced table, referenced column, lookup columns)`.
    fn db_reference_data_from_local_pack(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String]), packs: &BTreeMap<String, Pack>, loc_data: &HashMap<Cow<str>, Cow<str>>) -> Option<Definition> {
        self.db_reference_data_generic(references, reference_info, Some(packs), loc_data)
    }
1713
    /// Core reference/lookup resolver shared by the vanilla-only and local-pack paths.
    ///
    /// `reference_info` is `(referenced table, referenced column, lookup paths)`. Each lookup
    /// path is a `:`-separated chain of steps, each step being `table#key#lookup`. For every
    /// DB file of the referenced table, this fills `references.data` with
    /// `key value -> joined lookup values`, and returns the highest-versioned definition seen
    /// (or `None` if no table decoded).
    fn db_reference_data_generic(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String]), packs: Option<&BTreeMap<String, Pack>>, loc_data: &HashMap<Cow<str>, Cow<str>>) -> Option<Definition> {
        let mut data_found: Option<Definition> = None;

        let ref_table = reference_info.0;
        let ref_column = reference_info.1;
        let ref_lookup_columns = reference_info.2;

        // Per-lookup-table file cache, filled lazily below.
        let mut cache = HashMap::new();

        // Accept both "unit" and "unit_tables" as the referenced table name.
        let ref_table_full = if ref_table.ends_with("_tables") {
            ref_table.to_owned()
        } else {
            ref_table.to_owned() + "_tables"
        };

        // Files of the referenced table: local packs (if any) first, then dependencies.
        let files = match packs {
            Some(packs) => {
                let mut files: Vec<&RFile> = packs.values().flat_map(|pack| pack.files_by_path(&ContainerPath::Folder(format!("db/{ref_table_full}")), true)).collect();
                files.append(&mut self.db_data(&ref_table_full, true, true).unwrap_or_else(|_| vec![]));
                files
            },
            None => self.db_data(&ref_table_full, true, true).unwrap_or_else(|_| vec![]),
        };

        // Cache of key -> value maps, keyed by file path + lookup step.
        let mut table_data_cache: HashMap<String, HashMap<String, String>> = HashMap::new();

        files.iter().for_each(|file| {
            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                let definition = db.definition();
                let fields_processed = definition.fields_processed();

                if let Some(ref_column_index) = fields_processed.iter().position(|x| x.name() == ref_column) {

                    // First pass: parse each lookup path and pre-build the caches every
                    // step will need, so the per-row loop below only does map lookups.
                    let lookups_analyzed = ref_lookup_columns.iter().map(|ref_lookup_path| {
                        let ref_lookup_steps = ref_lookup_path.split(':').map(|x| x.split('#').collect::<Vec<_>>()).collect::<Vec<_>>();
                        let mut is_loc = false;
                        let mut col_pos = 0;

                        for (index, ref_lookup_step) in ref_lookup_steps.iter().enumerate() {
                            // A well-formed step is exactly table#key#lookup.
                            if ref_lookup_step.len() == 3 {
                                let lookup_ref_table = ref_lookup_step[0];
                                let lookup_ref_key = ref_lookup_step[1];
                                let lookup_ref_lookup = ref_lookup_step[2];
                                let lookup_ref_table_long = lookup_ref_table.to_owned() + "_tables";

                                // Lazily collect (and de-duplicate by path) the files of the
                                // step's table, packs first, then dependencies, sorted by path.
                                if !cache.contains_key(lookup_ref_table) {
                                    let mut files = vec![];

                                    if let Some(packs) = packs {
                                        for pack in packs.values() {
                                            files.append(&mut pack.files_by_path(&ContainerPath::Folder(format!("db/{lookup_ref_table_long}")), true));
                                        }
                                    }

                                    for file in self.db_data(&lookup_ref_table_long, true, true).unwrap_or_else(|_| vec![]) {
                                        if files.iter().all(|x| x.path_in_container_raw() != file.path_in_container_raw()) {
                                            files.push(file);
                                        }
                                    }

                                    if !files.is_empty() {
                                        files.sort_by(|a, b| a.path_in_container_raw().cmp(b.path_in_container_raw()));
                                        cache.insert(lookup_ref_table.to_owned(), files);
                                    }
                                }

                                // On the final step, find the position of the lookup column:
                                // first among the localised fields (loc lookup), then among
                                // the regular fields.
                                if index == ref_lookup_steps.len() - 1 {
                                    if let Some(file) = cache.get(lookup_ref_table) {
                                        if let Some(file) = file.first() {
                                            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                                                let definition = db.definition();
                                                let fields_processed = definition.fields_processed();
                                                let localised_fields = definition.localised_fields();

                                                match localised_fields.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                    Some(loc_pos) => {
                                                        is_loc = true;
                                                        col_pos = loc_pos;
                                                    },
                                                    None => match fields_processed.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                        Some(pos) => {
                                                            is_loc = false;
                                                            col_pos = pos;
                                                        },
                                                        None => {
                                                        },
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }

                                // Build the key -> value cache for this step, per file.
                                if let Some(files) = cache.get(lookup_ref_table) {
                                    for file in files {
                                        // Cache key format must match db_reference_data_generic_lookup.
                                        let table_data_column_cache_key = file.path_in_container_raw().to_owned() + &ref_lookup_step.join("++");
                                        if !table_data_cache.contains_key(&table_data_column_cache_key) {
                                            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                                                let definition = db.definition();
                                                let fields_processed = definition.fields_processed();
                                                let localised_fields = definition.localised_fields();
                                                let localised_order = definition.localised_key_order();

                                                // Prefix for generated loc keys: "<table>_<field>_".
                                                let loc_key = if is_loc {
                                                    if let Some(loc_field) = localised_fields.get(col_pos) {
                                                        let mut loc_key = String::with_capacity(2 + lookup_ref_table.len() + loc_field.name().len());
                                                        loc_key.push_str(lookup_ref_table);
                                                        loc_key.push('_');
                                                        loc_key.push_str(loc_field.name());
                                                        loc_key.push('_');
                                                        loc_key
                                                    } else {
                                                        String::new()
                                                    }
                                                } else {
                                                    String::new()
                                                };

                                                if let Some(source_key_column) = fields_processed.iter().position(|x| x.name() == lookup_ref_key) {

                                                    // Intermediate step: map key -> next step's key.
                                                    if index < ref_lookup_steps.len() - 1 {
                                                        if let Some(source_lookup_column) = fields_processed.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                            let cache = db.data().iter()
                                                                .map(|row| (row[source_key_column].data_to_string().to_string(), row[source_lookup_column].data_to_string().to_string()))
                                                                .collect::<HashMap<_,_>>();

                                                            table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                        }
                                                    }

                                                    // Final step, loc column: map key -> generated loc key.
                                                    else if is_loc {
                                                        let cache = db.data().iter()
                                                            .map(|row| {
                                                                let mut loc_key = loc_key.to_owned();
                                                                loc_key.push_str(&localised_order.iter().map(|pos| row[*pos as usize].data_to_string()).join(""));
                                                                (row[source_key_column].data_to_string().to_string(), loc_key)
                                                            })
                                                            .collect::<HashMap<_,_>>();
                                                        table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                    }

                                                    // Final step, regular column: map key -> column value.
                                                    else {
                                                        let cache = db.data().iter()
                                                            .map(|row| (row[source_key_column].data_to_string().to_string(), row[col_pos].data_to_string().to_string()))
                                                            .collect::<HashMap<_,_>>();

                                                        table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            } else {
                                error!("Badly built lookup. This is a bug.");
                            }
                        }

                        (ref_lookup_steps, is_loc)

                    }).collect::<Vec<_>>();

                    // Second pass: resolve every row's key through the pre-built caches.
                    let data = db.data();
                    for row in &*data {
                        let mut lookup_data = Vec::with_capacity(lookups_analyzed.len());

                        let reference_data = row[ref_column_index].data_to_string();

                        for (lookup_steps, is_loc) in lookups_analyzed.iter() {
                            if !reference_data.is_empty() {

                                if let Some(lookup) = self.db_reference_data_generic_lookup(&cache, loc_data, &reference_data, lookup_steps, *is_loc, &table_data_cache) {
                                    lookup_data.push(lookup);
                                }
                            }
                        }

                        references.data.insert(reference_data.to_string(), lookup_data.into_iter().join(":"));
                    }

                    // Keep the highest-versioned definition we've decoded.
                    match data_found {
                        Some(ref definition) => {
                            if db.definition().version() > definition.version() {
                                data_found = Some(db.definition().clone());
                            }
                        }

                        None => data_found = Some(db.definition().clone()),
                    }
                }
            }
        });

        data_found
    }
1924
1925 fn db_reference_data_generic_lookup(
1926 &self,
1927 cache: &HashMap<String, Vec<&RFile>>,
1928 loc_data: &HashMap<Cow<str>, Cow<str>>,
1929 lookup_key: &str,
1930 lookup_steps: &[Vec<&str>],
1931 is_loc: bool,
1932 table_data_cache: &HashMap<String, HashMap<String, String>>
1933 ) -> Option<String> {
1934 let mut data_found: Option<String> = None;
1935
1936 if lookup_steps.is_empty() {
1937 return None;
1938 }
1939
1940 let current_step = &lookup_steps[0];
1941 let source_table = current_step[0];
1942
1943 if let Some(files) = cache.get(source_table) {
1944 for file in files {
1945 let table_data_column_cache_key = file.path_in_container_raw().to_owned() + ¤t_step.join("++");
1946 if let Some(table_data_column_cache) = table_data_cache.get(&table_data_column_cache_key) {
1947
1948 if let Some(lookup_value) = table_data_column_cache.get(lookup_key) {
1949
1950 if lookup_steps.len() > 1 {
1952 if !lookup_value.is_empty() {
1953 data_found = self.db_reference_data_generic_lookup(cache, loc_data, lookup_value, &lookup_steps[1..], is_loc, table_data_cache);
1954 }
1955 }
1956
1957 else if is_loc {
1959
1960 if let Some(data) = loc_data.get(&**lookup_value) {
1961 data_found = Some(data.to_string());
1962 } else if let Some(data) = self.localisation_data.get(&**lookup_value) {
1963 data_found = Some(data.to_string());
1964 } else {
1965 data_found = Some(lookup_value.to_string())
1966 }
1967 }
1968
1969 else {
1971 data_found = Some(lookup_value.to_owned());
1972 }
1973
1974 break;
1976 }
1977 }
1978 }
1979 }
1980
1981 data_found
1982 }
1983
1984 pub fn loc_key_source(&self, key: &str) -> Option<(String, String, Vec<String>)> {
1988 let key_split = key.split('_').collect::<Vec<_>>();
1989
1990 for (index, _) in key_split.iter().enumerate().rev() {
1993
1994 if index >= 1 {
1996
1997 let mut table_name = key_split[..index].join("_");
1998 let full_table_name = format!("{table_name}_tables");
1999
2000 if let Ok(rfiles) = self.db_data(&full_table_name, true, false) {
2001 let mut decoded = rfiles.iter()
2002 .filter_map(|x| if let Ok(RFileDecoded::DB(table)) = x.decoded() {
2003 Some(table)
2004 } else {
2005 None
2006 }).collect::<Vec<_>>();
2007
2008 if let Some(ak_file) = self.asskit_only_db_tables().get(&full_table_name) {
2010 decoded.push(ak_file);
2011 }
2012
2013 for table in decoded {
2014 let definition = table.definition();
2015 let localised_fields = definition.localised_fields();
2016 let localised_key_order = definition.localised_key_order();
2017 if !localised_fields.is_empty() {
2018 let mut field = String::new();
2019
2020 for (second_index, value) in key_split[index..].iter().enumerate() {
2022 field.push_str(value);
2023
2024 if localised_fields.iter().any(|x| x.name() == field) {
2025
2026 let key_data = &key_split[index + second_index + 1..].join("_");
2028
2029 let data = table.data();
2032 for row in data.iter() {
2033 let generated_key_split = localised_key_order.iter().map(|col| row[*col as usize].data_to_string()).collect::<Vec<_>>();
2034 let generated_key = generated_key_split.join("");
2035 if &generated_key == key_data {
2036 return Some((table_name, field, generated_key_split.iter().map(|x| x.to_string()).collect()));
2037 }
2038 }
2039 }
2040
2041 field.push('_');
2042 }
2043 }
2044 }
2045 }
2046
2047 table_name.push('_');
2049 }
2050 }
2051
2052 None
2053 }
2054
2055 pub fn file_exists(&self, file_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> bool {
2061 if include_parent {
2062 if self.parent_files.contains_key(file_path) {
2063 return true
2064 } else if case_insensitive {
2065 let lower = file_path.to_lowercase();
2066 if self.parent_paths.contains_key(&lower) {
2067 return true
2068 }
2069 }
2070 }
2071
2072 if include_vanilla {
2073
2074 if self.vanilla_files.contains_key(file_path) || self.vanilla_loose_files.contains_key(file_path) {
2075 return true
2076 } else if case_insensitive {
2077 let lower = file_path.to_lowercase();
2078 if self.vanilla_paths.contains_key(&lower) || self.vanilla_loose_paths.contains_key(&lower) {
2079 return true
2080 }
2081 }
2082 }
2083
2084 false
2085 }
2086
2087 pub fn folder_exists(&self, folder_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> bool {
2089 if include_parent && (
2090 self.parent_folders.contains(folder_path) ||
2091 (case_insensitive && self.parent_folders.par_iter().any(|path| caseless::canonical_caseless_match_str(path, folder_path)))
2092 ) {
2093 return true
2094 }
2095
2096 if include_vanilla && (
2097 (self.vanilla_folders.contains(folder_path) || self.vanilla_loose_folders.contains(folder_path)) ||
2098 (case_insensitive && self.vanilla_folders.par_iter().chain(self.vanilla_loose_folders.par_iter()).any(|path| caseless::canonical_caseless_match_str(path, folder_path)))
2099 ) {
2100 return true
2101 }
2102
2103 false
2104 }
2105
2106 pub fn are_dependencies_generated(file_path: &Path) -> bool {
2108 file_path.is_file()
2109 }
2110
2111 pub fn is_vanilla_data_loaded(&self, include_asskit: bool) -> bool {
2113 if include_asskit {
2114 !self.vanilla_files.is_empty() && self.is_asskit_data_loaded()
2115 } else {
2116 !self.vanilla_files.is_empty()
2117 }
2118 }
2119
2120 pub fn is_asskit_data_loaded(&self) -> bool {
2122 !self.asskit_only_db_tables.is_empty()
2123 }
2124
2125 pub fn is_db_outdated(&self, rfile: &RFileDecoded) -> bool {
2127 if let RFileDecoded::DB(data) = rfile {
2128 let dep_db_undecoded = if let Ok(undecoded) = self.db_data(data.table_name(), true, false) { undecoded } else { return false };
2129 let dep_db_decoded = dep_db_undecoded.iter().filter_map(|x| if let Ok(RFileDecoded::DB(decoded)) = x.decoded() { Some(decoded) } else { None }).collect::<Vec<_>>();
2130
2131 if let Some(vanilla_db) = dep_db_decoded.iter().max_by(|x, y| x.definition().version().cmp(y.definition().version())) {
2132 if vanilla_db.definition().version() > data.definition().version() {
2133 return true;
2134 }
2135 }
2136 }
2137
2138 false
2139 }
2140
2141 pub fn db_version(&self, table_name: &str) -> Option<i32> {
2143 let tables = self.vanilla_tables.get(table_name)?;
2144 for table_path in tables {
2145
2146 let table = self.vanilla_files.get(table_path)?;
2147 if let RFileDecoded::DB(table) = table.decoded().ok()? {
2148 return Some(*table.definition().version());
2149 }
2150
2151 let table = self.vanilla_loose_files.get(table_path)?;
2152 if let RFileDecoded::DB(table) = table.decoded().ok()? {
2153 return Some(*table.definition().version());
2154 }
2155 }
2156
2157 None
2158 }
2159
2160 pub fn db_values_from_table_name_and_column_name(&self, packs: Option<&BTreeMap<String, Pack>>, table_name: &str, column_name: &str, include_vanilla: bool, include_parent: bool) -> HashSet<String> {
2162 let mut values = HashSet::new();
2163
2164 if let Ok(files) = self.db_data(table_name, include_vanilla, include_parent) {
2165 values.extend(files.par_iter().filter_map(|file| {
2166 if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2167 table.definition().column_position_by_name(column_name).map(|column| table.data().par_iter().map(|row| row[column].data_to_string().to_string()).collect::<Vec<_>>())
2168 } else { None }
2169 }).flatten().collect::<Vec<_>>());
2170 }
2171
2172 if let Some(packs) = packs {
2173 for pack in packs.values() {
2174 let files = pack.files_by_path(&ContainerPath::Folder(format!("db/{table_name}")), true);
2175 values.extend(files.par_iter().filter_map(|file| {
2176 if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2177 table.definition().column_position_by_name(column_name).map(|column| table.data().par_iter().map(|row| row[column].data_to_string().to_string()).collect::<Vec<_>>())
2178 } else { None }
2179 }).flatten().collect::<Vec<_>>());
2180 }
2181 }
2182
2183 values
2184 }
2185
    /// Collects, across all DB files of `table_name`, a map of `key_column_name` value ->
    /// `desired_column_name` value, from both the cached dependencies and (optionally) the
    /// provided local packs.
    ///
    /// Files missing either column (or that fail to decode as DB) are skipped. Pack values
    /// are merged last, so on key collisions they overwrite dependency values.
    pub fn db_values_from_table_name_and_column_name_for_value(&self, packs: Option<&BTreeMap<String, Pack>>, table_name: &str, key_column_name: &str, desired_column_name: &str, include_vanilla: bool, include_parent: bool) -> HashMap<String, String> {
        let mut values = HashMap::new();

        if let Ok(files) = self.db_data(table_name, include_vanilla, include_parent) {
            values.extend(files.par_iter().filter_map(|file| {
                if let Ok(RFileDecoded::DB(table)) = file.decoded() {
                    if let Some(column) = table.definition().column_position_by_name(key_column_name) {
                        table.definition().column_position_by_name(desired_column_name).map(|desired_column| table.data().par_iter().map(|row| (row[column].data_to_string().to_string(), row[desired_column].data_to_string().to_string())).collect::<Vec<_>>())
                    } else { None }
                } else { None }
            }).flatten().collect::<Vec<_>>());
        }

        if let Some(packs) = packs {
            for pack in packs.values() {
                let files = pack.files_by_path(&ContainerPath::Folder(format!("db/{table_name}")), true);
                values.extend(files.par_iter().filter_map(|file| {
                    if let Ok(RFileDecoded::DB(table)) = file.decoded() {
                        if let Some(column) = table.definition().column_position_by_name(key_column_name) {
                            table.definition().column_position_by_name(desired_column_name).map(|desired_column| table.data().par_iter().map(|row| (row[column].data_to_string().to_string(), row[desired_column].data_to_string().to_string())).collect::<Vec<_>>())
                        } else { None }
                    } else { None }
                }).flatten().collect::<Vec<_>>());
            }
        }

        values
    }
2215
    /// Updates a decoded DB table's definition to the highest-versioned definition found in
    /// the vanilla game files for the same table.
    ///
    /// On success returns `(old version, new version, deleted field names, added field names)`.
    ///
    /// # Errors
    /// - `DecodingDBNotADBTable` if `rfile` is not a DB table.
    /// - `NoTableInGameFilesToCompare` if no vanilla copy of the table decodes.
    /// - `NoDefinitionUpdateAvailable` if the definitions are already identical.
    pub fn update_db(&mut self, rfile: &mut RFileDecoded) -> Result<(i32, i32, Vec<String>, Vec<String>)> {
        match rfile {
            RFileDecoded::DB(data) => {
                let dep_db_undecoded = self.db_data(data.table_name(), true, false)?;
                let dep_db_decoded = dep_db_undecoded.iter().filter_map(|x| if let Ok(RFileDecoded::DB(decoded)) = x.decoded() { Some(decoded) } else { None }).collect::<Vec<_>>();

                // Pick the vanilla table with the highest definition version as the target.
                if let Some(vanilla_db) = dep_db_decoded.iter().max_by(|x, y| x.definition().version().cmp(y.definition().version())) {

                    let definition_new = vanilla_db.definition();
                    let definition_old = data.definition().clone();
                    if definition_old != *definition_new {
                        data.set_definition(definition_new);

                        // Diff the field lists by name to report what changed.
                        let fields_old = definition_old.fields_processed();
                        let fields_new = definition_new.fields_processed();
                        let fields_deleted = fields_old.iter()
                            .filter(|x| fields_new.iter().all(|y| y.name() != x.name()))
                            .map(|x| x.name().to_owned())
                            .collect::<Vec<_>>();
                        let fields_added = fields_new.iter()
                            .filter(|x| fields_old.iter().all(|y| y.name() != x.name()))
                            .map(|x| x.name().to_owned())
                            .collect::<Vec<_>>();

                        Ok((*definition_old.version(), *definition_new.version(), fields_deleted, fields_added))
                    }
                    else {
                        Err(RLibError::NoDefinitionUpdateAvailable)
                    }
                }
                else { Err(RLibError::NoTableInGameFilesToCompare) }
            }
            _ => Err(RLibError::DecodingDBNotADBTable),
        }
    }
2255
2256 pub fn generate_missing_loc_data(&self, packs: &mut BTreeMap<String, Pack>) -> Result<Vec<ContainerPath>> {
2258 let loc_data = self.loc_data(true, true)?;
2259 let mut existing_locs = HashMap::new();
2260
2261 for loc in &loc_data {
2262 if let Ok(RFileDecoded::Loc(ref data)) = loc.decoded() {
2263 existing_locs.extend(data.table().data().iter().map(|x| (x[0].data_to_string().to_string(), x[1].data_to_string().to_string())));
2264 }
2265 }
2266
2267 let mut all_paths = vec![];
2268 for pack in packs.values_mut() {
2269 all_paths.extend(pack.generate_missing_loc_data(&existing_locs)?);
2270 }
2271 Ok(all_paths)
2272 }
2273
    /// Brute-forces the localised key order of every DB table it can see, writing the result
    /// either into the provided Assembly Kit tables (`ak_files`) or into the `schema`.
    ///
    /// For each table it builds the set of localised fields (from the definition, or from `locs`
    /// when provided), then tries every permutation of the table's key columns until one produces
    /// loc keys (`{table}_{field}_{combined_key}`) that all exist in the game's loc data.
    ///
    /// Fields whose loc keys can't be matched at all are logged at the end.
    pub fn bruteforce_loc_key_order(&self, schema: &mut Schema, locs: Option<HashMap<String, Vec<String>>>, local_packs: Option<&BTreeMap<String, Pack>>, mut ak_files: Option<&mut HashMap<String, DB>>) -> Result<()> {
        // Accumulates "table/field" pairs we failed to match, for the final report.
        let mut fields_still_not_found = vec![];

        // Flatten all game loc files into a single key -> text map.
        let loc_files = self.loc_data(true, false)?;
        let loc_table = loc_files.iter()
            .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
            .flat_map(|file| file.data().to_vec())
            .map(|entry| (entry[0].data_to_string().to_string(), entry[1].data_to_string().to_string()))
            .collect::<HashMap<_,_>>();

        // Clone the AK tables up-front so we can iterate them while mutating `ak_files` later.
        let ak_tables = match ak_files {
            Some(ref tables) => (**tables).clone(),
            None => HashMap::new(),
        };

        // Decoded DB tables from the provided local Packs, if any.
        let local_files: Vec<_> = match local_packs {
            Some(packs) => packs.values()
                .flat_map(|pack| pack.files_by_type(&[FileType::DB]))
                .filter_map(|x| match x.decoded() {
                    Ok(RFileDecoded::DB(db)) => Some(db),
                    _ => None,
                })
                .collect(),
            None => Vec::new(),
        };

        // Source tables: either the AK dumps (when provided) or the game's own DB files.
        let mut db_tables = if ak_files.is_some() {
            ak_tables.values().collect::<Vec<_>>()
        } else {
            self.db_and_loc_data(true, false, true, false)?
                .iter()
                .filter_map(|file| if let Ok(RFileDecoded::DB(table)) = file.decoded() { Some(table) } else { None })
                .collect::<Vec<_>>()
        };

        db_tables.extend_from_slice(&local_files);

        // Merge tables that share name AND definition version, so each (name, version) pair
        // is brute-forced once over the union of its rows.
        let mut db_tables_dedup: Vec<DB> = vec![];
        for table in &db_tables {
            match db_tables_dedup.iter_mut().find(|x| x.table_name() == table.table_name() && x.definition().version() == table.definition().version()) {
                Some(db_source) => *db_source = DB::merge(&[db_source, table])?,
                None => db_tables_dedup.push((*table).clone()),
            }
        }

        for table in &db_tables_dedup {
            let definition = table.definition();
            let mut loc_fields = definition.localised_fields().to_vec();

            // Candidate list we actually confirm against the loc data.
            let mut loc_fields_final = loc_fields.to_vec();

            // When explicit loc field names are provided, they replace the definition's own list.
            if let Some(ref loc_fields_info) = locs {
                loc_fields.clear();

                if let Some(loc_names) = loc_fields_info.get(&table.table_name_without_tables()) {
                    for name in loc_names {
                        if loc_fields.iter().all(|x| x.name() != name) {

                            // Synthesize a minimal StringU8 field for each provided name.
                            let mut field = Field::default();
                            field.set_name(name.to_string());
                            field.set_field_type(FieldType::StringU8);

                            loc_fields.push(field);
                        }
                    }
                }
            }

            // Key columns, with their positions, are what we permute later.
            let fields = definition.fields_processed();
            let key_fields = fields.iter()
                .enumerate()
                .filter(|(_, field)| field.is_key(None))
                .collect::<Vec<_>>();

            let short_table_name = table.table_name_without_tables();

            // Confirm each candidate loc field: keep it only if some loc key uses its prefix.
            for localised_field in &loc_fields {
                let localised_key = format!("{}_{}_", short_table_name, localised_field.name());

                if loc_table.keys().any(|x| x.starts_with(&localised_key)) && loc_fields_final.iter().all(|x| x.name() != localised_field.name()) {
                    loc_fields_final.push(localised_field.clone());
                }
            }

            // Also probe regular table fields: some tables have loc'd columns not declared
            // as localised fields in the definition.
            for table_field in &fields {
                if loc_fields_final.iter().all(|x| !x.name().starts_with(table_field.name())) {
                    let localised_key = format!("{}_{}_", short_table_name, table_field.name());
                    if loc_table.keys().any(|x| x.starts_with(&localised_key)) && loc_fields_final.iter().all(|x| x.name() != table_field.name()) {
                        loc_fields_final.push(table_field.clone());
                    }
                }
            }

            // Anything still unconfirmed goes into the final report.
            for loc_field in &loc_fields {
                if loc_fields_final.iter().all(|x| x.name() != loc_field.name()) {
                    fields_still_not_found.push(format!("{}/{}", table.table_name_without_tables(), loc_field.name()));
                }
            }

            // Persist the confirmed localised fields, into the AK tables or the schema.
            if let Some(ak_files) = &mut ak_files {
                let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                let mut definition = ak_table.definition().clone();
                definition.set_localised_fields(loc_fields_final.to_vec());
                ak_table.set_definition(&definition);

            } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                schema_definition.set_localised_fields(loc_fields_final.to_vec());
            }

            if !loc_fields_final.is_empty() {

                // Single key column: the order is trivially that column.
                let order = if key_fields.len() == 1 {
                    vec![key_fields[0].0 as u32]
                }

                // Multiple key columns: try every permutation until one matches the loc data.
                else {
                    let mut order = Vec::with_capacity(key_fields.len());
                    let combos = key_fields.iter().permutations(key_fields.len());
                    let table_data = table.data();
                    for combo in combos {

                        // A combo is valid only if EVERY row produces loc keys present in the game.
                        let mut combo_is_valid = true;
                        for row in table_data.iter() {
                            // Concatenate the row's key values in this permutation's order.
                            let mut combined_key = String::new();
                            for (index, _) in &combo {
                                combined_key.push_str(&row[*index].data_to_string());
                            }

                            for localised_field in &loc_fields_final {
                                let localised_key = format!("{}_{}_{}", short_table_name, localised_field.name(), combined_key);
                                match loc_table.get(&localised_key) {
                                    Some(_) => {
                                        // Record the order on the first hit; cleared below if the combo later fails.
                                        if order.is_empty() {
                                            order = combo.iter().map(|(index, _)| *index as u32).collect();
                                        }
                                    }
                                    None => {
                                        combo_is_valid = false;
                                        break;
                                    }
                                }
                            }

                            if !combo_is_valid {
                                break;
                            }
                        }

                        // Failed combo: discard any partially-recorded order and try the next one.
                        if !combo_is_valid {
                            order = vec![];
                            continue;
                        }

                        if !order.is_empty() {
                            break;
                        }
                    }

                    order
                };

                if !order.is_empty() && !loc_fields_final.is_empty() {
                    info!("Bruteforce: loc key order found for table {}, version {}.", table.table_name(), definition.version());
                    if let Some(ak_files) = &mut ak_files {
                        let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                        let mut definition = ak_table.definition().clone();
                        definition.set_localised_key_order(order);
                        ak_table.set_definition(&definition);
                    } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                        schema_definition.set_localised_key_order(order);
                    }
                } else {
                    // NOTE(review): this branch is reached when `order` is empty (no permutation
                    // matched), so the "order found" wording of this log line looks misleading — confirm.
                    info!("Bruteforce: loc key order found (but may be incorrect) for table {}, version {}.", table.table_name(), definition.version());

                    // NOTE(review): dead code? We are inside `if !loc_fields_final.is_empty()`,
                    // so this inner `is_empty()` check can never be true — confirm intent.
                    if loc_fields_final.is_empty() {
                        if let Some(ak_files) = &mut ak_files {
                            let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                            let mut definition = ak_table.definition().clone();
                            definition.set_localised_key_order(vec![]);
                            ak_table.set_definition(&definition);
                        } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                            schema_definition.set_localised_key_order(vec![]);
                        }
                    }
                }
            }

            // No localised fields at all: clear any previously-stored key order.
            else if let Some(ak_files) = &mut ak_files {
                let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                let mut definition = ak_table.definition().clone();
                definition.set_localised_key_order(vec![]);
                ak_table.set_definition(&definition);
            } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                schema_definition.set_localised_key_order(vec![]);
            }
        }

        fields_still_not_found.sort();
        fields_still_not_found.dedup();
        info!("Bruteforce: fields still not found :{fields_still_not_found:#?}");

        // When working against the game files (not the AK), also report orphan loc keys
        // whose source table/field can't be located.
        if ak_files.is_none() {
            for key in loc_table.keys().sorted() {
                if self.loc_key_source(key).is_none() {
                    info!("-- Bruteforce: cannot find source for loc key {key}.");
                }
            }
        }

        Ok(())
    }
2516
    /// Scans every DB table (vanilla + provided Packs) and tries to automatically deduce
    /// schema patches for string columns: mainly `is_filename` and `filename_relative_path`,
    /// by matching column values against the game's image (.png/.tga) and video (.ca_vp8) paths.
    ///
    /// Deduced patches are merged into the schema's patch set. The per-table/per-field special
    /// cases below are hand-curated exceptions found while running this against real game data.
    #[allow(clippy::if_same_then_else)]
    pub fn generate_automatic_patches(&self, schema: &mut Schema, packs: &BTreeMap<String, Pack>) -> Result<()> {
        // All decodable DB tables from the game files...
        let mut db_tables = self.db_and_loc_data(true, false, true, false)?
            .iter()
            .filter_map(|file| if let Ok(RFileDecoded::DB(table)) = file.decoded() { Some(table) } else { None })
            .collect::<Vec<_>>();

        // ...plus all decodable DB tables from the provided Packs.
        for pack in packs.values() {
            db_tables.extend_from_slice(&pack.files_by_type(&[FileType::DB])
                .iter()
                .filter_map(|x| if let Ok(RFileDecoded::DB(db)) = x.decoded() {
                    Some(db)
                } else {
                    None
                })
                .collect::<Vec<_>>()
            );
        }

        let current_patches = schema.patches_mut();
        let mut new_patches: HashMap<String, DefinitionPatch> = HashMap::new();

        // Reference path lists used to match field values against real game files.
        let image_paths = self.vanilla_files()
            .keys()
            .filter(|x| x.ends_with(".png") || x.ends_with(".tga"))
            .collect::<Vec<_>>();

        let video_paths = self.vanilla_files()
            .keys()
            .filter(|x| x.ends_with(".ca_vp8"))
            .collect::<Vec<_>>();

        for table in &db_tables {
            let definition = table.definition();
            let fields = definition.fields_processed();
            for (column, field) in fields.iter().enumerate() {
                match field.field_type() {

                    // Only string columns can hold file paths.
                    FieldType::StringU8 |
                    FieldType::StringU16 |
                    FieldType::OptionalStringU8 |
                    FieldType::OptionalStringU16 => {

                        // Heuristic: field names containing icon/image are probably image paths,
                        // minus a couple of known false positives.
                        let mut possible_icon = false;
                        let low_name = field.name().to_lowercase();
                        if (low_name.contains("icon") || low_name.contains("image")) &&

                            // Known false positive.
                            !(table.table_name() == "building_sets_tables" && field.name() == "icon") &&

                            // Known false positive.
                            !(table.table_name() == "character_traits_tables" && field.name() == "icon") {
                            possible_icon = true;
                        }

                        // For each row, collect candidate relative paths ("prefix%suffix" templates,
                        // where % stands for the cell's value) that locate the value among the images.
                        let mut possible_relative_paths = table.data().par_iter()
                            .filter_map(|row| {

                                // Process the field if it's not yet marked as filename, is marked but
                                // has no relative path info yet, or is one of the known overrides below.
                                if !field.is_filename(None) || (
                                    field.is_filename(None) && (
                                        field.filename_relative_path(None).is_none() ||
                                        field.filename_relative_path(None).unwrap().is_empty()
                                    )
                                ) || (

                                    (table.table_name() == "advisors_tables" && field.name() == "advisor_icon_path") ||

                                    (table.table_name() == "campaign_post_battle_captive_options_tables" && field.name() == "icon_path") ||

                                    (table.table_name() == "narrative_viewer_tabs_tables" && field.name() == "image_path") ||

                                    (table.table_name() == "technology_ui_groups_tables" && field.name() == "optional_background_image")
                                ) {

                                    // Normalize the cell: lowercase, forward slashes.
                                    let mut data = row[column].data_to_string().to_lowercase().replace("\\", "/");

                                    // Strip a single leading slash.
                                    if data.starts_with("/") {
                                        if data.len() > 1 {
                                            data = data[1..].to_owned();
                                        } else {
                                            data = String::new();
                                        }
                                    }

                                    // Skip empty cells, folders, and known placeholder values.
                                    if !data.is_empty() && !data.ends_with("/") &&
                                        data != "." &&
                                        data != "x" &&
                                        data != "false" &&
                                        data != "building_placeholder" &&
                                        data != "placehoder.png" &&
                                        data != "placeholder" &&
                                        data != "placeholder.tga" &&
                                        data != "placeholder.png" && (
                                        possible_icon ||
                                        data.ends_with(".png") || data.ends_with(".tga")
                                    ) {

                                        let possible_paths = image_paths.iter()

                                            // Per-table/per-field folder restrictions, curated by hand
                                            // to cut down false matches.
                                            .filter(|x| {
                                                if table.table_name() == "aide_de_camp_speeches_tables" && field.name() == "icon_name" {
                                                    x.starts_with("ui/battle ui/adc_icons/")
                                                } else if table.table_name() == "agent_string_subculture_overrides_tables" && field.name() == "icon_path" {
                                                    x.starts_with("ui/campaign ui/agents/icons/")
                                                } else if table.table_name() == "ancillary_types_tables" && field.name() == "ui_icon" {
                                                    x.starts_with("ui/portraits/ancillaries/")
                                                } else if table.table_name() == "battlefield_building_categories_tables" && field.name() == "icon_path" {
                                                    x.starts_with("ui/battle ui/building icons/")
                                                } else if table.table_name() == "bonus_value_uis_tables" && field.name() == "icon" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "building_culture_variants_tables" && field.name() == "icon" {
                                                    x.starts_with("ui/buildings/icons/")
                                                } else if table.table_name() == "campaign_payload_ui_details_tables" && field.name() == "icon" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "campaign_post_battle_captive_options_tables" && field.name() == "icon_path" {
                                                    x.starts_with("ui/campaign ui/captive_option_icons/")
                                                } else if table.table_name() == "capture_point_types_tables" && field.name() == "icon_name" {
                                                    x.starts_with("ui/battle ui/capture_point_icons/")
                                                } else if table.table_name() == "character_skills_tables" && field.name() == "image_path" {
                                                    x.starts_with("ui/campaign ui/skills/")
                                                } else if table.table_name() == "character_traits_tables" && field.name() == "icon_custom" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")

                                                // Cursors: exclude paths that merely share the value as a prefix.
                                                } else if table.table_name() == "cursors_tables" && field.name() == "image" {
                                                    !x.starts_with(&(data.to_owned() + "_"))
                                                } else if table.table_name() == "dilemmas_tables" && field.name() == "ui_image" {
                                                    x.starts_with("ui/eventpics/")
                                                } else if table.table_name() == "effect_bundles_tables" && field.name() == "ui_icon" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "effects_tables" && (field.name() == "icon" || field.name() == "icon_negative") {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "faction_groups_tables" && field.name() == "ui_icon" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "incidents_tables" && field.name() == "ui_image" {
                                                    x.starts_with("ui/eventpics/")
                                                } else if table.table_name() == "message_event_strings_tables" && field.name() == "image" {
                                                    x.starts_with("ui/eventpics/")
                                                } else if table.table_name() == "missions_tables" && field.name() == "ui_icon" {
                                                    x.starts_with("ui/campaign ui/message_icons/")

                                                // Mission images must also match the exact file name, as .png.
                                                } else if table.table_name() == "missions_tables" && field.name() == "ui_image" {
                                                    x.starts_with("ui/eventpics/") && x.ends_with(&(data.to_owned() + ".png"))
                                                } else if table.table_name() == "pooled_resources_tables" && field.name() == "optional_icon_path" {
                                                    x.starts_with("ui/skins/")
                                                } else if table.table_name() == "projectile_shot_type_enum_tables" && field.name() == "icon_name" {
                                                    x.starts_with("ui/battle ui/ability_icons/")
                                                } else if table.table_name() == "religions_tables" && field.name() == "ui_icon_path" {
                                                    x.starts_with("ui/campaign ui/religion_icons/")
                                                } else if table.table_name() == "special_ability_phases_tables" && field.name() == "ticker_icon" {
                                                    x.starts_with("ui/battle ui/ability_icons/")
                                                } else if table.table_name() == "technologies_tables" && field.name() == "icon_name" {
                                                    x.starts_with("ui/campaign ui/technologies/")
                                                } else if table.table_name() == "technologies_tables" && field.name() == "info_pic" {
                                                    x.starts_with("ui/eventpics/")
                                                } else if table.table_name() == "trait_categories_tables" && field.name() == "icon_path" {
                                                    x.starts_with("ui/campaign ui/effect_bundles/")
                                                } else if table.table_name() == "ui_unit_groupings_tables" && field.name() == "icon" {
                                                    x.starts_with("ui/common ui/unit_category_icons/")
                                                } else if table.table_name() == "victory_types_tables" && field.name() == "icon" {
                                                    x.starts_with("ui/campaign ui/victory_type_icons/")

                                                // Videos live under movies/.
                                                } else if table.table_name() == "videos_tables" && field.name() == "video_name" {
                                                    x.starts_with("movies/")
                                                } else {
                                                    true
                                                }
                                            })

                                            // Match the value inside the path. Values with no extension
                                            // must match "/value.", complete file names "/value", and
                                            // values containing folders match anywhere. Values ending
                                            // in '_' are partial names and never matched.
                                            .filter(|x| if !data.ends_with('_') {
                                                if !data.contains("/") {
                                                    if !data.contains('.') {
                                                        x.contains(&("/".to_owned() + &data + "."))
                                                    } else {
                                                        x.contains(&("/".to_owned() + &data))
                                                    }
                                                } else {
                                                    x.contains(&data)
                                                }
                                            } else {
                                                false
                                            })

                                            // Turn each matched path into a template, replacing the
                                            // last occurrence of the value with '%'.
                                            .filter_map(|x| x.rfind(&data).map(|pos| (x, pos)))
                                            .map(|(x, pos)| x[..pos].to_owned() + &x[pos..].replacen(&data, "%", 1))
                                            .collect::<Vec<_>>();


                                        if !possible_paths.is_empty() {
                                            return Some(possible_paths)
                                        }
                                    }
                                }

                                None
                            })
                            .flatten()
                            .collect::<HashSet<String>>();

                        // Same process again, but against the video path list.
                        let mut possible_video = false;
                        if low_name.contains("video") {
                            possible_video = true;
                        }

                        possible_relative_paths.extend(
                            table.data().par_iter().filter_map(|row| {

                                if !field.is_filename(None) || (
                                    field.is_filename(None) && (
                                        field.filename_relative_path(None).is_none() ||
                                        field.filename_relative_path(None).unwrap().is_empty()
                                    )
                                ) || (

                                    table.table_name() == "videos_tables" && field.name() == "video_name"
                                ) {

                                    let mut data = row[column].data_to_string().to_lowercase().replace("\\", "/");

                                    // Strip a single leading slash.
                                    if data.starts_with("/") {
                                        if data.len() > 1 {
                                            data = data[1..].to_owned();
                                        } else {
                                            data = String::new();
                                        }
                                    }

                                    if !data.is_empty() && (
                                        possible_video ||
                                        data.ends_with(".ca_vp8")
                                    ) {

                                        let possible_paths = video_paths.iter()
                                            .filter(|x| {
                                                if table.table_name() == "videos_tables" && field.name() == "video_name" {
                                                    x.starts_with("movies/")
                                                } else {
                                                    true
                                                }
                                            })
                                            .filter(|x| if !data.contains('.') {
                                                x.contains(&("/".to_owned() + &data + "."))
                                            } else {
                                                x.contains(&("/".to_owned() + &data))
                                            })

                                            // Templatize, same as the image branch above.
                                            .filter_map(|x| x.rfind(&data).map(|pos| (x, pos)))
                                            .map(|(x, pos)| x[..pos].to_owned() + &x[pos..].replacen(&data, "%", 1))
                                            .collect::<Vec<_>>();


                                        if !possible_paths.is_empty() {
                                            return Some(possible_paths)
                                        }
                                    }
                                }

                                None
                            })
                            .flatten()
                            .collect::<HashSet<String>>()
                        );

                        // Log ambiguous results (more than one template, or a template other than
                        // a bare "%") for manual review. NOTE(review): the dbg! below is left in —
                        // presumably intentional for this schema-generation tooling; confirm.
                        if !possible_relative_paths.is_empty() && (possible_relative_paths.len() > 1 || (possible_relative_paths.len() == 1 && possible_relative_paths.iter().collect::<Vec<_>>()[0] != "%")) {
                            info!("Checking table {}, field {} ...", table.table_name(), field.name());
                            dbg!(&possible_relative_paths);
                        }

                        // Known model-file fields: force a bare "%" template (full path in the cell).
                        if (table.table_name() == "models_building_tables" && field.name() == "logic_file") ||
                            (table.table_name() == "models_sieges_tables" && (field.name() == "model_file" || field.name() == "logic_file" || field.name() == "collision_file")) ||
                            (table.table_name() == "models_deployables_tables" && (field.name() == "model_file" || field.name() == "logic_file" || field.name() == "collision_file")) {
                            possible_relative_paths.clear();
                            possible_relative_paths.insert("%".to_owned());
                        }

                        // Known NON-filename fields: explicitly patch is_filename = false.
                        if (table.table_name() == "ui_mercenary_recruitment_infos_tables" && field.name() == "hire_button_icon_path") ||
                            (table.table_name() == "battles_tables" && (field.name() == "specification" || field.name() == "battle_environment_audio")) ||
                            (table.table_name() == "factions_tables" && field.name() == "key") ||
                            (table.table_name() == "frontend_faction_leaders_tables" && field.name() == "key") {
                            let mut patch = HashMap::new();
                            patch.insert("is_filename".to_owned(), "false".to_owned());

                            match new_patches.get_mut(table.table_name()) {
                                Some(patches) => match patches.get_mut(field.name()) {
                                    Some(patches) => patches.extend(patch),
                                    None => { patches.insert(field.name().to_owned(), patch); }
                                },
                                None => {
                                    let mut table_patch = HashMap::new();
                                    table_patch.insert(field.name().to_owned(), patch);
                                    new_patches.insert(table.table_name().to_string(), table_patch);
                                }
                            }
                        }

                        // Record the deduced patch: mark as filename and, when meaningful,
                        // store the ';'-separated relative path templates.
                        if !possible_relative_paths.is_empty() {
                            let mut possible_relative_paths = possible_relative_paths.iter().collect::<Vec<_>>();
                            possible_relative_paths.sort();

                            let mut patch = HashMap::new();
                            if !field.is_filename(None) {
                                patch.insert("is_filename".to_owned(), "true".to_owned());
                            }

                            // Only store paths when they carry information: multiple templates,
                            // a single non-trivial template, or a "%" overwriting an old path.
                            if possible_relative_paths.len() > 1 || (
                                (
                                    possible_relative_paths.len() == 1 &&
                                    possible_relative_paths[0].contains('%') &&
                                    possible_relative_paths[0] != "%"
                                ) || (
                                    possible_relative_paths[0] == "%" &&
                                    field.filename_relative_path(None).is_some() &&
                                    !field.filename_relative_path(None).unwrap().is_empty()
                                )
                            ) {
                                patch.insert("filename_relative_path".to_owned(), possible_relative_paths.into_iter().join(";"));
                            }

                            if !patch.is_empty() {
                                match new_patches.get_mut(table.table_name()) {
                                    Some(patches) => match patches.get_mut(field.name()) {
                                        Some(patches) => patches.extend(patch),
                                        None => { patches.insert(field.name().to_owned(), patch); }
                                    },
                                    None => {
                                        let mut table_patch = HashMap::new();
                                        table_patch.insert(field.name().to_owned(), patch);
                                        new_patches.insert(table.table_name().to_string(), table_patch);
                                    }
                                }
                            }
                        }
                    }

                    // Integer columns: no automatic patches deduced (arm kept for future use).
                    FieldType::I64 |
                    FieldType::OptionalI64 => {
                    }
                    _ => continue
                }
            }
        }

        // Merge everything we found into the schema's live patch set.
        Schema::add_patches_to_patch_set(current_patches, &new_patches);

        Ok(())
    }
2941
    /// Inserts the provided tile map folders and tile folders into a Pack, generating the
    /// matching `_tile_database` entry for tiles that live in a subfolder, then runs the
    /// optimizer over the inserted paths.
    ///
    /// `tiles` pairs each on-disk folder with its subpath under `terrain/tiles/battle`.
    /// Returns `(added_paths, paths_to_delete)` as reported by the insertions and the optimizer.
    ///
    /// # Errors
    /// Fails if no Pack is available, or if any folder/file insertion or the optimize pass fails.
    pub fn add_tile_maps_and_tiles(&mut self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, schema: &Schema, options: OptimizerOptions, tile_maps: Vec<PathBuf>, tiles: Vec<(PathBuf, String)>) -> Result<(Vec<ContainerPath>, Vec<ContainerPath>)> {
        let mut added_paths = vec![];

        // Target Pack: the one matching pack_key, or the first one when no key is given.
        let pack = match pack_key {
            Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
            None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
        };

        // Tile maps always go under terrain/battles.
        for tile_map in &tile_maps {
            added_paths.append(&mut pack.insert_folder(tile_map, "terrain/battles", &None, &None, true)?);
        }

        for (tile, subpath) in &tiles {

            // Tiles in a subfolder also need their _tile_database companion file.
            let (internal_path, needs_tile_database) = if subpath.is_empty() {
                ("terrain/tiles/battle".to_owned(), false)
            } else {
                (format!("terrain/tiles/battle/{}", subpath.replace('\\', "/")), true)
            };
            added_paths.append(&mut pack.insert_folder(tile, &internal_path, &None, &None, true)?);

            if needs_tile_database {

                // Walk up from the tile folder past the subpath components (+1 for the tile
                // folder itself) to reach the root that holds _tile_database on disk.
                let subpath_len = subpath.replace('\\', "/").split('/').count();
                let mut tile_database = tile.to_path_buf();

                (0..=subpath_len).for_each(|_| {
                    tile_database.pop();
                });

                // NOTE(review): this replaces '/' but not '\\' in subpath, unlike the
                // normalization above — confirm subpath is always forward-slashed here.
                let file_name = format!("{}_{}.bin", subpath.replace('/', "_"), tile.file_name().unwrap().to_string_lossy());
                tile_database.push(format!("_tile_database/TILES/{file_name}"));
                let tile_database_path = format!("terrain/tiles/battle/_tile_database/TILES/{file_name}");

                added_paths.push(pack.insert_file(&tile_database, &tile_database_path, &None)?.unwrap());
            }
        }

        // Optimize only the paths we just inserted.
        let (paths_to_delete, paths_to_add) = pack.optimize(Some(added_paths.clone()), self, schema, game, &options)?;

        let paths_to_delete = paths_to_delete.iter()
            .map(|path| ContainerPath::File(path.to_string()))
            .collect::<Vec<_>>();

        added_paths.extend(paths_to_add.into_iter()
            .map(|path| ContainerPath::File(path.to_string()))
            .collect::<Vec<_>>());

        Ok((added_paths, paths_to_delete))
    }
3000
    /// Prepares everything needed to build a startpos for `campaign_id`, then launches the game
    /// so it performs the actual processing.
    ///
    /// This writes the user script with the processing commands, extracts victory objectives
    /// where the game needs them, backs up and removes any startpos/hlp/spd files that would
    /// collide with the generated ones, and finally starts the game (per-game launch method).
    ///
    /// # Errors
    /// Fails if the Pack isn't saved to disk inside /data, if `campaign_id` is empty, if game
    /// paths can't be resolved, or if the selected game isn't supported for startpos building.
    pub fn build_starpos_pre(&self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, game_path: &Path, campaign_id: &str, process_hlp_spd_data: bool, sub_start_pos: &str) -> Result<()> {

        // campaign_id -> map_name, needed to locate the hlp/spd files of each campaign.
        let map_names = if process_hlp_spd_data {
            self.db_values_from_table_name_and_column_name_for_value(Some(packs), "campaigns_tables", "campaign_name", "map_name", true, true)
        } else {
            HashMap::new()
        };

        // Target Pack: the one matching pack_key, or the first one when no key is given.
        let pack_file = match pack_key {
            Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
            None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
        };
        let pack_name = pack_file.disk_file_name();
        if pack_name.is_empty() {
            return Err(RLibError::BuildStartposError("The Pack needs to be saved to disk in order to build a startpos. Save it and try again.".to_owned()));
        }

        if campaign_id.is_empty() {
            return Err(RLibError::BuildStartposError("campaign_id not provided.".to_owned()));
        }

        let process_hlp_spd_data_string = if process_hlp_spd_data {
            String::from("process_campaign_ai_map_data;")
        } else {
            String::new()
        };

        // Attila/Thrones need the AK working dir mounted for the processing to work.
        let extra_folders = "add_working_directory assembly_kit\\working_data;";
        let mut user_script_contents = if game.key() == KEY_ATTILA || game.key() == KEY_THRONES_OF_BRITANNIA { extra_folders.to_owned() } else { String::new() };

        // The commands the game executes on boot to generate the startpos.
        user_script_contents.push_str(&format!("
mod {pack_name};
process_campaign_startpos {campaign_id} {sub_start_pos};
{process_hlp_spd_data_string}
quit_after_campaign_processing;"
        ));

        let game_data_path = game.data_path(game_path)?;
        if !game_path.is_dir() {
            return Err(RLibError::BuildStartposError("Game path incorrect. Fix it in the settings and try again.".to_owned()));
        }

        // The game only picks up the Pack's startpos commands if the Pack is in /data.
        if !PathBuf::from(pack_file.disk_file_path()).starts_with(&game_data_path) {
            return Err(RLibError::BuildStartposError("The Pack needs to be in /data. Install it there and try again.".to_owned()));
        }

        // Older games need victory_objectives.txt extracted next to the campaign folder.
        if GAMES_NEEDING_VICTORY_OBJECTIVES.contains(&game.key()) {
            let mut game_campaign_path = game_data_path.to_path_buf();
            game_campaign_path.push(campaign_id);
            DirBuilder::new().recursive(true).create(&game_campaign_path)?;

            game_campaign_path.push(VICTORY_OBJECTIVES_EXTRACTED_FILE_NAME);
            pack_file.extract(ContainerPath::File(VICTORY_OBJECTIVES_FILE_NAME.to_owned()), &game_campaign_path, false, &None, true, false, &None, true)?;
        }

        let config_path = game.config_path(game_path).ok_or(RLibError::BuildStartposError("Error getting the game's config path.".to_owned()))?;
        let scripts_path = config_path.join("scripts");
        DirBuilder::new().recursive(true).create(&scripts_path)?;

        // Write the user script (backing up any existing one). Rome 2 passes the commands
        // on the command line instead (see below).
        if game.key() != KEY_ROME_2 {

            let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
            let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");

            if uspa.is_file() {
                std::fs::copy(&uspa, uspb)?;
            }

            let mut file = BufWriter::new(File::create(uspa)?);

            // Old raw-db games read the user script as UTF-16.
            if *game.raw_db_version() < 2 {
                file.write_string_u16(&user_script_contents)?;
            } else {
                file.write_all(user_script_contents.as_bytes())?;
            }

            file.flush()?;
        }

        // Back up and remove the existing startpos so the game regenerates it.
        if game.key() != KEY_THRONES_OF_BRITANNIA &&
            game.key() != KEY_ATTILA &&
            game.key() != KEY_SHOGUN_2 {

            let sub_start_pos_suffix = if sub_start_pos.is_empty() {
                String::new()
            } else {
                format!("_{sub_start_pos}")
            };

            let starpos_path = game_data_path.join(format!("campaigns/{campaign_id}/startpos{sub_start_pos_suffix}.esf"));
            if starpos_path.is_file() {
                let starpos_path_bak = game_data_path.join(format!("campaigns/{campaign_id}/startpos{sub_start_pos_suffix}.esf.bak"));
                std::fs::copy(&starpos_path, starpos_path_bak)?;
                std::fs::remove_file(starpos_path)?;
            }
        }

        // Same backup-and-remove dance for the hlp/spd files, which is game-specific.
        if process_hlp_spd_data {
            if let Some(map_name) = map_names.get(campaign_id) {
                match game.key() {

                    KEY_PHARAOH_DYNASTIES |
                    KEY_PHARAOH |
                    KEY_WARHAMMER_3 |
                    KEY_TROY |
                    KEY_THREE_KINGDOMS |
                    KEY_WARHAMMER_2 |
                    KEY_WARHAMMER => {
                        let hlp_folder_path = game_data_path.join(format!("campaign_maps/{map_name}"));
                        if !hlp_folder_path.is_dir() {
                            DirBuilder::new().recursive(true).create(&hlp_folder_path)?;
                        }

                        let hlp_path = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf"));
                        if hlp_path.is_file() {
                            let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
                            std::fs::copy(&hlp_path, hlp_path_bak)?;
                            std::fs::remove_file(hlp_path)?;
                        }
                    },

                    // These games delete the output folder while processing, so keep a watchdog
                    // thread re-creating it until build_starpos_post tells it to stop.
                    KEY_THRONES_OF_BRITANNIA |
                    KEY_ATTILA => {
                        let folder_path = config_path.join(format!("maps/campaign_maps/{map_name}"));

                        let (sender, receiver) = channel::<bool>();
                        let join = thread::spawn(move || {
                            loop {
                                match receiver.try_recv() {
                                    Ok(stop) => if stop {
                                        break;
                                    }
                                    Err(_) => {
                                        if !folder_path.is_dir() {
                                            let _ = DirBuilder::new().recursive(true).create(&folder_path);
                                        }

                                        thread::sleep(Duration::from_millis(100));
                                    }
                                }
                            }
                        });

                        *START_POS_WORKAROUND_THREAD.write().unwrap() = Some(vec![(sender, join)]);
                    },

                    KEY_ROME_2 => {
                        // NOTE(review): this creates the folder only when it already exists —
                        // looks inverted compared to the `!is_dir()` check in the branch above; confirm.
                        let hlp_folder = game_data_path.join(format!("campaign_maps/{map_name}/"));
                        if hlp_folder.is_dir() {
                            let _ = DirBuilder::new().recursive(true).create(&hlp_folder);
                        }

                        let hlp_path = hlp_folder.join("hlp_data.esf");
                        if hlp_path.is_file() {
                            let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
                            std::fs::copy(&hlp_path, hlp_path_bak)?;
                            std::fs::remove_file(hlp_path)?;
                        }

                    }
                    KEY_SHOGUN_2 => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    KEY_NAPOLEON => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    KEY_EMPIRE => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
                }

                // spd_data only exists on the newer games.
                if game.key() != KEY_THRONES_OF_BRITANNIA &&
                    game.key() != KEY_ATTILA &&
                    game.key() != KEY_ROME_2 &&
                    game.key() != KEY_SHOGUN_2 &&
                    game.key() != KEY_NAPOLEON &&
                    game.key() != KEY_EMPIRE {

                    let spd_path = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf"));
                    if spd_path.is_file() {
                        let spd_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf.bak"));
                        std::fs::copy(&spd_path, spd_path_bak)?;
                        std::fs::remove_file(spd_path)?;
                    }
                }
            }
        }

        // Launch the game. Three Kingdoms: run the exe directly and wait, then restore the
        // user script immediately after the run finishes.
        if game.key() == KEY_THREE_KINGDOMS {
            let exe_path = game.executable_path(game_path).ok_or_else(|| RLibError::BuildStartposError("Game exe path not found.".to_owned()))?;
            let exe_name = exe_path.file_name().ok_or_else(|| RLibError::BuildStartposError("Game exe name not found.".to_owned()))?.to_string_lossy();

            let mut command = Command::new("cmd");
            command.arg("/C");
            command.arg("start");
            command.arg("/wait");
            command.arg("/d");
            command.arg(game_path.to_string_lossy().replace('\\', "/"));
            command.arg(exe_name.to_string());
            command.arg("temp_file.txt;");

            let _ = command.output()?;

            // Restore the previous user script (or delete ours if there was none).
            let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
            let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");
            if uspb.is_file() {
                std::fs::copy(uspb, uspa)?;
            }

            else if uspa.is_file() {
                std::fs::remove_file(uspa)?;
            }

        // Rome 2: pass the commands directly on the command line (raw, unquoted) and don't wait.
        } else if game.key() == KEY_ROME_2 {
            let exe_path = game.executable_path(game_path).ok_or_else(|| RLibError::BuildStartposError("Game exe path not found.".to_owned()))?;
            let exe_name = exe_path.file_name().ok_or_else(|| RLibError::BuildStartposError("Game exe name not found.".to_owned()))?.to_string_lossy();

            let mut command = Command::new("cmd");
            command.arg("/C");
            command.arg("start");
            command.arg("/d");
            command.arg(game_path.to_string_lossy().replace('\\', "/"));
            command.arg(exe_name.to_string());
            command.arg("temp_file.txt;");

            // raw_arg bypasses cmd's argument quoting, which the game needs here.
            #[cfg(target_os = "windows")] {
                use std::os::windows::process::CommandExt;

                command.raw_arg(extra_folders);
                command.raw_arg(user_script_contents.replace("\n", " "));
            }

            command.spawn()?;
        } else {
            // Everything else launches through its normal (Steam) launch command.
            match game.game_launch_command(game_path) {
                Ok(command) => { let _ = open::that(command); },
                _ => return Err(RLibError::BuildStartposError("The currently selected game cannot be launched from Steam.".to_owned())),
            }
        }

        Ok(())
    }
3280
3281 pub fn build_starpos_post(&self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, game_path: &Path, asskit_path: Option<PathBuf>,campaign_id: &str, process_hlp_spd_data: bool, cleanup_mode: bool, sub_start_pos: &[String]) -> Result<Vec<ContainerPath>> {
3288
3289 let map_names = if process_hlp_spd_data {
3291 self.db_values_from_table_name_and_column_name_for_value(Some(packs), "campaigns_tables", "campaign_name", "map_name", true, true)
3292 } else {
3293 HashMap::new()
3294 };
3295
3296 let pack_file = match pack_key {
3298 Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
3299 None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
3300 };
3301
3302 let mut startpos_failed = false;
3303 let mut sub_startpos_failed = vec![];
3304 let mut hlp_failed = false;
3305 let mut spd_failed = false;
3306
3307 if let Some(data) = START_POS_WORKAROUND_THREAD.write().unwrap().as_mut() {
3309 let (sender, handle) = data.remove(0);
3310 let _ = sender.send(true);
3311 let _ = handle.join();
3312 }
3313
3314 *START_POS_WORKAROUND_THREAD.write().unwrap() = None;
3315
3316 if !game_path.is_dir() {
3317 return Err(RLibError::BuildStartposError("Game path incorrect. Fix it in the settings and try again.".to_owned()));
3318 }
3319
3320 let game_data_path = game.data_path(game_path)?;
3321
3322 if GAMES_NEEDING_VICTORY_OBJECTIVES.contains(&game.key()) {
3324
3325 let mut game_campaign_path = game_data_path.to_path_buf();
3327 game_campaign_path.push(campaign_id);
3328 if game_campaign_path.is_dir() {
3329 let _ = std::fs::remove_dir_all(game_campaign_path);
3330 }
3331 }
3332
3333 let config_path = game.config_path(game_path).ok_or(RLibError::BuildStartposError("Error getting the game's config path.".to_owned()))?;
3334 let scripts_path = config_path.join("scripts");
3335 if !scripts_path.is_dir() {
3336 DirBuilder::new().recursive(true).create(&scripts_path)?;
3337 }
3338
3339 let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
3341 let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");
3342 if uspb.is_file() {
3343 std::fs::copy(uspb, uspa)?;
3344 }
3345
3346 else if uspa.is_file() {
3348 std::fs::remove_file(uspa)?;
3349 }
3350
3351 let mut added_paths = vec![];
3352
3353 let starpos_paths = match game.key() {
3355 KEY_PHARAOH_DYNASTIES |
3356 KEY_PHARAOH |
3357 KEY_WARHAMMER_3 |
3358 KEY_TROY |
3359 KEY_THREE_KINGDOMS |
3360 KEY_WARHAMMER_2 |
3361 KEY_WARHAMMER => {
3362 if sub_start_pos.is_empty() {
3363 vec![game_data_path.join(format!("campaigns/{campaign_id}/startpos.esf"))]
3364 } else {
3365 let mut paths = vec![];
3366 for sub in sub_start_pos {
3367 paths.push(game_data_path.join(format!("campaigns/{campaign_id}/startpos_{sub}.esf")));
3368
3369 }
3370 paths
3371 }
3372 }
3373 KEY_THRONES_OF_BRITANNIA |
3374 KEY_ATTILA => vec![config_path.join(format!("maps/campaigns/{campaign_id}/startpos.esf"))],
3375
3376 KEY_ROME_2 => {
3378 match asskit_path {
3379 Some(asskit_path) => {
3380 if !asskit_path.is_dir() {
3381 return Err(RLibError::BuildStartposError("Assembly Kit path is not a valid folder.".to_owned()));
3382 }
3383
3384 vec![asskit_path.join(format!("working_data/campaigns/{campaign_id}/startpos.esf"))]
3385 },
3386 None => return Err(RLibError::BuildStartposError("Assembly Kit path not provided.".to_owned())),
3387 }
3388 },
3389
3390 KEY_SHOGUN_2 |
3393 KEY_NAPOLEON |
3394 KEY_EMPIRE => vec![game_data_path.join(format!("campaigns/{campaign_id}/startpos.esf"))],
3395 _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
3396 };
3397
3398 let starpos_paths_pack = if sub_start_pos.is_empty() {
3399 vec![format!("campaigns/{}/startpos.esf", campaign_id)]
3400 } else {
3401 let mut paths = vec![];
3402 for sub in sub_start_pos {
3403 paths.push(format!("campaigns/{campaign_id}/startpos_{sub}.esf"));
3404 }
3405 paths
3406 };
3407
3408 if !cleanup_mode {
3409 for (index, starpos_path) in starpos_paths.iter().enumerate() {
3410 if !starpos_path.is_file() {
3411 if sub_start_pos.is_empty() {
3412 startpos_failed = true;
3413 } else {
3414 sub_startpos_failed.push(sub_start_pos[index].to_owned());
3415 }
3416 } else {
3417
3418 let mut rfile = RFile::new_from_file_path(starpos_path)?;
3419 rfile.set_path_in_container_raw(&starpos_paths_pack[index]);
3420 rfile.load()?;
3421 rfile.guess_file_type()?;
3422
3423 added_paths.push(pack_file.insert(rfile).map(|x| x.unwrap())?);
3424 }
3425 }
3426 }
3427
3428 if game.key() != KEY_THRONES_OF_BRITANNIA &&
3434 game.key() != KEY_ATTILA &&
3435 game.key() != KEY_SHOGUN_2 {
3436
3437 for starpos_path in &starpos_paths {
3438 let file_name = starpos_path.file_name().unwrap().to_string_lossy().to_string();
3439 let file_name_bak = file_name + ".bak";
3440
3441 let mut starpos_path_bak = starpos_path.to_path_buf();
3442 starpos_path_bak.set_file_name(file_name_bak);
3443
3444 if starpos_path_bak.is_file() {
3445 std::fs::copy(&starpos_path_bak, starpos_path)?;
3446 std::fs::remove_file(starpos_path_bak)?;
3447 }
3448 }
3449 }
3450
3451 if game.key() == KEY_SHOGUN_2 {
3453 for starpos_path in &starpos_paths {
3454 if starpos_path.is_file() {
3455 std::fs::remove_file(starpos_path)?;
3456 }
3457 }
3458 }
3459
3460 if process_hlp_spd_data {
3462 if let Some(map_name) = map_names.get(campaign_id) {
3463
3464 let hlp_path = match game.key() {
3466 KEY_PHARAOH_DYNASTIES |
3467 KEY_PHARAOH |
3468 KEY_WARHAMMER_3 |
3469 KEY_TROY |
3470 KEY_THREE_KINGDOMS |
3471 KEY_WARHAMMER_2 |
3472 KEY_WARHAMMER => game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf")),
3473 KEY_THRONES_OF_BRITANNIA |
3474 KEY_ATTILA => config_path.join(format!("maps/campaign_maps/{map_name}/hlp_data.esf")),
3475 KEY_ROME_2 => game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf")),
3476 _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
3477 };
3478
3479 let hlp_path_pack = format!("campaign_maps/{map_name}/hlp_data.esf");
3480
3481 if !cleanup_mode {
3482
3483 if !hlp_path.is_file() {
3484 hlp_failed = true;
3485 } else {
3486
3487 let mut rfile_hlp = RFile::new_from_file_path(&hlp_path)?;
3488 rfile_hlp.set_path_in_container_raw(&hlp_path_pack);
3489 rfile_hlp.load()?;
3490 rfile_hlp.guess_file_type()?;
3491
3492 added_paths.push(pack_file.insert(rfile_hlp).map(|x| x.unwrap())?);
3493 }
3494 }
3495
3496 if game.key() != KEY_THRONES_OF_BRITANNIA &&
3498 game.key() != KEY_ATTILA {
3499
3500 let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
3501
3502 if hlp_path_bak.is_file() {
3503 std::fs::copy(&hlp_path_bak, hlp_path)?;
3504 std::fs::remove_file(hlp_path_bak)?;
3505 }
3506 }
3507
3508 if game.key() != KEY_THRONES_OF_BRITANNIA &&
3510 game.key() != KEY_ATTILA &&
3511 game.key() != KEY_ROME_2 {
3512
3513 let spd_path = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf"));
3514 let spd_path_pack = format!("campaign_maps/{map_name}/spd_data.esf");
3515
3516 if !cleanup_mode {
3517
3518 if !spd_path.is_file() {
3519 spd_failed = true;
3520 } else {
3521
3522 let mut rfile_spd = RFile::new_from_file_path(&spd_path)?;
3523 rfile_spd.set_path_in_container_raw(&spd_path_pack);
3524 rfile_spd.load()?;
3525 rfile_spd.guess_file_type()?;
3526
3527 added_paths.push(pack_file.insert(rfile_spd).map(|x| x.unwrap())?);
3528 }
3529 }
3530
3531 let spd_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf.bak"));
3532 if spd_path_bak.is_file() {
3533 std::fs::copy(&spd_path_bak, spd_path)?;
3534 std::fs::remove_file(spd_path_bak)?;
3535 }
3536 }
3537 }
3538 }
3539
3540 let mut error = String::new();
3541 if startpos_failed || (!sub_start_pos.is_empty() && !sub_startpos_failed.is_empty()) || hlp_failed || spd_failed {
3542 error.push_str("<p>One or more files failed to generate:</p><ul>")
3543 }
3544 if startpos_failed {
3545 error.push_str("<li>Startpos file failed to generate.</li>");
3546 }
3547
3548 for sub_failed in &sub_startpos_failed {
3549 error.push_str(&format!("<li>\"{sub_failed}\" Startpos file failed to generate.</li>"));
3550 }
3551
3552 if hlp_failed {
3553 error.push_str("<li>HLP file failed to generate.</li>");
3554 }
3555
3556 if spd_failed {
3557 error.push_str("<li>SPD file failed to generate.</li>");
3558 }
3559
3560 if startpos_failed || hlp_failed || spd_failed {
3561 error.push_str("</ul><p>No files were added and the related files were restored to their pre-build state. Check your tables are correct before trying to generate them again.</p>")
3562 }
3563
3564 if error.is_empty() {
3565 Ok(added_paths)
3566 } else {
3567 Err(RLibError::BuildStartposError(error))
3568 }
3569 }
3570
3571 pub fn import_from_ak(&self, table_name: &str, schema: &Schema) -> Result<DB> {
3575 let definition = if let Some(definitions) = schema.definitions_by_table_name_cloned(table_name) {
3576 if !definitions.is_empty() {
3577 definitions[0].clone()
3578 } else {
3579 return Err(RLibError::DecodingDBNoDefinitionsFound)
3580 }
3581 } else {
3582 return Err(RLibError::DecodingDBNoDefinitionsFound)
3583 };
3584
3585 if let Some(ak_file) = self.asskit_only_db_tables().get(table_name) {
3587 let mut real_table = ak_file.clone();
3588 real_table.set_definition(&definition);
3589 Ok(real_table)
3590 } else {
3591 Err(RLibError::AssemblyKitTableNotFound(table_name.to_owned()))
3592 }
3593 }
3594
3595 pub fn insert_loc_as_vanilla_loc(&mut self, rfile: RFile) {
3603 let path = rfile.path_in_container_raw().to_owned();
3604 self.vanilla_files.insert(path.to_owned(), rfile);
3605 self.vanilla_locs.insert(path);
3606 }
3607
3608 pub fn add_recursive_lookups_to_definition(&self, schema: &Schema, definition: &mut Definition, table_name: &str) {
3612 let schema_patches = definition.patches().clone();
3613
3614 for field in definition.fields_mut().iter_mut() {
3615
3616 if let Some(lookup_data_old) = field.lookup(Some(&schema_patches)) {
3618 let mut lookup_data = vec![];
3619
3620 if !lookup_data_old.is_empty() {
3622
3623 let table_name = if let Some(table_name) = table_name.strip_suffix("_tables") {
3624 table_name.to_owned()
3625 } else {
3626 table_name.to_owned()
3627 };
3628
3629 for lookup_data_old in &lookup_data_old {
3630 let lookup_string = format!("{}#{}#{}", table_name, field.name(), lookup_data_old);
3631 self.add_recursive_lookups(schema, &schema_patches, lookup_data_old, &mut lookup_data, &lookup_string, &table_name);
3632 }
3633
3634 }
3635
3636 if let Some((ref_table_name, ref_column)) = field.is_reference(Some(&schema_patches)) {
3638 for lookup_data_old in &lookup_data_old {
3639 let lookup_string = format!("{ref_table_name}#{ref_column}#{lookup_data_old}");
3640 self.add_recursive_lookups(schema, &schema_patches, lookup_data_old, &mut lookup_data, &lookup_string, &ref_table_name);
3641 }
3642 }
3643
3644 if !lookup_data.is_empty() {
3645 field.set_lookup(Some(lookup_data));
3646 } else {
3647 field.set_lookup(None);
3648 }
3649 }
3650 }
3651 }
3652
    /// Recursive worker for `add_recursive_lookups_to_definition`.
    ///
    /// `lookup` is the column to resolve in `table_name` (passed WITHOUT the "_tables"
    /// suffix; it's re-appended here). `lookup_string` is the chain built so far
    /// (`table#column#lookup`, with further hops joined by `:`). When a chain reaches a
    /// column that is neither a reference with its own lookups nor resolvable further,
    /// the finished chain is appended to `lookup_data` (deduplicated).
    ///
    /// NOTE(review): if `db_data` fails for the table, `finish_lookup` stays false and the
    /// chain is silently dropped — presumably intended; confirm.
    fn add_recursive_lookups(&self,
        schema: &Schema,
        schema_patches: &HashMap<String, HashMap<String, String>>,
        lookup: &str,
        lookup_data: &mut Vec<String>,
        lookup_string: &str,
        table_name: &str
    ) {
        let mut finish_lookup = false;
        let table_name = table_name.to_string() + "_tables";
        if let Ok(ref_tables) = self.db_data(&table_name, true, true) {

            // Collect the definitions of every decoded DB file of this table, so we can
            // pick the newest one known to the schema.
            let candidates = ref_tables.iter()
                .filter_map(|rfile| rfile.decoded().ok())
                .filter_map(|decoded| if let RFileDecoded::DB(db) = decoded {
                    Some(db.definition().clone())
                } else {
                    None
                })
                .collect::<Vec<_>>();

            if let Some(definition) = schema.definition_newer(&table_name, &candidates) {

                if let Some(pos) = definition.column_position_by_name(lookup) {
                    if let Some(field) = definition.fields_processed().get(pos) {

                        // If the target column is itself a reference with lookups, keep
                        // following the chain; otherwise this hop ends the chain.
                        if let Some((ref_table_name, ref_column)) = field.is_reference(Some(schema_patches)) {
                            if let Some(lookups) = field.lookup(Some(schema_patches)) {
                                for lookup in &lookups {
                                    // Extend the chain with this hop before recursing.
                                    let lookup_string = format!("{lookup_string}:{ref_table_name}#{ref_column}#{lookup}");

                                    self.add_recursive_lookups(schema, schema_patches, lookup, lookup_data, &lookup_string, &ref_table_name);
                                }
                            } else {
                                finish_lookup = true;
                            }
                        } else {
                            finish_lookup = true;
                        }
                    } else {
                        finish_lookup = true;
                    }
                }

                // Localised fields are not in the processed columns, but still end a chain.
                else if definition.localised_fields().iter().any(|x| x.name() == lookup) {
                    finish_lookup = true;
                }
            } else {
                // No definition known for the table: treat the chain as finished as-is.
                finish_lookup = true;
            }
        }

        // Append the finished chain, avoiding duplicates.
        if finish_lookup && !lookup_data.iter().any(|x| x == lookup_string) {
            lookup_data.push(lookup_string.to_owned());
        }
    }
3710}