Skip to main content

rpfm_extensions/dependencies/
mod.rs

1//---------------------------------------------------------------------------//
2// Copyright (c) 2017-2026 Ismael Gutiérrez González. All rights reserved.
3//
4// This file is part of the Rusted PackFile Manager (RPFM) project,
5// which can be found here: https://github.com/Frodo45127/rpfm.
6//
7// This file is licensed under the MIT license, which can be found here:
8// https://github.com/Frodo45127/rpfm/blob/master/LICENSE.
9//---------------------------------------------------------------------------//
10
11//! Dependencies management system for Total War modding.
12//!
13//! This module provides a comprehensive system for managing and querying dependencies
14//! between mods and vanilla game files. It serves as a central cache for all reference
15//! data needed when editing DB tables, running diagnostics, or performing other
16//! operations that require knowledge of game data.
17//!
18//! # Overview
19//!
20//! The [`Dependencies`] struct is the core of this module. It manages three categories
21//! of data:
22//!
23//! 1. **Vanilla Files**: Data from the game's official PackFiles, cached on disk for
24//!    fast loading. This includes all DB tables, Loc files, and other resources.
25//!
26//! 2. **Vanilla Loose Files**: Files in the game's `/data` folder that aren't packed,
27//!    such as user-placed mods or extracted files.
28//!
29//! 3. **Parent Mod Files**: Files from mods that the current pack depends on,
30//!    loaded recursively based on the pack's dependency list.
31//!
32//! # Cache Structure
33//!
34//! The dependencies cache is stored on disk in three `.pak` files for efficient
35//! parallel loading:
36//!
37//! - `.pak1`: Build metadata and half of the vanilla files
38//! - `.pak2`: Other half of vanilla files
39//! - `.pak3`: Table/Loc indices, folder structure, and Assembly Kit tables
40//!
41//! The cache is versioned and includes a build date to automatically invalidate
42//! when game files change or RPFM is updated.
43//!
44//! # Reference Data
45//!
46//! A key feature is building reference data for DB table foreign key relationships.
47//! When a column references another table (e.g., `unit_key` referencing `main_units`),
48//! the dependencies system provides:
49//!
50//! - All valid values for the referenced column
51//! - Lookup data for displaying human-readable names
52//! - Detection of Assembly Kit-only tables
53//! - Localised column value resolution
54//!
55//! # Assembly Kit Integration
56//!
57//! Some tables exist only in the Assembly Kit and not in game files. These are
58//! processed separately and stored in the cache as "asskit-only" tables. They're
59//! useful for reference lookups but shouldn't be used as templates for new tables.
60//!
61//! # Usage Example
62//!
63//! ```ignore
64//! use rpfm_extensions::dependencies::Dependencies;
65//! use rpfm_lib::schema::Schema;
66//!
67//! // Load or generate dependencies cache
68//! let mut deps = Dependencies::default();
69//! deps.rebuild(
70//!     &Some(schema),
71//!     &["parent_mod.pack".to_string()],
72//!     Some(cache_path),
73//!     &game_info,
74//!     game_path,
75//!     secondary_path,
76//! )?;
77//!
78//! // Query a specific file
79//! let file = deps.file("db/units_tables/data__", true, true, false)?;
80//!
81//! // Get all DB tables of a specific type
82//! let units = deps.db_data("units_tables", true, true)?;
83//! ```
84//!
85//! # Startpos Generation
86//!
87//! This module provides functions to build startpos files for campaign mods:
88//!
89//! - `build_starpos_pre`: Prepares and launches the game with a user script that
90//!   triggers startpos generation for the specified campaign
91//! - `build_starpos_post`: Called after the game closes to import the generated
92//!   startpos file back into the pack
93//!
94//! The process handles game-specific quirks like different output paths, victory
95//! objectives extraction, and HLP/SPD data generation.
96
97use getset::{Getters, MutGetters};
98use itertools::{Either, Itertools};
99use log::{info, error};
100use rayon::prelude::*;
101use serde_derive::{Serialize, Deserialize};
102
103use std::borrow::Cow;
104use std::collections::{BTreeMap, HashMap, HashSet};
105use std::fs::{DirBuilder, File};
106use std::io::{BufReader, BufWriter, Read, Write};
107use std::sync::mpsc::channel;
108use std::path::{Path, PathBuf};
109use std::process::Command;
110use std::{thread, thread::{spawn, JoinHandle}};
111use std::time::Duration;
112
113use rpfm_lib::binary::WriteBytes;
114use rpfm_lib::error::{Result, RLibError};
115use rpfm_lib::files::{Container, ContainerPath, db::DB, DecodeableExtraData, FileType, pack::Pack, RFile, RFileDecoded, table::Table};
116use rpfm_lib::games::{GameInfo, supported_games::*};
117use rpfm_lib::integrations::assembly_kit::table_data::RawTable;
118use rpfm_lib::schema::{Definition, DefinitionPatch, Field, FieldType, Schema};
119use rpfm_lib::utils::{current_time, files_from_subdir, last_modified_time_from_files, starts_with_case_insensitive};
120
121use crate::optimizer::{OptimizableContainer, OptimizerOptions};
122use crate::START_POS_WORKAROUND_THREAD;
123use crate::VERSION;
124
/// Table name for the key deletes table used in datacoring.
///
/// The `twad_key_deletes` table is used to remove specific rows from vanilla tables
/// without replacing the entire table. This is the preferred method for removing
/// vanilla content as it's more compatible with other mods.
pub const KEY_DELETES_TABLE_NAME: &str = "twad_key_deletes_tables";

/// Filename for the user script file used in startpos generation.
///
/// This script is placed where the game reads it so launching the game triggers
/// the startpos export process.
pub const USER_SCRIPT_FILE_NAME: &str = "user.script.txt";

/// Path to the victory objectives file within a pack.
pub const VICTORY_OBJECTIVES_FILE_NAME: &str = "db/victory_objectives.txt";

/// Filename used for the victory objectives file once extracted from a pack to disk.
pub const VICTORY_OBJECTIVES_EXTRACTED_FILE_NAME: &str = "victory_objectives.txt";

/// List of games that require victory objectives file handling.
///
/// These games need special processing for the `victory_objectives.txt` file
/// during startpos generation.
pub const GAMES_NEEDING_VICTORY_OBJECTIVES: [&str; 9] = [
    KEY_PHARAOH_DYNASTIES,
    KEY_PHARAOH,
    KEY_TROY,
    KEY_THREE_KINGDOMS,
    KEY_WARHAMMER_2,
    KEY_WARHAMMER,
    KEY_THRONES_OF_BRITANNIA,
    KEY_ATTILA,
    KEY_ROME_2
];
156
157//-------------------------------------------------------------------------------//
158//                              Enums & Structs
159//-------------------------------------------------------------------------------//
160
/// Central dependencies manager for all reference data relevant to a Pack.
///
/// This struct caches and manages all data needed for reference lookups, diagnostics,
/// and other operations that require knowledge of vanilla game data or parent mods.
///
/// # Data Categories
///
/// The dependencies are organized into three persistence levels:
///
/// ## Serialized to Disk (cached)
///
/// These fields are saved to `.pak` files and only regenerated when the game
/// files change or RPFM is updated:
///
/// - `vanilla_files` - All files from CA's official PackFiles
/// - `vanilla_tables` - Index of DB table paths by table name
/// - `vanilla_locs` - Set of Loc file paths
/// - `vanilla_folders` - Set of folder paths for existence checks
/// - `vanilla_paths` - Case-insensitive path lookup map
/// - `asskit_only_db_tables` - Tables only present in Assembly Kit
///
/// ## Regenerated on Rebuild
///
/// These fields are rebuilt each time `rebuild()` is called, as they depend
/// on the current environment:
///
/// - `vanilla_loose_*` - Files from the game's `/data` folder (not in packs)
/// - `parent_*` - Files from parent mods the current pack depends on
///
/// ## Runtime Cache
///
/// Built on-demand during editing and not persisted:
///
/// - `local_tables_references` - Cached reference data for edited tables
/// - `localisation_data` - Merged Loc data for quick lookups
///
/// NOTE(review): the serialized fields are written/read with `bitcode` (see [`Dependencies::load`]),
/// which is presumably sensitive to field order — confirm before reordering serialized fields.
#[derive(Default, Debug, Clone, Getters, Serialize, Deserialize)]
#[getset(get = "pub")]
pub struct Dependencies {

    /// Date of the generation of this dependencies cache. For checking if it needs an update.
    build_date: u64,

    /// Version of the program used to generate the dependencies, so they're properly invalidated on update.
    version: String,

    /// Data to quickly load loose files as part of the dependencies.
    ///
    /// Not serialized, regenerated on rebuild because these can frequently change.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_files: HashMap<String, RFile>,

    /// Data to quickly load CA dependencies from disk.
    ///
    /// Keyed by the file's raw path inside its container.
    vanilla_files: HashMap<String, RFile>,

    /// Data to quickly load dependencies from parent mods from disk.
    ///
    /// Not serialized, regenerated from parent Packs on rebuild.
    #[serde(skip_serializing, skip_deserializing)]
    parent_files: HashMap<String, RFile>,

    /// List of DB tables on the CA loose files. Not really used, but just in case.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_tables: HashMap<String, Vec<String>>,

    /// List of DB tables on the CA files.
    ///
    /// Keyed by table name, with all container paths belonging to that table as values.
    vanilla_tables: HashMap<String, Vec<String>>,

    /// List of DB tables on the parent files.
    ///
    /// Not serialized, regenerated from parent Packs on rebuild.
    #[serde(skip_serializing, skip_deserializing)]
    parent_tables: HashMap<String, Vec<String>>,

    /// List of Loc tables on the CA loose files. Not really used, but just in case.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_locs: HashSet<String>,

    /// List of Loc tables on the CA files.
    vanilla_locs: HashSet<String>,

    /// List of Loc tables on the parent files.
    ///
    /// Not serialized, regenerated from parent Packs on rebuild.
    #[serde(skip_serializing, skip_deserializing)]
    parent_locs: HashSet<String>,

    /// Data to quickly check if a path exists in the vanilla loose files.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_folders: HashSet<String>,

    /// Data to quickly check if a path exists in the vanilla files.
    vanilla_folders: HashSet<String>,

    /// Data to quickly check if a path exists in the parent mod files.
    #[serde(skip_serializing, skip_deserializing)]
    parent_folders: HashSet<String>,

    /// List of vanilla loose paths lowercased, with their casing counterparts. To quickly find files.
    #[serde(skip_serializing, skip_deserializing)]
    vanilla_loose_paths: HashMap<String, Vec<String>>,

    /// List of vanilla paths lowercased, with their casing counterparts. To quickly find files.
    vanilla_paths: HashMap<String, Vec<String>>,

    /// List of parent paths lowercased, with their casing counterparts. To quickly find files.
    ///
    /// Not serialized, regenerated from parent Packs on rebuild.
    #[serde(skip_serializing, skip_deserializing)]
    parent_paths: HashMap<String, Vec<String>>,

    /// Cached data for local tables.
    ///
    /// Keyed by table name; the inner map is keyed by column index (see [`Dependencies::generate_references`]).
    ///
    /// This is for runtime caching, and it must not be serialized to disk.
    #[serde(skip_serializing, skip_deserializing)]
    local_tables_references: HashMap<String, HashMap<i32, TableReferences>>,

    /// Data from all the locs, so we can quickly search for a loc entry.
    ///
    /// Maps loc key -> localised text, merged from all loaded Loc files.
    #[serde(skip_serializing, skip_deserializing)]
    localisation_data: HashMap<String, String>,

    /// DB Files only available on the assembly kit. Usable only for references. Do not use them as the base for new tables.
    asskit_only_db_tables: HashMap<String, DB>,
}
284
/// Reference data for a single column in a DB table.
///
/// When a column has a foreign key reference to another table (or lookup data
/// for display purposes), this struct holds the valid values and their
/// human-readable representations.
///
/// # Example
///
/// For a column referencing `main_units_tables.key`:
/// - `data` would contain entries like `("wh_main_emp_inf_swordsmen", "Empire Swordsmen")`
/// - The key is the actual value stored in the DB
/// - The value is the lookup/display text (if available)
#[derive(Eq, PartialEq, Clone, Default, Debug, Getters, MutGetters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub")]
pub struct TableReferences {

    /// Name of the column these references are for.
    ///
    /// This is primarily for debugging purposes. Do not rely on it for
    /// programmatic column identification.
    field_name: String,

    /// Whether the referenced table exists only in the Assembly Kit.
    ///
    /// When `true`, the reference data comes from Assembly Kit tables rather
    /// than game files. This is useful for diagnostics to identify references
    /// to tables that may not be fully supported.
    referenced_table_is_ak_only: bool,

    /// Whether the referenced column has been localised.
    ///
    /// Some columns that originally contained text are moved to Loc files
    /// when exported from the Assembly Kit (Dave). This flag indicates that
    /// the lookup values should be fetched from localisation data.
    referenced_column_is_localised: bool,

    /// The reference data mapping keys to display values.
    ///
    /// - **Key**: The actual value that can be stored in the column
    /// - **Value**: Human-readable lookup text (may be empty if no lookup defined)
    data: HashMap<String, String>,
}
327
328//-------------------------------------------------------------------------------//
329//                             Implementations
330//-------------------------------------------------------------------------------//
331
332impl Dependencies {
333
334    //-----------------------------------//
335    // Generation and disk IO
336    //-----------------------------------//
337
338    /// This function takes care of rebuilding the whole dependencies cache to be used with a new Pack.
339    ///
340    /// If a file path is passed, the dependencies cache at that path will be used, replacing the currently loaded dependencies cache.
341    /// If a schema is not passed, no tables/locs will be pre-decoded. Make sure to decode them later with [Dependencies::decode_tables].
342    pub fn rebuild(&mut self, schema: &Option<Schema>, parent_pack_names: &[String], file_path: Option<&Path>, game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {
343
344        // If we only want to reload the parent mods, not the full dependencies, we can skip this section.
345        if let Some(file_path) = file_path {
346
347            // First, clear the current data, so we're not left with broken data afterwards if the next operations fail.
348            *self = Self::default();
349
350            // Try to load the binary file and check if it's even valid.
351            let stored_data = Self::load(file_path, schema)?;
352            if !stored_data.needs_updating(game_info, game_path)? {
353                *self = stored_data;
354            }
355        }
356
357        // Clear the table's cached data, to ensure it gets rebuild properly when needed.
358        self.local_tables_references.clear();
359
360        // Load vanilla loose files (from /data).
361        self.load_loose_files(schema, game_info, game_path)?;
362
363        // Load parent mods of the currently loaded Pack.
364        self.load_parent_files(schema, parent_pack_names, game_info, game_path, secondary_path)?;
365
366        // Populate the localisation data.
367        let loc_files = self.loc_data(true, true).unwrap_or_default();
368        let loc_decoded = loc_files.iter()
369            .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
370            .map(|file| file.data())
371            .collect::<Vec<_>>();
372
373        self.localisation_data = loc_decoded.par_iter()
374            .flat_map(|data| data.par_iter()
375                .map(|entry| (entry[0].data_to_string().to_string(), entry[1].data_to_string().to_string()))
376                .collect::<Vec<(_,_)>>()
377            ).collect::<HashMap<_,_>>();
378
379        Ok(())
380    }
381
382    /// This function generates the dependencies cache for the game provided and returns it.
383    pub fn generate_dependencies_cache(schema: &Option<Schema>, game_info: &GameInfo, game_path: &Path, asskit_path: &Option<PathBuf>, ignore_game_files_in_ak: bool) -> Result<Self> {
384        let mut cache = Self::default();
385        cache.build_date = current_time()?;
386        cache.version = VERSION.to_owned();
387        cache.vanilla_files = Pack::read_and_merge_ca_packs(game_info, game_path)?.files().clone();
388
389        let cacheable = cache.vanilla_files.par_iter_mut()
390            .filter_map(|(_, file)| {
391                let _ = file.guess_file_type();
392
393                match file.file_type() {
394                    FileType::DB |
395                    FileType::Loc => Some(file),
396                    _ => None,
397                }
398            })
399            .collect::<Vec<&mut RFile>>();
400
401        cacheable.iter()
402            .for_each(|file| {
403                match file.file_type() {
404                    FileType::DB => {
405                        if let Some(table_name) = file.db_table_name_from_path() {
406                            match cache.vanilla_tables.get_mut(table_name) {
407                                Some(table_paths) => table_paths.push(file.path_in_container_raw().to_owned()),
408                                None => { cache.vanilla_tables.insert(table_name.to_owned(), vec![file.path_in_container_raw().to_owned()]); },
409                            }
410                        }
411                    }
412                    FileType::Loc => {
413                        cache.vanilla_locs.insert(file.path_in_container_raw().to_owned());
414                    }
415                    _ => {}
416                }
417            }
418        );
419
420        cache.vanilla_folders = cache.vanilla_files.par_iter().filter_map(|(path, _)| {
421            let file_path_split = path.split('/').collect::<Vec<&str>>();
422            let folder_path_len = file_path_split.len() - 1;
423            if folder_path_len == 0 {
424                None
425            } else {
426
427                let mut paths = Vec::with_capacity(folder_path_len);
428
429                for (index, folder) in file_path_split.iter().enumerate() {
430                    if index < path.len() - 1 && !folder.is_empty() {
431                        paths.push(file_path_split[0..=index].join("/"))
432                    }
433                }
434
435                Some(paths)
436            }
437        }).flatten().collect::<HashSet<String>>();
438
439        cache.vanilla_files.keys().for_each(|path| {
440            let lower = path.to_lowercase();
441            match cache.vanilla_paths.get_mut(&lower) {
442                Some(paths) => paths.push(path.to_owned()),
443                None => { cache.vanilla_paths.insert(lower, vec![path.to_owned()]); },
444            }
445        });
446
447        // Load vanilla loose files before processing the AK files, so the AK process has these files available.
448        //
449        // It's mainly so any loose loc is used in the bruteforcing process.
450        cache.load_loose_files(&None, game_info, game_path)?;
451
452        // This one can fail, leaving the dependencies with only game data.
453        if let Some(path) = asskit_path {
454            let _ = cache.generate_asskit_only_db_tables(schema, path, *game_info.raw_db_version(), ignore_game_files_in_ak);
455        }
456
457        Ok(cache)
458    }
459
    /// This function generates a "fake" table list with tables only present in the Assembly Kit.
    ///
    /// This works by processing all the tables from the game's raw table folder and turning them into fake decoded tables,
    /// with version -1. That will allow us to use them for dependency checking and for populating combos.
    ///
    /// To keep things fast, only undecoded or missing (from the game files) tables will be included into the PAK2 file.
    fn generate_asskit_only_db_tables(&mut self, schema: &Option<Schema>, raw_db_path: &Path, version: i16, ignore_game_files: bool) -> Result<()> {

        // When requested, skip raw tables that already exist in the game files. The slice drops
        // the 7-char "_tables" suffix, as raw names don't carry it.
        let files_to_ignore = if ignore_game_files {
            self.vanilla_tables.keys().map(|table_name| &table_name[..table_name.len() - 7]).collect::<Vec<_>>()
        } else {
            vec![]
        };
        let raw_tables = RawTable::read_all(raw_db_path, version, &files_to_ignore)?;
        let asskit_only_db_tables = raw_tables.par_iter()
            .map(|x| match schema {
                Some(schema) => {

                    // Drop the last four characters of the raw definition name before re-appending
                    // "_tables". NOTE(review): presumably this strips a fixed 4-char raw-name suffix — confirm.
                    let mut table_name = x.definition.clone().unwrap().name.unwrap().to_owned();
                    table_name.pop();
                    table_name.pop();
                    table_name.pop();
                    table_name.pop();

                    table_name = format!("{table_name}_tables");

                    // Use the newest known schema definition for the table, if any, to decode it.
                    let definition = schema.definitions().get(&table_name).and_then(|x| x.first());

                    x.to_db(definition)
                }
                None => x.to_db(None),
            })
            .collect::<Result<Vec<DB>>>()?;

        // We need to bruteforce loc keys for ak tables here, so locs relations are setup correctly for ak tables.
        let mut asskit_only_db_tables = asskit_only_db_tables.par_iter().map(|table| (table.table_name().to_owned(), table.clone())).collect::<HashMap<String, DB>>();

        let decode_extra_data = DecodeableExtraData::default();
        let extra_data = Some(decode_extra_data);

        // Vanilla loose files: temporarily remove the locs, decode them in parallel, then put them back.
        let mut files = self.vanilla_loose_locs.iter().filter_map(|path| {
            self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
        }).collect::<Vec<_>>();

        files.par_iter_mut().for_each(|(_, file)| {
            let _ = file.decode(&extra_data, true, false);
        });

        self.vanilla_loose_files.par_extend(files);

        // Vanilla files: same remove/decode/re-insert dance as for the loose locs above.
        let mut files = self.vanilla_locs.iter().filter_map(|path| {
            self.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
        }).collect::<Vec<_>>();

        files.par_iter_mut().for_each(|(_, file)| {
            let _ = file.decode(&extra_data, true, false);
        });

        self.vanilla_files.par_extend(files);

        // Bruteforce the loc key order against a throwaway schema, then store the result.
        self.bruteforce_loc_key_order(&mut Schema::default(), None, None, Some(&mut asskit_only_db_tables))?;
        self.asskit_only_db_tables = asskit_only_db_tables;

        Ok(())
    }
525
526    /// This function builds the local db references data for the tables you pass to it from the Packs provided.
527    ///
528    /// Table names must be provided as full names (with *_tables* at the end).
529    ///
530    /// NOTE: This function, like many others, assumes the tables are already decoded in the Packs. If they're not, they'll be ignored.
531    pub fn generate_local_db_references(&mut self, schema: &Schema, packs: &BTreeMap<String, Pack>, table_names: &[String]) {
532
533        let local_tables_references = packs.values()
534            .flat_map(|pack| pack.files_by_type(&[FileType::DB]))
535            .par_bridge()
536            .filter_map(|file| {
537                if let Ok(RFileDecoded::DB(db)) = file.decoded() {
538
539                    // Only generate references for the tables you pass it, or for all if we pass the list of tables empty.
540                    if table_names.is_empty() || table_names.iter().any(|x| x == db.table_name()) {
541                        Some((db.table_name().to_owned(), self.generate_references(schema, db.table_name(), db.definition())))
542                    } else { None }
543                } else { None }
544            }).collect::<HashMap<_, _>>();
545
546        self.local_tables_references.extend(local_tables_references);
547    }
548
549    /// This function builds the local db references data for the table with the definition you pass to and stores it in the cache.
550    pub fn generate_local_definition_references(&mut self, schema: &Schema, table_name: &str, definition: &Definition) {
551        self.local_tables_references.insert(table_name.to_owned(), self.generate_references(schema, table_name, definition));
552    }
553
554    /// This function builds the local db references data for the table with the definition you pass to, and returns it.
555    pub fn generate_references(&self, schema: &Schema, local_table_name: &str, definition: &Definition) -> HashMap<i32, TableReferences> {
556
557        // Trick: before doing this, we modify the definition to include any lookup from any reference,
558        // so we are actually able to catch recursive-like lookups without reading multiple tables.
559        let mut definition = definition.clone();
560        self.add_recursive_lookups_to_definition(schema, &mut definition, local_table_name);
561
562        let patches = Some(definition.patches());
563        let fields_processed = definition.fields_processed();
564
565        // Key deletes works in a different way. For it we have to get the names of all the tables,
566        // then we retrieve the keys data dinamically when selecting in the ui.
567        if local_table_name == KEY_DELETES_TABLE_NAME {
568            let mut hashmap = HashMap::new();
569            let mut references = TableReferences::default();
570            *references.field_name_mut() = "table_name".to_owned();
571
572            for key in schema.definitions().keys() {
573                if key.len() > 7 {
574                    let table_name = key.to_owned().drain(..key.len() - 7).collect::<String>();
575                    references.data.insert(table_name, String::new());
576                }
577            }
578
579            hashmap.insert(1, references);
580            return hashmap;
581        }
582
583        fields_processed.par_iter().enumerate().filter_map(|(column, field)| {
584            match field.is_reference(patches) {
585                Some((ref ref_table, ref ref_column)) => {
586                    if !ref_table.is_empty() && !ref_column.is_empty() {
587                        let ref_table = format!("{ref_table}_tables");
588
589                        // Get his lookup data if it has it.
590                        let lookup_data = if let Some(ref data) = field.lookup_no_patch() { data.to_vec() } else { Vec::with_capacity(0) };
591                        let mut references = TableReferences::default();
592                        *references.field_name_mut() = field.name().to_owned();
593
594                        let fake_found = self.db_reference_data_from_asskit_tables(&mut references, (&ref_table, ref_column, &lookup_data));
595                        let real_found = self.db_reference_data_from_vanilla_and_modded_tables(&mut references, (&ref_table, ref_column, &lookup_data));
596
597                        if fake_found && real_found.is_none() {
598                            references.referenced_table_is_ak_only = true;
599                        }
600
601                        if let Some(ref_definition) = real_found {
602                            if ref_definition.localised_fields().iter().any(|x| x.name() == ref_column) {
603                                references.referenced_column_is_localised = true;
604                            }
605                        }
606
607                        Some((column as i32, references))
608                    } else { None }
609                },
610
611                // In the fallback case (no references) we still need to check for lookup data within our table and the locs.
612                None => {
613                    if let Some(ref lookup_data) = field.lookup_no_patch() {
614
615                        // Only single-keyed tables can have lookups.
616                        if field.is_key(patches) && fields_processed.iter().filter(|x| x.is_key(patches)).count() == 1 {
617                            let ref_table = local_table_name;
618                            let ref_column = field.name();
619
620                            // Get his lookup data if it has it.
621                            let mut references = TableReferences::default();
622                            *references.field_name_mut() = field.name().to_owned();
623
624                            let fake_found = self.db_reference_data_from_asskit_tables(&mut references, (ref_table, ref_column, lookup_data));
625                            let real_found = self.db_reference_data_from_vanilla_and_modded_tables(&mut references, (ref_table, ref_column, lookup_data));
626
627                            if fake_found && real_found.is_none() {
628                                references.referenced_table_is_ak_only = true;
629                            }
630
631                            if let Some(ref_definition) = real_found {
632                                if ref_definition.localised_fields().iter().any(|x| x.name() == ref_column) {
633                                    references.referenced_column_is_localised = true;
634                                }
635                            }
636
637                            Some((column as i32, references))
638                        } else { None }
639                    } else { None }
640                },
641            }
642        }).collect::<HashMap<_, _>>()
643    }
644
    /// This function tries to load dependencies from the path provided.
    ///
    /// The cache is expected to live in three sibling files next to `file_path`, with the
    /// `pak1`/`pak2`/`pak3` extensions (see `save` for what goes into each part). If a
    /// `schema` is provided, the cached DB/Loc files are decoded after loading; otherwise
    /// decoding them later is the caller's responsibility (see `decode_tables`).
    ///
    /// # Errors
    ///
    /// Returns an error if any of the three cache files is missing, unreadable, or fails
    /// to deserialize. NOTE(review): a panic inside one of the reader threads propagates
    /// through the `join().unwrap()` calls below.
    pub fn load(file_path: &Path, schema: &Option<Schema>) -> Result<Self> {

        // Optimization: Instead of a big file, we split the dependencies in 3 files. Why?
        // Because bitcode is not multithreaded and, while reading 3 medium files is slower than a big one,
        // deserializing 3 medium files in 3 separate threads is way faster than 1 big file in 1 thread.
        //
        // `.pak1` holds the build date, the cache version string, and the first half of the vanilla files.
        let mut file_path_1 = file_path.to_path_buf();
        let handle_1: JoinHandle<Result<(u64, String, Vec<RFile>)>> = spawn(move || {
            file_path_1.set_extension("pak1");
            let mut file = BufReader::new(File::open(&file_path_1)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            // Never deserialize directly from the file. It's bloody slow!!!
            bitcode::deserialize(&data).map_err(From::from)
        });

        // `.pak2` holds the second half of the vanilla files.
        let mut file_path_2 = file_path.to_path_buf();
        let handle_2: JoinHandle<Result<Vec<RFile>>> = spawn(move || {
            file_path_2.set_extension("pak2");
            let mut file = BufReader::new(File::open(&file_path_2)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            // Never deserialize directly from the file. It's bloody slow!!!
            bitcode::deserialize(&data).map_err(From::from)
        });

        // `.pak3` holds the table/loc/folder/path indexes and the Assembly-Kit-only tables.
        let mut file_path_3 = file_path.to_path_buf();
        let handle_3: JoinHandle<Result<(HashMap<String, Vec<String>>, HashSet<String>, HashSet<String>, HashMap<String, Vec<String>>, HashMap<String, DB>)>> = spawn(move || {
            file_path_3.set_extension("pak3");
            let mut file = BufReader::new(File::open(&file_path_3)?);
            let mut data = Vec::with_capacity(file.get_ref().metadata()?.len() as usize);
            file.read_to_end(&mut data)?;

            // Never deserialize directly from the file. It's bloody slow!!!
            bitcode::deserialize(&data).map_err(From::from)
        });

        // Get the thread's data in reverse, as 1 and 2 are actually the slower to process.
        let mut dependencies = Self::default();
        let data_3 = handle_3.join().unwrap()?;
        let data_2 = handle_2.join().unwrap()?;
        let data_1 = handle_1.join().unwrap()?;

        // The vanilla file list is stored in a Vec format instead of a hashmap, because a vec can be splited,
        // and that list is more than 100mb long in some games. Here we turn it back to HashMap and merge it.
        let mut vanilla_files: HashMap<_,_> = data_1.2.into_par_iter().map(|file| (file.path_in_container_raw().to_owned(), file)).collect();
        vanilla_files.par_extend(data_2.into_par_iter().map(|file| (file.path_in_container_raw().to_owned(), file)));

        dependencies.build_date = data_1.0;
        dependencies.version = data_1.1;
        dependencies.vanilla_files = vanilla_files;
        dependencies.vanilla_tables = data_3.0;
        dependencies.vanilla_locs = data_3.1;
        dependencies.vanilla_folders = data_3.2;
        dependencies.vanilla_paths = data_3.3;
        dependencies.asskit_only_db_tables = data_3.4;

        // Only decode the tables if we passed a schema. If not, it's responsability of the user to decode them later.
        if let Some(schema) = schema {
            let mut decode_extra_data = DecodeableExtraData::default();
            decode_extra_data.set_schema(Some(schema));
            let extra_data = Some(decode_extra_data);

            // Pull the DB/Loc files out of the map so they can be decoded mutably in parallel,
            // then put them back. Decode failures are ignored: the file is re-inserted undecoded.
            let mut files = dependencies.vanilla_locs.iter().chain(dependencies.vanilla_tables.values().flatten()).filter_map(|path| {
                dependencies.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
            }).collect::<Vec<_>>();

            files.par_iter_mut().for_each(|(_, file)| {
                let _ = file.decode(&extra_data, true, false);
            });

            dependencies.vanilla_files.par_extend(files);
        }

        Ok(dependencies)
    }
723
724    /// This function saves a dependencies cache to the provided path.
725    pub fn save(&mut self, file_path: &Path) -> Result<()> {
726        let mut folder_path = file_path.to_owned();
727        folder_path.pop();
728        DirBuilder::new().recursive(true).create(&folder_path)?;
729
730        let mut file_path_1 = file_path.to_path_buf();
731        let mut file_path_2 = file_path.to_path_buf();
732        let mut file_path_3 = file_path.to_path_buf();
733
734        file_path_1.set_extension("pak1");
735        file_path_2.set_extension("pak2");
736        file_path_3.set_extension("pak3");
737
738        let mut file_1 = File::create(&file_path_1)?;
739        let mut file_2 = File::create(&file_path_2)?;
740        let mut file_3 = File::create(&file_path_3)?;
741
742        // Split the vanilla file's list in half and turn it into a vec, so it's faster when loading.
743        // NOTE: While the HashMap -> Vec conversion only keeping values thing is slower to read
744        // than serializing/loading the keys directly, it saves about 40mb of data on disk.
745        let mut vanilla_files_1 = self.vanilla_files.par_iter().map(|(_, b)| b.clone()).collect::<Vec<RFile>>();
746        let vanilla_files_2 = vanilla_files_1.split_off(self.vanilla_files.len() / 2);
747
748        // Never serialize directly into the file. It's bloody slow!!!
749        let serialized_1: Vec<u8> = bitcode::serialize(&(&self.build_date, &self.version, &vanilla_files_1))?;
750        let serialized_2: Vec<u8> = bitcode::serialize(&vanilla_files_2)?;
751        let serialized_3: Vec<u8> = bitcode::serialize(&(&self.vanilla_tables, &self.vanilla_locs, &self.vanilla_folders, &self.vanilla_paths, &self.asskit_only_db_tables))?;
752
753        file_1.write_all(&serialized_1).map_err(RLibError::from)?;
754        file_2.write_all(&serialized_2).map_err(RLibError::from)?;
755        file_3.write_all(&serialized_3).map_err(From::from)
756    }
757
758    /// This function is used to check if the game files used to generate the dependencies cache have changed, requiring an update.
759    pub fn needs_updating(&self, game_info: &GameInfo, game_path: &Path) -> Result<bool> {
760        let ca_paths = game_info.ca_packs_paths(game_path)?;
761        let last_date = last_modified_time_from_files(&ca_paths)?;
762        Ok(last_date > self.build_date || self.version != VERSION)
763    }
764
765    /// This function loads all the loose files within the game's /data folder.
766    fn load_loose_files(&mut self, schema: &Option<Schema>, game_info: &GameInfo, game_path: &Path) -> Result<()> {
767        self.vanilla_loose_files.clear();
768        self.vanilla_loose_tables.clear();
769        self.vanilla_loose_locs.clear();
770        self.vanilla_loose_folders.clear();
771        self.vanilla_loose_paths.clear();
772
773        let game_data_path = game_info.data_path(game_path)?;
774        let game_data_path_str = game_data_path.to_string_lossy().replace('\\', "/");
775
776        self.vanilla_loose_files = files_from_subdir(&game_data_path, true)?
777            .into_par_iter()
778            .filter_map(|path| {
779                let mut path = path.to_string_lossy().replace('\\', "/");
780                if !path.ends_with(".pack") {
781                    if let Ok(mut rfile) = RFile::new_from_file(&path) {
782                        let subpath = path.split_off(game_data_path_str.len() + 1);
783                        rfile.set_path_in_container_raw(&subpath);
784                        let _ = rfile.guess_file_type();
785                        Some((subpath, rfile))
786                    } else {
787                        None
788                    }
789                } else {
790                    None
791                }
792            })
793            .collect::<HashMap<String, RFile>>();
794
795        let cacheable = self.vanilla_loose_files.par_iter_mut()
796            .filter_map(|(_, file)| {
797                let _ = file.guess_file_type();
798
799                match file.file_type() {
800                    FileType::DB |
801                    FileType::Loc => Some(file),
802                    _ => None,
803                }
804            })
805            .collect::<Vec<&mut RFile>>();
806
807        cacheable.iter()
808            .for_each(|file| {
809                match file.file_type() {
810                    FileType::DB => {
811                        if let Some(table_name) = file.db_table_name_from_path() {
812                            match self.vanilla_loose_tables.get_mut(table_name) {
813                                Some(table_paths) => table_paths.push(file.path_in_container_raw().to_owned()),
814                                None => { self.vanilla_loose_tables.insert(table_name.to_owned(), vec![file.path_in_container_raw().to_owned()]); },
815                            }
816                        }
817                    }
818                    FileType::Loc => {
819                        self.vanilla_loose_locs.insert(file.path_in_container_raw().to_owned());
820                    }
821                    _ => {}
822                }
823            }
824        );
825
826        self.vanilla_loose_folders = self.vanilla_loose_files.par_iter().filter_map(|(path, _)| {
827            let file_path_split = path.split('/').collect::<Vec<&str>>();
828            let folder_path_len = file_path_split.len() - 1;
829            if folder_path_len == 0 {
830                None
831            } else {
832
833                let mut paths = Vec::with_capacity(folder_path_len);
834
835                for (index, folder) in file_path_split.iter().enumerate() {
836                    if index < path.len() - 1 && !folder.is_empty() {
837                        paths.push(file_path_split[0..=index].join("/"))
838                    }
839                }
840
841                Some(paths)
842            }
843        }).flatten().collect::<HashSet<String>>();
844
845        self.vanilla_loose_files.keys().for_each(|path| {
846            let lower = path.to_lowercase();
847            match self.vanilla_loose_paths.get_mut(&lower) {
848                Some(paths) => paths.push(path.to_owned()),
849                None => { self.vanilla_loose_paths.insert(lower, vec![path.to_owned()]); },
850            }
851        });
852
853        // Only decode the tables if we passed a schema. If not, it's responsability of the user to decode them later.
854        if let Some(schema) = schema {
855            let mut decode_extra_data = DecodeableExtraData::default();
856            decode_extra_data.set_schema(Some(schema));
857            let extra_data = Some(decode_extra_data);
858
859            let mut files = self.vanilla_loose_locs.iter().chain(self.vanilla_loose_tables.values().flatten()).filter_map(|path| {
860                self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
861            }).collect::<Vec<_>>();
862
863            files.par_iter_mut().for_each(|(_, file)| {
864                let _ = file.decode(&extra_data, true, false);
865            });
866
867            self.vanilla_loose_files.par_extend(files);
868        }
869
870        Ok(())
871    }
872
873
874    /// This function loads all the loose files within the game's /data folder.
875    fn load_parent_files(&mut self, schema: &Option<Schema>, parent_pack_names: &[String], game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {
876        self.parent_files.clear();
877        self.parent_tables.clear();
878        self.parent_locs.clear();
879        self.parent_folders.clear();
880        self.parent_paths.clear();
881
882        // Preload parent mods of the currently loaded Pack.
883        self.load_parent_packs(parent_pack_names, game_info, game_path, secondary_path)?;
884        self.parent_files.par_iter_mut().map(|(_, file)| file.guess_file_type()).collect::<Result<()>>()?;
885
886        // Then build the table/loc lists, for easy access.
887        self.parent_files.iter()
888            .for_each(|(path, file)| {
889                match file.file_type() {
890                    FileType::DB => {
891                        if let Some(table_name) = file.db_table_name_from_path() {
892                            match self.parent_tables.get_mut(table_name) {
893                                Some(table_paths) => table_paths.push(path.to_owned()),
894                                None => { self.parent_tables.insert(table_name.to_owned(), vec![path.to_owned()]); },
895                            }
896                        }
897                    }
898                    FileType::Loc => {
899                        self.parent_locs.insert(path.to_owned());
900                    }
901                    _ => {}
902                }
903            }
904        );
905
906        // Build the folder list.
907        self.parent_folders = self.parent_files.par_iter().filter_map(|(path, _)| {
908            let file_path_split = path.split('/').collect::<Vec<&str>>();
909            let folder_path_len = file_path_split.len() - 1;
910            if folder_path_len == 0 {
911                None
912            } else {
913
914                let mut paths = Vec::with_capacity(folder_path_len);
915
916                for (index, folder) in file_path_split.iter().enumerate() {
917                    if index < path.len() - 1 && !folder.is_empty() {
918                        paths.push(file_path_split[0..=index].join("/"))
919                    }
920                }
921
922                Some(paths)
923            }
924        }).flatten().collect::<HashSet<String>>();
925
926        self.parent_files.keys().for_each(|path| {
927            let lower = path.to_lowercase();
928            match self.parent_paths.get_mut(&lower) {
929                Some(paths) => paths.push(path.to_owned()),
930                None => { self.parent_paths.insert(lower, vec![path.to_owned()]); },
931            }
932        });
933
934        // Only decode the tables if we passed a schema. If not, it's responsability of the user to decode them later.
935        if let Some(schema) = schema {
936            let mut decode_extra_data = DecodeableExtraData::default();
937            decode_extra_data.set_schema(Some(schema));
938            let extra_data = Some(decode_extra_data);
939
940            let mut files = self.parent_tables.values().flatten().filter_map(|path| {
941                self.parent_files.remove(path).map(|file| (path.to_owned(), file))
942            }).collect::<Vec<_>>();
943
944            files.par_iter_mut().for_each(|(_, file)| {
945                let _ = file.decode(&extra_data, true, false);
946            });
947
948            self.parent_files.par_extend(files);
949        }
950
951        // Also decode the locs. They don't need an schema.
952        let mut files = self.parent_locs.iter().filter_map(|path| {
953            self.parent_files.remove(path).map(|file| (path.to_owned(), file))
954        }).collect::<Vec<_>>();
955
956        files.par_iter_mut().for_each(|(_, file)| {
957            let _ = file.decode(&None, true, false);
958        });
959
960        self.parent_files.par_extend(files);
961
962        Ok(())
963    }
964
965    /// This function loads all the parent [Packs](rpfm_lib::files::pack::Pack) provided as `parent_pack_names` as dependencies,
966    /// taking care of also loading all dependencies of all of them, if they're not already loaded.
967    fn load_parent_packs(&mut self, parent_pack_names: &[String], game_info: &GameInfo, game_path: &Path, secondary_path: &Path) -> Result<()> {
968        let data_packs_paths = game_info.data_packs_paths(game_path).unwrap_or_default();
969        let secondary_packs_paths = game_info.secondary_packs_paths(secondary_path);
970        let content_packs_paths = game_info.content_packs_paths(game_path);
971        let mut loaded_packfiles = vec![];
972
973        parent_pack_names.iter().for_each(|pack_name| self.load_parent_pack(pack_name, &mut loaded_packfiles, &data_packs_paths, &secondary_packs_paths, &content_packs_paths, game_info));
974
975        Ok(())
976    }
977
978    /// This function loads a parent [Pack](rpfm_lib::files::pack::Pack) as a dependency,
979    /// taking care of also loading all dependencies of it, if they're not already loaded.
980    fn load_parent_pack(
981        &mut self,
982        pack_name: &str,
983        already_loaded: &mut Vec<String>,
984        data_paths: &[PathBuf],
985        secondary_paths: &Option<Vec<PathBuf>>,
986        content_paths: &Option<Vec<PathBuf>>,
987        game_info: &GameInfo
988    ) {
989        // Do not process Packs twice.
990        if !already_loaded.contains(&pack_name.to_owned()) {
991
992            // First check in /data. If we have packs there, do not bother checking for external Packs.
993            if let Some(path) = data_paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
994                if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
995                    already_loaded.push(pack_name.to_owned());
996                    pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
997                    self.parent_files.extend(pack.files().clone());
998
999                    return;
1000                }
1001            }
1002
1003            // Then check in /secondary. If we have packs there, do not bother checking for content Packs.
1004            if let Some(ref paths) = secondary_paths {
1005                if let Some(path) = paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
1006                    if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
1007                        already_loaded.push(pack_name.to_owned());
1008                        pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
1009                        self.parent_files.extend(pack.files().clone());
1010
1011                        return;
1012                    }
1013                }
1014            }
1015
1016            // If nothing else works, check in content.
1017            if let Some(ref paths) = content_paths {
1018                if let Some(path) = paths.iter().find(|x| x.file_name().unwrap().to_string_lossy() == pack_name) {
1019                    if let Ok(pack) = Pack::read_and_merge(&[path.to_path_buf()], game_info, true, false, false) {
1020                        already_loaded.push(pack_name.to_owned());
1021                        pack.dependencies().iter().for_each(|(_, pack_name)| self.load_parent_pack(pack_name, already_loaded, data_paths, secondary_paths, content_paths, game_info));
1022                        self.parent_files.extend(pack.files().clone());
1023                    }
1024                }
1025            }
1026        }
1027    }
1028
1029    /// Function to force-decode all tables/locs in the dependencies.
1030    ///
1031    /// Many operations require them to be decoded, so if you did not decoded them on load, make sure to call this to decode them after load.
1032    pub fn decode_tables(&mut self, schema: &Option<Schema>) {
1033        if let Some(schema) = schema {
1034
1035            let mut decode_extra_data = DecodeableExtraData::default();
1036            decode_extra_data.set_schema(Some(schema));
1037            let extra_data = Some(decode_extra_data);
1038
1039            // Vanilla loose files.
1040            let mut files = self.vanilla_loose_locs.iter().chain(self.vanilla_loose_tables.values().flatten()).filter_map(|path| {
1041                self.vanilla_loose_files.remove(path).map(|file| (path.to_owned(), file))
1042            }).collect::<Vec<_>>();
1043
1044            files.par_iter_mut().for_each(|(_, file)| {
1045                let _ = file.decode(&extra_data, true, false);
1046            });
1047
1048            self.vanilla_loose_files.par_extend(files);
1049
1050            // Vanilla files.
1051            let mut files = self.vanilla_locs.iter().chain(self.vanilla_tables.values().flatten()).filter_map(|path| {
1052                self.vanilla_files.remove(path).map(|file| (path.to_owned(), file))
1053            }).collect::<Vec<_>>();
1054
1055            files.par_iter_mut().for_each(|(_, file)| {
1056                let _ = file.decode(&extra_data, true, false);
1057            });
1058
1059            self.vanilla_files.par_extend(files);
1060
1061            // Parent files.
1062            let mut files = self.parent_locs.iter().chain(self.parent_tables.values().flatten()).filter_map(|path| {
1063                self.parent_files.remove(path).map(|file| (path.to_owned(), file))
1064            }).collect::<Vec<_>>();
1065
1066            files.par_iter_mut().for_each(|(_, file)| {
1067                let _ = file.decode(&extra_data, true, false);
1068            });
1069
1070            self.parent_files.par_extend(files);
1071        }
1072    }
1073
1074    //-----------------------------------//
1075    // Getters
1076    //-----------------------------------//
1077
1078    /// This function returns a reference to a specific file from the cache, if exists.
1079    pub fn file(&self, file_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> Result<&RFile> {
1080        let file_path = if let Some(file_path) = file_path.strip_prefix('/') {
1081            file_path
1082        } else {
1083            file_path
1084        };
1085
1086        if include_parent {
1087
1088            // Even on case-insensitive searches, try to use get first. We may get lucky.
1089            if let Some(file) = self.parent_files.get(file_path) {
1090                return Ok(file);
1091            }
1092
1093            if case_insensitive {
1094                let lower = file_path.to_lowercase();
1095                if let Some(file) = self.parent_paths.get(&lower).and_then(|paths| self.parent_files.get(&paths[0])) {
1096                    return Ok(file);
1097                }
1098            }
1099        }
1100
1101        if include_vanilla {
1102
1103            // Even on case-insensitive searches, try to use get first. We may get lucky.
1104            if let Some(file) = self.vanilla_files.get(file_path) {
1105                return Ok(file);
1106            }
1107
1108            if case_insensitive {
1109                let lower = file_path.to_lowercase();
1110                if let Some(file) = self.vanilla_paths.get(&lower).and_then(|paths| self.vanilla_files.get(&paths[0])) {
1111                    return Ok(file);
1112                }
1113
1114            }
1115
1116            // Same check for loose paths.
1117            if let Some(file) = self.vanilla_loose_files.get(file_path) {
1118                return Ok(file);
1119            }
1120
1121            if case_insensitive {
1122                let lower = file_path.to_lowercase();
1123                if let Some(file) = self.vanilla_loose_paths.get(&lower).and_then(|paths| self.vanilla_loose_files.get(&paths[0])) {
1124                    return Ok(file);
1125                }
1126            }
1127        }
1128
1129        Err(RLibError::DependenciesCacheFileNotFound(file_path.to_owned()))
1130    }
1131
1132    /// This function returns a mutable reference to a specific file from the cache, if exists.
1133    pub fn file_mut(&mut self, file_path: &str, include_vanilla: bool, include_parent: bool) -> Result<&mut RFile> {
1134        if include_parent {
1135            if let Some(file) = self.parent_files.get_mut(file_path) {
1136                return Ok(file);
1137            }
1138        }
1139
1140        if include_vanilla {
1141            if let Some(file) = self.vanilla_files.get_mut(file_path) {
1142                return Ok(file);
1143            }
1144
1145            if let Some(file) = self.vanilla_loose_files.get_mut(file_path) {
1146                return Ok(file);
1147            }
1148        }
1149
1150        Err(RLibError::DependenciesCacheFileNotFound(file_path.to_owned()))
1151    }
1152
1153    /// Batch variant of [`Dependencies::file_mut`] that returns one `&mut RFile` per matching path.
1154    pub fn files_mut_by_paths(
1155        &mut self,
1156        paths: &HashSet<String>,
1157        include_vanilla: bool,
1158        include_parent: bool,
1159    ) -> HashMap<String, &mut RFile> {
1160        let mut result: HashMap<String, &mut RFile> = HashMap::with_capacity(paths.len());
1161
1162        if include_parent {
1163            for (k, v) in self.parent_files.iter_mut() {
1164                if paths.contains(k) {
1165                    result.insert(k.clone(), v);
1166                }
1167            }
1168        }
1169
1170        if include_vanilla {
1171            for (k, v) in self.vanilla_files.iter_mut() {
1172                if paths.contains(k) && !result.contains_key(k) {
1173                    result.insert(k.clone(), v);
1174                }
1175            }
1176
1177            for (k, v) in self.vanilla_loose_files.iter_mut() {
1178                if paths.contains(k) && !result.contains_key(k) {
1179                    result.insert(k.clone(), v);
1180                }
1181            }
1182        }
1183
1184        result
1185    }
1186
    /// This function returns a reference to all files corresponding to the provided paths.
    ///
    /// `ContainerPath::File` entries are resolved individually through [`Dependencies::file`];
    /// `ContainerPath::Folder` entries pull every cached file under that folder. An empty
    /// folder path (which becomes "/" below) matches every file in the requested caches.
    /// Paths that match nothing are silently skipped.
    pub fn files_by_path(&self, file_paths: &[ContainerPath], include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> HashMap<String, &RFile> {
        // Split the requested paths into individual files and folders, as they're resolved differently.
        let (file_paths, folder_paths): (Vec<_>, Vec<_>) = file_paths.iter().partition_map(|file_path| match file_path {
            ContainerPath::File(file_path) => Either::Left(file_path.to_owned()),
            ContainerPath::Folder(file_path) => Either::Right(file_path.to_owned()),
        });

        let mut hashmap = HashMap::new();

        // File check.
        if !file_paths.is_empty() {
            hashmap.extend(file_paths.par_iter()
                .filter_map(|file_path| self.file(file_path, include_vanilla, include_parent, case_insensitive)
                    .ok()
                    .map(|file| (file_path.to_owned(), file)))
                .collect::<Vec<(_,_)>>()
            );
        }

        // Folder check.
        if !folder_paths.is_empty() {
            hashmap.extend(folder_paths.into_par_iter().flat_map(|folder_path| {
                let mut folder = vec![];
                // The trailing '/' avoids matching sibling folders sharing the same prefix.
                let folder_path = folder_path.to_owned() + "/";
                if include_vanilla {

                    // "/" means the folder path was empty: return every vanilla file.
                    if folder_path == "/" {
                        folder.extend(self.vanilla_loose_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                        folder.extend(self.vanilla_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                    } else {
                        folder.extend(self.vanilla_loose_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                        folder.extend(self.vanilla_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());
                    }
                }

                // Parent files are extended last, so they overwrite vanilla entries with the same path.
                if include_parent {
                    if folder_path == "/" {
                        folder.extend(self.parent_files.par_iter()
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());

                    } else {
                        folder.extend(self.parent_files.par_iter()
                            .filter(|(path, _)| {
                                if case_insensitive {
                                    starts_with_case_insensitive(path, &folder_path)
                                } else {
                                    path.starts_with(&folder_path)
                                }
                            })
                            .map(|(path, file)| (path.to_owned(), file))
                            .collect::<Vec<(_,_)>>());
                    }
                }
                folder
            }).collect::<Vec<(_,_)>>());
        }

        hashmap
    }
1272
1273    /// This function returns a reference to all files of the specified FileTypes from the cache, if any, along with their path.
1274    pub fn files_by_types(&self, file_types: &[FileType], include_vanilla: bool, include_parent: bool) -> HashMap<String, &RFile> {
1275        let mut files = HashMap::new();
1276
1277        // Vanilla first, so if parent files are found, they overwrite vanilla files.
1278        if include_vanilla {
1279            files.extend(self.vanilla_loose_files.par_iter().chain(self.vanilla_files.par_iter())
1280                .filter(|(_, file)| file_types.contains(&file.file_type()))
1281                .map(|(path, file)| (path.to_owned(), file))
1282                .collect::<HashMap<_,_>>());
1283        }
1284
1285        if include_parent {
1286            files.extend(self.parent_files.par_iter()
1287                .filter(|(_, file)| file_types.contains(&file.file_type()))
1288                .map(|(path, file)| (path.to_owned(), file))
1289                .collect::<HashMap<_,_>>());
1290        }
1291
1292        files
1293    }
1294
1295    /// This function returns a mutable reference to all files of the specified FileTypes from the cache, if any, along with their path.
1296    pub fn files_by_types_mut(&mut self, file_types: &[FileType], include_vanilla: bool, include_parent: bool) -> HashMap<String, &mut RFile> {
1297        let mut files = HashMap::new();
1298
1299        // Vanilla first, so if parent files are found, they overwrite vanilla files.
1300        if include_vanilla {
1301            files.extend(self.vanilla_loose_files.par_iter_mut().chain(self.vanilla_files.par_iter_mut())
1302                .filter(|(_, file)| file_types.contains(&file.file_type()))
1303                .map(|(path, file)| (path.to_owned(), file))
1304                .collect::<HashMap<_,_>>());
1305        }
1306
1307        if include_parent {
1308            files.extend(self.parent_files.par_iter_mut()
1309                .filter(|(_, file)| file_types.contains(&file.file_type()))
1310                .map(|(path, file)| (path.to_owned(), file))
1311                .collect::<HashMap<_,_>>());
1312        }
1313
1314        files
1315    }
1316
1317    /// This function returns the vanilla/parent locs from the cache, according to the params you pass it.
1318    ///
1319    /// It returns them in the order the game will load them.
1320    pub fn loc_data(&self, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1321        let mut cache = vec![];
1322
1323        if include_vanilla {
1324            let mut vanilla_loose_locs = self.vanilla_loose_locs.iter().collect::<Vec<_>>();
1325            vanilla_loose_locs.sort();
1326
1327            for path in &vanilla_loose_locs {
1328                if let Some(file) = self.vanilla_loose_files.get(*path) {
1329                    cache.push(file);
1330                }
1331            }
1332
1333            let mut vanilla_locs = self.vanilla_locs.iter().collect::<Vec<_>>();
1334            vanilla_locs.sort();
1335
1336            for path in &vanilla_locs {
1337                if let Some(file) = self.vanilla_files.get(*path) {
1338                    cache.push(file);
1339                }
1340            }
1341        }
1342
1343        if include_parent {
1344            let mut parent_locs = self.parent_locs.iter().collect::<Vec<_>>();
1345            parent_locs.sort();
1346
1347            for path in &parent_locs {
1348                if let Some(file) = self.parent_files.get(*path) {
1349                    cache.push(file);
1350                }
1351            }
1352        }
1353
1354        Ok(cache)
1355    }
1356
1357    /// This function returns the vanilla/parent db tables from the cache, according to the params you pass it.
1358    ///
1359    /// It returns them in the order the game will load them.
1360    ///
1361    /// NOTE: table_name is expected to be the table's folder name, with "_tables" at the end.
1362    pub fn db_data(&self, table_name: &str, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1363        let mut cache = vec![];
1364
1365        if include_vanilla {
1366            if let Some(vanilla_loose_tables) = self.vanilla_loose_tables.get(table_name) {
1367                let mut vanilla_loose_tables = vanilla_loose_tables.to_vec();
1368                vanilla_loose_tables.sort();
1369
1370                for path in &vanilla_loose_tables {
1371                    if let Some(file) = self.vanilla_loose_files.get(path) {
1372                        cache.push(file);
1373                    }
1374                }
1375            }
1376
1377            if let Some(vanilla_tables) = self.vanilla_tables.get(table_name) {
1378                let mut vanilla_tables = vanilla_tables.to_vec();
1379                vanilla_tables.sort();
1380
1381                for path in &vanilla_tables {
1382                    if let Some(file) = self.vanilla_files.get(path) {
1383                        cache.push(file);
1384                    }
1385                }
1386            }
1387        }
1388
1389        if include_parent {
1390            if let Some(parent_tables) = self.parent_tables.get(table_name) {
1391                let mut parent_tables = parent_tables.to_vec();
1392                parent_tables.sort();
1393
1394                for path in &parent_tables {
1395                    if let Some(file) = self.parent_files.get(path) {
1396                        cache.push(file);
1397                    }
1398                }
1399            }
1400        }
1401
1402        Ok(cache)
1403    }
1404
1405    /// This function returns the vanilla/parent db tables from the cache, according to the params you pass it,
1406    /// applying to them any datacore from the provided Pack.
1407    ///
1408    /// It returns them in the order the game will load them.
1409    ///
1410    /// NOTE: table_name is expected to be the table's folder name, with "_tables" at the end.
1411    pub fn db_data_datacored<'a>(&'a self, table_name: &str, packs: &'a BTreeMap<String, Pack>, include_vanilla: bool, include_parent: bool) -> Result<Vec<&'a RFile>> {
1412        let mut cache = vec![];
1413
1414        if include_vanilla {
1415            if let Some(vanilla_loose_tables) = self.vanilla_loose_tables.get(table_name) {
1416                let mut vanilla_loose_tables = vanilla_loose_tables.to_vec();
1417                vanilla_loose_tables.sort();
1418
1419                for path in &vanilla_loose_tables {
1420                    if let Some(file) = self.vanilla_loose_files.get(path) {
1421                        cache.push(file);
1422                    }
1423                }
1424            }
1425
1426            if let Some(vanilla_tables) = self.vanilla_tables.get(table_name) {
1427                let mut vanilla_tables = vanilla_tables.to_vec();
1428                vanilla_tables.sort();
1429
1430                for path in &vanilla_tables {
1431                    if let Some(file) = self.vanilla_files.get(path) {
1432                        cache.push(file);
1433                    }
1434                }
1435            }
1436        }
1437
1438        if include_parent {
1439            if let Some(parent_tables) = self.parent_tables.get(table_name) {
1440                let mut parent_tables = parent_tables.to_vec();
1441                parent_tables.sort();
1442
1443                for path in &parent_tables {
1444                    if let Some(file) = self.parent_files.get(path) {
1445                        cache.push(file);
1446                    }
1447                }
1448            }
1449        }
1450
1451        let paths = cache.iter()
1452            .map(|x| x.path_in_container())
1453            .collect::<Vec<_>>();
1454
1455        for pack in packs.values() {
1456            for pack_file in pack.files_by_paths(&paths, true) {
1457                for cache_file in &mut cache {
1458                    if cache_file.path_in_container() == pack_file.path_in_container() {
1459                        *cache_file = pack_file;
1460                        break;
1461                    }
1462                }
1463            }
1464        }
1465
1466        Ok(cache)
1467    }
1468
1469    /// This function returns the vanilla/parent DB and Loc tables from the cache, according to the params you pass it.
1470    ///
1471    /// It returns them in the order the game will load them.
1472    pub fn db_and_loc_data(&self, include_db: bool, include_loc: bool, include_vanilla: bool, include_parent: bool) -> Result<Vec<&RFile>> {
1473        let mut cache = vec![];
1474
1475        if include_vanilla {
1476            if include_db {
1477                let mut vanilla_loose_tables = self.vanilla_loose_tables.values().flatten().collect::<Vec<_>>();
1478                vanilla_loose_tables.sort();
1479
1480                for path in &vanilla_loose_tables {
1481                    if let Some(file) = self.vanilla_loose_files.get(*path) {
1482                        cache.push(file);
1483                    }
1484                }
1485
1486                let mut vanilla_tables = self.vanilla_tables.values().flatten().collect::<Vec<_>>();
1487                vanilla_tables.sort();
1488
1489                for path in &vanilla_tables {
1490                    if let Some(file) = self.vanilla_files.get(*path) {
1491                        cache.push(file);
1492                    }
1493                }
1494            }
1495
1496            if include_loc {
1497                let mut vanilla_loose_locs = self.vanilla_loose_locs.iter().collect::<Vec<_>>();
1498                vanilla_loose_locs.sort();
1499
1500                for path in &vanilla_loose_locs {
1501                    if let Some(file) = self.vanilla_loose_files.get(*path) {
1502                        cache.push(file);
1503                    }
1504                }
1505
1506                let mut vanilla_locs = self.vanilla_locs.iter().collect::<Vec<_>>();
1507                vanilla_locs.sort();
1508
1509                for path in &vanilla_locs {
1510                    if let Some(file) = self.vanilla_files.get(*path) {
1511                        cache.push(file);
1512                    }
1513                }
1514            }
1515        }
1516
1517        if include_parent {
1518            if include_db {
1519                let mut parent_tables = self.parent_tables.values().flatten().collect::<Vec<_>>();
1520                parent_tables.sort();
1521
1522                for path in &parent_tables {
1523                    if let Some(file) = self.parent_files.get(*path) {
1524                        cache.push(file);
1525                    }
1526                }
1527            }
1528
1529            if include_loc {
1530                let mut parent_locs = self.parent_locs.iter().collect::<Vec<_>>();
1531                parent_locs.sort();
1532
1533                for path in &parent_locs {
1534                    if let Some(file) = self.parent_files.get(*path) {
1535                        cache.push(file);
1536                    }
1537                }
1538            }
1539        }
1540
1541        Ok(cache)
1542    }
1543
1544    //-----------------------------------//
1545    // Advanced Getters.
1546    //-----------------------------------//
1547
1548    /// This function returns the reference/lookup data of all relevant columns of a DB Table.
1549    ///
1550    /// NOTE: This assumes you've populated the runtime references before this. If not, it'll fail.
1551    pub fn db_reference_data(&self, schema: &Schema, packs: &BTreeMap<String, Pack>, table_name: &str, definition: &Definition, loc_data: &Option<HashMap<Cow<str>, Cow<str>>>) -> HashMap<i32, TableReferences> {
1552
1553        // First check if the data is already cached, to speed up things.
1554        //
1555        // NOTE: The None branch should only trigger in cases were there's a bug. We just let it pass without reference instead of crashing.
1556        let mut vanilla_references = match self.local_tables_references.get(table_name) {
1557            Some(cached_data) => cached_data.clone(),
1558            None => HashMap::new(),
1559        };
1560
1561        // If we receive premade loc data (because this may trigger on many files at the same time), don't calculate it here.
1562        let (_loc_files, loc_decoded) = if loc_data.is_some() {
1563            (vec![], vec![])
1564        } else {
1565            let loc_files: Vec<_> = packs.values().flat_map(|pack| pack.files_by_type(&[FileType::Loc])).collect();
1566            let loc_decoded = loc_files.iter()
1567                .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
1568                .map(|file| file.data())
1569                .collect::<Vec<_>>();
1570            (loc_files, loc_decoded)
1571        };
1572
1573        let mut _loc_data_dummy = HashMap::new();
1574        let loc_data = if let Some(ref loc_data) = loc_data {
1575            loc_data
1576        } else {
1577            _loc_data_dummy = loc_decoded.par_iter()
1578                .flat_map(|data| data.par_iter()
1579                    .map(|entry| (entry[0].data_to_string(), entry[1].data_to_string()))
1580                    .collect::<Vec<(_,_)>>()
1581                ).collect::<HashMap<_,_>>();
1582            &_loc_data_dummy
1583        };
1584
1585        // Trick: before doing this, we modify the definition to include any lookup from any reference,
1586        // so we are actually able to catch recursive-like lookups without reading multiple tables.
1587        let mut definition = definition.clone();
1588        self.add_recursive_lookups_to_definition(schema, &mut definition, table_name);
1589
1590        let patches = Some(definition.patches());
1591        let fields_processed = definition.fields_processed();
1592        let local_references = fields_processed.par_iter().enumerate().filter_map(|(column, field)| {
1593            match field.is_reference(patches) {
1594                Some((ref ref_table, ref ref_column)) => {
1595                    if !ref_table.is_empty() && !ref_column.is_empty() {
1596
1597                        // Get his lookup data if it has it.
1598                        let lookup_data = if let Some(ref data) = field.lookup_no_patch() { data.to_vec() } else { Vec::with_capacity(0) };
1599                        let mut references = TableReferences::default();
1600                        *references.field_name_mut() = field.name().to_owned();
1601
1602                        let _local_found = self.db_reference_data_from_local_pack(&mut references, (ref_table, ref_column, &lookup_data), packs, loc_data);
1603
1604                        Some((column as i32, references))
1605                    } else { None }
1606                }
1607
1608                // In the fallback case (no references) we still need to check for lookup data within our table and the locs.
1609                None => {
1610                    if let Some(ref lookup_data) = field.lookup_no_patch() {
1611
1612                        // Only single-keyed tables can have lookups.
1613                        if field.is_key(patches) && fields_processed.iter().filter(|x| x.is_key(patches)).count() == 1 {
1614
1615                            // The fallback here is to avoid crashes on packs that have renamed folders.
1616                            let ref_table = if table_name.ends_with("_tables") && table_name.len() > 7 {
1617                                table_name.to_owned().drain(..table_name.len() - 7).collect()
1618                            } else {
1619                                table_name.to_owned()
1620                            };
1621
1622                            let ref_column = field.name();
1623
1624                            // Get his lookup data if it has it.
1625                            let mut references = TableReferences::default();
1626                            *references.field_name_mut() = field.name().to_owned();
1627
1628                            let _local_found = self.db_reference_data_from_local_pack(&mut references, (&ref_table, ref_column, lookup_data), packs, loc_data);
1629
1630                            Some((column as i32, references))
1631                        } else { None }
1632                    } else { None }
1633                }
1634            }
1635        }).collect::<HashMap<_, _>>();
1636
1637        vanilla_references.par_iter_mut().for_each(|(key, value)|
1638            if let Some(local_value) = local_references.get(key) {
1639                value.data.extend(local_value.data.iter().map(|(k, v)| (k.clone(), v.clone())));
1640            }
1641        );
1642
1643        for (index, field) in fields_processed.iter().enumerate() {
1644            match vanilla_references.get_mut(&(index as i32)) {
1645                Some(references) => {
1646                    let hardcoded_lookup = field.lookup_hardcoded(patches);
1647                    if !hardcoded_lookup.is_empty() {
1648                        references.data.extend(hardcoded_lookup);
1649                    }
1650                },
1651                None => {
1652                    let mut references = TableReferences::default();
1653                    *references.field_name_mut() = field.name().to_owned();
1654                    let hardcoded_lookup = field.lookup_hardcoded(patches);
1655                    if !hardcoded_lookup.is_empty() {
1656                        references.data.extend(hardcoded_lookup);
1657                        vanilla_references.insert(index as i32, references);
1658                    }
1659                },
1660            }
1661        }
1662
1663        vanilla_references
1664    }
1665
1666    /// This function returns the reference/lookup data of all relevant columns of a DB Table from the vanilla/parent data.
1667    ///
1668    /// If reference data was found, the most recent definition of said data is returned.
1669    fn db_reference_data_from_vanilla_and_modded_tables(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String])) -> Option<Definition> {
1670        self.db_reference_data_generic(references, reference_info, None, &HashMap::new())
1671    }
1672
1673    /// This function returns the reference/lookup data of all relevant columns of a DB Table from the assembly kit data.
1674    ///
1675    /// It returns true if data is found, otherwise it returns false.
1676    fn db_reference_data_from_asskit_tables(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String])) -> bool {
1677        let ref_table = reference_info.0;
1678        let ref_column = reference_info.1;
1679        let ref_lookup_columns = reference_info.2;
1680
1681        match self.asskit_only_db_tables.get(ref_table) {
1682            Some(table) => {
1683                let fields_processed = table.definition().fields_processed();
1684                let ref_column_index = fields_processed.iter().position(|x| x.name() == ref_column);
1685                let ref_lookup_columns_index = ref_lookup_columns.iter().map(|column| fields_processed.iter().position(|x| x.name() == column)).collect::<Vec<_>>();
1686
1687                for row in &*table.data() {
1688                    let mut reference_data = String::new();
1689                    let mut lookup_data = vec![];
1690
1691                    // First, we get the reference data.
1692                    if let Some(index) = ref_column_index {
1693                        reference_data = row[index].data_to_string().to_string();
1694                    }
1695
1696                    // Then, we get the lookup data.
1697                    for column in ref_lookup_columns_index.iter().flatten() {
1698                        lookup_data.push(row[*column].data_to_string());
1699                    }
1700
1701                    references.data.insert(reference_data, lookup_data.join(" "));
1702                }
1703                true
1704            },
1705            None => false,
1706        }
1707    }
1708
1709    /// This function returns the reference/lookup data of all relevant columns of a DB Table from the provided Pack.
1710    fn db_reference_data_from_local_pack(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String]), packs: &BTreeMap<String, Pack>, loc_data: &HashMap<Cow<str>, Cow<str>>) -> Option<Definition> {
1711        self.db_reference_data_generic(references, reference_info, Some(packs), loc_data)
1712    }
1713
    /// Shared implementation for the reference/lookup getters above.
    ///
    /// Fills `references.data` with `reference value -> lookup text` pairs for the column
    /// `reference_info.1` of the table `reference_info.0`, reading the table both from the
    /// provided `packs` (if any) and from the vanilla/parent caches. `reference_info.2` is
    /// the list of lookup paths to resolve for each reference value.
    ///
    /// Lookup path format (as parsed below): steps separated by `:`, each step being
    /// `table#key_column#lookup_column` — presumably produced by the schema's lookup
    /// definitions; TODO confirm against the schema code.
    ///
    /// Returns the definition with the highest version among the tables that provided
    /// data, or `None` if no table matched.
    fn db_reference_data_generic(&self, references: &mut TableReferences, reference_info: (&str, &str, &[String]), packs: Option<&BTreeMap<String, Pack>>, loc_data: &HashMap<Cow<str>, Cow<str>>) -> Option<Definition> {
        let mut data_found: Option<Definition> = None;

        let ref_table = reference_info.0;
        let ref_column = reference_info.1;
        let ref_lookup_columns = reference_info.2;

        // Per-lookup-table file cache: short table name -> sorted list of candidate files.
        let mut cache = HashMap::new();

        // Input is not guaranteed to be in one or another format, so sanitize them here.
        let ref_table_full = if ref_table.ends_with("_tables") {
            ref_table.to_owned()
        } else {
            ref_table.to_owned() + "_tables"
        };

        // Candidate files for the referenced table: pack files first (if packs were
        // provided), then the vanilla/parent cached files. Errors from db_data are
        // treated as "no files" on purpose.
        let files = match packs {
            Some(packs) => {
                let mut files: Vec<&RFile> = packs.values().flat_map(|pack| pack.files_by_path(&ContainerPath::Folder(format!("db/{ref_table_full}")), true)).collect();
                files.append(&mut self.db_data(&ref_table_full, true, true).unwrap_or_else(|_| vec![]));
                files
            },
            None => self.db_data(&ref_table_full, true, true).unwrap_or_else(|_| vec![]),
        };

        // Pre-hashed lookup rows: "<file path><step joined with ++>" -> (key value -> lookup value).
        let mut table_data_cache: HashMap<String, HashMap<String, String>> = HashMap::new();

        files.iter().for_each(|file| {
            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                let definition = db.definition();
                let fields_processed = definition.fields_processed();

                // Only continue if the column we're referencing actually exists.
                if let Some(ref_column_index) = fields_processed.iter().position(|x| x.name() == ref_column) {

                    // Here we analyze the lookups to build their table cache.
                    let lookups_analyzed = ref_lookup_columns.iter().map(|ref_lookup_path| {
                        let ref_lookup_steps = ref_lookup_path.split(':').map(|x| x.split('#').collect::<Vec<_>>()).collect::<Vec<_>>();

                        // Whether the FINAL step resolves through a localised field, and the
                        // column position it resolves to. Both are decided on the last step below.
                        let mut is_loc = false;
                        let mut col_pos = 0;

                        for (index, ref_lookup_step) in ref_lookup_steps.iter().enumerate() {

                            // A valid step is exactly table#key#lookup. Anything else is malformed.
                            if ref_lookup_step.len() == 3 {
                                let lookup_ref_table = ref_lookup_step[0];
                                let lookup_ref_key = ref_lookup_step[1];
                                let lookup_ref_lookup = ref_lookup_step[2];
                                let lookup_ref_table_long = lookup_ref_table.to_owned() + "_tables";

                                // Build the cache for the tables we need to check.
                                if !cache.contains_key(lookup_ref_table) {
                                    let mut files = vec![];

                                    if let Some(packs) = packs {
                                        for pack in packs.values() {
                                            files.append(&mut pack.files_by_path(&ContainerPath::Folder(format!("db/{lookup_ref_table_long}")), true));
                                        }
                                    }

                                    // Only add to the cache the files not already there due to being in the pack.
                                    for file in self.db_data(&lookup_ref_table_long, true, true).unwrap_or_else(|_| vec![]) {
                                        if files.iter().all(|x| x.path_in_container_raw() != file.path_in_container_raw()) {
                                            files.push(file);
                                        }
                                    }

                                    if !files.is_empty() {

                                        // Make sure they're in order so if the lookup is in a mod, we have to do less iterations to find it.
                                        files.sort_by(|a, b| a.path_in_container_raw().cmp(b.path_in_container_raw()));
                                        cache.insert(lookup_ref_table.to_owned(), files);
                                    }
                                }

                                // If it's the last step, check if it's a loc, or a table column.
                                //
                                // NOTE(review): this only inspects the FIRST cached file's definition;
                                // presumably all files of a table share a compatible definition — confirm.
                                if index == ref_lookup_steps.len() - 1 {
                                    if let Some(file) = cache.get(lookup_ref_table) {
                                        if let Some(file) = file.first() {
                                            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                                                let definition = db.definition();
                                                let fields_processed = definition.fields_processed();
                                                let localised_fields = definition.localised_fields();

                                                // Localised fields take priority over plain table columns.
                                                match localised_fields.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                    Some(loc_pos) => {
                                                        is_loc = true;
                                                        col_pos = loc_pos;
                                                    },
                                                    None => match fields_processed.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                        Some(pos) => {
                                                            is_loc = false;
                                                            col_pos = pos;
                                                        },
                                                        None => {
                                                            //error!("Missing column for lookup. This is a bug.");
                                                        },
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }

                                // Build the hashed cache for lookups, so we don't need to iterate again and again for each row.
                                if let Some(files) = cache.get(lookup_ref_table) {
                                    for file in files {
                                        let table_data_column_cache_key = file.path_in_container_raw().to_owned() + &ref_lookup_step.join("++");
                                        if !table_data_cache.contains_key(&table_data_column_cache_key) {
                                            if let Ok(RFileDecoded::DB(db)) = file.decoded() {
                                                let definition = db.definition();
                                                let fields_processed = definition.fields_processed();
                                                let localised_fields = definition.localised_fields();
                                                let localised_order = definition.localised_key_order();

                                                // Prefix of the loc key ("<table>_<field>_"); the per-row key
                                                // suffix is appended below when building the loc cache.
                                                let loc_key = if is_loc {
                                                    if let Some(loc_field) = localised_fields.get(col_pos) {
                                                        let mut loc_key = String::with_capacity(2 + lookup_ref_table.len() + loc_field.name().len());
                                                        loc_key.push_str(lookup_ref_table);
                                                        loc_key.push('_');
                                                        loc_key.push_str(loc_field.name());
                                                        loc_key.push('_');
                                                        loc_key
                                                    } else {
                                                        String::new()
                                                    }
                                                } else {
                                                    String::new()
                                                };

                                                if let Some(source_key_column) = fields_processed.iter().position(|x| x.name() == lookup_ref_key) {

                                                    // Intermediate step cache.
                                                    if index < ref_lookup_steps.len() - 1 {
                                                        if let Some(source_lookup_column) = fields_processed.iter().position(|x| x.name() == lookup_ref_lookup) {
                                                            let cache = db.data().iter()
                                                                .map(|row| (row[source_key_column].data_to_string().to_string(), row[source_lookup_column].data_to_string().to_string()))
                                                                .collect::<HashMap<_,_>>();

                                                            table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                        }
                                                    }

                                                    // Locs are already pre-cached. We only need the final part of their key.
                                                    else if is_loc {
                                                        let cache = db.data().iter()
                                                            .map(|row| {
                                                                let mut loc_key = loc_key.to_owned();
                                                                loc_key.push_str(&localised_order.iter().map(|pos| row[*pos as usize].data_to_string()).join(""));
                                                                (row[source_key_column].data_to_string().to_string(), loc_key)
                                                            })
                                                            .collect::<HashMap<_,_>>();
                                                        table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                    }

                                                    // Final step over a plain table column: map key -> column value.
                                                    else {
                                                        let cache = db.data().iter()
                                                            .map(|row| (row[source_key_column].data_to_string().to_string(), row[col_pos].data_to_string().to_string()))
                                                            .collect::<HashMap<_,_>>();

                                                        table_data_cache.insert(table_data_column_cache_key.clone(), cache);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            } else {
                                error!("Badly built lookup. This is a bug.");
                            }
                        }

                        (ref_lookup_steps, is_loc)

                    }).collect::<Vec<_>>();

                    // With the caches built, walk the rows of the referenced table and
                    // resolve each lookup chain through db_reference_data_generic_lookup.
                    let data = db.data();
                    for row in &*data {
                        let mut lookup_data = Vec::with_capacity(lookups_analyzed.len());

                        // First, we get the reference data.
                        let reference_data = row[ref_column_index].data_to_string();

                        // Then, we get the lookup data. Only calculate it for non-empty keys.
                        for (lookup_steps, is_loc) in lookups_analyzed.iter() {
                            if !reference_data.is_empty() {

                                if let Some(lookup) = self.db_reference_data_generic_lookup(&cache, loc_data, &reference_data, lookup_steps, *is_loc, &table_data_cache) {
                                    lookup_data.push(lookup);
                                }
                            }
                        }

                        references.data.insert(reference_data.to_string(), lookup_data.into_iter().join(":"));
                    }

                    // Once done with the table, check if we should return its definition.
                    match data_found {
                        Some(ref definition) => {
                            if db.definition().version() > definition.version() {
                                data_found = Some(db.definition().clone());
                            }
                        }

                        None => data_found = Some(db.definition().clone()),
                    }
                }
            }
        });

        data_found
    }
1924
1925    fn db_reference_data_generic_lookup(
1926        &self,
1927        cache: &HashMap<String, Vec<&RFile>>,
1928        loc_data: &HashMap<Cow<str>, Cow<str>>,
1929        lookup_key: &str,
1930        lookup_steps: &[Vec<&str>],
1931        is_loc: bool,
1932        table_data_cache: &HashMap<String, HashMap<String, String>>
1933    ) -> Option<String> {
1934        let mut data_found: Option<String> = None;
1935
1936        if lookup_steps.is_empty() {
1937            return None;
1938        }
1939
1940        let current_step = &lookup_steps[0];
1941        let source_table = current_step[0];
1942
1943        if let Some(files) = cache.get(source_table) {
1944            for file in files {
1945                let table_data_column_cache_key = file.path_in_container_raw().to_owned() + &current_step.join("++");
1946                if let Some(table_data_column_cache) = table_data_cache.get(&table_data_column_cache_key) {
1947
1948                    if let Some(lookup_value) = table_data_column_cache.get(lookup_key) {
1949
1950                        // If we're not yet in the last step, reduce the steps and repeat.
1951                        if lookup_steps.len() > 1 {
1952                            if !lookup_value.is_empty() {
1953                                data_found = self.db_reference_data_generic_lookup(cache, loc_data, lookup_value, &lookup_steps[1..], is_loc, table_data_cache);
1954                            }
1955                        }
1956
1957                        // If we're on the last step, properly get the lookup data. Locs first.
1958                        else if is_loc {
1959
1960                            if let Some(data) = loc_data.get(&**lookup_value) {
1961                                data_found = Some(data.to_string());
1962                            } else if let Some(data) = self.localisation_data.get(&**lookup_value) {
1963                                data_found = Some(data.to_string());
1964                            } else {
1965                                data_found = Some(lookup_value.to_string())
1966                            }
1967                        }
1968
1969                        // Then table columns.
1970                        else {
1971                            data_found = Some(lookup_value.to_owned());
1972                        }
1973
1974                        // If we find a match, don't bother with the rest of the files.
1975                        break;
1976                    }
1977                }
1978            }
1979        }
1980
1981        data_found
1982    }
1983
1984    /// This function returns the table/column/key from the provided loc key.
1985    ///
1986    /// We return the table without "_tables". Keep that in mind if you use this.
1987    pub fn loc_key_source(&self, key: &str) -> Option<(String, String, Vec<String>)> {
1988        let key_split = key.split('_').collect::<Vec<_>>();
1989
1990        // We don't know how much of the string the key the table is, so we try removing parts until we find a table that matches.
1991        // in reverse so longer table names have priority in case of collision.
1992        for (index, _) in key_split.iter().enumerate().rev() {
1993
1994            // Index 0 would mean empty table name.
1995            if index >= 1 {
1996
1997                let mut table_name = key_split[..index].join("_");
1998                let full_table_name = format!("{table_name}_tables");
1999
2000                if let Ok(rfiles) = self.db_data(&full_table_name, true, false) {
2001                    let mut decoded = rfiles.iter()
2002                        .filter_map(|x| if let Ok(RFileDecoded::DB(table)) = x.decoded() {
2003                            Some(table)
2004                        } else {
2005                            None
2006                        }).collect::<Vec<_>>();
2007
2008                    // Also add the ak files if present.
2009                    if let Some(ak_file) = self.asskit_only_db_tables().get(&full_table_name) {
2010                        decoded.push(ak_file);
2011                    }
2012
2013                    for table in decoded {
2014                        let definition = table.definition();
2015                        let localised_fields = definition.localised_fields();
2016                        let localised_key_order = definition.localised_key_order();
2017                        if !localised_fields.is_empty() {
2018                            let mut field = String::new();
2019
2020                            // Loop to get the column.
2021                            for (second_index, value) in key_split[index..].iter().enumerate() {
2022                                field.push_str(value);
2023
2024                                if localised_fields.iter().any(|x| x.name() == field) {
2025
2026                                    // If we reached this, the rest is the value.
2027                                    let key_data = &key_split[index + second_index + 1..].join("_");
2028
2029                                    // Once we get the key, we need to use the stored loc order to find out to what specific line it belongs.
2030                                    // And yes, this means checking every single fucking line in every single table.
2031                                    let data = table.data();
2032                                    for row in data.iter() {
2033                                        let generated_key_split = localised_key_order.iter().map(|col| row[*col as usize].data_to_string()).collect::<Vec<_>>();
2034                                        let generated_key = generated_key_split.join("");
2035                                        if &generated_key == key_data {
2036                                            return Some((table_name, field, generated_key_split.iter().map(|x| x.to_string()).collect()));
2037                                        }
2038                                    }
2039                                }
2040
2041                                field.push('_');
2042                            }
2043                        }
2044                    }
2045                }
2046
2047                // Add an underscore before adding the next part of the table name in the next loop.
2048                table_name.push('_');
2049            }
2050        }
2051
2052        None
2053    }
2054
2055    //-----------------------------------//
2056    // Utility functions.
2057    //-----------------------------------//
2058
2059    /// This function returns if a specific file exists in the dependencies cache.
2060    pub fn file_exists(&self, file_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> bool {
2061        if include_parent {
2062            if self.parent_files.contains_key(file_path) {
2063                return true
2064            } else if case_insensitive {
2065                let lower = file_path.to_lowercase();
2066                if self.parent_paths.contains_key(&lower) {
2067                    return true
2068                }
2069            }
2070        }
2071
2072        if include_vanilla {
2073
2074            if self.vanilla_files.contains_key(file_path) || self.vanilla_loose_files.contains_key(file_path) {
2075                return true
2076            } else if case_insensitive {
2077                let lower = file_path.to_lowercase();
2078                if self.vanilla_paths.contains_key(&lower) || self.vanilla_loose_paths.contains_key(&lower) {
2079                    return true
2080                }
2081            }
2082        }
2083
2084        false
2085    }
2086
2087    /// This function returns if a specific folder exists in the dependencies cache.
2088    pub fn folder_exists(&self, folder_path: &str, include_vanilla: bool, include_parent: bool, case_insensitive: bool) -> bool {
2089        if include_parent && (
2090            self.parent_folders.contains(folder_path) ||
2091            (case_insensitive && self.parent_folders.par_iter().any(|path| caseless::canonical_caseless_match_str(path, folder_path)))
2092        ) {
2093            return true
2094        }
2095
2096        if include_vanilla && (
2097            (self.vanilla_folders.contains(folder_path) || self.vanilla_loose_folders.contains(folder_path)) ||
2098            (case_insensitive && self.vanilla_folders.par_iter().chain(self.vanilla_loose_folders.par_iter()).any(|path| caseless::canonical_caseless_match_str(path, folder_path)))
2099        ) {
2100            return true
2101        }
2102
2103        false
2104    }
2105
2106    /// This function checks if the dependencies cache file exists on disk.
2107    pub fn are_dependencies_generated(file_path: &Path) -> bool {
2108        file_path.is_file()
2109    }
2110
2111    /// This function checks if there is vanilla data loaded in the provided cache.
2112    pub fn is_vanilla_data_loaded(&self, include_asskit: bool) -> bool {
2113        if include_asskit {
2114            !self.vanilla_files.is_empty() && self.is_asskit_data_loaded()
2115        } else {
2116            !self.vanilla_files.is_empty()
2117        }
2118    }
2119
    /// This function checks if there is assembly kit data loaded in the provided cache.
    ///
    /// True as soon as at least one Assembly Kit-only table is present in the cache.
    pub fn is_asskit_data_loaded(&self) -> bool {
        !self.asskit_only_db_tables.is_empty()
    }
2124
2125    /// This function is used to check if a table is outdated or not.
2126    pub fn is_db_outdated(&self, rfile: &RFileDecoded) -> bool {
2127        if let RFileDecoded::DB(data) = rfile {
2128            let dep_db_undecoded = if let Ok(undecoded) = self.db_data(data.table_name(), true, false) { undecoded } else { return false };
2129            let dep_db_decoded = dep_db_undecoded.iter().filter_map(|x| if let Ok(RFileDecoded::DB(decoded)) = x.decoded() { Some(decoded) } else { None }).collect::<Vec<_>>();
2130
2131            if let Some(vanilla_db) = dep_db_decoded.iter().max_by(|x, y| x.definition().version().cmp(y.definition().version())) {
2132                if vanilla_db.definition().version() > data.definition().version() {
2133                    return true;
2134                }
2135            }
2136        }
2137
2138        false
2139    }
2140
2141    /// This function is used to get the version of a table in the game files, if said table is in the game files.
2142    pub fn db_version(&self, table_name: &str) -> Option<i32> {
2143        let tables = self.vanilla_tables.get(table_name)?;
2144        for table_path in tables {
2145
2146            let table = self.vanilla_files.get(table_path)?;
2147            if let RFileDecoded::DB(table) = table.decoded().ok()? {
2148                return Some(*table.definition().version());
2149            }
2150
2151            let table = self.vanilla_loose_files.get(table_path)?;
2152            if let RFileDecoded::DB(table) = table.decoded().ok()? {
2153                return Some(*table.definition().version());
2154            }
2155        }
2156
2157        None
2158    }
2159
2160    /// This function returns the list of values a column of a table has, across all instances of said table in the dependencies and the provided Packs.
2161    pub fn db_values_from_table_name_and_column_name(&self, packs: Option<&BTreeMap<String, Pack>>, table_name: &str, column_name: &str, include_vanilla: bool, include_parent: bool) -> HashSet<String> {
2162        let mut values = HashSet::new();
2163
2164        if let Ok(files) = self.db_data(table_name, include_vanilla, include_parent) {
2165            values.extend(files.par_iter().filter_map(|file| {
2166                if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2167                    table.definition().column_position_by_name(column_name).map(|column| table.data().par_iter().map(|row| row[column].data_to_string().to_string()).collect::<Vec<_>>())
2168                } else { None }
2169            }).flatten().collect::<Vec<_>>());
2170        }
2171
2172        if let Some(packs) = packs {
2173            for pack in packs.values() {
2174                let files = pack.files_by_path(&ContainerPath::Folder(format!("db/{table_name}")), true);
2175                values.extend(files.par_iter().filter_map(|file| {
2176                    if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2177                        table.definition().column_position_by_name(column_name).map(|column| table.data().par_iter().map(|row| row[column].data_to_string().to_string()).collect::<Vec<_>>())
2178                    } else { None }
2179                }).flatten().collect::<Vec<_>>());
2180            }
2181        }
2182
2183        values
2184    }
2185
2186    /// This function returns the value a table has in the row it has a specific value in a specific column.
2187    pub fn db_values_from_table_name_and_column_name_for_value(&self, packs: Option<&BTreeMap<String, Pack>>, table_name: &str, key_column_name: &str, desired_column_name: &str, include_vanilla: bool, include_parent: bool) -> HashMap<String, String> {
2188        let mut values = HashMap::new();
2189
2190        if let Ok(files) = self.db_data(table_name, include_vanilla, include_parent) {
2191            values.extend(files.par_iter().filter_map(|file| {
2192                if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2193                    if let Some(column) = table.definition().column_position_by_name(key_column_name) {
2194                        table.definition().column_position_by_name(desired_column_name).map(|desired_column| table.data().par_iter().map(|row| (row[column].data_to_string().to_string(), row[desired_column].data_to_string().to_string())).collect::<Vec<_>>())
2195                    } else { None }
2196                } else { None }
2197            }).flatten().collect::<Vec<_>>());
2198        }
2199
2200        if let Some(packs) = packs {
2201            for pack in packs.values() {
2202                let files = pack.files_by_path(&ContainerPath::Folder(format!("db/{table_name}")), true);
2203                values.extend(files.par_iter().filter_map(|file| {
2204                    if let Ok(RFileDecoded::DB(table)) = file.decoded() {
2205                        if let Some(column) = table.definition().column_position_by_name(key_column_name) {
2206                            table.definition().column_position_by_name(desired_column_name).map(|desired_column| table.data().par_iter().map(|row| (row[column].data_to_string().to_string(), row[desired_column].data_to_string().to_string())).collect::<Vec<_>>())
2207                        } else { None }
2208                    } else { None }
2209                }).flatten().collect::<Vec<_>>());
2210            }
2211        }
2212
2213        values
2214    }
2215
    /// This function updates a DB Table to its latest valid version, being the latest valid version the one in the vanilla files.
    ///
    /// It returns both, old and new versions, or an error.
    ///
    /// The returned tuple is `(old_version, new_version, deleted_field_names, added_field_names)`.
    /// Errors: `DecodingDBNotADBTable` if `rfile` is not a DB, `NoTableInGameFilesToCompare` if the
    /// game files hold no decodable copy of the table, `NoDefinitionUpdateAvailable` if the table
    /// already uses the newest definition.
    pub fn update_db(&mut self, rfile: &mut RFileDecoded) -> Result<(i32, i32, Vec<String>, Vec<String>)> {
        match rfile {
            RFileDecoded::DB(data) => {
                // Fetch and decode every vanilla copy of this table; the one with the highest
                // definition version is the update target.
                let dep_db_undecoded = self.db_data(data.table_name(), true, false)?;
                let dep_db_decoded = dep_db_undecoded.iter().filter_map(|x| if let Ok(RFileDecoded::DB(decoded)) = x.decoded() { Some(decoded) } else { None }).collect::<Vec<_>>();

                if let Some(vanilla_db) = dep_db_decoded.iter().max_by(|x, y| x.definition().version().cmp(y.definition().version())) {

                    let definition_new = vanilla_db.definition();
                    // Clone the old definition BEFORE swapping it in, as we still need it below
                    // to diff the field lists.
                    let definition_old = data.definition().clone();
                    if definition_old != *definition_new {
                        data.set_definition(definition_new);

                        // Get the info about the definition differences.
                        // Fields are matched by name: "deleted" = in old but not new, "added" = in new but not old.
                        let fields_old = definition_old.fields_processed();
                        let fields_new = definition_new.fields_processed();
                        let fields_deleted = fields_old.iter()
                            .filter(|x| fields_new.iter().all(|y| y.name() != x.name()))
                            .map(|x| x.name().to_owned())
                            .collect::<Vec<_>>();
                        let fields_added = fields_new.iter()
                            .filter(|x| fields_old.iter().all(|y| y.name() != x.name()))
                            .map(|x| x.name().to_owned())
                            .collect::<Vec<_>>();

                        Ok((*definition_old.version(), *definition_new.version(), fields_deleted, fields_added))
                    }
                    else {
                        Err(RLibError::NoDefinitionUpdateAvailable)
                    }
                }
                else { Err(RLibError::NoTableInGameFilesToCompare) }
            }
            _ => Err(RLibError::DecodingDBNotADBTable),
        }
    }
2255
2256    /// Function to generate the missing loc entries in a pack.
2257    pub fn generate_missing_loc_data(&self, packs: &mut BTreeMap<String, Pack>) -> Result<Vec<ContainerPath>> {
2258        let loc_data = self.loc_data(true, true)?;
2259        let mut existing_locs = HashMap::new();
2260
2261        for loc in &loc_data {
2262            if let Ok(RFileDecoded::Loc(ref data)) = loc.decoded() {
2263                existing_locs.extend(data.table().data().iter().map(|x| (x[0].data_to_string().to_string(), x[1].data_to_string().to_string())));
2264            }
2265        }
2266
2267        let mut all_paths = vec![];
2268        for pack in packs.values_mut() {
2269            all_paths.extend(pack.generate_missing_loc_data(&existing_locs)?);
2270        }
2271        Ok(all_paths)
2272    }
2273
    /// This function bruteforces the order in which multikeyed tables get their keys together for loc entries.
    ///
    /// Loc keys for multikeyed tables are built as `<table>_<field>_<k1><k2>…`, but the order of the
    /// key columns is not stored anywhere, so we try every permutation of the key columns against the
    /// known vanilla loc keys until one matches for every row. Results are written either into
    /// `ak_files` (when provided, during cache generation) or into `schema` (during schema updates).
    ///
    /// `locs` optionally provides extra candidate loc field names per table (without "_tables").
    /// `local_packs` adds tables from local Packs to the search pool (fixes tables like campaigns).
    pub fn bruteforce_loc_key_order(&self, schema: &mut Schema, locs: Option<HashMap<String, Vec<String>>>, local_packs: Option<&BTreeMap<String, Pack>>, mut ak_files: Option<&mut HashMap<String, DB>>) -> Result<()> {
        let mut fields_still_not_found = vec![];

        // Get all vanilla loc keys into a big hashmap so we can check them fast.
        let loc_files = self.loc_data(true, false)?;
        let loc_table = loc_files.iter()
            .filter_map(|file| if let Ok(RFileDecoded::Loc(loc)) = file.decoded() { Some(loc) } else { None })
            .flat_map(|file| file.data().to_vec())
            .map(|entry| (entry[0].data_to_string().to_string(), entry[1].data_to_string().to_string()))
            .collect::<HashMap<_,_>>();

        // Cloned so we can borrow `ak_files` mutably later while iterating these.
        let ak_tables = match ak_files {
            Some(ref tables) => (**tables).clone(),
            None => HashMap::new(),
        };

        // This is to fix bruteforcing not working on tables like campaigns.
        let local_files: Vec<_> = match local_packs {
            Some(packs) => packs.values()
                .flat_map(|pack| pack.files_by_type(&[FileType::DB]))
                .filter_map(|x| match x.decoded() {
                    Ok(RFileDecoded::DB(db)) => Some(db),
                    _ => None,
                })
                .collect(),
            None => Vec::new(),
        };

        // Get all the tables so we don't need to re-fetch each table individually.
        let mut db_tables = if ak_files.is_some() {
            ak_tables.values().collect::<Vec<_>>()
        } else {
            self.db_and_loc_data(true, false, true, false)?
                .iter()
                .filter_map(|file| if let Ok(RFileDecoded::DB(table)) = file.decoded() { Some(table) } else { None })
                .collect::<Vec<_>>()
        };

        db_tables.extend_from_slice(&local_files);

        // Merge tables of the same name and version, so we got more chances of loc data being found.
        let mut db_tables_dedup: Vec<DB> = vec![];
        for table in &db_tables {
            match db_tables_dedup.iter_mut().find(|x| x.table_name() == table.table_name() && x.definition().version() == table.definition().version()) {
                Some(db_source) => *db_source = DB::merge(&[db_source, table])?,
                None => db_tables_dedup.push((*table).clone()),
            }
        }

        for table in &db_tables_dedup {
            let definition = table.definition();
            let mut loc_fields = definition.localised_fields().to_vec();

            // We assume the fields that came with the table are correct, as they probably come from the normal procedure to get these.
            //let mut loc_fields_final: Vec<Field> = vec![];
            let mut loc_fields_final = loc_fields.to_vec();

            // If we received possible loc info, add the one we received.
            if let Some(ref loc_fields_info) = locs {
                loc_fields.clear();

                if let Some(loc_names) = loc_fields_info.get(&table.table_name_without_tables()) {
                    for name in loc_names {
                        if loc_fields.iter().all(|x| x.name() != name) {

                            // Candidate loc fields are modelled as plain StringU8 fields.
                            let mut field = Field::default();
                            field.set_name(name.to_string());
                            field.set_field_type(FieldType::StringU8);

                            loc_fields.push(field);
                        }
                    }
                }
            }

            // Key columns (with their positions) — these are the candidates for the loc key order.
            let fields = definition.fields_processed();
            let key_fields = fields.iter()
                .enumerate()
                .filter(|(_, field)| field.is_key(None))
                .collect::<Vec<_>>();

            // Check which fields from the missing field list are actually loc fields.
            let short_table_name = table.table_name_without_tables();
            for localised_field in &loc_fields {
                let localised_key = format!("{}_{}_", short_table_name, localised_field.name());

                // Note: the second check is to avoid a weird bug I'm still not sure why it happens where loc fields get duplicated.
                if loc_table.keys().any(|x| x.starts_with(&localised_key)) && loc_fields_final.iter().all(|x| x.name() != localised_field.name()) {
                    loc_fields_final.push(localised_field.clone());
                }
            }

            // Some fields fail the previous check because the table contains a field with the same name. So we must repeat it with the table fields.
            // There is a weird corner case here where a localised field may start like the name of another table field. We need to avoid that.
            for table_field in &fields {
                if loc_fields_final.iter().all(|x| !x.name().starts_with(table_field.name())) {
                    let localised_key = format!("{}_{}_", short_table_name, table_field.name());
                    if loc_table.keys().any(|x| x.starts_with(&localised_key)) && loc_fields_final.iter().all(|x| x.name() != table_field.name()) {
                        loc_fields_final.push(table_field.clone());
                    }
                }
            }

            // Record the candidates that didn't make it into the final list, for the report at the end.
            for loc_field in &loc_fields {
                if loc_fields_final.iter().all(|x| x.name() != loc_field.name()) {
                    fields_still_not_found.push(format!("{}/{}", table.table_name_without_tables(), loc_field.name()));
                }
            }

            // Save the loc fields.
            if let Some(ak_files) = &mut ak_files {
                let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                let mut definition = ak_table.definition().clone();
                definition.set_localised_fields(loc_fields_final.to_vec());
                ak_table.set_definition(&definition);

            } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                schema_definition.set_localised_fields(loc_fields_final.to_vec());
            }

            // If after updating the loc data we have loc fields, try to find the key order for them.
            if !loc_fields_final.is_empty() {

                // If we only have one key field, don't bother searching.
                let order = if key_fields.len() == 1 {
                    vec![key_fields[0].0 as u32]
                }

                // If we have multiple key fields, we need to test for combinations.
                else {
                    let mut order = Vec::with_capacity(key_fields.len());
                    let combos = key_fields.iter().permutations(key_fields.len());
                    let table_data = table.data();
                    for combo in combos {

                        // Many multikeyed tables admit empty values as part of the key. We need rows with no empty values.
                        // NOTE: While we just need one line to get the order, we check every line to avoid wrong orders due to first line sharing fields.
                        let mut combo_is_valid = true;
                        for row in table_data.iter() {
                            //for (index, _) in &combo {
                            //    if row[*index].data_to_string().is_empty() {
                            //        fail_due_to_empty_keys_in_combos = true;
                            //        //break;
                            //    }
                            //}

                            // Concatenate the row's key values in this permutation's order.
                            let mut combined_key = String::new();
                            for (index, _) in &combo {
                                combined_key.push_str(&row[*index].data_to_string());
                            }

                            // The permutation is valid only if EVERY loc field resolves to a known loc key for EVERY row.
                            for localised_field in &loc_fields_final {
                                let localised_key = format!("{}_{}_{}", short_table_name, localised_field.name(), combined_key);
                                match loc_table.get(&localised_key) {
                                    Some(_) => {
                                        if order.is_empty() {
                                            order = combo.iter().map(|(index, _)| *index as u32).collect();
                                        }
                                    }
                                    None => {
                                        combo_is_valid = false;
                                        break;
                                    }
                                }
                            }

                            // If the combo was not valid for a loc field on a line, stop.
                            if !combo_is_valid {
                                break;
                            }
                        }

                        // If the combo is not valid, reset the order and try the next one.
                        if !combo_is_valid {
                            order = vec![];
                            continue;
                        }

                        if !order.is_empty() {
                            break;
                        }
                    }

                    order
                };

                // NOTE(review): `loc_fields_final` is known non-empty here (outer check above), so the
                // second condition is always true and the `loc_fields_final.is_empty()` cleanup inside
                // the else branch below is unreachable — confirm whether that cleanup was meant to run
                // on empty `order` instead.
                if !order.is_empty() && !loc_fields_final.is_empty() {
                    info!("Bruteforce: loc key order found for table {}, version {}.", table.table_name(), definition.version());
                    if let Some(ak_files) = &mut ak_files {
                        let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                        let mut definition = ak_table.definition().clone();
                        definition.set_localised_key_order(order);
                        ak_table.set_definition(&definition);
                    } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                        schema_definition.set_localised_key_order(order);
                    }
                } else {
                    // NOTE(review): this branch runs when NO valid order was found, but the message says
                    // "found" — looks like a copy-paste of the message above; confirm intended wording.
                    info!("Bruteforce: loc key order found (but may be incorrect) for table {}, version {}.", table.table_name(), definition.version());

                    // If we don't have locs, make sure to delete any order we had.
                    if loc_fields_final.is_empty() {
                        if let Some(ak_files) = &mut ak_files {
                            let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                            let mut definition = ak_table.definition().clone();
                            definition.set_localised_key_order(vec![]);
                            ak_table.set_definition(&definition);
                        } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                            schema_definition.set_localised_key_order(vec![]);
                        }
                    }
                }
            }

            // Make sure to cleanup any past mess here.
            else if let Some(ak_files) = &mut ak_files {
                let ak_table = ak_files.get_mut(table.table_name()).unwrap();
                let mut definition = ak_table.definition().clone();
                definition.set_localised_key_order(vec![]);
                ak_table.set_definition(&definition);
            } else if let Some(schema_definition) = schema.definition_by_name_and_version_mut(table.table_name(), *definition.version()) {
                schema_definition.set_localised_key_order(vec![]);
            }
        }

        // Dedup this list, because if the game had multiple table files, we'll get duplicated fields.
        fields_still_not_found.sort();
        fields_still_not_found.dedup();
        info!("Bruteforce: fields still not found :{fields_still_not_found:#?}");

        // Once everything is done, run a check on the loc keys to see if any of them still doesn't match any table/field combo.
        // This will fail if called on cache generation. Only execute it when updating the schema.
        if ak_files.is_none() {
            for key in loc_table.keys().sorted() {
                if self.loc_key_source(key).is_none() {
                    info!("-- Bruteforce: cannot find source for loc key {key}.");
                }
            }
        }

        Ok(())
    }
2516
2517    /// This function generates automatic schema patches based mainly on bruteforcing and some clever logic.
2518    #[allow(clippy::if_same_then_else)]
2519    pub fn generate_automatic_patches(&self, schema: &mut Schema, packs: &BTreeMap<String, Pack>) -> Result<()> {
2520        let mut db_tables = self.db_and_loc_data(true, false, true, false)?
2521            .iter()
2522            .filter_map(|file| if let Ok(RFileDecoded::DB(table)) = file.decoded() { Some(table) } else { None })
2523            .collect::<Vec<_>>();
2524
2525        for pack in packs.values() {
2526            db_tables.extend_from_slice(&pack.files_by_type(&[FileType::DB])
2527                .iter()
2528                .filter_map(|x| if let Ok(RFileDecoded::DB(db)) = x.decoded() {
2529                    Some(db)
2530                } else {
2531                    None
2532                })
2533                .collect::<Vec<_>>()
2534            );
2535        }
2536
2537        let current_patches = schema.patches_mut();
2538        let mut new_patches: HashMap<String, DefinitionPatch> = HashMap::new();
2539
2540        // Cache all image and video paths.
2541        let image_paths = self.vanilla_files()
2542            .keys()
2543            .filter(|x| x.ends_with(".png") || x.ends_with(".tga"))
2544            .collect::<Vec<_>>();
2545
2546        let video_paths = self.vanilla_files()
2547            .keys()
2548            .filter(|x| x.ends_with(".ca_vp8"))
2549            .collect::<Vec<_>>();
2550
2551        for table in &db_tables {
2552            let definition = table.definition();
2553            let fields = definition.fields_processed();
2554            for (column, field) in fields.iter().enumerate() {
2555                match field.field_type() {
2556                    FieldType::StringU8 |
2557                    FieldType::StringU16 |
2558                    FieldType::OptionalStringU8 |
2559                    FieldType::OptionalStringU16 => {
2560
2561                        // Icons can be found by:
2562                        // - Checking if the data contains ".png" or ".tga".
2563                        // - Checking if the data contains "Icon" or "Image" in the name.
2564                        //
2565                        // Note that if the field contains incomplete/relative paths, this will guess and try to find unique files that match the path.
2566                        let mut possible_icon = false;
2567                        let low_name = field.name().to_lowercase();
2568                        if (low_name.contains("icon") || low_name.contains("image")) &&
2569
2570                            // Attila. This doesn't match with anything that makes sense.
2571                            !(table.table_name() == "building_sets_tables" && field.name() == "icon") &&
2572
2573                            // This really should be called category. It's wrong in the ak.
2574                            !(table.table_name() == "character_traits_tables" && field.name() == "icon") {
2575                            possible_icon = true;
2576                        }
2577
2578                        // Use hashset for uniqueness and ram usage.
2579                        let mut possible_relative_paths = table.data().par_iter()
2580                            .filter_map(|row| {
2581
2582                                // Only check fields that are not already marked, or are marked but without path (like override_icon in incidents).
2583                                if !field.is_filename(None) || (
2584                                        field.is_filename(None) && (
2585                                            field.filename_relative_path(None).is_none() ||
2586                                            field.filename_relative_path(None).unwrap().is_empty()
2587                                        )
2588                                    ) || (
2589
2590                                        // This table has an incorrect path by default.
2591                                        (table.table_name() == "advisors_tables" && field.name() == "advisor_icon_path") ||
2592
2593                                        // This one is missing subpaths.
2594                                        (table.table_name() == "campaign_post_battle_captive_options_tables" && field.name() == "icon_path") ||
2595
2596                                        // This one for some reason points to "working_data" and has no replacement bit.
2597                                        (table.table_name() == "narrative_viewer_tabs_tables" && field.name() == "image_path") ||
2598
2599                                        // This one has a path missing the replacement bits.
2600                                        (table.table_name() == "technology_ui_groups_tables" && field.name() == "optional_background_image")
2601                                    ) {
2602
2603                                    // These checks filter out certain problematic cell values:
2604                                    // - .: means empty in some image fields.
2605                                    // - x: means empty in some image fields.
2606                                    // - placeholder: because it's in multiple places and generates false positives.
2607                                    let mut data = row[column].data_to_string().to_lowercase().replace("\\", "/");
2608
2609                                    // Fix formatting for cells which start with / or \\.
2610                                    if data.starts_with("/") {
2611                                        if data.len() > 1 {
2612                                            data = data[1..].to_owned();
2613                                        } else {
2614                                            data = String::new();
2615                                        }
2616                                    }
2617
2618                                    if !data.is_empty() && !data.ends_with("/") &&
2619                                        data != "." &&
2620                                        data != "x" &&
2621                                        data != "false" &&
2622                                        data != "building_placeholder" &&
2623                                        data != "placehoder.png" &&
2624                                        data != "placeholder" &&
2625                                        data != "placeholder.tga" &&
2626                                        data != "placeholder.png" && (
2627                                            possible_icon ||
2628                                            data.ends_with(".png") || data.ends_with(".tga")
2629                                        ) {
2630
2631                                        let possible_paths = image_paths.iter()
2632
2633                                            // Manual filters for some fields that are known to trigger hard-to-fix false positives.
2634                                            .filter(|x| {
2635                                                if table.table_name() == "aide_de_camp_speeches_tables" && field.name() == "icon_name" {
2636                                                    x.starts_with("ui/battle ui/adc_icons/")
2637                                                } else if table.table_name() == "agent_string_subculture_overrides_tables" && field.name() == "icon_path" {
2638                                                    x.starts_with("ui/campaign ui/agents/icons/")
2639                                                } else if table.table_name() == "ancillary_types_tables" && field.name() == "ui_icon" {
2640                                                    x.starts_with("ui/portraits/ancillaries/")
2641                                                } else if table.table_name() == "battlefield_building_categories_tables" && field.name() == "icon_path" {
2642                                                    x.starts_with("ui/battle ui/building icons/")
2643                                                } else if table.table_name() == "bonus_value_uis_tables" && field.name() == "icon" {
2644                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2645                                                } else if table.table_name() == "building_culture_variants_tables" && field.name() == "icon" {
2646                                                    x.starts_with("ui/buildings/icons/")
2647                                                } else if table.table_name() == "campaign_payload_ui_details_tables" && field.name() == "icon" {
2648                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2649                                                } else if table.table_name() == "campaign_post_battle_captive_options_tables" && field.name() == "icon_path" {
2650                                                    x.starts_with("ui/campaign ui/captive_option_icons/")
2651                                                } else if table.table_name() == "capture_point_types_tables" && field.name() == "icon_name" {
2652                                                    x.starts_with("ui/battle ui/capture_point_icons/")
2653                                                } else if table.table_name() == "character_skills_tables" && field.name() == "image_path" {
2654                                                    x.starts_with("ui/campaign ui/skills/")
2655                                                } else if table.table_name() == "character_traits_tables" && field.name() == "icon_custom" {
2656                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2657
2658                                                // This is to fix issues with incomplete cursor paths.
2659                                                } else if table.table_name() == "cursors_tables" && field.name() == "image" {
2660                                                    !x.starts_with(&(data.to_owned() + "_"))
2661                                                } else if table.table_name() == "dilemmas_tables" && field.name() == "ui_image" {
2662                                                    x.starts_with("ui/eventpics/")
2663                                                } else if table.table_name() == "effect_bundles_tables" && field.name() == "ui_icon" {
2664                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2665                                                } else if table.table_name() == "effects_tables" && (field.name() == "icon" || field.name() == "icon_negative") {
2666                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2667                                                } else if table.table_name() == "faction_groups_tables" && field.name() == "ui_icon" {
2668                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2669                                                } else if table.table_name() == "incidents_tables" && field.name() == "ui_image" {
2670                                                    x.starts_with("ui/eventpics/")
2671                                                } else if table.table_name() == "message_event_strings_tables" && field.name() == "image" {
2672                                                    x.starts_with("ui/eventpics/")
2673                                                } else if table.table_name() == "missions_tables" && field.name() == "ui_icon" {
2674                                                    x.starts_with("ui/campaign ui/message_icons/")
2675
2676                                                // This is to fix false positives in sequencial missions in Pharaoh.
2677                                                } else if table.table_name() == "missions_tables" && field.name() == "ui_image" {
2678                                                    x.starts_with("ui/eventpics/") && x.ends_with(&(data.to_owned() + ".png"))
2679                                                } else if table.table_name() == "pooled_resources_tables" && field.name() == "optional_icon_path" {
2680                                                    x.starts_with("ui/skins/")
2681                                                } else if table.table_name() == "projectile_shot_type_enum_tables" && field.name() == "icon_name" {
2682                                                    x.starts_with("ui/battle ui/ability_icons/")
2683                                                } else if table.table_name() == "religions_tables" && field.name() == "ui_icon_path" {
2684                                                    x.starts_with("ui/campaign ui/religion_icons/")
2685                                                } else if table.table_name() == "special_ability_phases_tables" && field.name() == "ticker_icon" {
2686                                                    x.starts_with("ui/battle ui/ability_icons/")
2687                                                } else if table.table_name() == "technologies_tables" && field.name() == "icon_name" {
2688                                                    x.starts_with("ui/campaign ui/technologies/")
2689                                                } else if table.table_name() == "technologies_tables" && field.name() == "info_pic" {
2690                                                    x.starts_with("ui/eventpics/")
2691                                                } else if table.table_name() == "trait_categories_tables" && field.name() == "icon_path" {
2692                                                    x.starts_with("ui/campaign ui/effect_bundles/")
2693                                                } else if table.table_name() == "ui_unit_groupings_tables" && field.name() == "icon" {
2694                                                    x.starts_with("ui/common ui/unit_category_icons/")
2695                                                } else if table.table_name() == "victory_types_tables" && field.name() == "icon" {
2696                                                    x.starts_with("ui/campaign ui/victory_type_icons/")
2697
2698                                                // For some reason, some brilliant mind at CA decided to end a video name with ".png". So we need to filter this here.
2699                                                } else if table.table_name() == "videos_tables" && field.name() == "video_name" {
2700                                                    x.starts_with("movies/")
2701                                                } else {
2702                                                    true
2703                                                }
2704                                            })
2705
2706                                            // This filter is for reducing false positives in these cases:
2707                                            // - "default" or generic data.
2708                                            // - "x" value for invalid paths
2709                                            // - Entries that end in "_", which is used for some button path entries.
2710                                            .filter(|x| if !data.ends_with('_') {
2711                                                if !data.contains("/") {
2712                                                    if !data.contains('.') {
2713                                                        x.contains(&("/".to_owned() + &data + "."))
2714                                                    } else {
2715                                                        x.contains(&("/".to_owned() + &data))
2716                                                    }
2717                                                } else {
2718                                                    x.contains(&data)
2719                                                }
2720                                            } else {
2721                                                false
2722                                            })
2723
2724                                            // Replace only the last instance, to avoid weird folder-replacing bugs.
2725                                            .filter_map(|x| x.rfind(&data).map(|pos| (x, pos)))
2726                                            .map(|(x, pos)| x[..pos].to_owned() + &x[pos..].replacen(&data, "%", 1))
2727                                            .collect::<Vec<_>>();
2728
2729
2730                                        if !possible_paths.is_empty() {
2731                                            return Some(possible_paths)
2732                                        }
2733                                    }
2734                                }
2735
2736                                None
2737                            })
2738                            .flatten()
2739                            .collect::<HashSet<String>>();
2740
2741                        // Video files can be found by:
2742                        // - Checking if the data contains ".ca_vp8".
2743                        // - Checking if the data contains "video" in the name.
2744                        //
2745                        // Note that if the field contains incomplete/relative paths, this will guess and try to find unique files that match the path.
2746                        let mut possible_video = false;
2747                        if low_name.contains("video") {
2748                            possible_video = true;
2749                        }
2750
2751                        possible_relative_paths.extend(
2752                            table.data().par_iter().filter_map(|row| {
2753
2754                                // Only check fields that are not already marked, or are marked but without path (like override_icon in incidents).
2755                                if !field.is_filename(None) || (
2756                                        field.is_filename(None) && (
2757                                            field.filename_relative_path(None).is_none() ||
2758                                            field.filename_relative_path(None).unwrap().is_empty()
2759                                        )
2760                                    ) || (
2761
2762                                        // This table is missing the subpaths (which are valid) by default.
2763                                        table.table_name() == "videos_tables" && field.name() == "video_name"
2764                                    ) {
2765
2766                                    let mut data = row[column].data_to_string().to_lowercase().replace("\\", "/");
2767
2768                                    // Fix formatting for cells which start with / or \\.
2769                                    if data.starts_with("/") {
2770                                        if data.len() > 1 {
2771                                            data = data[1..].to_owned();
2772                                        } else {
2773                                            data = String::new();
2774                                        }
2775                                    }
2776
2777                                    if !data.is_empty() && (
2778                                            possible_video ||
2779                                            data.ends_with(".ca_vp8")
2780                                        ) {
2781
2782                                        let possible_paths = video_paths.iter()
2783                                            .filter(|x| {
2784                                                if table.table_name() == "videos_tables" && field.name() == "video_name" {
2785                                                    x.starts_with("movies/")
2786                                                } else {
2787                                                    true
2788                                                }
2789                                            })
2790                                            // This filter is for reducing false positives in these cases:
2791                                            // - "%_something", which is used for sequential videos.
2792                                            // - Faction-specific videos.
2793                                            .filter(|x| if !data.contains('.') {
2794                                                    x.contains(&("/".to_owned() + &data + "."))
2795                                                } else {
2796                                                    x.contains(&("/".to_owned() + &data))
2797                                                })
2798
2799                                            // Replace only the last instance, to avoid weird folder-replacing bugs.
2800                                            .filter_map(|x| x.rfind(&data).map(|pos| (x, pos)))
2801                                            .map(|(x, pos)| x[..pos].to_owned() + &x[pos..].replacen(&data, "%", 1))
2802                                            .collect::<Vec<_>>();
2803
2804
2805                                        if !possible_paths.is_empty() {
2806                                            return Some(possible_paths)
2807                                        }
2808                                    }
2809                                }
2810
2811                                None
2812                            })
2813                            .flatten()
2814                            .collect::<HashSet<String>>()
2815                        );
2816
2817                        // Debug message.
2818                        if !possible_relative_paths.is_empty() && (possible_relative_paths.len() > 1 || (possible_relative_paths.len() == 1 && possible_relative_paths.iter().collect::<Vec<_>>()[0] != "%")) {
2819                            info!("Checking table {}, field {} ...", table.table_name(), field.name());
2820                            dbg!(&possible_relative_paths);
2821                        }
2822
2823                        // This one has an incorrect relative path value that needs to be patched out.
2824                        //
2825                        // This is due to we assigning a name to this column which matches a different column in the AK.
2826                        if (table.table_name() == "models_building_tables" && field.name() == "logic_file") ||
2827                            (table.table_name() == "models_sieges_tables" && (field.name() == "model_file" || field.name() == "logic_file" || field.name() == "collision_file")) ||
2828                            (table.table_name() == "models_deployables_tables" && (field.name() == "model_file" || field.name() == "logic_file" || field.name() == "collision_file")) {
2829                            possible_relative_paths.clear();
2830                            possible_relative_paths.insert("%".to_owned());
2831                        }
2832
2833                        // These columns have incomplete paths or are incorrectly marked as files. Do not treat them as file paths.
2834                        if (table.table_name() == "ui_mercenary_recruitment_infos_tables" && field.name() == "hire_button_icon_path") ||
2835                            (table.table_name() == "battles_tables" && (field.name() == "specification" || field.name() == "battle_environment_audio")) ||
2836                            (table.table_name() == "factions_tables" && field.name() == "key") ||
2837                            (table.table_name() == "frontend_faction_leaders_tables" && field.name() == "key") {
2838                            let mut patch = HashMap::new();
2839                            patch.insert("is_filename".to_owned(), "false".to_owned());
2840
2841                            match new_patches.get_mut(table.table_name()) {
2842                                Some(patches) => match patches.get_mut(field.name()) {
2843                                    Some(patches) => patches.extend(patch),
2844                                    None => { patches.insert(field.name().to_owned(), patch); }
2845                                },
2846                                None => {
2847                                    let mut table_patch = HashMap::new();
2848                                    table_patch.insert(field.name().to_owned(), patch);
2849                                    new_patches.insert(table.table_name().to_string(), table_patch);
2850                                }
2851                            }
2852                        }
2853
2854                        // Only make patches for fields we manage to pinpoint to a file.
2855                        if !possible_relative_paths.is_empty() {
2856                            let mut possible_relative_paths = possible_relative_paths.iter().collect::<Vec<_>>();
2857                            possible_relative_paths.sort();
2858
2859                            let mut patch = HashMap::new();
2860                            if !field.is_filename(None) {
2861                                patch.insert("is_filename".to_owned(), "true".to_owned());
2862                            }
2863
2864                            // Only add paths if we're not dealing with single paths with full replacement, or we're force-replacing a path (advisors table).
2865                            if possible_relative_paths.len() > 1 || (
2866                                (
2867                                    possible_relative_paths.len() == 1 &&
2868                                    possible_relative_paths[0].contains('%') &&
2869                                    possible_relative_paths[0] != "%"
2870                                ) || (
2871                                    possible_relative_paths[0] == "%" &&
2872                                    field.filename_relative_path(None).is_some() &&
2873                                    !field.filename_relative_path(None).unwrap().is_empty()
2874                                )
2875                            ) {
2876                                patch.insert("filename_relative_path".to_owned(), possible_relative_paths.into_iter().join(";"));
2877                            }
2878
2879                            // Do not bother with empty patches.
2880                            if !patch.is_empty() {
2881                                match new_patches.get_mut(table.table_name()) {
2882                                    Some(patches) => match patches.get_mut(field.name()) {
2883                                        Some(patches) => patches.extend(patch),
2884                                        None => { patches.insert(field.name().to_owned(), patch); }
2885                                    },
2886                                    None => {
2887                                        let mut table_patch = HashMap::new();
2888                                        table_patch.insert(field.name().to_owned(), patch);
2889                                        new_patches.insert(table.table_name().to_string(), table_patch);
2890                                    }
2891                                }
2892                            }
2893                        }
2894                        /*
2895                        if (low_name == "key" || low_name == "id") && table.data().par_iter().all(|x| x[column].data_to_string().parse::<i32>().is_ok()) {
2896                            let mut patch = HashMap::new();
2897                            patch.insert("is_numeric".to_owned(), "true".to_owned());
2898
2899                            match new_patches.get_mut(table.table_name()) {
2900                                Some(patches) => match patches.get_mut(field.name()) {
2901                                    Some(patches) => patches.extend(patch),
2902                                    None => { patches.insert(field.name().to_owned(), patch); }
2903                                },
2904                                None => {
2905                                    let mut table_patch = HashMap::new();
2906                                    table_patch.insert(field.name().to_owned(), patch);
2907                                    new_patches.insert(table.table_name().to_string(), table_patch);
2908                                }
2909                            }
2910                        }*/
2911                    }
2912                    FieldType::I64 |
2913                    FieldType::OptionalI64 => {
2914                        /*let low_name = field.name().to_lowercase();
2915                        if (low_name == "key" || low_name == "id") && table.data().par_iter().all(|x| x[column].data_to_string().parse::<i32>().is_ok()) {
2916                            let mut patch = HashMap::new();
2917                            patch.insert("is_numeric".to_owned(), "true".to_owned());
2918
2919                            match new_patches.get_mut(table.table_name()) {
2920                                Some(patches) => match patches.get_mut(field.name()) {
2921                                    Some(patches) => patches.extend(patch),
2922                                    None => { patches.insert(field.name().to_owned(), patch); }
2923                                },
2924                                None => {
2925                                    let mut table_patch = HashMap::new();
2926                                    table_patch.insert(field.name().to_owned(), patch);
2927                                    new_patches.insert(table.table_name().to_string(), table_patch);
2928                                }
2929                            }
2930                        }*/
2931                    }
2932                    _ => continue
2933                }
2934            }
2935        }
2936
2937        Schema::add_patches_to_patch_set(current_patches, &new_patches);
2938
2939        Ok(())
2940    }
2941
2942    /// Function to add tiles and tile maps to the provided pack.
2943    ///
2944    /// Only for Warhammer 3.
2945    pub fn add_tile_maps_and_tiles(&mut self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, schema: &Schema, options: OptimizerOptions, tile_maps: Vec<PathBuf>, tiles: Vec<(PathBuf, String)>) -> Result<(Vec<ContainerPath>, Vec<ContainerPath>)> {
2946        let mut added_paths = vec![];
2947
2948        // Use the provided key, or fall back to the first pack in the map.
2949        let pack = match pack_key {
2950            Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
2951            None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
2952        };
2953
2954        // Tile Maps are from assembly_kit/working_data/terrain/battles/.
2955        for tile_map in &tile_maps {
2956            added_paths.append(&mut pack.insert_folder(tile_map, "terrain/battles", &None, &None, true)?);
2957        }
2958
2959        // Tiles are from assembly_kit/working_data/terrain/tiles/battle/, and can be in a subfolder if they're part of a tileset.
2960        for (tile, subpath) in &tiles {
2961
2962            let (internal_path, needs_tile_database) = if subpath.is_empty() {
2963                ("terrain/tiles/battle".to_owned(), false)
2964            } else {
2965                (format!("terrain/tiles/battle/{}", subpath.replace('\\', "/")), true)
2966            };
2967            added_paths.append(&mut pack.insert_folder(tile, &internal_path, &None, &None, true)?);
2968
2969            // If it's part of a tile set, we need to add the relevant tile database file for the tileset or the map will load as blank ingame.
2970            if needs_tile_database {
2971
2972                // We only need the database for out map, not the full database folder.
2973                let subpath_len = subpath.replace('\\', "/").split('/').count();
2974                let mut tile_database = tile.to_path_buf();
2975
2976                (0..=subpath_len).for_each(|_| {
2977                    tile_database.pop();
2978                });
2979
2980                let file_name = format!("{}_{}.bin", subpath.replace('/', "_"), tile.file_name().unwrap().to_string_lossy());
2981                tile_database.push(format!("_tile_database/TILES/{file_name}"));
2982                let tile_database_path = format!("terrain/tiles/battle/_tile_database/TILES/{file_name}");
2983
2984                added_paths.push(pack.insert_file(&tile_database, &tile_database_path, &None)?.unwrap());
2985            }
2986        }
2987
2988        let (paths_to_delete, paths_to_add) = pack.optimize(Some(added_paths.clone()), self, schema, game, &options)?;
2989
2990        let paths_to_delete = paths_to_delete.iter()
2991            .map(|path| ContainerPath::File(path.to_string()))
2992            .collect::<Vec<_>>();
2993
2994        added_paths.extend(paths_to_add.into_iter()
2995            .map(|path| ContainerPath::File(path.to_string()))
2996            .collect::<Vec<_>>());
2997
2998        Ok((added_paths, paths_to_delete))
2999    }
3000
    /// Function to trigger a startpos build.
    ///
    /// This is the first half of the process: it prepares the environment (user script,
    /// victory objectives, backups of any vanilla files the build would overwrite) and
    /// then launches the game so it generates the startpos data.
    ///
    /// After this ends (meaning, once the game has closed), remember to call the post one!
    ///
    /// Arguments:
    /// - `packs`: available Packs. The one matching `pack_key` (or the first one, if no key is
    ///   provided) is the Pack the startpos is built for. It must be saved inside the game's /data folder.
    /// - `campaign_id`: key of the campaign to build the startpos for. Must not be empty.
    /// - `process_hlp_spd_data`: if true, the game is also told to generate the hlp/spd campaign AI map data.
    /// - `sub_start_pos`: sub-startpos key, for games with multiple startpos per campaign (3K). Empty if unused.
    pub fn build_starpos_pre(&self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, game_path: &Path, campaign_id: &str, process_hlp_spd_data: bool, sub_start_pos: &str) -> Result<()> {

        // Pre-fetch data we need before taking mutable borrows.
        //
        // This maps campaign keys to map names, needed later to locate the hlp/spd files on disk.
        let map_names = if process_hlp_spd_data {
            self.db_values_from_table_name_and_column_name_for_value(Some(packs), "campaigns_tables", "campaign_name", "map_name", true, true)
        } else {
            HashMap::new()
        };

        // Use the provided key, or fall back to the first pack in the map.
        let pack_file = match pack_key {
            Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
            None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
        };

        // The user script references the Pack by its on-disk file name, so the Pack must exist on disk.
        let pack_name = pack_file.disk_file_name();
        if pack_name.is_empty() {
            return Err(RLibError::BuildStartposError("The Pack needs to be saved to disk in order to build a startpos. Save it and try again.".to_owned()));
        }

        if campaign_id.is_empty() {
            return Err(RLibError::BuildStartposError("campaign_id not provided.".to_owned()));
        }

        // Extra user-script command to also generate the hlp/spd data, if requested.
        let process_hlp_spd_data_string = if process_hlp_spd_data {
            String::from("process_campaign_ai_map_data;")
        } else {
            String::new()
        };

        // Note: 3K uses 2 passes per campaign, each one with a different startpos, but both share the hlp/spd process, so that only needs to be generated once.
        // Also, extra folders is to fix a bug in Rome 2, Attila and possibly Thrones where objectives are not processed if certain folders are missing.
        let extra_folders = "add_working_directory assembly_kit\\working_data;";
        let mut user_script_contents = if game.key() == KEY_ATTILA || game.key() == KEY_THRONES_OF_BRITANNIA { extra_folders.to_owned() } else { String::new() };

        user_script_contents.push_str(&format!("
    mod {pack_name};
    process_campaign_startpos {campaign_id} {sub_start_pos};
    {process_hlp_spd_data_string}
    quit_after_campaign_processing;"
        ));

        // Games may fail to launch if we don't have this path created, which is done the first time we start the game.
        //
        // NOTE(review): the check below validates `game_path` itself, while the comment above refers
        // to the data path derived from it; `data_path` already failed with `?` if unresolvable.
        // Confirm whether the intent was to check `game_data_path` instead.
        let game_data_path = game.data_path(game_path)?;
        if !game_path.is_dir() {
            return Err(RLibError::BuildStartposError("Game path incorrect. Fix it in the settings and try again.".to_owned()));
        }

        // The game only picks up the Pack referenced in the user script if it lives in /data.
        if !PathBuf::from(pack_file.disk_file_path()).starts_with(&game_data_path) {
            return Err(RLibError::BuildStartposError("The Pack needs to be in /data. Install it there and try again.".to_owned()));
        }

        // We need to extract the victory_objectives.txt file to "data/campaign_id/". Warhammer 3 doesn't use this file.
        if GAMES_NEEDING_VICTORY_OBJECTIVES.contains(&game.key()) {
            let mut game_campaign_path = game_data_path.to_path_buf();
            game_campaign_path.push(campaign_id);
            DirBuilder::new().recursive(true).create(&game_campaign_path)?;

            game_campaign_path.push(VICTORY_OBJECTIVES_EXTRACTED_FILE_NAME);
            pack_file.extract(ContainerPath::File(VICTORY_OBJECTIVES_FILE_NAME.to_owned()), &game_campaign_path, false, &None, true, false, &None, true)?;
        }

        // The user script lives under the game's config folder; make sure the scripts folder exists.
        let config_path = game.config_path(game_path).ok_or(RLibError::BuildStartposError("Error getting the game's config path.".to_owned()))?;
        let scripts_path = config_path.join("scripts");
        DirBuilder::new().recursive(true).create(&scripts_path)?;

        // Rome 2 is bugged when generating startpos using the userscript. We need to pass it to the game through args in a cmd terminal instead of by file.
        //
        // So don't do any userscript change for Rome 2.
        if game.key() != KEY_ROME_2 {

            // Make a backup before editing the script, so we can restore it later.
            let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
            let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");

            if uspa.is_file() {
                std::fs::copy(&uspa, uspb)?;
            }

            let mut file = BufWriter::new(File::create(uspa)?);

            // Napoleon, Empire and Shogun 2 require the user.script.txt or mod list file (for Shogun's latest update) to be in UTF-16 LE. What the actual fuck.
            if *game.raw_db_version() < 2 {
                file.write_string_u16(&user_script_contents)?;
            } else {
                file.write_all(user_script_contents.as_bytes())?;
            }

            file.flush()?;
        }

        // Due to how the starpos is generated, if we generate it on vanilla campaigns it'll overwrite existing files if it's generated on /data.
        // So we must backup the vanilla files, then restore them after.
        //
        // Only needed from Warhammer 1 onwards, and in Rome 2 due to how it's generated there.
        if game.key() != KEY_THRONES_OF_BRITANNIA &&
            game.key() != KEY_ATTILA &&
            game.key() != KEY_SHOGUN_2 {

            let sub_start_pos_suffix = if sub_start_pos.is_empty() {
                String::new()
            } else {
                format!("_{sub_start_pos}")
            };

            // Back up (and remove) any existing startpos, so the newly generated one can be told apart.
            let starpos_path = game_data_path.join(format!("campaigns/{campaign_id}/startpos{sub_start_pos_suffix}.esf"));
            if starpos_path.is_file() {
                let starpos_path_bak = game_data_path.join(format!("campaigns/{campaign_id}/startpos{sub_start_pos_suffix}.esf.bak"));
                std::fs::copy(&starpos_path, starpos_path_bak)?;
                std::fs::remove_file(starpos_path)?;
            }
        }

        // Same for the other two files, if we're generating them. We need to get the campaign name from the campaigns table first, then get the files generated.
        if process_hlp_spd_data {
            if let Some(map_name) = map_names.get(campaign_id) {
                match game.key() {

                    // For generating the hlp data, from Warhammer 1 onwards the game outputs it to /data, which may not exist and may conflict with existing files.
                    //
                    // Create the folder just in case, and back up any file found.
                    KEY_PHARAOH_DYNASTIES |
                    KEY_PHARAOH |
                    KEY_WARHAMMER_3 |
                    KEY_TROY |
                    KEY_THREE_KINGDOMS |
                    KEY_WARHAMMER_2 |
                    KEY_WARHAMMER => {
                        let hlp_folder_path = game_data_path.join(format!("campaign_maps/{map_name}"));
                        if !hlp_folder_path.is_dir() {
                            DirBuilder::new().recursive(true).create(&hlp_folder_path)?;
                        }

                        let hlp_path = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf"));
                        if hlp_path.is_file() {
                            let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
                            std::fs::copy(&hlp_path, hlp_path_bak)?;
                            std::fs::remove_file(hlp_path)?;
                        }
                    },

                    // For Thrones and Attila is more tricky, because the game itself is bugged when processing this file.
                    //
                    // It's generated in the game's config folder, but we need to manually keep recreating the folder for a while because the game deletes it
                    // in the middle of the process and causes an error when trying to write the file. The way we do it is with a background thread
                    // that keeps recreating it every 100ms if it ever detects it's gone.
                    //
                    // Keep in mind this thread is kept alive for as long as the program runs unless it's intentionally stopped. So remember to stop it
                    // (the post function does this by sending `true` through the stored channel).
                    KEY_THRONES_OF_BRITANNIA |
                    KEY_ATTILA => {
                        let folder_path = config_path.join(format!("maps/campaign_maps/{map_name}"));

                        let (sender, receiver) = channel::<bool>();
                        let join = thread::spawn(move || {
                            loop {
                                match receiver.try_recv() {
                                    // A `true` message is the stop signal from the post function.
                                    Ok(stop) => if stop {
                                        break;
                                    }
                                    // No message yet: re-create the folder if the game nuked it, then wait a bit.
                                    Err(_) => {
                                        if !folder_path.is_dir() {
                                            let _ = DirBuilder::new().recursive(true).create(&folder_path);
                                        }

                                        thread::sleep(Duration::from_millis(100));
                                    }
                                }
                            }
                        });

                         *START_POS_WORKAROUND_THREAD.write().unwrap() = Some(vec![(sender, join)]);
                    },

                    // For rome 2 is a weird one. It generates the file in config (like Attila), but then moves it to /data (like Warhammer).
                    //
                    // So we need to first, ensure the config folder is created (it may not exist, but it's not deleted mid-process like in Attila)
                    // and it's empty, and then backup the hlp file, if it exists, from /data.
                    KEY_ROME_2 => {
                        let hlp_folder = game_data_path.join(format!("campaign_maps/{map_name}/"));
                        if hlp_folder.is_dir() {
                            let _ = DirBuilder::new().recursive(true).create(&hlp_folder);
                        }

                        let hlp_path = hlp_folder.join("hlp_data.esf");
                        if hlp_path.is_file() {
                            let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
                            std::fs::copy(&hlp_path, hlp_path_bak)?;
                            std::fs::remove_file(hlp_path)?;
                        }

                    }
                    KEY_SHOGUN_2 => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    KEY_NAPOLEON => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    KEY_EMPIRE => return Err(RLibError::BuildStartposError("Unsupported... yet. If you want to test support for this game, let me know.".to_owned())),
                    _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
                }

                // This file is only from Warhammer 1 onwards. No need to check if the path exists because the hlp process should have created the folder.
                if game.key() != KEY_THRONES_OF_BRITANNIA &&
                    game.key() != KEY_ATTILA &&
                    game.key() != KEY_ROME_2 &&
                    game.key() != KEY_SHOGUN_2 &&
                    game.key() != KEY_NAPOLEON &&
                    game.key() != KEY_EMPIRE {

                    let spd_path = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf"));
                    if spd_path.is_file() {
                        let spd_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf.bak"));
                        std::fs::copy(&spd_path, spd_path_bak)?;
                        std::fs::remove_file(spd_path)?;
                    }
                }
            }
        }

        // Then launch the game. 3K needs to be launched manually and in a blocking manner to make sure it does each pass it has to do correctly.
        if game.key() == KEY_THREE_KINGDOMS {
            let exe_path = game.executable_path(game_path).ok_or_else(|| RLibError::BuildStartposError("Game exe path not found.".to_owned()))?;
            let exe_name = exe_path.file_name().ok_or_else(|| RLibError::BuildStartposError("Game exe name not found.".to_owned()))?.to_string_lossy();

            // NOTE: This uses a non-existant load order file on purpouse, so no mod in the load order interferes with generating the startpos.
            let mut command = Command::new("cmd");
            command.arg("/C");
            command.arg("start");
            command.arg("/wait");
            command.arg("/d");
            command.arg(game_path.to_string_lossy().replace('\\', "/"));
            command.arg(exe_name.to_string());
            command.arg("temp_file.txt;");

            // `/wait` above makes this block until the game exits.
            let _ = command.output()?;

            // In multipass, we need to clean the user script after each pass.
            let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
            let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");
            if uspb.is_file() {
                std::fs::copy(uspb, uspa)?;
            }

            // If there's no backup, it means there was no file to begin with, so we delete the custom file.
            else if uspa.is_file() {
                std::fs::remove_file(uspa)?;
            }

        // Rome 2 needs to be launched manually through the cmd with params. The rest can be launched through their regular launcher.
        } else if game.key() == KEY_ROME_2 {
            let exe_path = game.executable_path(game_path).ok_or_else(|| RLibError::BuildStartposError("Game exe path not found.".to_owned()))?;
            let exe_name = exe_path.file_name().ok_or_else(|| RLibError::BuildStartposError("Game exe name not found.".to_owned()))?.to_string_lossy();

            // NOTE: This uses a non-existant load order file on purpouse, so no mod in the load order interferes with generating the startpos.
            let mut command = Command::new("cmd");
            command.arg("/C");
            command.arg("start");
            command.arg("/d");
            command.arg(game_path.to_string_lossy().replace('\\', "/"));
            command.arg(exe_name.to_string());
            command.arg("temp_file.txt;");

            // We need to turn the user script contents into a one-liner or the command will ignore it.
            #[cfg(target_os = "windows")] {
                use std::os::windows::process::CommandExt;

                // Rome 2 needs the working_data folder in order to throw the startpos file there.
                command.raw_arg(extra_folders);
                command.raw_arg(user_script_contents.replace("\n", " "));
            }

            command.spawn()?;
        } else {
            match game.game_launch_command(game_path) {
                Ok(command) => { let _ = open::that(command); },
                _ => return Err(RLibError::BuildStartposError("The currently selected game cannot be launched from Steam.".to_owned())),
            }
        }

        Ok(())
    }
3280
3281    /// Function to trigger the second part of the startpos build process, which involves importing the startpos file
3282    /// into the provided pack.
3283    ///
3284    /// Call this when the game closes after the pre function launched it.
3285    ///
3286    /// NOTE: The assembly kit path is only needed for Rome 2.
3287    pub fn build_starpos_post(&self, packs: &mut BTreeMap<String, Pack>, pack_key: Option<&str>, game: &GameInfo, game_path: &Path, asskit_path: Option<PathBuf>,campaign_id: &str, process_hlp_spd_data: bool, cleanup_mode: bool, sub_start_pos: &[String]) -> Result<Vec<ContainerPath>> {
3288
3289        // Pre-fetch data we need before taking mutable borrows.
3290        let map_names = if process_hlp_spd_data {
3291            self.db_values_from_table_name_and_column_name_for_value(Some(packs), "campaigns_tables", "campaign_name", "map_name", true, true)
3292        } else {
3293            HashMap::new()
3294        };
3295
3296        // Use the provided key, or fall back to the first pack in the map.
3297        let pack_file = match pack_key {
3298            Some(key) => packs.get_mut(key).ok_or_else(|| RLibError::NoPacksProvided)?,
3299            None => packs.values_mut().next().ok_or_else(|| RLibError::NoPacksProvided)?,
3300        };
3301
3302        let mut startpos_failed = false;
3303        let mut sub_startpos_failed = vec![];
3304        let mut hlp_failed = false;
3305        let mut spd_failed = false;
3306
3307        // Before anything else, close the workaround thread.
3308        if let Some(data) = START_POS_WORKAROUND_THREAD.write().unwrap().as_mut() {
3309            let (sender, handle) = data.remove(0);
3310            let _ = sender.send(true);
3311            let _ = handle.join();
3312        }
3313
3314        *START_POS_WORKAROUND_THREAD.write().unwrap() = None;
3315
3316        if !game_path.is_dir() {
3317            return Err(RLibError::BuildStartposError("Game path incorrect. Fix it in the settings and try again.".to_owned()));
3318        }
3319
3320        let game_data_path = game.data_path(game_path)?;
3321
3322        // Warhammer 3 doesn't use this folder.
3323        if GAMES_NEEDING_VICTORY_OBJECTIVES.contains(&game.key()) {
3324
3325            // We need to delete the "data/campaign_id/" folder.
3326            let mut game_campaign_path = game_data_path.to_path_buf();
3327            game_campaign_path.push(campaign_id);
3328            if game_campaign_path.is_dir() {
3329                let _ = std::fs::remove_dir_all(game_campaign_path);
3330            }
3331        }
3332
3333        let config_path = game.config_path(game_path).ok_or(RLibError::BuildStartposError("Error getting the game's config path.".to_owned()))?;
3334        let scripts_path = config_path.join("scripts");
3335        if !scripts_path.is_dir() {
3336            DirBuilder::new().recursive(true).create(&scripts_path)?;
3337        }
3338
3339        // Restore the userscript backup, if any.
3340        let uspa = scripts_path.join(USER_SCRIPT_FILE_NAME);
3341        let uspb = scripts_path.join(USER_SCRIPT_FILE_NAME.to_owned() + ".bak");
3342        if uspb.is_file() {
3343            std::fs::copy(uspb, uspa)?;
3344        }
3345
3346        // If there's no backup, means there was no file to begin with, so we delete the custom file.
3347        else if uspa.is_file() {
3348            std::fs::remove_file(uspa)?;
3349        }
3350
3351        let mut added_paths = vec![];
3352
3353        // Add the starpos file. As some games have multiple startpos per campaign (3K) we return a vector with all the paths we have to generate.
3354        let starpos_paths = match game.key() {
3355            KEY_PHARAOH_DYNASTIES |
3356            KEY_PHARAOH |
3357            KEY_WARHAMMER_3 |
3358            KEY_TROY |
3359            KEY_THREE_KINGDOMS |
3360            KEY_WARHAMMER_2 |
3361            KEY_WARHAMMER => {
3362                if sub_start_pos.is_empty() {
3363                    vec![game_data_path.join(format!("campaigns/{campaign_id}/startpos.esf"))]
3364                } else {
3365                    let mut paths = vec![];
3366                    for sub in sub_start_pos {
3367                        paths.push(game_data_path.join(format!("campaigns/{campaign_id}/startpos_{sub}.esf")));
3368
3369                    }
3370                    paths
3371                }
3372            }
3373            KEY_THRONES_OF_BRITANNIA |
3374            KEY_ATTILA => vec![config_path.join(format!("maps/campaigns/{campaign_id}/startpos.esf"))],
3375
3376            // Rome 2 outputs the startpos in the assembly kit folder.
3377            KEY_ROME_2 => {
3378                match asskit_path {
3379                    Some(asskit_path) => {
3380                        if !asskit_path.is_dir() {
3381                            return Err(RLibError::BuildStartposError("Assembly Kit path is not a valid folder.".to_owned()));
3382                        }
3383
3384                        vec![asskit_path.join(format!("working_data/campaigns/{campaign_id}/startpos.esf"))]
3385                    },
3386                    None => return Err(RLibError::BuildStartposError("Assembly Kit path not provided.".to_owned())),
3387                }
3388            },
3389
3390            // Shogun 2 outputs to data, but unlike modern names, vanilla startpos are packed, so there's no rist of overwrite.
3391            // We still need to clean it up later though. Napoleon and Empire override vanilla files, so those are backed.
3392            KEY_SHOGUN_2 |
3393            KEY_NAPOLEON |
3394            KEY_EMPIRE => vec![game_data_path.join(format!("campaigns/{campaign_id}/startpos.esf"))],
3395            _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
3396        };
3397
3398        let starpos_paths_pack = if sub_start_pos.is_empty() {
3399            vec![format!("campaigns/{}/startpos.esf", campaign_id)]
3400        } else {
3401            let mut paths = vec![];
3402            for sub in sub_start_pos {
3403                paths.push(format!("campaigns/{campaign_id}/startpos_{sub}.esf"));
3404            }
3405            paths
3406        };
3407
3408        if !cleanup_mode {
3409            for (index, starpos_path) in starpos_paths.iter().enumerate() {
3410                if !starpos_path.is_file() {
3411                    if sub_start_pos.is_empty() {
3412                        startpos_failed = true;
3413                    } else {
3414                        sub_startpos_failed.push(sub_start_pos[index].to_owned());
3415                    }
3416                } else {
3417
3418                    let mut rfile = RFile::new_from_file_path(starpos_path)?;
3419                    rfile.set_path_in_container_raw(&starpos_paths_pack[index]);
3420                    rfile.load()?;
3421                    rfile.guess_file_type()?;
3422
3423                    added_paths.push(pack_file.insert(rfile).map(|x| x.unwrap())?);
3424                }
3425            }
3426        }
3427
3428        // Restore the old starpos if there was one, and delete the new one if it has already been added.
3429        //
3430        // Only needed from Warhammer 1 onwards, and for Rome 2, Napoleon and Empire. Other games generate the startpos outside that folder.
3431        //
3432        // 3K uses 2 startpos, so we need to restore them both.
3433        if game.key() != KEY_THRONES_OF_BRITANNIA &&
3434            game.key() != KEY_ATTILA &&
3435            game.key() != KEY_SHOGUN_2 {
3436
3437            for starpos_path in &starpos_paths {
3438                let file_name = starpos_path.file_name().unwrap().to_string_lossy().to_string();
3439                let file_name_bak = file_name + ".bak";
3440
3441                let mut starpos_path_bak = starpos_path.to_path_buf();
3442                starpos_path_bak.set_file_name(file_name_bak);
3443
3444                if starpos_path_bak.is_file() {
3445                    std::fs::copy(&starpos_path_bak, starpos_path)?;
3446                    std::fs::remove_file(starpos_path_bak)?;
3447                }
3448            }
3449        }
3450
3451        // In Shogun 2, we need to cleanup the generated file as to not interfere with the packed one.
3452        if game.key() == KEY_SHOGUN_2 {
3453            for starpos_path in &starpos_paths {
3454                if starpos_path.is_file() {
3455                    std::fs::remove_file(starpos_path)?;
3456                }
3457            }
3458        }
3459
3460        // Same with the other two files.
3461        if process_hlp_spd_data {
3462            if let Some(map_name) = map_names.get(campaign_id) {
3463
3464                // Same as with startpos. It's different depending on the game.
3465                let hlp_path = match game.key() {
3466                    KEY_PHARAOH_DYNASTIES |
3467                    KEY_PHARAOH |
3468                    KEY_WARHAMMER_3 |
3469                    KEY_TROY |
3470                    KEY_THREE_KINGDOMS |
3471                    KEY_WARHAMMER_2 |
3472                    KEY_WARHAMMER => game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf")),
3473                    KEY_THRONES_OF_BRITANNIA |
3474                    KEY_ATTILA => config_path.join(format!("maps/campaign_maps/{map_name}/hlp_data.esf")),
3475                    KEY_ROME_2 => game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf")),
3476                    _ => return Err(RLibError::BuildStartposError("How the fuck did you trigger this?".to_owned())),
3477                };
3478
3479                let hlp_path_pack = format!("campaign_maps/{map_name}/hlp_data.esf");
3480
3481                if !cleanup_mode {
3482
3483                    if !hlp_path.is_file() {
3484                        hlp_failed = true;
3485                    } else {
3486
3487                        let mut rfile_hlp = RFile::new_from_file_path(&hlp_path)?;
3488                        rfile_hlp.set_path_in_container_raw(&hlp_path_pack);
3489                        rfile_hlp.load()?;
3490                        rfile_hlp.guess_file_type()?;
3491
3492                        added_paths.push(pack_file.insert(rfile_hlp).map(|x| x.unwrap())?);
3493                    }
3494                }
3495
3496                // Only needed from Warhammer 1 onwards, and in Rome 2. Other games generate the hlp file outside that folder.
3497                if game.key() != KEY_THRONES_OF_BRITANNIA &&
3498                    game.key() != KEY_ATTILA {
3499
3500                    let hlp_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/hlp_data.esf.bak"));
3501
3502                    if hlp_path_bak.is_file() {
3503                        std::fs::copy(&hlp_path_bak, hlp_path)?;
3504                        std::fs::remove_file(hlp_path_bak)?;
3505                    }
3506                }
3507
3508                // The spd file was introduced in Warhammer 1. Don't expect it on older games.
3509                if game.key() != KEY_THRONES_OF_BRITANNIA &&
3510                    game.key() != KEY_ATTILA &&
3511                    game.key() != KEY_ROME_2 {
3512
3513                    let spd_path = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf"));
3514                    let spd_path_pack = format!("campaign_maps/{map_name}/spd_data.esf");
3515
3516                    if !cleanup_mode {
3517
3518                        if !spd_path.is_file() {
3519                            spd_failed = true;
3520                        } else {
3521
3522                            let mut rfile_spd = RFile::new_from_file_path(&spd_path)?;
3523                            rfile_spd.set_path_in_container_raw(&spd_path_pack);
3524                            rfile_spd.load()?;
3525                            rfile_spd.guess_file_type()?;
3526
3527                            added_paths.push(pack_file.insert(rfile_spd).map(|x| x.unwrap())?);
3528                        }
3529                    }
3530
3531                    let spd_path_bak = game_data_path.join(format!("campaign_maps/{map_name}/spd_data.esf.bak"));
3532                    if spd_path_bak.is_file() {
3533                        std::fs::copy(&spd_path_bak, spd_path)?;
3534                        std::fs::remove_file(spd_path_bak)?;
3535                    }
3536                }
3537            }
3538        }
3539
3540        let mut error = String::new();
3541        if startpos_failed || (!sub_start_pos.is_empty() && !sub_startpos_failed.is_empty()) || hlp_failed || spd_failed {
3542            error.push_str("<p>One or more files failed to generate:</p><ul>")
3543        }
3544        if startpos_failed {
3545            error.push_str("<li>Startpos file failed to generate.</li>");
3546        }
3547
3548        for sub_failed in &sub_startpos_failed {
3549            error.push_str(&format!("<li>\"{sub_failed}\" Startpos file failed to generate.</li>"));
3550        }
3551
3552        if hlp_failed {
3553            error.push_str("<li>HLP file failed to generate.</li>");
3554        }
3555
3556        if spd_failed {
3557            error.push_str("<li>SPD file failed to generate.</li>");
3558        }
3559
3560        if startpos_failed || hlp_failed || spd_failed {
3561            error.push_str("</ul><p>No files were added and the related files were restored to their pre-build state. Check your tables are correct before trying to generate them again.</p>")
3562        }
3563
3564        if error.is_empty() {
3565            Ok(added_paths)
3566        } else {
3567            Err(RLibError::BuildStartposError(error))
3568        }
3569    }
3570
3571    /// This function imports a specific table from the data it has in the AK.
3572    ///
3573    /// Tables generated with this are VALID.
3574    pub fn import_from_ak(&self, table_name: &str, schema: &Schema) -> Result<DB> {
3575        let definition = if let Some(definitions) = schema.definitions_by_table_name_cloned(table_name) {
3576            if !definitions.is_empty() {
3577                definitions[0].clone()
3578            } else {
3579                return Err(RLibError::DecodingDBNoDefinitionsFound)
3580            }
3581        } else {
3582            return Err(RLibError::DecodingDBNoDefinitionsFound)
3583        };
3584
3585        // Create the new table according to the schema, and import its data from the AK.
3586        if let Some(ak_file) = self.asskit_only_db_tables().get(table_name) {
3587            let mut real_table = ak_file.clone();
3588            real_table.set_definition(&definition);
3589            Ok(real_table)
3590        } else {
3591            Err(RLibError::AssemblyKitTableNotFound(table_name.to_owned()))
3592        }
3593    }
3594
3595    //-----------------------------------//
3596    // Dangerous functions.
3597    //-----------------------------------//
3598
3599    /// This function manually inserts a loc file from this into the dependencies as a vanilla loc.
3600    ///
3601    /// THIS IS DANGEROUS. DO NOT USE IT UNLESS YOU KNOW WHAT YOU'RE DOING.
3602    pub fn insert_loc_as_vanilla_loc(&mut self, rfile: RFile) {
3603        let path = rfile.path_in_container_raw().to_owned();
3604        self.vanilla_files.insert(path.to_owned(), rfile);
3605        self.vanilla_locs.insert(path);
3606    }
3607
3608    /// This function manipulates a definition to recursively add reference lookups if found.
3609    ///
3610    /// THIS IS DANGEROUS IF WE FIND A CYCLIC DEPENDENCY.
3611    pub fn add_recursive_lookups_to_definition(&self, schema: &Schema, definition: &mut Definition, table_name: &str) {
3612        let schema_patches = definition.patches().clone();
3613
3614        for field in definition.fields_mut().iter_mut() {
3615
3616            // First check lookups on the local table.
3617            if let Some(lookup_data_old) = field.lookup(Some(&schema_patches)) {
3618                let mut lookup_data = vec![];
3619
3620                // Check first for local lookups.
3621                if !lookup_data_old.is_empty() {
3622
3623                    let table_name = if let Some(table_name) = table_name.strip_suffix("_tables") {
3624                        table_name.to_owned()
3625                    } else {
3626                        table_name.to_owned()
3627                    };
3628
3629                    for lookup_data_old in &lookup_data_old {
3630                        let lookup_string = format!("{}#{}#{}", table_name, field.name(), lookup_data_old);
3631                        self.add_recursive_lookups(schema, &schema_patches, lookup_data_old, &mut lookup_data, &lookup_string, &table_name);
3632                    }
3633
3634                }
3635
3636                // If our field is a reference, do recursive checks to find out all the lookup data of a specific field.
3637                if let Some((ref_table_name, ref_column)) = field.is_reference(Some(&schema_patches)) {
3638                    for lookup_data_old in &lookup_data_old {
3639                        let lookup_string = format!("{ref_table_name}#{ref_column}#{lookup_data_old}");
3640                        self.add_recursive_lookups(schema, &schema_patches, lookup_data_old, &mut lookup_data, &lookup_string, &ref_table_name);
3641                    }
3642                }
3643
3644                if !lookup_data.is_empty() {
3645                    field.set_lookup(Some(lookup_data));
3646                } else {
3647                    field.set_lookup(None);
3648                }
3649            }
3650        }
3651    }
3652
    /// Recursive worker for [`Self::add_recursive_lookups_to_definition`].
    ///
    /// Follows the reference chain starting at column `lookup` of `table_name`'s table,
    /// pushing `lookup_string` into `lookup_data` once a terminal point is reached: a
    /// non-reference column, a reference column without further lookups, a column name
    /// not present in the processed fields, or a localised field.
    ///
    /// `lookup_string` is the accumulated "table#column#lookup" path so far (segments
    /// joined with ':'). `table_name` is passed WITHOUT the "_tables" suffix; it's
    /// re-appended here before querying the dependencies.
    ///
    /// NOTE(review): there is no cycle detection, so a cyclic reference chain in the
    /// schema recurses without bound — hence the "DANGEROUS" warning on the caller.
    /// Also note that if `db_data` fails or the column is neither a processed field nor
    /// a localised field, the lookup path is silently dropped — presumably intended, but
    /// worth confirming.
    fn add_recursive_lookups(&self,
        schema: &Schema,
        schema_patches: &HashMap<String, HashMap<String, String>>,
        lookup: &str,
        lookup_data: &mut Vec<String>,
        lookup_string: &str,
        table_name: &str
    ) {
        // Set to true on any terminal branch below; the accumulated path is only
        // stored when this ends up true.
        let mut finish_lookup = false;
        let table_name = table_name.to_string() + "_tables";
        if let Ok(ref_tables) = self.db_data(&table_name, true, true) {
            // Collect the definitions of every decoded DB file for this table, so we can
            // pick the newest definition among them.
            let candidates = ref_tables.iter()
                .filter_map(|rfile| rfile.decoded().ok())
                .filter_map(|decoded| if let RFileDecoded::DB(db) = decoded {
                    Some(db.definition().clone())
                } else {
                    None
                })
                .collect::<Vec<_>>();

            if let Some(definition) = schema.definition_newer(&table_name, &candidates) {

                // If this fails, it may be a loc.
                if let Some(pos) = definition.column_position_by_name(lookup) {
                    if let Some(field) = definition.fields_processed().get(pos) {

                        // If our field is a reference, we need to go one level deeper to find the lookup.
                        if let Some((ref_table_name, ref_column)) = field.is_reference(Some(schema_patches)) {
                            if let Some(lookups) = field.lookup(Some(schema_patches)) {
                                for lookup in &lookups {
                                    // Extend the path with one more "table#column#lookup" segment
                                    // and recurse into the referenced table.
                                    let lookup_string = format!("{lookup_string}:{ref_table_name}#{ref_column}#{lookup}");

                                    self.add_recursive_lookups(schema, schema_patches, lookup, lookup_data, &lookup_string, &ref_table_name);
                                }
                            } else {
                                // Reference with no further lookups: the chain ends here.
                                finish_lookup = true;
                            }
                        } else {
                            // Plain (non-reference) column: the chain ends here.
                            finish_lookup = true;
                        }
                    } else {
                        // Column index out of range of the processed fields: treat as terminal.
                        finish_lookup = true;
                    }
                }

                // Column not found among processed fields; check if it's a localised field instead.
                else if definition.localised_fields().iter().any(|x| x.name() == lookup) {
                    finish_lookup = true;
                }
            } else {
                // No definition available for this table: keep the path as-is.
                finish_lookup = true;
            }
        }

        // Store the resolved path, avoiding duplicates.
        if finish_lookup && !lookup_data.iter().any(|x| x == lookup_string) {
            lookup_data.push(lookup_string.to_owned());
        }
    }
3710}