Skip to main content

rpfm_lib/files/pack/
mod.rs

1//---------------------------------------------------------------------------//
2// Copyright (c) 2017-2026 Ismael Gutiérrez González. All rights reserved.
3//
4// This file is part of the Rusted PackFile Manager (RPFM) project,
5// which can be found here: https://github.com/Frodo45127/rpfm.
6//
7// This file is licensed under the MIT license, which can be found here:
8// https://github.com/Frodo45127/rpfm/blob/master/LICENSE.
9//---------------------------------------------------------------------------//
10
11//! PackFile (`.pack`) container format for Total War games.
12//!
13//! This module handles reading and writing PackFiles, the primary container format used by
14//! Total War games to store game assets. PackFiles bundle multiple files into a single archive
15//! with optional compression and encryption support.
16//!
17//! # Overview
18//!
19//! PackFiles have evolved through multiple versions since Empire: Total War, with each version
20//! adding features like timestamps, compression, and encryption. This module supports all
21//! known PackFile versions (PFH0 through PFH6).
22//!
23//! # Pack Types
24//!
25//! Packs have different types that determine load order and behavior:
26//! - **Boot**: Core game files loaded first
27//! - **Release**: Official game content
28//! - **Patch**: Official patches
29//! - **Mod**: User-created modifications
30//! - **Movie**: Video content packs
31//!
32//! # Features
33//!
34//! - **Lazy Loading**: Files can be loaded on-demand to reduce memory usage
35//! - **Compression**: PFH5+ supports LZ4, Zlib, and LZMA compression (game-dependent)
36//! - **Encryption**: Packs support index and data encryption
37//! - **Timestamps**: Track when files were last modified
38//! - **Dependencies**: Packs can declare dependencies on other packs
39//! - **Metadata**: Store notes and settings within the pack itself
40//!
41//! # Example
42//!
43//! ```no_run
44//! use rpfm_lib::files::pack::Pack;
45//! use rpfm_lib::files::{Container, Decodeable, DecodeableExtraData};
46//! use std::fs::File;
47//! use std::io::BufReader;
48//!
49//! // Open and read a pack file
50//! let file = File::open("my_mod.pack").unwrap();
51//! let mut reader = BufReader::new(file);
52//! let mut extra_data = DecodeableExtraData::default();
53//! extra_data.set_lazy_load(true);
54//!
55//! let pack = Pack::decode(&mut reader, &Some(extra_data)).unwrap();
56//! println!("Pack contains {} files", pack.files().len());
57//! ```
58
59use bitflags::bitflags;
60use getset::*;
61use rayon::prelude::*;
62use serde_derive::{Serialize, Deserialize};
63use serde_json::{from_slice, to_string_pretty};
64use itertools::Itertools;
65
66use std::cmp::Ordering;
67use std::collections::{BTreeMap, HashMap, HashSet};
68use std::fs::File;
69use std::hash::{DefaultHasher, Hash, Hasher};
70use std::io::{BufReader, BufWriter, Cursor, SeekFrom, Write};
71use std::path::{Path, PathBuf};
72use std::str::FromStr;
73
74use crate::binary::{ReadBytes, WriteBytes};
75use crate::compression::{Compressible, CompressionFormat};
76use crate::error::{RLibError, Result};
77use crate::files::{Container, ContainerPath, Decodeable, DecodeableExtraData, Encodeable, EncodeableExtraData, FileType, Loc, RFile, RFileDecoded, table::DecodedData};
78use crate::games::{GameInfo, pfh_file_type::PFHFileType, pfh_version::PFHVersion};
79use crate::notes::Note;
80use crate::utils::{current_time, last_modified_time_from_file};
81
82#[cfg(test)]
83mod pack_test;
84mod pack_versions;
85
/// File extension used by PackFiles. Note it includes the leading dot.
pub const EXTENSION: &str = ".pack";

/// Special preamble found in some Steam Workshop downloaded packs.
///
/// When present, the real Pack data starts 8 bytes into the file (see `Pack::read`).
const MFH_PREAMBLE: &str = "MFH";

/// Path where Terry (map editor) exports map files within a pack.
const TERRY_MAP_PATH: &str = "terrain/tiles/battle/_assembly_kit";

/// Default filename for Battle Map Data files exported from Terry.
const DEFAULT_BMD_DATA: &str = "bmd_data.bin";

/// Path prefix for missing loc entries that override existing translations.
const MISSING_LOCS_PATH_START_EXISTING: &str = "text/aaa_missing_locs_";
/// Path prefix for missing loc entries that are new translations.
const MISSING_LOCS_PATH_START_NEW: &str = "text/zzz_missing_locs_";

// Binary markers used by the Siege AI patching function. These are raw byte
// patterns searched for within map data, not null-terminated strings.
const FORT_PERIMETER_HINT: &[u8; 18] = b"AIH_FORT_PERIMETER";
const DEFENSIVE_HILL_HINT: &[u8; 18] = b"AIH_DEFENSIVE_HILL";
const SIEGE_AREA_NODE_HINT: &[u8; 19] = b"AIH_SIEGE_AREA_NODE";

/// Reserved filename for legacy dependency manager data.
pub const RESERVED_NAME_DEPENDENCIES_MANAGER: &str = "dependencies_manager.rpfm_reserved";
/// Reserved filename for dependency manager data (current version).
pub const RESERVED_NAME_DEPENDENCIES_MANAGER_V2: &str = "dependencies_manager_v2.rpfm_reserved";
/// Reserved filename for extra packfile references.
pub const RESERVED_NAME_EXTRA_PACKFILE: &str = "extra_packfile.rpfm_reserved";
/// Reserved filename for pack settings data.
pub const RESERVED_NAME_SETTINGS: &str = "settings.rpfm_reserved";
/// Reserved filename for extracted pack settings (JSON format).
pub const RESERVED_NAME_SETTINGS_EXTRACTED: &str = "settings.rpfm_reserved.json";
/// Reserved filename for pack notes data.
pub const RESERVED_NAME_NOTES: &str = "notes.rpfm_reserved";
/// Reserved filename for extracted pack notes (Markdown format).
pub const RESERVED_NAME_NOTES_EXTRACTED: &str = "notes.rpfm_reserved.md";

/// List of reserved filenames used by RPFM for internal purposes.
///
/// These files are automatically handled during pack read/write operations
/// and should not be manually manipulated.
pub const RESERVED_RFILE_NAMES: [&str; 3] = [RESERVED_NAME_EXTRA_PACKFILE, RESERVED_NAME_SETTINGS, RESERVED_NAME_NOTES];

/// Authoring tool identifier for Creative Assembly tools.
const AUTHORING_TOOL_CA: &str = "CA_TOOL";
/// Authoring tool identifier for RPFM.
const AUTHORING_TOOL_RPFM: &str = "RPFM";
/// Maximum size in bytes for the authoring tool string.
const AUTHORING_TOOL_SIZE: u32 = 8;

/// Settings key for the compression format used by this pack.
///
/// Read back on Pack decode to restore the last-used compression format.
pub const SETTING_KEY_CF: &str = "compression_format";
138
bitflags! {

    /// This represents the bitmasks a Pack can have applied to its type.
    ///
    /// Keep in mind that this lib supports decoding Packs with any of these flags enabled,
    /// but it only supports encoding for the `HAS_INDEX_WITH_TIMESTAMPS` flag.
    ///
    /// These flags live in the same u32 as the Pack type: the low 4 bits are the
    /// type, the rest are flags (see how they're split during decoding).
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
    pub struct PFHFlags: u32 {

        /// Used to specify that the header of the Pack is extended by 20 bytes. Used in Arena.
        const HAS_EXTENDED_HEADER       = 0b0000_0001_0000_0000;

        /// Used to specify that the File Index is encrypted. Used in Arena.
        const HAS_ENCRYPTED_INDEX       = 0b0000_0000_1000_0000;

        /// Used to specify that the File Index contains a timestamp of every Pack.
        const HAS_INDEX_WITH_TIMESTAMPS = 0b0000_0000_0100_0000;

        /// Used to specify that the File Data is encrypted. Seen in `music.pack` Packs and in Arena.
        const HAS_ENCRYPTED_DATA        = 0b0000_0000_0001_0000;
    }
}
161
162//---------------------------------------------------------------------------//
163//                              Enum & Structs
164//---------------------------------------------------------------------------//
165
/// Packs are a container-type file, used for "packing" all game assets into single files, to speed up disk reads.
///
/// Their format has passed through multiple iterations since Empire, getting changes on almost all iterations,
/// like timestamps, encryption, compression,...
///
/// # Pack Structure
///
/// | Bytes  | Type                        | Data                                                                       |
/// | ------ | --------------------------- | -------------------------------------------------------------------------- |
/// | 20-384 | [PackHeader]                | Header of the Pack. Length depends on Pack version and flags enabled.      |
/// | *      | [Pack Index](#pack-index)   | Index containing the list of Packs this Pack depends on.                   |
/// | *      | [File Index](#file-index)   | Index containing the metadata of the Files this Pack contains.             |
/// | *      | [File Data](#file-data)     | Data of the files contained in this Pack.                                  |
/// | 256    | Appendix                    | Unknown data at the end of the Pack. Only seen in Arena's encrypted Packs. |
///
/// ## Pack Index
///
/// The Pack Index contains a list of Packs that will be force-loaded before this mod.
///
/// | Bytes | Type                     | Data            |
/// | ----- | ------------------------ | --------------- |
/// | *     | Null-terminated StringU8 | Pack file name. |
///
/// ## File Index
///
/// The File Index contains the metadata of the Files this Pack contains, in the same order their data is, further in the Pack.
///
/// | Bytes | Type                     | Data                                                                                                            |
/// | ----- | ------------------------ | --------------------------------------------------------------------------------------------------------------- |
/// | 4     | u32                      | Size of the file's data, in bytes.                                                                              |
/// | 8     | u64                      | Timestamp of the file, if the header has the HAS_INDEX_WITH_TIMESTAMPS flag enabled. Only in PFH2 and PFH3.     |
/// | 4     | u32                      | Truncated timestamp of the file, if the header has the HAS_INDEX_WITH_TIMESTAMPS flag enabled. Only since PFH4. |
/// | 1     | bool                     | If the file is compressed. Only since PFH5.                                                                     |
/// | *     | Null-terminated StringU8 | File's path within the Pack.                                                                                    |
///
/// ## File Data
///
/// The raw data of the files contained by this Pack, in the same order as their indexes. Not much to explain here.
///
#[derive(Debug, Clone, PartialEq, Getters, MutGetters, Setters, Default, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct Pack {

    /// The path of the Pack on disk, if exists. If not, then this should be empty.
    disk_file_path: String,

    /// The offset on the disk file the data of this Pack starts. Usually 0.
    disk_file_offset: u64,

    /// Timestamp from the moment this Pack was open. To check if the file was edited on disk while we had it open.
    local_timestamp: u64,

    /// If the files in this Pack should be compressed. Compression format is set in the Pack Settings.
    // NOTE(review): getset accessors are skipped here — presumably custom
    // accessors for `compress` are hand-written elsewhere in the module; confirm.
    #[getset(skip)]
    compress: bool,

    /// Header data of this Pack.
    header: PackHeader,

    /// List of Packs this Pack depends on. If the boolean is true, the packs are also required to be loaded before itself when starting the game.
    ///
    /// In other places, we may refer to this as the `Dependency List`.
    dependencies: Vec<(bool, String)>,

    /// List of files this Pack contains, keyed by their path within the Pack.
    files: HashMap<String, RFile>,

    /// List of file paths lowercased, with their casing counterparts. To quickly find files.
    paths: HashMap<String, Vec<String>>,

    /// Notes added to the Pack. Exclusive of this lib.
    notes: PackNotes,

    /// Settings stored in the Pack itself, to be able to share them between installations.
    settings: PackSettings,
}
242
/// Header of a Pack, containing all the header-related info of said Pack.
///
/// # Header Structure.
///
/// | Bytes | Type                               | Data                                                                                                            |
/// | ----- | ---------------------------------- | --------------------------------------------------------------------------------------------------------------- |
/// | 8     | 00-Padded StringU8                 | Fake Preamble/Id of this Pack. Usually "MFH" and a bunch of 00. Only in old Steam Workshop files.               |
/// | 4     | StringU8                           | Preamble/Id of this Pack. Contains the "version" of this Pack.                                                  |
/// | 4     | u32                                | Pack Type + Bitwised flags for tweaking certain Pack configurations.                                            |
/// | 4     | u32                                | Amount of items in the Pack Index of this Pack.                                                                 |
/// | 4     | u32                                | Length in bytes of the Pack Index.                                                                              |
/// | 4     | u32                                | Amount of items in the File Index of this Pack.                                                                 |
/// | 4     | u32                                | Length in bytes of the File Index.                                                                              |
/// | 8     | u64                                | Timestamp when this Pack was last edited. Only in PFH2 and PFH3.                                                |
/// | 20    | `Vec<u8>`                          | Extended header data. Only if HAS_EXTENDED_HEADER flag is set.                                                  |
/// | 280   | [Subheader](#subheader-structure)  | Subheader data. Only since PFH6.                                                                                |
///
/// # Subheader Structure.
///
/// Subheader containing extra metadata for the Pack. Only in PFH6.
///
/// | Bytes | Type               | Data                                                                                         |
/// | ----- | ------------------ | -------------------------------------------------------------------------------------------- |
/// | 4     | u32                | Subheader marker. Marks the beginning of the subheader. If missing, there's no subheader.    |
/// | 4     | u32                | Subheader version.                                                                           |
/// | 4     | u32                | Game version this Pack was done for.                                                         |
/// | 4     | u32                | Build number of the game version this Pack was done for.                                     |
/// | 8     | 00-Padded StringU8 | Tool that made this Pack.                                                                    |
/// | 256   | `Vec<u8>`          | Unused bytes.                                                                                |
#[derive(Debug, Clone, PartialEq, Eq, Getters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", set = "pub")]
pub struct PackHeader {

    /// The version of the Pack.
    pfh_version: PFHVersion,

    /// The type of the Pack.
    pfh_file_type: PFHFileType,

    /// The bitmasks applied to the Pack.
    bitmask: PFHFlags,

    /// The timestamp of the last time the Pack was saved.
    internal_timestamp: u64,

    /// Game version this Pack is intended for. This usually triggers the "outdated mod" warning in the launcher if it doesn't match the current exe version.
    game_version: u32,

    /// Build number of the game.
    build_number: u32,

    /// Tool that created the Pack. Max 8 characters, 00-padded.
    authoring_tool: String,

    /// Extra subheader data, in case it's used in the future.
    extra_subheader_data: Vec<u8>,
}
300
/// Pack-specific settings stored within the pack itself.
///
/// These settings are serialized to a reserved file within the pack, allowing them
/// to be shared when the pack is distributed. Common settings include compression
/// format, diagnostic ignore lists, and import configurations.
///
/// # Built-in Settings
///
/// - `compression_format`: The compression format to use (None, Lz4, Zlib)
/// - `diagnostics_files_to_ignore`: Files/diagnostics to skip during validation
/// - `import_files_to_ignore`: Files to skip during folder imports
/// - `disable_autosaves`: Disable automatic saving
/// - `do_not_generate_existing_locs`: Skip generating loc entries that already exist
// NOTE(review): `Pack` derives `Default` and embeds this type, so `PackSettings`
// must implement `Default`. It's not derived here — presumably a manual impl
// lives elsewhere in this module; confirm before relying on its default values.
#[derive(Clone, Debug, PartialEq, Eq, Getters, MutGetters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct PackSettings {

    /// Multi-line text settings (e.g., file ignore lists).
    settings_text: BTreeMap<String, String>,

    /// Single-line string settings (e.g., compression format).
    settings_string: BTreeMap<String, String>,

    /// Boolean flag settings (e.g., disable_autosaves).
    settings_bool: BTreeMap<String, bool>,

    /// Integer settings (e.g., thresholds, limits).
    settings_number: BTreeMap<String, i32>,
}
330
/// Pack notes for documentation and collaboration.
///
/// Notes are stored within the pack (in the `notes.rpfm_reserved` file) and include
/// both pack-level notes (general documentation) and file-specific notes (annotations
/// on individual files or tables). Notes are serialized as JSON and can be shared
/// when the pack is distributed.
#[derive(Clone, Debug, PartialEq, Eq, Default, Getters, MutGetters, Setters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub", set = "pub")]
pub struct PackNotes {

    /// General notes for the entire pack (Markdown format).
    pack_notes: String,

    /// File-specific notes, keyed by lowercase file path.
    ///
    /// For DB tables, notes are shared across all tables of the same type
    /// (path is truncated to `db/table_name/`).
    file_notes: HashMap<String, Vec<Note>>,
}
349
350//---------------------------------------------------------------------------//
351//                           Structs Implementations
352//---------------------------------------------------------------------------//
353
354impl Container for Pack {
355
356    /// This method allows us to extract the metadata associated to the provided container as `.json` or `.md` files.
357    ///
358    /// [Pack] implementation extracts the [PackSettings] of the provided Pack and its associated notes.
359    fn extract_metadata(&mut self, destination_path: &Path) -> Result<Vec<PathBuf>> {
360        let mut paths = vec![];
361        let mut data = vec![];
362        data.write_all(to_string_pretty(&self.notes)?.as_bytes())?;
363        data.extend_from_slice(b"\n"); // Add newline to the end of the file
364
365        let path = destination_path.join(RESERVED_NAME_NOTES_EXTRACTED);
366        paths.push(path.to_owned());
367        let mut file = BufWriter::new(File::create(path)?);
368        file.write_all(&data)?;
369        file.flush()?;
370
371        let mut data = vec![];
372        data.write_all(to_string_pretty(&self.settings)?.as_bytes())?;
373        data.extend_from_slice(b"\n"); // Add newline to the end of the file
374
375        let path = destination_path.join(RESERVED_NAME_SETTINGS_EXTRACTED);
376        paths.push(path.to_owned());
377        let mut file = BufWriter::new(File::create(path)?);
378        file.write_all(&data)?;
379        file.flush()?;
380
381        Ok(paths)
382    }
383
384    fn insert(&mut self, mut file: RFile) -> Result<Option<ContainerPath>> {
385
386        // Filter out special files, so we only leave the normal files in.
387        let path_container = file.path_in_container();
388        let path = file.path_in_container_raw();
389        if path == RESERVED_NAME_NOTES_EXTRACTED {
390            self.notes = PackNotes::load(&file.encode(&None, false, false, true)?.unwrap())?;
391            Ok(None)
392        } else if path == RESERVED_NAME_SETTINGS_EXTRACTED {
393            self.settings = PackSettings::load(&file.encode(&None, false, false, true)?.unwrap())?;
394            Ok(None)
395        } else if path == RESERVED_NAME_DEPENDENCIES_MANAGER_V2 {
396            self.dependencies = from_slice(&file.encode(&None, false, false, true)?.unwrap())?;
397            Ok(None)
398        }
399
400        // If it's not filtered out, add it to the Pack.
401        else {
402            self.paths_cache_insert_path(path);
403            self.files.insert(path.to_owned(), file);
404
405            Ok(Some(path_container))
406        }
407    }
408
409    fn disk_file_path(&self) -> &str {
410       &self.disk_file_path
411    }
412
413    fn files(&self) -> &HashMap<String, RFile> {
414        &self.files
415    }
416
417    fn files_mut(&mut self) -> &mut HashMap<String, RFile> {
418        &mut self.files
419    }
420
421    fn disk_file_offset(&self) -> u64 {
422       self.disk_file_offset
423    }
424
425    fn paths_cache(&self) -> &HashMap<String, Vec<String>> {
426        &self.paths
427    }
428
429    fn paths_cache_mut(&mut self) -> &mut HashMap<String, Vec<String>> {
430        &mut self.paths
431    }
432
433    fn internal_timestamp(&self) -> u64 {
434       self.header.internal_timestamp
435    }
436
437    fn local_timestamp(&self) -> u64 {
438       self.local_timestamp
439    }
440
441    /// This function allows you to *move* any RFile of folder of RFiles from one folder to another.
442    ///
443    /// It returns a list with all the new [ContainerPath].
444    fn move_path(&mut self, source_path: &ContainerPath, destination_path: &ContainerPath) -> Result<Vec<(ContainerPath, ContainerPath)>> {
445        match source_path {
446            ContainerPath::File(source_path) => match destination_path {
447                ContainerPath::File(destination_path) => {
448                    if RESERVED_RFILE_NAMES.contains(&&**destination_path) {
449                        return Err(RLibError::ReservedFiles);
450                    }
451
452                    if destination_path.is_empty() {
453                        return Err(RLibError::EmptyDestiny);
454                    }
455
456                    self.paths_cache_remove_path(source_path);
457                    let mut moved = self.files_mut()
458                        .remove(source_path)
459                        .ok_or_else(|| RLibError::FileNotFound(source_path.to_string()))?;
460
461                    moved.set_path_in_container_raw(destination_path);
462
463                    self.insert(moved).map(|x| match x {
464                        Some(x) => vec![(ContainerPath::File(source_path.to_string()), x); 1],
465                        None => Vec::with_capacity(0),
466                    })
467                },
468                ContainerPath::Folder(_) => unreachable!("move_path_pack_1"),
469            },
470            ContainerPath::Folder(source_path) => match destination_path {
471                ContainerPath::File(_) => unreachable!("move_path_pack_2"),
472                ContainerPath::Folder(destination_path) => {
473                    if destination_path.is_empty() {
474                        return Err(RLibError::EmptyDestiny);
475                    }
476
477                    // Fix to avoid false positives.
478                    let mut source_path_end = source_path.to_owned();
479                    if !source_path_end.ends_with('/') {
480                        source_path_end.push('/');
481                    }
482
483                    let moved_paths = self.files()
484                        .par_iter()
485                        .filter_map(|(path, _)| if path.starts_with(&source_path_end) { Some(path.to_owned()) } else { None })
486                        .collect::<Vec<_>>();
487
488                    let moved = moved_paths.iter()
489                        .filter_map(|x| {
490                            self.paths_cache_remove_path(x);
491                            self.files_mut().remove(x)
492                        })
493                        .collect::<Vec<_>>();
494
495                    let mut new_paths = Vec::with_capacity(moved.len());
496                    for mut moved in moved {
497                        let old_path = moved.path_in_container();
498                        let new_path = moved.path_in_container_raw().replacen(source_path, destination_path, 1);
499                        moved.set_path_in_container_raw(&new_path);
500
501                        if let Some(new_path) = self.insert(moved)? {
502                            new_paths.push((old_path, new_path));
503                        }
504                    }
505
506                    Ok(new_paths)
507                },
508            },
509        }
510    }
511}
512
impl Decodeable for Pack {

    /// Reads a Pack from the provided source.
    ///
    /// Thin wrapper over the internal `read` function. Note that `extra_data` is
    /// effectively required: decoding fails if it's `None`, or if it lacks a `GameInfo`.
    fn decode<R: ReadBytes>(data: &mut R, extra_data: &Option<DecodeableExtraData>) -> Result<Self> {
        Self::read(data, extra_data)
    }
}
519
impl Encodeable for Pack {

    /// Writes this Pack, in binary Pack format, into the provided buffer.
    ///
    /// Thin wrapper over the internal `write` function.
    fn encode<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
        self.write(buffer, extra_data)
    }
}
526
527/// Implementation of `Pack`.
528impl Pack {
529
530    /// This function creates a new empty Pack with a specific PFHVersion.
531    pub fn new_with_version(pfh_version: PFHVersion) -> Self {
532        let mut pack = Self::default();
533        pack.header.pfh_version = pfh_version;
534        pack
535    }
536
537    /// This function creates a new empty Pack with a name and a specific PFHVersion.
538    pub fn new_with_name_and_version(name: &str, pfh_version: PFHVersion) -> Self {
539        let mut pack = Self::default();
540        pack.header.pfh_version = pfh_version;
541        pack.disk_file_path = name.to_owned();
542        pack
543    }
544
    /// This function tries to read a `Pack` from raw data.
    ///
    /// If `lazy_load` is false, the data of all the files inside the `Pack` will be preloaded to memory.
    ///
    /// Requires `extra_data` to be present and contain a `GameInfo`. Returns an error
    /// if the data is too short, the header is invalid, or the decoded size doesn't
    /// match the expected size.
    fn read<R: ReadBytes>(data: &mut R, extra_data: &Option<DecodeableExtraData>) -> Result<Self> {
        let extra_data = extra_data.as_ref().ok_or(RLibError::DecodingMissingExtraData)?;

        // GameInfo is required now, to properly support per-game particularities.
        let game_info = match extra_data.game_info {
            Some(game_info) => game_info,
            None => return Err(RLibError::GameInfoMissingFromDecodingFunction),
        };

        // If we're reading from a file on disk, we require a valid path.
        // If we're reading from a file on memory, we don't need a valid path.
        let disk_file_path = match extra_data.disk_file_path {
            Some(path) => {
                let file_path = PathBuf::from_str(path).map_err(|_|RLibError::DecodingMissingExtraDataField("disk_file_path".to_owned()))?;
                if file_path.is_file() {
                    path.to_owned()
                } else {
                    return Err(RLibError::DecodingMissingExtraData)
                }
            }
            None => String::new()
        };

        let disk_file_offset = extra_data.disk_file_offset;
        let disk_file_size = if extra_data.data_size > 0 { extra_data.data_size } else { data.len()? };
        let timestamp = extra_data.timestamp;
        let is_encrypted = extra_data.is_encrypted;
        let skip_path_cache_generation = extra_data.skip_path_cache_generation;

        // If we don't have a path, or the file is encrypted, we can't lazy-load.
        let lazy_load = !disk_file_path.is_empty() && !is_encrypted && extra_data.lazy_load;

        // First, we do some quick checks to ensure it's a valid Pack.
        // A valid Pack, bare and empty, needs at least 24 bytes, regardless of game or type.
        let data_len = disk_file_size;
        if data_len < 24 {
            return Err(RLibError::PackHeaderNotComplete);
        }

        // Check if it has the weird steam-only header, and skip it if found.
        // The check reads 3 bytes, so we always rewind them, then skip the full
        // 8-byte fake header if the MFH marker was present.
        let start = if data.read_string_u8(3)? == MFH_PREAMBLE { 8 } else { 0 };
        data.seek(SeekFrom::Current(-3))?;
        data.seek(SeekFrom::Current(start))?;

        // Create the default Pack and start populating it.
        let mut pack = Self {
            disk_file_path,
            disk_file_offset,
            local_timestamp: timestamp,
            ..Default::default()
        };

        pack.header.pfh_version = PFHVersion::version(&data.read_string_u8(4)?)?;

        // The Pack type and the flags share the same u32: the low 4 bits are the
        // type, the remaining bits are the bitmask flags.
        let pack_type = data.read_u32()?;
        pack.header.pfh_file_type = PFHFileType::try_from(pack_type & 15)?;
        pack.header.bitmask = PFHFlags::from_bits_truncate(pack_type & !15);

        // Each Pack version has its own read function, to avoid breaking support for older Packs
        // when implementing support for a new Pack version.
        let expected_data_len = match pack.header.pfh_version {
            PFHVersion::PFH6 => pack.read_pfh6(data, extra_data)?,
            PFHVersion::PFH5 => pack.read_pfh5(data, extra_data)?,
            PFHVersion::PFH4 => pack.read_pfh4(data, extra_data)?,
            PFHVersion::PFH3 => pack.read_pfh3(data, extra_data)?,
            PFHVersion::PFH2 => pack.read_pfh2(data, extra_data)?,
            PFHVersion::PFH0 => pack.read_pfh0(data, extra_data)?,
        };

        // Remove the reserved files from the Pack and read them properly.
        if let Some(mut notes) = pack.files.remove(RESERVED_NAME_NOTES) {
            notes.load()?;
            let data = notes.cached()?;

            // Migration logic from 3.X to 4.X notes: if we detect old notes, we don't fail.
            // We instead generate a new 4.X note and fill the pack message with the old 3.X note.
            match PackNotes::load(data) {
                Ok(notes) => pack.notes = notes,
                Err(_) => {
                    let len = data.len();
                    let mut data = Cursor::new(data);
                    pack.notes = PackNotes::default();
                    pack.notes.pack_notes = data.read_string_u8(len)?;
                }
            }
        }

        if let Some(mut settings) = pack.files.remove(RESERVED_NAME_SETTINGS) {
            settings.load()?;
            let data = settings.cached()?;
            pack.settings.load_and_update(data)?;
        }

        if let Some(mut deps) = pack.files.remove(RESERVED_NAME_DEPENDENCIES_MANAGER_V2) {
            deps.load()?;
            let data = deps.cached()?;
            pack.dependencies = from_slice(data)?;
        }

        // Generate the path list.
        if !skip_path_cache_generation {
            pack.paths_cache_generate();
        }

        // Once we're done reading files, we have to initialize the compression format.
        // The reason we do this here is because the pack only contains if the files are compressed or not, not which format is used.
        // To support multiple formats, we have to make sure we save the last-used format in the pack settings.
        // If no format has been saved but the files are compressed, we default to the more modern one supported by the game.
        let preferred_cf = game_info.compression_formats_supported().first().cloned().unwrap_or_default();
        let current_cf_str = pack.settings().setting_string(SETTING_KEY_CF).cloned().unwrap_or_default();
        let current_cf = CompressionFormat::from(&*current_cf_str);

        if pack.compress && current_cf == CompressionFormat::None {
            pack.settings_mut().set_setting_string(SETTING_KEY_CF, preferred_cf.to_string().as_str());
        }

        // If at this point we have not reached the end of the Pack, there is something wrong with it.
        // NOTE: Arena Packs have extra data at the end. If we detect one of those Packs, take that into account.
        //if pack.header.pfh_version == PFHVersion::PFH5 && pack.header.bitmask.contains(PFHFlags::HAS_EXTENDED_HEADER) {
        //    if expected_data_len + 256 != data_len { return Err(RLibError::DecodingMismatchSizeError(data_len as usize, expected_data_len as usize)) }
        //}
        if expected_data_len != data_len { return Err(RLibError::DecodingMismatchSizeError(data_len as usize, expected_data_len as usize)) }

        // Guess the file's types. Do this here because this can be very slow and here we can do it in parallel.
        pack.files.par_iter_mut().map(|(_, file)| file.guess_file_type()).collect::<Result<()>>()?;

        // If we disabled lazy-loading, load every File to memory.
        if !lazy_load {
            pack.files.par_iter_mut().try_for_each(|(_, file)| file.load())?;
        }

        // Return our Pack.
        Ok(pack)
    }
682
683    /// This function writes a `Pack` into the provided buffer.
684    fn write<W: WriteBytes>(&mut self, buffer: &mut W, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
685        let test_mode = if let Some(extra_data) = extra_data {
686            extra_data.test_mode
687        } else {
688            false
689        };
690
691        if !test_mode {
692
693            // Only do this in non-vanilla files.
694            if self.header.pfh_file_type == PFHFileType::Mod || self.header.pfh_file_type == PFHFileType::Movie {
695
696                // Save notes, if needed.
697                let mut data = vec![];
698                data.write_all(to_string_pretty(&self.notes)?.as_bytes())?;
699                let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_NOTES);
700                self.files.insert(RESERVED_NAME_NOTES.to_owned(), file);
701
702                // Saving Pack settings.
703                let mut data = vec![];
704                data.write_all(to_string_pretty(&self.settings)?.as_bytes())?;
705                let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_SETTINGS);
706                self.files.insert(RESERVED_NAME_SETTINGS.to_owned(), file);
707
708                // Saving Pack dependencies.
709                let mut data = vec![];
710                data.write_all(to_string_pretty(&self.dependencies)?.as_bytes())?;
711                let file = RFile::new_from_vec(&data, FileType::Text, 0, RESERVED_NAME_DEPENDENCIES_MANAGER_V2);
712                self.files.insert(RESERVED_NAME_DEPENDENCIES_MANAGER_V2.to_owned(), file);
713
714            }
715        }
716
717        match self.header.pfh_version {
718            PFHVersion::PFH6 => self.write_pfh6(buffer, extra_data)?,
719            PFHVersion::PFH5 => self.write_pfh5(buffer, extra_data)?,
720            PFHVersion::PFH4 => self.write_pfh4(buffer, extra_data)?,
721            PFHVersion::PFH3 => self.write_pfh3(buffer, extra_data)?,
722            PFHVersion::PFH2 => self.write_pfh2(buffer, extra_data)?,
723            PFHVersion::PFH0 => self.write_pfh0(buffer, extra_data)?,
724        }
725
726        // Remove again the reserved Files.
727        self.remove(&ContainerPath::File(RESERVED_NAME_NOTES.to_owned()));
728        self.remove(&ContainerPath::File(RESERVED_NAME_SETTINGS.to_owned()));
729        self.remove(&ContainerPath::File(RESERVED_NAME_DEPENDENCIES_MANAGER_V2.to_owned()));
730
731        // If nothing has failed, return success.
732        Ok(())
733    }
734
735    //-----------------------------------------------------------------------//
736    //                        Convenience functions
737    //-----------------------------------------------------------------------//
738
739    /// This function reads and returns all CA Packs for the provided game merged as one, for easy manipulation.
740    ///
741    /// This needs a [GameInfo] to get the Packs from, and a game path to search the Packs on.
742    pub fn read_and_merge_ca_packs(game: &GameInfo, game_path: &Path) -> Result<Self> {
743        let paths = game.ca_packs_paths(game_path)?;
744        let mut pack = Self::read_and_merge(&paths, game, true, true, false)?;
745
746        // Make sure it's not mod type.
747        pack.header_mut().set_pfh_file_type(PFHFileType::Release);
748        Ok(pack)
749    }
750
751    /// Convenience function to open multiple Packs as one, taking care of overwriting files when needed.
752    ///
753    /// If this function receives only one path, it works as a normal read_from_disk function. If it receives none, an error will be returned.
754    pub fn read_and_merge(pack_paths: &[PathBuf], game: &GameInfo, lazy_load: bool, ignore_mods: bool, keep_order: bool) -> Result<Self> {
755        if pack_paths.is_empty() {
756            return Err(RLibError::NoPacksProvided);
757        }
758
759        let mut extra_data = DecodeableExtraData {
760            lazy_load,
761            game_info: Some(game),
762            ..Default::default()
763        };
764
765        // If we only got one path, just decode the Pack on it.
766        if pack_paths.len() == 1 {
767            let mut data = BufReader::new(File::open(&pack_paths[0])
768                .map_err(|error| RLibError::IOErrorPath(Box::new(RLibError::IOError(error)), pack_paths[0].to_path_buf()))?);
769            let path_str = pack_paths[0].to_string_lossy().replace('\\', "/");
770
771            extra_data.set_disk_file_path(Some(&path_str));
772            extra_data.set_timestamp(last_modified_time_from_file(data.get_ref()).unwrap());
773
774            return Self::read(&mut data, &Some(extra_data))
775        }
776
777        // Skip path cache generation for each pack, as we're not going to use it. Instead we're going to generate one for the merged pack.
778        extra_data.set_skip_path_cache_generation(true);
779
780        // Generate a new empty Pack to act as merged one.
781        let mut pack_new = Pack::default();
782        let mut packs = pack_paths.par_iter()
783            .map(|path| {
784                let mut data = BufReader::new(File::open(path)
785                    .map_err(|error| RLibError::IOErrorPath(Box::new(RLibError::IOError(error)), pack_paths[0].to_path_buf()))?);
786                let path_str = path.to_string_lossy().replace('\\', "/");
787
788                let mut extra_data = extra_data.to_owned();
789                extra_data.set_disk_file_path(Some(&path_str));
790                extra_data.set_timestamp(last_modified_time_from_file(data.get_ref())?);
791
792                Self::read(&mut data, &Some(extra_data))
793            }).collect::<Result<Vec<Pack>>>()?;
794
795        // Group different type files, and sort them by name.
796        packs.sort_by(|pack_a, pack_b| if pack_a.pfh_file_type() != pack_b.pfh_file_type() {
797            pack_a.pfh_file_type().cmp(&pack_b.pfh_file_type())
798        } else if !keep_order {
799            pack_a.disk_file_path.cmp(&pack_b.disk_file_path)
800        } else {
801            Ordering::Equal
802        });
803
804        packs.iter()
805            .chunk_by(|pack| pack.header.pfh_file_type)
806            .into_iter()
807            .for_each(|(pfh_type, packs)| {
808                if pfh_type != PFHFileType::Mod || !ignore_mods {
809                    let mut packs = packs.collect::<Vec<_>>();
810                    packs.reverse();
811                    packs.iter()
812                        .for_each(|pack| {
813                        pack_new.files_mut().extend(pack.files().clone())
814                    });
815                }
816            });
817
818        // Fix the dependencies of the merged pack.
819        let pack_names = packs.iter().map(|pack| pack.disk_file_name()).collect::<Vec<_>>();
820        let mut dependencies = packs.iter()
821            .flat_map(|pack| pack.dependencies()
822                .iter()
823                .filter(|(_, dependency)| !pack_names.contains(dependency))
824                .cloned()
825                .collect::<Vec<_>>())
826            .collect::<Vec<_>>();
827
828        // Dedup the dependencies while preserving the order.
829        let mut set = HashSet::new();
830        dependencies.retain(|x| set.insert(x.clone()));
831        pack_new.set_dependencies(dependencies);
832
833        // Fix the pack version and header.
834        pack_new.set_pfh_file_type(packs[0].pfh_file_type());
835        pack_new.set_pfh_version(game.pfh_version_by_file_type(pack_new.pfh_file_type()));
836
837        // Generate the path list.
838        pack_new.paths_cache_generate();
839
840        Ok(pack_new)
841    }
842
843    /// Convenience function to merge open Packs as one, taking care of overwriting files when needed.
844    ///
845    /// Packs are merged in the order they are provided. If you need to use a custom order,
846    /// sort them before merging, or use the `read_and_merge` function instead.
847    ///
848    /// Internal files are left in the state they were before. If you need them loaded, do it after this.
849    pub fn merge(packs: &[Self]) -> Result<Self> {
850        if packs.is_empty() {
851            return Err(RLibError::NoPacksProvided);
852        }
853
854        // If we only got one pack, clone it and return it.
855        if packs.len() == 1 {
856            return Ok(packs[0].clone());
857        }
858
859        // Generate a new empty Pack to act as merged one. If all packs to merge share the same type, use that pack type.
860        let mut pack_new = Pack::default();
861
862        let mut pfh_types = packs.iter().map(|pack| pack.pfh_file_type()).collect::<Vec<_>>();
863        pfh_types.sort();
864        pfh_types.dedup();
865
866        if pfh_types.len() == 1 {
867            pack_new.set_pfh_file_type(pfh_types[0]);
868        }
869
870        packs.iter()
871            .chunk_by(|pack| pack.header.pfh_file_type)
872            .into_iter()
873            .for_each(|(_, packs)| {
874                let mut packs = packs.collect::<Vec<_>>();
875                packs.reverse();
876                packs.iter()
877                    .for_each(|pack| {
878                    pack_new.files_mut().extend(pack.files().clone())
879                });
880            });
881
882        // Fix the dependencies of the merged pack.
883        let pack_names = packs.iter().map(|pack| pack.disk_file_name()).collect::<Vec<_>>();
884        let mut dependencies = packs.iter()
885            .flat_map(|pack| pack.dependencies()
886                .iter()
887                .filter(|(_, dependency)| !pack_names.contains(dependency))
888                .cloned()
889                .collect::<Vec<_>>())
890            .collect::<Vec<_>>();
891
892        // Dedup the dependencies while preserving the order.
893        let mut set = HashSet::new();
894        dependencies.retain(|x| set.insert(x.clone()));
895        pack_new.set_dependencies(dependencies);
896
897        // Fix the pack version and header.
898        pack_new.set_pfh_file_type(packs[0].pfh_file_type());
899        pack_new.set_pfh_version(packs[0].pfh_version());
900
901        // Generate the path list.
902        pack_new.paths_cache_generate();
903
904        Ok(pack_new)
905    }
906
907    /// Convenience function to easily save a Pack to disk.
908    ///
909    /// If a path is provided, the Pack will be saved to that path. Otherwise, it'll use whatever path it had set before.
910    pub fn save(&mut self, path: Option<&Path>, game_info: &GameInfo, extra_data: &Option<EncodeableExtraData>) -> Result<()> {
911        if let Some(path) = path {
912            self.disk_file_path = path.to_string_lossy().to_string();
913        }
914
915        // Before truncating the file, make sure we loaded everything to memory.
916        self.files.iter_mut().try_for_each(|(_, file)| file.load())?;
917
918        let mut file = BufWriter::new(File::create(&self.disk_file_path)?);
919        let extra_data = if extra_data.is_some() {
920            extra_data.clone()
921        } else {
922            Some(EncodeableExtraData::new_from_game_info(game_info))
923        };
924
925        self.encode(&mut file, &extra_data)
926    }
927
928    //-----------------------------------------------------------------------//
929    //                           Getters & Setters
930    //-----------------------------------------------------------------------//
931
    /// This function returns the current PFH Version of the provided Pack.
    ///
    /// The version determines which header layout is used when reading/writing the Pack.
    pub fn pfh_version(&self) -> PFHVersion {
        *self.header.pfh_version()
    }
936
    /// This function returns the current PFH File Type of the provided Pack.
    ///
    /// Returned by copy; use [Self::set_pfh_file_type] to change it.
    pub fn pfh_file_type(&self) -> PFHFileType {
        *self.header.pfh_file_type()
    }
941
    /// This function returns the bitmask applied to the provided Pack.
    ///
    /// Returns a copy of the header flags.
    pub fn bitmask(&self) -> PFHFlags {
        *self.header.bitmask()
    }
946
    /// This function returns the timestamp of the last time the Pack was saved.
    ///
    /// This is the raw value stored in the Pack header.
    pub fn internal_timestamp(&self) -> u64 {
        *self.header.internal_timestamp()
    }
951
    /// This function returns the Game version this Pack is intended for.
    ///
    /// This is the X.Y.Z game version encoded as a single u32, as stored in the header.
    pub fn game_version(&self) -> u32 {
        *self.header.game_version()
    }
956
    /// This function returns the build number of the game this Pack is intended for.
    ///
    /// Returned by copy from the header.
    pub fn build_number(&self) -> u32 {
        *self.header.build_number()
    }
961
    /// This function returns the tool that created the Pack. Max 8 characters, 00-padded.
    ///
    /// Borrowed from the header; see [Self::spoof_ca_authoring_tool] for the CA/RPFM presets.
    pub fn authoring_tool(&self) -> &str {
        self.header.authoring_tool()
    }
966
    /// This function returns the Extra Subheader Data, if any.
    ///
    /// Returns an empty slice when the Pack has no extra subheader data.
    pub fn extra_subheader_data(&self) -> &[u8] {
        self.header.extra_subheader_data()
    }
971/*
972    /// This function changes the path of the Pack.
973    ///
974    /// This can fail if you pass it an empty path.
975    pub fn set_file_path(&mut self, path: &Path) -> Result<()> {
976        if path.components().count() == 0 { return Err(ErrorKind::EmptyInput.into()) }
977        self.file_path = path.to_path_buf();
978
979        // We have to change the name of the Pack in all his `Files` too.
980        let file_name = self.disk_file_name();
981        self.files.iter_mut().for_each(|x| x.get_ref_mut_raw().set_packfile_name(&file_name));
982        Ok(())
983    }*/
984
985    /// This function returns the compression format of the Pack.
986    pub fn compression_format(&self) -> CompressionFormat {
987        let cf = self.settings().setting_string(SETTING_KEY_CF).map(|x| x.to_owned());
988        CompressionFormat::from(cf.unwrap_or_default().as_str())
989    }
990
    /// This function sets the current Pack PFH Version to the provided one.
    ///
    /// This only updates the in-memory header; nothing is re-encoded here.
    pub fn set_pfh_version(&mut self, version: PFHVersion) {
        self.header.set_pfh_version(version);
    }
995
    /// This function sets the current Pack PFH File Type to the provided one.
    ///
    /// This only updates the in-memory header.
    pub fn set_pfh_file_type(&mut self, file_type: PFHFileType) {
        self.header.set_pfh_file_type(file_type);
    }
1000
    /// This function sets the current Pack bitmask to the provided one.
    ///
    /// The provided flags replace the header's flags wholesale.
    pub fn set_bitmask(&mut self, bitmask: PFHFlags) {
        self.header.set_bitmask(bitmask);
    }
1005
    /// This function sets the current Pack timestamp to the provided one.
    ///
    /// Stored as-is in the in-memory header.
    pub fn set_internal_timestamp(&mut self, timestamp: u64) {
        self.header.set_internal_timestamp(timestamp);
    }
1010
    /// This function sets the game version (as in X.Y.Z) this Pack is for.
    ///
    /// The version is expected already encoded as a single u32.
    pub fn set_game_version(&mut self, game_version: u32) {
        self.header.set_game_version(game_version);
    }
1015
    /// This function sets the build number this Pack is for.
    ///
    /// This only updates the in-memory header.
    pub fn set_build_number(&mut self, build_number: u32) {
        self.header.set_build_number(build_number);
    }
1020
    /// This function sets the authoring tool that last edited this Pack.
    ///
    /// The value is stored as-is; see [Self::spoof_ca_authoring_tool] for the CA/RPFM presets.
    pub fn set_authoring_tool(&mut self, authoring_tool: &str) {
        self.header.set_authoring_tool(authoring_tool.to_string());
    }
1025
    /// This function sets the Extra Subheader Data of the Pack.
    ///
    /// The slice is copied into an owned Vec inside the header.
    pub fn set_extra_subheader_data(&mut self, extra_subheader_data: &[u8]) {
        self.header.set_extra_subheader_data(extra_subheader_data.to_vec());
    }
1030
1031    /// This function sets the compression format the pack should use.
1032    ///
1033    /// Returns the new compression format.
1034    /// Support for each format varies depending on the game.
1035    pub fn set_compression_format(&mut self, cf: CompressionFormat, game_info: &GameInfo) -> CompressionFormat {
1036        if cf == CompressionFormat::None || !game_info.compression_formats_supported().contains(&cf) {
1037            self.compress = false;
1038            self.settings_mut().set_setting_string(SETTING_KEY_CF, CompressionFormat::None.to_string().as_str());
1039            CompressionFormat::None
1040        } else {
1041            self.compress = true;
1042            self.settings_mut().set_setting_string(SETTING_KEY_CF, cf.to_string().as_str());
1043            cf
1044        }
1045    }
1046
1047    //-----------------------------------------------------------------------//
1048    //                             Util functions
1049    //-----------------------------------------------------------------------//
1050
1051    /// This function allows to toggle CA Authoring tool spoofing for this Pack.
1052    ///
1053    /// Passing spoof as false will reset the Authoring Tool to the default one.
1054    pub fn spoof_ca_authoring_tool(&mut self, spoof: bool) {
1055        if spoof {
1056            self.header.set_authoring_tool(AUTHORING_TOOL_CA.to_string());
1057        } else {
1058            self.header.set_authoring_tool(AUTHORING_TOOL_RPFM.to_string());
1059        }
1060    }
1061
1062    /// This function returns if the Pack is compressible or not.
1063    pub fn is_compressible(&self) -> bool {
1064        matches!(self.header.pfh_version, PFHVersion::PFH6 | PFHVersion::PFH5)
1065    }
1066
1067    /// This function returns the paths (as Strings) of the files used for missing loc data generation for the loaded Pack.
1068    ///
1069    /// The first one is the one for existing entries. The second one is the one for new entries.
1070    pub fn missing_locs_paths(&self) -> (String, String) {
1071        (
1072            MISSING_LOCS_PATH_START_EXISTING.to_owned() + &self.disk_file_name() + ".loc",
1073            MISSING_LOCS_PATH_START_NEW.to_owned() + &self.disk_file_name() + ".loc"
1074        )
1075    }
1076
    /// This function is used to generate all loc entries missing from a Pack into a missing.loc file.
    ///
    /// `existing_locs` maps loc keys to translations already known outside this Pack: keys found
    /// there go into the "existing" loc file, the rest into the "new" one with a PLACEHOLDER value.
    /// Returns the container paths of the loc files inserted into the Pack, if any.
    pub fn generate_missing_loc_data(&mut self, existing_locs: &HashMap<String, String>) -> Result<Vec<ContainerPath>> {
        let mut new_files = vec![];

        // Paths of the two generated files: one for keys with existing translations, one for new keys.
        let (missing_locs_path_existing, missing_locs_path_new) = self.missing_locs_paths();

        let db_tables = self.files_by_type(&[FileType::DB]);
        let loc_tables = self.files_by_type(&[FileType::Loc]);
        let mut missing_trads_file_new = Loc::new();
        let mut missing_trads_file_overwritten = Loc::new();

        // Gather, in parallel, every loc key already defined by this Pack's own loc tables, so we
        // don't generate duplicates. The two generated files are skipped so reruns don't feed back.
        let loc_keys_from_memory = loc_tables.par_iter().filter_map(|rfile| {
            if rfile.path_in_container_raw() != missing_locs_path_new && rfile.path_in_container_raw() != missing_locs_path_existing {
                if let Ok(RFileDecoded::Loc(table)) = rfile.decoded() {
                    // Loc keys live in the first column, as StringU16 values.
                    Some(table.data().iter().filter_map(|x| {
                        if let DecodedData::StringU16(data) = &x[0] {
                            Some(data.to_owned())
                        } else {
                            None
                        }
                    }).collect::<HashSet<String>>())
                } else { None }
            } else { None }
        }).flatten().collect::<HashSet<String>>();

        // Scan all DB tables in parallel, building the rows for both generated loc files.
        let (missing_trads_new, missing_trads_overwritten) = db_tables.par_iter().filter_map(|rfile| {
            if let Ok(RFileDecoded::DB(table)) = rfile.decoded() {
                let definition = table.definition();
                let loc_fields = definition.localised_fields();
                let table_data = table.data();
                let table_name = table.table_name_without_tables();
                let fields_processed = definition.fields_processed();

                let has_loc_fields = !loc_fields.is_empty();
                let is_building_culture_variants = table_name == "building_culture_variants";

                if has_loc_fields || is_building_culture_variants {
                    // Get the keys, which may be concatenated. We get them IN THE ORDER THEY ARE IN THE BINARY FILE.
                    let localised_order = definition.localised_key_order();
                    let mut new_rows_new = vec![];
                    let mut new_rows_overwritten = vec![];

                    for row in table_data.iter() {

                        // Generate locs for the table's own localised fields.
                        if has_loc_fields {
                            for loc_field in loc_fields {
                                let key = localised_order.iter().map(|pos| row[*pos as usize].data_to_string()).join("");

                                // Key can be empty due to incomplete schema. Ignore those.
                                if !key.is_empty() {
                                    // Loc keys follow the "<table>_<field>_<key>" convention.
                                    let loc_key = format!("{}_{}_{}", table_name, loc_field.name(), key);

                                    if let Some(value) = existing_locs.get(&loc_key) {
                                        // Known translation: copy it into the "existing" file.
                                        let mut new_row = missing_trads_file_overwritten.new_row();
                                        new_row[0] = DecodedData::StringU16(loc_key);
                                        new_row[1] = DecodedData::StringU16(value.to_owned());
                                        new_rows_overwritten.push(new_row);

                                    } else if !loc_keys_from_memory.contains(&*loc_key) {
                                        // Unknown and not defined by the Pack itself: placeholder entry in the "new" file.
                                        let mut new_row = missing_trads_file_new.new_row();
                                        new_row[0] = DecodedData::StringU16(loc_key);
                                        new_row[1] = DecodedData::StringU16("PLACEHOLDER".to_owned());
                                        new_rows_new.push(new_row);
                                    }
                                }
                            }
                        }

                        // Special case: building_culture_variants has a short_description column that references
                        // building_short_description_texts, a table that only exists in the Assembly Kit. We need to
                        // generate loc entries for it using the referenced table name as the loc key prefix.
                        if is_building_culture_variants {
                            if let Some(short_desc_index) = fields_processed.iter().position(|x| x.name() == "short_description") {
                                let key = row[short_desc_index].data_to_string();
                                if !key.is_empty() {
                                    let loc_key = format!("building_short_description_texts_short_description_{}", key);

                                    if let Some(value) = existing_locs.get(&loc_key) {
                                        let mut new_row = missing_trads_file_overwritten.new_row();
                                        new_row[0] = DecodedData::StringU16(loc_key);
                                        new_row[1] = DecodedData::StringU16(value.to_owned());
                                        new_rows_overwritten.push(new_row);

                                    } else if !loc_keys_from_memory.contains(&*loc_key) {
                                        let mut new_row = missing_trads_file_new.new_row();
                                        new_row[0] = DecodedData::StringU16(loc_key);
                                        new_row[1] = DecodedData::StringU16("PLACEHOLDER".to_owned());
                                        new_rows_new.push(new_row);
                                    }
                                }
                            }
                        }
                    }

                    return Some((new_rows_new, new_rows_overwritten))
                }
            }
            None
        }).collect::<(Vec<Vec<Vec<DecodedData>>>, Vec<Vec<Vec<DecodedData>>>)>();

        // NOTE: We do not use rayon's .flatten() because for some reason it eats values it's supposed to keep.
        let missing_trads_new = missing_trads_new.into_iter().flatten().collect::<Vec<_>>();
        let missing_trads_overwritten = missing_trads_overwritten.into_iter().flatten().collect::<Vec<_>>();

        // Save the missing translations to two files: one for new translations, and another one for translations in use by this pack.
        if !missing_trads_new.is_empty() {
            let _ = missing_trads_file_new.set_data(&missing_trads_new);
            let packed_file = RFile::new_from_decoded(&RFileDecoded::Loc(missing_trads_file_new), 0, &missing_locs_path_new);
            new_files.push(self.insert(packed_file)?.unwrap());
        }

        // The "existing" file can be disabled through a Pack setting.
        if !missing_trads_overwritten.is_empty() && !self.settings.setting_bool("do_not_generate_existing_locs").unwrap_or(&false) {
            let _ = missing_trads_file_overwritten.set_data(&missing_trads_overwritten);
            let packed_file = RFile::new_from_decoded(&RFileDecoded::Loc(missing_trads_file_overwritten), 0, &missing_locs_path_existing);
            new_files.push(self.insert(packed_file)?.unwrap());
        }

        Ok(new_files)
    }
1197
    /// This function is used to patch Warhammer I & II Siege map packs so their AI actually works.
    ///
    /// This also removes the useless xml files left by Terry in the Pack.
    ///
    /// Returns a human-readable report message plus the list of deleted files. Errors out if the
    /// Pack is empty, or if no file could be patched or deleted.
    pub fn patch_siege_ai(&mut self) -> Result<(String, Vec<ContainerPath>)> {

        // If there are no files, directly return an error.
        if self.files().is_empty() {
            return Err(RLibError::PatchSiegeAIEmptyPack)
        }

        let mut files_patched = 0;
        let mut files_to_delete: Vec<ContainerPath> = vec![];
        let mut multiple_defensive_hill_hints = false;

        // We only need to change stuff inside the map folder, so we only check the maps in that folder.
        for file in self.files_by_path_mut(&ContainerPath::Folder(TERRY_MAP_PATH.to_owned()), true) {
            let path = file.path_in_container_raw();

            // Split the file name off the path. Paths with nothing after the last '/' are skipped.
            let idx = path.rfind('/').unwrap_or(0);
            let name = if path.get(idx + 1..).is_some() {
                &path[idx + 1..]
            } else {
                continue
            };

            // The files we need to process are `bmd_data.bin` and all the `catchment_` files the map has.
            if name == DEFAULT_BMD_DATA || (name.starts_with("catchment_") && name.ends_with(".bin")) {
                file.load()?;
                let data = file.cached_mut()?;

                // The patching process it's simple. First, we check if there is SiegeAI stuff in the file by checking if there is an Area Node.
                // If we find one, we check if there is a defensive hill hint in the same file, and patch it if there is one.
                // The window sizes match the byte length of each hint pattern (19 and 18 bytes respectively).
                if data.windows(19).any(|window: &[u8]|window == SIEGE_AREA_NODE_HINT) {
                    if let Some(index) = data.windows(18).position(|window: &[u8]|window == DEFENSIVE_HILL_HINT) {
                        // Replace the first defensive hill hint in-place with a fort perimeter hint.
                        data.splice(index..index + 18, FORT_PERIMETER_HINT.iter().cloned());
                        files_patched += 1;
                    }

                    // If there is more than one defensive hill in one file, is a valid file, but we want to warn the user about it.
                    if data.windows(18).any(|window: &[u8]|window == DEFENSIVE_HILL_HINT) {
                        multiple_defensive_hill_hints = true;
                    }
                }
            }

            // All xml in this folder are useless, so we mark them all for deletion.
            else if name.ends_with(".xml") {
                files_to_delete.push(ContainerPath::File(file.path_in_container_raw().to_string()));
            }
        }

        // If there are files to delete, we delete them.
        files_to_delete.iter().for_each(|x| { self.remove(x); });

        // If we didn't found any file to patch or delete, return an error.
        if files_patched == 0 && files_to_delete.is_empty() { Err(RLibError::PatchSiegeAINoPatchableFiles) }

        // TODO: make this more.... `fluent`.
        // If we found files to delete, but not to patch, return a message reporting it.
        else if files_patched == 0 {
            Ok((format!("No file suitable for patching has been found.\n{} files deleted.", files_to_delete.len()), files_to_delete))
        }

        // If we found multiple defensive hill hints... it's ok, but we return a warning.
        else if multiple_defensive_hill_hints {

            // The message is different depending on the amount of files deleted.
            if files_to_delete.is_empty() {
                Ok((format!("{files_patched} files patched.\nNo file suitable for deleting has been found.\
                \n\n\
                WARNING: Multiple Defensive Hints have been found and we only patched the first one.\
                 If you are using SiegeAI, you should only have one Defensive Hill in the map (the \
                 one acting as the perimeter of your fort/city/castle). Due to SiegeAI being present, \
                 in the map, normal Defensive Hills will not work anyways, and the only thing they do \
                 is interfere with the patching process. So, if your map doesn't work properly after \
                 patching, delete all the extra Defensive Hill Hints. They are the culprit."), files_to_delete))
            }
            else {
                Ok((format!("{} files patched.\n{} files deleted.\
                \n\n\
                WARNING: Multiple Defensive Hints have been found and we only patched the first one.\
                 If you are using SiegeAI, you should only have one Defensive Hill in the map (the \
                 one acting as the perimeter of your fort/city/castle). Due to SiegeAI being present, \
                 in the map, normal Defensive Hills will not work anyways, and the only thing they do \
                 is interfere with the patching process. So, if your map doesn't work properly after \
                 patching, delete all the extra Defensive Hill Hints. They are the culprit.",
                files_patched, files_to_delete.len()), files_to_delete))
            }
        }

        // If no files to delete were found, but we got files patched, report it.
        else if files_to_delete.is_empty() {
            Ok((format!("{files_patched} files patched.\nNo file suitable for deleting has been found."), files_to_delete))
        }

        // And finally, if we got some files patched and some deleted, report it too.
        else {
            Ok((format!("{} files patched.\n{} files deleted.", files_patched, files_to_delete.len()), files_to_delete))
        }
    }
1297
1298    /// Function to perform a live extraction, meaning the files will be extracted while the game is running,
1299    /// allowing for real-time updates and modifications without the need for a full game restart.
1300    ///
1301    /// Only works in Warhammer 3.
1302    pub fn live_export(&mut self, game: &GameInfo, game_path: &Path, disable_regen_table_guid: bool, keys_first: bool) -> Result<()> {
1303
1304        // If there are no files, directly return an error.
1305        if self.files().is_empty() {
1306            return Err(RLibError::LiveExportNoFilesToExport);
1307        }
1308
1309        let extra_data = Some(EncodeableExtraData::new_from_game_info_and_settings(game, self.compression_format(), disable_regen_table_guid));
1310        let data_path = game.data_path(game_path)?;
1311
1312        // We're interested in lua and xml files only, not those entire folders.
1313        let files = self.files_by_type_and_paths(&[FileType::Text], &[ContainerPath::Folder("script/".to_string()), ContainerPath::Folder("ui/".to_string())], true)
1314            .into_iter()
1315            .cloned()
1316            .collect::<Vec<RFile>>();
1317
1318        let mut correlations = HashMap::new();
1319        for mut file in files.into_iter() {
1320            let mut path_split = file.path_in_container_split().iter().map(|x| x.to_owned()).collect::<Vec<_>>();
1321            let mut hasher = DefaultHasher::new();
1322
1323            // Use time to ensure we never collide with a previous live export.
1324            std::time::SystemTime::now().hash(&mut hasher);
1325            let value = hasher.finish();
1326            let new_name = format!("{}_{}", value, path_split.last().unwrap());
1327
1328            *path_split.last_mut().unwrap() = &new_name;
1329            let new_path = path_split.join("/");
1330
1331            correlations.insert(file.path_in_container_raw().to_owned(), new_path.to_owned());
1332            file.set_path_in_container_raw(&new_path);
1333
1334            // To avoid duplicating logic, we insert these files into the pack, extract them, then delete them from the Pack.
1335            let container_path = file.path_in_container();
1336            self.insert(file)?;
1337            self.extract(container_path.clone(), &data_path, true, &None, false, keys_first, &extra_data, true)?;
1338
1339            self.remove(&container_path);
1340        }
1341
1342        // This is the file you have to call from lua later on.
1343        let summary_data_str = correlations.iter().map(|(key, value)| format!("    [\"{key}\"] = \"{value}\",")).join("\n");
1344        let summary_data_lua = format!("return {{\n{summary_data_str}\n}}");
1345        let summary_path = game_path.join("lua_path_mappings.txt");
1346        let mut file = BufWriter::new(File::create(summary_path)?);
1347        file.write_all(summary_data_lua.as_bytes())?;
1348
1349        Ok(())
1350    }
1351
1352    /// Function to update the anim ids of the pack on mass, based on a starting id and an offset.
1353    pub fn update_anim_ids(&mut self, game: &GameInfo, starting_id: i32, offset: i32) -> Result<Vec<ContainerPath>> {
1354        if offset == 0 {
1355            return Err(RLibError::UpdateAnimIdsError("Offset must be different than 0.".to_owned()))
1356        }
1357
1358        if starting_id < 0 {
1359            return Err(RLibError::UpdateAnimIdsError("Starting Id must be greater than 0.".to_owned()))
1360        }
1361
1362        // First, do a pass over sparse files.
1363        let mut extra_data = DecodeableExtraData::default();
1364        extra_data.set_game_info(Some(game));
1365        let extra_data = Some(extra_data);
1366
1367        let mut files = self.files_by_type_mut(&[FileType::AnimFragmentBattle]);
1368        let mut paths = files.par_iter_mut()
1369            .filter_map(|file| {
1370                let mut changed = false;
1371                if let Ok(Some(RFileDecoded::AnimFragmentBattle(mut table))) = file.decode(&extra_data, false, true) {
1372                    if *table.max_id() >= starting_id as u32 {
1373                        table.set_max_id(*table.max_id() + offset as u32);
1374                        changed = true;
1375                    }
1376
1377                    for entry in table.entries_mut() {
1378                        if *entry.animation_id() >= starting_id as u32 {
1379                            entry.set_animation_id(*entry.animation_id() + offset as u32);
1380                            changed = true;
1381                        }
1382
1383                        if *entry.slot_id() >= starting_id as u32 {
1384                            entry.set_slot_id(*entry.slot_id() + offset as u32);
1385                            changed = true;
1386                        }
1387                    }
1388
1389                    if changed {
1390                        let _ = file.set_decoded(RFileDecoded::AnimFragmentBattle(table));
1391                        Some(file.path_in_container())
1392                    } else {
1393                        None
1394                    }
1395                } else {
1396                    None
1397                }
1398            }
1399        ).collect::<Vec<_>>();
1400
1401        // Then, do another pass over files in AnimPacks. No need to do a par_iter because there is often less than 10 animpacks in packs.
1402        let mut anim_packs = self.files_by_type_mut(&[FileType::AnimPack]);
1403
1404        for anim_pack in anim_packs.iter_mut() {
1405            let mut changed = false;
1406            if let Ok(Some(RFileDecoded::AnimPack(mut pack))) = anim_pack.decode(&extra_data, false, true) {
1407
1408                let mut files = pack.files_by_type_mut(&[FileType::AnimFragmentBattle]);
1409                for file in files.iter_mut() {
1410                    if let Ok(Some(RFileDecoded::AnimFragmentBattle(mut table))) = file.decode(&extra_data, false, true) {
1411                        if *table.max_id() >= starting_id as u32 {
1412                            table.set_max_id(*table.max_id() + offset as u32);
1413                            changed = true;
1414                        }
1415
1416                        for entry in table.entries_mut() {
1417                            if *entry.animation_id() >= starting_id as u32 {
1418                                entry.set_animation_id(*entry.animation_id() + offset as u32);
1419                                changed = true;
1420                            }
1421
1422                            if *entry.slot_id() >= starting_id as u32 {
1423                                entry.set_slot_id(*entry.slot_id() + offset as u32);
1424                                changed = true;
1425                            }
1426                        }
1427
1428                        if changed {
1429                            let _ = file.set_decoded(RFileDecoded::AnimFragmentBattle(table));
1430                        }
1431                    }
1432                }
1433
1434                if changed {
1435                    let _ = anim_pack.set_decoded(RFileDecoded::AnimPack(pack));
1436                    paths.push(anim_pack.path_in_container());
1437                }
1438            }
1439        }
1440
1441        Ok(paths)
1442    }
1443}
1444
1445impl PackNotes {
1446
1447    /// This function tries to load the notes from the current Pack and return them.
1448    pub fn load(data: &[u8]) -> Result<Self> {
1449        from_slice(data).map_err(From::from)
1450    }
1451
1452    /// This function returns all notes afecting the provided path.
1453    pub fn notes_by_path(&self, path: &str) -> Vec<Note> {
1454        let path_lower = path.to_lowercase();
1455        self.file_notes()
1456            .iter()
1457            .filter(|(path, _)| path.is_empty() || path_lower.starts_with(*path) || &&path_lower == path)
1458            .flat_map(|(_, notes)| notes.to_vec())
1459            .collect()
1460    }
1461
1462    /// This function adds a note for an specific path.
1463    ///
1464    /// Note: for DB tables, notes are added for all tables with the same table name instead of specific tables.
1465    pub fn add_note(&mut self, mut note: Note) -> Note {
1466
1467        // For tables, share notes between same-type tables.
1468        let mut path = note.path().to_lowercase();
1469        if path.starts_with("db/") || path.starts_with("ceo_db/") {
1470            let mut new_path = path.split('/').collect::<Vec<_>>();
1471            if new_path.len() == 3 {
1472                new_path.pop();
1473            }
1474            path = new_path.join("/");
1475        }
1476        note.set_path(path.to_owned());
1477
1478        match self.file_notes_mut().get_mut(&path) {
1479            Some(notes) => {
1480
1481                // If it already has an id greater than 0, we're trying to replace and existing note if found.
1482                if *note.id() == 0 {
1483                    let id = notes.iter().map(|note| note.id()).max().unwrap();
1484                    note.set_id(*id + 1);
1485                } else {
1486                    notes.retain(|x| x.id() != note.id());
1487                }
1488
1489                notes.push(note.clone());
1490                note
1491            },
1492            None => {
1493                let notes = vec![note.clone()];
1494                self.file_notes_mut().insert(path.to_owned(), notes);
1495                note
1496            }
1497        }
1498    }
1499
1500    /// This function deletes a note with the specified path and id.
1501    pub fn delete_note(&mut self, path: &str, id: u64) {
1502        let path_lower = path.to_lowercase();
1503
1504        if let Some(notes) = self.file_notes_mut().get_mut(&path_lower) {
1505            notes.retain(|note| note.id() != &id);
1506            if notes.is_empty() {
1507                self.file_notes_mut().remove(&path_lower);
1508            }
1509        }
1510    }
1511}
1512
1513impl PackSettings {
1514
1515    /// This function tries to load the settings from a slice and return them.
1516    pub fn load(data: &[u8]) -> Result<Self> {
1517        from_slice(data).map_err(From::from)
1518    }
1519
1520    /// This function tries to load the settings from a slice, update them so they don't have any missing values and return them.
1521    pub fn load_and_update(&mut self, data: &[u8]) -> Result<()> {
1522        let settings: Self = from_slice(data)?;
1523
1524        self.settings_bool.extend(settings.settings_bool);
1525        self.settings_number.extend(settings.settings_number);
1526        self.settings_string.extend(settings.settings_string);
1527        self.settings_text.extend(settings.settings_text);
1528
1529        Ok(())
1530    }
1531
1532    /// This function returns the provided string setting, if found.
1533    pub fn setting_string(&self, key: &str) -> Option<&String> {
1534        self.settings_string.get(key)
1535    }
1536
1537    /// This function returns the provided text setting (multiline string), if found.
1538    pub fn setting_text(&self, key: &str) -> Option<&String> {
1539        self.settings_text.get(key)
1540    }
1541
1542    /// This function returns the provided bool setting, if found.
1543    pub fn setting_bool(&self, key: &str) -> Option<&bool> {
1544        self.settings_bool.get(key)
1545    }
1546
1547    /// This function returns the provided numeric setting, if found.
1548    pub fn setting_number(&self, key: &str) -> Option<&i32> {
1549        self.settings_number.get(key)
1550    }
1551
1552    /// This function sets the string setting provided with the value you passed.
1553    ///
1554    /// If the value already existed, it gets overwritten.
1555    pub fn set_setting_string(&mut self, key: &str, value: &str) {
1556        self.settings_string.insert(key.to_owned(), value.to_owned());
1557    }
1558
1559    /// This function sets the text (multiline string) setting provided with the value you passed.
1560    ///
1561    /// If the value already existed, it gets overwritten.
1562    pub fn set_setting_text(&mut self, key: &str, value: &str) {
1563        self.settings_text.insert(key.to_owned(), value.to_owned());
1564    }
1565
1566    /// This function sets the bool setting provided with the value you passed.
1567    ///
1568    /// If the value already existed, it gets overwritten.
1569    pub fn set_setting_bool(&mut self, key: &str, value: bool) {
1570        self.settings_bool.insert(key.to_owned(), value);
1571    }
1572
1573    /// This function sets the numeric setting provided with the value you passed.
1574    ///
1575    /// If the value already existed, it gets overwritten.
1576    pub fn set_setting_number(&mut self, key: &str, value: i32) {
1577        self.settings_number.insert(key.to_owned(), value);
1578    }
1579
1580    /// This function returns the list of paths which the diagnostic tool should ignore.
1581    ///
1582    /// TODO: Move this to rpfm_extensions.
1583    pub fn diagnostics_files_to_ignore(&self) -> Option<Vec<(String, Vec<String>, Vec<String>)>> {
1584        self.settings_text.get("diagnostics_files_to_ignore").map(|files_to_ignore| {
1585            let files = files_to_ignore.split('\n').collect::<Vec<&str>>();
1586
1587            // Ignore commented out rows.
1588            files.iter().filter_map(|x| {
1589                if !x.starts_with('#') {
1590                    let path = x.splitn(3, ';').collect::<Vec<&str>>();
1591                    if path.len() == 3 {
1592                        Some((path[0].to_string(), path[1].split(',').filter_map(|y| if !y.is_empty() { Some(y.to_owned()) } else { None }).collect::<Vec<String>>(), path[2].split(',').filter_map(|y| if !y.is_empty() { Some(y.to_owned()) } else { None }).collect::<Vec<String>>()))
1593                    } else if path.len() == 2 {
1594                        Some((path[0].to_string(), path[1].split(',').filter_map(|y| if !y.is_empty() { Some(y.to_owned()) } else { None }).collect::<Vec<String>>(), vec![]))
1595                    } else if path.len() == 1 {
1596                        Some((path[0].to_string(), vec![], vec![]))
1597                    } else {
1598                        None
1599                    }
1600                } else {
1601                    None
1602                }
1603            }).collect::<Vec<(String, Vec<String>, Vec<String>)>>()
1604        })
1605    }
1606}
1607
1608impl Default for PackHeader {
1609    fn default() -> Self {
1610        Self {
1611            pfh_version: Default::default(),
1612            pfh_file_type: Default::default(),
1613            bitmask: Default::default(),
1614            internal_timestamp: Default::default(),
1615            game_version: Default::default(),
1616            build_number: Default::default(),
1617            authoring_tool: AUTHORING_TOOL_RPFM.to_owned(),
1618            extra_subheader_data: Default::default(),
1619        }
1620    }
1621}
1622
impl Default for PFHFlags {

    /// By default, no header bitmask flags are set.
    fn default() -> Self {
        Self::empty()
    }
}
1628
1629impl Default for PackSettings {
1630    fn default() -> Self {
1631        let mut settings = Self {
1632            settings_text: BTreeMap::new(),
1633            settings_string: BTreeMap::new(),
1634            settings_bool: BTreeMap::new(),
1635            settings_number: BTreeMap::new(),
1636        };
1637
1638        settings.settings_text_mut().insert("diagnostics_files_to_ignore".to_owned(), "".to_owned());
1639        settings.settings_text_mut().insert("import_files_to_ignore".to_owned(), "".to_owned());
1640        settings.settings_bool_mut().insert("disable_autosaves".to_owned(), false);
1641        settings.settings_bool_mut().insert("do_not_generate_existing_locs".to_owned(), false);
1642        settings.settings_string_mut().insert(SETTING_KEY_CF.to_owned(), "None".to_owned());
1643        settings
1644    }
1645}