// rpfm_extensions/diagnostics/text.rs
1//---------------------------------------------------------------------------//
2// Copyright (c) 2017-2026 Ismael Gutiérrez González. All rights reserved.
3//
4// This file is part of the Rusted PackFile Manager (RPFM) project,
5// which can be found here: https://github.com/Frodo45127/rpfm.
6//
7// This file is licensed under the MIT license, which can be found here:
8// https://github.com/Frodo45127/rpfm/blob/master/LICENSE.
9//---------------------------------------------------------------------------//
10
11//! Module with the structs and functions specific for `Text` diagnostics.
12
13use getset::{Getters, MutGetters};
14use serde_derive::{Serialize, Deserialize};
15
16use std::collections::{HashMap, HashSet};
17use std::{fmt, fmt::Display};
18
19use rpfm_lib::files::{RFile, RFileDecoded};
20use rpfm_lib::utils::*;
21
22use crate::dependencies::Dependencies;
23use crate::diagnostics::*;
24
25//-------------------------------------------------------------------------------//
26// Enums & Structs
27//-------------------------------------------------------------------------------//
28
/// This struct contains the results of a Text diagnostic.
#[derive(Debug, Clone, Default, Getters, MutGetters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub")]
pub struct TextDiagnostic {

    /// Path of the diagnosed file inside its container.
    path: String,

    /// Key of the Pack the diagnosed file belongs to.
    pack: String,

    /// Individual diagnostic results found in the file.
    results: Vec<TextDiagnosticReport>
}
37
/// This struct defines an individual Text diagnostic result.
#[derive(Debug, Clone, Getters, MutGetters, Serialize, Deserialize)]
#[getset(get = "pub", get_mut = "pub")]
pub struct TextDiagnosticReport {

    /// The type (and payload) of this diagnostic report.
    report_type: TextDiagnosticReportType,
}
44
/// The types of diagnostic reports a Text file can produce.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TextDiagnosticReportType {

    /// A key referenced in the text file doesn't exist in the referenced DB table.
    ///
    /// Payload: start cursor `(row, column)`, end cursor `(row, column)`, table name,
    /// column name, and the missing key itself.
    InvalidKey((u64, u64), (u64, u64), String, String, String),
}
49
50//-------------------------------------------------------------------------------//
51// Implementations
52//-------------------------------------------------------------------------------//
53
54impl TextDiagnosticReport {
55 pub fn new(report_type: TextDiagnosticReportType) -> Self {
56 Self {
57 report_type
58 }
59 }
60}
61
62impl DiagnosticReport for TextDiagnosticReport {
63 fn message(&self) -> String {
64 match &self.report_type {
65 TextDiagnosticReportType::InvalidKey(_,_, table, column, key) => "Invalid Key: \"".to_string() + key + "\" is not in table \"" + table + "\", column \"" + column + "\".",
66 }
67 }
68
69 fn level(&self) -> DiagnosticLevel {
70 match self.report_type {
71 TextDiagnosticReportType::InvalidKey(_,_,_,_,_) => DiagnosticLevel::Error,
72 }
73 }
74}
75
76impl Display for TextDiagnosticReportType {
77 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
78 Display::fmt(match self {
79 Self::InvalidKey(_,_,_,_,_) => "InvalidKey",
80 }, f)
81 }
82}
83
84impl TextDiagnostic {
85 pub fn new(path: &str, pack: &str) -> Self {
86 Self {
87 path: path.to_owned(),
88 pack: pack.to_owned(),
89 results: vec![],
90 }
91 }
92
93 /// This function takes care of checking for Text-Related for errors.
94 pub fn check(
95 pack_key: &str,
96 file: &RFile,
97 packs: &BTreeMap<String, Pack>,
98 dependencies: &Dependencies,
99 global_ignored_diagnostics: &[String],
100 ignored_fields: &[String],
101 ignored_diagnostics: &HashSet<String>,
102 ignored_diagnostics_for_fields: &HashMap<String, Vec<String>>,
103 ) -> Option<DiagnosticType> {
104
105 if let Ok(RFileDecoded::Text(text)) = file.decoded() {
106 let mut diagnostic = Self::new(file.path_in_container_raw(), pack_key);
107
108 let text = text.contents();
109 let mut start_pos = 0;
110
111 // We're only interested in tables marked with "--@db".
112 while let Some(pos) = text[start_pos..].find("--@db ") {
113 if let Some(end_line) = text[start_pos + pos..].find('\n') {
114
115 // We only support single-line comments.
116 let table_data = text[start_pos + pos + 6..start_pos + pos + end_line].split(' ').collect::<Vec<_>>();
117
118 // We expect table name and column.
119 if table_data.len() >= 2 {
120 let table_name = if table_data[0].ends_with("_tables") { table_data[0].to_owned() } else { table_data[0].to_owned() + "_tables" };
121 let table_column = if table_data[1].ends_with("\r") {
122 &table_data[1][..table_data[1].len() - 1]
123 } else {
124 table_data[1]
125 };
126
127 let index_to_check = if let Some(indexes) = table_data.get(2) {
128 indexes.split(",")
129 .filter_map(|x| x.parse::<usize>().ok())
130 .collect()
131 } else {
132 vec![]
133 };
134
135 // We need to make sure we only check the next line for the start, or we may end up checking the wrong vars.
136 let (next_line_start, next_line_end) = match text[start_pos + pos + 6..].find('\n') {
137 Some(nls) => if text.as_bytes().get(start_pos + pos + 6 + nls + 1).is_some() {
138 match text[start_pos + pos + 6 + nls + 1..].find('\n') {
139 Some(nle) => (start_pos + pos + 6 + nls + 1, start_pos + pos + 6 + nls + 1 + nle),
140 None => break,
141 }
142 } else {
143 break;
144 }
145
146 None => break,
147 };
148
149 // Formats supported:
150 // - Single line, single value:
151 // hb = "key"
152 //
153 // - Single line, single table:
154 // hb = { "a", "b" }
155 //
156 // - Single line, keyed table:
157 // hb = { "a" = "b", "c" = "d" }
158 //
159 // - Multiple lines, single table:
160 // hb = {
161 // "a",
162 // "b"
163 // }
164 //
165 // - Multiple lines, keyed table (support for key and value:
166 // hb = {
167 // "a" = "b"
168 // "c" = "d"
169 // }
170
171 // Data to search are strings in commas between {}.
172 let (keys, data_start, data_end) = {
173 let mut vals = (vec![], 0, 0);
174
175 if let Some(data_start) = text[next_line_start..next_line_end].find('{') {
176 if let Some(data_end) = text[next_line_start + data_start..].find('}') {
177
178 // +1 to not include the { at the start.
179 let data_to_search = &text[next_line_start + data_start + 1..next_line_start + data_start + data_end];
180
181 // Multi-line table.
182 if data_to_search.contains('\n') || data_to_search.contains('\r') {
183
184 // Keyed table.
185 if data_to_search.contains('=') {
186 let data_split = data_to_search.split('\n')
187 .filter_map(|x| {
188 let spl = x.split('=')
189 .map(|y| y.split('\"').collect::<Vec<_>>());
190
191 let mut keys = vec![];
192 for (i, data) in spl.enumerate() {
193 if index_to_check.contains(&i) && data.len() == 3 {
194 keys.push(data[1].to_owned());
195 }
196 }
197
198 if !keys.is_empty() {
199 Some(keys)
200 } else {
201 None
202 }
203 })
204 .flatten()
205 .collect::<Vec<_>>();
206
207 vals = (data_split, data_start, data_end)
208 }
209
210 // Non-keyed/single value table.
211 else {
212 let data_split = data_to_search.split('\n')
213 .filter_map(|x| {
214
215 // On each line, we want the data between commas.
216 let spl = x.split('\"').collect::<Vec<_>>();
217 if spl.len() == 3 {
218 Some(spl[1].to_owned())
219 } else {
220 None
221 }
222 })
223 .collect::<Vec<_>>();
224
225 vals = (data_split, data_start, data_end)
226 }
227 }
228
229 // Single line keyed table.
230 else if data_to_search.contains('=') {
231 let data_split = data_to_search.split(',')
232 .filter_map(|x| {
233 let spl = x.split('=')
234 .map(|y| y.split('\"').collect::<Vec<_>>());
235
236 let mut keys = vec![];
237 for (i, data) in spl.enumerate() {
238 if index_to_check.contains(&i) && data.len() == 3 {
239 keys.push(data[1].to_owned());
240 }
241 }
242
243 if !keys.is_empty() {
244 Some(keys)
245 } else {
246 None
247 }
248 })
249 .flatten()
250 .collect::<Vec<_>>();
251
252 vals = (data_split, data_start, data_end)
253 }
254
255 // Single line non-keyed table.
256 else {
257 let data_split = data_to_search.split(',')
258 .filter_map(|x| {
259
260 // On each line, we want the data between commas.
261 let spl = x.split('\"').collect::<Vec<_>>();
262 if spl.len() == 3 {
263 Some(spl[1].to_owned())
264 } else {
265 None
266 }
267 })
268 .collect::<Vec<_>>();
269
270 vals = (data_split, data_start, data_end)
271 }
272 }
273 }
274
275 // No { means it's single line, single value.
276 else if let Some(data_start) = text[next_line_start..next_line_end].find('\"') {
277 // +1 to skip the starting comma.
278 if text.as_bytes().get(next_line_start + data_start + 1).is_some() {
279 if let Some(data_end) = text[next_line_start + data_start + 1..].find('\"') {
280 if text.as_bytes().get(next_line_start + data_start + 1 + data_end).is_some() {
281 let data_to_search = &text[next_line_start + data_start + 1..next_line_start + data_start + 1 + data_end];
282 vals = (vec![data_to_search.to_string()], data_start, data_end)
283 }
284 }
285 }
286 }
287
288 vals
289 };
290
291 let mut not_found = HashMap::new();
292
293 // Add the files from the dependencies, then the files from the pack, then reverse the list so we process first the pack ones.
294 if let Ok(mut tables) = dependencies.db_data(&table_name, true, true) {
295 for pack in packs.values() {
296 tables.append(&mut pack.files_by_path(&ContainerPath::Folder("db/".to_string() + &table_name + "/"), true));
297 }
298 tables.reverse();
299
300 // If there are no tables that match out name, ignore it.
301 if tables.is_empty() {
302 start_pos = next_line_start + data_start + data_end;
303 continue;
304 }
305
306 for key in &keys {
307 let key_to_check = key.trim();
308
309 // Calculate the row, column_start and column_end of the data.
310 let start_cursor = line_column_from_string_pos(text, (next_line_start + data_start + 1) as u64);
311 let end_cursor = line_column_from_string_pos(text, (next_line_start + data_start + 1 + data_end) as u64);
312
313 let mut found = false;
314 for table in &tables {
315 if let Ok(RFileDecoded::DB(table)) = table.decoded() {
316 let definition = table.definition();
317 if let Some(column) = definition.column_position_by_name(table_column) {
318 for row in table.data().iter() {
319 if row[column].data_to_string() == *key_to_check {
320 found = true;
321 break;
322 }
323 }
324
325 if found {
326 break;
327 }
328 }
329 }
330 }
331
332 if !found {
333 not_found.insert(key_to_check, (start_cursor, end_cursor));
334 }
335 }
336
337 for (key, (start, end)) in ¬_found {
338 if !Diagnostics::ignore_diagnostic(global_ignored_diagnostics, None, Some("InvalidKey"), ignored_fields, ignored_diagnostics, ignored_diagnostics_for_fields) {
339 let result = TextDiagnosticReport::new(TextDiagnosticReportType::InvalidKey(*start, *end, table_name.to_string(), table_column.to_string(), key.to_string()));
340 diagnostic.results_mut().push(result);
341 }
342 }
343 }
344
345 start_pos = next_line_start + data_start + data_end;
346 }
347 }
348 }
349
350 if !diagnostic.results().is_empty() {
351 Some(DiagnosticType::Text(diagnostic))
352 } else { None }
353 } else { None }
354 }
355}