29 Commits

Author SHA1 Message Date
96acb36767 update TU3 list file 2025-10-02 21:15:02 +08:00
6e401c86cf update deps, add reqwest and clippy 2025-10-02 20:37:04 +08:00
1686114181 fix: error setting new sub patch id
Some checks failed
Release Build / build (push) Has been cancelled
2025-08-17 21:14:07 +08:00
4ca65979c9 workflows 2025-08-15 15:04:03 +08:00
6e493b3fdc update to TU2.5 list file 2025-08-15 14:59:10 +08:00
5401200add DLC pak support
Some checks failed
Release Build / build (push) Has been cancelled
2025-08-15 11:23:43 +08:00
312c3bc90e update dependencies 2025-08-15 10:48:32 +08:00
7222ee1790 Path-like chunk name component parser 2025-08-15 10:47:51 +08:00
d628400f6a fix when patch not deleted when not exists higher patches 2025-07-28 20:15:40 +08:00
18d8138dc4 remove local relative path dependencies
Some checks failed
Release Build / build (push) Has been cancelled
2025-07-28 19:43:57 +08:00
7d17efd889 github actions 2025-07-28 19:34:40 +08:00
e0ff976a11 add Restore tool, fix metadata write error 2025-07-28 19:31:09 +08:00
c64a9a8fbb write a metadata to uncompressed pak 2025-07-28 18:45:11 +08:00
6bd30c682d fix replace mode not working 2025-07-28 18:07:24 +08:00
6c78c4f01c update version 2025-07-28 18:03:18 +08:00
b6b2820d75 clippy, remove unnecessary print 2025-07-28 17:56:03 +08:00
0d465ed46c auto mode support 2025-07-28 17:53:03 +08:00
1588c8d756 update embedded list file to TU2 version 2025-07-28 17:49:34 +08:00
6e95351fa0 v0.2.0 refactoring 2025-07-28 14:51:55 +08:00
601e217276 use fs_err for better std::fs errors 2025-07-25 19:33:47 +08:00
87d808dc3b update dependencies 2025-07-25 19:25:27 +08:00
817db15340 switch to color_eyre 2025-07-25 19:24:50 +08:00
ad98236e06 colorful and better interaction
Use colored texts.
Default enable feature clone.
Add panic hook to avoid program exit.
2025-03-30 21:20:05 +08:00
9d703833e4 Merge branch 'main' of github.com:eigeen/mhws-tex-decompressor 2025-03-30 20:56:03 +08:00
40f9ea4572 wait for exit when success 2025-03-30 20:55:32 +08:00
3009d73726 Merge pull request #2 from xuanplus/patch-1
Update main.rs to remove quotes
2025-03-22 11:15:39 +08:00
Haoxuan Di
108d8bce92 Update main.rs
remove quotes for dropping file to terminal
2025-03-22 11:11:43 +08:00
527354a3af full package and feature clone 2025-03-20 11:06:22 +08:00
f46a06948c pak entry flags clone 2025-03-19 21:30:28 +08:00
10 changed files with 3336 additions and 296 deletions

52
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,52 @@
# Release Build: compiles the Windows binary on version tags (or manual
# dispatch), uploads it as an artifact, and drafts a GitHub release.
name: Release Build

on:
  push:
    tags: ["v*"]
  workflow_dispatch:

# Needed by softprops/action-gh-release to create the release.
permissions:
  contents: write

env:
  CARGO_TERM_COLOR: always
  BINARY_NAME: mhws-tex-decompressor

jobs:
  build:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: x86_64-pc-windows-msvc
      - name: Build
        run: cargo build --verbose --release
      - name: Prepare release package
        # `copy` because the runner shell is Windows cmd/powershell.
        run: |
          mkdir release
          copy target/release/${{ env.BINARY_NAME }}.exe release/
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.BINARY_NAME }}-windows-x64
          path: release/
      - name: Create Release
        uses: softprops/action-gh-release@v2
        # Only draft a release for tag pushes, not manual dispatch runs.
        if: github.ref_type == 'tag'
        with:
          draft: true
          files: release/*
          name: Release ${{ github.ref_name }}
          body: |
            ## ${{ github.ref_name }}
            ### Changes
            - Please fill in the changes

2
.gitignore vendored
View File

@@ -1 +1,3 @@
/target
*.exe
*.zip

2300
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package]
name = "mhws-tex-decompressor"
version = "0.1.1"
version = "0.3.0"
edition = "2024"
[dependencies]
@@ -11,8 +11,16 @@ edition = "2024"
re-tex = { git = "https://github.com/eigeen/re-tex.git", branch = "main" }
ree-pak-core = { git = "https://github.com/eigeen/ree-pak-rs.git", branch = "main" }
dialoguer = "0.11"
eyre = "0.6"
indicatif = "0.17"
rayon = "1.10"
# UI
dialoguer = "0.12"
indicatif = "0.18"
colored = "3.0"
color-eyre = "0.6"
rayon = "1.11"
parking_lot = "0.12"
fs-err = "3.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.47", features = ["parking_lot", "rt-multi-thread", "macros"] }
reqwest = { version = "0.12", features = ["json"] }

Binary file not shown.

724
src/app.rs Normal file
View File

@@ -0,0 +1,724 @@
use std::{
io::{self, Write},
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
use fs_err as fs;
use color_eyre::eyre::bail;
use dialoguer::{Input, MultiSelect, Select, theme::ColorfulTheme};
use fs::OpenOptions;
use indicatif::{HumanBytes, ProgressBar, ProgressStyle};
use parking_lot::Mutex;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use re_tex::tex::Tex;
use ree_pak_core::{
filename::FileNameTable, pak::PakEntry, read::archive::PakArchiveReader,
utf16_hash::Utf16HashExt, write::FileOptions,
};
use crate::{chunk::ChunkName, metadata::PakMetadata, util::human_bytes};
/// Sub-chunks at or above this size are pre-selected in automatic mode.
const AUTO_CHUNK_SELECTION_SIZE_THRESHOLD: usize = 50 * 1024 * 1024; // 50MB
/// Item labels for boolean `Select` prompts; selected index 1 means "True".
const FALSE_TRUE_SELECTION: [&str; 2] = ["False", "True"];
/// Top-level operating mode, chosen from the interactive menu.
/// Discriminants mirror the menu item order.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mode {
    Automatic = 0,
    Manual = 1,
    Restore = 2,
}

impl Mode {
    /// Map a zero-based menu index to its `Mode`, rejecting out-of-range values.
    fn from_index(index: usize) -> color_eyre::Result<Self> {
        let mode = match index {
            0 => Mode::Automatic,
            1 => Mode::Manual,
            2 => Mode::Restore,
            _ => bail!("Invalid mode index: {index}"),
        };
        Ok(mode)
    }
}
/// A pak file found on disk, paired with its parsed chunk name and size.
#[derive(Clone)]
struct ChunkSelection {
    chunk_name: ChunkName,
    file_size: u64,
    full_path: PathBuf,
}

impl std::fmt::Display for ChunkSelection {
    /// Rendered in selection menus as "name (human-readable size)".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let size_text = human_bytes(self.file_size);
        write!(f, "{} ({})", self.chunk_name, size_text)
    }
}
/// Interactive application state.
#[derive(Default)]
pub struct App {
    // File name table used to resolve entry hashes to paths.
    // NOTE(review): no code visible in this file ever assigns this field, yet
    // `filename_table()` unwraps it — confirm where (or whether) it is loaded,
    // otherwise auto/manual mode will panic on the unwrap.
    filename_table: Option<FileNameTable>,
}
impl App {
    /// Entry point: print the banner, ask the user for a mode, and dispatch.
    pub fn run(&mut self) -> color_eyre::Result<()> {
        println!("Version v{} - Tool by @Eigeen", env!("CARGO_PKG_VERSION"));
        println!("Get updates from https://github.com/eigeen/mhws-tex-decompressor");
        println!();
        println!("Loading embedded file name table...");
        // NOTE(review): nothing after the message above assigns
        // `self.filename_table` in this file; `filename_table()` would panic if
        // loading happens nowhere else — verify initialization.
        // Mode selection
        let mode = Select::with_theme(&ColorfulTheme::default())
            .with_prompt("Select mode")
            .items(["Automatic", "Manual", "Restore"])
            .default(0)
            .interact()?;
        let mode = Mode::from_index(mode)?;
        match mode {
            Mode::Automatic => self.auto_mode(),
            Mode::Manual => self.manual_mode(),
            Mode::Restore => self.restore_mode(),
        }
    }
    /// Borrow the loaded file name table.
    ///
    /// # Panics
    /// Panics if the table has not been loaded (`filename_table` is `None`).
    fn filename_table(&self) -> &FileNameTable {
        self.filename_table.as_ref().unwrap()
    }
/// Scan for all pak files in the game directory, including DLC directory
fn scan_all_pak_files(&self, game_dir: &Path) -> color_eyre::Result<Vec<ChunkSelection>> {
let mut main_chunks = Vec::new();
let mut dlc_chunks = Vec::new();
// Scan main game directory
self.scan_pak_files_in_dir(game_dir, &mut main_chunks)?;
// Scan DLC directory if it exists
let dlc_dir = game_dir.join("dlc");
if dlc_dir.is_dir() {
self.scan_pak_files_in_dir(&dlc_dir, &mut dlc_chunks)?;
}
// If both main and DLC have files, ask user which locations to process
let selected_locations = if !main_chunks.is_empty() && !dlc_chunks.is_empty() {
let locations = vec!["Main game directory", "DLC directory"];
MultiSelect::with_theme(&ColorfulTheme::default())
.with_prompt("Select locations to process (Space to select, Enter to confirm)")
.items(&locations)
.defaults(&[true, true])
.interact()?
} else if !main_chunks.is_empty() {
vec![0]
} else if !dlc_chunks.is_empty() {
vec![1]
} else {
vec![]
};
let mut all_chunks = Vec::new();
for &location_idx in &selected_locations {
match location_idx {
0 => all_chunks.extend(main_chunks.iter().cloned()),
1 => all_chunks.extend(dlc_chunks.iter().cloned()),
_ => {}
}
}
all_chunks.sort_by(|a, b| a.chunk_name.cmp(&b.chunk_name));
Ok(all_chunks)
}
/// Scan pak files in a specific directory
fn scan_pak_files_in_dir(
&self,
dir: &Path,
all_chunks: &mut Vec<ChunkSelection>,
) -> color_eyre::Result<()> {
let entries = fs::read_dir(dir)?;
for entry in entries {
let entry = entry?;
if !entry.file_type()?.is_file() {
continue;
}
let file_name = entry.file_name().to_string_lossy().to_string();
let file_path = entry.path();
if !file_name.ends_with(".pak") {
continue;
}
let chunk_name = match ChunkName::try_from_str(&file_name) {
Ok(chunk_name) => chunk_name,
Err(e) => {
println!("Invalid chunk name, skipped: {e}");
continue;
}
};
let file_size = fs::metadata(&file_path)?.len();
all_chunks.push(ChunkSelection {
chunk_name,
file_size,
full_path: file_path,
});
}
Ok(())
}
    /// Decompress the tex entries of one pak and write them to a new pak.
    ///
    /// - `use_full_package_mode`: keep every entry (non-tex entries are copied
    ///   verbatim); otherwise only `.tex.241106027` entries are written.
    /// - `use_feature_clone`: carry each entry's unknown attribute flags over
    ///   to the output entry.
    ///
    /// If a worker fails, the already-processed entries are still flushed to
    /// the output file instead of being discarded.
    fn process_chunk(
        &self,
        filename_table: &FileNameTable,
        input_path: &Path,
        output_path: &Path,
        use_full_package_mode: bool,
        use_feature_clone: bool,
    ) -> color_eyre::Result<()> {
        println!("Processing chunk: {}", input_path.display());
        let file = fs::File::open(input_path)?;
        let mut reader = io::BufReader::new(file);
        let pak_archive = ree_pak_core::read::read_archive(&mut reader)?;
        let archive_reader = PakArchiveReader::new(reader, &pak_archive);
        // Shared behind a mutex: rayon workers take turns pulling entry readers.
        let archive_reader_mtx = Mutex::new(archive_reader);
        // filtered entries
        let entries = if use_full_package_mode {
            pak_archive.entries().iter().collect::<Vec<_>>()
        } else {
            println!("Filtering entries...");
            pak_archive
                .entries()
                .iter()
                .filter(|entry| is_tex_file(entry.hash(), filename_table))
                .collect::<Vec<_>>()
        };
        // new pak archive
        let out_file = OpenOptions::new()
            .create(true)
            .truncate(true)
            .write(true)
            .open(output_path)?;
        // +1 for metadata
        let mut pak_writer =
            ree_pak_core::write::PakWriter::new(out_file, (entries.len() as u64) + 1);
        // write metadata (marks the output as generated by this tool)
        let metadata = PakMetadata::new(use_full_package_mode);
        metadata.write_to_pak(&mut pak_writer)?;
        let pak_writer_mtx = Arc::new(Mutex::new(pak_writer));
        let bar = ProgressBar::new(entries.len() as u64);
        bar.set_style(
            ProgressStyle::default_bar()
                .template("Bytes written: {msg}\n{pos}/{len} {wide_bar}")?,
        );
        bar.enable_steady_tick(Duration::from_millis(200));
        let pak_writer_mtx1 = Arc::clone(&pak_writer_mtx);
        let bar1 = bar.clone();
        // Total bytes written, accumulated across worker threads.
        let bytes_written = AtomicUsize::new(0);
        let err = entries
            .par_iter()
            .try_for_each(move |&entry| -> color_eyre::Result<()> {
                let pak_writer_mtx = &pak_writer_mtx1;
                let bar = &bar1;
                // read raw tex file
                // parse tex file
                let mut entry_reader = {
                    let mut archive_reader = archive_reader_mtx.lock();
                    archive_reader.owned_entry_reader(entry.clone())?
                };
                if !is_tex_file(entry.hash(), filename_table) {
                    // plain file, just copy
                    let mut buf = vec![];
                    std::io::copy(&mut entry_reader, &mut buf)?;
                    let mut pak_writer = pak_writer_mtx.lock();
                    let write_bytes = write_to_pak(
                        &mut pak_writer,
                        entry,
                        entry.hash(),
                        &buf,
                        use_feature_clone,
                    )?;
                    bytes_written.fetch_add(write_bytes, Ordering::SeqCst);
                } else {
                    let mut tex = Tex::from_reader(&mut entry_reader)?;
                    // decompress mipmaps
                    tex.batch_decompress()?;
                    let tex_bytes = tex.as_bytes()?;
                    let mut pak_writer = pak_writer_mtx.lock();
                    let write_bytes = write_to_pak(
                        &mut pak_writer,
                        entry,
                        entry.hash(),
                        &tex_bytes,
                        use_feature_clone,
                    )?;
                    bytes_written.fetch_add(write_bytes, Ordering::SeqCst);
                }
                bar.inc(1);
                // Refresh the byte-counter message only every 100 entries to
                // keep progress-bar overhead low.
                if bar.position().is_multiple_of(100) {
                    bar.set_message(
                        HumanBytes(bytes_written.load(Ordering::SeqCst) as u64).to_string(),
                    );
                }
                Ok(())
            });
        if let Err(e) = err {
            eprintln!("Error occurred when processing tex: {e}");
            eprintln!(
                "The process terminated early, we'll save the current processed tex files to pak file."
            );
        }
        // The parallel loop is finished, so this should be the sole Arc holder.
        match Arc::try_unwrap(pak_writer_mtx) {
            Ok(pak_writer) => pak_writer.into_inner().finish()?,
            Err(_) => panic!("Arc::try_unwrap failed"),
        };
        bar.finish();
        Ok(())
    }
    /// Automatic mode: scan the game directory, let the user pick sub-chunks,
    /// then decompress each either in place (replace mode, with backup) or as
    /// a new patch pak appended after the highest existing sub-patch number.
    fn auto_mode(&mut self) -> color_eyre::Result<()> {
        let current_dir = std::env::current_dir()?;
        wait_for_enter(
            r#"Check list:
1. Your game is already updated to the latest version.
2. Uninstalled all the mods, or the generated files will break mods.
I'm sure I've checked the list, press Enter to continue"#,
        );
        // Strip surrounding quotes so drag-and-dropped paths work.
        let game_dir: String = Input::<String>::with_theme(&ColorfulTheme::default())
            .show_default(true)
            .default(current_dir.to_string_lossy().to_string())
            .with_prompt("Input MonsterHunterWilds directory path")
            .interact_text()
            .unwrap()
            .trim_matches(|c| c == '\"' || c == '\'')
            .to_string();
        let game_dir = Path::new(&game_dir);
        if !game_dir.is_dir() {
            bail!("game directory not exists.");
        }
        // scan for pak files in main game directory and DLC directory
        let all_chunk_selections = self.scan_all_pak_files(game_dir)?;
        // show chunks for selection
        // only show sub chunks
        let chunk_selections: Vec<&ChunkSelection> = all_chunk_selections
            .iter()
            .filter(|chunk_selection| chunk_selection.chunk_name.sub_id().is_some())
            .collect();
        if chunk_selections.is_empty() {
            bail!("No available pak files found.");
        }
        // Pre-select chunks at or above the 50 MB threshold.
        let selected_chunks: Vec<bool> = chunk_selections
            .iter()
            .map(|chunk_selection| {
                chunk_selection.file_size >= AUTO_CHUNK_SELECTION_SIZE_THRESHOLD as u64
            })
            .collect();
        let selected_chunks: Option<Vec<usize>> =
            MultiSelect::with_theme(&ColorfulTheme::default())
                .with_prompt("Select chunks to process (Space to select, Enter to confirm)")
                .items(&chunk_selections)
                .defaults(&selected_chunks)
                .interact_opt()?;
        let Some(selected_chunks) = selected_chunks else {
            bail!("No chunks selected.");
        };
        let selected_chunk_selections: Vec<&ChunkSelection> = selected_chunks
            .iter()
            .map(|i| chunk_selections[*i])
            .collect();
        // replace mode: replace original files with uncompressed files
        // patch mode: generate patch files after original patch files
        let use_replace_mode = Select::with_theme(&ColorfulTheme::default())
            .with_prompt(
                "Replace original files with uncompressed files? (Will automatically backup original files)",
            )
            .default(0)
            .items(FALSE_TRUE_SELECTION)
            .interact()
            .unwrap();
        let use_replace_mode = use_replace_mode == 1;
        // all chunk names for patch ID tracking
        let mut all_chunk_names: Vec<ChunkName> = all_chunk_selections
            .iter()
            .map(|cs| cs.chunk_name.clone())
            .collect();
        // start processing
        for chunk_selection in selected_chunk_selections {
            let chunk_path = &chunk_selection.full_path;
            let chunk_name = &chunk_selection.chunk_name;
            let output_path = if use_replace_mode {
                // In replace mode, first generate a temporary decompressed file
                chunk_path.with_extension("pak.temp")
            } else {
                // In patch mode
                // Find the max patch id for the current chunk series
                // (same major/patch/sub IDs; only the sub-patch number varies)
                let max_patch_id = all_chunk_names
                    .iter()
                    .filter(|c| {
                        c.major_id() == chunk_name.major_id()
                            && c.patch_id() == chunk_name.patch_id()
                            && c.sub_id() == chunk_name.sub_id()
                    })
                    .filter_map(|c| c.sub_patch_id())
                    .max()
                    .unwrap_or(0);
                let new_patch_id = max_patch_id + 1;
                // Create a new chunk name
                let output_chunk_name = chunk_name.set_sub_patch(new_patch_id);
                // Add the new patch to the chunk list so it can be found in subsequent processing
                all_chunk_names.push(output_chunk_name.clone());
                // Determine output directory based on original chunk location
                let output_dir = chunk_path.parent().unwrap();
                output_dir.join(output_chunk_name.to_string())
            };
            println!("Output patch file: {}", output_path.display());
            // Replace mode requires a full package; automatic mode always
            // clones entry feature flags.
            self.process_chunk(
                self.filename_table(),
                chunk_path,
                &output_path,
                use_replace_mode,
                true,
            )?;
            // In replace mode, backup the original file
            // and rename the temporary file to the original file name
            if use_replace_mode {
                // Backup the original file
                let backup_path = chunk_path.with_extension("pak.backup");
                if backup_path.exists() {
                    fs::remove_file(&backup_path)?;
                }
                fs::rename(chunk_path, &backup_path)?;
                // Rename the temporary file to the original file name
                fs::rename(&output_path, chunk_path)?;
            }
            println!();
        }
        Ok(())
    }
fn manual_mode(&mut self) -> color_eyre::Result<()> {
let input: String = Input::with_theme(&ColorfulTheme::default())
.show_default(true)
.default("re_chunk_000.pak.sub_000.pak".to_string())
.with_prompt("Input .pak file path")
.interact_text()
.unwrap()
.trim_matches(|c| c == '\"' || c == '\'')
.to_string();
let input_path = Path::new(&input);
if !input_path.is_file() {
bail!("input file not exists.");
}
let use_full_package_mode = Select::with_theme(&ColorfulTheme::default())
.with_prompt(
"Package all files, including non-tex files (for replacing original files)",
)
.default(0)
.items(FALSE_TRUE_SELECTION)
.interact()
.unwrap();
let use_full_package_mode = use_full_package_mode == 1;
let use_feature_clone = Select::with_theme(&ColorfulTheme::default())
.with_prompt("Clone feature flags from original file?")
.default(1)
.items(FALSE_TRUE_SELECTION)
.interact()
.unwrap();
let use_feature_clone = use_feature_clone == 1;
self.process_chunk(
self.filename_table(),
input_path,
&input_path.with_extension("uncompressed.pak"),
use_full_package_mode,
use_feature_clone,
)?;
Ok(())
}
    /// Restore mode: undo this tool's changes in a game directory.
    ///
    /// Replace-mode paks are swapped back with their `.pak.backup`; patch-mode
    /// paks are deleted, or replaced by an empty pak when a higher-numbered
    /// patch exists and the numbering must stay gapless.
    fn restore_mode(&mut self) -> color_eyre::Result<()> {
        let current_dir = std::env::current_dir()?;
        // Strip surrounding quotes so drag-and-dropped paths work.
        let game_dir: String = Input::<String>::with_theme(&ColorfulTheme::default())
            .show_default(true)
            .default(current_dir.to_string_lossy().to_string())
            .with_prompt("Input MonsterHunterWilds directory path")
            .interact_text()
            .unwrap()
            .trim_matches(|c| c == '\"' || c == '\'')
            .to_string();
        let game_dir = Path::new(&game_dir);
        if !game_dir.is_dir() {
            bail!("game directory not exists.");
        }
        // scan all pak files, find files generated by this tool
        println!("Scanning tool generated files...");
        let mut tool_generated_files = Vec::new();
        let mut backup_files = Vec::new();
        let mut all_chunks = Vec::new();
        // Scan main directory
        self.scan_tool_files_in_directory(
            game_dir,
            &mut tool_generated_files,
            &mut backup_files,
            &mut all_chunks,
        )?;
        // Scan DLC directory if exists
        let dlc_dir = game_dir.join("dlc");
        if dlc_dir.is_dir() {
            self.scan_tool_files_in_directory(
                &dlc_dir,
                &mut tool_generated_files,
                &mut backup_files,
                &mut all_chunks,
            )?;
        }
        if tool_generated_files.is_empty() && backup_files.is_empty() {
            println!("No files found to restore.");
            return Ok(());
        }
        println!(
            "Found {} tool generated files and {} backup files",
            tool_generated_files.len(),
            backup_files.len()
        );
        // restore
        let mut patch_files_to_remove = Vec::new();
        for (file_path, metadata) in &tool_generated_files {
            if metadata.is_full_package() {
                // restore full package mode (replace mode)
                // this is a replace mode generated file, find the corresponding backup file
                let backup_path = file_path.with_extension("pak.backup");
                if backup_path.exists() {
                    println!("Restore replace mode file: {}", file_path.display());
                    // delete the current file and restore the backup
                    fs::remove_file(file_path)?;
                    fs::rename(&backup_path, file_path)?;
                    println!(" Restore backup file: {}", backup_path.display());
                } else {
                    println!("Warning: backup file not found {}", backup_path.display());
                }
            } else {
                // restore patch mode
                // this is a patch mode generated file
                if let Ok(chunk_name) =
                    ChunkName::try_from_str(&file_path.file_name().unwrap().to_string_lossy())
                {
                    patch_files_to_remove.push((file_path.clone(), chunk_name));
                }
            }
        }
        // remove patch files
        if !patch_files_to_remove.is_empty() {
            println!("Remove patch files...");
            // Iterate in reverse so higher-numbered patches are handled first.
            for (file_path, chunk_name) in patch_files_to_remove.iter().rev() {
                println!("Remove patch file: {}", file_path.display());
                // Check if there are any patches with higher numbers
                // NOTE(review): the (None, Some(_)) arm compares against
                // `chunk_name.patch_id().unwrap()` — confirm this arm is ever
                // reachable; the ChunkName parser only emits a SubPatch after
                // a Sub component, which would make (None, Some(_)) impossible.
                let has_higher_patches = all_chunks.iter().any(|c| {
                    c.major_id() == chunk_name.major_id()
                        && c.sub_id() == chunk_name.sub_id()
                        && match (c.sub_id(), c.sub_patch_id()) {
                            (Some(_), Some(patch_id)) => {
                                patch_id > chunk_name.sub_patch_id().unwrap()
                            }
                            (None, Some(patch_id)) => patch_id > chunk_name.patch_id().unwrap(),
                            _ => false,
                        }
                });
                if has_higher_patches {
                    // create an empty patch file instead of deleting, to keep the patch sequence continuous
                    self.create_empty_patch_file(file_path)?;
                    println!(" Create empty patch file to keep sequence continuous");
                } else {
                    // no higher patches exist, safe to delete
                    fs::remove_file(file_path)?;
                    // remove from all_chunks
                    all_chunks.retain(|c| c != chunk_name);
                    println!(" Removed patch file");
                }
            }
        }
        println!("Restore completed!");
        Ok(())
    }
/// Scan tool generated files in a specific directory
fn scan_tool_files_in_directory(
&self,
dir: &Path,
tool_generated_files: &mut Vec<(std::path::PathBuf, PakMetadata)>,
backup_files: &mut Vec<std::path::PathBuf>,
all_chunks: &mut Vec<ChunkName>,
) -> color_eyre::Result<()> {
let entries = fs::read_dir(dir)?;
for entry in entries {
let entry = entry?;
if !entry.file_type()?.is_file() {
continue;
}
let file_name = entry.file_name().to_string_lossy().to_string();
let file_path = entry.path();
// check backup files
if file_name.ends_with(".pak.backup") {
backup_files.push(file_path);
continue;
}
// check pak files
if !file_name.ends_with(".pak") {
continue;
}
// Check if it's a chunk or DLC file
let is_chunk = file_name.starts_with("re_chunk_");
let is_dlc = file_name.starts_with("re_dlc_");
if !is_chunk && !is_dlc {
continue;
}
// collect chunk info
if let Ok(chunk_name) = ChunkName::try_from_str(&file_name) {
all_chunks.push(chunk_name.clone());
}
// check if the file is generated by this tool
if let Ok(Some(metadata)) = self.check_tool_generated_file(&file_path) {
tool_generated_files.push((file_path, metadata));
}
}
Ok(())
}
/// check if the file is generated by this tool, return metadata
fn check_tool_generated_file(
&self,
file_path: &Path,
) -> color_eyre::Result<Option<PakMetadata>> {
let file = match fs::File::open(file_path) {
Ok(file) => file,
Err(_) => return Ok(None),
};
let mut reader = io::BufReader::new(file);
let pak_archive = match ree_pak_core::read::read_archive(&mut reader) {
Ok(archive) => archive,
Err(_) => return Ok(None),
};
PakMetadata::from_pak_archive(reader, &pak_archive)
}
/// create an empty patch file
fn create_empty_patch_file(&self, file_path: &Path) -> color_eyre::Result<()> {
let out_file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(file_path)?;
let mut pak_writer = ree_pak_core::write::PakWriter::new(out_file, 1);
// write metadata to mark this is an empty patch file
let metadata = PakMetadata::new(false);
metadata.write_to_pak(&mut pak_writer)?;
pak_writer.finish()?;
Ok(())
}
}
/// Whether `hash` resolves in the table to a `.tex.241106027` file name.
/// Hashes missing from the table are treated as non-tex files.
fn is_tex_file(hash: u64, file_name_table: &FileNameTable) -> bool {
    file_name_table
        .get_file_name(hash)
        .map(|file_name| file_name.to_string().unwrap().ends_with(".tex.241106027"))
        .unwrap_or(false)
}
/// Write one entry's payload into the output pak and return the byte count.
///
/// When `use_feature_clone` is set, the original entry's opaque attribute
/// flags are carried over to the new entry.
fn write_to_pak<W>(
    writer: &mut ree_pak_core::write::PakWriter<W>,
    entry: &PakEntry,
    file_name: impl Utf16HashExt,
    data: &[u8],
    use_feature_clone: bool,
) -> color_eyre::Result<usize>
where
    W: io::Write + io::Seek,
{
    let file_options = if use_feature_clone {
        FileOptions::default().with_unk_attr(*entry.unk_attr())
    } else {
        FileOptions::default()
    };
    writer.start_file(file_name, file_options)?;
    writer.write_all(data)?;
    // `write_all` either writes everything or errors, so the payload length
    // is the number of bytes written.
    Ok(data.len())
}
/// Show `msg` as a prompt and block until the user presses Enter.
/// Empty input is accepted; the typed text is discarded.
fn wait_for_enter(msg: &str) {
    let _ignored: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt(msg)
        .allow_empty(true)
        .interact_text()
        .unwrap();
}

314
src/chunk.rs Normal file
View File

@@ -0,0 +1,314 @@
//! Chunk file name format
//!
//! File name structure:
//! - Base: re_chunk_XXX.pak
//! - Patch: re_chunk_XXX.pak.patch_XXX.pak
//! - Sub: re_chunk_XXX.pak.sub_XXX.pak
//! - Sub Patch: re_chunk_XXX.pak.sub_XXX.pak.patch_XXX.pak
//! - DLC: re_dlc_stm_3308900.pak (and more)
use color_eyre::eyre;
/// One dot-separated piece of a pak file name.
///
/// A full name is a sequence of these, e.g.
/// `re_chunk_000.pak.sub_000.pak.patch_001.pak` parses to
/// `[Base(0), Sub(0), SubPatch(1)]`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ChunkComponent {
    /// Base chunk with major ID (re_chunk_XXX.pak)
    Base(u32),
    /// DLC chunk with DLC ID (re_dlc_stm_3308900.pak)
    Dlc(String),
    /// Patch chunk with patch ID (XXX in .patch_XXX.pak)
    Patch(u32),
    /// Sub chunk with sub ID (XXX in .sub_XXX.pak)
    Sub(u32),
    /// Sub patch chunk with sub patch ID (YYY in .sub_XXX.pak.patch_YYY.pak)
    SubPatch(u32),
}
/// Structured representation of a pak file name.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ChunkName {
    /// Chunk components, in the order they appear in the file name.
    pub components: Vec<ChunkComponent>,
}
impl ChunkName {
    #[allow(dead_code)]
    /// Create a new base chunk name (re_chunk_XXX.pak)
    ///
    /// The result contains a single `Base` component.
    pub fn new(major_id: u32) -> Self {
        Self {
            components: vec![ChunkComponent::Base(major_id)],
        }
    }
/// Create a chunk name from a string
pub fn try_from_str(name: &str) -> color_eyre::Result<Self> {
let dot_parts = name.split('.').collect::<Vec<&str>>();
if dot_parts.len() < 2 || dot_parts.len() % 2 != 0 {
return Err(eyre::eyre!(
"Invalid chunk name with odd number of parts: {}",
name
));
}
// every 2 parts is a component
let part_pairs = dot_parts
.chunks_exact(2)
.map(|c| (c[0], c[1]))
.collect::<Vec<(&str, &str)>>();
// check if all parts have the correct extension
if !part_pairs.iter().all(|(_, ext)| *ext == "pak") {
return Err(eyre::eyre!(
"Invalid chunk name with invalid extension: {}",
name
));
}
let mut components = Vec::new();
let mut has_sub = false;
for (part_name, _) in part_pairs.iter() {
let component = Self::parse_component(part_name)?;
match component {
Component::Major(id) => {
components.push(ChunkComponent::Base(id));
}
Component::Dlc(id) => {
components.push(ChunkComponent::Dlc(id));
}
Component::Sub(id) => {
components.push(ChunkComponent::Sub(id));
has_sub = true;
}
Component::Patch(id) => {
if has_sub {
components.push(ChunkComponent::SubPatch(id));
} else {
components.push(ChunkComponent::Patch(id));
}
}
}
}
Ok(Self { components })
}
/// Get the major ID (base chunk ID)
pub fn major_id(&self) -> Option<u32> {
self.components.iter().find_map(|c| match c {
ChunkComponent::Base(id) => Some(*id),
_ => None,
})
}
/// Get the patch ID
pub fn patch_id(&self) -> Option<u32> {
self.components.iter().find_map(|c| match c {
ChunkComponent::Patch(id) => Some(*id),
_ => None,
})
}
/// Get the sub ID
pub fn sub_id(&self) -> Option<u32> {
self.components.iter().find_map(|c| match c {
ChunkComponent::Sub(id) => Some(*id),
_ => None,
})
}
/// Get the sub patch ID
pub fn sub_patch_id(&self) -> Option<u32> {
self.components.iter().find_map(|c| match c {
ChunkComponent::SubPatch(id) => Some(*id),
_ => None,
})
}
/// Add or replace a sub patch component with the given ID
pub fn set_sub_patch(&self, patch_id: u32) -> Self {
let mut new_components = self.components.clone();
// Check if SubPatch already exists and replace it
if let Some(pos) = new_components
.iter()
.position(|c| matches!(c, ChunkComponent::SubPatch(_)))
{
new_components[pos] = ChunkComponent::SubPatch(patch_id);
} else {
new_components.push(ChunkComponent::SubPatch(patch_id));
}
Self {
components: new_components,
}
}
fn parse_component(name: &str) -> color_eyre::Result<Component> {
if name.starts_with("re_chunk_") {
let major_id = name
.strip_prefix("re_chunk_")
.unwrap()
.parse::<u32>()
.map_err(|e| eyre::eyre!("Chunk name with invalid major ID: {}", e))?;
Ok(Component::Major(major_id))
} else if name.starts_with("re_dlc_") {
let dlc_id = name.strip_prefix("re_dlc_").unwrap().to_string();
Ok(Component::Dlc(dlc_id))
} else if name.starts_with("patch_") {
let patch_id = name
.strip_prefix("patch_")
.unwrap()
.parse::<u32>()
.map_err(|e| eyre::eyre!("Chunk name with invalid patch ID: {}", e))?;
Ok(Component::Patch(patch_id))
} else if name.starts_with("sub_") {
let sub_id = name
.strip_prefix("sub_")
.unwrap()
.parse::<u32>()
.map_err(|e| eyre::eyre!("Chunk name with invalid sub ID: {}", e))?;
Ok(Component::Sub(sub_id))
} else {
Err(eyre::eyre!(
"Invalid chunk name with invalid component: {}",
name
))
}
}
}
impl std::fmt::Display for ChunkName {
    /// Render the canonical on-disk file name, joining components with '.'.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut first = true;
        for component in &self.components {
            if !first {
                write!(f, ".")?;
            }
            first = false;
            match component {
                ChunkComponent::Base(id) => write!(f, "re_chunk_{:03}.pak", id)?,
                ChunkComponent::Dlc(id) => write!(f, "re_dlc_{}.pak", id)?,
                ChunkComponent::Sub(id) => write!(f, "sub_{:03}.pak", id)?,
                // Patch and sub-patch share the same on-disk spelling.
                ChunkComponent::Patch(id) | ChunkComponent::SubPatch(id) => {
                    write!(f, "patch_{:03}.pak", id)?
                }
            }
        }
        Ok(())
    }
}
impl PartialOrd for ChunkName {
    /// Delegates to `Ord`; the ordering is total.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ChunkName {
    /// Orders names by component count first, then componentwise; mixed
    /// component types rank as Base < Dlc < Sub < Patch/SubPatch.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // compare by component count first
        self.components
            .len()
            .cmp(&other.components.len())
            .then_with(|| {
                // compare each component
                // (lengths are equal here, so zip visits every position)
                for (a, b) in self.components.iter().zip(other.components.iter()) {
                    let cmp = match (a, b) {
                        (ChunkComponent::Base(a), ChunkComponent::Base(b)) => a.cmp(b),
                        (ChunkComponent::Dlc(a), ChunkComponent::Dlc(b)) => a.cmp(b),
                        (ChunkComponent::Patch(a), ChunkComponent::Patch(b)) => a.cmp(b),
                        (ChunkComponent::Sub(a), ChunkComponent::Sub(b)) => a.cmp(b),
                        (ChunkComponent::SubPatch(a), ChunkComponent::SubPatch(b)) => a.cmp(b),
                        // compare by component type priority
                        // (arm order is significant: each rule assumes the
                        // same-type cases above were not matched)
                        (ChunkComponent::Base(_), _) => std::cmp::Ordering::Less,
                        (_, ChunkComponent::Base(_)) => std::cmp::Ordering::Greater,
                        (ChunkComponent::Dlc(_), _) => std::cmp::Ordering::Less,
                        (_, ChunkComponent::Dlc(_)) => std::cmp::Ordering::Greater,
                        (ChunkComponent::Sub(_), _) => std::cmp::Ordering::Less,
                        (_, ChunkComponent::Sub(_)) => std::cmp::Ordering::Greater,
                        (ChunkComponent::Patch(_), ChunkComponent::SubPatch(_)) => {
                            std::cmp::Ordering::Less
                        }
                        (ChunkComponent::SubPatch(_), ChunkComponent::Patch(_)) => {
                            std::cmp::Ordering::Greater
                        }
                    };
                    if cmp != std::cmp::Ordering::Equal {
                        return cmp;
                    }
                }
                std::cmp::Ordering::Equal
            })
    }
}
/// Raw parse result for a single name part, before patch/sub-patch
/// disambiguation (which depends on whether a `sub_` part was seen earlier).
enum Component {
    Major(u32),
    // The remainder after the `re_dlc_` prefix, e.g. "stm_3308900".
    Dlc(String),
    Patch(u32),
    Sub(u32),
}
// Round-trip and accessor tests for ChunkName parsing and formatting.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_chunk_name_formats() {
        // Test base chunk
        let base = ChunkName::new(0);
        assert_eq!(base.to_string(), "re_chunk_000.pak");
        // Test patch chunk
        let patch = ChunkName::try_from_str("re_chunk_000.pak.patch_001.pak").unwrap();
        assert_eq!(patch.to_string(), "re_chunk_000.pak.patch_001.pak");
        // Test sub chunk
        let sub = ChunkName::try_from_str("re_chunk_000.pak.sub_000.pak").unwrap();
        assert_eq!(sub.to_string(), "re_chunk_000.pak.sub_000.pak");
        // Test sub patch chunk
        let sub_patch =
            ChunkName::try_from_str("re_chunk_000.pak.sub_000.pak.patch_001.pak").unwrap();
        assert_eq!(
            sub_patch.to_string(),
            "re_chunk_000.pak.sub_000.pak.patch_001.pak"
        );
        // Test DLC chunk
        let dlc = ChunkName::try_from_str("re_dlc_stm_3308900.pak").unwrap();
        assert_eq!(dlc.to_string(), "re_dlc_stm_3308900.pak");
    }
    #[test]
    fn test_chunk_helper_methods() {
        // Test base chunk helper methods
        let base = ChunkName::new(123);
        assert_eq!(base.major_id(), Some(123));
        assert_eq!(base.patch_id(), None);
        assert_eq!(base.sub_id(), None);
        assert_eq!(base.sub_patch_id(), None);
        // Test complex chunk helper methods
        // (patch after sub parses as a sub-patch, so patch_id stays None)
        let complex =
            ChunkName::try_from_str("re_chunk_456.pak.sub_789.pak.patch_012.pak").unwrap();
        assert_eq!(complex.major_id(), Some(456));
        assert_eq!(complex.patch_id(), None);
        assert_eq!(complex.sub_id(), Some(789));
        assert_eq!(complex.sub_patch_id(), Some(12));
        // Test DLC chunk helper methods
        let dlc = ChunkName::try_from_str("re_dlc_stm_3308900.pak").unwrap();
        assert_eq!(dlc.major_id(), None);
    }
    #[test]
    fn test_set_sub_patch() {
        let base = ChunkName::try_from_str("re_chunk_000.pak.sub_001.pak").unwrap();
        let with_patch = base.set_sub_patch(99);
        assert_eq!(with_patch.major_id(), Some(0));
        assert_eq!(with_patch.sub_id(), Some(1));
        assert_eq!(with_patch.sub_patch_id(), Some(99));
        assert_eq!(
            with_patch.to_string(),
            "re_chunk_000.pak.sub_001.pak.patch_099.pak"
        );
    }
}

View File

@@ -1,148 +1,32 @@
use std::{
fs::{self, OpenOptions},
io::{self, Write},
path::Path,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
mod app;
mod chunk;
mod metadata;
mod util;
use colored::Colorize;
use dialoguer::{Input, theme::ColorfulTheme};
use indicatif::{HumanBytes, ProgressBar, ProgressStyle};
use parking_lot::Mutex;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use re_tex::tex::Tex;
use ree_pak_core::{filename::FileNameTable, read::archive::PakArchiveReader, write::FileOptions};
const FILE_NAME_LIST: &[u8] = include_bytes!("../assets/MHWs_STM_Release.list.zst");
fn main() {
println!("Version {}. Tool by @Eigeen", env!("CARGO_PKG_VERSION"));
std::panic::set_hook(Box::new(panic_hook));
if let Err(e) = main_entry() {
eprintln!("Error: {e}");
let mut app = app::App::default();
if let Err(e) = app.run() {
eprintln!("{}: {:#}", "Error".red().bold(), e);
wait_for_exit();
std::process::exit(1);
}
wait_for_exit();
}
fn main_entry() -> eyre::Result<()> {
let input: String = Input::with_theme(&ColorfulTheme::default())
.show_default(true)
.default("re_chunk_000.pak.sub_000.pak".to_string())
.with_prompt("Input .pak file path")
.interact_text()
.unwrap();
println!("Input file: {}", input);
let input_path = Path::new(&input);
if !input_path.is_file() {
eyre::bail!("input file not exists.");
}
println!("Loading embedded file name table...");
let filename_table = FileNameTable::from_bytes(FILE_NAME_LIST)?;
let file = fs::File::open(input_path)?;
let mut reader = io::BufReader::new(file);
println!("Reading pak archive...");
let pak_archive = ree_pak_core::read::read_archive(&mut reader)?;
let archive_reader = PakArchiveReader::new(reader, &pak_archive);
let archive_reader_mtx = Mutex::new(archive_reader);
// filtered entries
println!("Filtering entries...");
let entries = pak_archive
.entries()
.iter()
.filter(|entry| {
let Some(file_name) = filename_table.get_file_name(entry.hash()) else {
return false;
};
file_name.get_name().ends_with(".tex.241106027")
})
.collect::<Vec<_>>();
// new pak archive
let output_path = input_path.with_extension("uncompressed.pak");
println!("Output file: {}", output_path.to_string_lossy());
let out_file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(output_path)?;
let pak_writer = ree_pak_core::write::PakWriter::new(out_file, entries.len() as u64);
let pak_writer_mtx = Arc::new(Mutex::new(pak_writer));
let bar = ProgressBar::new(entries.len() as u64);
bar.set_style(
ProgressStyle::default_bar().template("Bytes written: {msg}\n{pos}/{len} {wide_bar}")?,
);
bar.enable_steady_tick(Duration::from_millis(200));
let pak_writer_mtx1 = Arc::clone(&pak_writer_mtx);
let bar1 = bar.clone();
let bytes_written = AtomicUsize::new(0);
let err = entries
.par_iter()
.try_for_each(move |&entry| -> eyre::Result<()> {
let pak_writer_mtx = &pak_writer_mtx1;
let bar = &bar1;
// read raw tex file
// parse tex file
let mut entry_reader = {
let mut archive_reader = archive_reader_mtx.lock();
archive_reader.owned_entry_reader(entry.clone())?
};
let mut tex = Tex::from_reader(&mut entry_reader)?;
// decompress mipmaps
tex.batch_decompress()?;
let tex_bytes = tex.as_bytes()?;
bytes_written.fetch_add(tex_bytes.len() as usize, Ordering::SeqCst);
// save file
let file_name = filename_table.get_file_name(entry.hash()).unwrap().clone();
{
let mut pak_writer = pak_writer_mtx.lock();
pak_writer.start_file(file_name, FileOptions::default())?;
pak_writer.write_all(&tex_bytes)?;
}
bar.inc(1);
if bar.position() % 100 == 0 {
bar.set_message(
HumanBytes(bytes_written.load(Ordering::SeqCst) as u64).to_string(),
);
}
Ok(())
});
if let Err(e) = err {
eprintln!("Error occurred when processing tex: {e}");
eprintln!(
"The process terminated early, we'll save the current processed tex files to pak file."
);
}
let pak_writer = Arc::try_unwrap(pak_writer_mtx);
match pak_writer {
Ok(pak_writer) => pak_writer.into_inner().finish()?,
Err(_) => panic!("Arc::try_unwrap failed"),
};
bar.finish();
println!("Done!");
println!(
"You should rename the output file like `re_chunk_000.pak.sub_000.pak.patch_xxx.pak`, or manage it by your favorite mod manager."
);
Ok(())
/// Custom panic hook: prints the panic info with a red "Panic" label, waits
/// for the user to press Enter, then terminates with a non-zero exit code.
fn panic_hook(info: &std::panic::PanicHookInfo) {
    let label = "Panic".red().bold();
    eprintln!("{label}: {info}");
    wait_for_exit();
    std::process::exit(1);
}
fn wait_for_exit() {
let _: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Press Enter to exit")
.allow_empty(true)
.interact_text()
.unwrap();

71
src/metadata.rs Normal file
View File

@@ -0,0 +1,71 @@
//! Extended metadata for generated pak files.
use std::io::{self, Read, Write};
use ree_pak_core::{
pak::PakArchive,
read::archive::PakArchiveReader,
utf16_hash::Utf16HashExt,
write::{FileOptions, PakWriter},
};
use serde::{Deserialize, Serialize};
const METADATA_KEY: &str = "__TEX_DECOMPRESSOR_METADATA__";
/// Extended metadata embedded into generated pak files under
/// `METADATA_KEY`, serialized as JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PakMetadata {
    // Schema version; `new()` always writes 1.
    version: u32,
    // NOTE(review): presumably marks a full (non-incremental) package as
    // opposed to a patch subset — confirm against the writer's call site.
    is_full_package: bool,
}
impl PakMetadata {
pub fn new(is_full_package: bool) -> Self {
Self {
version: 1,
is_full_package,
}
}
pub fn is_full_package(&self) -> bool {
self.is_full_package
}
pub fn from_pak_archive<R>(
reader: R,
pak_archive: &PakArchive,
) -> color_eyre::Result<Option<Self>>
where
R: io::Read + io::Seek,
{
let entry = pak_archive
.entries()
.iter()
.find(|entry| entry.hash() == METADATA_KEY.hash_mixed());
if let Some(entry) = entry {
// read file
let mut archive_reader = PakArchiveReader::new(reader, pak_archive);
let mut entry_reader = archive_reader.owned_entry_reader(entry.clone())?;
let mut buf = Vec::new();
entry_reader.read_to_end(&mut buf)?;
let metadata = serde_json::from_slice(&buf)?;
Ok(Some(metadata))
} else {
Ok(None)
}
}
pub fn write_to_pak<W>(&self, pak_writer: &mut PakWriter<W>) -> color_eyre::Result<()>
where
W: io::Write + io::Seek,
{
let json_str = serde_json::to_string(self)?;
let json_bytes = json_str.as_bytes();
pak_writer.start_file(METADATA_KEY, FileOptions::default())?;
pak_writer.write_all(json_bytes)?;
Ok(())
}
}

5
src/util.rs Normal file
View File

@@ -0,0 +1,5 @@
use indicatif::HumanBytes;
/// Formats a byte count as a human-readable string using indicatif's
/// `HumanBytes` display implementation.
pub fn human_bytes(bytes: u64) -> String {
    format!("{}", HumanBytes(bytes))
}