20 Commits

Author SHA1 Message Date
7d17efd889 github actions 2025-07-28 19:34:40 +08:00
e0ff976a11 add Restore tool, fix metadata write error 2025-07-28 19:31:09 +08:00
c64a9a8fbb write a metadata to uncompressed pak 2025-07-28 18:45:11 +08:00
6bd30c682d fix replace mode not working 2025-07-28 18:07:24 +08:00
6c78c4f01c update version 2025-07-28 18:03:18 +08:00
b6b2820d75 clippy, remove unnecessary print 2025-07-28 17:56:03 +08:00
0d465ed46c auto mode support 2025-07-28 17:53:03 +08:00
1588c8d756 update embedded list file to TU2 version 2025-07-28 17:49:34 +08:00
6e95351fa0 v0.2.0 refactoring 2025-07-28 14:51:55 +08:00
601e217276 use fs_err for better std::fs errors 2025-07-25 19:33:47 +08:00
87d808dc3b update dependencies 2025-07-25 19:25:27 +08:00
817db15340 switch to color_eyre 2025-07-25 19:24:50 +08:00
ad98236e06 colorful and better interaction
Use colored texts.
Default enable feature clone.
Add panic hook to avoid program exit.
2025-03-30 21:20:05 +08:00
9d703833e4 Merge branch 'main' of github.com:eigeen/mhws-tex-decompressor 2025-03-30 20:56:03 +08:00
40f9ea4572 wait for exit when success 2025-03-30 20:55:32 +08:00
3009d73726 Merge pull request #2 from xuanplus/patch-1
Update main.rs to remove quotes
2025-03-22 11:15:39 +08:00
Haoxuan Di
108d8bce92 Update main.rs
remove quotes for dropping file to terminal
2025-03-22 11:11:43 +08:00
527354a3af full package and feature clone 2025-03-20 11:06:22 +08:00
f46a06948c pak entry flags clone 2025-03-19 21:30:28 +08:00
2b214cec01 fixed: a fixed input file path 2025-03-19 19:37:58 +08:00
10 changed files with 2046 additions and 264 deletions

53
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,53 @@
# Release workflow: builds the Windows binary and attaches it to a draft
# GitHub Release whenever a version tag (v*) is pushed.
name: Release Build
on:
  push:
    tags: ["v*"]
# `contents: write` is required by softprops/action-gh-release to create
# and edit releases.
permissions:
  contents: write
env:
  CARGO_TERM_COLOR: always
  BINARY_NAME: mhws-tex-decompressor
jobs:
  build:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: x86_64-pc-windows-msvc
      - name: Build
        run: cargo build --verbose --release
      # Collect the binary plus docs into a flat folder for upload.
      # `copy` is the Windows shell command (runner is windows-latest).
      - name: Prepare release package
        run: |
          mkdir release
          copy target/release/${{ env.BINARY_NAME }}.exe release/
          copy README.md release/
          copy LICENSE release/
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.BINARY_NAME }}-windows-x64
          path: release/
      - name: Create Release
        uses: softprops/action-gh-release@v2
        # Defensive guard: the workflow already only triggers on tag pushes,
        # so this should always be true here.
        if: github.ref_type == 'tag'
        with:
          # Draft so a maintainer can fill in the changelog before publishing.
          draft: true
          files: release/*
          name: Release ${{ github.ref_name }}
          body: |
            ## ${{ github.ref_name }}
            ### Changes
            - Please fill in the changes

2
.gitignore vendored
View File

@@ -1 +1,3 @@
/target /target
*.exe
*.zip

1198
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,18 +1,22 @@
[package] [package]
name = "mhws-tex-decompressor" name = "mhws-tex-decompressor"
version = "0.1.0" version = "0.2.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
# # local development # local development
# re-tex = { path = "../re-tex" } re-tex = { path = "../re-tex" }
# ree-pak-core = { path = "../../ree-pak-rs/ree-pak-core" } ree-pak-core = { path = "../../ree-pak-rs/ree-pak-core" }
re-tex = { git = "https://github.com/eigeen/re-tex.git", branch = "main" } # re-tex = { git = "https://github.com/eigeen/re-tex.git", branch = "main" }
ree-pak-core = { git = "https://github.com/eigeen/ree-pak-rs.git", branch = "main" } # ree-pak-core = { git = "https://github.com/eigeen/ree-pak-rs.git", branch = "main" }
dialoguer = "0.11.0" dialoguer = "0.11"
eyre = "0.6.12" color-eyre = "0.6.5"
indicatif = "0.17.11" indicatif = "0.18"
rayon = "1.10.0" rayon = "1.10"
parking_lot = "0.12.3" parking_lot = "0.12"
colored = "3.0"
fs-err = "3.1.1"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.141"

Binary file not shown.

632
src/app.rs Normal file
View File

@@ -0,0 +1,632 @@
use std::{
io::{self, Write},
path::Path,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
use fs_err as fs;
use color_eyre::eyre::bail;
use dialoguer::{Input, MultiSelect, Select, theme::ColorfulTheme};
use fs::OpenOptions;
use indicatif::{HumanBytes, ProgressBar, ProgressStyle};
use parking_lot::Mutex;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use re_tex::tex::Tex;
use ree_pak_core::{
filename::{FileNameExt, FileNameTable},
pak::PakEntry,
read::archive::PakArchiveReader,
write::FileOptions,
};
use crate::{chunk::ChunkName, metadata::PakMetadata, util::human_bytes};
// Embedded file name list asset (hash -> path table) decoded by
// `FileNameTable::from_bytes` at startup; the `.zst` suffix suggests it is
// zstd-compressed — the decoding is handled inside ree_pak_core.
const FILE_NAME_LIST: &[u8] = include_bytes!("../assets/MHWs_STM_Release.list.zst");
// Sub-chunks at least this large are pre-selected in automatic mode.
const AUTO_CHUNK_SELECTION_SIZE_THRESHOLD: usize = 50 * 1024 * 1024; // 50MB
// Shared item list for yes/no `Select` prompts; index 1 maps to `true`.
const FALSE_TRUE_SELECTION: [&str; 2] = ["False", "True"];
/// Top-level operating mode, chosen from the interactive menu in `App::run`.
///
/// The discriminant values mirror the order of the menu items passed to
/// `Select`, so the selected index converts directly via `from_index`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Mode {
    // Scan the game directory and process chunks in bulk.
    Automatic = 0,
    // Process a single user-specified pak file.
    Manual = 1,
    // Undo previous runs: restore backups / remove generated patch files.
    Restore = 2,
}
impl Mode {
    /// Convert a menu index back into a `Mode`; errors on out-of-range input.
    fn from_index(index: usize) -> color_eyre::Result<Self> {
        match index {
            0 => Ok(Mode::Automatic),
            1 => Ok(Mode::Manual),
            2 => Ok(Mode::Restore),
            _ => bail!("Invalid mode index: {index}"),
        }
    }
}
/// One selectable entry in the automatic-mode chunk menu: a chunk name
/// paired with its on-disk file size.
struct ChunkSelection {
    chunk_name: ChunkName,
    file_size: u64,
}
impl std::fmt::Display for ChunkSelection {
    /// Renders as `<chunk name> (<human-readable size>)`, which is the label
    /// shown by the multi-select prompt.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let size_text = human_bytes(self.file_size);
        write!(f, "{} ({})", self.chunk_name, size_text)
    }
}
/// Application state shared across the interactive modes.
#[derive(Default)]
pub struct App {
    // Populated once in `run` from the embedded file name list; accessed
    // afterwards through the `filename_table()` helper, which unwraps it.
    filename_table: Option<FileNameTable>,
}
impl App {
    /// Interactive entry point: loads the embedded file name table, asks the
    /// user for a mode, and dispatches to it.
    pub fn run(&mut self) -> color_eyre::Result<()> {
        println!("Version v{} - Tool by @Eigeen", env!("CARGO_PKG_VERSION"));
        println!("Get updates at https://github.com/eigeen/mhws-tex-decompressor");
        println!();
        println!("Loading embedded file name table...");
        let filename_table = FileNameTable::from_bytes(FILE_NAME_LIST)?;
        self.filename_table = Some(filename_table);
        // Mode selection
        let mode = Select::with_theme(&ColorfulTheme::default())
            .with_prompt("Select mode")
            .items(&["Automatic", "Manual", "Restore"])
            .default(0)
            .interact()?;
        let mode = Mode::from_index(mode)?;
        match mode {
            Mode::Automatic => self.auto_mode(),
            Mode::Manual => self.manual_mode(),
            Mode::Restore => self.restore_mode(),
        }
    }
    /// Accessor for the table loaded in `run`.
    ///
    /// Panics if called before `run` populated it (internal invariant: all
    /// modes are only reachable from `run`).
    fn filename_table(&self) -> &FileNameTable {
        self.filename_table.as_ref().unwrap()
    }
    /// Core worker: reads the pak at `input_path`, decompresses every tex
    /// entry (in parallel), and writes a new pak to `output_path` with a tool
    /// metadata entry prepended.
    ///
    /// * `use_full_package_mode` — when true, non-tex entries are copied
    ///   through as well, so the output can fully replace the input pak.
    ///   (auto_mode passes its replace-mode flag here for that reason.)
    /// * `use_feature_clone` — when true, entry attribute flags are cloned
    ///   from the source entries (see `write_to_pak`).
    fn process_chunk(
        &self,
        filename_table: &FileNameTable,
        input_path: &Path,
        output_path: &Path,
        use_full_package_mode: bool,
        use_feature_clone: bool,
    ) -> color_eyre::Result<()> {
        println!("Processing chunk: {}", input_path.display());
        let file = fs::File::open(input_path)?;
        let mut reader = io::BufReader::new(file);
        let pak_archive = ree_pak_core::read::read_archive(&mut reader)?;
        // The archive reader is shared by rayon workers, hence the Mutex.
        let archive_reader = PakArchiveReader::new(reader, &pak_archive);
        let archive_reader_mtx = Mutex::new(archive_reader);
        // filtered entries
        let entries = if use_full_package_mode {
            pak_archive.entries().iter().collect::<Vec<_>>()
        } else {
            println!("Filtering entries...");
            pak_archive
                .entries()
                .iter()
                .filter(|entry| is_tex_file(entry.hash(), filename_table))
                .collect::<Vec<_>>()
        };
        // new pak archive
        let out_file = OpenOptions::new()
            .create(true)
            .truncate(true)
            .write(true)
            .open(output_path)?;
        // +1 for metadata
        let mut pak_writer =
            ree_pak_core::write::PakWriter::new(out_file, (entries.len() as u64) + 1);
        // write metadata
        let metadata = PakMetadata::new(use_full_package_mode);
        metadata.write_to_pak(&mut pak_writer)?;
        let pak_writer_mtx = Arc::new(Mutex::new(pak_writer));
        let bar = ProgressBar::new(entries.len() as u64);
        bar.set_style(
            ProgressStyle::default_bar()
                .template("Bytes written: {msg}\n{pos}/{len} {wide_bar}")?,
        );
        bar.enable_steady_tick(Duration::from_millis(200));
        // Clones moved into the parallel closure; the originals are kept so
        // we can unwrap the Arc and finish the bar afterwards.
        let pak_writer_mtx1 = Arc::clone(&pak_writer_mtx);
        let bar1 = bar.clone();
        let bytes_written = AtomicUsize::new(0);
        let err = entries
            .par_iter()
            .try_for_each(move |&entry| -> color_eyre::Result<()> {
                let pak_writer_mtx = &pak_writer_mtx1;
                let bar = &bar1;
                // read raw tex file
                // parse tex file
                let mut entry_reader = {
                    let mut archive_reader = archive_reader_mtx.lock();
                    archive_reader.owned_entry_reader(entry.clone())?
                };
                if !is_tex_file(entry.hash(), filename_table) {
                    // plain file, just copy
                    // (only reachable in full-package mode, since otherwise
                    // non-tex entries were filtered out above)
                    let mut buf = vec![];
                    std::io::copy(&mut entry_reader, &mut buf)?;
                    let mut pak_writer = pak_writer_mtx.lock();
                    let write_bytes = write_to_pak(
                        &mut pak_writer,
                        entry,
                        entry.hash(),
                        &buf,
                        use_feature_clone,
                    )?;
                    bytes_written.fetch_add(write_bytes, Ordering::SeqCst);
                } else {
                    let mut tex = Tex::from_reader(&mut entry_reader)?;
                    // decompress mipmaps
                    tex.batch_decompress()?;
                    let tex_bytes = tex.as_bytes()?;
                    let mut pak_writer = pak_writer_mtx.lock();
                    let write_bytes = write_to_pak(
                        &mut pak_writer,
                        entry,
                        entry.hash(),
                        &tex_bytes,
                        use_feature_clone,
                    )?;
                    bytes_written.fetch_add(write_bytes, Ordering::SeqCst);
                }
                bar.inc(1);
                // Refresh the byte counter only every 100 entries to keep
                // progress-bar overhead low.
                if bar.position() % 100 == 0 {
                    bar.set_message(
                        HumanBytes(bytes_written.load(Ordering::SeqCst) as u64).to_string(),
                    );
                }
                Ok(())
            });
        // On worker failure we still fall through to `finish()` below, so the
        // partially processed pak is flushed to disk rather than lost.
        if let Err(e) = err {
            eprintln!("Error occurred when processing tex: {e}");
            eprintln!(
                "The process terminated early, we'll save the current processed tex files to pak file."
            );
        }
        // All worker clones of the Arc were dropped when the closure ended,
        // so try_unwrap should succeed here.
        match Arc::try_unwrap(pak_writer_mtx) {
            Ok(pak_writer) => pak_writer.into_inner().finish()?,
            Err(_) => panic!("Arc::try_unwrap failed"),
        };
        bar.finish();
        Ok(())
    }
    /// Automatic mode: scan the game directory for `re_chunk_*.pak` files,
    /// let the user pick sub-chunks (large ones pre-selected), then either
    /// replace the originals (with backup) or emit new patch paks.
    fn auto_mode(&mut self) -> color_eyre::Result<()> {
        let current_dir = std::env::current_dir()?;
        wait_for_enter(
            r#"Check list:
1. Your game is already updated to the latest version.
2. Uninstalled all the mods, or the generated files will break mods.
I'm sure I've checked the list, press Enter to continue"#,
        );
        // trim_matches strips surrounding quotes left by drag-and-drop of a
        // path into the terminal.
        let game_dir: String = Input::<String>::with_theme(&ColorfulTheme::default())
            .show_default(true)
            .default(current_dir.to_string_lossy().to_string())
            .with_prompt("Input MonsterHunterWilds directory path")
            .interact_text()
            .unwrap()
            .trim_matches(|c| c == '\"' || c == '\'')
            .to_string();
        let game_dir = Path::new(&game_dir);
        if !game_dir.is_dir() {
            bail!("game directory not exists.");
        }
        // scan for pak files
        let dir = fs::read_dir(game_dir)?;
        let mut all_chunks: Vec<ChunkName> = vec![];
        for entry in dir {
            let entry = entry?;
            if !entry.file_type()?.is_file() {
                continue;
            }
            let file_name = entry.file_name().to_string_lossy().to_string();
            if !file_name.ends_with(".pak") || !file_name.starts_with("re_chunk_") {
                continue;
            }
            let chunk_name = match ChunkName::try_from_str(&file_name) {
                Ok(chunk_name) => chunk_name,
                Err(e) => {
                    println!("Invalid chunk name, skipped: {e}");
                    continue;
                }
            };
            all_chunks.push(chunk_name);
        }
        all_chunks.sort();
        // show chunks for selection
        // only show sub chunks
        let chunk_selections = all_chunks
            .iter()
            .filter_map(|chunk| {
                if chunk.sub_id.is_some() {
                    Some(chunk.to_string())
                } else {
                    None
                }
            })
            .map(|file_name| {
                let file_path = game_dir.join(&file_name);
                let file_size = fs::metadata(file_path)?.len();
                Ok(ChunkSelection {
                    chunk_name: ChunkName::try_from_str(&file_name)?,
                    file_size,
                })
            })
            .collect::<color_eyre::Result<Vec<_>>>()?;
        if chunk_selections.is_empty() {
            bail!("No available pak files found.");
        }
        // Pre-tick chunks above the size threshold (these are the ones most
        // likely to contain many compressed textures).
        let selected_chunks: Vec<bool> = chunk_selections
            .iter()
            .map(|chunk_selection| {
                Ok(chunk_selection.file_size >= AUTO_CHUNK_SELECTION_SIZE_THRESHOLD as u64)
            })
            .collect::<color_eyre::Result<Vec<_>>>()?;
        let selected_chunks: Option<Vec<usize>> =
            MultiSelect::with_theme(&ColorfulTheme::default())
                .with_prompt("Select chunks to process (Space to select, Enter to confirm)")
                .items(&chunk_selections)
                .defaults(&selected_chunks)
                .interact_opt()?;
        // None means the user aborted the prompt (e.g. Esc).
        let Some(selected_chunks) = selected_chunks else {
            bail!("No chunks selected.");
        };
        let selected_chunks = selected_chunks
            .iter()
            .map(|i| chunk_selections[*i].chunk_name.clone())
            .collect::<Vec<_>>();
        // replace mode: replace original files with uncompressed files
        // patch mode: generate patch files after original patch files
        let use_replace_mode = Select::with_theme(&ColorfulTheme::default())
            .with_prompt(
                "Replace original files with uncompressed files? (Will automatically backup original files)",
            )
            .default(0)
            .items(&FALSE_TRUE_SELECTION)
            .interact()
            .unwrap();
        let use_replace_mode = use_replace_mode == 1;
        // start processing
        for chunk_name in selected_chunks {
            let chunk_path = game_dir.join(chunk_name.to_string());
            let output_path = if use_replace_mode {
                // In replace mode, first generate a temporary decompressed file
                // (with_extension swaps the final "pak" for "pak.temp", i.e.
                // appends ".temp" to the full name)
                chunk_path.with_extension("pak.temp")
            } else {
                // In patch mode
                // Find the max patch id for the current chunk series
                let max_patch_id = all_chunks
                    .iter()
                    .filter(|c| {
                        c.major_id == chunk_name.major_id
                            && c.patch_id == chunk_name.patch_id
                            && c.sub_id == chunk_name.sub_id
                    })
                    .filter_map(|c| c.sub_patch_id)
                    .max()
                    .unwrap_or(0);
                let new_patch_id = max_patch_id + 1;
                // Create a new chunk name
                let mut output_chunk_name = chunk_name.clone();
                output_chunk_name.sub_patch_id = Some(new_patch_id);
                // Add the new patch to the chunk list so it can be found in subsequent processing
                all_chunks.push(output_chunk_name.clone());
                game_dir.join(output_chunk_name.to_string())
            };
            println!("Output patch file: {}", output_path.display());
            // Note: replace mode implies full-package output (all entries
            // must be present for the pak to stand in for the original).
            self.process_chunk(
                self.filename_table(),
                &chunk_path,
                &output_path,
                use_replace_mode,
                true,
            )?;
            // In replace mode, backup the original file
            // and rename the temporary file to the original file name
            if use_replace_mode {
                // Backup the original file
                let backup_path = chunk_path.with_extension("pak.backup");
                if backup_path.exists() {
                    fs::remove_file(&backup_path)?;
                }
                fs::rename(&chunk_path, &backup_path)?;
                // Rename the temporary file to the original file name
                fs::rename(&output_path, &chunk_path)?;
            }
            println!();
        }
        Ok(())
    }
    /// Manual mode: process exactly one pak chosen by path; output goes next
    /// to it (final ".pak" replaced with ".uncompressed.pak").
    fn manual_mode(&mut self) -> color_eyre::Result<()> {
        let input: String = Input::with_theme(&ColorfulTheme::default())
            .show_default(true)
            .default("re_chunk_000.pak.sub_000.pak".to_string())
            .with_prompt("Input .pak file path")
            .interact_text()
            .unwrap()
            .trim_matches(|c| c == '\"' || c == '\'')
            .to_string();
        let input_path = Path::new(&input);
        if !input_path.is_file() {
            bail!("input file not exists.");
        }
        let use_full_package_mode = Select::with_theme(&ColorfulTheme::default())
            .with_prompt(
                "Package all files, including non-tex files (for replacing original files)",
            )
            .default(0)
            .items(&FALSE_TRUE_SELECTION)
            .interact()
            .unwrap();
        let use_full_package_mode = use_full_package_mode == 1;
        let use_feature_clone = Select::with_theme(&ColorfulTheme::default())
            .with_prompt("Clone feature flags from original file?")
            .default(1)
            .items(&FALSE_TRUE_SELECTION)
            .interact()
            .unwrap();
        let use_feature_clone = use_feature_clone == 1;
        self.process_chunk(
            self.filename_table(),
            input_path,
            &input_path.with_extension("uncompressed.pak"),
            use_full_package_mode,
            use_feature_clone,
        )?;
        Ok(())
    }
    /// Restore mode: scan the game directory for files this tool produced
    /// (identified by their embedded metadata entry), restore `.pak.backup`
    /// files over replace-mode outputs, and remove/neutralize patch-mode
    /// outputs.
    fn restore_mode(&mut self) -> color_eyre::Result<()> {
        let current_dir = std::env::current_dir()?;
        let game_dir: String = Input::<String>::with_theme(&ColorfulTheme::default())
            .show_default(true)
            .default(current_dir.to_string_lossy().to_string())
            .with_prompt("Input MonsterHunterWilds directory path")
            .interact_text()
            .unwrap()
            .trim_matches(|c| c == '\"' || c == '\'')
            .to_string();
        let game_dir = Path::new(&game_dir);
        if !game_dir.is_dir() {
            bail!("game directory not exists.");
        }
        // scan all pak files, find files generated by this tool
        println!("Scanning tool generated files...");
        let dir = fs::read_dir(game_dir)?;
        let mut tool_generated_files = Vec::new();
        // NOTE(review): backup_files is only used for the count printed
        // below; actual backup restoration is driven by metadata matches.
        let mut backup_files = Vec::new();
        let mut all_chunks = Vec::new();
        for entry in dir {
            let entry = entry?;
            if !entry.file_type()?.is_file() {
                continue;
            }
            let file_name = entry.file_name().to_string_lossy().to_string();
            let file_path = entry.path();
            // check backup files
            if file_name.ends_with(".pak.backup") {
                backup_files.push(file_path);
                continue;
            }
            // check pak files
            if !file_name.ends_with(".pak") || !file_name.starts_with("re_chunk_") {
                continue;
            }
            // collect chunk info
            if let Ok(chunk_name) = ChunkName::try_from_str(&file_name) {
                all_chunks.push(chunk_name.clone());
            }
            // check if the file is generated by this tool
            if let Ok(Some(metadata)) = self.check_tool_generated_file(&file_path) {
                tool_generated_files.push((file_path, metadata));
            }
        }
        if tool_generated_files.is_empty() && backup_files.is_empty() {
            println!("No files found to restore.");
            return Ok(());
        }
        println!(
            "Found {} tool generated files and {} backup files",
            tool_generated_files.len(),
            backup_files.len()
        );
        // restore
        let mut patch_files_to_remove = Vec::new();
        for (file_path, metadata) in &tool_generated_files {
            if metadata.is_full_package() {
                // restore full package mode (replace mode)
                // this is a replace mode generated file, find the corresponding backup file
                let backup_path = file_path.with_extension("pak.backup");
                if backup_path.exists() {
                    println!("Restore replace mode file: {}", file_path.display());
                    // delete the current file and restore the backup
                    fs::remove_file(file_path)?;
                    fs::rename(&backup_path, file_path)?;
                    println!("  Restore backup file: {}", backup_path.display());
                } else {
                    println!("Warning: backup file not found {}", backup_path.display());
                }
            } else {
                // restore patch mode
                // this is a patch mode generated file
                if let Ok(chunk_name) =
                    ChunkName::try_from_str(&file_path.file_name().unwrap().to_string_lossy())
                {
                    patch_files_to_remove.push((file_path.clone(), chunk_name));
                }
            }
        }
        // remove patch files
        if !patch_files_to_remove.is_empty() {
            println!("Remove patch files...");
            // Iterate in reverse so higher-numbered patches are handled first.
            for (file_path, chunk_name) in patch_files_to_remove.iter().rev() {
                println!("Remove patch file: {}", file_path.display());
                // Check if there are any patches with higher numbers
                let has_higher_patches = all_chunks.iter().any(|c| {
                    c.major_id == chunk_name.major_id
                        && c.sub_id == chunk_name.sub_id
                        && match (c.sub_id, c.sub_patch_id) {
                            (Some(_), Some(patch_id)) => {
                                patch_id > chunk_name.sub_patch_id.unwrap()
                            }
                            // NOTE(review): for chunks without a sub id,
                            // ChunkName::try_from_str stores patch numbers in
                            // `patch_id`, never `sub_patch_id`, so this arm
                            // looks unreachable as written — and
                            // `chunk_name.patch_id.unwrap()` would panic if
                            // `patch_id` were None. Confirm intended field.
                            (None, Some(patch_id)) => patch_id > chunk_name.patch_id.unwrap(),
                            _ => false,
                        }
                });
                if has_higher_patches {
                    // create an empty patch file instead of deleting, to keep the patch sequence continuous
                    self.create_empty_patch_file(file_path)?;
                    println!("  Create empty patch file to keep sequence continuous");
                } else {
                    // no higher patches exist, safe to delete
                    fs::remove_file(file_path)?;
                    println!("  Removed patch file");
                }
            }
        }
        println!("Restore completed!");
        Ok(())
    }
    /// check if the file is generated by this tool, return metadata
    ///
    /// Unreadable or non-pak files yield `Ok(None)` rather than an error, so
    /// the directory scan can skip them silently.
    fn check_tool_generated_file(
        &self,
        file_path: &Path,
    ) -> color_eyre::Result<Option<PakMetadata>> {
        let file = match fs::File::open(file_path) {
            Ok(file) => file,
            Err(_) => return Ok(None),
        };
        let mut reader = io::BufReader::new(file);
        let pak_archive = match ree_pak_core::read::read_archive(&mut reader) {
            Ok(archive) => archive,
            Err(_) => return Ok(None),
        };
        PakMetadata::from_pak_archive(reader, &pak_archive)
    }
    /// create an empty patch file
    ///
    /// The file contains only the tool metadata entry, so a later restore run
    /// still recognizes it as tool-generated.
    fn create_empty_patch_file(&self, file_path: &Path) -> color_eyre::Result<()> {
        let out_file = OpenOptions::new()
            .create(true)
            .truncate(true)
            .write(true)
            .open(file_path)?;
        let mut pak_writer = ree_pak_core::write::PakWriter::new(out_file, 1);
        // write metadata to mark this is an empty patch file
        let metadata = PakMetadata::new(false);
        metadata.write_to_pak(&mut pak_writer)?;
        pak_writer.finish()?;
        Ok(())
    }
}
/// Returns true when `hash` resolves (via the file name table) to a name
/// ending in the MH Wilds tex extension `.tex.241106027`; unknown hashes are
/// treated as non-tex.
fn is_tex_file(hash: u64, file_name_table: &FileNameTable) -> bool {
    match file_name_table.get_file_name(hash) {
        Some(file_name) => file_name.get_name().ends_with(".tex.241106027"),
        None => false,
    }
}
/// Writes one entry's `data` into the output pak under `file_name`.
///
/// When `use_feature_clone` is set, the original entry's attribute flags are
/// copied onto the new entry. Returns the number of payload bytes written.
fn write_to_pak<W>(
    writer: &mut ree_pak_core::write::PakWriter<W>,
    entry: &PakEntry,
    file_name: impl FileNameExt,
    data: &[u8],
    use_feature_clone: bool,
) -> color_eyre::Result<usize>
where
    W: io::Write + io::Seek,
{
    // Expression form instead of mutate-in-place for the options value.
    let file_options = if use_feature_clone {
        FileOptions::default().with_unk_attr(*entry.unk_attr())
    } else {
        FileOptions::default()
    };
    writer.start_file(file_name, file_options)?;
    writer.write_all(data)?;
    Ok(data.len())
}
/// Block until the user presses Enter, showing `msg` as the prompt text.
/// The entered text (if any) is discarded.
fn wait_for_enter(msg: &str) {
    let _: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt(msg)
        .allow_empty(true)
        .interact_text()
        .unwrap();
}

174
src/chunk.rs Normal file
View File

@@ -0,0 +1,174 @@
//! Chunk file name format
//!
//! File name structure:
//! - Base: re_chunk_XXX.pak
//! - Patch: re_chunk_XXX.pak.patch_XXX.pak
//! - Sub: re_chunk_XXX.pak.sub_XXX.pak
//! - Sub Patch: re_chunk_XXX.pak.sub_XXX.pak.patch_XXX.pak
use color_eyre::eyre;
/// Parsed form of a chunk pak file name; see the module docs above for the
/// four supported layouts. Fields that are absent in a given layout are
/// `None`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ChunkName {
    /// Major chunk ID (XXX in re_chunk_XXX.pak)
    pub major_id: u32,
    /// Patch number (XXX in .patch_XXX.pak)
    pub patch_id: Option<u32>,
    /// Sub chunk ID (XXX in .sub_XXX.pak)
    pub sub_id: Option<u32>,
    /// Patch number for sub chunk (YYY in .sub_XXX.pak.patch_YYY.pak)
    pub sub_patch_id: Option<u32>,
}
impl ChunkName {
    /// Create a new base chunk name (re_chunk_XXX.pak)
    pub fn new(major_id: u32) -> Self {
        Self {
            major_id,
            patch_id: None,
            sub_id: None,
            sub_patch_id: None,
        }
    }
    /// Create a chunk name from a string
    ///
    /// Accepts the layouts documented at the top of this module; anything
    /// else is rejected with a descriptive error.
    pub fn try_from_str(name: &str) -> color_eyre::Result<Self> {
        let dot_parts = name.split('.').collect::<Vec<&str>>();
        // Every component is "<name>.pak", so a valid chunk name always has
        // an even, non-zero number of dot-separated parts.
        if dot_parts.len() < 2 || dot_parts.len() % 2 != 0 {
            return Err(eyre::eyre!(
                "Invalid chunk name with odd number of parts: {}",
                name
            ));
        }
        // every 2 parts is a component
        let components = dot_parts
            .chunks_exact(2)
            .map(|c| (c[0], c[1]))
            .collect::<Vec<(&str, &str)>>();
        // check if all parts have the correct extension
        if !components.iter().all(|(_, ext)| *ext == "pak") {
            return Err(eyre::eyre!(
                "Invalid chunk name with invalid extension: {}",
                name
            ));
        }
        let mut this = Self::new(0);
        for (name, _) in components.iter() {
            let component = Self::parse_component(name)?;
            match component {
                Component::Major(id) => this.major_id = id,
                Component::Sub(id) => this.sub_id = Some(id),
                Component::Patch(id) => {
                    // A patch component that follows a sub component patches
                    // the sub chunk; otherwise it patches the base chunk.
                    if this.sub_id.is_some() {
                        this.sub_patch_id = Some(id);
                    } else {
                        this.patch_id = Some(id);
                    }
                }
            }
        }
        Ok(this)
    }
    /// Parse a single `<name>` component (the part before `.pak`).
    ///
    /// Uses `strip_prefix` in the condition itself instead of the
    /// `starts_with` + `strip_prefix().unwrap()` pair (clippy::manual_strip).
    fn parse_component(name: &str) -> color_eyre::Result<Component> {
        if let Some(rest) = name.strip_prefix("re_chunk_") {
            let major_id = rest
                .parse::<u32>()
                .map_err(|e| eyre::eyre!("Chunk name with invalid major ID: {}", e))?;
            Ok(Component::Major(major_id))
        } else if let Some(rest) = name.strip_prefix("patch_") {
            let patch_id = rest
                .parse::<u32>()
                .map_err(|e| eyre::eyre!("Chunk name with invalid patch ID: {}", e))?;
            Ok(Component::Patch(patch_id))
        } else if let Some(rest) = name.strip_prefix("sub_") {
            let sub_id = rest
                .parse::<u32>()
                .map_err(|e| eyre::eyre!("Chunk name with invalid sub ID: {}", e))?;
            Ok(Component::Sub(sub_id))
        } else {
            Err(eyre::eyre!(
                "Invalid chunk name with invalid component: {}",
                name
            ))
        }
    }
}
impl std::fmt::Display for ChunkName {
    /// Renders the parsed chunk back into its on-disk file name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "re_chunk_{:03}.pak", self.major_id)?;
        // A base-chunk patch ends the name here; sub components (if any were
        // set by hand) are intentionally not emitted in that case.
        if let Some(patch_id) = self.patch_id {
            return write!(f, ".patch_{:03}.pak", patch_id);
        }
        if let Some(sub_id) = self.sub_id {
            write!(f, ".sub_{:03}.pak", sub_id)?;
        }
        if let Some(sub_patch_id) = self.sub_patch_id {
            write!(f, ".patch_{:03}.pak", sub_patch_id)?;
        }
        Ok(())
    }
}
impl PartialOrd for ChunkName {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ChunkName {
    /// Lexicographic order over (major, sub, patch, sub-patch). `None` sorts
    /// before `Some`, per `Option`'s derived ordering — identical to chaining
    /// field comparisons with `then`.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        let lhs = (self.major_id, self.sub_id, self.patch_id, self.sub_patch_id);
        let rhs = (other.major_id, other.sub_id, other.patch_id, other.sub_patch_id);
        lhs.cmp(&rhs)
    }
}
/// One parsed `<name>.pak` component of a chunk file name.
enum Component {
    // `re_chunk_XXX`
    Major(u32),
    // `patch_XXX`
    Patch(u32),
    // `sub_XXX`
    Sub(u32),
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip check: parsing a name and re-formatting it must be the
    // identity for every supported chunk name layout.
    #[test]
    fn test_chunk_name_formats() {
        // Test base chunk
        let base = ChunkName::new(0);
        assert_eq!(base.to_string(), "re_chunk_000.pak");
        // Test patch chunk
        let patch = ChunkName::try_from_str("re_chunk_000.pak.patch_001.pak").unwrap();
        assert_eq!(patch.to_string(), "re_chunk_000.pak.patch_001.pak");
        // Test sub chunk
        let sub = ChunkName::try_from_str("re_chunk_000.pak.sub_000.pak").unwrap();
        assert_eq!(sub.to_string(), "re_chunk_000.pak.sub_000.pak");
        // Test sub patch chunk
        let sub_patch =
            ChunkName::try_from_str("re_chunk_000.pak.sub_000.pak.patch_001.pak").unwrap();
        assert_eq!(
            sub_patch.to_string(),
            "re_chunk_000.pak.sub_000.pak.patch_001.pak"
        );
    }
}

View File

@@ -1,150 +1,32 @@
use std::{ mod app;
fs::{self, OpenOptions}, mod chunk;
io::{self, Write}, mod metadata;
path::Path, mod util;
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
use colored::Colorize;
use dialoguer::{Input, theme::ColorfulTheme}; use dialoguer::{Input, theme::ColorfulTheme};
use indicatif::{HumanBytes, ProgressBar, ProgressStyle};
use parking_lot::Mutex;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use re_tex::tex::Tex;
use ree_pak_core::{filename::FileNameTable, read::archive::PakArchiveReader, write::FileOptions};
const FILE_NAME_LIST: &[u8] = include_bytes!("../assets/MHWs_STM_Release.list.zst");
fn main() { fn main() {
println!("Version {}. Tool by @Eigeen", env!("CARGO_PKG_VERSION")); std::panic::set_hook(Box::new(panic_hook));
if let Err(e) = main_entry() { let mut app = app::App::default();
eprintln!("Error: {e}"); if let Err(e) = app.run() {
eprintln!("{}: {:#}", "Error".red().bold(), e);
wait_for_exit(); wait_for_exit();
std::process::exit(1); std::process::exit(1);
} }
wait_for_exit();
} }
fn main_entry() -> eyre::Result<()> { fn panic_hook(info: &std::panic::PanicHookInfo) {
let input: String = Input::with_theme(&ColorfulTheme::default()) eprintln!("{}: {}", "Panic".red().bold(), info);
.show_default(true) wait_for_exit();
.default("re_chunk_000.pak.sub_000.pak".to_string()) std::process::exit(1);
.with_prompt("Input .pak file path")
.interact_text()
.unwrap();
println!("Input file: {}", input);
let input_path = Path::new(&input);
if !input_path.is_file() {
eyre::bail!("input file not exists.");
}
println!("Loading embedded file name table...");
let filename_table = FileNameTable::from_bytes(FILE_NAME_LIST)?;
let file_path =
"E:/SteamLibrary/steamapps/common/MonsterHunterWilds/re_chunk_000.pak.sub_000.pak";
let file = fs::File::open(file_path)?;
let mut reader = io::BufReader::new(file);
println!("Reading pak archive...");
let pak_archive = ree_pak_core::read::read_archive(&mut reader)?;
let archive_reader = PakArchiveReader::new(reader, &pak_archive);
let archive_reader_mtx = Mutex::new(archive_reader);
// filtered entries
println!("Filtering entries...");
let entries = pak_archive
.entries()
.iter()
.filter(|entry| {
let Some(file_name) = filename_table.get_file_name(entry.hash()) else {
return false;
};
file_name.get_name().ends_with(".tex.241106027")
})
.collect::<Vec<_>>();
// new pak archive
let output_path = input_path.with_extension("uncompressed.pak");
println!("Output file: {}", output_path.to_string_lossy());
let out_file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(output_path)?;
let pak_writer = ree_pak_core::write::PakWriter::new(out_file, entries.len() as u64);
let pak_writer_mtx = Arc::new(Mutex::new(pak_writer));
let bar = ProgressBar::new(entries.len() as u64);
bar.set_style(
ProgressStyle::default_bar().template("Bytes written: {msg}\n{pos}/{len} {wide_bar}")?,
);
bar.enable_steady_tick(Duration::from_millis(200));
let pak_writer_mtx1 = Arc::clone(&pak_writer_mtx);
let bar1 = bar.clone();
let bytes_written = AtomicUsize::new(0);
let err = entries
.par_iter()
.try_for_each(move |&entry| -> eyre::Result<()> {
let pak_writer_mtx = &pak_writer_mtx1;
let bar = &bar1;
// read raw tex file
// parse tex file
let mut entry_reader = {
let mut archive_reader = archive_reader_mtx.lock();
archive_reader.owned_entry_reader(entry.clone())?
};
let mut tex = Tex::from_reader(&mut entry_reader)?;
// decompress mipmaps
tex.batch_decompress()?;
let tex_bytes = tex.as_bytes()?;
bytes_written.fetch_add(tex_bytes.len() as usize, Ordering::SeqCst);
// save file
let file_name = filename_table.get_file_name(entry.hash()).unwrap().clone();
{
let mut pak_writer = pak_writer_mtx.lock();
pak_writer.start_file(file_name, FileOptions::default())?;
pak_writer.write_all(&tex_bytes)?;
}
bar.inc(1);
if bar.position() % 100 == 0 {
bar.set_message(
HumanBytes(bytes_written.load(Ordering::SeqCst) as u64).to_string(),
);
}
Ok(())
});
if let Err(e) = err {
eprintln!("Error occurred when processing tex: {e}");
eprintln!(
"The process terminated early, we'll save the current processed tex files to pak file."
);
}
let pak_writer = Arc::try_unwrap(pak_writer_mtx);
match pak_writer {
Ok(pak_writer) => pak_writer.into_inner().finish()?,
Err(_) => panic!("Arc::try_unwrap failed"),
};
bar.finish();
println!("Done!");
println!(
"You should rename the output file like `re_chunk_000.pak.sub_000.pak.patch_xxx.pak`, or manage it by your favorite mod manager."
);
Ok(())
} }
fn wait_for_exit() { fn wait_for_exit() {
let _: String = Input::with_theme(&ColorfulTheme::default()) let _: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Press Enter to exit")
.allow_empty(true) .allow_empty(true)
.interact_text() .interact_text()
.unwrap(); .unwrap();

72
src/metadata.rs Normal file
View File

@@ -0,0 +1,72 @@
//! Extended metadata for generated pak files.
use std::io::{self, Read, Write};
use ree_pak_core::{
filename::{FileNameExt, FileNameFull},
pak::PakArchive,
read::archive::PakArchiveReader,
write::{FileOptions, PakWriter},
};
use serde::{Deserialize, Serialize};
const METADATA_KEY: &str = "__TEX_DECOMPRESSOR_METADATA__";
/// Metadata embedded into every generated pak so the tool can later
/// recognize (and restore) its own output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PakMetadata {
    // Metadata format version; currently always 1 (set in `new`).
    version: u32,
    // True when the pak contains every entry of its source (replace mode),
    // false when it only contains re-packed tex entries (patch mode).
    is_full_package: bool,
}
impl PakMetadata {
    /// Build metadata (format version 1) for a freshly generated pak.
    pub fn new(is_full_package: bool) -> Self {
        Self {
            version: 1,
            is_full_package,
        }
    }
    /// Whether the generated pak contained every entry of its source
    /// (replace mode) rather than only re-packed tex entries.
    pub fn is_full_package(&self) -> bool {
        self.is_full_package
    }
    /// Look up and deserialize the tool metadata entry in a parsed archive.
    ///
    /// Returns `Ok(None)` when no entry hashes to `METADATA_KEY`, i.e. the
    /// pak was not produced by this tool.
    pub fn from_pak_archive<R>(
        reader: R,
        pak_archive: &PakArchive,
    ) -> color_eyre::Result<Option<Self>>
    where
        R: io::Read + io::Seek,
    {
        let key_name = FileNameFull::new(METADATA_KEY);
        // Hoisted out of the `find` closure: the key's hash is loop-invariant,
        // so compute it once instead of per entry.
        let key_hash = key_name.hash_mixed();
        let entry = pak_archive
            .entries()
            .iter()
            .find(|entry| entry.hash() == key_hash);
        if let Some(entry) = entry {
            // read file
            let mut archive_reader = PakArchiveReader::new(reader, pak_archive);
            let mut entry_reader = archive_reader.owned_entry_reader(entry.clone())?;
            let mut buf = Vec::new();
            entry_reader.read_to_end(&mut buf)?;
            let metadata = serde_json::from_slice(&buf)?;
            Ok(Some(metadata))
        } else {
            Ok(None)
        }
    }
    /// Serialize this metadata as JSON and append it to `pak_writer` as a
    /// regular entry named `METADATA_KEY`.
    pub fn write_to_pak<W>(&self, pak_writer: &mut PakWriter<W>) -> color_eyre::Result<()>
    where
        W: io::Write + io::Seek,
    {
        let json_str = serde_json::to_string(self)?;
        let json_bytes = json_str.as_bytes();
        pak_writer.start_file(METADATA_KEY, FileOptions::default())?;
        pak_writer.write_all(json_bytes)?;
        Ok(())
    }
}

5
src/util.rs Normal file
View File

@@ -0,0 +1,5 @@
use indicatif::HumanBytes;
/// Format a raw byte count as a human-readable string, delegating to
/// `indicatif::HumanBytes` for the unit scaling.
pub fn human_bytes(bytes: u64) -> String {
    HumanBytes(bytes).to_string()
}