diff --git a/Cargo.lock b/Cargo.lock
index 212ea792..1d289152 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -475,21 +475,21 @@ dependencies = [
 
 [[package]]
 name = "deku"
-version = "0.17.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "709ade444d53896e60f6265660eb50480dd08b77bfc822e5dcc233b88b0b2fba"
+checksum = "a9711031e209dc1306d66985363b4397d4c7b911597580340b93c9729b55f6eb"
 dependencies = [
  "bitvec",
  "deku_derive",
- "no_std_io",
+ "no_std_io2",
  "rustversion",
 ]
 
 [[package]]
 name = "deku_derive"
-version = "0.17.0"
+version = "0.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7534973f93f9de83203e41c8ddd32d230599fa73fa889f3deb1580ccd186913"
+checksum = "58cb0719583cbe4e81fb40434ace2f0d22ccc3e39a74bb3796c22b451b4f139d"
 dependencies = [
  "darling",
  "proc-macro-crate",
@@ -917,10 +917,10 @@ dependencies = [
 ]
 
 [[package]]
-name = "no_std_io"
-version = "0.6.0"
+name = "no_std_io2"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fa5f306a6f2c01b4fd172f29bb46195b1764061bf926c75e96ff55df3178208"
+checksum = "9f038b95e66372ec5f4adabd615fc9a46a1fe42bcfe549863921c0e44667b605"
 dependencies = [
  "memchr",
 ]
@@ -1059,9 +1059,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro-crate"
-version = "3.1.0"
+version = "3.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284"
+checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
 dependencies = [
  "toml_edit",
 ]
@@ -1487,15 +1487,15 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "toml_datetime"
-version = "0.6.5"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
 
 [[package]]
 name = "toml_edit"
-version = "0.21.1"
+version = "0.22.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1"
+checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
 dependencies = [
  "indexmap",
  "toml_datetime",
@@ -1933,9 +1933,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
 name = "winnow"
-version = "0.5.40"
+version = "0.6.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
 dependencies = [
  "memchr",
 ]
diff --git a/backhand-cli/src/bin/unsquashfs.rs b/backhand-cli/src/bin/unsquashfs.rs
index abeb6fca..0159743c 100644
--- a/backhand-cli/src/bin/unsquashfs.rs
+++ b/backhand-cli/src/bin/unsquashfs.rs
@@ -468,8 +468,8 @@ fn extract_all<'a, S: ParallelIterator<Item = &'a Node<SquashfsFileReader>>>(
 
                     // write to file
                     let fd = File::create(&filepath).unwrap();
-                    let mut writer = BufWriter::with_capacity(file.basic.file_size as usize, &fd);
-                    let file = filesystem.file(&file.basic);
+                    let mut writer = BufWriter::with_capacity(file.file_len(), &fd);
+                    let file = filesystem.file(file);
                     let mut reader = file.reader();
 
                     match io::copy(&mut reader, &mut writer) {
diff --git a/backhand-test/tests/non_standard.rs b/backhand-test/tests/non_standard.rs
index 839445d0..839ffb87 100644
--- a/backhand-test/tests/non_standard.rs
+++ b/backhand-test/tests/non_standard.rs
@@ -30,12 +30,9 @@ fn full_test(
     {
         let file = BufReader::new(File::open(og_path).unwrap());
         info!("calling from_reader");
-        let og_filesystem = FilesystemReader::from_reader_with_offset_and_kind(
-            file,
-            offset,
-            Kind::from_kind(&kind),
-        )
-        .unwrap();
+        let og_filesystem =
+            FilesystemReader::from_reader_with_offset_and_kind(file, offset, Kind::from_kind(kind))
+                .unwrap();
         let mut new_filesystem = FilesystemWriter::from_fs_reader(&og_filesystem).unwrap();
         if let Some(pad) = pad {
             new_filesystem.set_kib_padding(pad);
@@ -57,7 +54,7 @@ fn full_test(
         let _new_filesystem = FilesystemReader::from_reader_with_offset_and_kind(
             created_file,
             offset,
-            Kind::from_kind(&kind),
+            Kind::from_kind(kind),
         )
         .unwrap();
     }
@@ -140,7 +137,7 @@ fn test_custom_compressor() {
         if let Compressor::Gzip = compressor {
             out.resize(out.capacity(), 0);
             let mut decompressor = libdeflater::Decompressor::new();
-            let amt = decompressor.zlib_decompress(&bytes, out).unwrap();
+            let amt = decompressor.zlib_decompress(bytes, out).unwrap();
             out.truncate(amt);
         } else {
             unimplemented!();
diff --git a/backhand/Cargo.toml b/backhand/Cargo.toml
index 116c0d85..2f70a5ef 100644
--- a/backhand/Cargo.toml
+++ b/backhand/Cargo.toml
@@ -16,7 +16,7 @@ all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
 
 [dependencies]
-deku = "0.17.0"
+deku = { version = "0.18.1", default-features = false, features = ["std"] }
 tracing = { version = "0.1.40" }
 thiserror = "1.0.63"
 flate2 = { version = "1.0.33", optional = true, features = ["zlib-ng"] }
diff --git a/backhand/src/compressor.rs b/backhand/src/compressor.rs
index b5ae1ab8..41c39855 100644
--- a/backhand/src/compressor.rs
+++ b/backhand/src/compressor.rs
@@ -375,7 +375,7 @@ impl CompressionAction for DefaultCompressor {
         if let Some(options) = &fs_compressor.options {
             trace!("writing compression options");
             superblock.flags |= Flags::CompressorOptionsArePresent as u16;
-            let mut compression_opt_buf_out = vec![];
+            let mut compression_opt_buf_out = Cursor::new(vec![]);
             let mut writer = Writer::new(&mut compression_opt_buf_out);
             match options {
                 CompressionOptions::Gzip(gzip) => {
@@ -398,7 +398,7 @@ impl CompressionAction for DefaultCompressor {
                 superblock.block_size,
                 Kind { inner: kind.inner.clone() },
             );
-            metadata.write_all(&compression_opt_buf_out)?;
+            metadata.write_all(&compression_opt_buf_out.into_inner())?;
 
             metadata.finalize(&mut w)?;
         }
diff --git a/backhand/src/data.rs b/backhand/src/data.rs
index 452a1e9b..306501f7 100644
--- a/backhand/src/data.rs
+++ b/backhand/src/data.rs
@@ -137,7 +137,7 @@ impl<'a> DataWriter<'a> {
         mut writer: W,
     ) -> Result<(usize, Added), BackhandError> {
         //just clone it, because block sizes where never modified, just copy it
-        let mut block_sizes = reader.file.basic.block_sizes.clone();
+        let mut block_sizes = reader.file.file.block_sizes().to_vec();
         let mut read_buf = vec![];
         let mut decompress_buf = vec![];
 
@@ -190,7 +190,7 @@ impl<'a> DataWriter<'a> {
                 writer.write_all(&read_buf)?;
             }
         }
-        let file_size = reader.file.basic.file_size as usize;
+        let file_size = reader.file.file.file_len();
         Ok((file_size, Added::Data { blocks_start, block_sizes }))
     }
 
diff --git a/backhand/src/filesystem/node.rs b/backhand/src/filesystem/node.rs
index 40d7fd7c..a974c3bd 100644
--- a/backhand/src/filesystem/node.rs
+++ b/backhand/src/filesystem/node.rs
@@ -6,8 +6,8 @@ use std::sync::{Arc, Mutex};
 
 use super::normalize_squashfs_path;
 use crate::data::Added;
-use crate::inode::{BasicFile, InodeHeader};
-use crate::{BackhandError, FilesystemReaderFile, Id};
+use crate::inode::{BasicFile, ExtendedFile, InodeHeader};
+use crate::{BackhandError, DataSize, FilesystemReaderFile, Id};
 
 /// File information for Node
 #[derive(Debug, PartialEq, Eq, Default, Clone, Copy)]
@@ -91,8 +91,46 @@ pub enum InnerNode {
 
 /// Unread file for filesystem
 #[derive(Debug, PartialEq, Eq, Clone)]
-pub struct SquashfsFileReader {
-    pub basic: BasicFile,
+pub enum SquashfsFileReader {
+    Basic(BasicFile),
+    Extended(ExtendedFile),
+}
+
+impl SquashfsFileReader {
+    pub fn file_len(&self) -> usize {
+        match self {
+            SquashfsFileReader::Basic(basic) => basic.file_size as usize,
+            SquashfsFileReader::Extended(extended) => extended.file_size as usize,
+        }
+    }
+
+    pub fn frag_index(&self) -> usize {
+        match self {
+            SquashfsFileReader::Basic(basic) => basic.frag_index as usize,
+            SquashfsFileReader::Extended(extended) => extended.frag_index as usize,
+        }
+    }
+
+    pub fn block_sizes(&self) -> &[DataSize] {
+        match self {
+            SquashfsFileReader::Basic(basic) => &basic.block_sizes,
+            SquashfsFileReader::Extended(extended) => &extended.block_sizes,
+        }
+    }
+
+    pub fn blocks_start(&self) -> u64 {
+        match self {
+            SquashfsFileReader::Basic(basic) => basic.blocks_start as u64,
+            SquashfsFileReader::Extended(extended) => extended.blocks_start,
+        }
+    }
+
+    pub fn block_offset(&self) -> u32 {
+        match self {
+            SquashfsFileReader::Basic(basic) => basic.block_offset,
+            SquashfsFileReader::Extended(extended) => extended.block_offset,
+        }
+    }
 }
 
 /// Read file from other SquashfsFile or an user file
diff --git a/backhand/src/filesystem/reader.rs b/backhand/src/filesystem/reader.rs
index 90f88bb8..5e60a79e 100644
--- a/backhand/src/filesystem/reader.rs
+++ b/backhand/src/filesystem/reader.rs
@@ -7,7 +7,6 @@ use crate::data::DataSize;
 use crate::error::BackhandError;
 use crate::fragment::Fragment;
 use crate::id::Id;
-use crate::inode::BasicFile;
 use crate::kinds::Kind;
 use crate::reader::BufReadSeek;
 use crate::squashfs::Cache;
@@ -130,8 +129,8 @@ impl<'b> FilesystemReader<'b> {
     }
 
     /// Return a file handler for this file
-    pub fn file<'a>(&'a self, basic_file: &'a BasicFile) -> FilesystemReaderFile<'a, 'b> {
-        FilesystemReaderFile::new(self, basic_file)
+    pub fn file<'a>(&'a self, file: &'a SquashfsFileReader) -> FilesystemReaderFile<'a, 'b> {
+        FilesystemReaderFile::new(self, file)
     }
 
     /// Iterator of all files, including the root
@@ -154,7 +153,7 @@ impl<'b> FilesystemReader<'b> {
     ///     match &node.inner {
     ///         InnerNode::File(file) => {
     ///             let mut reader = filesystem
-    ///                 .file(&file.basic)
+    ///                 .file(&file)
     ///                 .reader();
     ///             // Then, do something with the reader
     ///         },
@@ -171,12 +170,12 @@
 #[derive(Copy, Clone)]
 pub struct FilesystemReaderFile<'a, 'b> {
     pub(crate) system: &'a FilesystemReader<'b>,
-    pub(crate) basic: &'a BasicFile,
+    pub(crate) file: &'a SquashfsFileReader,
 }
 
 impl<'a, 'b> FilesystemReaderFile<'a, 'b> {
-    pub fn new(system: &'a FilesystemReader<'b>, basic: &'a BasicFile) -> Self {
-        Self { system, basic }
+    pub fn new(system: &'a FilesystemReader<'b>, file: &'a SquashfsFileReader) -> Self {
+        Self { system, file }
     }
 
     /// Create [`SquashfsReadFile`] that impls [`std::io::Read`] from [`FilesystemReaderFile`].
@@ -190,18 +189,15 @@ impl<'a, 'b> FilesystemReaderFile<'a, 'b> {
     }
 
     pub fn fragment(&self) -> Option<&'a Fragment> {
-        if self.basic.frag_index == 0xffffffff {
+        if self.file.frag_index() == 0xffffffff {
             None
         } else {
-            self.system
-                .fragments
-                .as_ref()
-                .map(|fragments| &fragments[self.basic.frag_index as usize])
+            self.system.fragments.as_ref().map(|fragments| &fragments[self.file.frag_index()])
         }
     }
 
     pub(crate) fn raw_data_reader(&self) -> SquashfsRawData<'a, 'b> {
-        SquashfsRawData::new(Self { system: self.system, basic: self.basic })
+        SquashfsRawData::new(Self { system: self.system, file: self.file })
     }
 }
 
@@ -210,7 +206,7 @@ impl<'a, 'b> IntoIterator for FilesystemReaderFile<'a, 'b> {
     type Item = <BlockIterator<'a> as Iterator>::Item;
 
     fn into_iter(self) -> Self::IntoIter {
-        BlockIterator { blocks: &self.basic.block_sizes, fragment: self.fragment() }
+        BlockIterator { blocks: self.file.block_sizes(), fragment: self.fragment() }
     }
 }
 
@@ -252,7 +248,7 @@ pub(crate) struct SquashfsRawData<'a, 'b> {
 
 impl<'a, 'b> SquashfsRawData<'a, 'b> {
     pub fn new(file: FilesystemReaderFile<'a, 'b>) -> Self {
-        let pos = file.basic.blocks_start.into();
+        let pos = file.file.blocks_start();
         let current_block = file.into_iter();
         Self { file, current_block, pos }
     }
@@ -332,10 +328,10 @@ impl<'a, 'b> SquashfsRawData<'a, 'b> {
     #[inline]
     fn fragment_range(&self) -> std::ops::Range<usize> {
         let block_len = self.file.system.block_size as usize;
-        let block_num = self.file.basic.block_sizes.len();
-        let file_size = self.file.basic.file_size as usize;
+        let block_num = self.file.file.block_sizes().len();
+        let file_size = self.file.file.file_len();
         let frag_len = file_size - (block_num * block_len);
-        let frag_start = self.file.basic.block_offset as usize;
+        let frag_start = self.file.file.block_offset() as usize;
         let frag_end = frag_start + frag_len;
         frag_start..frag_end
     }
@@ -381,7 +377,7 @@ impl<'a, 'b> SquashfsRawData<'a, 'b> {
     #[inline]
     pub fn into_reader(self) -> SquashfsReadFile<'a, 'b> {
         let block_size = self.file.system.block_size as usize;
-        let bytes_available = self.file.basic.file_size as usize;
+        let bytes_available = self.file.file.file_len();
         SquashfsReadFile::new(block_size, self, 0, bytes_available)
     }
 }
diff --git a/backhand/src/filesystem/writer.rs b/backhand/src/filesystem/writer.rs
index 23e9c98f..287d2e7d 100644
--- a/backhand/src/filesystem/writer.rs
+++ b/backhand/src/filesystem/writer.rs
@@ -1,5 +1,5 @@
 use std::ffi::OsStr;
-use std::io::{Read, Seek, SeekFrom, Write};
+use std::io::{Cursor, Read, Seek, SeekFrom, Write};
 use std::num::NonZeroUsize;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
@@ -217,7 +217,7 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> {
             .map(|node| {
                 let inner = match &node.inner {
                     InnerNode::File(file) => {
-                        let reader = reader.file(&file.basic);
+                        let reader = reader.file(file);
                         InnerNode::File(SquashfsFileWriter::SquashfsFile(reader))
                     }
                     InnerNode::Symlink(x) => InnerNode::Symlink(x.clone()),
@@ -622,9 +622,10 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> {
         trace!("WRITING DIR: {block_offset:#02x?}");
         let mut total_size: usize = 3;
         for dir in Entry::into_dir(entries) {
-            let mut bytes = vec![];
-            let mut writer = Writer::new(&mut bytes);
+            let mut cursor = Cursor::new(vec![]);
+            let mut writer = Writer::new(&mut cursor);
             dir.to_writer(&mut writer, kind.inner.type_endian)?;
+            let bytes = cursor.into_inner();
             total_size += bytes.len();
             dir_writer.write_all(&bytes)?;
         }
@@ -831,25 +832,28 @@ impl<'a, 'b, 'c> FilesystemWriter<'a, 'b, 'c> {
         W: Write + Seek,
     {
         let mut ptrs: Vec<u64> = vec![];
-        let mut table_bytes = Vec::with_capacity(table.len() * element_size);
+        let table_bytes = Vec::with_capacity(table.len() * element_size);
+        let mut cursor_table = Cursor::new(table_bytes);
         let mut iter = table.iter().peekable();
         while let Some(t) = iter.next() {
             // convert fragment ptr to bytes
-            let mut table_writer = Writer::new(&mut table_bytes);
+            let mut table_writer = Writer::new(&mut cursor_table);
             t.to_writer(&mut table_writer, self.kind.inner.type_endian)?;
 
             // once table_bytes + next is over the maximum size of a metadata block, write
-            if ((table_bytes.len() + element_size) > METADATA_MAXSIZE) || iter.peek().is_none() {
+            if ((cursor_table.get_ref().len() + element_size) > METADATA_MAXSIZE)
+                || iter.peek().is_none()
+            {
                 ptrs.push(w.stream_position()?);
 
                 // write metadata len
-                let len = metadata::set_if_uncompressed(table_bytes.len() as u16);
+                let len = metadata::set_if_uncompressed(cursor_table.get_ref().len() as u16);
                 let mut writer = Writer::new(&mut w);
                 len.to_writer(&mut writer, self.kind.inner.data_endian)?;
 
                 // write metadata bytes
-                w.write_all(&table_bytes)?;
+                w.write_all(cursor_table.get_ref())?;
 
-                table_bytes.clear();
+                cursor_table.get_mut().clear();
             }
         }
diff --git a/backhand/src/inode.rs b/backhand/src/inode.rs
index 34108265..f9acdcd5 100644
--- a/backhand/src/inode.rs
+++ b/backhand/src/inode.rs
@@ -1,7 +1,7 @@
 //! Index Node for file or directory
 
 use core::fmt;
-use std::io::Write;
+use std::io::{Cursor, Write};
 
 use deku::prelude::*;
 
@@ -35,7 +35,7 @@ impl Inode {
         superblock: &SuperBlock,
         kind: &Kind,
     ) -> Entry<'a> {
-        let mut inode_bytes = vec![];
+        let mut inode_bytes = Cursor::new(vec![]);
         let mut writer = Writer::new(&mut inode_bytes);
         self.to_writer(
             &mut writer,
@@ -49,7 +49,7 @@ impl Inode {
         .unwrap();
         let start = m_writer.metadata_start;
         let offset = m_writer.uncompressed_bytes.len() as u16;
-        m_writer.write_all(&inode_bytes).unwrap();
+        m_writer.write_all(&inode_bytes.into_inner()).unwrap();
 
         Entry {
             start,
@@ -181,18 +181,6 @@ pub struct BasicFile {
     pub block_sizes: Vec<DataSize>,
 }
 
-impl From<&ExtendedFile> for BasicFile {
-    fn from(ex_file: &ExtendedFile) -> Self {
-        Self {
-            blocks_start: ex_file.blocks_start as u32,
-            frag_index: ex_file.frag_index,
-            block_offset: ex_file.block_offset,
-            file_size: ex_file.file_size as u32,
-            block_sizes: ex_file.block_sizes.clone(),
-        }
-    }
-}
-
 #[derive(Debug, DekuRead, DekuWrite, Clone, PartialEq, Eq)]
 #[deku(
     endian = "endian",
diff --git a/backhand/src/metadata.rs b/backhand/src/metadata.rs
index a0703ea8..89b4994c 100644
--- a/backhand/src/metadata.rs
+++ b/backhand/src/metadata.rs
@@ -112,7 +112,7 @@ impl Write for MetadataWriter {
     }
 }
 
-pub fn read_block(
+pub fn read_block(
     reader: &mut R,
     superblock: &SuperBlock,
     kind: &Kind,
diff --git a/backhand/src/squashfs.rs b/backhand/src/squashfs.rs
index 8e948a18..731fe336 100644
--- a/backhand/src/squashfs.rs
+++ b/backhand/src/squashfs.rs
@@ -538,17 +538,20 @@ impl<'b> Squashfs<'b> {
                     }
                     // BasicFile
                     InodeId::BasicFile => {
-                        trace!("before_file: {:#02x?}", entry);
-                        let basic = match &found_inode.inner {
-                            InodeInner::BasicFile(file) => file.clone(),
-                            InodeInner::ExtendedFile(file) => file.into(),
+                        let inner = match &found_inode.inner {
+                            InodeInner::BasicFile(file) => {
+                                SquashfsFileReader::Basic(file.clone())
+                            }
+                            InodeInner::ExtendedFile(file) => {
+                                SquashfsFileReader::Extended(file.clone())
+                            }
                             _ => {
                                 return Err(BackhandError::UnexpectedInode(
                                     found_inode.inner.clone(),
                                 ))
                             }
                         };
-                        InnerNode::File(SquashfsFileReader { basic })
+                        InnerNode::File(inner)
                     }
                     // Basic Symlink
                     InodeId::BasicSymlink => {
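
Below is a minimal caller-side sketch of the reworked reader API, not part of the patch itself. It assumes the existing backhand `FilesystemReader::files()` iterator and the `fullpath` field on `Node` (neither is touched by this patch); the `list_files` helper is hypothetical and only illustrates that `FilesystemReader::file` now takes the `SquashfsFileReader` enum and that sizes come from `file_len()` instead of `basic.file_size`.

use std::fs::File;
use std::io::{self, BufReader};

use backhand::{FilesystemReader, InnerNode};

fn list_files(image: &str) -> Result<(), Box<dyn std::error::Error>> {
    let file = BufReader::new(File::open(image)?);
    let filesystem = FilesystemReader::from_reader(file)?;

    for node in filesystem.files() {
        if let InnerNode::File(f) = &node.inner {
            // `f` is the SquashfsFileReader enum (Basic or Extended); size and
            // block info go through accessors instead of `f.basic.*` fields.
            println!("{}: {} bytes", node.fullpath.display(), f.file_len());

            // Streaming still goes through a FilesystemReaderFile handle, but
            // `file()` now takes the enum itself rather than a `&BasicFile`.
            let mut reader = filesystem.file(f).reader();
            let copied = io::copy(&mut reader, &mut io::sink())?;
            assert_eq!(copied as usize, f.file_len());
        }
    }
    Ok(())
}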