Fix clippy warnings

philipc committed Nov 22, 2022
1 parent 839877a commit 4a29150
Showing 12 changed files with 64 additions and 63 deletions.
16 changes: 8 additions & 8 deletions crates/examples/src/objdump.rs
@@ -12,43 +12,43 @@ pub fn print<W: Write, E: Write>(
) -> Result<()> {
let mut member_names: Vec<_> = member_names.into_iter().map(|name| (name, false)).collect();

if let Ok(archive) = ArchiveFile::parse(&*file) {
if let Ok(archive) = ArchiveFile::parse(file) {
writeln!(w, "Format: Archive (kind: {:?})", archive.kind())?;
for member in archive.members() {
match member {
Ok(member) => {
if find_member(&mut member_names, member.name()) {
writeln!(w)?;
writeln!(w, "{}:", String::from_utf8_lossy(member.name()))?;
if let Ok(data) = member.data(&*file) {
if let Ok(data) = member.data(file) {
dump_object(w, e, data)?;
}
}
}
Err(err) => writeln!(e, "Failed to parse archive member: {}", err)?,
}
}
} else if let Ok(arches) = FatHeader::parse_arch32(&*file) {
} else if let Ok(arches) = FatHeader::parse_arch32(file) {
writeln!(w, "Format: Mach-O Fat 32")?;
for arch in arches {
writeln!(w)?;
writeln!(w, "Fat Arch: {:?}", arch.architecture())?;
match arch.data(&*file) {
match arch.data(file) {
Ok(data) => dump_object(w, e, data)?,
Err(err) => writeln!(e, "Failed to parse Fat 32 data: {}", err)?,
}
}
} else if let Ok(arches) = FatHeader::parse_arch64(&*file) {
} else if let Ok(arches) = FatHeader::parse_arch64(file) {
writeln!(w, "Format: Mach-O Fat 64")?;
for arch in arches {
writeln!(w)?;
writeln!(w, "Fat Arch: {:?}", arch.architecture())?;
match arch.data(&*file) {
match arch.data(file) {
Ok(data) => dump_object(w, e, data)?,
Err(err) => writeln!(e, "Failed to parse Fat 64 data: {}", err)?,
}
}
} else if let Ok(cache) = DyldCache::<Endianness>::parse(&*file, extra_files) {
} else if let Ok(cache) = DyldCache::<Endianness>::parse(file, extra_files) {
writeln!(w, "Format: dyld cache {:?}-endian", cache.endianness())?;
writeln!(w, "Architecture: {:?}", cache.architecture())?;
for image in cache.images() {
@@ -74,7 +74,7 @@ pub fn print<W: Write, E: Write>(
dump_parsed_object(w, e, &file)?;
}
} else {
dump_object(w, e, &*file)?;
dump_object(w, e, file)?;
}

for (name, found) in member_names {
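
The objdump.rs hunks drop the `&*file` reborrows (the same one-line change appears in readobj/mod.rs below): when `file` is already a `&[u8]` and the callee takes `&[u8]`, re-borrowing only adds a reference for the compiler to peel off, which is what clippy's needless_borrow / borrow_deref_ref lints flag. A minimal standalone sketch, with a hypothetical `dump` helper standing in for the crate's functions:

    fn dump(data: &[u8]) -> usize {
        // Pretend to inspect the object file and report its size.
        data.len()
    }

    fn main() {
        let file: &[u8] = b"\x7fELF";
        // `dump(&*file)` reborrows a `&[u8]` only to produce another `&[u8]`;
        // clippy suggests passing the existing reference directly.
        let len = dump(file);
        println!("{len} bytes");
    }
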
56 changes: 28 additions & 28 deletions crates/examples/src/readobj/elf.rs
@@ -32,56 +32,56 @@ fn print_elf<Elf: FileHeader<Endian = Endianness>>(p: &mut Printer<'_>, elf: &El
fn print_file_header<Elf: FileHeader>(p: &mut Printer<'_>, endian: Elf::Endian, elf: &Elf) {
p.group("FileHeader", |p| {
p.group("Ident", |p| print_ident(p, elf.e_ident()));
p.field_enum("Type", elf.e_type(endian), &FLAGS_ET);
p.field_enum("Machine", elf.e_machine(endian), &FLAGS_EM);
p.field_enum("Type", elf.e_type(endian), FLAGS_ET);
p.field_enum("Machine", elf.e_machine(endian), FLAGS_EM);
let version = elf.e_version(endian);
if version < 256 {
p.field_enum("Version", version as u8, &FLAGS_EV);
p.field_enum("Version", version as u8, FLAGS_EV);
} else {
p.field_hex("Version", version);
}
p.field_enum("Type", elf.e_type(endian), &FLAGS_ET);
p.field_enum("Type", elf.e_type(endian), FLAGS_ET);
p.field_hex("Entry", elf.e_entry(endian).into());
p.field_hex("ProgramHeaderOffset", elf.e_phoff(endian).into());
p.field_hex("SectionHeaderOffset", elf.e_shoff(endian).into());
let flags = elf.e_flags(endian);
p.field_hex("Flags", flags);
match elf.e_machine(endian) {
EM_SPARC => p.flags(flags, 0, &FLAGS_EF_SPARC),
EM_SPARCV9 => p.flags(flags, 0, &FLAGS_EF_SPARCV9),
EM_SPARC => p.flags(flags, 0, FLAGS_EF_SPARC),
EM_SPARCV9 => p.flags(flags, 0, FLAGS_EF_SPARCV9),
EM_MIPS => {
p.flags(flags, 0, &FLAGS_EF_MIPS);
p.flags(flags, EF_MIPS_ARCH, &FLAGS_EF_MIPS_ARCH);
p.flags(flags, 0, FLAGS_EF_MIPS);
p.flags(flags, EF_MIPS_ARCH, FLAGS_EF_MIPS_ARCH);
// Some ABIs may have all these bits zeroed out
if flags & EF_MIPS_ABI != 0 {
p.flags(flags, EF_MIPS_ABI, &FLAGS_EF_MIPS_ABI);
p.flags(flags, EF_MIPS_ABI, FLAGS_EF_MIPS_ABI);
}
}
EM_PARISC => {
p.flags(flags, 0, &FLAGS_EF_PARISC);
p.flags(flags, EF_PARISC_ARCH, &FLAGS_EF_PARISC_ARCH);
p.flags(flags, 0, FLAGS_EF_PARISC);
p.flags(flags, EF_PARISC_ARCH, FLAGS_EF_PARISC_ARCH);
}
EM_ALPHA => p.flags(flags, 0, &FLAGS_EF_ALPHA),
EM_PPC => p.flags(flags, 0, &FLAGS_EF_PPC),
EM_PPC64 => p.flags(flags, 0, &FLAGS_EF_PPC64),
EM_ALPHA => p.flags(flags, 0, FLAGS_EF_ALPHA),
EM_PPC => p.flags(flags, 0, FLAGS_EF_PPC),
EM_PPC64 => p.flags(flags, 0, FLAGS_EF_PPC64),
EM_ARM => {
p.flags(flags, 0, &FLAGS_EF_ARM);
p.flags(flags, EF_ARM_EABIMASK, &FLAGS_EF_ARM_EABI);
p.flags(flags, 0, FLAGS_EF_ARM);
p.flags(flags, EF_ARM_EABIMASK, FLAGS_EF_ARM_EABI);
}
EM_CSKY => p.flags(flags, EF_CSKY_ABIMASK, &FLAGS_EF_CSKY_ABI),
EM_IA_64 => p.flags(flags, 0, &FLAGS_EF_IA_64),
EM_SH => p.flags(flags, EF_SH_MACH_MASK, &FLAGS_EF_SH_MACH),
EM_S390 => p.flags(flags, 0, &FLAGS_EF_S390),
EM_CSKY => p.flags(flags, EF_CSKY_ABIMASK, FLAGS_EF_CSKY_ABI),
EM_IA_64 => p.flags(flags, 0, FLAGS_EF_IA_64),
EM_SH => p.flags(flags, EF_SH_MACH_MASK, FLAGS_EF_SH_MACH),
EM_S390 => p.flags(flags, 0, FLAGS_EF_S390),
EM_RISCV => {
p.flags(flags, 0, &FLAGS_EF_RISCV);
p.flags(flags, EF_RISCV_FLOAT_ABI, &FLAGS_EF_RISCV_FLOAT_ABI);
p.flags(flags, 0, FLAGS_EF_RISCV);
p.flags(flags, EF_RISCV_FLOAT_ABI, FLAGS_EF_RISCV_FLOAT_ABI);
}
EM_LOONGARCH => {
p.flags(flags, 0, &FLAGS_EF_LARCH_OBJABI);
p.flags(flags, 0, FLAGS_EF_LARCH_OBJABI);
p.flags(
flags,
EF_LARCH_ABI_MODIFIER_MASK,
&FLAGS_EF_LARCH_ABI_MODIFIER,
FLAGS_EF_LARCH_ABI_MODIFIER,
);
}
_ => {}
@@ -97,10 +97,10 @@ fn print_file_header<Elf: FileHeader>(p: &mut Printer<'_>, endian: Elf::Endian,

fn print_ident(p: &mut Printer<'_>, ident: &Ident) {
p.field("Magic", format!("{:X?}", ident.magic));
p.field_enum("Class", ident.class, &FLAGS_EI_CLASS);
p.field_enum("Data", ident.data, &FLAGS_EI_DATA);
p.field_enum("Version", ident.version, &FLAGS_EV);
p.field_enum("OsAbi", ident.os_abi, &FLAGS_EI_OSABI);
p.field_enum("Class", ident.class, FLAGS_EI_CLASS);
p.field_enum("Data", ident.data, FLAGS_EI_DATA);
p.field_enum("Version", ident.version, FLAGS_EV);
p.field_enum("OsAbi", ident.os_abi, FLAGS_EI_OSABI);
p.field_hex("AbiVersion", ident.abi_version);
p.field("Unused", format!("{:X?}", ident.padding));
}
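
The readobj/elf.rs hunks remove the `&` in front of the FLAGS_* tables. Assuming those statics are already declared as slices (something like `&'static [Flag<u16>]`), writing `&FLAGS_ET` passes a `&&[Flag]` that auto-deref immediately unwraps, so clippy::needless_borrow prefers the bare name. A hedged sketch with invented Flag and field_enum stand-ins:

    struct Flag {
        value: u16,
        name: &'static str,
    }

    // Hypothetical flag table, analogous to the crate's FLAGS_ET.
    static FLAGS_ET: &[Flag] = &[
        Flag { value: 1, name: "REL" },
        Flag { value: 2, name: "EXEC" },
    ];

    fn field_enum(label: &str, value: u16, flags: &[Flag]) {
        let name = flags
            .iter()
            .find(|f| f.value == value)
            .map_or("UNKNOWN", |f| f.name);
        println!("{label}: {name}");
    }

    fn main() {
        // Passing `&FLAGS_ET` here would be a needless extra borrow.
        field_enum("Type", 2, FLAGS_ET);
    }
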
2 changes: 1 addition & 1 deletion crates/examples/src/readobj/mod.rs
@@ -7,7 +7,7 @@ use object::Endianness;

pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) {
let mut printer = Printer::new(w, e);
print_object(&mut printer, &*file);
print_object(&mut printer, file);
}

struct Printer<'a> {
16 changes: 8 additions & 8 deletions crates/examples/src/readobj/pe.rs
@@ -12,10 +12,10 @@ pub(super) fn print_coff(p: &mut Printer<'_>, data: &[u8]) {
let sections = header.sections(data, offset).print_err(p);
let symbols = header.symbols(data).print_err(p);
if let Some(ref sections) = sections {
print_sections(p, data, header.machine.get(LE), symbols.as_ref(), &sections);
print_sections(p, data, header.machine.get(LE), symbols.as_ref(), sections);
}
if let Some(ref symbols) = symbols {
print_symbols(p, sections.as_ref(), &symbols);
print_symbols(p, sections.as_ref(), symbols);
}
}
}
@@ -86,14 +86,14 @@ fn print_pe<Pe: ImageNtHeaders>(p: &mut Printer<'_>, data: &[u8]) {
print_sections(p, data, machine, symbols.as_ref(), sections);
}
if let Some(ref symbols) = symbols {
print_symbols(p, sections.as_ref(), &symbols);
print_symbols(p, sections.as_ref(), symbols);
}
if let Some(ref sections) = sections {
print_export_dir(p, data, &sections, &data_directories);
print_import_dir::<Pe>(p, data, &sections, &data_directories);
print_delay_load_dir::<Pe>(p, data, &sections, &data_directories);
print_reloc_dir(p, data, machine, &sections, &data_directories);
print_resource_dir(p, data, &sections, &data_directories);
print_export_dir(p, data, sections, &data_directories);
print_import_dir::<Pe>(p, data, sections, &data_directories);
print_delay_load_dir::<Pe>(p, data, sections, &data_directories);
print_reloc_dir(p, data, machine, sections, &data_directories);
print_resource_dir(p, data, sections, &data_directories);
}
}
}
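
In readobj/pe.rs the arguments were bound with `if let Some(ref sections)`, so they are already references; wrapping them in another `&` yields a double reference that clippy::needless_borrow flags. A small sketch under assumed types (the real crate's section table type differs):

    // Hypothetical stand-in for the PE section table.
    struct SectionTable {
        count: usize,
    }

    fn print_sections(sections: &SectionTable) {
        println!("{} sections", sections.count);
    }

    fn main() {
        let parsed = Some(SectionTable { count: 4 });
        if let Some(ref sections) = parsed {
            // `sections` is already `&SectionTable` here, so `&sections`
            // would add a second reference; pass the binding as-is.
            print_sections(sections);
        }
    }
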
1 change: 1 addition & 0 deletions src/pod.rs
@@ -14,6 +14,7 @@ type Result<T> = result::Result<T, ()>;

/// A trait for types that can safely be converted from and to byte slices.
///
/// # Safety
/// A type that is `Pod` must:
/// - be `#[repr(C)]` or `#[repr(transparent)]`
/// - have no invalid byte values
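
The src/pod.rs change only inserts a `# Safety` heading above the existing invariant list, so the doc comment is recognised as the safety contract of the unsafe `Pod` trait; clippy::missing_safety_doc warns when a public unsafe item's documentation lacks such a section. A hedged sketch of the pattern with an invented trait:

    /// A trait for plain-old-data types.
    ///
    /// # Safety
    /// Implementers must guarantee that every bit pattern is a valid value
    /// of the type, so reading it from arbitrary bytes cannot cause UB.
    pub unsafe trait PlainOldData: Sized {
        /// Size of the type in bytes.
        fn size() -> usize {
            core::mem::size_of::<Self>()
        }
    }

    // u32 satisfies the contract: any 4 bytes form a valid value.
    unsafe impl PlainOldData for u32 {}

    fn main() {
        assert_eq!(<u32 as PlainOldData>::size(), 4);
    }
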
14 changes: 7 additions & 7 deletions src/read/archive.rs
@@ -58,9 +58,9 @@ impl<'data, R: ReadRef<'data>> ArchiveFile<'data, R> {
.read_bytes(&mut tail, archive::MAGIC.len() as u64)
.read_error("Invalid archive size")?;

if magic == &archive::AIX_BIG_MAGIC {
if magic == archive::AIX_BIG_MAGIC {
return Self::parse_aixbig(data);
} else if magic != &archive::MAGIC {
} else if magic != archive::MAGIC {
return Err(Error("Unsupported archive identifier"));
}

@@ -273,9 +273,9 @@ impl<'data, R: ReadRef<'data>> Iterator for ArchiveMemberIterator<'data, R> {
}
Some(member)
}
Members::AixBig { ref mut index } => match *index {
&[] => return None,
&[ref first, ref rest @ ..] => {
Members::AixBig { ref mut index } => match **index {
[] => None,
[ref first, ref rest @ ..] => {
*index = rest;
let member = ArchiveMember::parse_aixbig_index(self.data, first);
if member.is_err() {
@@ -333,11 +333,11 @@ impl<'data> ArchiveMember<'data> {
*offset = offset.saturating_add(1);
}

let name = if header.name[0] == b'/' && (header.name[1] as char).is_digit(10) {
let name = if header.name[0] == b'/' && (header.name[1] as char).is_ascii_digit() {
// Read file name from the names table.
parse_sysv_extended_name(&header.name[1..], names)
.read_error("Invalid archive extended name offset")?
} else if &header.name[..3] == b"#1/" && (header.name[3] as char).is_digit(10) {
} else if &header.name[..3] == b"#1/" && (header.name[3] as char).is_ascii_digit() {
// Read file name from the start of the file data.
parse_bsd_extended_name(&header.name[3..], data, &mut file_offset, &mut file_size)
.read_error("Invalid archive extended name length")?
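
The archive.rs hunks bundle a few separate suggestions: the magic-number comparisons drop the extra `&` on the constant, the AIX index match switches to `**index` with plain slice patterns instead of `&[...]` patterns, and `is_digit(10)` becomes `is_ascii_digit()` (clippy::is_digit_ascii_radix). A sketch of the last two, using a made-up list of member names:

    fn main() {
        let members: &[&str] = &["lib.o", "main.o"];

        // Slice patterns apply to the slice itself; matching with `&[]` and
        // `&[first, ..]` patterns just adds a reference layer to strip again.
        match *members {
            [] => println!("empty archive"),
            [first, ref rest @ ..] => println!("first: {first}, {} more", rest.len()),
        }

        // `(b as char).is_digit(10)` spells out the radix that
        // `is_ascii_digit()` already implies.
        let name = b"/42";
        assert!((name[1] as char).is_ascii_digit());
    }
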
2 changes: 1 addition & 1 deletion src/read/elf/comdat.rs
@@ -34,7 +34,7 @@ where
type Item = ElfComdat<'data, 'file, Elf, R>;

fn next(&mut self) -> Option<Self::Item> {
while let Some((_index, section)) = self.iter.next() {
for (_index, section) in self.iter.by_ref() {
if let Some(comdat) = ElfComdat::parse(self.file, section) {
return Some(comdat);
}
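
elf/comdat.rs (and elf/segment.rs just below) replace `while let Some(x) = self.iter.next()` with a `for` loop over `self.iter.by_ref()`, which is the rewrite clippy::while_let_on_iterator suggests; `by_ref()` keeps the iterator usable after the loop. A standalone sketch:

    fn main() {
        let mut iter = 1..10;

        // The `while let Some(n) = iter.next()` form is just a `for` loop in
        // disguise; `by_ref()` lets the loop borrow the iterator so it can be
        // resumed after an early `break`.
        let mut first_even = None;
        for n in iter.by_ref() {
            if n % 2 == 0 {
                first_even = Some(n);
                break;
            }
        }

        assert_eq!(first_even, Some(2));
        // The remaining items are still available.
        assert_eq!(iter.next(), Some(3));
    }
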
2 changes: 1 addition & 1 deletion src/read/elf/segment.rs
@@ -34,7 +34,7 @@ where
type Item = ElfSegment<'data, 'file, Elf, R>;

fn next(&mut self) -> Option<Self::Item> {
while let Some(segment) = self.iter.next() {
for segment in self.iter.by_ref() {
if segment.p_type(self.file.endian) == elf::PT_LOAD {
return Some(ElfSegment {
file: self.file,
6 changes: 3 additions & 3 deletions src/read/pe/resource.rs
@@ -18,7 +18,7 @@ impl<'data> ResourceDirectory<'data> {

/// Parses the root resource directory.
pub fn root(&self) -> Result<ResourceDirectoryTable<'data>> {
ResourceDirectoryTable::parse(&self.data, 0)
ResourceDirectoryTable::parse(self.data, 0)
}
}

@@ -93,13 +93,13 @@ impl pe::ImageResourceDirectoryEntry {
) -> Result<ResourceDirectoryEntryData<'data>> {
if self.is_table() {
ResourceDirectoryTable::parse(section.data, self.data_offset())
.map(|t| ResourceDirectoryEntryData::Table(t))
.map(ResourceDirectoryEntryData::Table)
} else {
section
.data
.read_at::<pe::ImageResourceDataEntry>(self.data_offset().into())
.read_error("Invalid resource entry")
.map(|d| ResourceDirectoryEntryData::Data(d))
.map(ResourceDirectoryEntryData::Data)
}
}
}
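
pe/resource.rs replaces closures like `|t| ResourceDirectoryEntryData::Table(t)` with the tuple-variant constructor itself, which is already a function of the right signature; this is clippy::redundant_closure. A sketch with an invented enum:

    #[derive(Debug)]
    enum EntryData {
        Table(u32),
        Data(u32),
    }

    fn main() {
        let offsets = [1u32, 2, 3];

        // `.map(|o| EntryData::Table(o))` wraps the constructor in a closure;
        // the variant name is already a `fn(u32) -> EntryData`.
        let tables: Vec<EntryData> = offsets.iter().copied().map(EntryData::Table).collect();
        let data: Vec<EntryData> = offsets.iter().copied().map(EntryData::Data).collect();

        println!("{tables:?} {data:?}");
    }
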
2 changes: 1 addition & 1 deletion src/read/xcoff/relocation.rs
@@ -53,7 +53,7 @@ where
let size = (relocation.r_rsize() & 0x3F) + 1;
let target = RelocationTarget::Symbol(SymbolIndex(relocation.r_symndx() as usize));
(
u64::from(relocation.r_vaddr().into()),
relocation.r_vaddr().into(),
Relocation {
kind,
encoding,
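
xcoff/relocation.rs (and the two section.rs hunks that follow) drop conversions whose input already has the target type; clippy::useless_conversion flags `u64::from(x)` or `x.into()` when `x` is already a `u64`. A minimal sketch with a generic address type:

    fn print_addr<W: Into<u64>>(r_vaddr: W) {
        // Writing `u64::from(r_vaddr.into())` would convert to u64 twice;
        // a single `.into()` is enough.
        let addr: u64 = r_vaddr.into();
        println!("{addr:#x}");
    }

    fn main() {
        print_addr(0x1000u32); // u32 -> u64, the one conversion that is needed
        print_addr(0x2000u64); // already u64, `.into()` is the identity here
    }
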
6 changes: 3 additions & 3 deletions src/read/xcoff/section.rs
@@ -190,7 +190,7 @@ where

fn flags(&self) -> SectionFlags {
SectionFlags::Xcoff {
s_flags: self.section.s_flags().into(),
s_flags: self.section.s_flags(),
}
}

@@ -298,7 +298,7 @@ pub trait SectionHeader: Debug + Pod {
/// Returns `Err` for invalid values.
fn data<'data, R: ReadRef<'data>>(&self, data: R) -> result::Result<&'data [u8], ()> {
if let Some((offset, size)) = self.file_range() {
data.read_bytes_at(offset.into(), size.into())
data.read_bytes_at(offset, size)
} else {
Ok(&[])
}
@@ -420,7 +420,7 @@ impl SectionHeader for xcoff::SectionHeader64 {
///
/// `data` must be the entire file data.
fn relocations<'data, R: ReadRef<'data>>(&self, data: R) -> read::Result<&'data [Self::Rel]> {
data.read_slice_at(self.s_relptr().into(), self.s_nreloc() as usize)
data.read_slice_at(self.s_relptr(), self.s_nreloc() as usize)
.read_error("Invalid XCOFF relocation offset or number")
}
}
4 changes: 2 additions & 2 deletions src/read/xcoff/symbol.rs
@@ -104,7 +104,7 @@ where
return Err(Error("Invalid index for file auxiliary symbol."));
}
}
return Ok(aux_file);
Ok(aux_file)
}

/// Return the csect auxiliary symbol.
@@ -116,7 +116,7 @@ where
return Err(Error("Invalid index/offset for csect auxiliary symbol."));
}
}
return Ok(aux_csect);
Ok(aux_csect)
}

/// Return true if the symbol table is empty.
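
xcoff/symbol.rs drops the trailing `return` keywords: when the final expression of a function is the value being returned, clippy::needless_return prefers the bare tail expression. A sketch:

    struct AuxFile {
        name_offset: u32,
    }

    fn parse_aux_file(index: usize) -> Result<AuxFile, &'static str> {
        if index == 0 {
            // Early exits still use `return`.
            return Err("Invalid index for file auxiliary symbol.");
        }
        // Tail position: no `return` (and no trailing semicolon) needed.
        Ok(AuxFile { name_offset: index as u32 })
    }

    fn main() {
        match parse_aux_file(3) {
            Ok(aux) => println!("name offset: {}", aux.name_offset),
            Err(err) => println!("error: {err}"),
        }
    }
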
