PIE binaries & fix corrupted framebuffer

2025-09-26 20:24:23 -06:00
parent 07b4905a06
commit 180d9e82d2
11 changed files with 223 additions and 131 deletions


@@ -1,73 +1,175 @@
#![allow(static_mut_refs)]
use core::ptr;
use crate::{
abi,
storage::{File, SDCARD},
};
use abi_sys::{CallAbiTable, EntryFn};
use alloc::{vec, vec::Vec};
use bumpalo::Bump;
use embedded_sdmmc::ShortFileName;
use goblin::{
elf::{
header::header32::Header,
program_header::program_header32::{PT_LOAD, ProgramHeader},
section_header::SHT_SYMTAB,
reloc::R_ARM_RELATIVE,
section_header::{SHT_REL, SHT_SYMTAB},
},
elf32::{section_header::SectionHeader, sym::Sym},
elf32::{header, reloc::Rel, section_header::SectionHeader, sym::Sym},
};
use strum::IntoEnumIterator;
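// a 32-bit ELF header (Elf32_Ehdr) is always 52 bytes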
const ELF32_HDR_SIZE: usize = 52;
// userland ram region defined in memory.x
unsafe extern "C" {
static __userapp_start__: u8;
static __userapp_end__: u8;
}
pub async unsafe fn load_binary(name: &ShortFileName) -> Result<EntryFn, &str> {
pub async unsafe fn load_binary(name: &ShortFileName) -> Option<(EntryFn, Bump)> {
let mut sd_lock = SDCARD.get().lock().await;
let sd = sd_lock.as_mut().unwrap();
let error = "";
let mut entry = 0;
let mut header_buf = [0; ELF32_HDR_SIZE];
sd.read_file(name, |mut file| {
file.read(&mut header_buf).unwrap();
let elf_header = Header::from_bytes(&header_buf);
let (entry, bump) = sd
.read_file(name, |mut file| {
file.read(&mut header_buf).unwrap();
let elf_header = Header::from_bytes(&header_buf);
let mut program_headers_buf = vec![0_u8; elf_header.e_phentsize as usize];
for i in 1..=elf_header.e_phnum {
file.seek_from_start(elf_header.e_phoff + (elf_header.e_phentsize * i) as u32)
.unwrap();
file.read(&mut program_headers_buf).unwrap();
let ph = cast_phdr(&program_headers_buf);
if ph.p_type == PT_LOAD {
load_segment(&mut file, &ph).unwrap()
// reject non-PIE
if elf_header.e_type != header::ET_DYN {
return None;
}
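// only ET_DYN (PIE) images carry the relocations needed to run from whatever address the bump allocator hands back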
}
patch_abi(&elf_header, &mut file).unwrap();
let mut ph_buf = vec![0_u8; elf_header.e_phentsize as usize];
// TODO: dynamically search for abi table
let (total_size, min_vaddr, _max_vaddr) =
total_loadable_size(&mut file, &elf_header, &mut ph_buf);
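// total_size spans [min_vaddr, max_vaddr) across all PT_LOAD segments, so one contiguous bump allocation can back the whole image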
entry = elf_header.e_entry as u32;
})
.await
.unwrap();
defmt::info!("total_size: {}", total_size);
let bump = Bump::with_capacity(total_size);
let base = bump.alloc_slice_fill_default::<u8>(total_size);
defmt::info!("base ptr: {}", base.as_ptr());
if entry != 0 {
Ok(unsafe { core::mem::transmute(entry) })
} else {
Err(error)
}
// load each segment into bump, relative to base_ptr
for i in 0..elf_header.e_phnum {
file.seek_from_start(elf_header.e_phoff + (elf_header.e_phentsize * i) as u32)
.unwrap();
file.read(&mut ph_buf).unwrap();
let ph = cast_phdr(&ph_buf);
let seg_offset = (ph.p_vaddr - min_vaddr) as usize;
defmt::info!("segment offset {}", seg_offset);
let mut segment = &mut base[seg_offset..seg_offset + ph.p_memsz as usize];
if ph.p_type == PT_LOAD {
load_segment(&mut file, &ph, &mut segment).unwrap();
}
}
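// with every segment copied in, walk the section headers and apply REL relocations against the new base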
let mut sh_buf = vec![0_u8; elf_header.e_shentsize as usize];
for i in 0..elf_header.e_shnum {
file.seek_from_start(elf_header.e_shoff + (elf_header.e_shentsize * i) as u32)
.unwrap();
file.read(&mut sh_buf).unwrap();
let sh = cast_shdr(&sh_buf);
match sh.sh_type {
SHT_REL => {
apply_relocations(&sh, min_vaddr, base.as_mut_ptr(), &mut file).unwrap();
}
_ => {}
}
}
patch_abi(&elf_header, base.as_mut_ptr(), min_vaddr, &mut file).unwrap();
defmt::info!("elf entry point before offset: {}", elf_header.e_entry);
// entry pointer is base_ptr + (entry - min_vaddr)
let entry_ptr: EntryFn = unsafe {
core::mem::transmute(base.as_ptr().add((elf_header.e_entry - min_vaddr) as usize))
};
defmt::info!("entry ptr: {}", entry_ptr);
Some((entry_ptr, bump))
})
.await
.expect("Failed to read file")?;
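// the Bump owns the loaded image: it must outlive any call through the entry pointer, since dropping it frees that memory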
Some((entry, bump))
}
fn patch_abi(elf_header: &Header, file: &mut File) -> Result<(), ()> {
fn load_segment(file: &mut File, ph: &ProgramHeader, segment: &mut [u8]) -> Result<(), ()> {
let filesz = ph.p_filesz as usize;
let memsz = ph.p_memsz as usize;
// read file contents
let mut remaining = filesz;
let mut dst_offset = 0;
let mut file_offset = ph.p_offset;
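// copy the file-backed bytes in 512-byte chunks (a typical SD sector size)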
let mut buf = [0u8; 512];
while remaining > 0 {
let to_read = core::cmp::min(remaining, buf.len());
file.seek_from_start(file_offset).unwrap();
file.read(&mut buf[..to_read]).unwrap();
segment[dst_offset..dst_offset + to_read].copy_from_slice(&buf[..to_read]);
remaining -= to_read;
dst_offset += to_read;
file_offset += to_read as u32;
}
// zero BSS if needed
if memsz > filesz {
segment[filesz..].fill(0);
}
Ok(())
}
fn apply_relocations(
sh: &SectionHeader,
min_vaddr: u32,
base: *mut u8,
file: &mut File,
) -> Result<(), ()> {
let mut reloc = [0_u8; 8];
let num_relocs = sh.sh_size as usize / sh.sh_entsize as usize;
for i in 0..num_relocs {
file.seek_from_start(sh.sh_offset + (i as u32 * 8)).unwrap();
file.read(&mut reloc).unwrap();
let rel = cast_rel(&reloc);
let reloc_type = rel.r_info & 0xff;
let reloc_addr = unsafe { base.add((rel.r_offset - min_vaddr) as usize) as *mut u32 };
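// r_offset is a link-time vaddr, so rebase it into the bump the same way the segments were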
match reloc_type {
R_ARM_RELATIVE => {
// REL: add base to the word already stored there
unsafe {
let val = ptr::read_unaligned(reloc_addr);
ptr::write_unaligned(reloc_addr, val.wrapping_add(base as u32));
}
}
_ => {
defmt::warn!("Unsupported relocation type: {}", reloc_type);
return Err(());
}
}
}
Ok(())
}
fn patch_abi(
elf_header: &Header,
base: *mut u8,
min_vaddr: u32,
file: &mut File,
) -> Result<(), ()> {
for i in 1..=elf_header.e_shnum {
let sh = read_section(file, &elf_header, i.into());
@@ -101,7 +203,11 @@ fn patch_abi(elf_header: &Header, file: &mut File) -> Result<(), ()> {
let symbol_name = core::str::from_utf8(&name).unwrap();
if symbol_name == "CALL_ABI_TABLE" {
let table_base = sym.st_value as *mut usize;
let table_base =
unsafe { base.add((sym.st_value as usize) - min_vaddr as usize) }
as *mut usize;
defmt::info!("CALL_ABI_TABLE st_value: {:x}", sym.st_value);
defmt::info!("table base {}", table_base);
for (idx, call) in CallAbiTable::iter().enumerate() {
let ptr = match call {
@@ -113,6 +219,13 @@ fn patch_abi(elf_header: &Header, file: &mut File) -> Result<(), ()> {
CallAbiTable::GenRand => abi::gen_rand as usize,
};
unsafe {
defmt::info!(
"table {:?}#{} @ {} -> 0x{:X}",
call,
idx,
table_base.wrapping_add(idx),
ptr
);
table_base.add(idx as usize).write(ptr);
}
}
@@ -124,6 +237,33 @@ fn patch_abi(elf_header: &Header, file: &mut File) -> Result<(), ()> {
Err(())
}
fn total_loadable_size(
file: &mut File,
elf_header: &Header,
ph_buf: &mut [u8],
) -> (usize, u32, u32) {
let mut min_vaddr = u32::MAX;
let mut max_vaddr = 0u32;
for i in 0..elf_header.e_phnum {
file.seek_from_start(elf_header.e_phoff + (elf_header.e_phentsize * i) as u32)
.unwrap();
file.read(ph_buf).unwrap();
let ph = cast_phdr(&ph_buf);
if ph.p_type == PT_LOAD {
if ph.p_vaddr < min_vaddr {
min_vaddr = ph.p_vaddr;
}
if ph.p_vaddr + ph.p_memsz > max_vaddr {
max_vaddr = ph.p_vaddr + ph.p_memsz;
}
}
}
let total_size = (max_vaddr - min_vaddr) as usize;
(total_size, min_vaddr, max_vaddr)
}
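// e.g. (hypothetical layout) PT_LOAD segments covering 0x0..0x9000 and 0x9000..0x9200 yield (0x9200, 0x0, 0x9200)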
fn read_section(file: &mut File, elf_header: &Header, section: u32) -> SectionHeader {
let mut section_header_buf = vec![0_u8; elf_header.e_shentsize as usize];
@@ -134,57 +274,6 @@ fn read_section(file: &mut File, elf_header: &Header, section: u32) -> SectionHeader {
cast_shdr(&section_header_buf)
}
fn load_segment(file: &mut File, ph: &ProgramHeader) -> Result<(), ()> {
let dst_start = ph.p_vaddr as *mut u8;
let filesz = ph.p_filesz as usize;
let memsz = ph.p_memsz as usize;
let vaddr = ph.p_vaddr as usize;
let mut remaining = filesz;
let mut dst_ptr = dst_start;
let mut file_offset = ph.p_offset;
let seg_start = vaddr;
let seg_end = vaddr + memsz;
// Bounds check: make sure segment fits inside payload region
let user_start = unsafe { &__userapp_start__ as *const u8 as usize };
let user_end = unsafe { &__userapp_end__ as *const u8 as usize };
if seg_start < user_start || seg_end > user_end {
panic!(
"Segment out of bounds: {:x}..{:x} not within {:x}..{:x}",
seg_start, seg_end, user_start, user_end
);
}
// Buffer for chunked reads (512 bytes is typical SD sector size)
let mut buf = [0u8; 512];
while remaining > 0 {
let to_read = core::cmp::min(remaining, buf.len());
// Read chunk from file
file.seek_from_start(file_offset).unwrap();
file.read(&mut buf[..to_read]).unwrap();
unsafe {
// Copy chunk directly into destination memory
core::ptr::copy_nonoverlapping(buf.as_ptr(), dst_ptr, to_read);
dst_ptr = dst_ptr.add(to_read);
}
remaining -= to_read;
file_offset += to_read as u32;
}
// Zero BSS (memsz - filesz)
if memsz > filesz {
unsafe {
core::ptr::write_bytes(dst_ptr, 0, memsz - filesz);
}
}
Ok(())
}
fn cast_phdr(buf: &[u8]) -> ProgramHeader {
assert!(buf.len() >= core::mem::size_of::<ProgramHeader>());
unsafe { core::ptr::read(buf.as_ptr() as *const ProgramHeader) }
@@ -199,3 +288,8 @@ fn cast_sym(buf: &[u8]) -> Sym {
assert!(buf.len() >= core::mem::size_of::<Sym>());
unsafe { core::ptr::read(buf.as_ptr() as *const Sym) }
}
fn cast_rel(buf: &[u8]) -> Rel {
assert!(buf.len() >= core::mem::size_of::<Rel>());
unsafe { core::ptr::read(buf.as_ptr() as *const Rel) }
}
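// note: these casts assume the byte buffers are aligned well enough for the target structs; ptr::read_unaligned would remove that assumption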