//! Run-time dynamic linker: loads ELF objects (and their `DT_NEEDED`
//! dependencies) into memory, resolves symbols, applies x86-64 relocations,
//! sets up TLS masters, and hands back the entry point of the primary object.

use alloc::{
    boxed::Box,
    collections::{BTreeMap, BTreeSet},
    rc::Rc,
    string::{String, ToString},
    vec::Vec,
};
use core::{
    cell::RefCell,
    mem::{size_of, swap, transmute},
    ptr, slice,
};
use goblin::{
    elf::{
        header::ET_DYN,
        program_header,
        r#dyn::{Dyn, DT_DEBUG, DT_RUNPATH},
        reloc, sym, Elf,
    },
    error::{Error, Result},
};

use crate::{
    c_str::CString,
    fs::File,
    header::{errno::STR_ERROR, fcntl, sys_mman, unistd},
    io::Read,
    platform::{errno, types::c_void},
};

use super::{
    access::accessible,
    callbacks::LinkerCallbacks,
    debug::{RTLDDebug, RTLDState, _dl_debug_state, _r_debug},
    library::{DepTree, Library},
    tcb::{Master, Tcb},
    PAGE_SIZE,
};

/// Separator between entries of a library search-path list (e.g. LD_LIBRARY_PATH).
#[cfg(target_os = "redox")]
pub const PATH_SEP: char = ';';

#[cfg(target_os = "linux")]
pub const PATH_SEP: char = ':';

/// Description of an already-mapped primary executable handed to [`Linker::link`],
/// so the linker reuses its existing mapping instead of mmapping it again.
pub struct DSO {
    // Name as it appears among the loaded objects (matched against elf names).
    pub name: String,
    // Address the object is already mapped at.
    pub base_addr: usize,
    // Entry point address (currently unused by `_link`, which recomputes it).
    pub entry_point: usize,
}

/// A resolved dynamic symbol: `base` is the mapping base (0 for non-PIE
/// objects, whose st_value is already absolute), `value` is st_value,
/// `size` is st_size.
#[derive(Clone, Copy, Debug)]
pub struct Symbol {
    pub value: usize,
    pub base: usize,
    pub size: usize,
}
impl Symbol {
    /// Absolute address of the symbol (base + value).
    pub fn as_ptr(self) -> *mut c_void {
        (self.base + self.value) as *mut c_void
    }
}

pub struct Linker {
    // Used by load
    /// Library path to search when loading library by name
    default_library_path: String,
    /// Optional extra search path (takes precedence over RUNPATH and the default).
    ld_library_path: Option<String>,
    /// The "root" library space: objects belonging to the main program.
    root: Library,
    /// When true, progress is printed with `println!` at every step.
    verbose: bool,
    /// Running offset added to TLS module indices across successive `_link` calls.
    tls_index_offset: usize,
    /// Secondary library spaces created by `load_library`, keyed by their id.
    lib_spaces: BTreeMap<usize, Library>,
    /// Next id to hand out for a new library space (starts at root_id + 1).
    counter: usize,
    pub cbs: Rc<RefCell<LinkerCallbacks>>,
}

// Id reserved for the root library space. NOTE: because this is a const, it
// acts as a *constant pattern* in the `match` arms below (`Some(root_id)`
// matches only `Some(1)`, it does not bind a variable).
const root_id: usize = 1;

impl Linker {
    /// Create a linker with the default search path `/lib`.
    pub fn new(ld_library_path: Option<String>, verbose: bool) -> Self {
        Self {
            default_library_path: "/lib".to_string(),
            ld_library_path: ld_library_path,
            root: Library::new(),
            verbose,
            tls_index_offset: 0,
            lib_spaces: BTreeMap::new(),
            counter: root_id + 1,
            cbs: Rc::new(RefCell::new(LinkerCallbacks::new())),
        }
    }

    /// Load `name` from `path` (plus all of its dependencies) into the root
    /// library space and record the resulting dependency tree.
    pub fn load(&mut self, name: &str, path: &str) -> Result<()> {
        let mut lib: Library = Library::new();
        // Temporarily move the root out so load_recursive can consult it
        // (via self) while mutating `lib`.
        swap(&mut lib, &mut self.root);
        // NOTE(review): if load_recursive fails, the `?` returns before the
        // second swap, leaving `self.root` as a fresh empty Library — confirm
        // this is intended.
        lib.dep_tree = self.load_recursive(name, path, &mut lib)?;
        swap(&mut lib, &mut self.root);
        if self.verbose {
            println!("Dep tree: {:#?}", self.root.dep_tree);
        }
        return Ok(());
    }

    /// Unmap every mapping belonging to library space `libspace` and drop it.
    pub fn unload(&mut self, libspace: usize) {
        if let Some(lib) = self.lib_spaces.remove(&libspace) {
            for (_, (_, mmap)) in lib.mmaps {
                // munmap result is deliberately ignored (best-effort cleanup).
                unsafe { sys_mman::munmap(mmap.as_mut_ptr() as *mut c_void, mmap.len()) };
            }
        }
    }

    /// Read the file at `path`, then load it and (recursively) its
    /// dependencies into `lib`. `lib.cir_dep` is used as the visiting set to
    /// detect circular dependencies.
    fn load_recursive(&mut self, name: &str, path: &str, lib: &mut Library) -> Result<DepTree> {
        if self.verbose {
            println!("load {}: {}", name, path);
        }
        if lib.cir_dep.contains(name) {
            return Err(Error::Malformed(format!(
                "Circular dependency: {} is a dependency of itself",
                name
            )));
        }

        let mut deps = DepTree::new(name.to_string());
        let mut data = Vec::new();
        lib.cir_dep.insert(name.to_string());
        let path_c = CString::new(path)
            .map_err(|err| Error::Malformed(format!("invalid path '{}': {}", path, err)))?;

        {
            let flags = fcntl::O_RDONLY | fcntl::O_CLOEXEC;
            let mut file = File::open(&path_c, flags)
                .map_err(|err| Error::Malformed(format!("failed to open '{}': {}", path, err)))?;

            file.read_to_end(&mut data)
                .map_err(|err| Error::Malformed(format!("failed to read '{}': {}", path, err)))?;
        }
        deps.deps = self.load_data(name, data.into_boxed_slice(), lib)?;
        lib.cir_dep.remove(name);
        Ok(deps)
    }

    /// Parse `data` as ELF, record its RUNPATH, load each `DT_NEEDED` library,
    /// and stash the raw object bytes in `lib.objects` keyed by soname (or
    /// `name` if the object has no soname).
    fn load_data(
        &mut self,
        name: &str,
        data: Box<[u8]>,
        lib: &mut Library,
    ) -> Result<Vec<DepTree>> {
        let elf = Elf::parse(&data)?;
        //println!("{:#?}", elf);

        // search for RUNPATH
        lib.runpath = if let Some(dynamic) = elf.dynamic {
            let entry = dynamic.dyns.iter().find(|d| d.d_tag == DT_RUNPATH);
            match entry {
                Some(entry) => {
                    // d_val is an offset into the dynamic string table.
                    let path = elf
                        .dynstrtab
                        .get(entry.d_val as usize)
                        .ok_or(Error::Malformed("Missing RUNPATH in dynstrtab".to_string()))??;
                    Some(path.to_string())
                }
                _ => None,
            }
        } else {
            None
        };

        let mut deps = Vec::new();
        for library in elf.libraries.iter() {
            if let Some(dep) = self._load_library(library, lib)? {
                deps.push(dep);
            }
        }
        let key = match elf.soname {
            Some(soname) => soname,
            _ => name,
        };
        if !lib.objects.contains_key(key) {
            lib.objects.insert(key.to_string(), data);
        }
        return Ok(deps);
    }

    /// Public dlopen-style entry: load `name` into a fresh library space and
    /// return its id. With `None`, return the root space id.
    pub fn load_library(&mut self, name: Option<&str>) -> Result<usize> {
        match name {
            Some(name) => {
                let mut lib = Library::new();
                self._load_library(name, &mut lib)?;
                let ret = self.counter;
                self.lib_spaces.insert(ret, lib);
                self.counter += 1;
                return Ok(ret);
            }
            None => return Ok(root_id),
        }
    }

    /// Locate and load `name` unless it is already resolved in `lib` or the
    /// root space. A name containing '/' is treated as a path; otherwise the
    /// search order is LD_LIBRARY_PATH, the object's RUNPATH, then the
    /// default library path.
    fn _load_library(&mut self, name: &str, lib: &mut Library) -> Result<Option<DepTree>> {
        if lib.objects.contains_key(name) || self.root.objects.contains_key(name) {
            // It should be previously resolved so we don't need to worry about it
            Ok(None)
        } else if name.contains('/') {
            Ok(Some(self.load_recursive(name, name, lib)?))
        } else {
            let mut paths = Vec::new();
            if let Some(ld_library_path) = &self.ld_library_path {
                paths.push(ld_library_path);
            }
            if let Some(runpath) = &lib.runpath {
                paths.push(runpath);
            }
            paths.push(&self.default_library_path);
            for part in paths.iter() {
                let path = if part.is_empty() {
                    format!("./{}", name)
                } else {
                    format!("{}/{}", part, name)
                };
                if self.verbose {
                    println!("check {}", path);
                }

                if accessible(&path, unistd::F_OK) == 0 {
                    return Ok(Some(self.load_recursive(name, &path, lib)?));
                }
            }

            Err(Error::Malformed(format!("failed to locate '{}'", name)))
        }
    }

    /// Collect the defined (st_value != 0) STB_GLOBAL and STB_WEAK dynamic
    /// symbols of `elf`, returned as (globals, weak_syms). For PIE objects
    /// the symbol base is the mapping address; for non-PIE it is 0 since
    /// st_value is already absolute.
    fn collect_syms(
        elf: &Elf,
        mmap: &[u8],
        verbose: bool,
    ) -> Result<(BTreeMap<String, Symbol>, BTreeMap<String, Symbol>)> {
        let mut globals = BTreeMap::new();
        let mut weak_syms = BTreeMap::new();
        for sym in elf.dynsyms.iter() {
            let bind = sym.st_bind();
            // Skip undefined symbols and any binding other than global/weak.
            if sym.st_value == 0 || ![sym::STB_GLOBAL, sym::STB_WEAK].contains(&bind) {
                continue;
            }
            let name: String;
            let value: Symbol;
            if let Some(name_res) = elf.dynstrtab.get(sym.st_name) {
                name = name_res?.to_string();
                value = if is_pie_enabled(elf) {
                    Symbol {
                        base: mmap.as_ptr() as usize,
                        value: sym.st_value as usize,
                        size: sym.st_size as usize,
                    }
                } else {
                    Symbol {
                        base: 0,
                        value: sym.st_value as usize,
                        size: sym.st_size as usize,
                    }
                };
            } else {
                continue;
            }
            match sym.st_bind() {
                sym::STB_GLOBAL => {
                    if verbose {
                        println!(" global {}: {:x?} = {:p}", &name, sym, value.as_ptr());
                    }
                    globals.insert(name, value);
                }
                sym::STB_WEAK => {
                    if verbose {
                        println!(" weak {}: {:x?} = {:p}", &name, sym, value.as_ptr());
                    }
                    weak_syms.insert(name, value);
                }
                // Filtered above, so only global/weak can reach this match.
                _ => unreachable!(),
            }
        }
        return Ok((globals, weak_syms));
    }

    /// Look up `name` in the given library space (root when `None`).
    pub fn get_sym(&self, name: &str, libspace: Option<usize>) -> Option<Symbol> {
        match libspace {
            // `root_id` is a const pattern: this arm matches None or Some(1).
            None | Some(root_id) => self.root.get_sym(name),
            Some(id) => {
                let lib = self.lib_spaces.get(&id)?;
                lib.get_sym(name)
            }
        }
    }

    /// Run every `.init_array` entry of the space's dependency tree,
    /// dependencies first.
    pub fn run_init(&self, libspace: Option<usize>) -> Result<()> {
        match libspace {
            Some(id) => {
                let lib = self.lib_spaces.get(&id).unwrap();
                self.run_tree(&lib, &lib.dep_tree, ".init_array")
            }
            None => self.run_tree(&self.root, &self.root.dep_tree, ".init_array"),
        }
    }

    /// Run every `.fini_array` entry of the space's dependency tree. The root
    /// space id is a no-op here (const pattern — matches only Some(1)).
    pub fn run_fini(&self, libspace: Option<usize>) -> Result<()> {
        match libspace {
            Some(root_id) => return Ok(()),
            Some(id) => {
                let lib = self.lib_spaces.get(&id).unwrap();
                self.run_tree(&lib, &lib.dep_tree, ".fini_array")
            }
            None => {
                //TODO we first need to deinitialize all the loaded libraries first!
                self.run_tree(&self.root, &self.root.dep_tree, ".fini_array")
            }
        }
    }

    /// Depth-first post-order walk of `root`: for each object, find the
    /// section named `tree_name` and call every 8-byte function pointer in it
    /// via `call_inits_finis` (assumes 64-bit pointers).
    fn run_tree(&self, lib: &Library, root: &DepTree, tree_name: &str) -> Result<()> {
        for node in root.deps.iter() {
            self.run_tree(lib, node, tree_name)?;
        }
        if self.verbose {
            println!("running {} {}", tree_name, &root.name);
        }
        let (_, mmap) = match lib.mmaps.get(&root.name) {
            Some(some) => some,
            // Object was never mapped (e.g. already linked elsewhere): nothing to run.
            None => return Ok(()),
        };
        let elf = Elf::parse(lib.objects.get(&root.name).unwrap())?;
        for section in &elf.section_headers {
            let name = match elf.shdr_strtab.get(section.sh_name) {
                Some(x) => match x {
                    Ok(y) => y,
                    _ => continue,
                },
                _ => continue,
            };
            if name == tree_name {
                let addr = if is_pie_enabled(&elf) {
                    mmap.as_ptr() as usize + section.vm_range().start
                } else {
                    section.vm_range().start
                };
                for i in (0..section.sh_size).step_by(8) {
                    unsafe { call_inits_finis(addr + i as usize) };
                }
            }
        }
        return Ok(());
    }

    /// Map, relocate and protect every not-yet-linked object of the chosen
    /// library space. Returns the entry point of `primary_opt` if one was
    /// named. `dso` describes an already-mapped primary executable.
    pub fn link(
        &mut self,
        primary_opt: Option<&str>,
        dso: Option<DSO>,
        libspace: Option<usize>,
    ) -> Result<Option<usize>> {
        match libspace {
            Some(id) => {
                // Move the space out so _link can borrow self mutably too.
                let mut lib = self.lib_spaces.remove(&id).unwrap();
                let res = self._link(primary_opt, dso, &mut lib);
                self.lib_spaces.insert(id, lib);
                res
            }
            None => {
                let mut lib = Library::new();
                swap(&mut lib, &mut self.root);
                let res = self._link(primary_opt, dso, &mut lib);
                swap(&mut lib, &mut self.root);
                res
            }
        }
    }

    /// The heavy lifting behind [`Linker::link`]. Phases, in order:
    /// 1. parse every unlinked object, 2. mmap each object and collect its
    /// global/weak symbols (accumulating TLS sizes), 3. copy segment data and
    /// build TLS masters, 4. apply relocations, overwrite DT_DEBUG, and
    /// mprotect pages, 5. activate TLS, 6. run IRELATIVE resolvers and
    /// re-protect, returning the primary's entry point.
    pub fn _link(
        &mut self,
        primary_opt: Option<&str>,
        dso: Option<DSO>,
        lib: &mut Library,
    ) -> Result<Option<usize>> {
        // Tell the debugger (gdb) that the link map is about to change.
        unsafe { _r_debug.state = RTLDState::RT_ADD };
        _dl_debug_state();
        let mut skip_list = BTreeSet::new();
        let elfs = {
            let mut elfs = BTreeMap::new();
            for (name, data) in lib.objects.iter() {
                // Skip already linked libraries
                if !lib.mmaps.contains_key(&*name) && !self.root.mmaps.contains_key(&*name) {
                    elfs.insert(name.as_str(), Elf::parse(&data)?);
                } else {
                    skip_list.insert(name.as_str());
                }
            }
            elfs
        };

        // Load all ELF files into memory and find all globals
        let mut tls_primary = 0;
        let mut tls_size = 0;
        for (elf_name, elf) in elfs.iter() {
            if skip_list.contains(elf_name) {
                continue;
            }
            if self.verbose {
                println!("map {}", elf_name);
            }
            // Binding used only as an existence check for the object bytes.
            let object = match lib.objects.get(*elf_name) {
                Some(some) => some,
                None => continue,
            };
            // data for struct LinkMap
            let mut l_ld = 0;
            // Calculate virtual memory bounds
            let bounds = {
                let mut bounds_opt: Option<(usize, usize)> = None;
                for ph in elf.program_headers.iter() {
                    // Page-align each segment: voff is the offset within the
                    // first page, vsize rounds the segment up to whole pages.
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    match ph.p_type {
                        program_header::PT_DYNAMIC => {
                            l_ld = ph.p_vaddr;
                        }
                        program_header::PT_LOAD => {
                            if self.verbose {
                                println!(" load {:#x}, {:#x}: {:x?}", vaddr, vsize, ph);
                            }
                            // Grow the (min, max) bounds to cover this segment.
                            if let Some(ref mut bounds) = bounds_opt {
                                if vaddr < bounds.0 {
                                    bounds.0 = vaddr;
                                }
                                if vaddr + vsize > bounds.1 {
                                    bounds.1 = vaddr + vsize;
                                }
                            } else {
                                bounds_opt = Some((vaddr, vaddr + vsize));
                            }
                        }
                        program_header::PT_TLS => {
                            if self.verbose {
                                println!(" load tls {:#x}: {:x?}", vsize, ph);
                            }
                            tls_size += vsize;
                            if Some(*elf_name) == primary_opt {
                                tls_primary += vsize;
                            }
                        }
                        _ => (),
                    }
                }
                match bounds_opt {
                    Some(some) => some,
                    // No PT_LOAD at all: nothing to map for this object.
                    None => continue,
                }
            };
            if self.verbose {
                println!(" bounds {:#x}, {:#x}", bounds.0, bounds.1);
            }
            // Allocate memory
            let mmap = unsafe {
                // Is this object the already-mapped primary executable?
                let same_elf = if let Some(prog) = dso.as_ref() {
                    if prog.name == *elf_name {
                        true
                    } else {
                        false
                    }
                } else {
                    false
                };
                if same_elf {
                    let addr = dso.as_ref().unwrap().base_addr;
                    let size = if is_pie_enabled(&elf) {
                        bounds.1
                    } else {
                        bounds.1 - bounds.0
                    };

                    // Fill the gaps in the binary
                    let mut ranges = Vec::new();
                    for ph in elf.program_headers.iter() {
                        if ph.p_type == program_header::PT_LOAD {
                            let voff = ph.p_vaddr as usize % PAGE_SIZE;
                            let vaddr = ph.p_vaddr as usize - voff;
                            let vsize = ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE)
                                * PAGE_SIZE;
                            // Store ranges relative to the mapping base.
                            if is_pie_enabled(&elf) {
                                ranges.push((vaddr, vsize));
                            } else {
                                ranges.push((vaddr - addr, vsize));
                            }
                        }
                    }
                    ranges.sort();
                    // Map anonymous pages over any hole between consecutive
                    // PT_LOAD ranges so [addr, addr+size) is fully backed.
                    let mut start = addr;
                    for (vaddr, vsize) in ranges.iter() {
                        if start < addr + vaddr {
                            if self.verbose {
                                println!("mmap({:#x}, {})", start, addr + vaddr - start);
                            }
                            let mut flags = sys_mman::MAP_ANONYMOUS | sys_mman::MAP_PRIVATE;
                            if start != 0 {
                                flags |= sys_mman::MAP_FIXED_NOREPLACE;
                            }
                            let ptr = sys_mman::mmap(
                                start as *mut c_void,
                                addr + vaddr - start,
                                //TODO: Make it possible to not specify PROT_EXEC on Redox
                                sys_mman::PROT_READ | sys_mman::PROT_WRITE,
                                flags,
                                -1,
                                0,
                            );
                            if ptr as usize == !0
                            /* MAP_FAILED */
                            {
                                return Err(Error::Malformed(format!(
                                    "failed to map {}. errno: {}",
                                    elf_name, STR_ERROR[errno as usize]
                                )));
                            }
                            if start as *mut c_void != ptr::null_mut() {
                                assert_eq!(
                                    ptr, start as *mut c_void,
                                    "mmap must always map on the destination we requested"
                                );
                            }
                        }
                        start = addr + vaddr + vsize
                    }
                    // Make the whole image writable for the copy/reloc phases;
                    // NOTE(review): mprotect result is ignored here.
                    sys_mman::mprotect(
                        addr as *mut c_void,
                        size,
                        sys_mman::PROT_READ | sys_mman::PROT_WRITE,
                    );
                    // Register the primary in the debugger link map (first entry).
                    _r_debug.insert_first(addr as usize, &elf_name, addr + l_ld as usize);
                    (
                        addr as usize,
                        slice::from_raw_parts_mut(addr as *mut u8, size),
                    )
                } else {
                    let (start, end) = bounds;
                    let size = end - start;
                    if self.verbose {
                        println!("mmap({:#x}, {})", start, size);
                    }
                    // start == 0 (typical PIE): let the kernel pick an address.
                    let mut flags = sys_mman::MAP_ANONYMOUS | sys_mman::MAP_PRIVATE;
                    if start != 0 {
                        flags |= sys_mman::MAP_FIXED_NOREPLACE;
                    }
                    let ptr = sys_mman::mmap(
                        start as *mut c_void,
                        size,
                        //TODO: Make it possible to not specify PROT_EXEC on Redox
                        sys_mman::PROT_READ | sys_mman::PROT_WRITE,
                        flags,
                        -1,
                        0,
                    );
                    if ptr as usize == !0
                    /* MAP_FAILED */
                    {
                        return Err(Error::Malformed(format!(
                            "failed to map {}. errno: {}",
                            elf_name, STR_ERROR[errno as usize]
                        )));
                    }
                    if start as *mut c_void != ptr::null_mut() {
                        assert_eq!(
                            ptr, start as *mut c_void,
                            "mmap must always map on the destination we requested"
                        );
                    }
                    ptr::write_bytes(ptr as *mut u8, 0, size);
                    _r_debug.insert(ptr as usize, &elf_name, ptr as usize + l_ld as usize);
                    // Non-PIE keeps `start` (the absolute base) as base_addr,
                    // while the slice covers the actual mapping.
                    (start, slice::from_raw_parts_mut(ptr as *mut u8, size))
                }
            };
            if self.verbose {
                println!(" mmap {:p}, {:#x}", mmap.1.as_mut_ptr(), mmap.1.len());
            }
            let (globals, weak_syms) = Linker::collect_syms(&elf, &mmap.1, self.verbose)?;
            lib.globals.extend(globals.into_iter());
            lib.weak_syms.extend(weak_syms.into_iter());
            lib.mmaps.insert(elf_name.to_string(), mmap);
        }

        // Allocate TLS
        let mut tcb_opt = if primary_opt.is_some() {
            Some(unsafe { Tcb::new(tls_size)? })
        } else {
            None
        };
        if self.verbose {
            println!("tcb {:x?}", tcb_opt);
        }
        // Copy data
        // Non-primary TLS blocks are laid out after the primary's block.
        let mut tls_offset = tls_primary;
        let mut tcb_masters = Vec::new();
        // Insert main image master
        tcb_masters.push(Master {
            ptr: ptr::null_mut(),
            len: 0,
            offset: 0,
        });
        let mut tls_ranges = BTreeMap::new();
        for (elf_name, elf) in elfs.iter() {
            if skip_list.contains(elf_name) {
                continue;
            }
            let same_elf = if let Some(prog) = dso.as_ref() {
                if prog.name == *elf_name {
                    true
                } else {
                    false
                }
            } else {
                false
            };
            let object = match lib.objects.get(*elf_name) {
                Some(some) => some,
                None => continue,
            };

            let &mut (base_addr, ref mut mmap) = match lib.mmaps.get_mut(*elf_name) {
                Some(some) => some,
                None => continue,
            };
            if self.verbose {
                println!("load {}", elf_name);
            }
            // Copy data
            for ph in elf.program_headers.iter() {
                let voff = ph.p_vaddr as usize % PAGE_SIZE;
                let vaddr = ph.p_vaddr as usize - voff;
                let vsize = ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                match ph.p_type {
                    program_header::PT_LOAD => {
                        // The already-mapped primary's segments are in place.
                        if same_elf {
                            continue;
                        }
                        let obj_data = {
                            let range = ph.file_range();
                            match object.get(range.clone()) {
                                Some(some) => some,
                                None => {
                                    return Err(Error::Malformed(format!(
                                        "failed to read {:x?}",
                                        range
                                    )))
                                }
                            }
                        };

                        let mmap_data = {
                            // Destination is p_vaddr relative to the mapping base.
                            let range = ph.p_vaddr as usize - base_addr
                                ..ph.p_vaddr as usize + obj_data.len() - base_addr;
                            match mmap.get_mut(range.clone()) {
                                Some(some) => some,
                                None => {
                                    println!("mmap: {}", mmap.len());
                                    return Err(Error::Malformed(format!(
                                        "failed to write {:x?}",
                                        range
                                    )));
                                }
                            }
                        };
                        if self.verbose {
                            println!(
                                " copy {:#x}, {:#x}: {:#x}, {:#x}",
                                vaddr,
                                vsize,
                                voff,
                                obj_data.len()
                            );
                        }
                        mmap_data.copy_from_slice(obj_data);
                    }
                    program_header::PT_TLS => {
                        // Round the TLS block up to its alignment.
                        let valign = if ph.p_align > 0 {
                            ((ph.p_memsz + (ph.p_align - 1)) / ph.p_align) * ph.p_align
                        } else {
                            ph.p_memsz
                        } as usize;
                        // Source image of the TLS initialization data.
                        let ptr = unsafe {
                            if is_pie_enabled(elf) {
                                mmap.as_ptr().add(ph.p_vaddr as usize)
                            } else {
                                ph.p_vaddr as *const u8
                            }
                        };
                        // Offsets count back from the end of the TLS area
                        // (variant II layout: TCB above the TLS blocks).
                        let mut tcb_master = Master {
                            ptr: ptr,
                            len: ph.p_filesz as usize,
                            offset: tls_size - valign,
                        };
                        if self.verbose {
                            println!(
                                " tls master {:p}, {:#x}: {:#x}, {:#x}",
                                tcb_master.ptr, tcb_master.len, tcb_master.offset, valign,
                            );
                        }
                        if Some(*elf_name) == primary_opt {
                            // The primary occupies module slot 0.
                            tls_ranges.insert(
                                elf_name.to_string(),
                                (self.tls_index_offset, tcb_master.range()),
                            );
                            tcb_masters[0] = tcb_master;
                        } else {
                            tcb_master.offset -= tls_offset;
                            tls_offset += vsize;
                            tls_ranges.insert(
                                elf_name.to_string(),
                                (
                                    self.tls_index_offset + tcb_masters.len(),
                                    tcb_master.range(),
                                ),
                            );
                            tcb_masters.push(tcb_master);
                        }
                    }
                    _ => (),
                }
            }
        }

        self.tls_index_offset += tcb_masters.len();

        // Set master images for TLS and copy TLS data
        if let Some(ref mut tcb) = tcb_opt {
            unsafe {
                tcb.set_masters(tcb_masters.into_boxed_slice());
                tcb.copy_masters()?;
            }
        }

        // Perform relocations, and protect pages
        for (elf_name, elf) in elfs.iter() {
            if skip_list.contains(elf_name) {
                continue;
            }
            if self.verbose {
                println!("link {}", elf_name);
            }
            // Relocate
            for rel in elf
                .dynrelas
                .iter()
                .chain(elf.dynrels.iter())
                .chain(elf.pltrelocs.iter())
            {
                // println!(" rel {}: {:x?}",
                //     reloc::r_to_str(rel.r_type, elf.header.e_machine),
                //     rel
                // );
                // Resolve the relocation's symbol, preferring this library
                // space, then falling back to the root space.
                let symbol = if rel.r_sym > 0 {
                    let sym = elf.dynsyms.get(rel.r_sym).ok_or(Error::Malformed(format!(
                        "missing symbol for relocation {:?}",
                        rel
                    )))?;

                    let name =
                        elf.dynstrtab
                            .get(sym.st_name)
                            .ok_or(Error::Malformed(format!(
                                "missing name for symbol {:?}",
                                sym
                            )))??;
                    lib.get_sym(name).or_else(|| self.root.get_sym(name))
                } else {
                    None
                };

                // s = symbol address (0 if unresolved), a = addend, b = base.
                let s = symbol
                    .as_ref()
                    .map(|sym| sym.as_ptr() as usize)
                    .unwrap_or(0);

                let a = rel.r_addend.unwrap_or(0) as usize;

                let (_, mmap) = match lib.mmaps.get_mut(*elf_name) {
                    Some(some) => some,
                    None => continue,
                };

                let b = mmap.as_mut_ptr() as usize;

                // tm/t: TLS module index and block start for TLS relocations.
                let (tm, t) = if let Some((tls_index, tls_range)) = tls_ranges.get(*elf_name) {
                    (*tls_index, tls_range.start)
                } else {
                    (0, 0)
                };

                // Relocation target address (r_offset, rebased for PIE).
                let ptr = if is_pie_enabled(&elf) {
                    unsafe { mmap.as_mut_ptr().add(rel.r_offset as usize) }
                } else {
                    rel.r_offset as *mut u8
                };
                let set_u64 = |value| {
                    // println!(" set_u64 {:#x}", value);
                    unsafe {
                        *(ptr as *mut u64) = value;
                    }
                };

                match rel.r_type {
                    reloc::R_X86_64_64 => {
                        set_u64((s + a) as u64);
                    }
                    reloc::R_X86_64_DTPMOD64 => {
                        set_u64(tm as u64);
                    }
                    reloc::R_X86_64_DTPOFF64 => {
                        if s != 0 {
                            set_u64((s - b) as u64);
                        } else {
                            set_u64(s as u64);
                        }
                    }
                    reloc::R_X86_64_GLOB_DAT | reloc::R_X86_64_JUMP_SLOT => {
                        set_u64(s as u64);
                    }
                    reloc::R_X86_64_RELATIVE => {
                        set_u64((b + a) as u64);
                    }
                    reloc::R_X86_64_TPOFF64 => {
                        set_u64((s + a).wrapping_sub(t) as u64);
                    }
                    reloc::R_X86_64_IRELATIVE => (), // Handled below
                    reloc::R_X86_64_COPY => unsafe {
                        // TODO: Make this work
                        let sym = symbol
                            .as_ref()
                            .expect("R_X86_64_COPY called without valid symbol");
                        ptr::copy_nonoverlapping(sym.as_ptr() as *const u8, ptr, sym.size as usize);
                    },
                    _ => {
                        panic!(
                            " {} unsupported",
                            reloc::r_to_str(rel.r_type, elf.header.e_machine)
                        );
                    }
                }
            }

            // overwrite DT_DEBUG if exist in DYNAMIC segment
            // first we identify the location of DYNAMIC segment
            let mut dyn_start = None;
            let mut debug_start = None;
            for ph in elf.program_headers.iter() {
                if ph.p_type == program_header::PT_DYNAMIC {
                    dyn_start = Some(ph.p_vaddr as usize);
                }
            }
            // next we identify the location of DT_DEBUG in .dynamic section
            if let Some(dynamic) = elf.dynamic.as_ref() {
                let mut i = 0;
                for entry in &dynamic.dyns {
                    if entry.d_tag == DT_DEBUG {
                        debug_start = Some(i as usize);
                        break;
                    }
                    i += 1;
                }
            }
            if let Some(dyn_start_addr) = dyn_start {
                if let Some(i) = debug_start {
                    let (_, mmap) = match lib.mmaps.get_mut(*elf_name) {
                        Some(some) => some,
                        None => continue,
                    };
                    // Write the address of _r_debug into the d_val half of the
                    // DT_DEBUG entry (a Dyn is a tag + value, each half-sized).
                    let bytes: [u8; size_of::<Dyn>() / 2] =
                        unsafe { transmute((&_r_debug) as *const RTLDDebug as usize) };
                    let start = if is_pie_enabled(elf) {
                        dyn_start_addr + i * size_of::<Dyn>() + size_of::<Dyn>() / 2
                    } else {
                        dyn_start_addr + i * size_of::<Dyn>() + size_of::<Dyn>() / 2
                            - mmap.as_mut_ptr() as usize
                    };
                    mmap[start..start + size_of::<Dyn>() / 2].clone_from_slice(&bytes);
                }
            }

            // Protect pages
            for ph in elf.program_headers.iter() {
                if ph.p_type == program_header::PT_LOAD {
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    let mut prot = 0;

                    if ph.p_flags & program_header::PF_R == program_header::PF_R {
                        prot |= sys_mman::PROT_READ;
                    }

                    // W ^ X. If it is executable, do not allow it to be writable, even if requested
                    if ph.p_flags & program_header::PF_X == program_header::PF_X {
                        prot |= sys_mman::PROT_EXEC;
                    } else if ph.p_flags & program_header::PF_W == program_header::PF_W {
                        prot |= sys_mman::PROT_WRITE;
                    }

                    let (_, mmap) = match lib.mmaps.get_mut(*elf_name) {
                        Some(some) => some,
                        None => continue,
                    };
                    let res = unsafe {
                        let ptr = if is_pie_enabled(elf) {
                            mmap.as_mut_ptr().add(vaddr)
                        } else {
                            vaddr as *const u8
                        };
                        if self.verbose {
                            println!(" prot {:#x}, {:#x}: {:p}, {:#x}", vaddr, vsize, ptr, prot);
                        }
                        sys_mman::mprotect(ptr as *mut c_void, vsize, prot)
                    };

                    if res < 0 {
                        return Err(Error::Malformed(format!("failed to mprotect {}", elf_name)));
                    }
                }
            }
        }

        // Activate TLS
        if let Some(ref mut tcb) = tcb_opt {
            unsafe {
                tcb.activate();
            }
        }

        // Perform indirect relocations (necessary evil), gather entry point
        // IRELATIVE resolvers may call into already-relocated code, which is
        // why this runs as a separate pass after everything else is in place.
        let mut entry_opt = None;
        for (elf_name, elf) in elfs.iter() {
            if skip_list.contains(elf_name) {
                continue;
            }
            let (_, mmap) = match lib.mmaps.get_mut(*elf_name) {
                Some(some) => some,
                None => continue,
            };
            if self.verbose {
                println!("entry {}", elf_name);
            }
            if Some(*elf_name) == primary_opt {
                if is_pie_enabled(&elf) {
                    entry_opt = Some(mmap.as_mut_ptr() as usize + elf.header.e_entry as usize);
                } else {
                    entry_opt = Some(elf.header.e_entry as usize);
                }
            }

            // Relocate
            for rel in elf
                .dynrelas
                .iter()
                .chain(elf.dynrels.iter())
                .chain(elf.pltrelocs.iter())
            {
                // println!(" rel {}: {:x?}",
                //     reloc::r_to_str(rel.r_type, elf.header.e_machine),
                //     rel
                // );

                let a = rel.r_addend.unwrap_or(0) as usize;

                let b = mmap.as_mut_ptr() as usize;

                let ptr = unsafe { mmap.as_mut_ptr().add(rel.r_offset as usize) };

                let set_u64 = |value| {
                    // println!(" set_u64 {:#x}", value);
                    unsafe {
                        *(ptr as *mut u64) = value;
                    }
                };

                if rel.r_type == reloc::R_X86_64_IRELATIVE {
                    unsafe {
                        // Call the resolver at base+addend; its return value
                        // becomes the relocated address.
                        let f: unsafe extern "C" fn() -> u64 = transmute(b + a);
                        set_u64(f());
                    }
                }
            }
            // Protect pages
            for ph in elf.program_headers.iter() {
                if let program_header::PT_LOAD = ph.p_type {
                    let voff = ph.p_vaddr as usize % PAGE_SIZE;
                    let vaddr = ph.p_vaddr as usize - voff;
                    let vsize =
                        ((ph.p_memsz as usize + voff + PAGE_SIZE - 1) / PAGE_SIZE) * PAGE_SIZE;

                    let mut prot = 0;

                    if ph.p_flags & program_header::PF_R == program_header::PF_R {
                        prot |= sys_mman::PROT_READ;
                    }

                    // W ^ X. If it is executable, do not allow it to be writable, even if requested
                    if ph.p_flags & program_header::PF_X == program_header::PF_X {
                        prot |= sys_mman::PROT_EXEC;
                    } else if ph.p_flags & program_header::PF_W == program_header::PF_W {
                        prot |= sys_mman::PROT_WRITE;
                    }

                    let res = unsafe {
                        let ptr = if is_pie_enabled(&elf) {
                            mmap.as_mut_ptr().add(vaddr)
                        } else {
                            vaddr as *const u8
                        };
                        if self.verbose {
                            println!(" prot {:#x}, {:#x}: {:p}, {:#x}", vaddr, vsize, ptr, prot);
                        }
                        sys_mman::mprotect(ptr as *mut c_void, vsize, prot)
                    };

                    if res < 0 {
                        return Err(Error::Malformed(format!("failed to mprotect {}", elf_name)));
                    }
                }
            }
        }
        // Link map is consistent again; notify the debugger.
        unsafe { _r_debug.state = RTLDState::RT_CONSISTENT };
        _dl_debug_state();
        Ok(entry_opt)
    }
}

/// Treat `addr` as a slot holding an optional `extern "C" fn()` pointer and
/// call it if non-null (used for .init_array/.fini_array entries).
///
/// # Safety
/// `addr` must point to a valid, readable function-pointer slot.
unsafe fn call_inits_finis(addr: usize) {
    let func = transmute::<usize, *const Option<extern "C" fn()>>(addr);
    (*func).map(|x| x());
}

/// An ELF of type ET_DYN (shared object / PIE) needs rebasing by its mapping
/// address; everything else uses absolute addresses.
fn is_pie_enabled(elf: &Elf) -> bool {
    if elf.header.e_type == ET_DYN {
        true
    } else {
        false
    }
}