Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1804360 - Update goblin to 0.6.0. r=janerik,supply-chain-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D164009

Parent: 8fd72b5fca
Commit: a86772f0b5

@@ -2383,14 +2383,14 @@ dependencies = [
 name = "goblin"
 version = "0.1.99"
 dependencies = [
- "goblin 0.5.4",
+ "goblin 0.6.0",
 ]

 [[package]]
 name = "goblin"
-version = "0.5.4"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143"
+checksum = "572564d6cba7d09775202c8e7eebc4d534d5ae36578ab402fb21e182a0ac9505"
 dependencies = [
  "log",
  "plain",

@@ -5900,8 +5900,6 @@ dependencies = [
 [[package]]
 name = "uniffi_bindgen"
 version = "0.21.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d46080a4840abccf7c0cce21931dae53215cbd7dd969b5e63c486235ce91a2a"
 dependencies = [
  "anyhow",
  "askama",

@@ -5909,7 +5907,7 @@ dependencies = [
  "camino",
  "clap",
  "fs-err",
- "goblin 0.5.4",
+ "goblin 0.6.0",
  "heck",
  "once_cell",
  "paste",

@@ -181,10 +181,13 @@ path = "third_party/rust/mio-0.6.23"
 # okay, but it means that their dependencies on UniFFI crates will normally
 # also be the git versions. Patch them to use the published versions to avoid
 # duplicate crates.

 [patch."https://github.com/mozilla/uniffi-rs.git"]
 uniffi = "=0.21.1"
-uniffi_bindgen = "=0.21.1"
+#uniffi_bindgen = "=0.21.1"
 uniffi_build = "=0.21.1"
 uniffi_macros = "=0.21.1"
 weedle2 = "=4.0.0"

+# Patched with https://github.com/mozilla/uniffi-rs/pull/1405
+[patch.crates-io.uniffi_bindgen]
+path = "third_party/rust/uniffi_bindgen"

@@ -8,4 +8,4 @@ license = "MPL-2.0"
 path = "lib.rs"

 [dependencies.goblin]
-version = "0.5.3"
+version = "0.6"

@@ -700,6 +700,12 @@ criteria = "safe-to-deploy"
 delta = "0.1.3 -> 0.5.4"
 notes = "Several bugfixes since 2019. This version is also in use by Mozilla's crash reporting tooling, e.g. minidump-writer"

+[[audits.goblin]]
+who = "Gabriele Svelto <gsvelto@mozilla.com>"
+criteria = "safe-to-deploy"
+delta = "0.5.4 -> 0.6.0"
+notes = "Mostly bug fixes and some added functionality"
+
 [[audits.gpu-descriptor]]
 who = "Mike Hommey <mh+mozilla@glandium.org>"
 criteria = "safe-to-deploy"

@@ -158,6 +158,10 @@ notes = "This is a first-party crate which is entirely unrelated to the crates.i
 audit-as-crates-io = false
 notes = "This is a first-party crate, maintained by the appservices team, which is entirely unrelated to the crates.io package of the same name."

+[policy.uniffi_bindgen]
+audit-as-crates-io = false
+notes = "This is a first-party crate, normally vendored, but currently patched in-tree until next update."
+
 [policy.viaduct]
 audit-as-crates-io = false
 notes = "This is a first-party crate, maintained by the appservices team, which is entirely unrelated to the crates.io package of the same name."

@@ -1 +1 @@
-{"files":{"CHANGELOG.md":"f58fcd94ba37c97b658a3b7c4164136f782680d06ca71102613bb16a364c5c78","Cargo.toml":"ad3ba4ac887440a8b6738072791d2cd3bc8932d9494093a97311ef78bc3830fe","LICENSE":"036bf6b6d6fd6dd1abda2ff6cdb672a63bdf32c468048720072910f2268a965f","README.md":"5db58c51a339f81752b8346ca4801547e6a387b2cd89869b27e6b38bcda1078f","src/archive/mod.rs":"6e54bfffcb3b259dbaecd1b6b8bcf2ed317e7cb7b59cb512b6c690a8a92c140c","src/elf/compression_header.rs":"2eb5fdda9177c1c897310d86714967de019b39c6e23b1f3a890dd3a659be0acc","src/elf/constants_header.rs":"f2ede290ecacf60b1719e9994aa45612bf0f7baf63806a293d4530a674e5861a","src/elf/constants_relocation.rs":"a010071cd2a25ab71e0c7181eb1d9f417daa2d1ec25a09c74bd12ad944892225","src/elf/dynamic.rs":"bb0daccdfb14df7cdbdf345e2290e02ae5f9ebf8146d0e113b363feb2e306dc0","src/elf/gnu_hash.rs":"7a9fcaf6cb38167d20527364bdf9bc2379c44dede5d7666275a1eb20dc665179","src/elf/header.rs":"3391a1fa9b8e3923f7ce74caff0668d8ddb5b34767bf3da309ff497fd81c34c7","src/elf/mod.rs":"d2ecb62524d7da16a0f46b4b8fa43929839d6897ca93c9bb8f5299d76d66a977","src/elf/note.rs":"3c354f1108fa8e5a69f6cf629a36b61241a321f235a429d9e9900aa7a4c02f46","src/elf/program_header.rs":"7d934c7c761fc73b1a30a8fc2b048d98511b529dd5429cb0848b4bdd5dcdade7","src/elf/reloc.rs":"a4b7843c0e201f83c344c0681dfd285754a7211e3472ddd53024a520e17c847f","src/elf/section_header.rs":"ca52e85f6a0e10177368f2790d41bc9ae3348216bbd4393d483f0646030e1cc7","src/elf/sym.rs":"045c01107f4e100d6827cb819b82a28ea10c0d9bc00a1cdddb04a0865f1162ec","src/elf/symver.rs":"3f899201f64a702653d44288f860003e7acd75e38111d36479af823ed92b1341","src/error.rs":"af620a5692bca070dc727d49cdbb566a533bfb97724ca68932ae7fec7dc05cf6","src/lib.rs":"f592c9fb610e910b0eece8b7ee8685eed7de8556bb4d8df43c0fd55109b3304b","src/mach/bind_opcodes.rs":"1dcacfb853d05c2c7e6dbb4509ee705a8ea645db0d334991a2293fef92eee851","src/mach/constants.rs":"c2a2381a0b9c3047d37582465e8965d995dca414d0da21fb7bcc6b8334e49eb6","src/mach/exports.rs":"d22122744673a3ce5f54b2b4b20bfa47d17378e64d3dda2858dd13add74ed3dc","src/mach/fat.rs":"45a3228aaa1ab8b77f322dd4924b7383f1357e226ffc079846d67c0268389ea7","src/mach/header.rs":"ae15265e9f1a92eb9ba04333e5bb309f276e1300b87f43386f7829e820318938","src/mach/imports.rs":"2153269dfff32e23d72f76a82d658be06bd79b7e35d79b7e17115e4eb24b13d5","src/mach/load_command.rs":"6435666c46e875610375b92b1ab1e3fdc9f6048d51728d996dd4531329eb3d39","src/mach/mod.rs":"462c6bcf3d948a545076dca04016ff1613476f8b667021f7ee6156b630a06b22","src/mach/relocation.rs":"11b0b76ed7d997c87e396100515f931fe84473c228bed0e980fbab311530070a","src/mach/segment.rs":"0dc29bf42b25f60c7258bc8b757f6a862e846582dd6d2e70737933ad6334a0e4","src/mach/symbols.rs":"500190db6d82fc4d807fc6039c5049b6446e2f3f71228088a7e0abe58ac1e9ee","src/pe/characteristic.rs":"6f810a6e5646b922cf7e3ca6d314677a4e1e1ad5695278c2b1b527a05f4299f3","src/pe/data_directories.rs":"d4e156f0c5b509860ceb3c7d42e1621e6c2143b90fc412806b3cefab1acc577a","src/pe/debug.rs":"c7a6ce316fc108a7309502674af3aa5521d594dfb5fdc373bfabe01521443224","src/pe/exception.rs":"de2c9c07812ecd315c8400fc8fdcadc6a44d7a8be96e69a3f4ccf14ef8cf8426","src/pe/export.rs":"c98f5ce0b1b18bb87f06d1d41dbf70f443d65ecb1624cb23a1ef6c5f93a892e1","src/pe/header.rs":"f964bd1157f04820a537d79faad1015195d49be01ec6ec5e4e205abd10ee923e","src/pe/import.rs":"855276e46c01ccd7631104e4d1265592e36c9468aadcacc937a40c29d94aabe3","src/pe/mod.rs":"f43524ceb77ad263a094eea038f81f010fc6f1de1d144f24218ee3224abab0fd","src/pe/optional_header.rs":"f3fb9aec04fccde3b765ec3d54cb27bfe636efceb94ddbe34e88098e28f56b55","src/pe/options.rs":"b38f4e87f13ae381712621786f89e931452b2b4857a7bb6f140c4c21a63aa652","src/pe/relocation.rs":"c479b80bb1d6910f2168505dda4f2d8925b7edc34bed4e25d069546f88f52bb3","src/pe/section_table.rs":"d7144c7be3242d7aa653d22dca1cf15f7110f79a946a15cbe6ecf531e0cacb19","src/pe/symbol.rs":"9a65226c93c4499e21d094ceb838d58db706951580a1c43dfb36b95dbaff70f0","src/pe/utils.rs":"907cc565030db20f694c72a2a9b89ec8038e4f2593e6233b65d2a6854738e6d1","src/strtab.rs":"6d122084cf5d5244b2bd734b1d6d2c018116cc537ffc0c81d042d5b8815d7782","tests/bins/elf/gnu_hash/README.md":"52581e2ea7067a55bd8aedf4079200fb76448573ae9ffef7d886b9556e980db9"},"package":"a7666983ed0dd8d21a6f6576ee00053ca0926fb281a5522577a4dbd0f1b54143"}
+{"files":{"CHANGELOG.md":"45e187ea086d30738d40ecd0a893326d0acf416d754284786190b9b9eb60e2f6","Cargo.toml":"ba0aff6d6cbeddd63473b7ffe5e979b4a0d956d1c2271422a67f451555ea5f4e","LICENSE":"036bf6b6d6fd6dd1abda2ff6cdb672a63bdf32c468048720072910f2268a965f","README.md":"c5854ea5d60a3e3a0e89e9ed04b6b3653efe6d7e941894a734888abd52bf9212","src/archive/mod.rs":"ae739638d7267011bedf51712516d3485171d8f2df2ab6746a0d942d86efd6a6","src/elf/compression_header.rs":"2eb5fdda9177c1c897310d86714967de019b39c6e23b1f3a890dd3a659be0acc","src/elf/constants_header.rs":"f2ede290ecacf60b1719e9994aa45612bf0f7baf63806a293d4530a674e5861a","src/elf/constants_relocation.rs":"a010071cd2a25ab71e0c7181eb1d9f417daa2d1ec25a09c74bd12ad944892225","src/elf/dynamic.rs":"c26e75311f2da9e34dc4c0a2120dfcc20df88a41d67c52b9bf703258de018fd8","src/elf/gnu_hash.rs":"7a9fcaf6cb38167d20527364bdf9bc2379c44dede5d7666275a1eb20dc665179","src/elf/header.rs":"3391a1fa9b8e3923f7ce74caff0668d8ddb5b34767bf3da309ff497fd81c34c7","src/elf/mod.rs":"d2ecb62524d7da16a0f46b4b8fa43929839d6897ca93c9bb8f5299d76d66a977","src/elf/note.rs":"3c354f1108fa8e5a69f6cf629a36b61241a321f235a429d9e9900aa7a4c02f46","src/elf/program_header.rs":"7d934c7c761fc73b1a30a8fc2b048d98511b529dd5429cb0848b4bdd5dcdade7","src/elf/reloc.rs":"a4b7843c0e201f83c344c0681dfd285754a7211e3472ddd53024a520e17c847f","src/elf/section_header.rs":"ca52e85f6a0e10177368f2790d41bc9ae3348216bbd4393d483f0646030e1cc7","src/elf/sym.rs":"045c01107f4e100d6827cb819b82a28ea10c0d9bc00a1cdddb04a0865f1162ec","src/elf/symver.rs":"3f899201f64a702653d44288f860003e7acd75e38111d36479af823ed92b1341","src/error.rs":"af620a5692bca070dc727d49cdbb566a533bfb97724ca68932ae7fec7dc05cf6","src/lib.rs":"465eb53b540dfd142d204984ee7280130542d7f83d6c53691299d773f7394faf","src/mach/bind_opcodes.rs":"1dcacfb853d05c2c7e6dbb4509ee705a8ea645db0d334991a2293fef92eee851","src/mach/constants.rs":"c2a2381a0b9c3047d37582465e8965d995dca414d0da21fb7bcc6b8334e49eb6","src/mach/exports.rs":"d22122744673a3ce5f54b2b4b20bfa47d17378e64d3dda2858dd13add74ed3dc","src/mach/fat.rs":"45a3228aaa1ab8b77f322dd4924b7383f1357e226ffc079846d67c0268389ea7","src/mach/header.rs":"ae15265e9f1a92eb9ba04333e5bb309f276e1300b87f43386f7829e820318938","src/mach/imports.rs":"2153269dfff32e23d72f76a82d658be06bd79b7e35d79b7e17115e4eb24b13d5","src/mach/load_command.rs":"6435666c46e875610375b92b1ab1e3fdc9f6048d51728d996dd4531329eb3d39","src/mach/mod.rs":"53ad219fd2265a5689ab38d5031722268eab6bbb649c75756e74295df4b611b7","src/mach/relocation.rs":"11b0b76ed7d997c87e396100515f931fe84473c228bed0e980fbab311530070a","src/mach/segment.rs":"0dc29bf42b25f60c7258bc8b757f6a862e846582dd6d2e70737933ad6334a0e4","src/mach/symbols.rs":"d2505fa8d65ea267abfcb6a9fc4d1acd47d5605aa6775935757e2fa8e92af507","src/pe/characteristic.rs":"6f810a6e5646b922cf7e3ca6d314677a4e1e1ad5695278c2b1b527a05f4299f3","src/pe/data_directories.rs":"d4e156f0c5b509860ceb3c7d42e1621e6c2143b90fc412806b3cefab1acc577a","src/pe/debug.rs":"3811c616a9b6d6b54e15348bb369b794bb89532e04fe19eca91b745d7c51a553","src/pe/exception.rs":"de2c9c07812ecd315c8400fc8fdcadc6a44d7a8be96e69a3f4ccf14ef8cf8426","src/pe/export.rs":"c98f5ce0b1b18bb87f06d1d41dbf70f443d65ecb1624cb23a1ef6c5f93a892e1","src/pe/header.rs":"f02a4beddc00ddd6624df7defc42991ceb507360b5aa1003cf33332c1c89a743","src/pe/import.rs":"855276e46c01ccd7631104e4d1265592e36c9468aadcacc937a40c29d94aabe3","src/pe/mod.rs":"f43524ceb77ad263a094eea038f81f010fc6f1de1d144f24218ee3224abab0fd","src/pe/optional_header.rs":"f3fb9aec04fccde3b765ec3d54cb27bfe636efceb94ddbe34e88098e28f56b55","src/pe/options.rs":"b38f4e87f13ae381712621786f89e931452b2b4857a7bb6f140c4c21a63aa652","src/pe/relocation.rs":"c479b80bb1d6910f2168505dda4f2d8925b7edc34bed4e25d069546f88f52bb3","src/pe/section_table.rs":"d7144c7be3242d7aa653d22dca1cf15f7110f79a946a15cbe6ecf531e0cacb19","src/pe/symbol.rs":"9a65226c93c4499e21d094ceb838d58db706951580a1c43dfb36b95dbaff70f0","src/pe/utils.rs":"907cc565030db20f694c72a2a9b89ec8038e4f2593e6233b65d2a6854738e6d1","src/strtab.rs":"6d122084cf5d5244b2bd734b1d6d2c018116cc537ffc0c81d042d5b8815d7782","tests/bins/elf/gnu_hash/README.md":"52581e2ea7067a55bd8aedf4079200fb76448573ae9ffef7d886b9556e980db9"},"package":"572564d6cba7d09775202c8e7eebc4d534d5ae36578ab402fb21e182a0ac9505"}

@@ -5,6 +5,20 @@ Before 1.0, this project does not adhere to [Semantic Versioning](http://semver.

 Goblin is now 0.5, which means we will try our best to ease breaking changes. Tracking issue is here: https://github.com/m4b/goblin/issues/97

+## [0.6.0] - 2022-10-23
+### Breaking
+macho: add support for archives in multi-arch binaries, big thanks to @nick96: https://github.com/m4b/goblin/pull/322
+### Changed
+elf: only consider loadable segments for VM translation (this may semantically break someone, if they depended on older behavior), thanks @lumag: https://github.com/m4b/goblin/pull/329
+### Fixed
+archive: fix potential panic in bsd filenames, thanks @nathaniel-daniel: https://github.com/m4b/goblin/pull/335
+archive: fix subtract with overflow, thanks @anfedotoff: https://github.com/m4b/goblin/pull/333
+pe: fix oob access, thanks @anfedetoff: https://github.com/m4b/goblin/pull/330
+archive: fix oob access, thanks @anfedetoff: https://github.com/m4b/goblin/pull/329
+### Added
+pe: add machine_to_str utility function, thanks @cgzones: https://github.com/m4b/goblin/pull/338
+fuzz: add debug info for line numbers, thanks @SweetVishnya: https://github.com/m4b/goblin/pull/336
+
 ## [0.5.4] - 2022-8-14
 ### Fixed
 pe: fix regression in PE binary parsing, thanks @SquareMan: https://github.com/m4b/goblin/pull/321

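The "Breaking" entry above is the only part of this update that changes goblin's public API: iterating or indexing a fat (multi-arch) Mach-O now yields an enum that can hold either a Mach-O image or a static archive. A minimal sketch of how a caller adapts, assuming the goblin 0.6 `Mach`/`SingleArch` API shown later in this diff (the `list_arches` helper is illustrative only, not part of goblin or this patch):

```rust
use goblin::mach::{Mach, SingleArch};

// Illustrative helper, not part of goblin or this patch.
fn list_arches(data: &[u8]) -> goblin::error::Result<()> {
    match Mach::parse(data)? {
        Mach::Binary(macho) => println!("thin Mach-O, cputype {}", macho.header.cputype),
        Mach::Fat(multi) => {
            // In 0.6.0 iteration yields SingleArch rather than MachO, because a
            // fat slice may now also contain a static archive.
            for entry in &multi {
                match entry? {
                    SingleArch::MachO(macho) => println!("Mach-O slice, cputype {}", macho.header.cputype),
                    SingleArch::Archive(ar) => println!("archive slice, {} members", ar.members().len()),
                }
            }
        }
    }
    Ok(())
}
```
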
@@ -13,7 +13,7 @@
 edition = "2021"
 rust-version = "1.56.0"
 name = "goblin"
-version = "0.5.4"
+version = "0.6.0"
 authors = [
     "m4b <m4b.github.io@gmail.com>",
     "seu <seu@panopticon.re>",

@@ -81,10 +81,12 @@ endian_fd = ["alloc"]
 mach32 = [
     "alloc",
     "endian_fd",
+    "archive",
 ]
 mach64 = [
     "alloc",
     "endian_fd",
+    "archive",
 ]
 pe32 = [
     "alloc",

@@ -26,7 +26,7 @@ Add to your `Cargo.toml`

 ```toml
 [dependencies]
-goblin = "0.5"
+goblin = "0.6"
 ```

 ### Features

@@ -110,6 +110,7 @@ In lexicographic order:
 - [@2vg]
 - [@alessandrod]
 - [@amanieu]
+- [@anfedotoff]
 - [@apalm]
 - [@burjui]
 - [@connorkuehl]

@@ -134,6 +135,7 @@ In lexicographic order:
 - [@Lichtso]
 - [@lion128]
 - [@llogiq]
+- [@lumag]
 - [@lzutao]
 - [@lzybkr]
 - [@m4b]

@@ -143,6 +145,8 @@ In lexicographic order:
 - [@mre]
 - [@Mrmaxmeier]
+- [n01e0]
+- [nathaniel-daniel]
 - [@nick96]
 - [@nico-abram]
 - [@npmccallum]
 - [@pchickey]

@@ -155,6 +159,7 @@ In lexicographic order:
 - [@skdltmxn]
 - [@sollyucko]
 - [@Swatinem]
+- [@SweetVishnya]
 - [@SquareMan]
 - [@tathanhdinh]
 - [@Techno-coder]

@@ -174,6 +179,7 @@ In lexicographic order:
 [@2vg]: https://github.com/2vg
 [@alessandrod]: https://github.com/alessandrod
 [@amanieu]: https://github.com/amanieu
+[@anfedotoff]: https://github.com/anfedotoff
 [@apalm]: https://github.com/apalm
 [@burjui]: https://github.com/burjui
 [@connorkuehl]: https://github.com/connorkuehl

@@ -198,6 +204,7 @@ In lexicographic order:
 [@Lichtso]: https://github.com/Lichtso
 [@lion128]: https://github.com/lion128
 [@llogiq]: https://github.com/llogiq
+[@lumag]: https://github.com/lumag
 [@lzutao]: https://github.com/lzutao
 [@lzybkr]: https://github.com/lzybkr
 [@m4b]: https://github.com/m4b

@@ -207,6 +214,8 @@ In lexicographic order:
 [@mre]: https://github.com/mre
 [@Mrmaxmeier]: https://github.com/Mrmaxmeier
+[n01e0]: https://github.com/n01e0
+[nathaniel-daniel]: https://github.com/nathaniel-daniel
 [@nick96]: https://github.com/nick96
 [@nico-abram]: https://github.com/nico-abram
 [@npmccallum]: https://github.com/npmccallum
 [@pchickey]: https://github.com/pchickey

@@ -219,6 +228,7 @@ In lexicographic order:
 [@skdltmxn]: https://github.com/skdltmxn
 [@sollyucko]: https://github.com/sollyucko
 [@Swatinem]: https://github.com/Swatinem
+[@SweetVishnya]: https://github.com/SweetVishnya
 [@SquareMan]: https://github.com/SquareMan
 [@tathanhdinh]: https://github.com/tathanhdinh
 [@Techno-coder]: https://github.com/Techno-coder

@@ -132,11 +132,15 @@ impl<'a> Member<'a> {
             )?;

             // adjust the offset and size accordingly
-            *offset = header_offset + SIZEOF_HEADER + len;
-            header.size -= len;
+            if header.size > len {
+                *offset = header_offset + SIZEOF_HEADER + len;
+                header.size -= len;

-            // the name may have trailing NULs which we don't really want to keep
-            Some(name.trim_end_matches('\0'))
+                // the name may have trailing NULs which we don't really want to keep
+                Some(name.trim_end_matches('\0'))
+            } else {
+                None
+            }
         } else {
             None
         };

|
@ -160,8 +164,8 @@ impl<'a> Member<'a> {
|
|||
fn bsd_filename_length(name: &str) -> Option<usize> {
|
||||
use core::str::FromStr;
|
||||
|
||||
if name.len() > 3 && &name[0..3] == "#1/" {
|
||||
let trimmed_name = &name[3..].trim_end_matches(' ');
|
||||
if let Some(name) = name.strip_prefix("#1/") {
|
||||
let trimmed_name = name.trim_end_matches(' ');
|
||||
if let Ok(len) = usize::from_str(trimmed_name) {
|
||||
Some(len)
|
||||
} else {
|
||||
|
@@ -338,8 +342,13 @@

         let mut symbol_offsets = Vec::with_capacity(symbols);
         for _ in 0..symbols {
-            symbol_offsets
-                .push(member_offsets[buffer.gread_with::<u16>(offset, scroll::LE)? as usize - 1]);
+            if let Some(symbol_offset) =
+                member_offsets.get(buffer.gread_with::<u16>(offset, scroll::LE)? as usize - 1)
+            {
+                symbol_offsets.push(*symbol_offset);
+            } else {
+                return Err(Error::BufferTooShort(members, "members"));
+            }
         }
         let strtab = strtab::Strtab::parse(buffer, *offset, buffer.len() - *offset, 0x0)?;
         Ok(Index {

@@ -611,6 +620,7 @@
         assert_eq!(Member::bsd_filename_length("#2/1"), None);
         assert_eq!(Member::bsd_filename_length(INDEX_NAME), None);
         assert_eq!(Member::bsd_filename_length(NAME_INDEX_NAME), None);
+        assert_eq!(Member::bsd_filename_length("👺"), None);

         // #1/<len> should be parsed as Some(len), with or without whitespace
         assert_eq!(Member::bsd_filename_length("#1/1"), Some(1));

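The three archive fixes above share one pattern: a length or index read from the file is validated (`header.size > len`, `strip_prefix`, `member_offsets.get(...)`) so a malformed archive produces an `Err` instead of a panic or integer underflow. A standalone sketch of the same pattern; `lookup` is a hypothetical helper, not goblin code:

```rust
// Hypothetical helper: checked access for a 1-based index read from a file.
fn lookup(offsets: &[u32], raw_index: u16) -> Result<u32, String> {
    let idx = usize::from(raw_index)
        .checked_sub(1) // guards the "subtract with overflow" case for index 0
        .ok_or_else(|| "0 is not a valid 1-based member index".to_string())?;
    offsets
        .get(idx) // guards the out-of-bounds case instead of indexing directly
        .copied()
        .ok_or_else(|| format!("member index {} out of range", raw_index))
}

fn main() {
    assert_eq!(lookup(&[10, 20, 30], 2), Ok(20));
    assert!(lookup(&[10, 20, 30], 0).is_err());
    assert!(lookup(&[10, 20, 30], 7).is_err());
}
```
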
@@ -582,7 +582,7 @@ macro_rules! elf_dynamic_info_std_impl {
             /// Convert a virtual memory address to a file offset
             fn vm_to_offset(phdrs: &[$phdr], address: $size) -> Option<$size> {
                 for ph in phdrs {
-                    if address >= ph.p_vaddr {
+                    if ph.p_type == crate::elf::program_header::PT_LOAD && address >= ph.p_vaddr {
                         let offset = address - ph.p_vaddr;
                         if offset < ph.p_memsz {
                             return ph.p_offset.checked_add(offset);

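The one-line change above restricts virtual-address translation to PT_LOAD segments; other segment types can cover the same addresses without describing a file mapping. A toy, standalone model of the fixed rule (the real code sits inside the `elf_dynamic_info_std_impl` macro and is generic over the ELF word size):

```rust
const PT_LOAD: u32 = 1;
const PT_DYNAMIC: u32 = 2;

struct Phdr {
    p_type: u32,
    p_vaddr: u64,
    p_memsz: u64,
    p_offset: u64,
}

// Only loadable segments participate in vaddr -> file-offset translation.
fn vm_to_offset(phdrs: &[Phdr], address: u64) -> Option<u64> {
    for ph in phdrs {
        if ph.p_type == PT_LOAD && address >= ph.p_vaddr {
            let offset = address - ph.p_vaddr;
            if offset < ph.p_memsz {
                return ph.p_offset.checked_add(offset);
            }
        }
    }
    None
}

fn main() {
    let phdrs = [
        // A non-loadable segment covering the same range...
        Phdr { p_type: PT_DYNAMIC, p_vaddr: 0x1000, p_memsz: 0x100, p_offset: 0x0 },
        // ...no longer shadows the actual PT_LOAD mapping.
        Phdr { p_type: PT_LOAD, p_vaddr: 0x1000, p_memsz: 0x2000, p_offset: 0x4000 },
    ];
    assert_eq!(vm_to_offset(&phdrs, 0x1040), Some(0x4040));
}
```
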
@@ -201,6 +201,33 @@ pub mod container {
     }
 }

+/// Takes a reference to the first 16 bytes of the total bytes slice and convert it to an array for `peek_bytes` to use.
+/// Returns None if bytes's length is less than 16.
+#[allow(unused)]
+fn take_hint_bytes(bytes: &[u8]) -> Option<&[u8; 16]> {
+    bytes
+        .get(0..16)
+        .and_then(|hint_bytes_slice| hint_bytes_slice.try_into().ok())
+}
+
+#[derive(Debug, Default)]
+/// Information obtained from a peek `Hint`
+pub struct HintData {
+    pub is_lsb: bool,
+    pub is_64: Option<bool>,
+}
+
+#[derive(Debug)]
+/// A hint at the underlying binary format for 16 bytes of arbitrary data
+pub enum Hint {
+    Elf(HintData),
+    Mach(HintData),
+    MachFat(usize),
+    PE,
+    Archive,
+    Unknown(u64),
+}
+
 macro_rules! if_everything {
     ($($i:item)*) => ($(
         #[cfg(all(feature = "endian_fd", feature = "elf64", feature = "elf32", feature = "pe64", feature = "pe32", feature = "mach64", feature = "mach32", feature = "archive"))]

@@ -210,28 +237,9 @@ macro_rules! if_everything {

 if_everything! {

-    #[derive(Debug, Default)]
-    /// Information obtained from a peek `Hint`
-    pub struct HintData {
-        pub is_lsb: bool,
-        pub is_64: Option<bool>,
-    }
-
-    #[derive(Debug)]
-    /// A hint at the underlying binary format for 16 bytes of arbitrary data
-    pub enum Hint {
-        Elf(HintData),
-        Mach(HintData),
-        MachFat(usize),
-        PE,
-        Archive,
-        Unknown(u64),
-    }
-
     /// Peeks at `bytes`, and returns a `Hint`
     pub fn peek_bytes(bytes: &[u8; 16]) -> error::Result<Hint> {
-        use scroll::{Pread, LE, BE};
-        use crate::mach::{fat, header};
+        use scroll::{Pread, LE};
         if &bytes[0..elf::header::SELFMAG] == elf::header::ELFMAG {
             let class = bytes[elf::header::EI_CLASS];
             let is_lsb = bytes[elf::header::EI_DATA] == elf::header::ELFDATA2LSB;

@@ -248,23 +256,7 @@
         } else if (&bytes[0..2]).pread_with::<u16>(0, LE)? == pe::header::DOS_MAGIC {
             Ok(Hint::PE)
         } else {
-            let (magic, maybe_ctx) = mach::parse_magic_and_ctx(bytes, 0)?;
-            match magic {
-                fat::FAT_MAGIC => {
-                    // should probably verify this is always Big Endian...
-                    let narchitectures = bytes.pread_with::<u32>(4, BE)? as usize;
-                    Ok(Hint::MachFat(narchitectures))
-                },
-                header::MH_CIGAM_64 | header::MH_CIGAM | header::MH_MAGIC_64 | header::MH_MAGIC => {
-                    if let Some(ctx) = maybe_ctx {
-                        Ok(Hint::Mach(HintData { is_lsb: ctx.le.is_little(), is_64: Some(ctx.container.is_big()) }))
-                    } else {
-                        Err(error::Error::Malformed(format!("Correct mach magic {:#x} does not have a matching parsing context!", magic)))
-                    }
-                },
-                // its something else
-                _ => Ok(Hint::Unknown(bytes.pread::<u64>(0)?))
-            }
+            mach::peek_bytes(bytes)
         }
     }

@@ -279,16 +271,6 @@
         peek_bytes(&bytes)
     }

-    /// Takes a reference to the first 16 bytes of the total bytes slice and convert it to an array for `peek_bytes` to use.
-    /// Returns None if bytes's length is less than 16.
-    fn take_hint_bytes(bytes: &[u8]) -> Option<&[u8; 16]> {
-        use core::convert::TryInto;
-        bytes.get(0..16)
-            .and_then(|hint_bytes_slice| {
-                hint_bytes_slice.try_into().ok()
-            })
-    }
-
     #[derive(Debug)]
     #[allow(clippy::large_enum_variant)]
     /// A parseable object that goblin understands

@@ -7,8 +7,8 @@ use log::debug;
 use scroll::ctx::SizeWith;
 use scroll::{Pread, BE};

-use crate::container;
-use crate::error;
+use crate::{archive, container};
+use crate::{error, take_hint_bytes};

 pub mod bind_opcodes;
 pub mod constants;

@@ -296,6 +296,15 @@ pub struct FatArchIterator<'a> {
     start: usize,
 }

+/// A single architecture froma multi architecture binary container
+/// ([MultiArch]).
+#[derive(Debug)]
+#[allow(clippy::large_enum_variant)]
+pub enum SingleArch<'a> {
+    MachO(MachO<'a>),
+    Archive(archive::Archive<'a>),
+}
+
 impl<'a> Iterator for FatArchIterator<'a> {
     type Item = error::Result<fat::FatArch>;
     fn next(&mut self) -> Option<Self::Item> {

@@ -313,16 +322,65 @@ impl<'a> Iterator for FatArchIterator<'a> {
     }
 }

-/// Iterator over every `MachO` binary contained in this `MultiArch` container
-pub struct MachOIterator<'a> {
+/// Iterator over every entry contained in this `MultiArch` container
+pub struct SingleArchIterator<'a> {
     index: usize,
     data: &'a [u8],
     narches: usize,
     start: usize,
 }

-impl<'a> Iterator for MachOIterator<'a> {
-    type Item = error::Result<MachO<'a>>;
+pub fn peek_bytes(bytes: &[u8; 16]) -> error::Result<crate::Hint> {
+    if &bytes[0..archive::SIZEOF_MAGIC] == archive::MAGIC {
+        Ok(crate::Hint::Archive)
+    } else {
+        let (magic, maybe_ctx) = parse_magic_and_ctx(bytes, 0)?;
+        match magic {
+            header::MH_CIGAM_64 | header::MH_CIGAM | header::MH_MAGIC_64 | header::MH_MAGIC => {
+                if let Some(ctx) = maybe_ctx {
+                    Ok(crate::Hint::Mach(crate::HintData {
+                        is_lsb: ctx.le.is_little(),
+                        is_64: Some(ctx.container.is_big()),
+                    }))
+                } else {
+                    Err(error::Error::Malformed(format!(
+                        "Correct mach magic {:#x} does not have a matching parsing context!",
+                        magic
+                    )))
+                }
+            }
+            fat::FAT_MAGIC => {
+                // should probably verify this is always Big Endian...
+                let narchitectures = bytes.pread_with::<u32>(4, BE)? as usize;
+                Ok(crate::Hint::MachFat(narchitectures))
+            }
+            _ => Ok(crate::Hint::Unknown(bytes.pread::<u64>(0)?)),
+        }
+    }
+}
+
+fn extract_multi_entry(bytes: &[u8]) -> error::Result<SingleArch> {
+    if let Some(hint_bytes) = take_hint_bytes(bytes) {
+        match peek_bytes(hint_bytes)? {
+            crate::Hint::Mach(_) => {
+                let binary = MachO::parse(bytes, 0)?;
+                Ok(SingleArch::MachO(binary))
+            }
+            crate::Hint::Archive => {
+                let archive = archive::Archive::parse(bytes)?;
+                Ok(SingleArch::Archive(archive))
+            }
+            _ => Err(error::Error::Malformed(format!(
+                "multi-arch entry must be a Mach-O binary or an archive"
+            ))),
+        }
+    } else {
+        Err(error::Error::Malformed(format!("Object is too small")))
+    }
+}
+
+impl<'a> Iterator for SingleArchIterator<'a> {
+    type Item = error::Result<SingleArch<'a>>;
     fn next(&mut self) -> Option<Self::Item> {
         if self.index >= self.narches {
             None

@@ -333,8 +391,7 @@ impl<'a> Iterator for MachOIterator<'a> {
             match self.data.pread_with::<fat::FatArch>(offset, scroll::BE) {
                 Ok(arch) => {
                     let bytes = arch.slice(self.data);
-                    let binary = MachO::parse(bytes, 0);
-                    Some(binary)
+                    Some(extract_multi_entry(bytes))
                 }
                 Err(e) => Some(Err(e.into())),
             }

@@ -343,10 +400,10 @@
 }

 impl<'a, 'b> IntoIterator for &'b MultiArch<'a> {
-    type Item = error::Result<MachO<'a>>;
-    type IntoIter = MachOIterator<'a>;
+    type Item = error::Result<SingleArch<'a>>;
+    type IntoIter = SingleArchIterator<'a>;
     fn into_iter(self) -> Self::IntoIter {
-        MachOIterator {
+        SingleArchIterator {
             index: 0,
             data: self.data,
             narches: self.narches,

@@ -387,7 +444,7 @@ impl<'a> MultiArch<'a> {
         Ok(arches)
     }
     /// Try to get the Mach-o binary at `index`
-    pub fn get(&self, index: usize) -> error::Result<MachO<'a>> {
+    pub fn get(&self, index: usize) -> error::Result<SingleArch<'a>> {
         if index >= self.narches {
             return Err(error::Error::Malformed(format!(
                 "Requested the {}-th binary, but there are only {} architectures in this container",

@@ -397,13 +454,13 @@ impl<'a> MultiArch<'a> {
         let offset = (index * fat::SIZEOF_FAT_ARCH) + self.start;
         let arch = self.data.pread_with::<fat::FatArch>(offset, scroll::BE)?;
         let bytes = arch.slice(self.data);
-        Ok(MachO::parse(bytes, 0)?)
+        extract_multi_entry(bytes)
     }

     pub fn find<F: Fn(error::Result<fat::FatArch>) -> bool>(
         &'a self,
         f: F,
-    ) -> Option<error::Result<MachO<'a>>> {
+    ) -> Option<error::Result<SingleArch<'a>>> {
         for (i, arch) in self.iter_arches().enumerate() {
             if f(arch) {
                 return Some(self.get(i));

@@ -464,3 +521,66 @@ impl<'a> Mach<'a> {
         }
     }
 }
+
+#[cfg(test)]
+mod test {
+    use super::{Mach, SingleArch};
+
+    #[test]
+    fn parse_multi_arch_of_macho_binaries() {
+        // Create via:
+        // clang -arch arm64 -shared -o /tmp/hello_world_arm hello_world.c
+        // clang -arch x86_64 -shared -o /tmp/hello_world_x86_64 hello_world.c
+        // lipo -create -output hello_world_fat_binaries /tmp/hello_world_arm /tmp/hello_world_x86_64
+        // strip hello_world_fat_binaries
+        let bytes = include_bytes!(concat!(
+            env!("CARGO_MANIFEST_DIR"),
+            "/assets/hello_world_fat_binaries"
+        ));
+        let mach = Mach::parse(bytes).expect("failed to parse input file");
+        match mach {
+            Mach::Fat(fat) => {
+                assert!(fat.into_iter().count() > 0);
+                for entry in fat.into_iter() {
+                    let entry = entry.expect("failed to read entry");
+                    match entry {
+                        SingleArch::MachO(macho) => {
+                            assert!(macho.symbols().count() > 0);
+                        }
+                        _ => panic!("expected MultiArchEntry::MachO, got {:?}", entry),
+                    }
+                }
+            }
+            Mach::Binary(_) => panic!("expected Mach::Fat, got Mach::Binary"),
+        }
+    }
+
+    #[test]
+    fn parse_multi_arch_of_archives() {
+        // Created with:
+        // clang -c -o /tmp/hello_world.o hello_world.c
+        // ar -r /tmp/hello_world.a /tmp/hello_world.o
+        // lipo -create -output hello_world_fat_archives /tmp/hello_world.a
+        // strip hello_world_fat_archives
+        let bytes = include_bytes!(concat!(
+            env!("CARGO_MANIFEST_DIR"),
+            "/assets/hello_world_fat_archives"
+        ));
+        let mach = Mach::parse(bytes).expect("failed to parse input file");
+        match mach {
+            Mach::Fat(fat) => {
+                assert!(fat.into_iter().count() > 0);
+                for entry in fat.into_iter() {
+                    let entry = entry.expect("failed to read entry");
+                    match entry {
+                        SingleArch::Archive(archive) => {
+                            assert!(!archive.members().is_empty())
+                        }
+                        _ => panic!("expected MultiArchEntry::Archive, got {:?}", entry),
+                    }
+                }
+            }
+            Mach::Binary(_) => panic!("expected Mach::Fat, got Mach::Binary"),
+        }
+    }
+}

@@ -314,8 +314,8 @@ impl ctx::TryIntoCtx<container::Ctx> for Nlist {
         container::Ctx { container, le }: container::Ctx,
     ) -> Result<usize, Self::Error> {
         let size = match container {
-            Container::Little => (bytes.pwrite_with::<Nlist32>(self.into(), 0, le)?),
-            Container::Big => (bytes.pwrite_with::<Nlist64>(self.into(), 0, le)?),
+            Container::Little => bytes.pwrite_with::<Nlist32>(self.into(), 0, le)?,
+            Container::Big => bytes.pwrite_with::<Nlist64>(self.into(), 0, le)?,
         };
         Ok(size)
     }

@@ -169,13 +169,18 @@ impl<'a> CodeviewPDB70DebugInfo<'a> {
         let mut signature: [u8; 16] = [0; 16];
         signature.copy_from_slice(bytes.gread_with(&mut offset, 16)?);
         let age: u32 = bytes.gread_with(&mut offset, scroll::LE)?;
-        let filename = &bytes[offset..offset + filename_length];
-
-        Ok(Some(CodeviewPDB70DebugInfo {
-            codeview_signature,
-            signature,
-            age,
-            filename,
-        }))
+        if let Some(filename) = bytes.get(offset..offset + filename_length) {
+            Ok(Some(CodeviewPDB70DebugInfo {
+                codeview_signature,
+                signature,
+                age,
+                filename,
+            }))
+        } else {
+            Err(error::Error::Malformed(format!(
+                "ImageDebugDirectory seems corrupted: {:?}",
+                idd
+            )))
+        }
     }
 }

@@ -215,9 +215,41 @@ impl Header {
     }
 }

+/// Convert machine to str representation
+pub fn machine_to_str(machine: u16) -> &'static str {
+    match machine {
+        COFF_MACHINE_UNKNOWN => "UNKNOWN",
+        COFF_MACHINE_AM33 => "AM33",
+        COFF_MACHINE_X86_64 => "X86_64",
+        COFF_MACHINE_ARM => "ARM",
+        COFF_MACHINE_ARM64 => "ARM64",
+        COFF_MACHINE_ARMNT => "ARM_NT",
+        COFF_MACHINE_EBC => "EBC",
+        COFF_MACHINE_X86 => "X86",
+        COFF_MACHINE_IA64 => "IA64",
+        COFF_MACHINE_M32R => "M32R",
+        COFF_MACHINE_MIPS16 => "MIPS_16",
+        COFF_MACHINE_MIPSFPU => "MIPS_FPU",
+        COFF_MACHINE_MIPSFPU16 => "MIPS_FPU_16",
+        COFF_MACHINE_POWERPC => "POWERPC",
+        COFF_MACHINE_POWERPCFP => "POWERCFP",
+        COFF_MACHINE_R4000 => "R4000",
+        COFF_MACHINE_RISCV32 => "RISC-V_32",
+        COFF_MACHINE_RISCV64 => "RISC-V_64",
+        COFF_MACHINE_RISCV128 => "RISC-V_128",
+        COFF_MACHINE_SH3 => "SH3",
+        COFF_MACHINE_SH3DSP => "SH3DSP",
+        COFF_MACHINE_SH4 => "SH4",
+        COFF_MACHINE_SH5 => "SH5",
+        COFF_MACHINE_THUMB => "THUMB",
+        COFF_MACHINE_WCEMIPSV2 => "WCE_MIPS_V2",
+        _ => "COFF_UNKNOWN",
+    }
+}
+
 #[cfg(test)]
 mod tests {
-    use super::{Header, COFF_MACHINE_X86, DOS_MAGIC, PE_MAGIC};
+    use super::{machine_to_str, Header, COFF_MACHINE_X86, DOS_MAGIC, PE_MAGIC};

     const CRSS_HEADER: [u8; 688] = [
         0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00,

@@ -274,6 +306,7 @@ mod tests {
         assert!(header.dos_header.signature == DOS_MAGIC);
         assert!(header.signature == PE_MAGIC);
         assert!(header.coff_header.machine == COFF_MACHINE_X86);
+        assert!(machine_to_str(header.coff_header.machine) == "X86");
         println!("header: {:?}", &header);
     }
 }

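`machine_to_str` is the new public helper exercised by the test above. A short usage sketch against the goblin 0.6 PE API; the `describe_pe` wrapper is illustrative only:

```rust
use goblin::pe::{header::machine_to_str, PE};

// Illustrative wrapper, not part of goblin or this patch.
fn describe_pe(data: &[u8]) -> goblin::error::Result<()> {
    let pe = PE::parse(data)?;
    // Maps the COFF machine value to a readable name, e.g. "X86", "X86_64", "ARM64".
    println!("machine: {}", machine_to_str(pe.header.coff_header.machine));
    Ok(())
}
```
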
@@ -54,7 +54,7 @@ features = [
 version = "2.7.0"

 [dependencies.goblin]
-version = "0.5"
+version = "0.6"

 [dependencies.heck]
 version = "0.4"

@@ -8,7 +8,7 @@ use fs_err as fs;
 use goblin::{
     archive::Archive,
     elf::Elf,
-    mach::{segment::Section, symbols, Mach, MachO},
+    mach::{segment::Section, symbols, Mach, MachO, SingleArch},
     pe::PE,
     Object,
 };

@@ -76,7 +76,10 @@ pub fn extract_from_mach(mach: Mach<'_>, file_data: &[u8]) -> anyhow::Result<Vec
     match mach {
         Mach::Binary(macho) => extract_from_macho(macho, file_data),
         // Multi-binary library, just extract the first one
-        Mach::Fat(multi_arch) => extract_from_macho(multi_arch.get(0)?, file_data),
+        Mach::Fat(multi_arch) => match multi_arch.get(0)? {
+            SingleArch::MachO(macho) => extract_from_macho(macho, file_data),
+            SingleArch::Archive(archive) => extract_from_archive(archive, file_data),
+        },
     }
 }