Bug 1669668 - mach vendor rust. r=rhunt

Differential Revision: https://phabricator.services.mozilla.com/D92755
Author: Lars T Hansen, 2020-10-08 09:18:49 +00:00
Parent: 14a59af661
Commit: 3b3f2d9de2
14 changed files, 298 additions and 214 deletions

Cargo.lock (generated): 8 changed lines

@ -5567,18 +5567,18 @@ checksum = "57da5d7300428d75d8b3cdfb736e41ee6af8926d69c1de2f201a1a22f234b7b5"
[[package]]
name = "wast"
version = "23.0.0"
version = "25.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b080a48623c1b15193eac2e28c7b8d0e6b2e1c6c67ed46ddcd86063e78e504"
checksum = "94c7c4b9eeb5c233b89385147dd3ea5bb89988a3a8d256f116cf1db40e5ff43e"
dependencies = [
"leb128",
]
[[package]]
name = "wat"
version = "1.0.24"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c350d7431aa486488d28cdf75b57d59c02fab9cde20d93c52424510afe18ecc"
checksum = "4766d466249e23279e92c52033429eb91141c5efea1c4478138fa6f6ef4efe3e"
dependencies = [
"wast",
]

third_party/rust/wast/.cargo-checksum.json (vendored): 2 changed lines

File diff hidden because one or more lines are too long.

third_party/rust/wast/Cargo.toml (vendored): 2 changed lines

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "wast"
version = "23.0.0"
version = "25.0.1"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "Customizable Rust parsers for the WebAssembly Text formats WAT and WAST\n"
homepage = "https://github.com/bytecodealliance/wasm-tools/tree/main/crates/wast"

third_party/rust/wast/src/ast/expr.rs (vendored): 123 changed lines

@ -10,7 +10,7 @@ use std::mem;
#[derive(Debug)]
#[allow(missing_docs)]
pub struct Expression<'a> {
pub instrs: Vec<Instruction<'a>>,
pub instrs: Box<[Instruction<'a>]>,
}
impl<'a> Parse<'a> for Expression<'a> {
@ -111,7 +111,6 @@ impl<'a> ExpressionParser<'a> {
}
}
match self.paren(parser)? {
// No parenthesis seen? Then we just parse the next instruction
// and move on.
@ -207,7 +206,7 @@ impl<'a> ExpressionParser<'a> {
}
Ok(Expression {
instrs: self.instrs,
instrs: self.instrs.into(),
})
}
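
Downstream constructors of `Expression` are affected by the switch above from `Vec<Instruction>` to `Box<[Instruction]>`: a hand-built instruction list is converted at the end, just as this commit does in the resolver further down. A minimal sketch, assuming the AST types are re-exported at the crate root as in the vendored `wast`:

    use wast::{Expression, Instruction};

    // Build the list as a Vec, then convert it into the boxed slice that
    // `Expression::instrs` now stores.
    fn const_zero<'a>() -> Expression<'a> {
        let instrs = vec![Instruction::I32Const(0)];
        Expression { instrs: instrs.into() }
    }
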
@ -789,21 +788,21 @@ instructions! {
// proposal: simd
V128Load(MemArg<16>) : [0xfd, 0x00] : "v128.load",
I16x8Load8x8S(MemArg<8>) : [0xfd, 0x01] : "i16x8.load8x8_s",
I16x8Load8x8U(MemArg<8>) : [0xfd, 0x02] : "i16x8.load8x8_u",
I32x4Load16x4S(MemArg<8>) : [0xfd, 0x03] : "i32x4.load16x4_s",
I32x4Load16x4U(MemArg<8>) : [0xfd, 0x04] : "i32x4.load16x4_u",
I64x2Load32x2S(MemArg<8>) : [0xfd, 0x05] : "i64x2.load32x2_s",
I64x2Load32x2U(MemArg<8>) : [0xfd, 0x06] : "i64x2.load32x2_u",
V8x16LoadSplat(MemArg<1>) : [0xfd, 0x07] : "v8x16.load_splat",
V16x8LoadSplat(MemArg<2>) : [0xfd, 0x08] : "v16x8.load_splat",
V32x4LoadSplat(MemArg<4>) : [0xfd, 0x09] : "v32x4.load_splat",
V64x2LoadSplat(MemArg<8>) : [0xfd, 0x0a] : "v64x2.load_splat",
V128Load8x8S(MemArg<8>) : [0xfd, 0x01] : "v128.load8x8_s",
V128Load8x8U(MemArg<8>) : [0xfd, 0x02] : "v128.load8x8_u",
V128Load16x4S(MemArg<8>) : [0xfd, 0x03] : "v128.load16x4_s",
V128Load16x4U(MemArg<8>) : [0xfd, 0x04] : "v128.load16x4_u",
V128Load32x2S(MemArg<8>) : [0xfd, 0x05] : "v128.load32x2_s",
V128Load32x2U(MemArg<8>) : [0xfd, 0x06] : "v128.load32x2_u",
V128Load8Splat(MemArg<1>) : [0xfd, 0x07] : "v128.load8_splat",
V128Load16Splat(MemArg<2>) : [0xfd, 0x08] : "v128.load16_splat",
V128Load32Splat(MemArg<4>) : [0xfd, 0x09] : "v128.load32_splat",
V128Load64Splat(MemArg<8>) : [0xfd, 0x0a] : "v128.load64_splat",
V128Store(MemArg<16>) : [0xfd, 0x0b] : "v128.store",
V128Const(V128Const) : [0xfd, 0x0c] : "v128.const",
V8x16Shuffle(V8x16Shuffle) : [0xfd, 0x0d] : "v8x16.shuffle",
V8x16Swizzle : [0xfd, 0x0e] : "v8x16.swizzle",
I8x16Shuffle(I8x16Shuffle) : [0xfd, 0x0d] : "i8x16.shuffle",
I8x16Swizzle : [0xfd, 0x0e] : "i8x16.swizzle",
I8x16Splat : [0xfd, 0x0f] : "i8x16.splat",
I16x8Splat : [0xfd, 0x10] : "i16x8.splat",
@ -812,20 +811,20 @@ instructions! {
F32x4Splat : [0xfd, 0x13] : "f32x4.splat",
F64x2Splat : [0xfd, 0x14] : "f64x2.splat",
I8x16ExtractLaneS(u8) : [0xfd, 0x15] : "i8x16.extract_lane_s",
I8x16ExtractLaneU(u8) : [0xfd, 0x16] : "i8x16.extract_lane_u",
I8x16ReplaceLane(u8) : [0xfd, 0x17] : "i8x16.replace_lane",
I16x8ExtractLaneS(u8) : [0xfd, 0x18] : "i16x8.extract_lane_s",
I16x8ExtractLaneU(u8) : [0xfd, 0x19] : "i16x8.extract_lane_u",
I16x8ReplaceLane(u8) : [0xfd, 0x1a] : "i16x8.replace_lane",
I32x4ExtractLane(u8) : [0xfd, 0x1b] : "i32x4.extract_lane",
I32x4ReplaceLane(u8) : [0xfd, 0x1c] : "i32x4.replace_lane",
I64x2ExtractLane(u8) : [0xfd, 0x1d] : "i64x2.extract_lane",
I64x2ReplaceLane(u8) : [0xfd, 0x1e] : "i64x2.replace_lane",
F32x4ExtractLane(u8) : [0xfd, 0x1f] : "f32x4.extract_lane",
F32x4ReplaceLane(u8) : [0xfd, 0x20] : "f32x4.replace_lane",
F64x2ExtractLane(u8) : [0xfd, 0x21] : "f64x2.extract_lane",
F64x2ReplaceLane(u8) : [0xfd, 0x22] : "f64x2.replace_lane",
I8x16ExtractLaneS(LaneArg) : [0xfd, 0x15] : "i8x16.extract_lane_s",
I8x16ExtractLaneU(LaneArg) : [0xfd, 0x16] : "i8x16.extract_lane_u",
I8x16ReplaceLane(LaneArg) : [0xfd, 0x17] : "i8x16.replace_lane",
I16x8ExtractLaneS(LaneArg) : [0xfd, 0x18] : "i16x8.extract_lane_s",
I16x8ExtractLaneU(LaneArg) : [0xfd, 0x19] : "i16x8.extract_lane_u",
I16x8ReplaceLane(LaneArg) : [0xfd, 0x1a] : "i16x8.replace_lane",
I32x4ExtractLane(LaneArg) : [0xfd, 0x1b] : "i32x4.extract_lane",
I32x4ReplaceLane(LaneArg) : [0xfd, 0x1c] : "i32x4.replace_lane",
I64x2ExtractLane(LaneArg) : [0xfd, 0x1d] : "i64x2.extract_lane",
I64x2ReplaceLane(LaneArg) : [0xfd, 0x1e] : "i64x2.replace_lane",
F32x4ExtractLane(LaneArg) : [0xfd, 0x1f] : "f32x4.extract_lane",
F32x4ReplaceLane(LaneArg) : [0xfd, 0x20] : "f32x4.replace_lane",
F64x2ExtractLane(LaneArg) : [0xfd, 0x21] : "f64x2.extract_lane",
F64x2ReplaceLane(LaneArg) : [0xfd, 0x22] : "f64x2.replace_lane",
I8x16Eq : [0xfd, 0x23] : "i8x16.eq",
I8x16Ne : [0xfd, 0x24] : "i8x16.ne",
@ -889,11 +888,11 @@ instructions! {
I8x16ShrS : [0xfd, 0x6c] : "i8x16.shr_s",
I8x16ShrU : [0xfd, 0x6d] : "i8x16.shr_u",
I8x16Add : [0xfd, 0x6e] : "i8x16.add",
I8x16AddSaturateS : [0xfd, 0x6f] : "i8x16.add_saturate_s",
I8x16AddSaturateU : [0xfd, 0x70] : "i8x16.add_saturate_u",
I8x16AddSatS : [0xfd, 0x6f] : "i8x16.add_sat_s",
I8x16AddSatU : [0xfd, 0x70] : "i8x16.add_sat_u",
I8x16Sub : [0xfd, 0x71] : "i8x16.sub",
I8x16SubSaturateS : [0xfd, 0x72] : "i8x16.sub_saturate_s",
I8x16SubSaturateU : [0xfd, 0x73] : "i8x16.sub_saturate_u",
I8x16SubSatS : [0xfd, 0x72] : "i8x16.sub_sat_s",
I8x16SubSatU : [0xfd, 0x73] : "i8x16.sub_sat_u",
I8x16MinS : [0xfd, 0x76] : "i8x16.min_s",
I8x16MinU : [0xfd, 0x77] : "i8x16.min_u",
I8x16MaxS : [0xfd, 0x78] : "i8x16.max_s",
@ -915,11 +914,11 @@ instructions! {
I16x8ShrS : [0xfd, 0x8c] : "i16x8.shr_s",
I16x8ShrU : [0xfd, 0x8d] : "i16x8.shr_u",
I16x8Add : [0xfd, 0x8e] : "i16x8.add",
I16x8AddSaturateS : [0xfd, 0x8f] : "i16x8.add_saturate_s",
I16x8AddSaturateU : [0xfd, 0x90] : "i16x8.add_saturate_u",
I16x8AddSatS : [0xfd, 0x8f] : "i16x8.add_sat_s",
I16x8AddSatU : [0xfd, 0x90] : "i16x8.add_sat_u",
I16x8Sub : [0xfd, 0x91] : "i16x8.sub",
I16x8SubSaturateS : [0xfd, 0x92] : "i16x8.sub_saturate_s",
I16x8SubSaturateU : [0xfd, 0x93] : "i16x8.sub_saturate_u",
I16x8SubSatS : [0xfd, 0x92] : "i16x8.sub_sat_s",
I16x8SubSatU : [0xfd, 0x93] : "i16x8.sub_sat_u",
I16x8Mul : [0xfd, 0x95] : "i16x8.mul",
I16x8MinS : [0xfd, 0x96] : "i16x8.min_s",
I16x8MinU : [0xfd, 0x97] : "i16x8.min_u",
@ -941,12 +940,12 @@ instructions! {
I32x4ShrU : [0xfd, 0xad] : "i32x4.shr_u",
I32x4Add : [0xfd, 0xae] : "i32x4.add",
I32x4Sub : [0xfd, 0xb1] : "i32x4.sub",
I32x4DotI16x8S : [0xfd, 0xb4] : "i32x4.dot_i8x16_s",
I32x4Mul : [0xfd, 0xb5] : "i32x4.mul",
I32x4MinS : [0xfd, 0xb6] : "i32x4.min_s",
I32x4MinU : [0xfd, 0xb7] : "i32x4.min_u",
I32x4MaxS : [0xfd, 0xb8] : "i32x4.max_s",
I32x4MaxU : [0xfd, 0xb9] : "i32x4.max_u",
I32x4DotI16x8S : [0xfd, 0xba] : "i32x4.dot_i8x16_s",
I64x2Neg : [0xfd, 0xc1] : "i64x2.neg",
I64x2Shl : [0xfd, 0xcb] : "i64x2.shl",
@ -956,6 +955,15 @@ instructions! {
I64x2Sub : [0xfd, 0xd1] : "i64x2.sub",
I64x2Mul : [0xfd, 0xd5] : "i64x2.mul",
F32x4Ceil : [0xfd, 0xd8] : "f32x4.ceil",
F32x4Floor : [0xfd, 0xd9] : "f32x4.floor",
F32x4Trunc : [0xfd, 0xda] : "f32x4.trunc",
F32x4Nearest : [0xfd, 0xdb] : "f32x4.nearest",
F64x2Ceil : [0xfd, 0xdc] : "f64x2.ceil",
F64x2Floor : [0xfd, 0xdd] : "f64x2.floor",
F64x2Trunc : [0xfd, 0xde] : "f64x2.trunc",
F64x2Nearest : [0xfd, 0xdf] : "f64x2.nearest",
F32x4Abs : [0xfd, 0xe0] : "f32x4.abs",
F32x4Neg : [0xfd, 0xe1] : "f32x4.neg",
F32x4Sqrt : [0xfd, 0xe3] : "f32x4.sqrt",
@ -965,6 +973,8 @@ instructions! {
F32x4Div : [0xfd, 0xe7] : "f32x4.div",
F32x4Min : [0xfd, 0xe8] : "f32x4.min",
F32x4Max : [0xfd, 0xe9] : "f32x4.max",
F32x4PMin : [0xfd, 0xea] : "f32x4.pmin",
F32x4PMax : [0xfd, 0xeb] : "f32x4.pmax",
F64x2Abs : [0xfd, 0xec] : "f64x2.abs",
F64x2Neg : [0xfd, 0xed] : "f64x2.neg",
@ -975,6 +985,8 @@ instructions! {
F64x2Div : [0xfd, 0xf3] : "f64x2.div",
F64x2Min : [0xfd, 0xf4] : "f64x2.min",
F64x2Max : [0xfd, 0xf5] : "f64x2.max",
F64x2PMin : [0xfd, 0xf6] : "f64x2.pmin",
F64x2PMax : [0xfd, 0xf7] : "f64x2.pmax",
I32x4TruncSatF32x4S : [0xfd, 0xf8] : "i32x4.trunc_sat_f32x4_s",
I32x4TruncSatF32x4U : [0xfd, 0xf9] : "i32x4.trunc_sat_f32x4_u",
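
The table above tracks the renamed SIMD text mnemonics (the `v8x16.*`/`v16x8.*`/`v32x4.*`/`v64x2.*` load-splat forms become `v128.loadN_splat`, `*_saturate_*` becomes `*_sat_*`, and shuffle/swizzle move to `i8x16.*`) and adds the new `pmin`/`pmax` and float rounding instructions. A small sanity check against the text format, as a sketch that assumes a dependency on the vendored `wat` crate:

    // `wat::parse_str` parses WAT text and returns the encoded binary; the
    // renamed mnemonic below is only recognized by the updated crate.
    fn main() {
        let wasm = wat::parse_str(
            "(module
               (memory 1)
               (func (result v128)
                 i32.const 0
                 v128.load32_splat))",
        )
        .expect("renamed SIMD mnemonic should parse");
        assert!(wasm.starts_with(b"\0asm"));
    }
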
@ -1066,6 +1078,33 @@ impl<'a> Parse<'a> for BrTableIndices<'a> {
}
}
/// Payload for lane-related instructions. Unsigned with no + prefix.
#[derive(Debug)]
pub struct LaneArg {
/// The lane argument.
pub lane: u8,
}
impl<'a> Parse<'a> for LaneArg {
fn parse(parser: Parser<'a>) -> Result<Self> {
let lane = parser.step(|c| {
if let Some((i, rest)) = c.integer() {
if i.sign() == None {
let (src, radix) = i.val();
let val = u8::from_str_radix(src, radix)
.map_err(|_| c.error("malformed lane index"))?;
Ok((val, rest))
} else {
Err(c.error("unexpected token"))
}
} else {
Err(c.error("expected a lane index"))
}
})?;
Ok(LaneArg { lane })
}
}
/// Payload for memory-related instructions indicating offset/alignment of
/// memory accesses.
#[derive(Debug)]
@ -1480,16 +1519,16 @@ impl<'a> Parse<'a> for V128Const {
}
}
/// Lanes being shuffled in the `v8x16.shuffle` instruction
/// Lanes being shuffled in the `i8x16.shuffle` instruction
#[derive(Debug)]
pub struct V8x16Shuffle {
pub struct I8x16Shuffle {
#[allow(missing_docs)]
pub lanes: [u8; 16],
}
impl<'a> Parse<'a> for V8x16Shuffle {
impl<'a> Parse<'a> for I8x16Shuffle {
fn parse(parser: Parser<'a>) -> Result<Self> {
Ok(V8x16Shuffle {
Ok(I8x16Shuffle {
lanes: [
parser.parse()?,
parser.parse()?,
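
At the AST level the same rename shows up as `V8x16Shuffle` becoming `I8x16Shuffle`, and lane immediates are now carried by the new `LaneArg` struct instead of a bare `u8`. A hypothetical construction, assuming these types are re-exported at the `wast` crate root:

    use wast::{Instruction, LaneArg};

    // Lane immediates are wrapped in `LaneArg` rather than being a raw `u8`.
    fn extract_lane_3<'a>() -> Instruction<'a> {
        Instruction::I8x16ExtractLaneS(LaneArg { lane: 3 })
    }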

third_party/rust/wast/src/ast/types.rs (vendored): 35 changed lines

@ -1,5 +1,6 @@
use crate::ast::{self, kw};
use crate::parser::{Cursor, Parse, Parser, Peek, Result};
use std::mem;
/// The value types for a wasm module.
#[allow(missing_docs)]
@ -411,22 +412,26 @@ impl<'a> Parse<'a> for MemoryType {
pub struct FunctionType<'a> {
/// The parameters of a function, optionally each having an identifier for
/// name resolution and a name for the custom `name` section.
pub params: Vec<(
Option<ast::Id<'a>>,
Option<ast::NameAnnotation<'a>>,
ValType<'a>,
)>,
pub params: Box<
[(
Option<ast::Id<'a>>,
Option<ast::NameAnnotation<'a>>,
ValType<'a>,
)],
>,
/// The results types of a function.
pub results: Vec<ValType<'a>>,
pub results: Box<[ValType<'a>]>,
}
impl<'a> FunctionType<'a> {
fn finish_parse(&mut self, allow_names: bool, parser: Parser<'a>) -> Result<()> {
let mut params = Vec::from(mem::take(&mut self.params));
let mut results = Vec::from(mem::take(&mut self.results));
while parser.peek2::<kw::param>() || parser.peek2::<kw::result>() {
parser.parens(|p| {
let mut l = p.lookahead1();
if l.peek::<kw::param>() {
if self.results.len() > 0 {
if results.len() > 0 {
return Err(p.error(
"result before parameter (or unexpected token): \
cannot list params after results",
@ -443,14 +448,14 @@ impl<'a> FunctionType<'a> {
};
let parse_more = id.is_none() && name.is_none();
let ty = p.parse()?;
self.params.push((id, name, ty));
params.push((id, name, ty));
while parse_more && !p.is_empty() {
self.params.push((None, None, p.parse()?));
params.push((None, None, p.parse()?));
}
} else if l.peek::<kw::result>() {
p.parse::<kw::result>()?;
while !p.is_empty() {
self.results.push(p.parse()?);
results.push(p.parse()?);
}
} else {
return Err(l.error());
@ -458,6 +463,8 @@ impl<'a> FunctionType<'a> {
Ok(())
})?;
}
self.params = params.into();
self.results = results.into();
Ok(())
}
}
@ -465,8 +472,8 @@ impl<'a> FunctionType<'a> {
impl<'a> Parse<'a> for FunctionType<'a> {
fn parse(parser: Parser<'a>) -> Result<Self> {
let mut ret = FunctionType {
params: Vec::new(),
results: Vec::new(),
params: Box::new([]),
results: Box::new([]),
};
ret.finish_parse(true, parser)?;
Ok(ret)
@ -497,8 +504,8 @@ pub struct FunctionTypeNoNames<'a>(pub FunctionType<'a>);
impl<'a> Parse<'a> for FunctionTypeNoNames<'a> {
fn parse(parser: Parser<'a>) -> Result<Self> {
let mut ret = FunctionType {
params: Vec::new(),
results: Vec::new(),
params: Box::new([]),
results: Box::new([]),
};
ret.finish_parse(false, parser)?;
Ok(FunctionTypeNoNames(ret))
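
For readers of the AST the switch to boxed slices is mostly transparent, since `Box<[T]>` dereferences to a slice; only code that builds `FunctionType` by hand needs a conversion. A sketch under the assumption that `FunctionType` and `ValType` are re-exported at the crate root:

    // Reading goes through ordinary slice methods; construction now produces
    // boxed slices instead of Vecs.
    fn arity(ty: &wast::FunctionType<'_>) -> (usize, usize) {
        (ty.params.len(), ty.results.len())
    }

    fn i32_to_i32<'a>() -> wast::FunctionType<'a> {
        wast::FunctionType {
            params: Box::new([(None, None, wast::ValType::I32)]),
            results: vec![wast::ValType::I32].into(),
        }
    }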

third_party/rust/wast/src/binary.rs (vendored): 8 changed lines

@ -851,6 +851,12 @@ impl Encode for LetType<'_> {
}
}
impl Encode for LaneArg {
fn encode(&self, e: &mut Vec<u8>) {
self.lane.encode(e);
}
}
impl Encode for MemArg<'_> {
fn encode(&self, e: &mut Vec<u8>) {
match self.memory {
@ -1070,7 +1076,7 @@ impl Encode for V128Const {
}
}
impl Encode for V8x16Shuffle {
impl Encode for I8x16Shuffle {
fn encode(&self, dst: &mut Vec<u8>) {
dst.extend_from_slice(&self.lanes);
}

third_party/rust/wast/src/lexer.rs (vendored): 205 changed lines

@ -48,31 +48,24 @@ pub struct Lexer<'a> {
/// whitespace. For most cases you'll probably ignore these and simply look at
/// tokens.
#[derive(Debug, PartialEq)]
pub enum Source<'a> {
/// A fragment of source that is a comment, either a line or a block
/// comment.
Comment(Comment<'a>),
pub enum Token<'a> {
/// A line comment, preceded with `;;`
LineComment(&'a str),
/// A block comment, surrounded by `(;` and `;)`. Note that these can be
/// nested.
BlockComment(&'a str),
/// A fragment of source that represents whitespace.
Whitespace(&'a str),
/// A fragment of source that represents an actual s-expression token.
Token(Token<'a>),
}
/// The kinds of tokens that can be lexed for WAT s-expressions.
#[derive(Debug, PartialEq)]
pub enum Token<'a> {
/// A left-parenthesis, including the source text for where it comes from.
LParen(&'a str),
/// A right-parenthesis, including the source text for where it comes from.
RParen(&'a str),
/// A string literal, which is actually a list of bytes.
String {
/// The list of bytes that this string literal represents.
val: Cow<'a, [u8]>,
/// The original source text of this string literal.
src: &'a str,
},
String(WasmString<'a>),
/// An identifier (like `$foo`).
///
@ -96,21 +89,6 @@ pub enum Token<'a> {
Float(Float<'a>),
}
/// The types of comments that can be lexed from WAT source text, including the
/// original text of the comment itself.
///
/// Note that the original text here includes the symbols for the comment
/// itself.
#[derive(Debug, PartialEq)]
pub enum Comment<'a> {
/// A line comment, preceded with `;;`
Line(&'a str),
/// A block comment, surrounded by `(;` and `;)`. Note that these can be
/// nested.
Block(&'a str),
}
/// Errors that can be generated while lexing.
///
/// All lexing errors have line/colum/position information as well as a
@ -166,11 +144,24 @@ pub enum LexError {
__Nonexhaustive,
}
/// A sign token for an integer.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SignToken {
/// Plus sign: "+",
Plus,
/// Minus sign: "-",
Minus,
}
/// A parsed integer, signed or unsigned.
///
/// Methods can be use to access the value of the integer.
#[derive(Debug, PartialEq)]
pub struct Integer<'a> {
pub struct Integer<'a>(Box<IntegerInner<'a>>);
#[derive(Debug, PartialEq)]
struct IntegerInner<'a> {
sign: Option<SignToken>,
src: &'a str,
val: Cow<'a, str>,
hex: bool,
@ -180,11 +171,24 @@ pub struct Integer<'a> {
///
/// Methods can be use to access the value of the float.
#[derive(Debug, PartialEq)]
pub struct Float<'a> {
pub struct Float<'a>(Box<FloatInner<'a>>);
#[derive(Debug, PartialEq)]
struct FloatInner<'a> {
src: &'a str,
val: FloatVal<'a>,
}
/// A parsed string.
#[derive(Debug, PartialEq)]
pub struct WasmString<'a>(Box<WasmStringInner<'a>>);
#[derive(Debug, PartialEq)]
struct WasmStringInner<'a> {
src: &'a str,
val: Cow<'a, [u8]>,
}
/// Possible parsed float values
#[derive(Debug, PartialEq)]
pub enum FloatVal<'a> {
@ -236,15 +240,15 @@ impl<'a> Lexer<'a> {
/// # Errors
///
/// Returns an error if the input is malformed.
pub fn parse(&mut self) -> Result<Option<Source<'a>>, Error> {
pub fn parse(&mut self) -> Result<Option<Token<'a>>, Error> {
if let Some(ws) = self.ws() {
return Ok(Some(Source::Whitespace(ws)));
return Ok(Some(Token::Whitespace(ws)));
}
if let Some(comment) = self.comment()? {
return Ok(Some(Source::Comment(comment)));
return Ok(Some(comment));
}
if let Some(token) = self.token()? {
return Ok(Some(Source::Token(token)));
return Ok(Some(token));
}
match self.it.next() {
Some((i, ch)) => Err(self.error(i, LexError::Unexpected(ch))),
@ -265,7 +269,10 @@ impl<'a> Lexer<'a> {
if let Some(pos) = self.eat_char('"') {
let val = self.string()?;
let src = &self.input[pos..self.cur()];
return Ok(Some(Token::String { val, src }));
return Ok(Some(Token::String(WasmString(Box::new(WasmStringInner {
val,
src,
})))));
}
let (start, prefix) = match self.it.peek().cloned() {
@ -299,28 +306,30 @@ impl<'a> Lexer<'a> {
}
fn number(&self, src: &'a str) -> Option<Token<'a>> {
let (negative, num) = if src.starts_with('+') {
(false, &src[1..])
let (sign, num) = if src.starts_with('+') {
(Some(SignToken::Plus), &src[1..])
} else if src.starts_with('-') {
(true, &src[1..])
(Some(SignToken::Minus), &src[1..])
} else {
(false, src)
(None, src)
};
let negative = sign == Some(SignToken::Minus);
// Handle `inf` and `nan` which are special numbers here
if num == "inf" {
return Some(Token::Float(Float {
return Some(Token::Float(Float(Box::new(FloatInner {
src,
val: FloatVal::Inf { negative },
}));
}))));
} else if num == "nan" {
return Some(Token::Float(Float {
return Some(Token::Float(Float(Box::new(FloatInner {
src,
val: FloatVal::Nan {
val: None,
negative,
},
}));
}))));
} else if num.starts_with("nan:0x") {
let mut it = num[6..].chars();
let to_parse = skip_undescores(&mut it, false, char::is_ascii_hexdigit)?;
@ -328,13 +337,13 @@ impl<'a> Lexer<'a> {
return None;
}
let n = u64::from_str_radix(&to_parse, 16).ok()?;
return Some(Token::Float(Float {
return Some(Token::Float(Float(Box::new(FloatInner {
src,
val: FloatVal::Nan {
val: Some(n),
negative,
},
}));
}))));
}
// Figure out if we're a hex number or not
@ -360,7 +369,14 @@ impl<'a> Lexer<'a> {
Some(_) => {}
// Otherwise this is a valid integer literal!
None => return Some(Token::Integer(Integer { src, val, hex })),
None => {
return Some(Token::Integer(Integer(Box::new(IntegerInner {
sign,
src,
val,
hex,
}))))
}
}
// A number can optionally be after the decimal so only actually try to
@ -402,7 +418,7 @@ impl<'a> Lexer<'a> {
return None;
}
return Some(Token::Float(Float {
return Some(Token::Float(Float(Box::new(FloatInner {
src,
val: FloatVal::Val {
hex,
@ -410,7 +426,7 @@ impl<'a> Lexer<'a> {
exponent,
decimal,
},
}));
}))));
fn skip_undescores<'a>(
it: &mut str::Chars<'a>,
@ -486,7 +502,7 @@ impl<'a> Lexer<'a> {
}
/// Attempts to read a comment from the input stream
fn comment(&mut self) -> Result<Option<Comment<'a>>, Error> {
fn comment(&mut self) -> Result<Option<Token<'a>>, Error> {
if let Some(start) = self.eat_str(";;") {
loop {
match self.it.peek() {
@ -495,7 +511,7 @@ impl<'a> Lexer<'a> {
}
}
let end = self.cur();
return Ok(Some(Comment::Line(&self.input[start..end])));
return Ok(Some(Token::LineComment(&self.input[start..end])));
}
if let Some(start) = self.eat_str("(;") {
let mut level = 1;
@ -507,7 +523,7 @@ impl<'a> Lexer<'a> {
level -= 1;
if level == 0 {
let end = self.cur();
return Ok(Some(Comment::Block(&self.input[start..end])));
return Ok(Some(Token::BlockComment(&self.input[start..end])));
}
}
}
@ -680,41 +696,23 @@ impl<'a> Lexer<'a> {
}
impl<'a> Iterator for Lexer<'a> {
type Item = Result<Source<'a>, Error>;
type Item = Result<Token<'a>, Error>;
fn next(&mut self) -> Option<Self::Item> {
self.parse().transpose()
}
}
impl<'a> Source<'a> {
/// Returns the original source text for this token.
pub fn src(&self) -> &'a str {
match self {
Source::Comment(c) => c.src(),
Source::Whitespace(s) => s,
Source::Token(t) => t.src(),
}
}
}
impl<'a> Comment<'a> {
/// Returns the original source text for this comment.
pub fn src(&self) -> &'a str {
match self {
Comment::Line(s) => s,
Comment::Block(s) => s,
}
}
}
impl<'a> Token<'a> {
/// Returns the original source text for this token.
pub fn src(&self) -> &'a str {
match self {
Token::Whitespace(s) => s,
Token::BlockComment(s) => s,
Token::LineComment(s) => s,
Token::LParen(s) => s,
Token::RParen(s) => s,
Token::String { src, .. } => src,
Token::String(s) => s.src(),
Token::Id(s) => s,
Token::Keyword(s) => s,
Token::Reserved(s) => s,
@ -725,28 +723,45 @@ impl<'a> Token<'a> {
}
impl<'a> Integer<'a> {
/// Returns the sign token for this integer.
pub fn sign(&self) -> Option<SignToken> {
self.0.sign
}
/// Returns the original source text for this integer.
pub fn src(&self) -> &'a str {
self.src
self.0.src
}
/// Returns the value string that can be parsed for this integer, as well as
/// the base that it should be parsed in
pub fn val(&self) -> (&str, u32) {
(&self.val, if self.hex { 16 } else { 10 })
(&self.0.val, if self.0.hex { 16 } else { 10 })
}
}
impl<'a> Float<'a> {
/// Returns the original source text for this integer.
pub fn src(&self) -> &'a str {
self.src
self.0.src
}
/// Returns a parsed value of this float with all of the components still
/// listed as strings.
pub fn val(&self) -> &FloatVal<'a> {
&self.val
&self.0.val
}
}
impl<'a> WasmString<'a> {
/// Returns the original source text for this string.
pub fn src(&self) -> &'a str {
self.0.src
}
/// Returns a parsed value, as a list of bytes, for this string.
pub fn val(&self) -> &[u8] {
&self.0.val
}
}
@ -826,7 +841,7 @@ mod tests {
fn ws_smoke() {
fn get_whitespace(input: &str) -> &str {
match Lexer::new(input).parse().expect("no first token") {
Some(Source::Whitespace(s)) => s,
Some(Token::Whitespace(s)) => s,
other => panic!("unexpected {:?}", other),
}
}
@ -841,7 +856,7 @@ mod tests {
fn line_comment_smoke() {
fn get_line_comment(input: &str) -> &str {
match Lexer::new(input).parse().expect("no first token") {
Some(Source::Comment(Comment::Line(s))) => s,
Some(Token::LineComment(s)) => s,
other => panic!("unexpected {:?}", other),
}
}
@ -856,7 +871,7 @@ mod tests {
fn block_comment_smoke() {
fn get_block_comment(input: &str) -> &str {
match Lexer::new(input).parse().expect("no first token") {
Some(Source::Comment(Comment::Block(s))) => s,
Some(Token::BlockComment(s)) => s,
other => panic!("unexpected {:?}", other),
}
}
@ -866,10 +881,10 @@ mod tests {
}
fn get_token(input: &str) -> Token<'_> {
match Lexer::new(input).parse().expect("no first token") {
Some(Source::Token(t)) => t,
other => panic!("unexpected {:?}", other),
}
Lexer::new(input)
.parse()
.expect("no first token")
.expect("no token")
}
#[test]
@ -884,11 +899,11 @@ mod tests {
#[test]
fn strings() {
fn get_string(input: &str) -> Cow<'_, [u8]> {
fn get_string(input: &str) -> Vec<u8> {
match get_token(input) {
Token::String { val, src } => {
assert_eq!(input, src);
val
Token::String(s) => {
assert_eq!(input, s.src());
s.val().to_vec()
}
other => panic!("not string {:?}", other),
}
@ -964,11 +979,11 @@ mod tests {
#[test]
fn integer() {
fn get_integer(input: &str) -> Cow<'_, str> {
fn get_integer(input: &str) -> String {
match get_token(input) {
Token::Integer(i) => {
assert_eq!(input, i.src());
i.val
i.val().0.to_string()
}
other => panic!("not integer {:?}", other),
}
@ -990,7 +1005,7 @@ mod tests {
match get_token(input) {
Token::Float(i) => {
assert_eq!(input, i.src());
i.val
i.0.val
}
other => panic!("not reserved {:?}", other),
}
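
With `Source` and `Comment` folded into a single flat `Token` enum, driving the lexer is one match over `Token` variants. A short sketch built from the API shown in this diff (`Lexer::new`, the `Iterator` impl, and `Token::src`):

    use wast::lexer::{Lexer, Token};

    fn dump(input: &str) {
        // The lexer yields `Result<Token, _>`; comments and whitespace now arrive
        // as ordinary `Token` variants instead of a separate `Source` layer.
        for token in Lexer::new(input) {
            match token.expect("lex error") {
                Token::LineComment(s) | Token::BlockComment(s) => println!("comment: {}", s),
                Token::Whitespace(_) => {}
                other => println!("token: {}", other.src()),
            }
        }
    }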

third_party/rust/wast/src/lib.rs (vendored): 13 changed lines

@ -140,6 +140,11 @@ impl Error {
}
}
/// Return the `Span` for this error.
pub fn span(&self) -> Span {
self.inner.span
}
/// To provide a more useful error this function can be used to extract
/// relevant textual information about this error into the error itself.
///
@ -172,6 +177,14 @@ impl Error {
_ => None,
}
}
/// Returns the underlying message, if any, that describes this error.
pub fn message(&self) -> String {
match &self.inner.kind {
ErrorKind::Lex(e) => e.to_string(),
ErrorKind::Custom(e) => e.clone(),
}
}
}
impl fmt::Display for Error {
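
The new `Error::span` and `Error::message` accessors let embedders report a location and message without going through `Display`. A minimal sketch; `Span::offset` and `Error::set_text` are assumed to behave as in the published `wast` crate:

    use wast::parser::{self, ParseBuffer};
    use wast::Wat;

    fn main() {
        let source = "(module (func (result i32) i32.const))"; // missing immediate
        let buf = ParseBuffer::new(source).expect("lexing should succeed");
        if let Err(mut err) = parser::parse::<Wat>(&buf) {
            // `span()` and `message()` are the accessors added in this update.
            let offset = err.span().offset();
            err.set_text(source); // attach the source so `Display` can show a snippet
            eprintln!("parse failed at byte {}: {}", offset, err.message());
            eprintln!("{}", err);
        }
    }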

third_party/rust/wast/src/parser.rs (vendored): 58 changed lines

@ -63,7 +63,7 @@
//! This module is heavily inspired by [`syn`](https://docs.rs/syn) so you can
//! likely also draw inspiration from the excellent examples in the `syn` crate.
use crate::lexer::{Comment, Float, Integer, Lexer, Source, Token};
use crate::lexer::{Float, Integer, Lexer, Token};
use crate::{Error, Span};
use std::cell::{Cell, RefCell};
use std::collections::HashMap;
@ -279,7 +279,7 @@ pub struct ParseBuffer<'a> {
// list of tokens from the tokenized source (including whitespace and
// comments), and the second element is how to skip this token, if it can be
// skipped.
tokens: Box<[(Source<'a>, Cell<NextTokenAt>)]>,
tokens: Box<[(Token<'a>, Cell<NextTokenAt>)]>,
input: &'a str,
cur: Cell<usize>,
known_annotations: RefCell<HashMap<String, usize>>,
@ -359,7 +359,6 @@ impl ParseBuffer<'_> {
// delimiters. This is required since while parsing we generally skip
// annotations and there's no real opportunity to return a parse error.
fn validate_annotations(&self) -> Result<()> {
use crate::lexer::Source::*;
use crate::lexer::Token::*;
enum State {
None,
@ -370,13 +369,13 @@ impl ParseBuffer<'_> {
for token in self.tokens.iter() {
state = match (&token.0, state) {
// From nothing, a `(` starts the search for an annotation
(Token(LParen(_)), State::None) => State::LParen,
(LParen(_), State::None) => State::LParen,
// ... otherwise in nothing we alwyas preserve that state.
(_, State::None) => State::None,
// If the previous state was an `LParen`, we may have an
// annotation if the next keyword is reserved
(Token(Reserved(s)), State::LParen) if s.starts_with("@") && s.len() > 0 => {
(Reserved(s), State::LParen) if s.starts_with("@") && s.len() > 0 => {
let offset = self.input_pos(s);
State::Annotation {
span: Span { offset },
@ -389,12 +388,12 @@ impl ParseBuffer<'_> {
// Once we're in an annotation we need to balance parentheses,
// so handle the depth changes.
(Token(LParen(_)), State::Annotation { span, depth }) => State::Annotation {
(LParen(_), State::Annotation { span, depth }) => State::Annotation {
span,
depth: depth + 1,
},
(Token(RParen(_)), State::Annotation { depth: 1, .. }) => State::None,
(Token(RParen(_)), State::Annotation { span, depth }) => State::Annotation {
(RParen(_), State::Annotation { depth: 1, .. }) => State::None,
(RParen(_), State::Annotation { span, depth }) => State::Annotation {
span,
depth: depth - 1,
},
@ -434,9 +433,8 @@ impl<'a> Parser<'a> {
self.buf.tokens[self.cursor().cur..]
.iter()
.any(|(t, _)| match t {
Source::Token(_) => true,
Source::Comment(_) => false,
Source::Whitespace(_) => false,
Token::Whitespace(_) | Token::LineComment(_) | Token::BlockComment(_) => false,
_ => true,
})
}
@ -1035,7 +1033,7 @@ impl<'a> Cursor<'a> {
/// unknown annotations.
pub fn string(mut self) -> Option<(&'a [u8], Self)> {
match self.advance_token()? {
Token::String { val, .. } => Some((&**val, self)),
Token::String(s) => Some((s.val(), self)),
_ => None,
}
}
@ -1066,26 +1064,27 @@ impl<'a> Cursor<'a> {
return None;
}
match &self.parser.buf.tokens.get(self.cur.wrapping_sub(1))?.0 {
Source::Token(Token::LParen(_)) => Some((&token[1..], cursor)),
Token::LParen(_) => Some((&token[1..], cursor)),
_ => None,
}
}
/// Attempts to advance this cursor if the current token is a
/// [`Source::Comment`](crate::lexer::Comment)
/// [`Token::LineComment`](crate::lexer::Token) or a
/// [`Token::BlockComment`](crate::lexer::Token)
///
/// This function will only skip whitespace, no other tokens.
pub fn comment(mut self) -> Option<(&'a Comment<'a>, Self)> {
pub fn comment(mut self) -> Option<(&'a str, Self)> {
let comment = loop {
match &self.parser.buf.tokens.get(self.cur)?.0 {
Source::Token(_) => return None,
Source::Comment(c) => {
Token::LineComment(c) | Token::BlockComment(c) => {
self.cur += 1;
break c;
}
Source::Whitespace(_) => {
Token::Whitespace(_) => {
self.cur += 1;
}
_ => return None,
}
};
Some((comment, self))
@ -1104,14 +1103,15 @@ impl<'a> Cursor<'a> {
// If we're currently pointing at a token, and it's not the start
// of an annotation, then we return that token and advance
// ourselves to just after that token.
if let Source::Token(t) = token {
match self.annotation_start() {
match token {
Token::Whitespace(_) | Token::LineComment(_) | Token::BlockComment(_) => {}
_ => match self.annotation_start() {
Some(n) if !is_known_annotation(n) => {}
_ => {
self.cur += 1;
return Some(t);
return Some(token);
}
}
},
}
// ... otherwise we need to skip the current token, and possibly
@ -1149,11 +1149,11 @@ impl<'a> Cursor<'a> {
fn annotation_start(&self) -> Option<&'a str> {
match self.parser.buf.tokens.get(self.cur).map(|p| &p.0) {
Some(Source::Token(Token::LParen(_))) => {}
Some(Token::LParen(_)) => {}
_ => return None,
}
let reserved = match self.parser.buf.tokens.get(self.cur + 1).map(|p| &p.0) {
Some(Source::Token(Token::Reserved(n))) => n,
Some(Token::Reserved(n)) => n,
_ => return None,
};
if reserved.starts_with("@") && reserved.len() > 1 {
@ -1179,8 +1179,8 @@ impl<'a> Cursor<'a> {
self.cur += 1;
while depth > 0 {
match &self.parser.buf.tokens.get(self.cur)?.0 {
Source::Token(Token::LParen(_)) => depth += 1,
Source::Token(Token::RParen(_)) => depth -= 1,
Token::LParen(_) => depth += 1,
Token::RParen(_) => depth -= 1,
_ => {}
}
self.cur += 1;
@ -1195,8 +1195,10 @@ impl<'a> Cursor<'a> {
// and otherwise we skip all comments/whitespace and otherwise
// get real intersted once a normal `Token` pops up.
match token {
Source::Token(_) => return Some(self.cur),
_ => self.cur += 1,
Token::Whitespace(_) | Token::LineComment(_) | Token::BlockComment(_) => {
self.cur += 1
}
_ => return Some(self.cur),
}
}
}

(vendored file, name not shown)

@ -90,11 +90,11 @@ pub fn run(fields: &mut Vec<ModuleField>) {
kind: DataKind::Active {
memory: Index::Id(id),
offset: Expression {
instrs: vec![if is_32 {
instrs: Box::new([if is_32 {
Instruction::I32Const(0)
} else {
Instruction::I64Const(0)
}],
}]),
},
},
data,
@ -153,7 +153,7 @@ pub fn run(fields: &mut Vec<ModuleField>) {
kind: ElemKind::Active {
table: Index::Id(id),
offset: Expression {
instrs: vec![Instruction::I32Const(0)],
instrs: Box::new([Instruction::I32Const(0)]),
},
},
payload,

third_party/rust/wast/src/resolve/names.rs (vendored): 38 changed lines

@ -702,11 +702,11 @@ impl<'a> Resolver<'a> {
key.0
.iter()
.map(|ty| self.copy_valtype_from_module(span, child, *ty))
.collect::<Result<Vec<_>, Error>>()?,
.collect::<Result<Box<[_]>, Error>>()?,
key.1
.iter()
.map(|ty| self.copy_valtype_from_module(span, child, *ty))
.collect::<Result<Vec<_>, Error>>()?,
.collect::<Result<Box<[_]>, Error>>()?,
);
Ok(Item::Func(self.modules[self.cur].key_to_idx(span, my_key)))
}
@ -1883,7 +1883,7 @@ impl<'a, 'b> ExprResolver<'a, 'b> {
// `End` and `Else` instructions if they have labels listed we
// verify that they match the label at the beginning of the block.
Else(_) | End(_) => {
let (matching_block, label) = match instr {
let (matching_block, label) = match &instr {
Else(label) => (self.blocks.last().cloned(), label),
End(label) => (self.blocks.pop(), label),
_ => unreachable!(),
@ -1895,7 +1895,9 @@ impl<'a, 'b> ExprResolver<'a, 'b> {
// Reset the local scopes to before this block was entered
if matching_block.pushed_scope {
self.scopes.pop();
if let End(_) = instr {
self.scopes.pop();
}
}
let label = match label {
@ -2065,16 +2067,16 @@ impl<'a, 'b> ExprResolver<'a, 'b> {
| I64AtomicRmw16CmpxchgU(m)
| I64AtomicRmw32CmpxchgU(m)
| V128Load(m)
| I16x8Load8x8S(m)
| I16x8Load8x8U(m)
| I32x4Load16x4S(m)
| I32x4Load16x4U(m)
| I64x2Load32x2S(m)
| I64x2Load32x2U(m)
| V8x16LoadSplat(m)
| V16x8LoadSplat(m)
| V32x4LoadSplat(m)
| V64x2LoadSplat(m)
| V128Load8x8S(m)
| V128Load8x8U(m)
| V128Load16x4S(m)
| V128Load16x4U(m)
| V128Load32x2S(m)
| V128Load32x2U(m)
| V128Load8Splat(m)
| V128Load16Splat(m)
| V128Load32Splat(m)
| V128Load64Splat(m)
| V128Store(m)
| MemoryAtomicNotify(m)
| MemoryAtomicWait32(m)
@ -2137,13 +2139,13 @@ trait TypeKey<'a> {
fn into_info(self, span: Span, cur: usize) -> TypeInfo<'a>;
}
type FuncKey<'a> = (Vec<ValType<'a>>, Vec<ValType<'a>>);
type FuncKey<'a> = (Box<[ValType<'a>]>, Box<[ValType<'a>]>);
impl<'a> TypeReference<'a> for FunctionType<'a> {
type Key = FuncKey<'a>;
fn key(&self) -> Self::Key {
let params = self.params.iter().map(|p| p.2).collect::<Vec<_>>();
let params = self.params.iter().map(|p| p.2).collect();
let results = self.results.clone();
(params, results)
}
@ -2196,10 +2198,10 @@ impl<'a> TypeReference<'a> for FunctionType<'a> {
fn resolve(&mut self, cx: &Module<'a>) -> Result<(), Error> {
// Resolve the (ref T) value types in the final function type
for param in &mut self.params {
for param in self.params.iter_mut() {
cx.resolve_valtype(&mut param.2)?;
}
for result in &mut self.results {
for result in self.results.iter_mut() {
cx.resolve_valtype(result)?;
}
Ok(())

third_party/rust/wast/tests/comments.rs (vendored): 8 changed lines

@ -1,4 +1,3 @@
use wast::lexer::Comment;
use wast::parser::{self, Parse, ParseBuffer, Parser, Result};
pub struct Comments<'a> {
@ -15,9 +14,10 @@ impl<'a> Parse<'a> for Comments<'a> {
None => break,
};
cursor = c;
comments.push(match comment {
Comment::Block(s) => &s[2..s.len() - 2],
Comment::Line(s) => &s[2..],
comments.push(if comment.starts_with(";;") {
&comment[2..]
} else {
&comment[2..comment.len() - 2]
});
}
Ok((comments, cursor))

third_party/rust/wat/.cargo-checksum.json (vendored): 2 changed lines

@ -1 +1 @@
{"files":{"Cargo.toml":"231cd4f5396bac3e568d2b9cc7b1fde0c0049dc3f9720110300875efb50208c4","README.md":"6653a386a2210f0f7e36964f15214bc441e2c723c42867dfe90dfcedcd301814","src/lib.rs":"03652351228b7f7a520f4e7f1e689fa34a37b8e5e0fc8367a167cc893cdbc449"},"package":"6c350d7431aa486488d28cdf75b57d59c02fab9cde20d93c52424510afe18ecc"}
{"files":{"Cargo.toml":"e769911e25335b27f6dc0b5348a75bda7f08ff83959fc39be48f0947522af50c","README.md":"6653a386a2210f0f7e36964f15214bc441e2c723c42867dfe90dfcedcd301814","src/lib.rs":"03652351228b7f7a520f4e7f1e689fa34a37b8e5e0fc8367a167cc893cdbc449"},"package":"4766d466249e23279e92c52033429eb91141c5efea1c4478138fa6f6ef4efe3e"}

third_party/rust/wat/Cargo.toml (vendored): 4 changed lines

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "wat"
version = "1.0.24"
version = "1.0.26"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "Rust parser for the WebAssembly Text format, WAT\n"
homepage = "https://github.com/bytecodealliance/wasm-tools/tree/main/crates/wat"
@ -22,4 +22,4 @@ readme = "README.md"
license = "Apache-2.0 WITH LLVM-exception"
repository = "https://github.com/bytecodealliance/wasm-tools/tree/main/crates/wat"
[dependencies.wast]
version = "23.0.0"
version = "25.0.0"