Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

View file

@ -0,0 +1,394 @@
use std::{borrow::Cow, fmt};
use super::{builtins::MacroCall, context::ExprPos, Span};
use crate::{
AddressSpace, BinaryOperator, Binding, Constant, Expression, Function, GlobalVariable, Handle,
Interpolation, Literal, Sampling, StorageAccess, Type, UnaryOperator,
};
/// The kind of entity a global name resolves to.
#[derive(Debug, Clone, Copy)]
pub enum GlobalLookupKind {
    /// A plain global variable.
    Variable(Handle<GlobalVariable>),
    /// A constant, stored together with its type.
    Constant(Handle<Constant>, Handle<Type>),
    /// A member of a block global variable, selected by member index.
    BlockSelect(Handle<GlobalVariable>, u32),
}
/// Information needed to resolve a reference to a global name.
#[derive(Debug, Clone, Copy)]
pub struct GlobalLookup {
    /// What the name resolves to.
    pub kind: GlobalLookupKind,
    /// Index of the corresponding entry argument, if this global is one.
    pub entry_arg: Option<usize>,
    /// Whether the value behind this global can be changed.
    pub mutable: bool,
}
/// Extra information about a function parameter.
#[derive(Debug, Clone)]
pub struct ParameterInfo {
    /// The qualifier (`in`/`out`/`inout`/`const`) the parameter was declared with.
    pub qualifier: ParameterQualifier,
    /// Whether the parameter should be treated as a depth image instead of a
    /// sampled image.
    pub depth: bool,
}
/// How the function is implemented
#[derive(Clone, Copy)]
pub enum FunctionKind {
    /// The function is user defined
    Call(Handle<Function>),
    /// The function is a builtin
    Macro(MacroCall),
}
impl fmt::Debug for FunctionKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only print the variant name; the payloads carry no useful
        // debug information here.
        let name = match *self {
            Self::Call(_) => "Call",
            Self::Macro(_) => "Macro",
        };
        write!(f, "{name}")
    }
}
/// A single overload of a (possibly builtin) function.
#[derive(Debug)]
pub struct Overload {
    /// Normalized function parameters, modifiers are not applied
    pub parameters: Vec<Handle<Type>>,
    /// Per-parameter qualifier/depth information, parallel to `parameters`.
    pub parameters_info: Vec<ParameterInfo>,
    /// How the function is implemented
    pub kind: FunctionKind,
    /// Whether this function was already defined or is just a prototype
    pub defined: bool,
    /// Whether this overload is the one provided by the language or has
    /// been redeclared by the user (builtins only)
    pub internal: bool,
    /// Whether or not this function returns void (nothing)
    pub void: bool,
}
bitflags::bitflags! {
    /// Tracks the variations of the builtin already generated, this is needed because some
    /// builtin overloads can't be generated unless explicitly used, since they might cause
    /// unneeded capabilities to be requested
    #[derive(Default)]
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct BuiltinVariations: u32 {
        /// Request the standard overloads
        const STANDARD = 1 << 0;
        /// Request overloads that use the double type
        const DOUBLE = 1 << 1;
        /// Request overloads that use samplerCubeArray(Shadow)
        const CUBE_TEXTURES_ARRAY = 1 << 2;
        /// Request overloads that use sampler2DMSArray
        const D2_MULTI_TEXTURES_ARRAY = 1 << 3;
    }
}
/// All overloads known for a single function name.
#[derive(Debug, Default)]
pub struct FunctionDeclaration {
    /// The set of overloads registered under this name.
    pub overloads: Vec<Overload>,
    /// Tracks the builtin overload variations that were already generated
    pub variations: BuiltinVariations,
}
/// A global that acts as an input/output of an entry point.
#[derive(Debug)]
pub struct EntryArg {
    /// Name of the argument, if any.
    pub name: Option<String>,
    /// The binding (builtin or location) associated with the argument.
    pub binding: Binding,
    /// The backing global variable.
    pub handle: Handle<GlobalVariable>,
    /// Whether this is an input or an output of the shader.
    pub storage: StorageQualifier,
}
/// The result of resolving a variable name to an expression.
#[derive(Debug, Clone)]
pub struct VariableReference {
    /// The expression the variable resolves to.
    pub expr: Handle<Expression>,
    /// Whether the variable is of a pointer type (and needs loading) or not
    pub load: bool,
    /// Whether the value of the variable can be changed or not
    pub mutable: bool,
    /// The constant and its type, when the variable refers to a constant.
    pub constant: Option<(Handle<Constant>, Handle<Type>)>,
    /// Index of the corresponding entry argument, if any.
    pub entry_arg: Option<usize>,
}
/// A high-level intermediate expression together with its source span.
#[derive(Debug, Clone)]
pub struct HirExpr {
    /// The expression itself.
    pub kind: HirExprKind,
    /// The source span the expression was parsed from.
    pub meta: Span,
}
/// The different kinds of high-level intermediate expressions.
#[derive(Debug, Clone)]
pub enum HirExprKind {
    /// Indexing, `base[index]`.
    Access {
        base: Handle<HirExpr>,
        index: Handle<HirExpr>,
    },
    /// Field or swizzle selection, `base.field`.
    Select {
        base: Handle<HirExpr>,
        field: String,
    },
    /// A literal value.
    Literal(Literal),
    /// A binary operation, `left op right`.
    Binary {
        left: Handle<HirExpr>,
        op: BinaryOperator,
        right: Handle<HirExpr>,
    },
    /// A unary operation, `op expr`.
    Unary {
        op: UnaryOperator,
        expr: Handle<HirExpr>,
    },
    /// A reference to a variable.
    Variable(VariableReference),
    /// A function call or type constructor.
    Call(FunctionCall),
    /// Represents the ternary operator in glsl (`?:`)
    Conditional {
        /// The expression that will decide which branch to take, must evaluate to a boolean
        condition: Handle<HirExpr>,
        /// The expression that will be evaluated if [`condition`] returns `true`
        ///
        /// [`condition`]: Self::Conditional::condition
        accept: Handle<HirExpr>,
        /// The expression that will be evaluated if [`condition`] returns `false`
        ///
        /// [`condition`]: Self::Conditional::condition
        reject: Handle<HirExpr>,
    },
    /// An assignment, `tgt = value`.
    Assign {
        tgt: Handle<HirExpr>,
        value: Handle<HirExpr>,
    },
    /// A prefix/postfix operator like `++`
    PrePostfix {
        /// The operation to be performed
        op: BinaryOperator,
        /// Whether this is a postfix or a prefix
        postfix: bool,
        /// The target expression
        expr: Handle<HirExpr>,
    },
    /// A method call like `what.something(a, b, c)`
    Method {
        /// expression the method call applies to (`what` in the example)
        expr: Handle<HirExpr>,
        /// the method name (`something` in the example)
        name: String,
        /// the arguments to the method (`a`, `b`, and `c` in the example)
        args: Vec<Handle<HirExpr>>,
    },
}
/// Key used to look up a layout qualifier in [`TypeQualifiers`].
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum QualifierKey<'a> {
    /// A named qualifier, e.g. `binding` or `location`.
    String(Cow<'a, str>),
    /// Used for `std140` and `std430` layout qualifiers
    Layout,
    /// Used for image formats
    Format,
}
/// Value associated with a [`QualifierKey`].
#[derive(Debug)]
pub enum QualifierValue {
    /// The qualifier carries no value (e.g. `early_fragment_tests`).
    None,
    /// An unsigned integer value (e.g. `binding = 1`).
    Uint(u32),
    /// A struct layout (`std140`/`std430`).
    Layout(StructLayout),
    /// A storage image format.
    Format(crate::StorageFormat),
}
/// All qualifiers collected while parsing a declaration.
#[derive(Debug, Default)]
pub struct TypeQualifiers<'a> {
    /// Span covering the whole qualifier list.
    pub span: Span,
    /// The storage qualifier and where it was declared.
    pub storage: (StorageQualifier, Span),
    /// Span of the `invariant` qualifier, if present.
    pub invariant: Option<Span>,
    /// Interpolation qualifier (`flat`/`noperspective`/`smooth`), if present.
    pub interpolation: Option<(Interpolation, Span)>,
    /// Precision qualifier (`lowp`/`mediump`/`highp`), if present.
    pub precision: Option<(Precision, Span)>,
    /// Sampling qualifier (`centroid`/`sample`), if present.
    pub sampling: Option<(Sampling, Span)>,
    /// Memory qualifiers used in the declaration to set the storage access to be used
    /// in declarations that support it (storage images and buffers)
    pub storage_access: Option<(StorageAccess, Span)>,
    /// Remaining layout qualifiers, keyed by name/kind; consumed as they are used.
    pub layout_qualifiers: crate::FastHashMap<QualifierKey<'a>, (QualifierValue, Span)>,
}
impl<'a> TypeQualifiers<'a> {
    /// Appends `errors` with errors for all unused qualifiers
    pub fn unused_errors(&self, errors: &mut Vec<super::Error>) {
        // All of the errors below share the same shape; factor the push
        // into a closure so each check only supplies message and span.
        let mut push_semantic = |message: &'static str, meta| {
            errors.push(super::Error {
                kind: super::ErrorKind::SemanticError(message.into()),
                meta,
            });
        };
        if let Some(meta) = self.invariant {
            push_semantic(
                "Invariant qualifier can only be used in in/out variables",
                meta,
            );
        }
        if let Some((_, meta)) = self.interpolation {
            push_semantic(
                "Interpolation qualifiers can only be used in in/out variables",
                meta,
            );
        }
        if let Some((_, meta)) = self.sampling {
            push_semantic(
                "Sampling qualifiers can only be used in in/out variables",
                meta,
            );
        }
        if let Some((_, meta)) = self.storage_access {
            push_semantic(
                "Memory qualifiers can only be used in storage variables",
                meta,
            );
        }
        for &(_, meta) in self.layout_qualifiers.values() {
            push_semantic("Unexpected qualifier", meta);
        }
    }
    /// Removes the layout qualifier with `name`, if it exists and adds an error if it isn't
    /// a [`QualifierValue::Uint`]
    pub fn uint_layout_qualifier(
        &mut self,
        name: &'a str,
        errors: &mut Vec<super::Error>,
    ) -> Option<u32> {
        // Absent qualifier: bail out with `None` right away.
        let (value, meta) = self
            .layout_qualifiers
            .remove(&QualifierKey::String(name.into()))?;
        match value {
            QualifierValue::Uint(v) => Some(v),
            _ => {
                errors.push(super::Error {
                    kind: super::ErrorKind::SemanticError("Qualifier expects a uint value".into()),
                    meta,
                });
                // Return a dummy value instead of `None` to differentiate from
                // the qualifier not existing, since some parts might require the
                // qualifier to exist and throwing another error that it doesn't
                // exist would be unhelpful
                Some(0)
            }
        }
    }
    /// Removes the layout qualifier with `name`, if it exists and adds an error if it isn't
    /// a [`QualifierValue::None`]
    pub fn none_layout_qualifier(&mut self, name: &'a str, errors: &mut Vec<super::Error>) -> bool {
        let removed = self
            .layout_qualifiers
            .remove(&QualifierKey::String(name.into()));
        let Some((value, meta)) = removed else {
            return false;
        };
        if !matches!(value, QualifierValue::None) {
            errors.push(super::Error {
                kind: super::ErrorKind::SemanticError("Qualifier doesn't expect a value".into()),
                meta,
            });
            // Report the error but still return `true` since the qualifier is
            // defined and adding another error for it not being defined would
            // be unhelpful
        }
        true
    }
}
/// What a call expression invokes.
#[derive(Debug, Clone)]
pub enum FunctionCallKind {
    /// A type constructor, e.g. `vec3(...)`.
    TypeConstructor(Handle<Type>),
    /// A call to a named function.
    Function(String),
}
/// A call expression: the callee plus its arguments.
#[derive(Debug, Clone)]
pub struct FunctionCall {
    /// Whether a function or a type constructor is being called.
    pub kind: FunctionCallKind,
    /// The argument expressions, in call order.
    pub args: Vec<Handle<HirExpr>>,
}
/// The storage qualifier of a declaration.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum StorageQualifier {
    /// A qualifier that maps directly to an IR address space.
    AddressSpace(AddressSpace),
    /// An `in` variable of the shader stage.
    Input,
    /// An `out` variable of the shader stage.
    Output,
    /// A `const` declaration.
    Const,
}
impl Default for StorageQualifier {
    /// Declarations without an explicit storage qualifier live in
    /// function-local storage.
    fn default() -> Self {
        Self::AddressSpace(AddressSpace::Function)
    }
}
/// A GLSL block memory layout.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StructLayout {
    /// The `std140` layout (uniform blocks).
    Std140,
    /// The `std430` layout (storage blocks).
    Std430,
}
// TODO: Encode precision hints in the IR
/// A precision hint used in GLSL declarations.
///
/// Precision hints can be used to either speed up shader execution or control
/// the precision of arithmetic operations.
///
/// To use a precision hint simply add it before the type in the declaration.
/// ```glsl
/// mediump float a;
/// ```
///
/// The default when no precision is declared is `highp` which means that all
/// operations operate with the type defined width.
///
/// For `mediump` and `lowp` operations follow the spir-v
/// [`RelaxedPrecision`][RelaxedPrecision] decoration semantics.
///
/// [RelaxedPrecision]: https://www.khronos.org/registry/SPIR-V/specs/unified1/SPIRV.html#_a_id_relaxedprecisionsection_a_relaxed_precision
#[derive(Debug, Clone, PartialEq, Copy)]
pub enum Precision {
    /// `lowp` precision
    Low,
    /// `mediump` precision
    Medium,
    /// `highp` precision
    High,
}
/// The parameter qualifier of a function argument.
#[derive(Debug, Clone, PartialEq, Copy)]
pub enum ParameterQualifier {
    /// `in` — passed by value into the function (the default).
    In,
    /// `out` — written by the function, copied back to the caller.
    Out,
    /// `inout` — passed in and copied back out.
    InOut,
    /// `const` — a read-only input.
    Const,
}
impl ParameterQualifier {
    /// Returns true if the argument should be passed as a lhs expression
    pub const fn is_lhs(&self) -> bool {
        matches!(
            *self,
            ParameterQualifier::Out | ParameterQualifier::InOut
        )
    }
    /// Converts from a parameter qualifier into a [`ExprPos`](ExprPos)
    pub const fn as_pos(&self) -> ExprPos {
        // `out`/`inout` arguments are written to, so they must be
        // evaluated in a left-hand-side position.
        if self.is_lhs() {
            ExprPos::Lhs
        } else {
            ExprPos::Rhs
        }
    }
}
/// The GLSL profile used by a shader.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Profile {
    /// The `core` profile, default when no profile is specified.
    Core,
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,134 @@
use super::{constants::ConstantSolvingError, token::TokenValue};
use crate::Span;
use pp_rs::token::PreprocessorError;
use std::borrow::Cow;
use thiserror::Error;
/// Joins the tokens as a human-readable list: `a, b or c`.
///
/// The last two items are joined by ` or `, all earlier ones by `, `;
/// an empty slice yields an empty string.
fn join_with_comma(list: &[ExpectedToken]) -> String {
    let mut string = String::new();
    for (i, val) in list.iter().enumerate() {
        string.push_str(&val.to_string());
        // Pick the separator based on how many items are still to come.
        match list.len() - 1 - i {
            0 => {}
            1 => string.push_str(" or "),
            _ => string.push_str(", "),
        }
    }
    string
}
/// One of the expected tokens returned in [`InvalidToken`](ErrorKind::InvalidToken).
#[derive(Debug, PartialEq)]
pub enum ExpectedToken {
    /// A specific token was expected.
    Token(TokenValue),
    /// A type was expected.
    TypeName,
    /// An identifier was expected.
    Identifier,
    /// An integer literal was expected.
    IntLiteral,
    /// A float literal was expected.
    FloatLiteral,
    /// A boolean literal was expected.
    BoolLiteral,
    /// The end of file was expected.
    Eof,
}
impl From<TokenValue> for ExpectedToken {
fn from(token: TokenValue) -> Self {
ExpectedToken::Token(token)
}
}
impl std::fmt::Display for ExpectedToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Every arm except `Token` writes a fixed string; `Token` defers
        // to the token's `Debug` representation.
        match *self {
            ExpectedToken::Token(ref token) => write!(f, "{token:?}"),
            ExpectedToken::TypeName => f.write_str("a type"),
            ExpectedToken::Identifier => f.write_str("identifier"),
            ExpectedToken::IntLiteral => f.write_str("integer literal"),
            ExpectedToken::FloatLiteral => f.write_str("float literal"),
            ExpectedToken::BoolLiteral => f.write_str("bool literal"),
            ExpectedToken::Eof => f.write_str("end of file"),
        }
    }
}
/// Information about the cause of an error.
#[derive(Debug, Error)]
#[cfg_attr(test, derive(PartialEq))]
pub enum ErrorKind {
    /// Whilst parsing, an unexpected EOF was encountered.
    #[error("Unexpected end of file")]
    EndOfFile,
    /// The shader specified an unsupported or invalid profile.
    #[error("Invalid profile: {0}")]
    InvalidProfile(String),
    /// The shader requested an unsupported or invalid version.
    #[error("Invalid version: {0}")]
    InvalidVersion(u64),
    /// Whilst parsing an unexpected token was encountered.
    ///
    /// A list of expected tokens is also returned.
    #[error("Expected {}, found {0:?}", join_with_comma(.1))]
    InvalidToken(TokenValue, Vec<ExpectedToken>),
    /// A specific feature is not yet implemented.
    ///
    /// To help prioritize work please open an issue in the github issue tracker
    /// if none exist already or react to the already existing one.
    #[error("Not implemented: {0}")]
    NotImplemented(&'static str),
    /// A reference to a variable that wasn't declared was used.
    #[error("Unknown variable: {0}")]
    UnknownVariable(String),
    /// A reference to a type that wasn't declared was used.
    #[error("Unknown type: {0}")]
    UnknownType(String),
    /// A reference to a non existent member of a type was made.
    #[error("Unknown field: {0}")]
    UnknownField(String),
    /// An unknown layout qualifier was used.
    ///
    /// If the qualifier does exist please open an issue in the github issue tracker
    /// if none exist already or react to the already existing one to help
    /// prioritize work.
    #[error("Unknown layout qualifier: {0}")]
    UnknownLayoutQualifier(String),
    /// Unsupported matrix of the form matCx2
    ///
    /// Our IR expects matrices of the form matCx2 to have a stride of 8 however
    /// matrices in the std140 layout have a stride of at least 16
    #[error("unsupported matrix of the form matCx2 in std140 block layout")]
    UnsupportedMatrixTypeInStd140,
    /// A variable with the same name already exists in the current scope.
    #[error("Variable already declared: {0}")]
    VariableAlreadyDeclared(String),
    /// A semantic error was detected in the shader.
    #[error("{0}")]
    SemanticError(Cow<'static, str>),
    /// An error was returned by the preprocessor.
    #[error("{0:?}")]
    PreprocessorError(PreprocessorError),
    /// The parser entered an illegal state and exited
    ///
    /// This obviously is a bug and as such should be reported in the github issue tracker
    #[error("Internal error: {0}")]
    InternalError(&'static str),
}
impl From<ConstantSolvingError> for ErrorKind {
fn from(err: ConstantSolvingError) -> Self {
ErrorKind::SemanticError(err.to_string().into())
}
}
/// Error returned during shader parsing.
#[derive(Debug, Error)]
#[error("{kind}")]
#[cfg_attr(test, derive(PartialEq))]
pub struct Error {
    /// Holds the information about the error itself.
    pub kind: ErrorKind,
    /// Holds information about the range of the source code where the error happened.
    pub meta: Span,
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,301 @@
use super::{
ast::Precision,
token::{Directive, DirectiveKind, Token, TokenValue},
types::parse_type,
};
use crate::{FastHashMap, Span, StorageAccess};
use pp_rs::{
pp::Preprocessor,
token::{PreprocessorError, Punct, TokenValue as PPTokenValue},
};
/// One item produced by the lexer, together with its source span.
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct LexerResult {
    /// The token, directive, or error that was produced.
    pub kind: LexerResultKind,
    /// The source span the item was lexed from.
    pub meta: Span,
}
/// The payload of a [`LexerResult`].
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub enum LexerResultKind {
    /// A regular language token.
    Token(Token),
    /// A preprocessor directive (`#version`, `#extension`, `#pragma`).
    Directive(Directive),
    /// An error reported by the preprocessor.
    Error(PreprocessorError),
}
/// GLSL lexer built on top of the `pp_rs` preprocessor.
pub struct Lexer<'a> {
    // The underlying preprocessor; tokens are pulled from it lazily.
    pp: Preprocessor<'a>,
}
impl<'a> Lexer<'a> {
    /// Creates a lexer over `input`, registering every `(name, value)` pair
    /// in `defines` as a preprocessor `#define`.
    pub fn new(input: &'a str, defines: &'a FastHashMap<String, String>) -> Self {
        let mut pp = Preprocessor::new(input);
        for (name, value) in defines.iter() {
            //TODO: handle error
            pp.add_define(name, value).unwrap();
        }
        Self { pp }
    }
}
impl<'a> Iterator for Lexer<'a> {
    type Item = LexerResult;
    /// Pulls the next preprocessor token and translates it into a
    /// [`LexerResult`]; preprocessor errors and directives are surfaced as
    /// their own result kinds instead of terminating the stream.
    fn next(&mut self) -> Option<Self::Item> {
        let pp_token = match self.pp.next()? {
            Ok(t) => t,
            // Preprocessor errors become items so the caller can report
            // them with a span and keep lexing.
            Err((err, loc)) => {
                return Some(LexerResult {
                    kind: LexerResultKind::Error(err),
                    meta: loc.into(),
                });
            }
        };
        let meta = pp_token.location.into();
        let value = match pp_token.value {
            // Directives return early; their raw tokens are handed to the parser.
            PPTokenValue::Extension(extension) => {
                return Some(LexerResult {
                    kind: LexerResultKind::Directive(Directive {
                        kind: DirectiveKind::Extension,
                        tokens: extension.tokens,
                    }),
                    meta,
                })
            }
            PPTokenValue::Float(float) => TokenValue::FloatConstant(float),
            // Identifiers are checked against keywords first; anything left
            // over is either a type name or a plain identifier.
            PPTokenValue::Ident(ident) => {
                match ident.as_str() {
                    // Qualifiers
                    "layout" => TokenValue::Layout,
                    "in" => TokenValue::In,
                    "out" => TokenValue::Out,
                    "uniform" => TokenValue::Uniform,
                    "buffer" => TokenValue::Buffer,
                    "shared" => TokenValue::Shared,
                    "invariant" => TokenValue::Invariant,
                    // NOTE: GLSL `smooth` maps to IR `Perspective` and
                    // `noperspective` to IR `Linear` — intentional, not swapped.
                    "flat" => TokenValue::Interpolation(crate::Interpolation::Flat),
                    "noperspective" => TokenValue::Interpolation(crate::Interpolation::Linear),
                    "smooth" => TokenValue::Interpolation(crate::Interpolation::Perspective),
                    "centroid" => TokenValue::Sampling(crate::Sampling::Centroid),
                    "sample" => TokenValue::Sampling(crate::Sampling::Sample),
                    "const" => TokenValue::Const,
                    "inout" => TokenValue::InOut,
                    "precision" => TokenValue::Precision,
                    "highp" => TokenValue::PrecisionQualifier(Precision::High),
                    "mediump" => TokenValue::PrecisionQualifier(Precision::Medium),
                    "lowp" => TokenValue::PrecisionQualifier(Precision::Low),
                    "restrict" => TokenValue::Restrict,
                    "readonly" => TokenValue::MemoryQualifier(StorageAccess::LOAD),
                    "writeonly" => TokenValue::MemoryQualifier(StorageAccess::STORE),
                    // values
                    "true" => TokenValue::BoolConstant(true),
                    "false" => TokenValue::BoolConstant(false),
                    // jump statements
                    "continue" => TokenValue::Continue,
                    "break" => TokenValue::Break,
                    "return" => TokenValue::Return,
                    "discard" => TokenValue::Discard,
                    // selection statements
                    "if" => TokenValue::If,
                    "else" => TokenValue::Else,
                    "switch" => TokenValue::Switch,
                    "case" => TokenValue::Case,
                    "default" => TokenValue::Default,
                    // iteration statements
                    "while" => TokenValue::While,
                    "do" => TokenValue::Do,
                    "for" => TokenValue::For,
                    // types
                    "void" => TokenValue::Void,
                    "struct" => TokenValue::Struct,
                    word => match parse_type(word) {
                        Some(t) => TokenValue::TypeName(t),
                        None => TokenValue::Identifier(String::from(word)),
                    },
                }
            }
            PPTokenValue::Integer(integer) => TokenValue::IntConstant(integer),
            PPTokenValue::Punct(punct) => match punct {
                // Compound assignments
                Punct::AddAssign => TokenValue::AddAssign,
                Punct::SubAssign => TokenValue::SubAssign,
                Punct::MulAssign => TokenValue::MulAssign,
                Punct::DivAssign => TokenValue::DivAssign,
                Punct::ModAssign => TokenValue::ModAssign,
                Punct::LeftShiftAssign => TokenValue::LeftShiftAssign,
                Punct::RightShiftAssign => TokenValue::RightShiftAssign,
                Punct::AndAssign => TokenValue::AndAssign,
                Punct::XorAssign => TokenValue::XorAssign,
                Punct::OrAssign => TokenValue::OrAssign,
                // Two character punctuation
                Punct::Increment => TokenValue::Increment,
                Punct::Decrement => TokenValue::Decrement,
                Punct::LogicalAnd => TokenValue::LogicalAnd,
                Punct::LogicalOr => TokenValue::LogicalOr,
                Punct::LogicalXor => TokenValue::LogicalXor,
                Punct::LessEqual => TokenValue::LessEqual,
                Punct::GreaterEqual => TokenValue::GreaterEqual,
                Punct::EqualEqual => TokenValue::Equal,
                Punct::NotEqual => TokenValue::NotEqual,
                Punct::LeftShift => TokenValue::LeftShift,
                Punct::RightShift => TokenValue::RightShift,
                // Parenthesis or similar
                Punct::LeftBrace => TokenValue::LeftBrace,
                Punct::RightBrace => TokenValue::RightBrace,
                Punct::LeftParen => TokenValue::LeftParen,
                Punct::RightParen => TokenValue::RightParen,
                Punct::LeftBracket => TokenValue::LeftBracket,
                Punct::RightBracket => TokenValue::RightBracket,
                // Other one character punctuation
                Punct::LeftAngle => TokenValue::LeftAngle,
                Punct::RightAngle => TokenValue::RightAngle,
                Punct::Semicolon => TokenValue::Semicolon,
                Punct::Comma => TokenValue::Comma,
                Punct::Colon => TokenValue::Colon,
                Punct::Dot => TokenValue::Dot,
                Punct::Equal => TokenValue::Assign,
                Punct::Bang => TokenValue::Bang,
                Punct::Minus => TokenValue::Dash,
                Punct::Tilde => TokenValue::Tilde,
                Punct::Plus => TokenValue::Plus,
                Punct::Star => TokenValue::Star,
                Punct::Slash => TokenValue::Slash,
                Punct::Percent => TokenValue::Percent,
                Punct::Pipe => TokenValue::VerticalBar,
                Punct::Caret => TokenValue::Caret,
                Punct::Ampersand => TokenValue::Ampersand,
                Punct::Question => TokenValue::Question,
            },
            PPTokenValue::Pragma(pragma) => {
                return Some(LexerResult {
                    kind: LexerResultKind::Directive(Directive {
                        kind: DirectiveKind::Pragma,
                        tokens: pragma.tokens,
                    }),
                    meta,
                })
            }
            PPTokenValue::Version(version) => {
                return Some(LexerResult {
                    kind: LexerResultKind::Directive(Directive {
                        kind: DirectiveKind::Version {
                            is_first_directive: version.is_first_directive,
                        },
                        tokens: version.tokens,
                    }),
                    meta,
                })
            }
        };
        Some(LexerResult {
            kind: LexerResultKind::Token(Token { value, meta }),
            meta,
        })
    }
}
#[cfg(test)]
mod tests {
    use pp_rs::token::{Integer, Location, Token as PPToken, TokenValue as PPTokenValue};
    use super::{
        super::token::{Directive, DirectiveKind, Token, TokenValue},
        Lexer, LexerResult, LexerResultKind,
    };
    use crate::Span;
    /// Lexes a minimal shader and checks every produced item, span by span.
    #[test]
    fn lex_tokens() {
        let defines = crate::FastHashMap::default();
        // line comments
        let mut lex = Lexer::new("#version 450\nvoid main () {}", &defines);
        // Expected location of the `450` literal inside the directive.
        let mut location = Location::default();
        location.start = 9;
        location.end = 12;
        // `#version 450` comes back as a directive, not as tokens.
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Directive(Directive {
                    kind: DirectiveKind::Version {
                        is_first_directive: true
                    },
                    tokens: vec![PPToken {
                        value: PPTokenValue::Integer(Integer {
                            signed: true,
                            value: 450,
                            width: 32
                        }),
                        location
                    }]
                }),
                meta: Span::new(1, 8)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::Void,
                    meta: Span::new(13, 17)
                }),
                meta: Span::new(13, 17)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::Identifier("main".into()),
                    meta: Span::new(18, 22)
                }),
                meta: Span::new(18, 22)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::LeftParen,
                    meta: Span::new(23, 24)
                }),
                meta: Span::new(23, 24)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::RightParen,
                    meta: Span::new(24, 25)
                }),
                meta: Span::new(24, 25)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::LeftBrace,
                    meta: Span::new(26, 27)
                }),
                meta: Span::new(26, 27)
            }
        );
        assert_eq!(
            lex.next().unwrap(),
            LexerResult {
                kind: LexerResultKind::Token(Token {
                    value: TokenValue::RightBrace,
                    meta: Span::new(27, 28)
                }),
                meta: Span::new(27, 28)
            }
        );
        // End of input: the iterator is exhausted.
        assert_eq!(lex.next(), None);
    }
}

View file

@ -0,0 +1,235 @@
/*!
Frontend for [GLSL][glsl] (OpenGL Shading Language).
To begin, take a look at the documentation for the [`Frontend`].
# Supported versions
## Vulkan
- 440 (partial)
- 450
- 460
[glsl]: https://www.khronos.org/registry/OpenGL/index_gl.php
*/
pub use ast::{Precision, Profile};
pub use error::{Error, ErrorKind, ExpectedToken};
pub use token::TokenValue;
use crate::{proc::Layouter, FastHashMap, FastHashSet, Handle, Module, ShaderStage, Span, Type};
use ast::{EntryArg, FunctionDeclaration, GlobalLookup};
use parser::ParsingContext;
mod ast;
mod builtins;
mod constants;
mod context;
mod error;
mod functions;
mod lex;
mod offset;
mod parser;
#[cfg(test)]
mod parser_tests;
mod token;
mod types;
mod variables;
/// Convenience alias for fallible operations inside this frontend.
type Result<T> = std::result::Result<T, Error>;
/// Per-shader options passed to [`parse`](Frontend::parse).
///
/// The [`From`](From) trait is implemented for [`ShaderStage`](ShaderStage) to
/// provide a quick way to create an `Options` instance.
/// ```rust
/// # use naga::ShaderStage;
/// # use naga::front::glsl::Options;
/// Options::from(ShaderStage::Vertex);
/// ```
#[derive(Debug)]
pub struct Options {
    /// The shader stage in the pipeline.
    pub stage: ShaderStage,
    /// Preprocessor definitions to be used, akin to having
    /// ```glsl
    /// #define key value
    /// ```
    /// for each key value pair in the map.
    pub defines: FastHashMap<String, String>,
}
impl From<ShaderStage> for Options {
fn from(stage: ShaderStage) -> Self {
Options {
stage,
defines: FastHashMap::default(),
}
}
}
/// Additional information about the GLSL shader.
///
/// Stores additional information about the GLSL shader which might not be
/// stored in the shader [`Module`](Module).
#[derive(Debug)]
pub struct ShaderMetadata {
    /// The GLSL version specified in the shader through the use of the
    /// `#version` preprocessor directive.
    pub version: u16,
    /// The GLSL profile specified in the shader through the use of the
    /// `#version` preprocessor directive.
    pub profile: Profile,
    /// The shader stage in the pipeline, passed to the [`parse`](Frontend::parse)
    /// method via the [`Options`](Options) struct.
    pub stage: ShaderStage,
    /// The workgroup size for compute shaders, defaults to `[1; 3]` for
    /// compute shaders and `[0; 3]` for non compute shaders.
    pub workgroup_size: [u32; 3],
    /// Whether or not early fragment tests were requested by the shader.
    /// Defaults to `false`.
    pub early_fragment_tests: bool,
    /// The shader can request extensions via the
    /// `#extension` preprocessor directive, in the directive a behavior
    /// parameter is used to control whether the extension should be disabled,
    /// warn on usage, enabled if possible or required.
    ///
    /// This field only stores extensions which were required or requested to
    /// be enabled if possible and they are supported.
    pub extensions: FastHashSet<String>,
}
impl ShaderMetadata {
    /// Restores the metadata to its pre-parse state for `stage`, so a
    /// `Frontend` can be reused across shaders.
    fn reset(&mut self, stage: ShaderStage) {
        // Compute shaders start with a default workgroup size of 1 per axis;
        // every other stage uses 0.
        let axis = u32::from(stage == ShaderStage::Compute);
        self.version = 0;
        self.profile = Profile::Core;
        self.stage = stage;
        self.workgroup_size = [axis, axis, axis];
        self.early_fragment_tests = false;
        self.extensions.clear();
    }
}
impl Default for ShaderMetadata {
    /// The metadata of a frontend that has not parsed anything yet.
    fn default() -> Self {
        Self {
            version: 0,
            profile: Profile::Core,
            stage: ShaderStage::Vertex,
            workgroup_size: [0, 0, 0],
            early_fragment_tests: false,
            extensions: FastHashSet::default(),
        }
    }
}
/// The `Frontend` is the central structure of the GLSL frontend.
///
/// To instantiate a new `Frontend` the [`Default`](Default) trait is used, so a
/// call to the associated function [`Frontend::default`](Frontend::default) will
/// return a new `Frontend` instance.
///
/// To parse a shader simply call the [`parse`](Frontend::parse) method with a
/// [`Options`](Options) struct and a [`&str`](str) holding the glsl code.
///
/// The `Frontend` also provides the [`metadata`](Frontend::metadata) to get some
/// further information about the previously parsed shader, like version and
/// extensions used (see the documentation for
/// [`ShaderMetadata`](ShaderMetadata) to see all the returned information)
///
/// # Example usage
/// ```rust
/// use naga::ShaderStage;
/// use naga::front::glsl::{Frontend, Options};
///
/// let glsl = r#"
///     #version 450 core
///
///     void main() {}
/// "#;
///
/// let mut frontend = Frontend::default();
/// let options = Options::from(ShaderStage::Vertex);
/// frontend.parse(&options, glsl);
/// ```
///
/// # Reusability
///
/// If there's a need to parse more than one shader reusing the same `Frontend`
/// instance may be beneficial since internal allocations will be reused.
///
/// Calling the [`parse`](Frontend::parse) method multiple times will reset the
/// `Frontend` so no extra care is needed when reusing.
#[derive(Debug, Default)]
pub struct Frontend {
    // Metadata (version, profile, extensions) of the current shader.
    meta: ShaderMetadata,
    // Function name -> declared overloads.
    lookup_function: FastHashMap<String, FunctionDeclaration>,
    // Type name -> IR type handle.
    lookup_type: FastHashMap<String, Handle<Type>>,
    // Globals visible at the current point, by name.
    global_variables: Vec<(String, GlobalLookup)>,
    // Inputs/outputs collected for the entry point.
    entry_args: Vec<EntryArg>,
    // Cached type layout calculator.
    layouter: Layouter,
    // Errors accumulated during the current parse.
    errors: Vec<Error>,
    // The module being built; taken on successful parse.
    module: Module,
}
impl Frontend {
    /// Clears all per-shader state so the frontend can be reused.
    fn reset(&mut self, stage: ShaderStage) {
        self.meta.reset(stage);
        self.lookup_function.clear();
        self.lookup_type.clear();
        self.global_variables.clear();
        self.entry_args.clear();
        self.layouter.clear();
        // This is necessary because if the last parsing errored out, the module
        // wouldn't have been taken
        self.module = Module::default();
    }
    /// Parses a shader either outputting a shader [`Module`](Module) or a list
    /// of [`Error`](Error)s.
    ///
    /// Multiple calls using the same `Frontend` and different shaders are supported.
    pub fn parse(
        &mut self,
        options: &Options,
        source: &str,
    ) -> std::result::Result<Module, Vec<Error>> {
        self.reset(options.stage);
        let mut ctx = ParsingContext::new(lex::Lexer::new(source, &options.defines));
        if let Err(e) = ctx.parse(self) {
            self.errors.push(e);
        }
        // Any accumulated error means failure; otherwise hand the built
        // module over, leaving the frontend ready for the next parse.
        if !self.errors.is_empty() {
            return Err(std::mem::take(&mut self.errors));
        }
        Ok(std::mem::take(&mut self.module))
    }
    /// Returns additional information about the parsed shader which might not be
    /// stored in the [`Module`](Module), see the documentation for
    /// [`ShaderMetadata`](ShaderMetadata) for more information about the
    /// returned data.
    ///
    /// # Notes
    ///
    /// Following an unsuccessful parsing the state of the returned information
    /// is undefined, it might contain only partial information about the
    /// current shader, the previous shader or both.
    pub const fn metadata(&self) -> &ShaderMetadata {
        &self.meta
    }
}

View file

@ -0,0 +1,170 @@
/*!
Module responsible for calculating the offset and span for types.
There exist two types of layouts, std140 and std430 (there are technically
two more layouts, shared and packed. Shared is not supported by spirv. Packed is
implementation dependent and for now it's just implemented as an alias to
std140).
The OpenGL spec (the layout rules are defined by the OpenGL spec in section
7.6.2.2 as opposed to the GLSL spec) uses the term basic machine units which are
equivalent to bytes.
*/
use super::{
ast::StructLayout,
error::{Error, ErrorKind},
Span,
};
use crate::{proc::Alignment, Handle, Type, TypeInner, UniqueArena};
/// Struct with information needed for defining a struct member.
///
/// Returned by [`calculate_offset`](calculate_offset)
#[derive(Debug)]
pub struct TypeAlignSpan {
    /// The handle to the type, this might be the same handle passed to
    /// [`calculate_offset`](calculate_offset) or a new array type with a
    /// different stride set.
    pub ty: Handle<Type>,
    /// The alignment required by the type.
    pub align: Alignment,
    /// The size of the type.
    pub span: u32,
}
/// Returns the type, alignment and span of a struct member according to a [`StructLayout`](StructLayout).
///
/// The functions returns a [`TypeAlignSpan`](TypeAlignSpan) which has a `ty` member
/// this should be used as the struct member type because for example arrays may have to
/// change the stride and as such need to have a different type.
///
/// Rule numbers in the comments below refer to the layout rules of the
/// OpenGL spec, section 7.6.2.2.
pub fn calculate_offset(
    mut ty: Handle<Type>,
    meta: Span,
    layout: StructLayout,
    types: &mut UniqueArena<Type>,
    errors: &mut Vec<Error>,
) -> TypeAlignSpan {
    // When using the std430 storage layout, shader storage blocks will be laid out in buffer storage
    // identically to uniform and shader storage blocks using the std140 layout, except
    // that the base alignment and stride of arrays of scalars and vectors in rule 4 and of
    // structures in rule 9 are not rounded up to a multiple of the base alignment of a vec4.
    let (align, span) = match types[ty].inner {
        // 1. If the member is a scalar consuming N basic machine units,
        // the base alignment is N.
        TypeInner::Scalar { width, .. } => (Alignment::from_width(width), width as u32),
        // 2. If the member is a two- or four-component vector with components
        // consuming N basic machine units, the base alignment is 2N or 4N, respectively.
        // 3. If the member is a three-component vector with components consuming N
        // basic machine units, the base alignment is 4N.
        TypeInner::Vector { size, width, .. } => (
            Alignment::from(size) * Alignment::from_width(width),
            size as u32 * width as u32,
        ),
        // 4. If the member is an array of scalars or vectors, the base alignment and array
        // stride are set to match the base alignment of a single array element, according
        // to rules (1), (2), and (3), and rounded up to the base alignment of a vec4.
        // TODO: Matrices array
        TypeInner::Array { base, size, .. } => {
            // Recurse to lay out the element type first; the element may
            // itself be rewritten (e.g. nested arrays), so use `info.ty` below.
            let info = calculate_offset(base, meta, layout, types, errors);
            let name = types[ty].name.clone();
            // See comment at the beginning of the function
            let (align, stride) = if StructLayout::Std430 == layout {
                (info.align, info.align.round_up(info.span))
            } else {
                let align = info.align.max(Alignment::MIN_UNIFORM);
                (align, align.round_up(info.span))
            };
            // Runtime-sized arrays only account for one element in the span.
            let span = match size {
                crate::ArraySize::Constant(size) => size.get() * stride,
                crate::ArraySize::Dynamic => stride,
            };
            // Re-insert the array with the (possibly changed) stride;
            // keeping the original source span for diagnostics.
            let ty_span = types.get_span(ty);
            ty = types.insert(
                Type {
                    name,
                    inner: TypeInner::Array {
                        base: info.ty,
                        size,
                        stride,
                    },
                },
                ty_span,
            );
            (align, span)
        }
        // 5. If the member is a column-major matrix with C columns and R rows, the
        // matrix is stored identically to an array of C column vectors with R
        // components each, according to rule (4)
        // TODO: Row major matrices
        TypeInner::Matrix {
            columns,
            rows,
            width,
        } => {
            let mut align = Alignment::from(rows) * Alignment::from_width(width);
            // See comment at the beginning of the function
            if StructLayout::Std430 != layout {
                align = align.max(Alignment::MIN_UNIFORM);
            }
            // See comment on the error kind
            if StructLayout::Std140 == layout && rows == crate::VectorSize::Bi {
                errors.push(Error {
                    kind: ErrorKind::UnsupportedMatrixTypeInStd140,
                    meta,
                });
            }
            (align, align * columns as u32)
        }
        TypeInner::Struct { ref members, .. } => {
            let mut span = 0;
            let mut align = Alignment::ONE;
            let mut members = members.clone();
            let name = types[ty].name.clone();
            for member in members.iter_mut() {
                // Each member starts at its own alignment; the struct's
                // alignment is the maximum over all members.
                let info = calculate_offset(member.ty, meta, layout, types, errors);
                let member_alignment = info.align;
                span = member_alignment.round_up(span);
                align = member_alignment.max(align);
                member.ty = info.ty;
                member.offset = span;
                span += info.span;
            }
            // The struct's size is rounded up to its alignment.
            span = align.round_up(span);
            let ty_span = types.get_span(ty);
            ty = types.insert(
                Type {
                    name,
                    inner: TypeInner::Struct { members, span },
                },
                ty_span,
            );
            (align, span)
        }
        _ => {
            errors.push(Error {
                kind: ErrorKind::SemanticError("Invalid struct member type".into()),
                meta,
            });
            (Alignment::ONE, 0)
        }
    };
    TypeAlignSpan { ty, align, span }
}

View file

@ -0,0 +1,438 @@
use super::{
ast::{FunctionKind, Profile, TypeQualifiers},
context::{Context, ExprPos},
error::ExpectedToken,
error::{Error, ErrorKind},
lex::{Lexer, LexerResultKind},
token::{Directive, DirectiveKind},
token::{Token, TokenValue},
variables::{GlobalOrConstant, VarDeclaration},
Frontend, Result,
};
use crate::{arena::Handle, proc::U32EvalError, Block, Expression, Span, Type};
use pp_rs::token::{PreprocessorError, Token as PPToken, TokenValue as PPTokenValue};
use std::iter::Peekable;
mod declarations;
mod expressions;
mod functions;
mod types;
/// Recursive-descent parser state over a single GLSL token stream.
pub struct ParsingContext<'source> {
    /// Token source, wrapped in [`Peekable`] for one token of lookahead.
    lexer: Peekable<Lexer<'source>>,
    /// Used to store tokens already consumed by the parser but that need to be backtracked
    backtracked_token: Option<Token>,
    /// Span of the most recently consumed token; used to locate end-of-file errors.
    last_meta: Span,
}
impl<'source> ParsingContext<'source> {
    /// Creates a parsing context over the given lexer.
    pub fn new(lexer: Lexer<'source>) -> Self {
        ParsingContext {
            lexer: lexer.peekable(),
            backtracked_token: None,
            last_meta: Span::default(),
        }
    }
    /// Helper method for backtracking from a consumed token
    ///
    /// This method should always be used instead of assigning to `backtracked_token` since
    /// it validates that backtracking hasn't occurred more than one time in a row
    ///
    /// # Errors
    /// - Returns an internal error if the parser already backtracked without
    ///   bumping in between (only one token of lookbehind is supported)
    pub fn backtrack(&mut self, token: Token) -> Result<()> {
        // This should never happen
        if let Some(ref prev_token) = self.backtracked_token {
            return Err(Error {
                kind: ErrorKind::InternalError("The parser tried to backtrack twice in a row"),
                meta: prev_token.meta,
            });
        }
        self.backtracked_token = Some(token);
        Ok(())
    }
    /// Consumes the next token, requiring it to be an identifier; returns its
    /// name and span.
    pub fn expect_ident(&mut self, frontend: &mut Frontend) -> Result<(String, Span)> {
        let token = self.bump(frontend)?;
        match token.value {
            TokenValue::Identifier(name) => Ok((name, token.meta)),
            _ => Err(Error {
                kind: ErrorKind::InvalidToken(token.value, vec![ExpectedToken::Identifier]),
                meta: token.meta,
            }),
        }
    }
    /// Consumes the next token, requiring it to be exactly `value`.
    pub fn expect(&mut self, frontend: &mut Frontend, value: TokenValue) -> Result<Token> {
        let token = self.bump(frontend)?;
        if token.value != value {
            Err(Error {
                kind: ErrorKind::InvalidToken(token.value, vec![value.into()]),
                meta: token.meta,
            })
        } else {
            Ok(token)
        }
    }
    /// Returns the next token, preferring a backtracked one if present.
    ///
    /// Directives and lexer errors never surface here: directives are handled
    /// immediately and errors are accumulated in `frontend.errors`, after
    /// which the loop continues to the next real token. Returns `None` at end
    /// of input.
    pub fn next(&mut self, frontend: &mut Frontend) -> Option<Token> {
        loop {
            if let Some(token) = self.backtracked_token.take() {
                self.last_meta = token.meta;
                break Some(token);
            }
            let res = self.lexer.next()?;
            match res.kind {
                LexerResultKind::Token(token) => {
                    self.last_meta = token.meta;
                    break Some(token);
                }
                LexerResultKind::Directive(directive) => {
                    frontend.handle_directive(directive, res.meta)
                }
                LexerResultKind::Error(error) => frontend.errors.push(Error {
                    kind: ErrorKind::PreprocessorError(error),
                    meta: res.meta,
                }),
            }
        }
    }
    /// Like [`next`](Self::next) but converts end of input into an
    /// [`ErrorKind::EndOfFile`] error located at the last consumed token.
    pub fn bump(&mut self, frontend: &mut Frontend) -> Result<Token> {
        self.next(frontend).ok_or(Error {
            kind: ErrorKind::EndOfFile,
            meta: self.last_meta,
        })
    }
    /// Consumes the next token only if it equals `value`.
    ///
    /// Returns None on the end of the file rather than an error like other methods
    pub fn bump_if(&mut self, frontend: &mut Frontend, value: TokenValue) -> Option<Token> {
        if self.peek(frontend).filter(|t| t.value == value).is_some() {
            self.bump(frontend).ok()
        } else {
            None
        }
    }
    /// Peeks at the next token without consuming it, processing directives and
    /// accumulating lexer errors along the way (mirroring [`next`](Self::next)).
    pub fn peek(&mut self, frontend: &mut Frontend) -> Option<&Token> {
        loop {
            if let Some(ref token) = self.backtracked_token {
                break Some(token);
            }
            match self.lexer.peek()?.kind {
                LexerResultKind::Token(_) => {
                    // NOTE(review): peeks a second time to obtain a borrow
                    // usable outside the outer `match` — appears to work
                    // around a borrow-checker limitation; confirm before
                    // simplifying
                    let res = self.lexer.peek()?;
                    match res.kind {
                        LexerResultKind::Token(ref token) => break Some(token),
                        _ => unreachable!(),
                    }
                }
                LexerResultKind::Error(_) | LexerResultKind::Directive(_) => {
                    // Consume the directive/error so the next iteration peeks
                    // at a fresh lexer result
                    let res = self.lexer.next()?;
                    match res.kind {
                        LexerResultKind::Directive(directive) => {
                            frontend.handle_directive(directive, res.meta)
                        }
                        LexerResultKind::Error(error) => frontend.errors.push(Error {
                            kind: ErrorKind::PreprocessorError(error),
                            meta: res.meta,
                        }),
                        LexerResultKind::Token(_) => unreachable!(),
                    }
                }
            }
        }
    }
    /// Like [`peek`](Self::peek) but converts end of input into an
    /// [`ErrorKind::EndOfFile`] error.
    pub fn expect_peek(&mut self, frontend: &mut Frontend) -> Result<&Token> {
        let meta = self.last_meta;
        self.peek(frontend).ok_or(Error {
            kind: ErrorKind::EndOfFile,
            meta,
        })
    }
    /// Parses the whole translation unit and registers the `main` entry point.
    ///
    /// Errors if no defined, zero-parameter `main` overload exists.
    pub fn parse(&mut self, frontend: &mut Frontend) -> Result<()> {
        // Body and expression arena for global initialization
        let mut body = Block::new();
        let mut ctx = Context::new(frontend, &mut body);
        while self.peek(frontend).is_some() {
            self.parse_external_declaration(frontend, &mut ctx, &mut body)?;
        }
        // Add an `EntryPoint` to `parser.module` for `main`, if a
        // suitable overload exists. Error out if we can't find one.
        if let Some(declaration) = frontend.lookup_function.get("main") {
            for decl in declaration.overloads.iter() {
                if let FunctionKind::Call(handle) = decl.kind {
                    if decl.defined && decl.parameters.is_empty() {
                        frontend.add_entry_point(handle, body, ctx.expressions);
                        return Ok(());
                    }
                }
            }
        }
        Err(Error {
            kind: ErrorKind::SemanticError("Missing entry point".into()),
            meta: Span::default(),
        })
    }
    /// Parses a constant expression and evaluates it to a `u32`, erroring on
    /// negative or non-constant values.
    fn parse_uint_constant(&mut self, frontend: &mut Frontend) -> Result<(u32, Span)> {
        let (const_expr, meta) = self.parse_constant_expression(frontend)?;
        let res = frontend.module.to_ctx().eval_expr_to_u32(const_expr);
        let int = match res {
            Ok(value) => Ok(value),
            Err(U32EvalError::Negative) => Err(Error {
                kind: ErrorKind::SemanticError("int constant overflows".into()),
                meta,
            }),
            Err(U32EvalError::NonConst) => Err(Error {
                kind: ErrorKind::SemanticError("Expected a uint constant".into()),
                meta,
            }),
        }?;
        Ok((int, meta))
    }
    /// Parses a conditional expression in a throwaway context and solves it to
    /// a constant expression handle.
    fn parse_constant_expression(
        &mut self,
        frontend: &mut Frontend,
    ) -> Result<(Handle<Expression>, Span)> {
        let mut block = Block::new();
        let mut ctx = Context::new(frontend, &mut block);
        let mut stmt_ctx = ctx.stmt_ctx();
        let expr = self.parse_conditional(frontend, &mut ctx, &mut stmt_ctx, &mut block, None)?;
        let (root, meta) = ctx.lower_expect(stmt_ctx, frontend, expr, ExprPos::Rhs, &mut block)?;
        Ok((frontend.solve_constant(&ctx, root, meta)?, meta))
    }
}
impl Frontend {
    /// Handles a preprocessor directive produced by the lexer.
    ///
    /// Malformed directives never abort parsing; problems are pushed to
    /// `self.errors` so that all diagnostics are collected in a single pass.
    fn handle_directive(&mut self, directive: Directive, meta: Span) {
        let mut tokens = directive.tokens.into_iter();
        match directive.kind {
            DirectiveKind::Version { is_first_directive } => {
                // `#version` must be the very first directive in the shader
                if !is_first_directive {
                    self.errors.push(Error {
                        kind: ErrorKind::SemanticError(
                            "#version must occur first in shader".into(),
                        ),
                        meta,
                    })
                }
                // First token: the version number (only 440/450/460 accepted)
                match tokens.next() {
                    Some(PPToken {
                        value: PPTokenValue::Integer(int),
                        location,
                    }) => match int.value {
                        440 | 450 | 460 => self.meta.version = int.value as u16,
                        _ => self.errors.push(Error {
                            kind: ErrorKind::InvalidVersion(int.value),
                            meta: location.into(),
                        }),
                    },
                    Some(PPToken { value, location }) => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    }),
                    None => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedNewLine),
                        meta,
                    }),
                };
                // Optional second token: the profile (only `core` accepted)
                match tokens.next() {
                    Some(PPToken {
                        value: PPTokenValue::Ident(name),
                        location,
                    }) => match name.as_str() {
                        "core" => self.meta.profile = Profile::Core,
                        _ => self.errors.push(Error {
                            kind: ErrorKind::InvalidProfile(name),
                            meta: location.into(),
                        }),
                    },
                    Some(PPToken { value, location }) => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    }),
                    None => {}
                };
                // Anything after the profile is an error
                if let Some(PPToken { value, location }) = tokens.next() {
                    self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    })
                }
            }
            DirectiveKind::Extension => {
                // TODO: Proper extension handling
                // - Checking for extension support in the compiler
                // - Handle behaviors such as warn
                // - Handle the all extension
                // Expected shape: `#extension <name> : <behavior>`
                let name = match tokens.next() {
                    Some(PPToken {
                        value: PPTokenValue::Ident(name),
                        ..
                    }) => Some(name),
                    Some(PPToken { value, location }) => {
                        self.errors.push(Error {
                            kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                                value,
                            )),
                            meta: location.into(),
                        });
                        None
                    }
                    None => {
                        self.errors.push(Error {
                            kind: ErrorKind::PreprocessorError(
                                PreprocessorError::UnexpectedNewLine,
                            ),
                            meta,
                        });
                        None
                    }
                };
                // The `:` separator between name and behavior
                match tokens.next() {
                    Some(PPToken {
                        value: PPTokenValue::Punct(pp_rs::token::Punct::Colon),
                        ..
                    }) => {}
                    Some(PPToken { value, location }) => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    }),
                    None => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedNewLine),
                        meta,
                    }),
                };
                // The behavior; every valid behavior currently just records
                // the extension name as enabled
                match tokens.next() {
                    Some(PPToken {
                        value: PPTokenValue::Ident(behavior),
                        location,
                    }) => match behavior.as_str() {
                        "require" | "enable" | "warn" | "disable" => {
                            if let Some(name) = name {
                                self.meta.extensions.insert(name);
                            }
                        }
                        _ => self.errors.push(Error {
                            kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                                PPTokenValue::Ident(behavior),
                            )),
                            meta: location.into(),
                        }),
                    },
                    Some(PPToken { value, location }) => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    }),
                    None => self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedNewLine),
                        meta,
                    }),
                }
                // Trailing tokens after the behavior are an error
                if let Some(PPToken { value, location }) = tokens.next() {
                    self.errors.push(Error {
                        kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedToken(
                            value,
                        )),
                        meta: location.into(),
                    })
                }
            }
            DirectiveKind::Pragma => {
                // TODO: handle some common pragmas?
            }
        }
    }
}
/// State shared while parsing a single declaration (an init declarator list).
pub struct DeclarationContext<'ctx, 'qualifiers> {
    /// Qualifiers that apply to every declarator in the list.
    qualifiers: TypeQualifiers<'qualifiers>,
    /// Indicates a global declaration
    external: bool,
    /// Expression/lowering context the declaration is parsed into.
    ctx: &'ctx mut Context,
    /// Block that receives statements emitted for the declaration.
    body: &'ctx mut Block,
}
impl<'ctx, 'qualifiers> DeclarationContext<'ctx, 'qualifiers> {
    /// Registers a new variable described by this declaration.
    ///
    /// For a global declaration (`self.external`) the variable is added to the
    /// module as a global or, when it folds to one, a constant; otherwise it is
    /// added as a function-local variable. In both cases the returned
    /// expression refers to the freshly declared variable and is usable as the
    /// target of a `Store`.
    fn add_var(
        &mut self,
        frontend: &mut Frontend,
        ty: Handle<Type>,
        name: String,
        init: Option<Handle<Expression>>,
        meta: Span,
    ) -> Result<Handle<Expression>> {
        let decl = VarDeclaration {
            qualifiers: &mut self.qualifiers,
            ty,
            name: Some(name),
            init,
            meta,
        };
        // `if`/`else` instead of `match` on a bool (clippy: match_bool)
        if self.external {
            let global = frontend.add_global_var(self.ctx, self.body, decl)?;
            let expr = match global {
                GlobalOrConstant::Global(handle) => Expression::GlobalVariable(handle),
                GlobalOrConstant::Constant(handle) => Expression::Constant(handle),
            };
            Ok(self.ctx.add_expression(expr, meta, self.body))
        } else {
            frontend.add_local_var(self.ctx, self.body, decl)
        }
    }
    /// Emits all the expressions captured by the emitter and starts the emitter again
    ///
    /// Alias to [`emit_restart`] with the declaration body
    ///
    /// [`emit_restart`]: Context::emit_restart
    #[inline]
    fn flush_expressions(&mut self) {
        self.ctx.emit_restart(self.body);
    }
}

View file

@ -0,0 +1,677 @@
use crate::{
front::glsl::{
ast::{
GlobalLookup, GlobalLookupKind, Precision, QualifierKey, QualifierValue,
StorageQualifier, StructLayout, TypeQualifiers,
},
context::{Context, ExprPos},
error::ExpectedToken,
offset,
token::{Token, TokenValue},
types::scalar_components,
variables::{GlobalOrConstant, VarDeclaration},
Error, ErrorKind, Frontend, Span,
},
proc::Alignment,
AddressSpace, Block, Expression, FunctionResult, Handle, ScalarKind, Statement, StructMember,
Type, TypeInner,
};
use super::{DeclarationContext, ParsingContext, Result};
/// Retrieves the type of the component obtained by indexing `ty` at
/// index `i`.
///
/// # Note
///
/// No bounds checking is performed: an out-of-range struct index and
/// any non-indexable type simply yield `ty` back unchanged.
fn element_or_member_type(
    ty: Handle<Type>,
    i: usize,
    types: &mut crate::UniqueArena<Type>,
) -> Handle<Type> {
    // Arms that need a brand new (anonymous) type produce a `TypeInner` and
    // fall through to the single `insert` at the bottom; the rest return an
    // existing handle directly.
    let inner = match types[ty].inner {
        // Indexing a vector yields a scalar of the same kind and width
        TypeInner::Vector { kind, width, .. } => TypeInner::Scalar { kind, width },
        // Indexing a matrix yields one of its columns: a float vector whose
        // size equals the matrix's row count
        TypeInner::Matrix { rows, width, .. } => TypeInner::Vector {
            size: rows,
            kind: ScalarKind::Float,
            width,
        },
        // Indexing an array yields its base type
        TypeInner::Array { base, .. } => return base,
        // Indexing a struct yields the type of member `i`, falling back to
        // the struct type itself when `i` is out of bounds
        TypeInner::Struct { ref members, .. } => {
            return members.get(i).map_or(ty, |member| member.ty)
        }
        // Everything else isn't indexable; hand the type back unchanged
        _ => return ty,
    };
    types.insert(Type { name: None, inner }, Default::default())
}
impl<'source> ParsingContext<'source> {
    /// Parses a single top-level declaration, erroring out when the tokens
    /// don't form one (a stray `;` is tolerated under `#version 460`).
    pub fn parse_external_declaration(
        &mut self,
        frontend: &mut Frontend,
        global_ctx: &mut Context,
        global_body: &mut Block,
    ) -> Result<()> {
        if self
            .parse_declaration(frontend, global_ctx, global_body, true)?
            .is_none()
        {
            let token = self.bump(frontend)?;
            match token.value {
                // GLSL 4.60 allows empty declarations (a bare `;`) at global scope
                TokenValue::Semicolon if frontend.meta.version == 460 => Ok(()),
                _ => {
                    let expected = match frontend.meta.version {
                        460 => vec![TokenValue::Semicolon.into(), ExpectedToken::Eof],
                        _ => vec![ExpectedToken::Eof],
                    };
                    Err(Error {
                        kind: ErrorKind::InvalidToken(token.value, expected),
                        meta: token.meta,
                    })
                }
            }
        } else {
            Ok(())
        }
    }
    /// Parses an initializer for a variable of type `ty`.
    ///
    /// Braced initializer lists are lowered to a `Compose` expression; a plain
    /// initializer is an assignment expression, implicitly converted to `ty`'s
    /// scalar kind/width when one applies.
    pub fn parse_initializer(
        &mut self,
        frontend: &mut Frontend,
        ty: Handle<Type>,
        ctx: &mut Context,
        body: &mut Block,
    ) -> Result<(Handle<Expression>, Span)> {
        // initializer:
        //     assignment_expression
        //     LEFT_BRACE initializer_list RIGHT_BRACE
        //     LEFT_BRACE initializer_list COMMA RIGHT_BRACE
        //
        // initializer_list:
        //     initializer
        //     initializer_list COMMA initializer
        if let Some(Token { mut meta, .. }) = self.bump_if(frontend, TokenValue::LeftBrace) {
            // initializer_list
            let mut components = Vec::new();
            loop {
                // The type expected to be parsed inside the initializer list
                let new_ty =
                    element_or_member_type(ty, components.len(), &mut frontend.module.types);
                components.push(self.parse_initializer(frontend, new_ty, ctx, body)?.0);
                let token = self.bump(frontend)?;
                match token.value {
                    TokenValue::Comma => {
                        // A trailing comma right before the closing brace is allowed
                        if let Some(Token { meta: end_meta, .. }) =
                            self.bump_if(frontend, TokenValue::RightBrace)
                        {
                            meta.subsume(end_meta);
                            break;
                        }
                    }
                    TokenValue::RightBrace => {
                        meta.subsume(token.meta);
                        break;
                    }
                    _ => {
                        return Err(Error {
                            kind: ErrorKind::InvalidToken(
                                token.value,
                                vec![TokenValue::Comma.into(), TokenValue::RightBrace.into()],
                            ),
                            meta: token.meta,
                        })
                    }
                }
            }
            Ok((
                ctx.add_expression(Expression::Compose { ty, components }, meta, body),
                meta,
            ))
        } else {
            let mut stmt = ctx.stmt_ctx();
            let expr = self.parse_assignment(frontend, ctx, &mut stmt, body)?;
            let (mut init, init_meta) =
                ctx.lower_expect(stmt, frontend, expr, ExprPos::Rhs, body)?;
            // Convert the initializer to the declared scalar kind/width, if any
            let scalar_components = scalar_components(&frontend.module.types[ty].inner);
            if let Some((kind, width)) = scalar_components {
                ctx.implicit_conversion(frontend, &mut init, init_meta, kind, width)?;
            }
            Ok((init, init_meta))
        }
    }
    // Note: caller preparsed the type and qualifiers
    // Note: caller skips this if the fallthrough token is not expected to be consumed here so this
    // produced Error::InvalidToken if it isn't consumed
    pub fn parse_init_declarator_list(
        &mut self,
        frontend: &mut Frontend,
        mut ty: Handle<Type>,
        ctx: &mut DeclarationContext,
    ) -> Result<()> {
        // init_declarator_list:
        //     single_declaration
        //     init_declarator_list COMMA IDENTIFIER
        //     init_declarator_list COMMA IDENTIFIER array_specifier
        //     init_declarator_list COMMA IDENTIFIER array_specifier EQUAL initializer
        //     init_declarator_list COMMA IDENTIFIER EQUAL initializer
        //
        // single_declaration:
        //     fully_specified_type
        //     fully_specified_type IDENTIFIER
        //     fully_specified_type IDENTIFIER array_specifier
        //     fully_specified_type IDENTIFIER array_specifier EQUAL initializer
        //     fully_specified_type IDENTIFIER EQUAL initializer
        // Consume any leading comma, e.g. this is valid: `float, a=1;`
        if self
            .peek(frontend)
            .map_or(false, |t| t.value == TokenValue::Comma)
        {
            self.next(frontend);
        }
        loop {
            let token = self.bump(frontend)?;
            let name = match token.value {
                TokenValue::Semicolon => break,
                TokenValue::Identifier(name) => name,
                _ => {
                    return Err(Error {
                        kind: ErrorKind::InvalidToken(
                            token.value,
                            vec![ExpectedToken::Identifier, TokenValue::Semicolon.into()],
                        ),
                        meta: token.meta,
                    })
                }
            };
            let mut meta = token.meta;
            // array_specifier
            // array_specifier EQUAL initializer
            // EQUAL initializer
            // parse an array specifier if it exists
            // NOTE: unlike other parse methods this one doesn't expect an array specifier and
            // returns Ok(None) rather than an error if there is not one
            self.parse_array_specifier(frontend, &mut meta, &mut ty)?;
            let init = self
                .bump_if(frontend, TokenValue::Assign)
                .map::<Result<_>, _>(|_| {
                    let (mut expr, init_meta) =
                        self.parse_initializer(frontend, ty, ctx.ctx, ctx.body)?;
                    // Match the initializer's scalar kind/width to the declared type
                    let scalar_components = scalar_components(&frontend.module.types[ty].inner);
                    if let Some((kind, width)) = scalar_components {
                        ctx.ctx
                            .implicit_conversion(frontend, &mut expr, init_meta, kind, width)?;
                    }
                    meta.subsume(init_meta);
                    Ok((expr, init_meta))
                })
                .transpose()?;
            let is_const = ctx.qualifiers.storage.0 == StorageQualifier::Const;
            // Try to fold the initializer to a constant for global declarations
            let maybe_const_expr = if ctx.external {
                if let Some((root, meta)) = init {
                    match frontend.solve_constant(ctx.ctx, root, meta) {
                        Ok(res) => Some(res),
                        // If the declaration is external (global scope) and is constant qualified
                        // then the initializer must be a constant expression
                        Err(err) if is_const => return Err(err),
                        _ => None,
                    }
                } else {
                    None
                }
            } else {
                None
            };
            let pointer = ctx.add_var(frontend, ty, name, maybe_const_expr, meta)?;
            // If the initializer couldn't be folded to a constant, emit a
            // runtime store into the freshly declared variable instead
            if let Some((value, _)) = init.filter(|_| maybe_const_expr.is_none()) {
                ctx.flush_expressions();
                ctx.body.push(Statement::Store { pointer, value }, meta);
            }
            let token = self.bump(frontend)?;
            match token.value {
                TokenValue::Semicolon => break,
                TokenValue::Comma => {}
                _ => {
                    return Err(Error {
                        kind: ErrorKind::InvalidToken(
                            token.value,
                            vec![TokenValue::Comma.into(), TokenValue::Semicolon.into()],
                        ),
                        meta: token.meta,
                    })
                }
            }
        }
        Ok(())
    }
    /// Parses a declaration; `external` signals global (module) scope, where
    /// function prototypes and definitions are also accepted.
    ///
    /// Returns `Ok(None)` when the lookahead doesn't start a declaration so
    /// the caller can decide how to recover; otherwise the declaration's span.
    pub fn parse_declaration(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        body: &mut Block,
        external: bool,
    ) -> Result<Option<Span>> {
        //declaration:
        //    function_prototype SEMICOLON
        //
        //    init_declarator_list SEMICOLON
        //    PRECISION precision_qualifier type_specifier SEMICOLON
        //
        //    type_qualifier IDENTIFIER LEFT_BRACE struct_declaration_list RIGHT_BRACE SEMICOLON
        //    type_qualifier IDENTIFIER LEFT_BRACE struct_declaration_list RIGHT_BRACE IDENTIFIER SEMICOLON
        //    type_qualifier IDENTIFIER LEFT_BRACE struct_declaration_list RIGHT_BRACE IDENTIFIER array_specifier SEMICOLON
        //    type_qualifier SEMICOLON type_qualifier IDENTIFIER SEMICOLON
        //    type_qualifier IDENTIFIER identifier_list SEMICOLON
        if self.peek_type_qualifier(frontend) || self.peek_type_name(frontend) {
            let mut qualifiers = self.parse_type_qualifiers(frontend)?;
            if self.peek_type_name(frontend) {
                // This branch handles variables and function prototypes and if
                // external is true also function definitions
                let (ty, mut meta) = self.parse_type(frontend)?;
                let token = self.bump(frontend)?;
                let token_fallthrough = match token.value {
                    TokenValue::Identifier(name) => match self.expect_peek(frontend)?.value {
                        TokenValue::LeftParen => {
                            // This branch handles function definition and prototypes
                            self.bump(frontend)?;
                            // `ty` is `None` for `void` (see the void error below),
                            // in which case the function has no result
                            let result = ty.map(|ty| FunctionResult { ty, binding: None });
                            let mut body = Block::new();
                            let mut context = Context::new(frontend, &mut body);
                            self.parse_function_args(frontend, &mut context, &mut body)?;
                            let end_meta = self.expect(frontend, TokenValue::RightParen)?.meta;
                            meta.subsume(end_meta);
                            let token = self.bump(frontend)?;
                            return match token.value {
                                TokenValue::Semicolon => {
                                    // This branch handles function prototypes
                                    frontend.add_prototype(context, name, result, meta);
                                    Ok(Some(meta))
                                }
                                TokenValue::LeftBrace if external => {
                                    // This branch handles function definitions
                                    // as you can see by the guard this branch
                                    // only happens if external is also true
                                    // parse the body
                                    self.parse_compound_statement(
                                        token.meta,
                                        frontend,
                                        &mut context,
                                        &mut body,
                                        &mut None,
                                    )?;
                                    frontend.add_function(context, name, result, body, meta);
                                    Ok(Some(meta))
                                }
                                _ if external => Err(Error {
                                    kind: ErrorKind::InvalidToken(
                                        token.value,
                                        vec![
                                            TokenValue::LeftBrace.into(),
                                            TokenValue::Semicolon.into(),
                                        ],
                                    ),
                                    meta: token.meta,
                                }),
                                _ => Err(Error {
                                    kind: ErrorKind::InvalidToken(
                                        token.value,
                                        vec![TokenValue::Semicolon.into()],
                                    ),
                                    meta: token.meta,
                                }),
                            };
                        }
                        // Pass the token to the init_declarator_list parser
                        _ => Token {
                            value: TokenValue::Identifier(name),
                            meta: token.meta,
                        },
                    },
                    // Pass the token to the init_declarator_list parser
                    _ => token,
                };
                // If program execution has reached here then this will be a
                // init_declarator_list
                // token_fallthrough will have a token that was already bumped
                if let Some(ty) = ty {
                    let mut ctx = DeclarationContext {
                        qualifiers,
                        external,
                        ctx,
                        body,
                    };
                    self.backtrack(token_fallthrough)?;
                    self.parse_init_declarator_list(frontend, ty, &mut ctx)?;
                } else {
                    frontend.errors.push(Error {
                        kind: ErrorKind::SemanticError("Declaration cannot have void type".into()),
                        meta,
                    })
                }
                Ok(Some(meta))
            } else {
                // This branch handles struct definitions and modifiers like
                // ```glsl
                // layout(early_fragment_tests);
                // ```
                let token = self.bump(frontend)?;
                match token.value {
                    TokenValue::Identifier(ty_name) => {
                        if self.bump_if(frontend, TokenValue::LeftBrace).is_some() {
                            self.parse_block_declaration(
                                frontend,
                                ctx,
                                body,
                                &mut qualifiers,
                                ty_name,
                                token.meta,
                            )
                            .map(Some)
                        } else {
                            // `invariant ident;` re-declares an existing
                            // variable as invariant
                            if qualifiers.invariant.take().is_some() {
                                frontend.make_variable_invariant(ctx, body, &ty_name, token.meta);
                                qualifiers.unused_errors(&mut frontend.errors);
                                self.expect(frontend, TokenValue::Semicolon)?;
                                return Ok(Some(qualifiers.span));
                            }
                            //TODO: declaration
                            // type_qualifier IDENTIFIER SEMICOLON
                            // type_qualifier IDENTIFIER identifier_list SEMICOLON
                            Err(Error {
                                kind: ErrorKind::NotImplemented("variable qualifier"),
                                meta: token.meta,
                            })
                        }
                    }
                    TokenValue::Semicolon => {
                        // Qualifier-only declaration: compute workgroup size
                        // and/or early fragment tests
                        if let Some(value) =
                            qualifiers.uint_layout_qualifier("local_size_x", &mut frontend.errors)
                        {
                            frontend.meta.workgroup_size[0] = value;
                        }
                        if let Some(value) =
                            qualifiers.uint_layout_qualifier("local_size_y", &mut frontend.errors)
                        {
                            frontend.meta.workgroup_size[1] = value;
                        }
                        if let Some(value) =
                            qualifiers.uint_layout_qualifier("local_size_z", &mut frontend.errors)
                        {
                            frontend.meta.workgroup_size[2] = value;
                        }
                        frontend.meta.early_fragment_tests |= qualifiers
                            .none_layout_qualifier("early_fragment_tests", &mut frontend.errors);
                        qualifiers.unused_errors(&mut frontend.errors);
                        Ok(Some(qualifiers.span))
                    }
                    _ => Err(Error {
                        kind: ErrorKind::InvalidToken(
                            token.value,
                            vec![ExpectedToken::Identifier, TokenValue::Semicolon.into()],
                        ),
                        meta: token.meta,
                    }),
                }
            }
        } else {
            match self.peek(frontend).map(|t| &t.value) {
                Some(&TokenValue::Precision) => {
                    // PRECISION precision_qualifier type_specifier SEMICOLON
                    self.bump(frontend)?;
                    let token = self.bump(frontend)?;
                    // The qualifier itself is validated but otherwise ignored
                    let _ = match token.value {
                        TokenValue::PrecisionQualifier(p) => p,
                        _ => {
                            return Err(Error {
                                kind: ErrorKind::InvalidToken(
                                    token.value,
                                    vec![
                                        TokenValue::PrecisionQualifier(Precision::High).into(),
                                        TokenValue::PrecisionQualifier(Precision::Medium).into(),
                                        TokenValue::PrecisionQualifier(Precision::Low).into(),
                                    ],
                                ),
                                meta: token.meta,
                            })
                        }
                    };
                    let (ty, meta) = self.parse_type_non_void(frontend)?;
                    match frontend.module.types[ty].inner {
                        TypeInner::Scalar {
                            kind: ScalarKind::Float | ScalarKind::Sint,
                            ..
                        } => {}
                        _ => frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Precision statement can only work on floats and ints".into(),
                            ),
                            meta,
                        }),
                    }
                    self.expect(frontend, TokenValue::Semicolon)?;
                    Ok(Some(meta))
                }
                _ => Ok(None),
            }
        }
    }
    /// Parses a uniform/buffer block declaration, starting right after its
    /// opening brace.
    ///
    /// Each named member is also registered as a global lookup so it can be
    /// referenced directly by name inside functions.
    pub fn parse_block_declaration(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        body: &mut Block,
        qualifiers: &mut TypeQualifiers,
        ty_name: String,
        mut meta: Span,
    ) -> Result<Span> {
        // Without an explicit layout qualifier, storage blocks default to
        // std430 and everything else to std140
        let layout = match qualifiers.layout_qualifiers.remove(&QualifierKey::Layout) {
            Some((QualifierValue::Layout(l), _)) => l,
            None => {
                if let StorageQualifier::AddressSpace(AddressSpace::Storage { .. }) =
                    qualifiers.storage.0
                {
                    StructLayout::Std430
                } else {
                    StructLayout::Std140
                }
            }
            _ => unreachable!(),
        };
        let mut members = Vec::new();
        let span = self.parse_struct_declaration_list(frontend, &mut members, layout)?;
        self.expect(frontend, TokenValue::RightBrace)?;
        let mut ty = frontend.module.types.insert(
            Type {
                name: Some(ty_name),
                inner: TypeInner::Struct {
                    members: members.clone(),
                    span,
                },
            },
            Default::default(),
        );
        // Optional instance name (possibly with an array specifier)
        let token = self.bump(frontend)?;
        let name = match token.value {
            TokenValue::Semicolon => None,
            TokenValue::Identifier(name) => {
                self.parse_array_specifier(frontend, &mut meta, &mut ty)?;
                self.expect(frontend, TokenValue::Semicolon)?;
                Some(name)
            }
            _ => {
                return Err(Error {
                    kind: ErrorKind::InvalidToken(
                        token.value,
                        vec![ExpectedToken::Identifier, TokenValue::Semicolon.into()],
                    ),
                    meta: token.meta,
                })
            }
        };
        let global = frontend.add_global_var(
            ctx,
            body,
            VarDeclaration {
                qualifiers,
                ty,
                name,
                init: None,
                meta,
            },
        )?;
        // Expose every named member as if it were a global variable
        for (i, k, ty) in members.into_iter().enumerate().filter_map(|(i, m)| {
            let ty = m.ty;
            m.name.map(|s| (i as u32, s, ty))
        }) {
            let lookup = GlobalLookup {
                kind: match global {
                    GlobalOrConstant::Global(handle) => GlobalLookupKind::BlockSelect(handle, i),
                    GlobalOrConstant::Constant(handle) => GlobalLookupKind::Constant(handle, ty),
                },
                entry_arg: None,
                mutable: true,
            };
            ctx.add_global(frontend, &k, lookup, body);
            frontend.global_variables.push((k, lookup));
        }
        Ok(meta)
    }
    // TODO: Accept layout arguments
    /// Parses the member list of a struct/block body, pushing members (with
    /// layout-computed offsets) into `members` and returning the total span.
    pub fn parse_struct_declaration_list(
        &mut self,
        frontend: &mut Frontend,
        members: &mut Vec<StructMember>,
        layout: StructLayout,
    ) -> Result<u32> {
        let mut span = 0;
        let mut align = Alignment::ONE;
        loop {
            // TODO: type_qualifier
            let (base_ty, mut meta) = self.parse_type_non_void(frontend)?;
            // Multiple declarators may share one base type: `float a, b;`
            loop {
                let (name, name_meta) = self.expect_ident(frontend)?;
                let mut ty = base_ty;
                self.parse_array_specifier(frontend, &mut meta, &mut ty)?;
                meta.subsume(name_meta);
                let info = offset::calculate_offset(
                    ty,
                    meta,
                    layout,
                    &mut frontend.module.types,
                    &mut frontend.errors,
                );
                let member_alignment = info.align;
                // Pad the running span up to this member's alignment; the
                // padded value becomes the member's offset
                span = member_alignment.round_up(span);
                align = member_alignment.max(align);
                members.push(StructMember {
                    name: Some(name),
                    ty: info.ty,
                    binding: None,
                    offset: span,
                });
                span += info.span;
                if self.bump_if(frontend, TokenValue::Comma).is_none() {
                    break;
                }
            }
            self.expect(frontend, TokenValue::Semicolon)?;
            if let TokenValue::RightBrace = self.expect_peek(frontend)?.value {
                break;
            }
        }
        // The struct's size is padded to its largest member alignment
        span = align.round_up(span);
        Ok(span)
    }
}

View file

@ -0,0 +1,547 @@
use std::num::NonZeroU32;
use crate::{
front::glsl::{
ast::{FunctionCall, FunctionCallKind, HirExpr, HirExprKind},
context::{Context, StmtContext},
error::{ErrorKind, ExpectedToken},
parser::ParsingContext,
token::{Token, TokenValue},
Error, Frontend, Result, Span,
},
ArraySize, BinaryOperator, Block, Handle, Literal, Type, TypeInner, UnaryOperator,
};
impl<'source> ParsingContext<'source> {
    /// Parses a primary expression: an int/float/bool literal or a
    /// parenthesized expression.
    pub fn parse_primary(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
    ) -> Result<Handle<HirExpr>> {
        let mut token = self.bump(frontend)?;
        let literal = match token.value {
            TokenValue::IntConstant(int) => {
                // Only 32-bit integer literals are supported; the error is
                // recorded but the value is still truncated below so that
                // parsing can continue
                if int.width != 32 {
                    frontend.errors.push(Error {
                        kind: ErrorKind::SemanticError("Unsupported non-32bit integer".into()),
                        meta: token.meta,
                    });
                }
                if int.signed {
                    Literal::I32(int.value as i32)
                } else {
                    Literal::U32(int.value as u32)
                }
            }
            TokenValue::FloatConstant(float) => {
                // Likewise, only single-precision float literals are supported
                if float.width != 32 {
                    frontend.errors.push(Error {
                        kind: ErrorKind::SemanticError("Unsupported floating-point value (expected single-precision floating-point number)".into()),
                        meta: token.meta,
                    });
                }
                Literal::F32(float.value)
            }
            TokenValue::BoolConstant(value) => Literal::Bool(value),
            TokenValue::LeftParen => {
                // Parenthesized expression; return the inner expression as-is
                let expr = self.parse_expression(frontend, ctx, stmt, body)?;
                let meta = self.expect(frontend, TokenValue::RightParen)?.meta;
                token.meta.subsume(meta);
                return Ok(expr);
            }
            _ => {
                return Err(Error {
                    kind: ErrorKind::InvalidToken(
                        token.value,
                        vec![
                            TokenValue::LeftParen.into(),
                            ExpectedToken::IntLiteral,
                            ExpectedToken::FloatLiteral,
                            ExpectedToken::BoolLiteral,
                        ],
                    ),
                    meta: token.meta,
                });
            }
        };
        Ok(stmt.hir_exprs.append(
            HirExpr {
                kind: HirExprKind::Literal(literal),
                meta: token.meta,
            },
            Default::default(),
        ))
    }
    /// Parses a call's argument list, starting just after the opening
    /// parenthesis; `meta` is widened to cover the closing parenthesis.
    pub fn parse_function_call_args(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
        meta: &mut Span,
    ) -> Result<Vec<Handle<HirExpr>>> {
        let mut args = Vec::new();
        if let Some(token) = self.bump_if(frontend, TokenValue::RightParen) {
            // Empty argument list: `()`
            meta.subsume(token.meta);
        } else {
            loop {
                args.push(self.parse_assignment(frontend, ctx, stmt, body)?);
                let token = self.bump(frontend)?;
                match token.value {
                    TokenValue::Comma => {}
                    TokenValue::RightParen => {
                        meta.subsume(token.meta);
                        break;
                    }
                    _ => {
                        return Err(Error {
                            kind: ErrorKind::InvalidToken(
                                token.value,
                                vec![TokenValue::Comma.into(), TokenValue::RightParen.into()],
                            ),
                            meta: token.meta,
                        });
                    }
                }
            }
        }
        Ok(args)
    }
    /// Parses a postfix expression: a type-constructor call, a function call,
    /// a variable reference or a primary expression, followed by any number of
    /// postfix operators (indexing, field/method selection, `++`/`--`).
    pub fn parse_postfix(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
    ) -> Result<Handle<HirExpr>> {
        let mut base = if self.peek_type_name(frontend) {
            // Type-constructor call, e.g. `vec3(...)`
            let (mut handle, mut meta) = self.parse_type_non_void(frontend)?;
            self.expect(frontend, TokenValue::LeftParen)?;
            let args = self.parse_function_call_args(frontend, ctx, stmt, body, &mut meta)?;
            // Constructing a dynamically sized array determines the array's
            // size from the argument count, so a sized type is re-inserted
            if let TypeInner::Array {
                size: ArraySize::Dynamic,
                stride,
                base,
            } = frontend.module.types[handle].inner
            {
                let span = frontend.module.types.get_span(handle);
                let size = u32::try_from(args.len())
                    .ok()
                    .and_then(NonZeroU32::new)
                    .ok_or(Error {
                        kind: ErrorKind::SemanticError(
                            "There must be at least one argument".into(),
                        ),
                        meta,
                    })?;
                handle = frontend.module.types.insert(
                    Type {
                        name: None,
                        inner: TypeInner::Array {
                            stride,
                            base,
                            size: ArraySize::Constant(size),
                        },
                    },
                    span,
                )
            }
            stmt.hir_exprs.append(
                HirExpr {
                    kind: HirExprKind::Call(FunctionCall {
                        kind: FunctionCallKind::TypeConstructor(handle),
                        args,
                    }),
                    meta,
                },
                Default::default(),
            )
        } else if let TokenValue::Identifier(_) = self.expect_peek(frontend)?.value {
            let (name, mut meta) = self.expect_ident(frontend)?;
            let expr = if self.bump_if(frontend, TokenValue::LeftParen).is_some() {
                // A call; identifiers naming a user-declared struct type are
                // treated as type constructors
                let args = self.parse_function_call_args(frontend, ctx, stmt, body, &mut meta)?;
                let kind = match frontend.lookup_type.get(&name) {
                    Some(ty) => FunctionCallKind::TypeConstructor(*ty),
                    None => FunctionCallKind::Function(name),
                };
                HirExpr {
                    kind: HirExprKind::Call(FunctionCall { kind, args }),
                    meta,
                }
            } else {
                // A plain variable reference
                let var = match frontend.lookup_variable(ctx, body, &name, meta) {
                    Some(var) => var,
                    None => {
                        return Err(Error {
                            kind: ErrorKind::UnknownVariable(name),
                            meta,
                        })
                    }
                };
                HirExpr {
                    kind: HirExprKind::Variable(var),
                    meta,
                }
            };
            stmt.hir_exprs.append(expr, Default::default())
        } else {
            self.parse_primary(frontend, ctx, stmt, body)?
        };
        // Fold any chain of postfix operators onto `base`
        while let TokenValue::LeftBracket
        | TokenValue::Dot
        | TokenValue::Increment
        | TokenValue::Decrement = self.expect_peek(frontend)?.value
        {
            let Token { value, mut meta } = self.bump(frontend)?;
            match value {
                TokenValue::LeftBracket => {
                    // Array/vector/matrix indexing: `base[index]`
                    let index = self.parse_expression(frontend, ctx, stmt, body)?;
                    let end_meta = self.expect(frontend, TokenValue::RightBracket)?.meta;
                    meta.subsume(end_meta);
                    base = stmt.hir_exprs.append(
                        HirExpr {
                            kind: HirExprKind::Access { base, index },
                            meta,
                        },
                        Default::default(),
                    )
                }
                TokenValue::Dot => {
                    let (field, end_meta) = self.expect_ident(frontend)?;
                    // A following `(` makes this a method call (e.g. `.length()`)
                    if self.bump_if(frontend, TokenValue::LeftParen).is_some() {
                        let args =
                            self.parse_function_call_args(frontend, ctx, stmt, body, &mut meta)?;
                        base = stmt.hir_exprs.append(
                            HirExpr {
                                kind: HirExprKind::Method {
                                    expr: base,
                                    name: field,
                                    args,
                                },
                                meta,
                            },
                            Default::default(),
                        );
                        continue;
                    }
                    // Otherwise a member/swizzle selection
                    meta.subsume(end_meta);
                    base = stmt.hir_exprs.append(
                        HirExpr {
                            kind: HirExprKind::Select { base, field },
                            meta,
                        },
                        Default::default(),
                    )
                }
                TokenValue::Increment | TokenValue::Decrement => {
                    // `x++`/`x--` become a `PrePostfix` HIR node flagged as
                    // postfix, carrying the matching add/subtract operator
                    base = stmt.hir_exprs.append(
                        HirExpr {
                            kind: HirExprKind::PrePostfix {
                                op: match value {
                                    TokenValue::Increment => crate::BinaryOperator::Add,
                                    _ => crate::BinaryOperator::Subtract,
                                },
                                postfix: true,
                                expr: base,
                            },
                            meta,
                        },
                        Default::default(),
                    )
                }
                _ => unreachable!(),
            }
        }
        Ok(base)
    }
    /// Parses a unary expression: an optional prefix operator (`+`, `-`,
    /// `!`, `~`, `++`, `--`) followed recursively by another unary
    /// expression, bottoming out in a postfix expression.
    pub fn parse_unary(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
    ) -> Result<Handle<HirExpr>> {
        Ok(match self.expect_peek(frontend)?.value {
            TokenValue::Plus | TokenValue::Dash | TokenValue::Bang | TokenValue::Tilde => {
                let Token { value, mut meta } = self.bump(frontend)?;

                let expr = self.parse_unary(frontend, ctx, stmt, body)?;
                let end_meta = stmt.hir_exprs[expr].meta;

                let kind = match value {
                    TokenValue::Dash => HirExprKind::Unary {
                        op: UnaryOperator::Negate,
                        expr,
                    },
                    // NOTE: `!` and `~` both lower to `UnaryOperator::Not`
                    // in this IR version.
                    TokenValue::Bang | TokenValue::Tilde => HirExprKind::Unary {
                        op: UnaryOperator::Not,
                        expr,
                    },
                    // Unary plus is a no-op: return the operand unchanged
                    // (note this returns before `meta` is extended).
                    _ => return Ok(expr),
                };

                meta.subsume(end_meta);
                stmt.hir_exprs
                    .append(HirExpr { kind, meta }, Default::default())
            }
            TokenValue::Increment | TokenValue::Decrement => {
                let Token { value, meta } = self.bump(frontend)?;

                let expr = self.parse_unary(frontend, ctx, stmt, body)?;

                // Prefix `++`/`--` is lowered as an add/subtract of one
                stmt.hir_exprs.append(
                    HirExpr {
                        kind: HirExprKind::PrePostfix {
                            op: match value {
                                TokenValue::Increment => crate::BinaryOperator::Add,
                                _ => crate::BinaryOperator::Subtract,
                            },
                            postfix: false,
                            expr,
                        },
                        meta,
                    },
                    Default::default(),
                )
            }
            _ => self.parse_postfix(frontend, ctx, stmt, body)?,
        })
    }
    /// Parses a binary expression using precedence climbing (Pratt parsing).
    ///
    /// `passthrough` optionally supplies an already-parsed left-hand side;
    /// `min_bp` is the minimum binding power an operator must have to be
    /// consumed at this level (see [`binding_power`]).
    pub fn parse_binary(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
        passthrough: Option<Handle<HirExpr>>,
        min_bp: u8,
    ) -> Result<Handle<HirExpr>> {
        // Use the supplied left-hand side if present, otherwise parse one
        let mut left = passthrough
            .ok_or(ErrorKind::EndOfFile /* Dummy error */)
            .or_else(|_| self.parse_unary(frontend, ctx, stmt, body))?;
        let mut meta = stmt.hir_exprs[left].meta;

        // Keep folding operators while they bind at least as tightly as the
        // current minimum; the right-hand side is parsed recursively with
        // the operator's right binding power.
        while let Some((l_bp, r_bp)) = binding_power(&self.expect_peek(frontend)?.value) {
            if l_bp < min_bp {
                break;
            }

            let Token { value, .. } = self.bump(frontend)?;

            let right = self.parse_binary(frontend, ctx, stmt, body, None, r_bp)?;
            let end_meta = stmt.hir_exprs[right].meta;

            meta.subsume(end_meta);
            left = stmt.hir_exprs.append(
                HirExpr {
                    kind: HirExprKind::Binary {
                        left,
                        op: match value {
                            TokenValue::LogicalOr => BinaryOperator::LogicalOr,
                            // `^^` on booleans is equivalent to `!=`
                            TokenValue::LogicalXor => BinaryOperator::NotEqual,
                            TokenValue::LogicalAnd => BinaryOperator::LogicalAnd,
                            TokenValue::VerticalBar => BinaryOperator::InclusiveOr,
                            TokenValue::Caret => BinaryOperator::ExclusiveOr,
                            TokenValue::Ampersand => BinaryOperator::And,
                            TokenValue::Equal => BinaryOperator::Equal,
                            TokenValue::NotEqual => BinaryOperator::NotEqual,
                            TokenValue::GreaterEqual => BinaryOperator::GreaterEqual,
                            TokenValue::LessEqual => BinaryOperator::LessEqual,
                            TokenValue::LeftAngle => BinaryOperator::Less,
                            TokenValue::RightAngle => BinaryOperator::Greater,
                            TokenValue::LeftShift => BinaryOperator::ShiftLeft,
                            TokenValue::RightShift => BinaryOperator::ShiftRight,
                            TokenValue::Plus => BinaryOperator::Add,
                            TokenValue::Dash => BinaryOperator::Subtract,
                            TokenValue::Star => BinaryOperator::Multiply,
                            TokenValue::Slash => BinaryOperator::Divide,
                            TokenValue::Percent => BinaryOperator::Modulo,
                            // `binding_power` returned Some, so the token is
                            // one of the operators above
                            _ => unreachable!(),
                        },
                        right,
                    },
                    meta,
                },
                Default::default(),
            )
        }

        Ok(left)
    }
pub fn parse_conditional(
&mut self,
frontend: &mut Frontend,
ctx: &mut Context,
stmt: &mut StmtContext,
body: &mut Block,
passthrough: Option<Handle<HirExpr>>,
) -> Result<Handle<HirExpr>> {
let mut condition = self.parse_binary(frontend, ctx, stmt, body, passthrough, 0)?;
let mut meta = stmt.hir_exprs[condition].meta;
if self.bump_if(frontend, TokenValue::Question).is_some() {
let accept = self.parse_expression(frontend, ctx, stmt, body)?;
self.expect(frontend, TokenValue::Colon)?;
let reject = self.parse_assignment(frontend, ctx, stmt, body)?;
let end_meta = stmt.hir_exprs[reject].meta;
meta.subsume(end_meta);
condition = stmt.hir_exprs.append(
HirExpr {
kind: HirExprKind::Conditional {
condition,
accept,
reject,
},
meta,
},
Default::default(),
)
}
Ok(condition)
}
    /// Parses an assignment expression.
    ///
    /// Handles plain assignment (`=`) and the compound assignment operators
    /// (`+=`, `-=`, …), which are desugared into a binary operation followed
    /// by an assignment; anything else falls through to conditional
    /// expression parsing.
    pub fn parse_assignment(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        stmt: &mut StmtContext,
        body: &mut Block,
    ) -> Result<Handle<HirExpr>> {
        let tgt = self.parse_unary(frontend, ctx, stmt, body)?;
        let mut meta = stmt.hir_exprs[tgt].meta;

        Ok(match self.expect_peek(frontend)?.value {
            TokenValue::Assign => {
                self.bump(frontend)?;
                // Assignment is right associative, so recurse for the value
                let value = self.parse_assignment(frontend, ctx, stmt, body)?;
                let end_meta = stmt.hir_exprs[value].meta;

                meta.subsume(end_meta);
                stmt.hir_exprs.append(
                    HirExpr {
                        kind: HirExprKind::Assign { tgt, value },
                        meta,
                    },
                    Default::default(),
                )
            }
            TokenValue::OrAssign
            | TokenValue::AndAssign
            | TokenValue::AddAssign
            | TokenValue::DivAssign
            | TokenValue::ModAssign
            | TokenValue::SubAssign
            | TokenValue::MulAssign
            | TokenValue::LeftShiftAssign
            | TokenValue::RightShiftAssign
            | TokenValue::XorAssign => {
                let token = self.bump(frontend)?;
                let right = self.parse_assignment(frontend, ctx, stmt, body)?;
                let end_meta = stmt.hir_exprs[right].meta;

                meta.subsume(end_meta);
                // `tgt op= right` is lowered as `tgt = tgt op right`
                let value = stmt.hir_exprs.append(
                    HirExpr {
                        meta,
                        kind: HirExprKind::Binary {
                            left: tgt,
                            op: match token.value {
                                TokenValue::OrAssign => BinaryOperator::InclusiveOr,
                                TokenValue::AndAssign => BinaryOperator::And,
                                TokenValue::AddAssign => BinaryOperator::Add,
                                TokenValue::DivAssign => BinaryOperator::Divide,
                                TokenValue::ModAssign => BinaryOperator::Modulo,
                                TokenValue::SubAssign => BinaryOperator::Subtract,
                                TokenValue::MulAssign => BinaryOperator::Multiply,
                                TokenValue::LeftShiftAssign => BinaryOperator::ShiftLeft,
                                TokenValue::RightShiftAssign => BinaryOperator::ShiftRight,
                                TokenValue::XorAssign => BinaryOperator::ExclusiveOr,
                                // Guarded by the outer match arm
                                _ => unreachable!(),
                            },
                            right,
                        },
                    },
                    Default::default(),
                );

                stmt.hir_exprs.append(
                    HirExpr {
                        kind: HirExprKind::Assign { tgt, value },
                        meta,
                    },
                    Default::default(),
                )
            }
            // Not an assignment: keep parsing as a conditional expression,
            // reusing the already-parsed expression as the left-hand side
            _ => self.parse_conditional(frontend, ctx, stmt, body, Some(tgt))?,
        })
    }
pub fn parse_expression(
&mut self,
frontend: &mut Frontend,
ctx: &mut Context,
stmt: &mut StmtContext,
body: &mut Block,
) -> Result<Handle<HirExpr>> {
let mut expr = self.parse_assignment(frontend, ctx, stmt, body)?;
while let TokenValue::Comma = self.expect_peek(frontend)?.value {
self.bump(frontend)?;
expr = self.parse_assignment(frontend, ctx, stmt, body)?;
}
Ok(expr)
}
}
/// Returns the `(left, right)` binding power of a binary operator token, or
/// `None` if the token is not a binary operator.
///
/// Used by `parse_binary` for precedence climbing: an operator is consumed
/// only while its left binding power is at least the current minimum.
/// Higher numbers bind tighter, and every pair has `right = left + 1`,
/// which makes all of these operators left associative.
const fn binding_power(value: &TokenValue) -> Option<(u8, u8)> {
    Some(match *value {
        TokenValue::LogicalOr => (1, 2),
        TokenValue::LogicalXor => (3, 4),
        TokenValue::LogicalAnd => (5, 6),
        TokenValue::VerticalBar => (7, 8),
        TokenValue::Caret => (9, 10),
        TokenValue::Ampersand => (11, 12),
        TokenValue::Equal | TokenValue::NotEqual => (13, 14),
        TokenValue::GreaterEqual
        | TokenValue::LessEqual
        | TokenValue::LeftAngle
        | TokenValue::RightAngle => (15, 16),
        TokenValue::LeftShift | TokenValue::RightShift => (17, 18),
        TokenValue::Plus | TokenValue::Dash => (19, 20),
        TokenValue::Star | TokenValue::Slash | TokenValue::Percent => (21, 22),
        _ => return None,
    })
}

View file

@ -0,0 +1,651 @@
use crate::front::glsl::context::ExprPos;
use crate::front::glsl::Span;
use crate::Literal;
use crate::{
front::glsl::{
ast::ParameterQualifier,
context::Context,
parser::ParsingContext,
token::{Token, TokenValue},
variables::VarDeclaration,
Error, ErrorKind, Frontend, Result,
},
Block, Expression, Statement, SwitchCase, UnaryOperator,
};
impl<'source> ParsingContext<'source> {
pub fn peek_parameter_qualifier(&mut self, frontend: &mut Frontend) -> bool {
self.peek(frontend).map_or(false, |t| match t.value {
TokenValue::In | TokenValue::Out | TokenValue::InOut | TokenValue::Const => true,
_ => false,
})
}
/// Returns the parsed `ParameterQualifier` or `ParameterQualifier::In`
pub fn parse_parameter_qualifier(&mut self, frontend: &mut Frontend) -> ParameterQualifier {
if self.peek_parameter_qualifier(frontend) {
match self.bump(frontend).unwrap().value {
TokenValue::In => ParameterQualifier::In,
TokenValue::Out => ParameterQualifier::Out,
TokenValue::InOut => ParameterQualifier::InOut,
TokenValue::Const => ParameterQualifier::Const,
_ => unreachable!(),
}
} else {
ParameterQualifier::In
}
}
    /// Parses a single GLSL statement, appending the lowered naga IR to `body`.
    ///
    /// `terminator` is set to `body.len()` right after the first terminating
    /// statement (`continue`/`break`/`return`/`discard`) so the caller can
    /// cull the unreachable statements that follow it.
    ///
    /// Returns the span of the parsed statement (delegated to
    /// `parse_declaration` for declaration statements).
    pub fn parse_statement(
        &mut self,
        frontend: &mut Frontend,
        ctx: &mut Context,
        body: &mut Block,
        terminator: &mut Option<usize>,
    ) -> Result<Option<Span>> {
        // Type qualifiers always identify a declaration statement
        if self.peek_type_qualifier(frontend) {
            return self.parse_declaration(frontend, ctx, body, false);
        }

        // Type names can identify either declaration statements or type constructors
        // depending on whether the token following the type name is a `(` (LeftParen)
        if self.peek_type_name(frontend) {
            // Start by consuming the type name so that we can peek the token after it
            let token = self.bump(frontend)?;
            // Peek the next token and check if it's a `(` (LeftParen) if so the statement
            // is a constructor, otherwise it's a declaration. We need to do the check
            // beforehand and not in the if since we will backtrack before the if
            let declaration = TokenValue::LeftParen != self.expect_peek(frontend)?.value;

            self.backtrack(token)?;

            if declaration {
                return self.parse_declaration(frontend, ctx, body, false);
            }
        }

        // Helper producing a block containing a single `break`; used as the
        // exit branch when lowering `while`/`do`/`for` loop conditions.
        let new_break = || {
            let mut block = Block::new();
            block.push(Statement::Break, crate::Span::default());
            block
        };

        let &Token {
            ref value,
            mut meta,
        } = self.expect_peek(frontend)?;

        let meta_rest = match *value {
            TokenValue::Continue => {
                let meta = self.bump(frontend)?.meta;
                body.push(Statement::Continue, meta);
                terminator.get_or_insert(body.len());
                self.expect(frontend, TokenValue::Semicolon)?.meta
            }
            TokenValue::Break => {
                let meta = self.bump(frontend)?.meta;
                body.push(Statement::Break, meta);
                terminator.get_or_insert(body.len());
                self.expect(frontend, TokenValue::Semicolon)?.meta
            }
            TokenValue::Return => {
                self.bump(frontend)?;
                // `return;` has no value; otherwise lower the returned expression
                let (value, meta) = match self.expect_peek(frontend)?.value {
                    TokenValue::Semicolon => (None, self.bump(frontend)?.meta),
                    _ => {
                        // TODO: Implicit conversions
                        let mut stmt = ctx.stmt_ctx();
                        let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                        self.expect(frontend, TokenValue::Semicolon)?;
                        let (handle, meta) =
                            ctx.lower_expect(stmt, frontend, expr, ExprPos::Rhs, body)?;
                        (Some(handle), meta)
                    }
                };

                ctx.emit_restart(body);

                body.push(Statement::Return { value }, meta);
                terminator.get_or_insert(body.len());

                meta
            }
            TokenValue::Discard => {
                let meta = self.bump(frontend)?.meta;
                body.push(Statement::Kill, meta);
                terminator.get_or_insert(body.len());

                self.expect(frontend, TokenValue::Semicolon)?.meta
            }
            TokenValue::If => {
                let mut meta = self.bump(frontend)?.meta;

                self.expect(frontend, TokenValue::LeftParen)?;
                let condition = {
                    let mut stmt = ctx.stmt_ctx();
                    let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                    let (handle, more_meta) =
                        ctx.lower_expect(stmt, frontend, expr, ExprPos::Rhs, body)?;
                    meta.subsume(more_meta);
                    handle
                };
                self.expect(frontend, TokenValue::RightParen)?;

                ctx.emit_restart(body);

                // Each branch is parsed as a single (possibly compound) statement
                let mut accept = Block::new();
                if let Some(more_meta) =
                    self.parse_statement(frontend, ctx, &mut accept, &mut None)?
                {
                    meta.subsume(more_meta)
                }

                let mut reject = Block::new();
                if self.bump_if(frontend, TokenValue::Else).is_some() {
                    if let Some(more_meta) =
                        self.parse_statement(frontend, ctx, &mut reject, &mut None)?
                    {
                        meta.subsume(more_meta);
                    }
                }

                body.push(
                    Statement::If {
                        condition,
                        accept,
                        reject,
                    },
                    meta,
                );

                meta
            }
            TokenValue::Switch => {
                let mut meta = self.bump(frontend)?.meta;
                let end_meta;

                self.expect(frontend, TokenValue::LeftParen)?;

                // The selector's scalar kind decides how case labels are
                // interpreted (signed vs unsigned)
                let (selector, uint) = {
                    let mut stmt = ctx.stmt_ctx();
                    let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                    let (root, meta) =
                        ctx.lower_expect(stmt, frontend, expr, ExprPos::Rhs, body)?;
                    let uint = frontend.resolve_type(ctx, root, meta)?.scalar_kind()
                        == Some(crate::ScalarKind::Uint);
                    (root, uint)
                };

                self.expect(frontend, TokenValue::RightParen)?;

                ctx.emit_restart(body);

                let mut cases = Vec::new();
                // Track if any default case is present in the switch statement.
                let mut default_present = false;

                self.expect(frontend, TokenValue::LeftBrace)?;
                loop {
                    // Parse the case label
                    let value = match self.expect_peek(frontend)?.value {
                        TokenValue::Case => {
                            self.bump(frontend)?;

                            // The label must be a constant integer expression
                            let mut stmt = ctx.stmt_ctx();
                            let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                            let (root, meta) =
                                ctx.lower_expect(stmt, frontend, expr, ExprPos::Rhs, body)?;
                            let const_expr = frontend.solve_constant(ctx, root, meta)?;

                            match frontend.module.const_expressions[const_expr] {
                                Expression::Literal(Literal::I32(value)) => match uint {
                                    // A signed literal is reinterpreted when
                                    // the selector is unsigned
                                    true => crate::SwitchValue::U32(value as u32),
                                    false => crate::SwitchValue::I32(value),
                                },
                                Expression::Literal(Literal::U32(value)) => {
                                    crate::SwitchValue::U32(value)
                                }
                                _ => {
                                    // Recoverable error: report it and use 0
                                    // so parsing can continue
                                    frontend.errors.push(Error {
                                        kind: ErrorKind::SemanticError(
                                            "Case values can only be integers".into(),
                                        ),
                                        meta,
                                    });

                                    crate::SwitchValue::I32(0)
                                }
                            }
                        }
                        TokenValue::Default => {
                            self.bump(frontend)?;
                            default_present = true;
                            crate::SwitchValue::Default
                        }
                        TokenValue::RightBrace => {
                            end_meta = self.bump(frontend)?.meta;
                            break;
                        }
                        _ => {
                            let Token { value, meta } = self.bump(frontend)?;
                            return Err(Error {
                                kind: ErrorKind::InvalidToken(
                                    value,
                                    vec![
                                        TokenValue::Case.into(),
                                        TokenValue::Default.into(),
                                        TokenValue::RightBrace.into(),
                                    ],
                                ),
                                meta,
                            });
                        }
                    };

                    self.expect(frontend, TokenValue::Colon)?;

                    // Parse the case body up to the next label or the end of
                    // the switch
                    let mut body = Block::new();

                    let mut case_terminator = None;
                    loop {
                        match self.expect_peek(frontend)?.value {
                            TokenValue::Case | TokenValue::Default | TokenValue::RightBrace => {
                                break
                            }
                            _ => {
                                self.parse_statement(
                                    frontend,
                                    ctx,
                                    &mut body,
                                    &mut case_terminator,
                                )?;
                            }
                        }
                    }

                    // A case that ends in `break` does not fall through; the
                    // `break` itself (and anything after it) is culled since
                    // naga's switch cases break implicitly
                    let mut fall_through = true;

                    if let Some(mut idx) = case_terminator {
                        if let Statement::Break = body[idx - 1] {
                            fall_through = false;
                            idx -= 1;
                        }

                        body.cull(idx..)
                    }

                    cases.push(SwitchCase {
                        value,
                        body,
                        fall_through,
                    })
                }

                meta.subsume(end_meta);

                // NOTE: do not unwrap here since a switch statement isn't required
                // to have any cases.
                if let Some(case) = cases.last_mut() {
                    // GLSL requires that the last case not be empty, so we check
                    // that here and produce an error otherwise (fall_through must
                    // also be checked because `break`s count as statements but
                    // they aren't added to the body)
                    if case.body.is_empty() && case.fall_through {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "last case/default label must be followed by statements".into(),
                            ),
                            meta,
                        })
                    }

                    // GLSL allows the last case to not have any `break` statement,
                    // this would mark it as fall through but naga's IR requires that
                    // the last case must not be fall through, so we mark need to mark
                    // the last case as not fall through always.
                    case.fall_through = false;
                }

                // Add an empty default case in case non was present, this is needed because
                // naga's IR requires that all switch statements must have a default case but
                // GLSL doesn't require that, so we might need to add an empty default case.
                if !default_present {
                    cases.push(SwitchCase {
                        value: crate::SwitchValue::Default,
                        body: Block::new(),
                        fall_through: false,
                    })
                }

                body.push(Statement::Switch { selector, cases }, meta);

                meta
            }
            TokenValue::While => {
                let mut meta = self.bump(frontend)?.meta;

                let mut loop_body = Block::new();

                let mut stmt = ctx.stmt_ctx();
                self.expect(frontend, TokenValue::LeftParen)?;
                let root = self.parse_expression(frontend, ctx, &mut stmt, &mut loop_body)?;
                meta.subsume(self.expect(frontend, TokenValue::RightParen)?.meta);

                // The condition is negated and lowered as `if (!cond) break;`
                // at the top of the loop body
                let (expr, expr_meta) =
                    ctx.lower_expect(stmt, frontend, root, ExprPos::Rhs, &mut loop_body)?;
                let condition = ctx.add_expression(
                    Expression::Unary {
                        op: UnaryOperator::Not,
                        expr,
                    },
                    expr_meta,
                    &mut loop_body,
                );

                ctx.emit_restart(&mut loop_body);

                loop_body.push(
                    Statement::If {
                        condition,
                        accept: new_break(),
                        reject: Block::new(),
                    },
                    crate::Span::default(),
                );

                meta.subsume(expr_meta);

                if let Some(body_meta) =
                    self.parse_statement(frontend, ctx, &mut loop_body, &mut None)?
                {
                    meta.subsume(body_meta);
                }

                body.push(
                    Statement::Loop {
                        body: loop_body,
                        continuing: Block::new(),
                        break_if: None,
                    },
                    meta,
                );

                meta
            }
            TokenValue::Do => {
                let mut meta = self.bump(frontend)?.meta;

                let mut loop_body = Block::new();

                // The body runs before the condition is ever evaluated
                let mut terminator = None;
                self.parse_statement(frontend, ctx, &mut loop_body, &mut terminator)?;

                let mut stmt = ctx.stmt_ctx();

                self.expect(frontend, TokenValue::While)?;
                self.expect(frontend, TokenValue::LeftParen)?;
                let root = self.parse_expression(frontend, ctx, &mut stmt, &mut loop_body)?;
                let end_meta = self.expect(frontend, TokenValue::RightParen)?.meta;

                meta.subsume(end_meta);

                // As with `while`, the negated condition becomes
                // `if (!cond) break;` — here placed after the body
                let (expr, expr_meta) =
                    ctx.lower_expect(stmt, frontend, root, ExprPos::Rhs, &mut loop_body)?;
                let condition = ctx.add_expression(
                    Expression::Unary {
                        op: UnaryOperator::Not,
                        expr,
                    },
                    expr_meta,
                    &mut loop_body,
                );

                ctx.emit_restart(&mut loop_body);

                loop_body.push(
                    Statement::If {
                        condition,
                        accept: new_break(),
                        reject: Block::new(),
                    },
                    crate::Span::default(),
                );

                // NOTE(review): if the body itself terminated (e.g. an
                // unconditional `break`), everything after the terminator —
                // including the condition check pushed above — is unreachable
                // and gets culled.
                if let Some(idx) = terminator {
                    loop_body.cull(idx..)
                }

                body.push(
                    Statement::Loop {
                        body: loop_body,
                        continuing: Block::new(),
                        break_if: None,
                    },
                    meta,
                );

                meta
            }
            TokenValue::For => {
                let mut meta = self.bump(frontend)?.meta;

                // The init clause may declare variables scoped to the loop
                ctx.symbol_table.push_scope();
                self.expect(frontend, TokenValue::LeftParen)?;

                // Init clause: empty, a declaration, or an expression
                if self.bump_if(frontend, TokenValue::Semicolon).is_none() {
                    if self.peek_type_name(frontend) || self.peek_type_qualifier(frontend) {
                        self.parse_declaration(frontend, ctx, body, false)?;
                    } else {
                        let mut stmt = ctx.stmt_ctx();
                        let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                        ctx.lower(stmt, frontend, expr, ExprPos::Rhs, body)?;
                        self.expect(frontend, TokenValue::Semicolon)?;
                    }
                }

                let (mut block, mut continuing) = (Block::new(), Block::new());

                // Condition clause: may itself be a declaration
                // (`for (;int i = 0;)`), in which case the declared value
                // doubles as the loop condition
                if self.bump_if(frontend, TokenValue::Semicolon).is_none() {
                    let (expr, expr_meta) = if self.peek_type_name(frontend)
                        || self.peek_type_qualifier(frontend)
                    {
                        let mut qualifiers = self.parse_type_qualifiers(frontend)?;
                        let (ty, mut meta) = self.parse_type_non_void(frontend)?;
                        let name = self.expect_ident(frontend)?.0;

                        self.expect(frontend, TokenValue::Assign)?;

                        let (value, end_meta) =
                            self.parse_initializer(frontend, ty, ctx, &mut block)?;
                        meta.subsume(end_meta);

                        let decl = VarDeclaration {
                            qualifiers: &mut qualifiers,
                            ty,
                            name: Some(name),
                            init: None,
                            meta,
                        };

                        let pointer = frontend.add_local_var(ctx, &mut block, decl)?;

                        ctx.emit_restart(&mut block);

                        block.push(Statement::Store { pointer, value }, meta);

                        (value, end_meta)
                    } else {
                        let mut stmt = ctx.stmt_ctx();
                        let root = self.parse_expression(frontend, ctx, &mut stmt, &mut block)?;
                        ctx.lower_expect(stmt, frontend, root, ExprPos::Rhs, &mut block)?
                    };

                    // Negated condition lowered as `if (!cond) break;`
                    let condition = ctx.add_expression(
                        Expression::Unary {
                            op: UnaryOperator::Not,
                            expr,
                        },
                        expr_meta,
                        &mut block,
                    );

                    ctx.emit_restart(&mut block);

                    block.push(
                        Statement::If {
                            condition,
                            accept: new_break(),
                            reject: Block::new(),
                        },
                        crate::Span::default(),
                    );

                    self.expect(frontend, TokenValue::Semicolon)?;
                }

                // Update clause: lowered into the loop's continuing block
                match self.expect_peek(frontend)?.value {
                    TokenValue::RightParen => {}
                    _ => {
                        let mut stmt = ctx.stmt_ctx();
                        let rest =
                            self.parse_expression(frontend, ctx, &mut stmt, &mut continuing)?;
                        ctx.lower(stmt, frontend, rest, ExprPos::Rhs, &mut continuing)?;
                    }
                }

                meta.subsume(self.expect(frontend, TokenValue::RightParen)?.meta);

                if let Some(stmt_meta) =
                    self.parse_statement(frontend, ctx, &mut block, &mut None)?
                {
                    meta.subsume(stmt_meta);
                }

                body.push(
                    Statement::Loop {
                        body: block,
                        continuing,
                        break_if: None,
                    },
                    meta,
                );

                ctx.symbol_table.pop_scope();

                meta
            }
            TokenValue::LeftBrace => {
                let meta = self.bump(frontend)?.meta;

                // Compound statement: parsed into its own block. If it was
                // terminated internally, everything after it in the current
                // body is unreachable too.
                let mut block = Block::new();
                let mut block_terminator = None;

                let meta = self.parse_compound_statement(
                    meta,
                    frontend,
                    ctx,
                    &mut block,
                    &mut block_terminator,
                )?;

                body.push(Statement::Block(block), meta);
                if block_terminator.is_some() {
                    terminator.get_or_insert(body.len());
                }

                meta
            }
            // Empty statement
            TokenValue::Semicolon => self.bump(frontend)?.meta,
            _ => {
                // Attempt to force expression parsing for remainder of the
                // tokens. Unknown or invalid tokens will be caught there and
                // turned into an error.
                let mut stmt = ctx.stmt_ctx();
                let expr = self.parse_expression(frontend, ctx, &mut stmt, body)?;
                ctx.lower(stmt, frontend, expr, ExprPos::Rhs, body)?;
                self.expect(frontend, TokenValue::Semicolon)?.meta
            }
        };

        meta.subsume(meta_rest);
        Ok(Some(meta))
    }
pub fn parse_compound_statement(
&mut self,
mut meta: Span,
frontend: &mut Frontend,
ctx: &mut Context,
body: &mut Block,
terminator: &mut Option<usize>,
) -> Result<Span> {
ctx.symbol_table.push_scope();
loop {
if let Some(Token {
meta: brace_meta, ..
}) = self.bump_if(frontend, TokenValue::RightBrace)
{
meta.subsume(brace_meta);
break;
}
let stmt = self.parse_statement(frontend, ctx, body, terminator)?;
if let Some(stmt_meta) = stmt {
meta.subsume(stmt_meta);
}
}
if let Some(idx) = *terminator {
body.cull(idx..)
}
ctx.symbol_table.pop_scope();
Ok(meta)
}
pub fn parse_function_args(
&mut self,
frontend: &mut Frontend,
context: &mut Context,
body: &mut Block,
) -> Result<()> {
if self.bump_if(frontend, TokenValue::Void).is_some() {
return Ok(());
}
loop {
if self.peek_type_name(frontend) || self.peek_parameter_qualifier(frontend) {
let qualifier = self.parse_parameter_qualifier(frontend);
let mut ty = self.parse_type_non_void(frontend)?.0;
match self.expect_peek(frontend)?.value {
TokenValue::Comma => {
self.bump(frontend)?;
context.add_function_arg(frontend, body, None, ty, qualifier);
continue;
}
TokenValue::Identifier(_) => {
let mut name = self.expect_ident(frontend)?;
self.parse_array_specifier(frontend, &mut name.1, &mut ty)?;
context.add_function_arg(frontend, body, Some(name), ty, qualifier);
if self.bump_if(frontend, TokenValue::Comma).is_some() {
continue;
}
break;
}
_ => break,
}
}
break;
}
Ok(())
}
}

View file

@ -0,0 +1,426 @@
use std::num::NonZeroU32;
use crate::{
front::glsl::{
ast::{QualifierKey, QualifierValue, StorageQualifier, StructLayout, TypeQualifiers},
error::ExpectedToken,
parser::ParsingContext,
token::{Token, TokenValue},
Error, ErrorKind, Frontend, Result,
},
AddressSpace, ArraySize, Handle, Span, Type, TypeInner,
};
impl<'source> ParsingContext<'source> {
/// Parses an optional array_specifier returning wether or not it's present
/// and modifying the type handle if it exists
pub fn parse_array_specifier(
&mut self,
frontend: &mut Frontend,
span: &mut Span,
ty: &mut Handle<Type>,
) -> Result<()> {
while self.parse_array_specifier_single(frontend, span, ty)? {}
Ok(())
}
/// Implementation of [`Self::parse_array_specifier`] for a single array_specifier
fn parse_array_specifier_single(
&mut self,
frontend: &mut Frontend,
span: &mut Span,
ty: &mut Handle<Type>,
) -> Result<bool> {
if self.bump_if(frontend, TokenValue::LeftBracket).is_some() {
let size = if let Some(Token { meta, .. }) =
self.bump_if(frontend, TokenValue::RightBracket)
{
span.subsume(meta);
ArraySize::Dynamic
} else {
let (value, constant_span) = self.parse_uint_constant(frontend)?;
let size = NonZeroU32::new(value).ok_or(Error {
kind: ErrorKind::SemanticError("Array size must be greater than zero".into()),
meta: constant_span,
})?;
let end_span = self.expect(frontend, TokenValue::RightBracket)?.meta;
span.subsume(end_span);
ArraySize::Constant(size)
};
frontend.layouter.update(frontend.module.to_ctx()).unwrap();
let stride = frontend.layouter[*ty].to_stride();
*ty = frontend.module.types.insert(
Type {
name: None,
inner: TypeInner::Array {
base: *ty,
size,
stride,
},
},
*span,
);
Ok(true)
} else {
Ok(false)
}
}
    /// Parses a type specifier, returning `None` for `void`.
    ///
    /// Handles builtin type names, inline `struct` declarations, identifiers
    /// that name previously declared types, and any trailing array
    /// specifiers.
    pub fn parse_type(&mut self, frontend: &mut Frontend) -> Result<(Option<Handle<Type>>, Span)> {
        let token = self.bump(frontend)?;
        let mut handle = match token.value {
            TokenValue::Void => return Ok((None, token.meta)),
            TokenValue::TypeName(ty) => frontend.module.types.insert(ty, token.meta),
            TokenValue::Struct => {
                // Inline struct declaration; the new type is also registered
                // under its name so later code can refer to it by name
                let mut meta = token.meta;
                let ty_name = self.expect_ident(frontend)?.0;
                self.expect(frontend, TokenValue::LeftBrace)?;
                let mut members = Vec::new();
                let span = self.parse_struct_declaration_list(
                    frontend,
                    &mut members,
                    StructLayout::Std140,
                )?;
                let end_meta = self.expect(frontend, TokenValue::RightBrace)?.meta;
                meta.subsume(end_meta);
                let ty = frontend.module.types.insert(
                    Type {
                        name: Some(ty_name.clone()),
                        inner: TypeInner::Struct { members, span },
                    },
                    meta,
                );
                frontend.lookup_type.insert(ty_name, ty);
                ty
            }
            // An identifier is only a type if it was previously declared as one
            TokenValue::Identifier(ident) => match frontend.lookup_type.get(&ident) {
                Some(ty) => *ty,
                None => {
                    return Err(Error {
                        kind: ErrorKind::UnknownType(ident),
                        meta: token.meta,
                    })
                }
            },
            _ => {
                return Err(Error {
                    kind: ErrorKind::InvalidToken(
                        token.value,
                        vec![
                            TokenValue::Void.into(),
                            TokenValue::Struct.into(),
                            ExpectedToken::TypeName,
                        ],
                    ),
                    meta: token.meta,
                });
            }
        };

        // The type may be followed by one or more array specifiers
        let mut span = token.meta;
        self.parse_array_specifier(frontend, &mut span, &mut handle)?;
        Ok((Some(handle), span))
    }
pub fn parse_type_non_void(&mut self, frontend: &mut Frontend) -> Result<(Handle<Type>, Span)> {
let (maybe_ty, meta) = self.parse_type(frontend)?;
let ty = maybe_ty.ok_or_else(|| Error {
kind: ErrorKind::SemanticError("Type can't be void".into()),
meta,
})?;
Ok((ty, meta))
}
pub fn peek_type_qualifier(&mut self, frontend: &mut Frontend) -> bool {
self.peek(frontend).map_or(false, |t| match t.value {
TokenValue::Invariant
| TokenValue::Interpolation(_)
| TokenValue::Sampling(_)
| TokenValue::PrecisionQualifier(_)
| TokenValue::Const
| TokenValue::In
| TokenValue::Out
| TokenValue::Uniform
| TokenValue::Shared
| TokenValue::Buffer
| TokenValue::Restrict
| TokenValue::MemoryQualifier(_)
| TokenValue::Layout => true,
_ => false,
})
}
    /// Parses a sequence of type qualifiers into a [`TypeQualifiers`].
    ///
    /// Duplicate or conflicting qualifiers are reported via
    /// `frontend.errors` but parsing continues (the later qualifier wins),
    /// so a single bad qualifier doesn't abort the whole declaration.
    pub fn parse_type_qualifiers<'a>(
        &mut self,
        frontend: &mut Frontend,
    ) -> Result<TypeQualifiers<'a>> {
        let mut qualifiers = TypeQualifiers::default();

        while self.peek_type_qualifier(frontend) {
            let token = self.bump(frontend)?;

            // Handle layout qualifiers outside the match since this can push multiple values
            if token.value == TokenValue::Layout {
                self.parse_layout_qualifier_id_list(frontend, &mut qualifiers)?;
                continue;
            }

            qualifiers.span.subsume(token.meta);

            match token.value {
                TokenValue::Invariant => {
                    if qualifiers.invariant.is_some() {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Cannot use more than one invariant qualifier per declaration"
                                    .into(),
                            ),
                            meta: token.meta,
                        })
                    }

                    qualifiers.invariant = Some(token.meta);
                }
                TokenValue::Interpolation(i) => {
                    if qualifiers.interpolation.is_some() {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Cannot use more than one interpolation qualifier per declaration"
                                    .into(),
                            ),
                            meta: token.meta,
                        })
                    }

                    qualifiers.interpolation = Some((i, token.meta));
                }
                TokenValue::Const
                | TokenValue::In
                | TokenValue::Out
                | TokenValue::Uniform
                | TokenValue::Shared
                | TokenValue::Buffer => {
                    let storage = match token.value {
                        TokenValue::Const => StorageQualifier::Const,
                        TokenValue::In => StorageQualifier::Input,
                        TokenValue::Out => StorageQualifier::Output,
                        TokenValue::Uniform => {
                            StorageQualifier::AddressSpace(AddressSpace::Uniform)
                        }
                        TokenValue::Shared => {
                            StorageQualifier::AddressSpace(AddressSpace::WorkGroup)
                        }
                        TokenValue::Buffer => {
                            StorageQualifier::AddressSpace(AddressSpace::Storage {
                                access: crate::StorageAccess::all(),
                            })
                        }
                        // Guarded by the outer match arm
                        _ => unreachable!(),
                    };

                    // The default storage qualifier is `Function`; anything
                    // else means a storage qualifier was already seen
                    if StorageQualifier::AddressSpace(AddressSpace::Function)
                        != qualifiers.storage.0
                    {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Cannot use more than one storage qualifier per declaration".into(),
                            ),
                            meta: token.meta,
                        });
                    }

                    qualifiers.storage = (storage, token.meta);
                }
                TokenValue::Sampling(s) => {
                    if qualifiers.sampling.is_some() {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Cannot use more than one sampling qualifier per declaration"
                                    .into(),
                            ),
                            meta: token.meta,
                        })
                    }

                    qualifiers.sampling = Some((s, token.meta));
                }
                TokenValue::PrecisionQualifier(p) => {
                    if qualifiers.precision.is_some() {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "Cannot use more than one precision qualifier per declaration"
                                    .into(),
                            ),
                            meta: token.meta,
                        })
                    }

                    qualifiers.precision = Some((p, token.meta));
                }
                TokenValue::MemoryQualifier(access) => {
                    // Memory qualifiers are restrictive: start from "all
                    // access allowed" and intersect with each qualifier
                    let storage_access = qualifiers
                        .storage_access
                        .get_or_insert((crate::StorageAccess::all(), Span::default()));

                    // If the bits this qualifier would remove are already
                    // gone, the same qualifier was specified before
                    if !storage_access.0.contains(!access) {
                        frontend.errors.push(Error {
                            kind: ErrorKind::SemanticError(
                                "The same memory qualifier can only be used once".into(),
                            ),
                            meta: token.meta,
                        })
                    }

                    storage_access.0 &= access;
                    storage_access.1.subsume(token.meta);
                }
                // `restrict` has no representation in the IR; accept and ignore
                TokenValue::Restrict => continue,
                _ => unreachable!(),
            };
        }

        Ok(qualifiers)
    }
pub fn parse_layout_qualifier_id_list(
&mut self,
frontend: &mut Frontend,
qualifiers: &mut TypeQualifiers,
) -> Result<()> {
self.expect(frontend, TokenValue::LeftParen)?;
loop {
self.parse_layout_qualifier_id(frontend, &mut qualifiers.layout_qualifiers)?;
if self.bump_if(frontend, TokenValue::Comma).is_some() {
continue;
}
break;
}
let token = self.expect(frontend, TokenValue::RightParen)?;
qualifiers.span.subsume(token.meta);
Ok(())
}
    /// Parses a single layout qualifier id and records it in `qualifiers`.
    ///
    /// An invalid token or a bad `= value` is reported via `frontend.errors`
    /// and parsing continues.
    pub fn parse_layout_qualifier_id(
        &mut self,
        frontend: &mut Frontend,
        qualifiers: &mut crate::FastHashMap<QualifierKey, (QualifierValue, Span)>,
    ) -> Result<()> {
        // layout_qualifier_id:
        //     IDENTIFIER
        //     IDENTIFIER EQUAL constant_expression
        //     SHARED
        let mut token = self.bump(frontend)?;
        match token.value {
            TokenValue::Identifier(name) => {
                let (key, value) = match name.as_str() {
                    // Struct layout qualifiers
                    "std140" => (
                        QualifierKey::Layout,
                        QualifierValue::Layout(StructLayout::Std140),
                    ),
                    "std430" => (
                        QualifierKey::Layout,
                        QualifierValue::Layout(StructLayout::Std430),
                    ),
                    word => {
                        // Image format qualifiers (e.g. `rgba8`) are looked
                        // up by name
                        if let Some(format) = map_image_format(word) {
                            (QualifierKey::Format, QualifierValue::Format(format))
                        } else {
                            // Generic `name` or `name = value` qualifier; a
                            // bad value is reported and replaced with 0 so
                            // parsing can continue
                            let key = QualifierKey::String(name.into());
                            let value = if self.bump_if(frontend, TokenValue::Assign).is_some() {
                                let (value, end_meta) = match self.parse_uint_constant(frontend) {
                                    Ok(v) => v,
                                    Err(e) => {
                                        frontend.errors.push(e);
                                        (0, Span::default())
                                    }
                                };
                                token.meta.subsume(end_meta);

                                QualifierValue::Uint(value)
                            } else {
                                QualifierValue::None
                            };

                            (key, value)
                        }
                    }
                };

                qualifiers.insert(key, (value, token.meta));
            }
            _ => frontend.errors.push(Error {
                kind: ErrorKind::InvalidToken(token.value, vec![ExpectedToken::Identifier]),
                meta: token.meta,
            }),
        }

        Ok(())
    }
pub fn peek_type_name(&mut self, frontend: &mut Frontend) -> bool {
self.peek(frontend).map_or(false, |t| match t.value {
TokenValue::TypeName(_) | TokenValue::Void => true,
TokenValue::Struct => true,
TokenValue::Identifier(ref ident) => frontend.lookup_type.contains_key(ident),
_ => false,
})
}
}
/// Maps a GLSL image format layout qualifier word (e.g. `rgba8`, `r32f`) to
/// the corresponding `StorageFormat`, returning `None` for unrecognized
/// words.
fn map_image_format(word: &str) -> Option<crate::StorageFormat> {
    use crate::StorageFormat as Sf;

    let format = match word {
        // float-image-format-qualifier:
        "rgba32f" => Sf::Rgba32Float,
        "rgba16f" => Sf::Rgba16Float,
        "rg32f" => Sf::Rg32Float,
        "rg16f" => Sf::Rg16Float,
        "r11f_g11f_b10f" => Sf::Rg11b10Float,
        "r32f" => Sf::R32Float,
        "r16f" => Sf::R16Float,
        "rgba16" => Sf::Rgba16Unorm,
        "rgb10_a2" => Sf::Rgb10a2Unorm,
        "rgba8" => Sf::Rgba8Unorm,
        "rg16" => Sf::Rg16Unorm,
        "rg8" => Sf::Rg8Unorm,
        "r16" => Sf::R16Unorm,
        "r8" => Sf::R8Unorm,
        "rgba16_snorm" => Sf::Rgba16Snorm,
        "rgba8_snorm" => Sf::Rgba8Snorm,
        "rg16_snorm" => Sf::Rg16Snorm,
        "rg8_snorm" => Sf::Rg8Snorm,
        "r16_snorm" => Sf::R16Snorm,
        "r8_snorm" => Sf::R8Snorm,
        // int-image-format-qualifier:
        "rgba32i" => Sf::Rgba32Sint,
        "rgba16i" => Sf::Rgba16Sint,
        "rgba8i" => Sf::Rgba8Sint,
        "rg32i" => Sf::Rg32Sint,
        "rg16i" => Sf::Rg16Sint,
        "rg8i" => Sf::Rg8Sint,
        "r32i" => Sf::R32Sint,
        "r16i" => Sf::R16Sint,
        "r8i" => Sf::R8Sint,
        // uint-image-format-qualifier:
        "rgba32ui" => Sf::Rgba32Uint,
        "rgba16ui" => Sf::Rgba16Uint,
        "rgba8ui" => Sf::Rgba8Uint,
        "rg32ui" => Sf::Rg32Uint,
        "rg16ui" => Sf::Rg16Uint,
        "rg8ui" => Sf::Rg8Uint,
        "r32ui" => Sf::R32Uint,
        "r16ui" => Sf::R16Uint,
        "r8ui" => Sf::R8Uint,
        // TODO: These next ones seem incorrect to me
        // "rgb10_a2ui" => Sf::Rgb10a2Unorm,
        _ => return None,
    };

    Some(format)
}

View file

@ -0,0 +1,854 @@
use super::{
ast::Profile,
error::ExpectedToken,
error::{Error, ErrorKind},
token::TokenValue,
Frontend, Options, Span,
};
use crate::ShaderStage;
use pp_rs::token::PreprocessorError;
/// `#version` directive handling: rejects out-of-range or unsupported
/// versions, unknown profiles, and non-leading `#version` directives; the
/// expected error `Span`s are byte offsets into the exact source strings.
/// Accepted spellings of `450 [core]` must all report (450, Core) metadata.
#[test]
fn version() {
    let mut frontend = Frontend::default();
    // invalid versions
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                "#version 99000\n void main(){}",
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::InvalidVersion(99000),
            meta: Span::new(9, 14)
        }],
    );
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                "#version 449\n void main(){}",
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::InvalidVersion(449),
            meta: Span::new(9, 12)
        }]
    );
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                "#version 450 smart\n void main(){}",
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::InvalidProfile("smart".into()),
            meta: Span::new(13, 18),
        }]
    );
    // A second `#version` after code produces both a preprocessor error and a
    // parse error.
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                "#version 450\nvoid main(){} #version 450",
            )
            .err()
            .unwrap(),
        vec![
            Error {
                kind: ErrorKind::PreprocessorError(PreprocessorError::UnexpectedHash,),
                meta: Span::new(27, 28),
            },
            Error {
                kind: ErrorKind::InvalidToken(
                    TokenValue::Identifier("version".into()),
                    vec![ExpectedToken::Eof]
                ),
                meta: Span::new(28, 35)
            }
        ]
    );
    // valid versions
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            " # version 450\nvoid main() {}",
        )
        .unwrap();
    assert_eq!(
        (frontend.metadata().version, frontend.metadata().profile),
        (450, Profile::Core)
    );
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            "#version 450\nvoid main() {}",
        )
        .unwrap();
    assert_eq!(
        (frontend.metadata().version, frontend.metadata().profile),
        (450, Profile::Core)
    );
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            "#version 450 core\nvoid main(void) {}",
        )
        .unwrap();
    assert_eq!(
        (frontend.metadata().version, frontend.metadata().profile),
        (450, Profile::Core)
    );
}
/// Smoke tests for statement parsing: if/else, switch with fallthrough,
/// while / do-while, and for loops (including the empty `for(;;);` form).
#[test]
fn control_flow() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            if (true) {
                return 1;
            } else {
                return 2;
            }
        }
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            if (true) {
                return 1;
            }
        }
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            int x;
            int y = 3;
            switch (5) {
                case 2:
                    x = 2;
                case 5:
                    x = 5;
                    y = 2;
                    break;
                default:
                    x = 0;
            }
        }
        "#,
        )
        .unwrap();
    // NOTE(review): the do-while below has no trailing semicolon, yet the
    // parse is expected to succeed — presumably the parser tolerates it.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            int x = 0;
            while(x < 5) {
                x = x + 1;
            }
            do {
                x = x - 1;
            } while(x >= 4)
        }
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            int x = 0;
            for(int i = 0; i < 10;) {
                x = x + 2;
            }
            for(;;);
            return x;
        }
        "#,
        )
        .unwrap();
}
/// Global declarations: in/out varyings, textures/samplers, uniform blocks
/// with std140/std430/push_constant layouts, named block instances, global
/// initializers, and `precision` statements.
#[test]
fn declarations() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(location = 0) in vec2 v_uv;
            layout(location = 0) out vec4 o_color;
            layout(set = 1, binding = 1) uniform texture2D tex;
            layout(set = 1, binding = 2) uniform sampler tex_sampler;
            layout(early_fragment_tests) in;
            void main() {}
            "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(std140, set = 2, binding = 0)
            uniform u_locals {
                vec3 model_offs;
                float load_time;
                ivec4 atlas_offs;
            };
            void main() {}
            "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(push_constant)
            uniform u_locals {
                vec3 model_offs;
                float load_time;
                ivec4 atlas_offs;
            };
            void main() {}
            "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(std430, set = 2, binding = 0)
            uniform u_locals {
                vec3 model_offs;
                float load_time;
                ivec4 atlas_offs;
            };
            void main() {}
            "#,
        )
        .unwrap();
    // A named block instance: members are reachable both bare and through the
    // instance name.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(std140, set = 2, binding = 0)
            uniform u_locals {
                vec3 model_offs;
                float load_time;
            } block_var;
            void main() {
                load_time * model_offs;
                block_var.load_time * block_var.model_offs;
            }
            "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            float vector = vec4(1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0);
            void main() {}
            "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            precision highp float;
            void main() {}
            "#,
        )
        .unwrap();
}
/// `texture()` calls through a combined `sampler2D(texture, sampler)`
/// constructor, with and without an explicit bias argument.
#[test]
fn textures() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            #version 450
            layout(location = 0) in vec2 v_uv;
            layout(location = 0) out vec4 o_color;
            layout(set = 1, binding = 1) uniform texture2D tex;
            layout(set = 1, binding = 2) uniform sampler tex_sampler;
            void main() {
                o_color = texture(sampler2D(tex, tex_sampler), v_uv);
                o_color.a = texture(sampler2D(tex, tex_sampler), v_uv, 2.0).a;
            }
            "#,
        )
        .unwrap();
}
/// Function prototypes, definitions, overloading, redefinition errors,
/// calls with literal arguments, and in/out parameters.
///
/// Fix: removed a stray leftover debug `println!();` between the
/// redefinition assertion and the caller/callee case — it printed an empty
/// line on every test run and served no purpose.
#[test]
fn functions() {
    let mut frontend = Frontend::default();
    // A prototype followed by the matching definition.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void test1(float);
        void test1(float) {}
        void main() {}
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void test2(float a) {}
        void test3(float a, float b) {}
        void test4(float, float) {}
        void main() {}
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        float test(float a) { return a; }
        void main() {}
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        float test(vec4 p) {
            return p.x;
        }
        void main() {}
        "#,
        )
        .unwrap();
    // Function overloading
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        float test(vec2 p);
        float test(vec3 p);
        float test(vec4 p);
        float test(vec2 p) {
            return p.x;
        }
        float test(vec3 p) {
            return p.x;
        }
        float test(vec4 p) {
            return p.x;
        }
        void main() {}
        "#,
        )
        .unwrap();
    // Overloads may not differ only in return type; the span points at the
    // offending second definition.
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                r#"
                # version 450
                int test(vec4 p) {
                    return p.x;
                }
                float test(vec4 p) {
                    return p.x;
                }
                void main() {}
                "#,
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::SemanticError("Function already defined".into()),
            meta: Span::new(134, 152),
        }]
    );
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        float callee(uint q) {
            return float(q);
        }
        float caller() {
            callee(1u);
        }
        void main() {}
        "#,
        )
        .unwrap();
    // Nested function call
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            # version 450
            layout(set = 0, binding = 1) uniform texture2D t_noise;
            layout(set = 0, binding = 2) uniform sampler s_noise;
            void main() {
                textureLod(sampler2D(t_noise, s_noise), vec2(1.0), 0);
            }
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            # version 450
            void fun(vec2 in_parameter, out float out_parameter) {
                ivec2 _ = ivec2(in_parameter);
            }
            void main() {
                float a;
                fun(vec2(1.0), a);
            }
        "#,
        )
        .unwrap();
}
/// Checks the IR produced for `const` globals: a single f32 type, a literal
/// init expression for `a`, and `b` initialized from the constant `a` (with
/// an extra const-expression inserted for the plain `global` variable).
#[test]
fn constants() {
    use crate::{Constant, Expression, ScalarKind, Type, TypeInner};
    let mut frontend = Frontend::default();
    let module = frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            # version 450
            const float a = 1.0;
            float global = a;
            const float b = a;
            void main() {}
            "#,
        )
        .unwrap();
    // Walk the module arenas in insertion order.
    let mut types = module.types.iter();
    let mut constants = module.constants.iter();
    let mut const_expressions = module.const_expressions.iter();
    let (ty_handle, ty) = types.next().unwrap();
    assert_eq!(
        ty,
        &Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Float,
                width: 4
            }
        }
    );
    let (init_a_handle, init_a) = const_expressions.next().unwrap();
    assert_eq!(init_a, &Expression::Literal(crate::Literal::F32(1.0)));
    let (constant_a_handle, constant_a) = constants.next().unwrap();
    assert_eq!(
        constant_a,
        &Constant {
            name: Some("a".to_owned()),
            r#override: crate::Override::None,
            ty: ty_handle,
            init: init_a_handle
        }
    );
    // skip const expr that was inserted for `global` var
    const_expressions.next().unwrap();
    let (init_b_handle, init_b) = const_expressions.next().unwrap();
    assert_eq!(init_b, &Expression::Constant(constant_a_handle));
    assert_eq!(
        constants.next().unwrap().1,
        &Constant {
            name: Some("b".to_owned()),
            r#override: crate::Override::None,
            ty: ty_handle,
            init: init_b_handle
        }
    );
    assert!(constants.next().is_none());
}
/// Overload resolution picks the `saturate` variant matching each argument
/// type. NOTE(review): `v4` assigns a vec4 result to a vec3 and the parse is
/// still expected to succeed — presumably exercising implicit conversion
/// handling; confirm this is intentional.
#[test]
fn function_overloading() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            # version 450
            float saturate(float v) { return clamp(v, 0.0, 1.0); }
            vec2 saturate(vec2 v) { return clamp(v, vec2(0.0), vec2(1.0)); }
            vec3 saturate(vec3 v) { return clamp(v, vec3(0.0), vec3(1.0)); }
            vec4 saturate(vec4 v) { return clamp(v, vec4(0.0), vec4(1.0)); }
            void main() {
                float v1 = saturate(1.5);
                vec2 v2 = saturate(vec2(0.5, 1.5));
                vec3 v3 = saturate(vec3(0.5, 1.5, 2.5));
                vec3 v4 = saturate(vec4(0.5, 1.5, 2.5, 3.5));
            }
            "#,
        )
        .unwrap();
}
/// Implicit scalar conversions: int/uint widen to float where needed, but
/// a float argument never converts *down* to int/uint overloads (unknown
/// function), and an int argument matching several widening overloads is
/// ambiguous. Error spans are byte offsets into the shader strings.
#[test]
fn implicit_conversions() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
            # version 450
            void main() {
                mat4 a = mat4(1);
                float b = 1u;
                float c = 1 + 2.0;
            }
            "#,
        )
        .unwrap();
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                r#"
                # version 450
                void test(int a) {}
                void test(uint a) {}
                void main() {
                    test(1.0);
                }
                "#,
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::SemanticError("Unknown function \'test\'".into()),
            meta: Span::new(156, 165),
        }]
    );
    assert_eq!(
        frontend
            .parse(
                &Options::from(ShaderStage::Vertex),
                r#"
                # version 450
                void test(float a) {}
                void test(uint a) {}
                void main() {
                    test(1);
                }
                "#,
            )
            .err()
            .unwrap(),
        vec![Error {
            kind: ErrorKind::SemanticError("Ambiguous best function for \'test\'".into()),
            meta: Span::new(158, 165),
        }]
    );
}
/// Struct declarations: missing `struct` keyword fails; plain structs,
/// constant-sized member arrays, and a struct declared as a function return
/// type succeed; empty structs and storage qualifiers on struct declarations
/// fail.
#[test]
fn structs() {
    let mut frontend = Frontend::default();
    // Block syntax without `struct` or a storage qualifier is invalid.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        Test {
            vec4 pos;
        } xx;
        void main() {}
        "#,
        )
        .unwrap_err();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        struct Test {
            vec4 pos;
        };
        void main() {}
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        const int NUM_VECS = 42;
        struct Test {
            vec4 vecs[NUM_VECS];
        };
        void main() {}
        "#,
        )
        .unwrap();
    // Struct declared inline as a function's return type.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        struct Hello {
            vec4 test;
        } test() {
            return Hello( vec4(1.0) );
        }
        void main() {}
        "#,
        )
        .unwrap();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        struct Test {};
        void main() {}
        "#,
        )
        .unwrap_err();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        inout struct Test {
            vec4 x;
        };
        void main() {}
        "#,
        )
        .unwrap_err();
}
/// Swizzle selection: reads, writes, and nested swizzle chains succeed;
/// writing through a swizzle with duplicate components or selecting a
/// component past the vector's size must fail.
#[test]
fn swizzles() {
    let mut frontend = Frontend::default();
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            vec4 v = vec4(1);
            v.xyz = vec3(2);
            v.x = 5.0;
            v.xyz.zxy.yx.xy = vec2(5.0, 1.0);
        }
        "#,
        )
        .unwrap();
    // Duplicate components on the left-hand side are rejected.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            vec4 v = vec4(1);
            v.xx = vec2(5.0);
        }
        "#,
        )
        .unwrap_err();
    // `w` does not exist on a vec3.
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            vec3 v = vec3(1);
            v.w = 2.0;
        }
        "#,
        )
        .unwrap_err();
}
/// Miscellaneous expression forms: dynamic vector indexing, prefix
/// increment/decrement, and dynamic indexing of a const array.
#[test]
fn expressions() {
    let mut frontend = Frontend::default();
    // Vector indexing
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        float test(int index) {
            vec4 v = vec4(1.0, 2.0, 3.0, 4.0);
            return v[index] + 1.0;
        }
        void main() {}
        "#,
        )
        .unwrap();
    // Prefix increment/decrement
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            uint index = 0;
            --index;
            ++index;
        }
        "#,
        )
        .unwrap();
    // Dynamic indexing of array
    frontend
        .parse(
            &Options::from(ShaderStage::Vertex),
            r#"
        # version 450
        void main() {
            const vec4 positions[1] = { vec4(0) };
            gl_Position = positions[gl_VertexIndex];
        }
        "#,
        )
        .unwrap();
}

View file

@ -0,0 +1,137 @@
pub use pp_rs::token::{Float, Integer, Location, PreprocessorError, Token as PPToken};
use super::ast::Precision;
use crate::{Interpolation, Sampling, Span, Type};
impl From<Location> for Span {
    /// Converts a preprocessor source [`Location`] into the IR's [`Span`].
    fn from(location: Location) -> Self {
        Span::new(location.start, location.end)
    }
}
/// A single token produced by the lexer: its semantic value plus the source
/// span it covers.
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct Token {
    /// What the token is (identifier, literal, keyword, punctuation, …).
    pub value: TokenValue,
    /// Source span covered by the token.
    pub meta: Span,
}
/// A token passed from the lexing used in the parsing.
///
/// This type is exported since it's returned in the
/// [`InvalidToken`](super::ErrorKind::InvalidToken) error.
#[derive(Debug, PartialEq)]
pub enum TokenValue {
    // Identifiers and literal constants
    Identifier(String),
    FloatConstant(Float),
    IntConstant(Integer),
    BoolConstant(bool),
    // Storage, layout and precision qualifier keywords
    Layout,
    In,
    Out,
    InOut,
    Uniform,
    Buffer,
    Const,
    Shared,
    Restrict,
    /// A `glsl` memory qualifier such as `writeonly`
    ///
    /// The associated [`crate::StorageAccess`] is the access being allowed
    /// (for example `writeonly` has an associated value of [`crate::StorageAccess::STORE`])
    MemoryQualifier(crate::StorageAccess),
    Invariant,
    Interpolation(Interpolation),
    Sampling(Sampling),
    Precision,
    PrecisionQualifier(Precision),
    // Statement keywords
    Continue,
    Break,
    Return,
    Discard,
    If,
    Else,
    Switch,
    Case,
    Default,
    While,
    Do,
    For,
    // Type-related keywords; `TypeName` carries the already-resolved IR type
    Void,
    Struct,
    TypeName(Type),
    // Assignment operators
    Assign,
    AddAssign,
    SubAssign,
    MulAssign,
    DivAssign,
    ModAssign,
    LeftShiftAssign,
    RightShiftAssign,
    AndAssign,
    XorAssign,
    OrAssign,
    // Increment / decrement
    Increment,
    Decrement,
    // Logical and comparison operators
    LogicalOr,
    LogicalAnd,
    LogicalXor,
    LessEqual,
    GreaterEqual,
    Equal,
    NotEqual,
    // Shifts
    LeftShift,
    RightShift,
    // Brackets (angle brackets double as less-than / greater-than)
    LeftBrace,
    RightBrace,
    LeftParen,
    RightParen,
    LeftBracket,
    RightBracket,
    LeftAngle,
    RightAngle,
    // Punctuation and single-character operators
    Comma,
    Semicolon,
    Colon,
    Dot,
    Bang,
    Dash,
    Tilde,
    Plus,
    Star,
    Slash,
    Percent,
    VerticalBar,
    Caret,
    Ampersand,
    Question,
}
/// A preprocessor directive handed to the frontend, together with the raw
/// preprocessor tokens that followed the directive name.
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct Directive {
    /// Which directive this is.
    pub kind: DirectiveKind,
    /// The preprocessor tokens following the directive keyword.
    pub tokens: Vec<PPToken>,
}
/// The kinds of preprocessor directive forwarded to the frontend.
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub enum DirectiveKind {
    /// `#version`; records whether it was the first directive encountered.
    Version { is_first_directive: bool },
    /// `#extension`
    Extension,
    /// `#pragma`
    Pragma,
}

View file

@ -0,0 +1,336 @@
use super::{
constants::ConstantSolver, context::Context, Error, ErrorKind, Frontend, Result, Span,
};
use crate::{
proc::ResolveContext, Bytes, Expression, Handle, ImageClass, ImageDimension, ScalarKind, Type,
TypeInner, VectorSize,
};
/// Parses a GLSL type name into the equivalent IR [`Type`].
///
/// Covers the scalar names (`bool`, `float`, `double`, `int`, `uint`),
/// samplers, and the generated families `[bdiu]vec{2,3,4}`,
/// `[d]mat{2,3,4}[x{2,3,4}]`, `[iu]texture*` and `[iu]image*`. Returns
/// `None` for anything else so the caller can treat the word as a
/// user-defined identifier.
///
/// Fix: each family parser previously split the word on its keyword
/// (`"vec"`, `"mat"`, …) and consumed only the first two fragments, so names
/// with trailing garbage such as `vec2vec3` or `mat2mat2` were silently
/// accepted as the leading type. The split iterator is now required to be
/// exhausted, rejecting such names.
pub fn parse_type(type_name: &str) -> Option<Type> {
    match type_name {
        "bool" => Some(Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Bool,
                width: crate::BOOL_WIDTH,
            },
        }),
        "float" => Some(Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Float,
                width: 4,
            },
        }),
        "double" => Some(Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Float,
                width: 8,
            },
        }),
        "int" => Some(Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Sint,
                width: 4,
            },
        }),
        "uint" => Some(Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: ScalarKind::Uint,
                width: 4,
            },
        }),
        "sampler" | "samplerShadow" => Some(Type {
            name: None,
            inner: TypeInner::Sampler {
                comparison: type_name == "samplerShadow",
            },
        }),
        word => {
            // Maps the scalar prefix of a generated name ("" = float,
            // "b" = bool, "i" = sint, "u" = uint, "d" = double).
            fn kind_width_parse(ty: &str) -> Option<(ScalarKind, u8)> {
                Some(match ty {
                    "" => (ScalarKind::Float, 4),
                    "b" => (ScalarKind::Bool, crate::BOOL_WIDTH),
                    "i" => (ScalarKind::Sint, 4),
                    "u" => (ScalarKind::Uint, 4),
                    "d" => (ScalarKind::Float, 8),
                    _ => return None,
                })
            }
            // Maps a dimension digit ("2"/"3"/"4") to a vector size.
            fn size_parse(n: &str) -> Option<VectorSize> {
                Some(match n {
                    "2" => VectorSize::Bi,
                    "3" => VectorSize::Tri,
                    "4" => VectorSize::Quad,
                    _ => return None,
                })
            }
            let vec_parse = |word: &str| {
                let mut iter = word.split("vec");
                let kind = iter.next()?;
                let size = iter.next()?;
                // Reject trailing fragments (e.g. "vec2vec3"), which would
                // otherwise parse as the leading type.
                if iter.next().is_some() {
                    return None;
                }
                let (kind, width) = kind_width_parse(kind)?;
                let size = size_parse(size)?;
                Some(Type {
                    name: None,
                    inner: TypeInner::Vector { size, kind, width },
                })
            };
            let mat_parse = |word: &str| {
                let mut iter = word.split("mat");
                let kind = iter.next()?;
                let size = iter.next()?;
                // Same trailing-fragment rejection as `vec_parse`.
                if iter.next().is_some() {
                    return None;
                }
                let (_, width) = kind_width_parse(kind)?;
                // "mat3" means 3x3; "mat3x2" means 3 columns by 2 rows.
                let (columns, rows) = if let Some(size) = size_parse(size) {
                    (size, size)
                } else {
                    let mut iter = size.split('x');
                    match (iter.next()?, iter.next()?, iter.next()) {
                        (col, row, None) => (size_parse(col)?, size_parse(row)?),
                        _ => return None,
                    }
                };
                Some(Type {
                    name: None,
                    inner: TypeInner::Matrix {
                        columns,
                        rows,
                        width,
                    },
                })
            };
            let texture_parse = |word: &str| {
                let mut iter = word.split("texture");
                let texture_kind = |ty| {
                    Some(match ty {
                        "" => ScalarKind::Float,
                        "i" => ScalarKind::Sint,
                        "u" => ScalarKind::Uint,
                        _ => return None,
                    })
                };
                let kind = iter.next()?;
                let size = iter.next()?;
                // Reject trailing fragments (e.g. "texture2Dtexture2D").
                if iter.next().is_some() {
                    return None;
                }
                let kind = texture_kind(kind)?;
                let sampled = |multi| ImageClass::Sampled { kind, multi };
                let (dim, arrayed, class) = match size {
                    "1D" => (ImageDimension::D1, false, sampled(false)),
                    "1DArray" => (ImageDimension::D1, true, sampled(false)),
                    "2D" => (ImageDimension::D2, false, sampled(false)),
                    "2DArray" => (ImageDimension::D2, true, sampled(false)),
                    "2DMS" => (ImageDimension::D2, false, sampled(true)),
                    "2DMSArray" => (ImageDimension::D2, true, sampled(true)),
                    "3D" => (ImageDimension::D3, false, sampled(false)),
                    "Cube" => (ImageDimension::Cube, false, sampled(false)),
                    "CubeArray" => (ImageDimension::Cube, true, sampled(false)),
                    _ => return None,
                };
                Some(Type {
                    name: None,
                    inner: TypeInner::Image {
                        dim,
                        arrayed,
                        class,
                    },
                })
            };
            let image_parse = |word: &str| {
                let mut iter = word.split("image");
                let texture_kind = |ty| {
                    Some(match ty {
                        "" => ScalarKind::Float,
                        "i" => ScalarKind::Sint,
                        "u" => ScalarKind::Uint,
                        _ => return None,
                    })
                };
                let kind = iter.next()?;
                let size = iter.next()?;
                // Reject trailing fragments, as above.
                if iter.next().is_some() {
                    return None;
                }
                // TODO: Check that the texture format and the kind match
                let _ = texture_kind(kind)?;
                // Placeholder format/access; the real format is supplied by
                // the layout qualifier (see `QualifierKey::Format` handling).
                let class = ImageClass::Storage {
                    format: crate::StorageFormat::R8Uint,
                    access: crate::StorageAccess::all(),
                };
                // TODO: glsl support multisampled storage images, naga doesn't
                let (dim, arrayed) = match size {
                    "1D" => (ImageDimension::D1, false),
                    "1DArray" => (ImageDimension::D1, true),
                    "2D" => (ImageDimension::D2, false),
                    "2DArray" => (ImageDimension::D2, true),
                    "3D" => (ImageDimension::D3, false),
                    // Naga doesn't support cube images and it's usefulness
                    // is questionable, so they won't be supported for now
                    // "Cube" => (ImageDimension::Cube, false),
                    // "CubeArray" => (ImageDimension::Cube, true),
                    _ => return None,
                };
                Some(Type {
                    name: None,
                    inner: TypeInner::Image {
                        dim,
                        arrayed,
                        class,
                    },
                })
            };
            // Try each generated family in turn.
            vec_parse(word)
                .or_else(|| mat_parse(word))
                .or_else(|| texture_parse(word))
                .or_else(|| image_parse(word))
        }
    }
}
/// Extracts the scalar kind and byte width underlying `ty`, when it has one
/// (scalars, vectors, matrices — always float — and value pointers).
pub const fn scalar_components(ty: &TypeInner) -> Option<(ScalarKind, Bytes)> {
    match *ty {
        TypeInner::Scalar { kind, width }
        | TypeInner::Vector { kind, width, .. }
        | TypeInner::ValuePointer { kind, width, .. } => Some((kind, width)),
        TypeInner::Matrix { width, .. } => Some((ScalarKind::Float, width)),
        _ => None,
    }
}
/// Ranks a scalar kind: sint (0) < uint (1) < f32 (2) < f64 (3).
///
/// Booleans have no rank. NOTE(review): this ordering appears to drive
/// implicit-conversion preference — confirm at call sites.
pub const fn type_power(kind: ScalarKind, width: Bytes) -> Option<u32> {
    match kind {
        ScalarKind::Sint => Some(0),
        ScalarKind::Uint => Some(1),
        ScalarKind::Float => Some(if width == 4 { 2 } else { 3 }),
        ScalarKind::Bool => None,
    }
}
impl Frontend {
/// Resolves the types of the expressions until `expr` (inclusive)
///
/// This needs to be done before the [`typifier`] can be queried for
/// the types of the expressions in the range between the last grow and `expr`.
///
/// # Note
///
/// The `resolve_type*` methods (like [`resolve_type`]) automatically
/// grow the [`typifier`] so calling this method is not necessary when using
/// them.
///
/// [`typifier`]: Context::typifier
/// [`resolve_type`]: Self::resolve_type
pub(crate) fn typifier_grow(
&self,
ctx: &mut Context,
expr: Handle<Expression>,
meta: Span,
) -> Result<()> {
let resolve_ctx = ResolveContext::with_locals(&self.module, &ctx.locals, &ctx.arguments);
ctx.typifier
.grow(expr, &ctx.expressions, &resolve_ctx)
.map_err(|error| Error {
kind: ErrorKind::SemanticError(format!("Can't resolve type: {error:?}").into()),
meta,
})
}
/// Gets the type for the result of the `expr` expression
///
/// Automatically grows the [`typifier`] to `expr` so calling
/// [`typifier_grow`] is not necessary
///
/// [`typifier`]: Context::typifier
/// [`typifier_grow`]: Self::typifier_grow
pub(crate) fn resolve_type<'b>(
&'b self,
ctx: &'b mut Context,
expr: Handle<Expression>,
meta: Span,
) -> Result<&'b TypeInner> {
self.typifier_grow(ctx, expr, meta)?;
Ok(ctx.typifier.get(expr, &self.module.types))
}
/// Gets the type handle for the result of the `expr` expression
///
/// Automatically grows the [`typifier`] to `expr` so calling
/// [`typifier_grow`] is not necessary
///
/// # Note
///
/// Consider using [`resolve_type`] whenever possible
/// since it doesn't require adding each type to the [`types`] arena
/// and it doesn't need to mutably borrow the [`Parser`][Self]
///
/// [`types`]: crate::Module::types
/// [`typifier`]: Context::typifier
/// [`typifier_grow`]: Self::typifier_grow
/// [`resolve_type`]: Self::resolve_type
pub(crate) fn resolve_type_handle(
&mut self,
ctx: &mut Context,
expr: Handle<Expression>,
meta: Span,
) -> Result<Handle<Type>> {
self.typifier_grow(ctx, expr, meta)?;
Ok(ctx.typifier.register_type(expr, &mut self.module.types))
}
/// Invalidates the cached type resolution for `expr` forcing a recomputation
pub(crate) fn invalidate_expression<'b>(
&'b self,
ctx: &'b mut Context,
expr: Handle<Expression>,
meta: Span,
) -> Result<()> {
let resolve_ctx = ResolveContext::with_locals(&self.module, &ctx.locals, &ctx.arguments);
ctx.typifier
.invalidate(expr, &ctx.expressions, &resolve_ctx)
.map_err(|error| Error {
kind: ErrorKind::SemanticError(format!("Can't resolve type: {error:?}").into()),
meta,
})
}
pub(crate) fn solve_constant(
&mut self,
ctx: &Context,
root: Handle<Expression>,
meta: Span,
) -> Result<Handle<Expression>> {
let mut solver = ConstantSolver {
types: &mut self.module.types,
expressions: &ctx.expressions,
constants: &mut self.module.constants,
const_expressions: &mut self.module.const_expressions,
};
solver.solve(root).map_err(|e| Error {
kind: e.into(),
meta,
})
}
}

View file

@ -0,0 +1,671 @@
use super::{
ast::*,
context::{Context, ExprPos},
error::{Error, ErrorKind},
Frontend, Result, Span,
};
use crate::{
AddressSpace, Binding, Block, BuiltIn, Constant, Expression, GlobalVariable, Handle,
Interpolation, LocalVariable, ResourceBinding, ScalarKind, ShaderStage, SwizzleComponent, Type,
TypeInner, VectorSize,
};
/// Everything needed to declare a single variable; consumed by
/// [`Frontend::add_global_var`].
pub struct VarDeclaration<'a, 'key> {
    /// Qualifiers parsed before the declaration; pieces consumed by the
    /// declaration (interpolation, sampling, storage access) are `take`n.
    pub qualifiers: &'a mut TypeQualifiers<'key>,
    /// Type of the declared variable.
    pub ty: Handle<Type>,
    /// Variable name, if any.
    pub name: Option<String>,
    /// Optional initializer expression.
    pub init: Option<Handle<Expression>>,
    /// Source span of the declaration.
    pub meta: Span,
}
/// Information about a builtin used in [`add_builtin`](Frontend::add_builtin).
///
/// Only describes the builtin; the backing global is created lazily the
/// first time the builtin is looked up.
struct BuiltInData {
    /// The type of the builtin.
    inner: TypeInner,
    /// The associated builtin class.
    builtin: BuiltIn,
    /// Whether the builtin can be written to or not.
    mutable: bool,
    /// The storage used for the builtin.
    storage: StorageQualifier,
}
/// Result of declaring a global: plain globals and `const` declarations land
/// in different module arenas.
pub enum GlobalOrConstant {
    /// Handle into the module's global-variable arena.
    Global(Handle<GlobalVariable>),
    /// Handle into the module's constant arena.
    Constant(Handle<Constant>),
}
impl Frontend {
/// Adds a builtin and returns a variable reference to it
fn add_builtin(
&mut self,
ctx: &mut Context,
body: &mut Block,
name: &str,
data: BuiltInData,
meta: Span,
) -> Option<VariableReference> {
let ty = self.module.types.insert(
Type {
name: None,
inner: data.inner,
},
meta,
);
let handle = self.module.global_variables.append(
GlobalVariable {
name: Some(name.into()),
space: AddressSpace::Private,
binding: None,
ty,
init: None,
},
meta,
);
let idx = self.entry_args.len();
self.entry_args.push(EntryArg {
name: None,
binding: Binding::BuiltIn(data.builtin),
handle,
storage: data.storage,
});
self.global_variables.push((
name.into(),
GlobalLookup {
kind: GlobalLookupKind::Variable(handle),
entry_arg: Some(idx),
mutable: data.mutable,
},
));
let expr = ctx.add_expression(Expression::GlobalVariable(handle), meta, body);
let var = VariableReference {
expr,
load: true,
mutable: data.mutable,
constant: None,
entry_arg: Some(idx),
};
ctx.symbol_table.add_root(name.into(), var.clone());
Some(var)
}
    /// Resolves `name` to a variable reference, materializing GLSL builtins
    /// (`gl_*`) on first use via [`Self::add_builtin`].
    ///
    /// Returns `None` when the name is neither in scope nor a known builtin.
    pub(crate) fn lookup_variable(
        &mut self,
        ctx: &mut Context,
        body: &mut Block,
        name: &str,
        meta: Span,
    ) -> Option<VariableReference> {
        // Fast path: already declared (or a builtin materialized earlier).
        if let Some(var) = ctx.symbol_table.lookup(name).cloned() {
            return Some(var);
        }
        // Otherwise describe the builtin so it can be added on demand.
        let data = match name {
            // vec4 builtins
            "gl_Position" => BuiltInData {
                inner: TypeInner::Vector {
                    size: VectorSize::Quad,
                    kind: ScalarKind::Float,
                    width: 4,
                },
                builtin: BuiltIn::Position { invariant: false },
                mutable: true,
                storage: StorageQualifier::Output,
            },
            "gl_FragCoord" => BuiltInData {
                inner: TypeInner::Vector {
                    size: VectorSize::Quad,
                    kind: ScalarKind::Float,
                    width: 4,
                },
                builtin: BuiltIn::Position { invariant: false },
                mutable: false,
                storage: StorageQualifier::Input,
            },
            "gl_PointCoord" => BuiltInData {
                inner: TypeInner::Vector {
                    size: VectorSize::Bi,
                    kind: ScalarKind::Float,
                    width: 4,
                },
                builtin: BuiltIn::PointCoord,
                mutable: false,
                storage: StorageQualifier::Input,
            },
            // uvec3 compute-stage builtins
            "gl_GlobalInvocationID"
            | "gl_NumWorkGroups"
            | "gl_WorkGroupSize"
            | "gl_WorkGroupID"
            | "gl_LocalInvocationID" => BuiltInData {
                inner: TypeInner::Vector {
                    size: VectorSize::Tri,
                    kind: ScalarKind::Uint,
                    width: 4,
                },
                builtin: match name {
                    "gl_GlobalInvocationID" => BuiltIn::GlobalInvocationId,
                    "gl_NumWorkGroups" => BuiltIn::NumWorkGroups,
                    "gl_WorkGroupSize" => BuiltIn::WorkGroupSize,
                    "gl_WorkGroupID" => BuiltIn::WorkGroupId,
                    "gl_LocalInvocationID" => BuiltIn::LocalInvocationId,
                    _ => unreachable!(),
                },
                mutable: false,
                storage: StorageQualifier::Input,
            },
            "gl_FrontFacing" => BuiltInData {
                inner: TypeInner::Scalar {
                    kind: ScalarKind::Bool,
                    width: crate::BOOL_WIDTH,
                },
                builtin: BuiltIn::FrontFacing,
                mutable: false,
                storage: StorageQualifier::Input,
            },
            // Writable float outputs
            "gl_PointSize" | "gl_FragDepth" => BuiltInData {
                inner: TypeInner::Scalar {
                    kind: ScalarKind::Float,
                    width: 4,
                },
                builtin: match name {
                    "gl_PointSize" => BuiltIn::PointSize,
                    "gl_FragDepth" => BuiltIn::FragDepth,
                    _ => unreachable!(),
                },
                mutable: true,
                storage: StorageQualifier::Output,
            },
            // Unsized float arrays; only writable from the vertex stage.
            "gl_ClipDistance" | "gl_CullDistance" => {
                let base = self.module.types.insert(
                    Type {
                        name: None,
                        inner: TypeInner::Scalar {
                            kind: ScalarKind::Float,
                            width: 4,
                        },
                    },
                    meta,
                );
                BuiltInData {
                    inner: TypeInner::Array {
                        base,
                        size: crate::ArraySize::Dynamic,
                        stride: 4,
                    },
                    builtin: match name {
                        "gl_ClipDistance" => BuiltIn::ClipDistance,
                        "gl_CullDistance" => BuiltIn::CullDistance,
                        _ => unreachable!(),
                    },
                    mutable: self.meta.stage == ShaderStage::Vertex,
                    storage: StorageQualifier::Output,
                }
            }
            // Remaining builtins are all read-only uint scalar inputs.
            _ => {
                let builtin = match name {
                    "gl_BaseVertex" => BuiltIn::BaseVertex,
                    "gl_BaseInstance" => BuiltIn::BaseInstance,
                    "gl_PrimitiveID" => BuiltIn::PrimitiveIndex,
                    "gl_InstanceIndex" => BuiltIn::InstanceIndex,
                    "gl_VertexIndex" => BuiltIn::VertexIndex,
                    "gl_SampleID" => BuiltIn::SampleIndex,
                    "gl_LocalInvocationIndex" => BuiltIn::LocalInvocationIndex,
                    _ => return None,
                };
                BuiltInData {
                    inner: TypeInner::Scalar {
                        kind: ScalarKind::Uint,
                        width: 4,
                    },
                    builtin,
                    mutable: false,
                    storage: StorageQualifier::Input,
                }
            }
        };
        self.add_builtin(ctx, body, name, data, meta)
    }
pub(crate) fn make_variable_invariant(
&mut self,
ctx: &mut Context,
body: &mut Block,
name: &str,
meta: Span,
) {
if let Some(var) = self.lookup_variable(ctx, body, name, meta) {
if let Some(index) = var.entry_arg {
if let Binding::BuiltIn(BuiltIn::Position { ref mut invariant }) =
self.entry_args[index].binding
{
*invariant = true;
}
}
}
}
    /// Resolves `expression.name`: either a struct member access or a vector
    /// swizzle (`xyzw` / `rgba` / `stpq` component sets).
    ///
    /// `pos` decides pointer handling: on the right-hand side pointer bases
    /// get loaded, on the left-hand side the pointer is kept so the caller
    /// can store through it.
    pub(crate) fn field_selection(
        &mut self,
        ctx: &mut Context,
        pos: ExprPos,
        body: &mut Block,
        expression: Handle<Expression>,
        name: &str,
        meta: Span,
    ) -> Result<Handle<Expression>> {
        // Look through a pointer to the pointee type, remembering that a
        // load may be needed later.
        let (ty, is_pointer) = match *self.resolve_type(ctx, expression, meta)? {
            TypeInner::Pointer { base, .. } => (&self.module.types[base].inner, true),
            ref ty => (ty, false),
        };
        match *ty {
            // Struct member access by name.
            TypeInner::Struct { ref members, .. } => {
                let index = members
                    .iter()
                    .position(|m| m.name == Some(name.into()))
                    .ok_or_else(|| Error {
                        kind: ErrorKind::UnknownField(name.into()),
                        meta,
                    })?;
                let pointer = ctx.add_expression(
                    Expression::AccessIndex {
                        base: expression,
                        index: index as u32,
                    },
                    meta,
                    body,
                );
                Ok(match pos {
                    ExprPos::Rhs if is_pointer => {
                        ctx.add_expression(Expression::Load { pointer }, meta, body)
                    }
                    _ => pointer,
                })
            }
            // swizzles (xyzw, rgba, stpq)
            TypeInner::Vector { size, .. } => {
                // Try to interpret `name` against one component alphabet;
                // each character must also be within the vector's size.
                let check_swizzle_components = |comps: &str| {
                    name.chars()
                        .map(|c| {
                            comps
                                .find(c)
                                .filter(|i| *i < size as usize)
                                .map(|i| SwizzleComponent::from_index(i as u32))
                        })
                        .collect::<Option<Vec<SwizzleComponent>>>()
                };
                let components = check_swizzle_components("xyzw")
                    .or_else(|| check_swizzle_components("rgba"))
                    .or_else(|| check_swizzle_components("stpq"));
                if let Some(components) = components {
                    if let ExprPos::Lhs = pos {
                        // Writing through a swizzle with repeated components
                        // would be ambiguous, so it's an error.
                        let not_unique = (1..components.len())
                            .any(|i| components[i..].contains(&components[i - 1]));
                        if not_unique {
                            self.errors.push(Error {
                                kind:
                                    ErrorKind::SemanticError(
                                        format!(
                                            "swizzle cannot have duplicate components in left-hand-side expression for \"{name:?}\""
                                        )
                                        .into(),
                                    ),
                                meta ,
                            })
                        }
                    }
                    let mut pattern = [SwizzleComponent::X; 4];
                    for (pat, component) in pattern.iter_mut().zip(&components) {
                        *pat = *component;
                    }
                    // flatten nested swizzles (vec.zyx.xy.x => vec.z)
                    let mut expression = expression;
                    if let Expression::Swizzle {
                        size: _,
                        vector,
                        pattern: ref src_pattern,
                    } = ctx[expression]
                    {
                        expression = vector;
                        for pat in &mut pattern {
                            *pat = src_pattern[pat.index() as usize];
                        }
                    }
                    let size = match components.len() {
                        // Swizzles with just one component are accesses and not swizzles
                        1 => {
                            match pos {
                                // If the position is in the right hand side and the base
                                // vector is a pointer, load it, otherwise the swizzle would
                                // produce a pointer
                                ExprPos::Rhs if is_pointer => {
                                    expression = ctx.add_expression(
                                        Expression::Load {
                                            pointer: expression,
                                        },
                                        meta,
                                        body,
                                    );
                                }
                                _ => {}
                            };
                            return Ok(ctx.add_expression(
                                Expression::AccessIndex {
                                    base: expression,
                                    index: pattern[0].index(),
                                },
                                meta,
                                body,
                            ));
                        }
                        2 => VectorSize::Bi,
                        3 => VectorSize::Tri,
                        4 => VectorSize::Quad,
                        _ => {
                            self.errors.push(Error {
                                kind: ErrorKind::SemanticError(
                                    format!("Bad swizzle size for \"{name:?}\"").into(),
                                ),
                                meta,
                            });
                            VectorSize::Quad
                        }
                    };
                    if is_pointer {
                        // NOTE: for lhs expression, this extra load ends up as an unused expr, because the
                        // assignment will extract the pointer and use it directly anyway. Unfortunately we
                        // need it for validation to pass, as swizzles cannot operate on pointer values.
                        expression = ctx.add_expression(
                            Expression::Load {
                                pointer: expression,
                            },
                            meta,
                            body,
                        );
                    }
                    Ok(ctx.add_expression(
                        Expression::Swizzle {
                            size,
                            vector: expression,
                            pattern,
                        },
                        meta,
                        body,
                    ))
                } else {
                    Err(Error {
                        kind: ErrorKind::SemanticError(
                            format!("Invalid swizzle for vector \"{name}\"").into(),
                        ),
                        meta,
                    })
                }
            }
            _ => Err(Error {
                kind: ErrorKind::SemanticError(
                    format!("Can't lookup field on this type \"{name}\"").into(),
                ),
                meta,
            }),
        }
    }
    /// Registers a global variable or constant from a GLSL declaration.
    ///
    /// Depending on the declaration's storage qualifier this produces:
    /// - `in`/`out`: a private [`GlobalVariable`] plus an [`EntryArg`] record,
    ///   so the entry point wrapper can copy the IO value in/out,
    /// - `const`: a module [`Constant`],
    /// - anything else: a plain [`GlobalVariable`] in the mapped address space.
    ///
    /// If the declaration is named, the result is also registered in `ctx` and
    /// in `self.global_variables` so later code can look it up by name.
    pub(crate) fn add_global_var(
        &mut self,
        ctx: &mut Context,
        body: &mut Block,
        VarDeclaration {
            qualifiers,
            mut ty,
            name,
            init,
            meta,
        }: VarDeclaration,
    ) -> Result<GlobalOrConstant> {
        let storage = qualifiers.storage.0;
        let (ret, lookup) = match storage {
            StorageQualifier::Input | StorageQualifier::Output => {
                let input = storage == StorageQualifier::Input;
                // TODO: glslang seems to use a counter for variables without
                // explicit location (even if that causes collisions)
                let location = qualifiers
                    .uint_layout_qualifier("location", &mut self.errors)
                    .unwrap_or(0);
                // Default interpolation when none is specified: perspective
                // for floats, flat for any other scalar kind.
                let interpolation = qualifiers.interpolation.take().map(|(i, _)| i).or_else(|| {
                    let kind = self.module.types[ty].inner.scalar_kind()?;
                    Some(match kind {
                        ScalarKind::Float => Interpolation::Perspective,
                        _ => Interpolation::Flat,
                    })
                });
                let sampling = qualifiers.sampling.take().map(|(s, _)| s);
                // The IO value itself is backed by a private global; the
                // entry point wrapper moves data between it and the binding.
                let handle = self.module.global_variables.append(
                    GlobalVariable {
                        name: name.clone(),
                        space: AddressSpace::Private,
                        binding: None,
                        ty,
                        init,
                    },
                    meta,
                );
                let idx = self.entry_args.len();
                self.entry_args.push(EntryArg {
                    name: name.clone(),
                    binding: Binding::Location {
                        location,
                        interpolation,
                        sampling,
                    },
                    handle,
                    storage,
                });
                let lookup = GlobalLookup {
                    kind: GlobalLookupKind::Variable(handle),
                    entry_arg: Some(idx),
                    // Inputs are read-only from the shader's point of view.
                    mutable: !input,
                };
                (GlobalOrConstant::Global(handle), lookup)
            }
            StorageQualifier::Const => {
                let init = init.ok_or_else(|| Error {
                    kind: ErrorKind::SemanticError("const values must have an initializer".into()),
                    meta,
                })?;
                let constant = Constant {
                    name: name.clone(),
                    r#override: crate::Override::None,
                    ty,
                    init,
                };
                // `fetch_or_append` reuses an identical existing constant
                // rather than adding a duplicate.
                let handle = self.module.constants.fetch_or_append(constant, meta);
                let lookup = GlobalLookup {
                    kind: GlobalLookupKind::Constant(handle, ty),
                    entry_arg: None,
                    mutable: false,
                };
                (GlobalOrConstant::Constant(handle), lookup)
            }
            StorageQualifier::AddressSpace(mut space) => {
                // First fix up the address space from the qualifiers and the
                // variable's type, then (maybe) build a resource binding.
                match space {
                    AddressSpace::Storage { ref mut access } => {
                        if let Some((allowed_access, _)) = qualifiers.storage_access.take() {
                            *access = allowed_access;
                        }
                    }
                    AddressSpace::Uniform => match self.module.types[ty].inner {
                        TypeInner::Image {
                            class,
                            dim,
                            arrayed,
                        } => {
                            // Storage images carry access and format in the
                            // type itself, so rebuild the type from the
                            // layout qualifiers.
                            if let crate::ImageClass::Storage {
                                mut access,
                                mut format,
                            } = class
                            {
                                if let Some((allowed_access, _)) = qualifiers.storage_access.take()
                                {
                                    access = allowed_access;
                                }
                                match qualifiers.layout_qualifiers.remove(&QualifierKey::Format) {
                                    Some((QualifierValue::Format(f), _)) => format = f,
                                    // TODO: glsl supports images without format qualifier
                                    // if they are `writeonly`
                                    None => self.errors.push(Error {
                                        kind: ErrorKind::SemanticError(
                                            "image types require a format layout qualifier".into(),
                                        ),
                                        meta,
                                    }),
                                    _ => unreachable!(),
                                }
                                ty = self.module.types.insert(
                                    Type {
                                        name: None,
                                        inner: TypeInner::Image {
                                            dim,
                                            arrayed,
                                            class: crate::ImageClass::Storage { format, access },
                                        },
                                    },
                                    meta,
                                );
                            }
                            // Images are opaque handles, not uniform data.
                            space = AddressSpace::Handle
                        }
                        TypeInner::Sampler { .. } => space = AddressSpace::Handle,
                        _ => {
                            if qualifiers.none_layout_qualifier("push_constant", &mut self.errors) {
                                space = AddressSpace::PushConstant
                            }
                        }
                    },
                    AddressSpace::Function => space = AddressSpace::Private,
                    _ => {}
                };
                // Resource address spaces require an explicit binding point.
                let binding = match space {
                    AddressSpace::Uniform | AddressSpace::Storage { .. } | AddressSpace::Handle => {
                        let binding = qualifiers.uint_layout_qualifier("binding", &mut self.errors);
                        if binding.is_none() {
                            self.errors.push(Error {
                                kind: ErrorKind::SemanticError(
                                    "uniform/buffer blocks require layout(binding=X)".into(),
                                ),
                                meta,
                            });
                        }
                        let set = qualifiers.uint_layout_qualifier("set", &mut self.errors);
                        binding.map(|binding| ResourceBinding {
                            group: set.unwrap_or(0),
                            binding,
                        })
                    }
                    _ => None,
                };
                let handle = self.module.global_variables.append(
                    GlobalVariable {
                        name: name.clone(),
                        space,
                        binding,
                        ty,
                        init,
                    },
                    meta,
                );
                let lookup = GlobalLookup {
                    kind: GlobalLookupKind::Variable(handle),
                    entry_arg: None,
                    mutable: true,
                };
                (GlobalOrConstant::Global(handle), lookup)
            }
        };
        if let Some(name) = name {
            ctx.add_global(self, &name, lookup, body);
            self.global_variables.push((name, lookup));
        }
        // Report any qualifiers that were never consumed above.
        qualifiers.unused_errors(&mut self.errors);
        Ok(ret)
    }
pub(crate) fn add_local_var(
&mut self,
ctx: &mut Context,
body: &mut Block,
decl: VarDeclaration,
) -> Result<Handle<Expression>> {
let storage = decl.qualifiers.storage;
let mutable = match storage.0 {
StorageQualifier::AddressSpace(AddressSpace::Function) => true,
StorageQualifier::Const => false,
_ => {
self.errors.push(Error {
kind: ErrorKind::SemanticError("Locals cannot have a storage qualifier".into()),
meta: storage.1,
});
true
}
};
let handle = ctx.locals.append(
LocalVariable {
name: decl.name.clone(),
ty: decl.ty,
init: None,
},
decl.meta,
);
let expr = ctx.add_expression(Expression::LocalVariable(handle), decl.meta, body);
if let Some(name) = decl.name {
let maybe_var = ctx.add_local_var(name.clone(), expr, mutable);
if maybe_var.is_some() {
self.errors.push(Error {
kind: ErrorKind::VariableAlreadyDeclared(name),
meta: decl.meta,
})
}
}
decl.qualifiers.unused_errors(&mut self.errors);
Ok(expr)
}
}

View file

@ -0,0 +1,61 @@
/*!
Interpolation defaults.
*/
impl crate::Binding {
    /// Apply the usual default interpolation for `ty` to `binding`.
    ///
    /// Front ends can use this utility to satisfy the Naga IR requirement
    /// that every vertex-to-fragment IO [`Binding`] carry a non-`None`
    /// `interpolation`. The shader languages Naga supports share the same
    /// defaults, which this function implements:
    ///
    /// - Floating-point scalars, vectors, and matrices default to
    ///   [`Perspective`] interpolation with [`Center`] sampling.
    ///
    /// - Integral scalars and vectors default to [`Flat`] interpolation,
    ///   which has no associated sampling.
    ///
    /// - Any other type is left untouched; such types are not permitted as
    ///   user-defined IO values and will be caught by validation. Structs in
    ///   IO positions have a [`Binding`] per member, so they fall under this
    ///   case as well.
    ///
    /// Bindings that are not [`Location`]s, or whose [`interpolation`] is
    /// already set, are never modified.
    ///
    /// [`Binding`]: crate::Binding
    /// [`Location`]: crate::Binding::Location
    /// [`interpolation`]: crate::Binding::Location::interpolation
    /// [`Perspective`]: crate::Interpolation::Perspective
    /// [`Flat`]: crate::Interpolation::Flat
    /// [`Center`]: crate::Sampling::Center
    pub fn apply_default_interpolation(&mut self, ty: &crate::TypeInner) {
        // Only `Location` bindings that haven't chosen an interpolation yet
        // are eligible; everything else stays as-is.
        if let crate::Binding::Location {
            interpolation: ref mut interpolation @ None,
            ref mut sampling,
            ..
        } = *self
        {
            let kind = ty.scalar_kind();
            if kind == Some(crate::ScalarKind::Float) {
                // Floats vary smoothly by default.
                *interpolation = Some(crate::Interpolation::Perspective);
                *sampling = Some(crate::Sampling::Center);
            } else if matches!(
                kind,
                Some(crate::ScalarKind::Sint) | Some(crate::ScalarKind::Uint)
            ) {
                // Integers cannot be interpolated.
                *interpolation = Some(crate::Interpolation::Flat);
                *sampling = None;
            }
            // Other kinds (or no scalar kind): leave for the validator.
        }
    }
}

364
third-party/vendor/naga/src/front/mod.rs vendored Normal file
View file

@ -0,0 +1,364 @@
/*!
Frontend parsers that consume binary and text shaders and load them into [`Module`](super::Module)s.
*/
mod interpolator;
mod type_gen;
#[cfg(feature = "glsl-in")]
pub mod glsl;
#[cfg(feature = "spv-in")]
pub mod spv;
#[cfg(feature = "wgsl-in")]
pub mod wgsl;
use crate::{
arena::{Arena, Handle, UniqueArena},
proc::{ResolveContext, ResolveError, TypeResolution},
FastHashMap,
};
use std::ops;
/// Helper class to emit expressions
///
/// Tracks a range of expressions appended to an arena so they can be wrapped
/// in a single `Emit` statement; see [`Emitter::start`] and
/// [`Emitter::finish`].
#[allow(dead_code)]
#[derive(Default, Debug)]
struct Emitter {
    // Arena length recorded by `start`; `None` while no range is open.
    start_len: Option<usize>,
}
#[allow(dead_code)]
impl Emitter {
    /// Begin tracking expressions added to `arena`.
    ///
    /// Calling this while a range is already open is a bug and hits
    /// `unreachable!`.
    fn start(&mut self, arena: &Arena<crate::Expression>) {
        if self.start_len.is_some() {
            unreachable!("Emitting has already started!");
        }
        self.start_len = Some(arena.len());
    }
    /// Close the current emit range.
    ///
    /// Returns an `Emit` statement covering every expression appended since
    /// [`start`](Self::start) — with a span subsuming those expressions'
    /// spans when the `span` feature is enabled — or `None` when nothing was
    /// appended. Panics if no range is open.
    #[must_use]
    fn finish(
        &mut self,
        arena: &Arena<crate::Expression>,
    ) -> Option<(crate::Statement, crate::span::Span)> {
        let start_len = self.start_len.take().unwrap();
        if start_len != arena.len() {
            #[allow(unused_mut)]
            let mut span = crate::span::Span::default();
            let range = arena.range_from(start_len);
            #[cfg(feature = "span")]
            for handle in range.clone() {
                span.subsume(arena.get_span(handle))
            }
            Some((crate::Statement::Emit(range), span))
        } else {
            None
        }
    }
}
/// A table of types for an `Arena<Expression>`.
///
/// A front end can use a `Typifier` to get types for an arena's expressions
/// while it is still contributing expressions to it. At any point, you can call
/// [`typifier.grow(expr, arena, ctx)`], where `expr` is a `Handle<Expression>`
/// referring to something in `arena`, and the `Typifier` will resolve the types
/// of all the expressions up to and including `expr`. Then you can write
/// `typifier[handle]` to get the type of any handle at or before `expr`.
///
/// Note that `Typifier` does *not* build an `Arena<Type>` as a part of its
/// usual operation. Ideally, a module's type arena should only contain types
/// actually needed by `Handle<Type>`s elsewhere in the module — functions,
/// variables, [`Compose`] expressions, other types, and so on — so we don't
/// want every little thing that occurs as the type of some intermediate
/// expression to show up there.
///
/// Instead, `Typifier` accumulates a [`TypeResolution`] for each expression,
/// which refers to the `Arena<Type>` in the [`ResolveContext`] passed to `grow`
/// as needed. [`TypeResolution`] is a lightweight representation for
/// intermediate types like this; see its documentation for details.
///
/// If you do need to register a `Typifier`'s conclusion in an `Arena<Type>`
/// (say, for a [`LocalVariable`] whose type you've inferred), you can use
/// [`register_type`] to do so.
///
/// [`typifier.grow(expr, arena, ctx)`]: Typifier::grow
/// [`register_type`]: Typifier::register_type
/// [`Compose`]: crate::Expression::Compose
/// [`LocalVariable`]: crate::LocalVariable
#[derive(Debug, Default)]
pub struct Typifier {
    // One resolution per expression, indexed by `Handle::index()`.
    resolutions: Vec<TypeResolution>,
}
impl Typifier {
    /// Creates an empty typifier with no cached resolutions.
    pub const fn new() -> Self {
        Typifier {
            resolutions: Vec::new(),
        }
    }
    /// Discards every cached resolution.
    pub fn reset(&mut self) {
        self.resolutions.clear()
    }
    /// Returns the resolved [`TypeInner`](crate::TypeInner) of `expr_handle`.
    ///
    /// Panics if `expr_handle` hasn't been covered by [`grow`](Self::grow).
    pub fn get<'a>(
        &'a self,
        expr_handle: Handle<crate::Expression>,
        types: &'a UniqueArena<crate::Type>,
    ) -> &'a crate::TypeInner {
        self.resolutions[expr_handle.index()].inner_with(types)
    }
    /// Add an expression's type to an `Arena<Type>`.
    ///
    /// Add the type of `expr_handle` to `types`, and return a `Handle<Type>`
    /// referring to it.
    ///
    /// # Note
    ///
    /// If you just need a [`TypeInner`] for `expr_handle`'s type, consider
    /// using `typifier[expression].inner_with(types)` instead. Calling
    /// [`TypeResolution::inner_with`] often lets us avoid adding anything to
    /// the arena, which can significantly reduce the number of types that end
    /// up in the final module.
    ///
    /// [`TypeInner`]: crate::TypeInner
    pub fn register_type(
        &self,
        expr_handle: Handle<crate::Expression>,
        types: &mut UniqueArena<crate::Type>,
    ) -> Handle<crate::Type> {
        match self[expr_handle].clone() {
            TypeResolution::Handle(handle) => handle,
            TypeResolution::Value(inner) => {
                types.insert(crate::Type { name: None, inner }, crate::Span::UNDEFINED)
            }
        }
    }
    /// Grow this typifier until it contains a type for `expr_handle`.
    pub fn grow(
        &mut self,
        expr_handle: Handle<crate::Expression>,
        expressions: &Arena<crate::Expression>,
        ctx: &ResolveContext,
    ) -> Result<(), ResolveError> {
        if self.resolutions.len() <= expr_handle.index() {
            // Resolve every uncovered expression in arena order, so operand
            // resolutions are always available before they're needed.
            for (eh, expr) in expressions.iter().skip(self.resolutions.len()) {
                //Note: the closure can't `Err` by construction
                let resolution = ctx.resolve(expr, |h| Ok(&self.resolutions[h.index()]))?;
                log::debug!("Resolving {:?} = {:?} : {:?}", eh, expr, resolution);
                self.resolutions.push(resolution);
            }
        }
        Ok(())
    }
    /// Recompute the type resolution for `expr_handle`.
    ///
    /// If the type of `expr_handle` hasn't yet been calculated, call
    /// [`grow`](Self::grow) to ensure it is covered.
    ///
    /// In either case, when this returns, `self[expr_handle]` should be an
    /// updated type resolution for `expr_handle`.
    pub fn invalidate(
        &mut self,
        expr_handle: Handle<crate::Expression>,
        expressions: &Arena<crate::Expression>,
        ctx: &ResolveContext,
    ) -> Result<(), ResolveError> {
        if self.resolutions.len() <= expr_handle.index() {
            self.grow(expr_handle, expressions, ctx)
        } else {
            let expr = &expressions[expr_handle];
            //Note: the closure can't `Err` by construction
            let resolution = ctx.resolve(expr, |h| Ok(&self.resolutions[h.index()]))?;
            self.resolutions[expr_handle.index()] = resolution;
            Ok(())
        }
    }
}
impl ops::Index<Handle<crate::Expression>> for Typifier {
    type Output = TypeResolution;

    /// Returns the cached resolution for `handle`.
    ///
    /// Panics when `handle` hasn't been covered by [`Typifier::grow`] yet.
    fn index(&self, handle: Handle<crate::Expression>) -> &Self::Output {
        let position = handle.index();
        &self.resolutions[position]
    }
}
/// Type representing a lexical scope, associating a name to a single variable
///
/// The scope is generic over the variable representation and name representation
/// in order to allow larger flexibility on the frontends on how they might
/// represent them.
type Scope<Name, Var> = FastHashMap<Name, Var>;
/// Structure responsible for managing variable lookups and keeping track of
/// lexical scopes
///
/// The symbol table is generic over the variable representation and its name
/// to allow larger flexibility on the frontends on how they might represent them.
///
/// ```
/// use naga::front::SymbolTable;
///
/// // Create a new symbol table with `u32`s representing the variable
/// let mut symbol_table: SymbolTable<&str, u32> = SymbolTable::default();
///
/// // Add two variables named `var1` and `var2` with 0 and 2 respectively
/// symbol_table.add("var1", 0);
/// symbol_table.add("var2", 2);
///
/// // Check that `var1` exists and is `0`
/// assert_eq!(symbol_table.lookup("var1"), Some(&0));
///
/// // Push a new scope and add a variable to it named `var1` shadowing the
/// // variable of our previous scope
/// symbol_table.push_scope();
/// symbol_table.add("var1", 1);
///
/// // Check that `var1` now points to the new value of `1` and `var2` still
/// // exists with its value of `2`
/// assert_eq!(symbol_table.lookup("var1"), Some(&1));
/// assert_eq!(symbol_table.lookup("var2"), Some(&2));
///
/// // Pop the scope
/// symbol_table.pop_scope();
///
/// // Check that `var1` now refers to our initial variable with value `0`
/// assert_eq!(symbol_table.lookup("var1"), Some(&0));
/// ```
///
/// Scopes are ordered as a LIFO stack so a variable defined in a later scope
/// with the same name as another variable defined in a earlier scope will take
/// precedence in the lookup. Scopes can be added with [`push_scope`] and
/// removed with [`pop_scope`].
///
/// A root scope is added when the symbol table is created and must always be
/// present. Trying to pop it will result in a panic.
///
/// Variables can be added with [`add`] and looked up with [`lookup`]. Adding a
/// variable will do so in the currently active scope and as mentioned
/// previously a lookup will search from the current scope to the root scope.
///
/// [`push_scope`]: Self::push_scope
/// [`pop_scope`]: Self::pop_scope
/// [`add`]: Self::add
/// [`lookup`]: Self::lookup
pub struct SymbolTable<Name, Var> {
    /// Stack of lexical scopes. Not all scopes are active; see [`cursor`].
    ///
    /// [`cursor`]: Self::cursor
    scopes: Vec<Scope<Name, Var>>,
    /// Limit of the [`scopes`] stack (exclusive). By using a separate value for
    /// the stack length instead of `Vec`'s own internal length, the scopes can
    /// be reused to cache memory allocations.
    ///
    /// [`scopes`]: Self::scopes
    cursor: usize,
}
impl<Name, Var> SymbolTable<Name, Var> {
    /// Opens a new lexical scope.
    ///
    /// Every variable declared afterwards lands in this scope until another
    /// scope is pushed on top of it or [`pop_scope`] closes it, discarding
    /// the variables it holds.
    ///
    /// [`pop_scope`]: Self::pop_scope
    pub fn push_scope(&mut self) {
        // Reuse a previously popped scope when one exists past the cursor;
        // otherwise grow the stack with a fresh map.
        match self.scopes.get_mut(self.cursor) {
            Some(stale) => stale.clear(),
            None => self.scopes.push(FastHashMap::default()),
        }
        self.cursor += 1;
    }
    /// Closes the current lexical scope and all its variables
    ///
    /// # PANICS
    /// - If the current lexical scope is the root scope
    pub fn pop_scope(&mut self) {
        // The map itself stays allocated and is only cleared if this nesting
        // level is pushed again — that way its allocation can be reused, and
        // a scope that's never reused costs nothing to "clear".
        assert!(self.cursor != 1, "Tried to pop the root scope");
        self.cursor -= 1;
    }
}
impl<Name, Var> SymbolTable<Name, Var>
where
    Name: std::hash::Hash + Eq,
{
    /// Perform a lookup for a variable named `name`.
    ///
    /// Searches from the innermost active scope outwards to the root scope
    /// and returns the first binding found, or `None` when no scope defines
    /// `name`.
    pub fn lookup<Q: ?Sized>(&self, name: &Q) -> Option<&Var>
    where
        Name: std::borrow::Borrow<Q>,
        Q: std::hash::Hash + Eq,
    {
        // Walk the active scopes newest-first so shadowing declarations win.
        self.scopes[..self.cursor]
            .iter()
            .rev()
            .find_map(|scope| scope.get(name))
    }
    /// Adds a new variable to the current scope.
    ///
    /// Returns the variable previously bound to `name` in this scope, if any,
    /// so the frontend can reject shadowing when its rules disallow it.
    pub fn add(&mut self, name: Name, var: Var) -> Option<Var> {
        let current = self.cursor - 1;
        self.scopes[current].insert(name, var)
    }
    /// Adds a new variable to the root scope.
    ///
    /// GLSL builtins are only declared when first used, so they must be
    /// registrable from any scope; this inserts into scope 0 unconditionally.
    ///
    /// Returns the variable previously bound to `name` in the root scope, if
    /// any, so the frontend can handle disallowed shadowing.
    pub fn add_root(&mut self, name: Name, var: Var) -> Option<Var> {
        let root = &mut self.scopes[0];
        root.insert(name, var)
    }
}
impl<Name, Var> Default for SymbolTable<Name, Var> {
    /// Constructs a table containing only the (empty) root scope.
    fn default() -> Self {
        let root = FastHashMap::default();
        Self {
            scopes: vec![root],
            cursor: 1,
        }
    }
}
use std::fmt;
impl<Name: fmt::Debug, Var: fmt::Debug> fmt::Debug for SymbolTable<Name, Var> {
    /// Formats only the scopes that are currently active (below the cursor).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("SymbolTable ")?;
        let active = self.scopes[..self.cursor].iter();
        f.debug_list().entries(active).finish()
    }
}

View file

@ -0,0 +1,181 @@
use super::error::Error;
use num_traits::cast::FromPrimitive;
use std::convert::TryInto;
/// Maps a SPIR-V binary-instruction opcode to the Naga IR [`BinaryOperator`](crate::BinaryOperator).
///
/// Opcodes without a direct binary-operator equivalent produce
/// [`Error::UnknownBinaryOperator`].
pub(super) const fn map_binary_operator(word: spirv::Op) -> Result<crate::BinaryOperator, Error> {
    use crate::BinaryOperator;
    use spirv::Op;
    match word {
        // Arithmetic Instructions +, -, *, /, %
        Op::IAdd | Op::FAdd => Ok(BinaryOperator::Add),
        Op::ISub | Op::FSub => Ok(BinaryOperator::Subtract),
        Op::IMul | Op::FMul => Ok(BinaryOperator::Multiply),
        Op::UDiv | Op::SDiv | Op::FDiv => Ok(BinaryOperator::Divide),
        // NOTE(review): only OpSRem maps to Modulo here; OpUMod/OpFRem are
        // presumably handled elsewhere or rejected — confirm against callers.
        Op::SRem => Ok(BinaryOperator::Modulo),
        // Relational and Logical Instructions
        Op::IEqual | Op::FOrdEqual | Op::FUnordEqual | Op::LogicalEqual => {
            Ok(BinaryOperator::Equal)
        }
        Op::INotEqual | Op::FOrdNotEqual | Op::FUnordNotEqual | Op::LogicalNotEqual => {
            Ok(BinaryOperator::NotEqual)
        }
        Op::ULessThan | Op::SLessThan | Op::FOrdLessThan | Op::FUnordLessThan => {
            Ok(BinaryOperator::Less)
        }
        Op::ULessThanEqual
        | Op::SLessThanEqual
        | Op::FOrdLessThanEqual
        | Op::FUnordLessThanEqual => Ok(BinaryOperator::LessEqual),
        Op::UGreaterThan | Op::SGreaterThan | Op::FOrdGreaterThan | Op::FUnordGreaterThan => {
            Ok(BinaryOperator::Greater)
        }
        Op::UGreaterThanEqual
        | Op::SGreaterThanEqual
        | Op::FOrdGreaterThanEqual
        | Op::FUnordGreaterThanEqual => Ok(BinaryOperator::GreaterEqual),
        // Bitwise instructions
        Op::BitwiseOr => Ok(BinaryOperator::InclusiveOr),
        Op::BitwiseXor => Ok(BinaryOperator::ExclusiveOr),
        Op::BitwiseAnd => Ok(BinaryOperator::And),
        _ => Err(Error::UnknownBinaryOperator(word)),
    }
}
/// Maps a SPIR-V relational opcode to the Naga IR [`RelationalFunction`](crate::RelationalFunction).
///
/// Unhandled opcodes produce [`Error::UnknownRelationalFunction`].
pub(super) const fn map_relational_fun(
    word: spirv::Op,
) -> Result<crate::RelationalFunction, Error> {
    use crate::RelationalFunction as Rf;
    use spirv::Op;
    match word {
        Op::All => Ok(Rf::All),
        Op::Any => Ok(Rf::Any),
        Op::IsNan => Ok(Rf::IsNan),
        Op::IsInf => Ok(Rf::IsInf),
        Op::IsFinite => Ok(Rf::IsFinite),
        Op::IsNormal => Ok(Rf::IsNormal),
        _ => Err(Error::UnknownRelationalFunction(word)),
    }
}
pub(super) const fn map_vector_size(word: spirv::Word) -> Result<crate::VectorSize, Error> {
match word {
2 => Ok(crate::VectorSize::Bi),
3 => Ok(crate::VectorSize::Tri),
4 => Ok(crate::VectorSize::Quad),
_ => Err(Error::InvalidVectorSize(word)),
}
}
/// Maps a SPIR-V `Dim` operand to a Naga [`crate::ImageDimension`].
///
/// Any dimension other than 1D/2D/3D/Cube is rejected with
/// [`Error::UnsupportedImageDim`].
pub(super) fn map_image_dim(word: spirv::Word) -> Result<crate::ImageDimension, Error> {
    use spirv::Dim as D;
    match D::from_u32(word) {
        Some(D::Dim1D) => Ok(crate::ImageDimension::D1),
        Some(D::Dim2D) => Ok(crate::ImageDimension::D2),
        Some(D::Dim3D) => Ok(crate::ImageDimension::D3),
        Some(D::DimCube) => Ok(crate::ImageDimension::Cube),
        _ => Err(Error::UnsupportedImageDim(word)),
    }
}
/// Maps a SPIR-V `ImageFormat` operand to a Naga [`crate::StorageFormat`].
///
/// Formats Naga has no representation for produce
/// [`Error::UnsupportedImageFormat`].
pub(super) fn map_image_format(word: spirv::Word) -> Result<crate::StorageFormat, Error> {
    match spirv::ImageFormat::from_u32(word) {
        Some(spirv::ImageFormat::R8) => Ok(crate::StorageFormat::R8Unorm),
        Some(spirv::ImageFormat::R8Snorm) => Ok(crate::StorageFormat::R8Snorm),
        Some(spirv::ImageFormat::R8ui) => Ok(crate::StorageFormat::R8Uint),
        Some(spirv::ImageFormat::R8i) => Ok(crate::StorageFormat::R8Sint),
        Some(spirv::ImageFormat::R16) => Ok(crate::StorageFormat::R16Unorm),
        Some(spirv::ImageFormat::R16Snorm) => Ok(crate::StorageFormat::R16Snorm),
        Some(spirv::ImageFormat::R16ui) => Ok(crate::StorageFormat::R16Uint),
        Some(spirv::ImageFormat::R16i) => Ok(crate::StorageFormat::R16Sint),
        Some(spirv::ImageFormat::R16f) => Ok(crate::StorageFormat::R16Float),
        Some(spirv::ImageFormat::Rg8) => Ok(crate::StorageFormat::Rg8Unorm),
        Some(spirv::ImageFormat::Rg8Snorm) => Ok(crate::StorageFormat::Rg8Snorm),
        Some(spirv::ImageFormat::Rg8ui) => Ok(crate::StorageFormat::Rg8Uint),
        Some(spirv::ImageFormat::Rg8i) => Ok(crate::StorageFormat::Rg8Sint),
        Some(spirv::ImageFormat::R32ui) => Ok(crate::StorageFormat::R32Uint),
        Some(spirv::ImageFormat::R32i) => Ok(crate::StorageFormat::R32Sint),
        Some(spirv::ImageFormat::R32f) => Ok(crate::StorageFormat::R32Float),
        Some(spirv::ImageFormat::Rg16) => Ok(crate::StorageFormat::Rg16Unorm),
        Some(spirv::ImageFormat::Rg16Snorm) => Ok(crate::StorageFormat::Rg16Snorm),
        Some(spirv::ImageFormat::Rg16ui) => Ok(crate::StorageFormat::Rg16Uint),
        Some(spirv::ImageFormat::Rg16i) => Ok(crate::StorageFormat::Rg16Sint),
        Some(spirv::ImageFormat::Rg16f) => Ok(crate::StorageFormat::Rg16Float),
        Some(spirv::ImageFormat::Rgba8) => Ok(crate::StorageFormat::Rgba8Unorm),
        Some(spirv::ImageFormat::Rgba8Snorm) => Ok(crate::StorageFormat::Rgba8Snorm),
        Some(spirv::ImageFormat::Rgba8ui) => Ok(crate::StorageFormat::Rgba8Uint),
        Some(spirv::ImageFormat::Rgba8i) => Ok(crate::StorageFormat::Rgba8Sint),
        Some(spirv::ImageFormat::Rgb10a2ui) => Ok(crate::StorageFormat::Rgb10a2Unorm),
        Some(spirv::ImageFormat::R11fG11fB10f) => Ok(crate::StorageFormat::Rg11b10Float),
        Some(spirv::ImageFormat::Rg32ui) => Ok(crate::StorageFormat::Rg32Uint),
        Some(spirv::ImageFormat::Rg32i) => Ok(crate::StorageFormat::Rg32Sint),
        Some(spirv::ImageFormat::Rg32f) => Ok(crate::StorageFormat::Rg32Float),
        Some(spirv::ImageFormat::Rgba16) => Ok(crate::StorageFormat::Rgba16Unorm),
        Some(spirv::ImageFormat::Rgba16Snorm) => Ok(crate::StorageFormat::Rgba16Snorm),
        Some(spirv::ImageFormat::Rgba16ui) => Ok(crate::StorageFormat::Rgba16Uint),
        Some(spirv::ImageFormat::Rgba16i) => Ok(crate::StorageFormat::Rgba16Sint),
        Some(spirv::ImageFormat::Rgba16f) => Ok(crate::StorageFormat::Rgba16Float),
        Some(spirv::ImageFormat::Rgba32ui) => Ok(crate::StorageFormat::Rgba32Uint),
        Some(spirv::ImageFormat::Rgba32i) => Ok(crate::StorageFormat::Rgba32Sint),
        Some(spirv::ImageFormat::Rgba32f) => Ok(crate::StorageFormat::Rgba32Float),
        _ => Err(Error::UnsupportedImageFormat(word)),
    }
}
pub(super) fn map_width(word: spirv::Word) -> Result<crate::Bytes, Error> {
(word >> 3) // bits to bytes
.try_into()
.map_err(|_| Error::InvalidTypeWidth(word))
}
/// Maps a SPIR-V `BuiltIn` decoration operand to a Naga [`crate::BuiltIn`].
///
/// `invariant` only affects `Position`/`FragCoord`, the one Naga builtin that
/// carries an invariance flag. Unknown builtins produce
/// [`Error::UnsupportedBuiltIn`].
pub(super) fn map_builtin(word: spirv::Word, invariant: bool) -> Result<crate::BuiltIn, Error> {
    use spirv::BuiltIn as Bi;
    Ok(match spirv::BuiltIn::from_u32(word) {
        Some(Bi::Position | Bi::FragCoord) => crate::BuiltIn::Position { invariant },
        Some(Bi::ViewIndex) => crate::BuiltIn::ViewIndex,
        // vertex
        Some(Bi::BaseInstance) => crate::BuiltIn::BaseInstance,
        Some(Bi::BaseVertex) => crate::BuiltIn::BaseVertex,
        Some(Bi::ClipDistance) => crate::BuiltIn::ClipDistance,
        Some(Bi::CullDistance) => crate::BuiltIn::CullDistance,
        Some(Bi::InstanceIndex) => crate::BuiltIn::InstanceIndex,
        Some(Bi::PointSize) => crate::BuiltIn::PointSize,
        Some(Bi::VertexIndex) => crate::BuiltIn::VertexIndex,
        // fragment
        Some(Bi::FragDepth) => crate::BuiltIn::FragDepth,
        Some(Bi::PointCoord) => crate::BuiltIn::PointCoord,
        Some(Bi::FrontFacing) => crate::BuiltIn::FrontFacing,
        Some(Bi::PrimitiveId) => crate::BuiltIn::PrimitiveIndex,
        Some(Bi::SampleId) => crate::BuiltIn::SampleIndex,
        Some(Bi::SampleMask) => crate::BuiltIn::SampleMask,
        // compute
        Some(Bi::GlobalInvocationId) => crate::BuiltIn::GlobalInvocationId,
        Some(Bi::LocalInvocationId) => crate::BuiltIn::LocalInvocationId,
        Some(Bi::LocalInvocationIndex) => crate::BuiltIn::LocalInvocationIndex,
        Some(Bi::WorkgroupId) => crate::BuiltIn::WorkGroupId,
        Some(Bi::WorkgroupSize) => crate::BuiltIn::WorkGroupSize,
        Some(Bi::NumWorkgroups) => crate::BuiltIn::NumWorkGroups,
        _ => return Err(Error::UnsupportedBuiltIn(word)),
    })
}
/// Maps a SPIR-V storage class to the frontend's [`ExtendedClass`](super::ExtendedClass).
///
/// `Input`/`Output` get dedicated variants (their variables become entry
/// point IO); everything else maps to a Naga address space. Unknown classes
/// produce [`Error::UnsupportedStorageClass`].
pub(super) fn map_storage_class(word: spirv::Word) -> Result<super::ExtendedClass, Error> {
    use super::ExtendedClass as Ec;
    use spirv::StorageClass as Sc;
    Ok(match Sc::from_u32(word) {
        Some(Sc::Function) => Ec::Global(crate::AddressSpace::Function),
        Some(Sc::Input) => Ec::Input,
        Some(Sc::Output) => Ec::Output,
        Some(Sc::Private) => Ec::Global(crate::AddressSpace::Private),
        Some(Sc::UniformConstant) => Ec::Global(crate::AddressSpace::Handle),
        Some(Sc::StorageBuffer) => Ec::Global(crate::AddressSpace::Storage {
            //Note: this is restricted by decorations later
            access: crate::StorageAccess::all(),
        }),
        // we expect the `Storage` case to be filtered out before calling this function.
        Some(Sc::Uniform) => Ec::Global(crate::AddressSpace::Uniform),
        Some(Sc::Workgroup) => Ec::Global(crate::AddressSpace::WorkGroup),
        Some(Sc::PushConstant) => Ec::Global(crate::AddressSpace::PushConstant),
        _ => return Err(Error::UnsupportedStorageClass(word)),
    })
}

View file

@ -0,0 +1,129 @@
use super::ModuleState;
use crate::arena::Handle;
/// Errors that can occur while parsing a SPIR-V module.
///
/// Display strings come from the `thiserror` attributes; `%{n}` in a message
/// denotes a SPIR-V result id or raw word operand.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("invalid header")]
    InvalidHeader,
    #[error("invalid word count")]
    InvalidWordCount,
    #[error("unknown instruction {0}")]
    UnknownInstruction(u16),
    #[error("unknown capability %{0}")]
    UnknownCapability(spirv::Word),
    #[error("unsupported instruction {1:?} at {0:?}")]
    UnsupportedInstruction(ModuleState, spirv::Op),
    #[error("unsupported capability {0:?}")]
    UnsupportedCapability(spirv::Capability),
    #[error("unsupported extension {0}")]
    UnsupportedExtension(String),
    #[error("unsupported extension set {0}")]
    UnsupportedExtSet(String),
    #[error("unsupported extension instantiation set %{0}")]
    UnsupportedExtInstSet(spirv::Word),
    #[error("unsupported extension instantiation %{0}")]
    UnsupportedExtInst(spirv::Word),
    #[error("unsupported type {0:?}")]
    UnsupportedType(Handle<crate::Type>),
    #[error("unsupported execution model %{0}")]
    UnsupportedExecutionModel(spirv::Word),
    #[error("unsupported execution mode %{0}")]
    UnsupportedExecutionMode(spirv::Word),
    #[error("unsupported storage class %{0}")]
    UnsupportedStorageClass(spirv::Word),
    #[error("unsupported image dimension %{0}")]
    UnsupportedImageDim(spirv::Word),
    #[error("unsupported image format %{0}")]
    UnsupportedImageFormat(spirv::Word),
    #[error("unsupported builtin %{0}")]
    UnsupportedBuiltIn(spirv::Word),
    #[error("unsupported control flow %{0}")]
    UnsupportedControlFlow(spirv::Word),
    #[error("unsupported binary operator %{0}")]
    UnsupportedBinaryOperator(spirv::Word),
    #[error("Naga supports OpTypeRuntimeArray in the StorageBuffer storage class only")]
    UnsupportedRuntimeArrayStorageClass,
    #[error("unsupported matrix stride {stride} for a {columns}x{rows} matrix with scalar width={width}")]
    UnsupportedMatrixStride {
        stride: u32,
        columns: u8,
        rows: u8,
        width: u8,
    },
    #[error("unknown binary operator {0:?}")]
    UnknownBinaryOperator(spirv::Op),
    #[error("unknown relational function {0:?}")]
    UnknownRelationalFunction(spirv::Op),
    #[error("invalid parameter {0:?}")]
    InvalidParameter(spirv::Op),
    #[error("invalid operand count {1} for {0:?}")]
    InvalidOperandCount(spirv::Op, u16),
    #[error("invalid operand")]
    InvalidOperand,
    #[error("invalid id %{0}")]
    InvalidId(spirv::Word),
    #[error("invalid decoration %{0}")]
    InvalidDecoration(spirv::Word),
    #[error("invalid type width %{0}")]
    InvalidTypeWidth(spirv::Word),
    #[error("invalid sign %{0}")]
    InvalidSign(spirv::Word),
    #[error("invalid inner type %{0}")]
    InvalidInnerType(spirv::Word),
    #[error("invalid vector size %{0}")]
    InvalidVectorSize(spirv::Word),
    #[error("invalid access type %{0}")]
    InvalidAccessType(spirv::Word),
    #[error("invalid access {0:?}")]
    InvalidAccess(crate::Expression),
    #[error("invalid access index %{0}")]
    InvalidAccessIndex(spirv::Word),
    #[error("invalid index type %{0}")]
    InvalidIndexType(spirv::Word),
    #[error("invalid binding %{0}")]
    InvalidBinding(spirv::Word),
    #[error("invalid global var {0:?}")]
    InvalidGlobalVar(crate::Expression),
    #[error("invalid image/sampler expression {0:?}")]
    InvalidImageExpression(crate::Expression),
    #[error("invalid image base type {0:?}")]
    InvalidImageBaseType(Handle<crate::Type>),
    #[error("invalid image {0:?}")]
    InvalidImage(Handle<crate::Type>),
    #[error("invalid as type {0:?}")]
    InvalidAsType(Handle<crate::Type>),
    #[error("invalid vector type {0:?}")]
    InvalidVectorType(Handle<crate::Type>),
    #[error("inconsistent comparison sampling {0:?}")]
    InconsistentComparisonSampling(Handle<crate::GlobalVariable>),
    #[error("wrong function result type %{0}")]
    WrongFunctionResultType(spirv::Word),
    #[error("wrong function argument type %{0}")]
    WrongFunctionArgumentType(spirv::Word),
    #[error("missing decoration {0:?}")]
    MissingDecoration(spirv::Decoration),
    #[error("bad string")]
    BadString,
    #[error("incomplete data")]
    IncompleteData,
    #[error("invalid terminator")]
    InvalidTerminator,
    #[error("invalid edge classification")]
    InvalidEdgeClassification,
    #[error("cycle detected in the CFG during traversal at {0}")]
    ControlFlowGraphCycle(crate::front::spv::BlockId),
    #[error("recursive function call %{0}")]
    FunctionCallCycle(spirv::Word),
    #[error("invalid array size {0:?}")]
    InvalidArraySize(Handle<crate::Constant>),
    #[error("invalid barrier scope %{0}")]
    InvalidBarrierScope(spirv::Word),
    #[error("invalid barrier memory semantics %{0}")]
    InvalidBarrierMemorySemantics(spirv::Word),
    #[error(
        "arrays of images / samplers are supported only through bindings for \
         now (i.e. you can't create an array of images or samplers that doesn't \
         come from a binding)"
    )]
    NonBindingArrayOfImageOrSamplers,
}

View file

@ -0,0 +1,674 @@
use crate::{
arena::{Arena, Handle},
front::spv::{BlockContext, BodyIndex},
};
use super::{Error, Instruction, LookupExpression, LookupHelper as _};
use crate::front::Emitter;
/// Identifier of a SPIR-V basic block, as referenced by branch/merge
/// instructions.
pub type BlockId = u32;
/// Decoded merge information for a structured control-flow construct.
#[derive(Copy, Clone, Debug)]
pub struct MergeInstruction {
    /// Block where the construct's control flow re-converges.
    pub merge_block_id: BlockId,
    /// Continue target block id, when the instruction carried one.
    pub continue_block_id: Option<BlockId>,
}
impl<I: Iterator<Item = u32>> super::Frontend<I> {
    /// Registers a function call. It will generate a dummy handle to call, which
    /// gets resolved after all the functions are processed.
    ///
    /// The dummy handle is appended to `self.dummy_functions`; the real target
    /// id is pushed to `deferred_function_calls`, and a `from -> to` edge is
    /// recorded in `function_call_graph` (used elsewhere to detect recursion —
    /// see `Error::FunctionCallCycle`).
    pub(super) fn add_call(
        &mut self,
        from: spirv::Word,
        to: spirv::Word,
    ) -> Handle<crate::Function> {
        let dummy_handle = self
            .dummy_functions
            .append(crate::Function::default(), Default::default());
        self.deferred_function_calls.push(to);
        self.function_call_graph.add_edge(from, to, ());
        dummy_handle
    }

    /// Parses the body of an `OpFunction` (its operands through `OpFunctionEnd`)
    /// into a Naga [`crate::Function`] appended to `module.functions`.
    ///
    /// In order, this:
    /// 1. reads the function header and validates the result type against the
    ///    function type's declared return type;
    /// 2. reads `OpFunctionParameter`s into `fun.arguments`;
    /// 3. scans the basic blocks via `next_block`, building a `BlockContext`;
    /// 4. emits `Store`s that initialize the local variables backing `phi`s;
    /// 5. lowers the `BlockContext` into the function body;
    /// 6. if this function is an entry point, synthesizes a `{name}_wrap`
    ///    wrapper that copies inputs to privates, calls the function, and
    ///    composes the outputs into a result struct.
    pub(super) fn parse_function(&mut self, module: &mut crate::Module) -> Result<(), Error> {
        let start = self.data_offset;
        // Per-function lookup state from any previous function must not leak in.
        self.lookup_expression.clear();
        self.lookup_load_override.clear();
        self.lookup_sampled_image.clear();

        let result_type_id = self.next()?;
        let fun_id = self.next()?;
        let _fun_control = self.next()?;
        let fun_type_id = self.next()?;

        let mut fun = {
            let ft = self.lookup_function_type.lookup(fun_type_id)?;
            if ft.return_type_id != result_type_id {
                return Err(Error::WrongFunctionResultType(result_type_id));
            }
            crate::Function {
                name: self.future_decor.remove(&fun_id).and_then(|dec| dec.name),
                arguments: Vec::with_capacity(ft.parameter_type_ids.len()),
                result: if self.lookup_void_type == Some(result_type_id) {
                    None
                } else {
                    let lookup_result_ty = self.lookup_type.lookup(result_type_id)?;
                    Some(crate::FunctionResult {
                        ty: lookup_result_ty.handle,
                        binding: None,
                    })
                },
                local_variables: Arena::new(),
                expressions: self
                    .make_expression_storage(&module.global_variables, &module.constants),
                named_expressions: crate::NamedExpressions::default(),
                body: crate::Block::new(),
            }
        };

        // read parameters
        // (capacity was sized above from the function type's parameter list)
        for i in 0..fun.arguments.capacity() {
            let start = self.data_offset;
            match self.next_inst()? {
                Instruction {
                    op: spirv::Op::FunctionParameter,
                    wc: 3,
                } => {
                    let type_id = self.next()?;
                    let id = self.next()?;
                    let handle = fun.expressions.append(
                        crate::Expression::FunctionArgument(i as u32),
                        self.span_from(start),
                    );
                    self.lookup_expression.insert(
                        id,
                        LookupExpression {
                            handle,
                            type_id,
                            // Setting this to an invalid id will cause get_expr_handle
                            // to default to the main body making sure no load/stores
                            // are added.
                            block_id: 0,
                        },
                    );
                    //Note: we redo the lookup in order to work around `self` borrowing
                    if type_id
                        != self
                            .lookup_function_type
                            .lookup(fun_type_id)?
                            .parameter_type_ids[i]
                    {
                        return Err(Error::WrongFunctionArgumentType(type_id));
                    }
                    let ty = self.lookup_type.lookup(type_id)?.handle;
                    let decor = self.future_decor.remove(&id).unwrap_or_default();
                    fun.arguments.push(crate::FunctionArgument {
                        name: decor.name,
                        ty,
                        binding: None,
                    });
                }
                Instruction { op, .. } => return Err(Error::InvalidParameter(op)),
            }
        }

        // Read body
        self.function_call_graph.add_node(fun_id);
        let mut parameters_sampling =
            vec![super::image::SamplingFlags::empty(); fun.arguments.len()];

        let mut block_ctx = BlockContext {
            phis: Default::default(),
            blocks: Default::default(),
            body_for_label: Default::default(),
            mergers: Default::default(),
            bodies: Default::default(),
            function_id: fun_id,
            expressions: &mut fun.expressions,
            local_arena: &mut fun.local_variables,
            const_arena: &mut module.constants,
            const_expressions: &mut module.const_expressions,
            type_arena: &module.types,
            global_arena: &module.global_variables,
            arguments: &fun.arguments,
            parameter_sampling: &mut parameters_sampling,
        };
        // Insert the main body, whose parent is also itself (body index 0).
        block_ctx.bodies.push(super::Body::with_parent(0));

        // Scan the blocks and add them as nodes
        loop {
            let fun_inst = self.next_inst()?;
            log::debug!("{:?}", fun_inst.op);
            match fun_inst.op {
                spirv::Op::Line => {
                    // Debug line info: consume and ignore.
                    fun_inst.expect(4)?;
                    let _file_id = self.next()?;
                    let _row_id = self.next()?;
                    let _col_id = self.next()?;
                }
                spirv::Op::Label => {
                    // Read the label ID
                    fun_inst.expect(2)?;
                    let block_id = self.next()?;
                    self.next_block(block_id, &mut block_ctx)?;
                }
                spirv::Op::FunctionEnd => {
                    fun_inst.expect(1)?;
                    break;
                }
                _ => {
                    return Err(Error::UnsupportedInstruction(self.state, fun_inst.op));
                }
            }
        }

        // Optional debug aid: dump the whole block context to a file.
        if let Some(ref prefix) = self.options.block_ctx_dump_prefix {
            let dump_suffix = match self.lookup_entry_point.get(&fun_id) {
                Some(ep) => format!("block_ctx.{:?}-{}.txt", ep.stage, ep.name),
                None => format!("block_ctx.Fun-{}.txt", module.functions.len()),
            };
            let dest = prefix.join(dump_suffix);
            let dump = format!("{block_ctx:#?}");
            if let Err(e) = std::fs::write(&dest, dump) {
                log::error!("Unable to dump the block context into {:?}: {}", dest, e);
            }
        }

        // Emit `Store` statements to properly initialize all the local variables we
        // created for `phi` expressions.
        //
        // Note that get_expr_handle also contributes slightly odd entries to this table,
        // to get the spill.
        for phi in block_ctx.phis.iter() {
            // Get a pointer to the local variable for the phi's value.
            let phi_pointer = block_ctx.expressions.append(
                crate::Expression::LocalVariable(phi.local),
                crate::Span::default(),
            );

            // At the end of each of `phi`'s predecessor blocks, store the corresponding
            // source value in the phi's local variable.
            for &(source, predecessor) in phi.expressions.iter() {
                let source_lexp = &self.lookup_expression[&source];
                let predecessor_body_idx = block_ctx.body_for_label[&predecessor];
                // If the expression is a global/argument it will have a 0 block
                // id so we must use a default value instead of panicking
                let source_body_idx = block_ctx
                    .body_for_label
                    .get(&source_lexp.block_id)
                    .copied()
                    .unwrap_or(0);

                // If the Naga `Expression` generated for `source` is in scope, then we
                // can simply store that in the phi's local variable.
                //
                // Otherwise, spill the source value to a local variable in the block that
                // defines it. (We know this store dominates the predecessor; otherwise,
                // the phi wouldn't have been able to refer to that source expression in
                // the first place.) Then, the predecessor block can count on finding the
                // source's value in that local variable.
                let value = if super::is_parent(predecessor_body_idx, source_body_idx, &block_ctx) {
                    source_lexp.handle
                } else {
                    // The source SPIR-V expression is not defined in the phi's
                    // predecessor block, nor is it a globally available expression. So it
                    // must be defined off in some other block that merely dominates the
                    // predecessor. This means that the corresponding Naga `Expression`
                    // may not be in scope in the predecessor block.
                    //
                    // In the block that defines `source`, spill it to a fresh local
                    // variable, to ensure we can still use it at the end of the
                    // predecessor.
                    let ty = self.lookup_type[&source_lexp.type_id].handle;
                    let local = block_ctx.local_arena.append(
                        crate::LocalVariable {
                            name: None,
                            ty,
                            init: None,
                        },
                        crate::Span::default(),
                    );

                    let pointer = block_ctx.expressions.append(
                        crate::Expression::LocalVariable(local),
                        crate::Span::default(),
                    );

                    // Get the spilled value of the source expression.
                    let start = block_ctx.expressions.len();
                    let expr = block_ctx
                        .expressions
                        .append(crate::Expression::Load { pointer }, crate::Span::default());
                    let range = block_ctx.expressions.range_from(start);
                    block_ctx
                        .blocks
                        .get_mut(&predecessor)
                        .unwrap()
                        .push(crate::Statement::Emit(range), crate::Span::default());

                    // At the end of the block that defines it, spill the source
                    // expression's value.
                    block_ctx
                        .blocks
                        .get_mut(&source_lexp.block_id)
                        .unwrap()
                        .push(
                            crate::Statement::Store {
                                pointer,
                                value: source_lexp.handle,
                            },
                            crate::Span::default(),
                        );

                    expr
                };

                // At the end of the phi predecessor block, store the source
                // value in the phi's value.
                block_ctx.blocks.get_mut(&predecessor).unwrap().push(
                    crate::Statement::Store {
                        pointer: phi_pointer,
                        value,
                    },
                    crate::Span::default(),
                )
            }
        }

        fun.body = block_ctx.lower();

        // done
        let fun_handle = module.functions.append(fun, self.span_from_with_op(start));
        self.lookup_function.insert(
            fun_id,
            super::LookupFunction {
                handle: fun_handle,
                parameters_sampling,
            },
        );

        if let Some(ep) = self.lookup_entry_point.remove(&fun_id) {
            // create a wrapping function
            let mut function = crate::Function {
                name: Some(format!("{}_wrap", ep.name)),
                arguments: Vec::new(),
                result: None,
                local_variables: Arena::new(),
                expressions: Arena::new(),
                named_expressions: crate::NamedExpressions::default(),
                body: crate::Block::new(),
            };

            // 1. copy the inputs from arguments to privates
            for &v_id in ep.variable_ids.iter() {
                let lvar = self.lookup_variable.lookup(v_id)?;
                if let super::Variable::Input(ref arg) = lvar.inner {
                    let span = module.global_variables.get_span(lvar.handle);
                    let arg_expr = function.expressions.append(
                        crate::Expression::FunctionArgument(function.arguments.len() as u32),
                        span,
                    );
                    let load_expr = if arg.ty == module.global_variables[lvar.handle].ty {
                        arg_expr
                    } else {
                        // The only case where the type is different is if we need to treat
                        // unsigned integer as signed.
                        let mut emitter = Emitter::default();
                        emitter.start(&function.expressions);
                        let handle = function.expressions.append(
                            crate::Expression::As {
                                expr: arg_expr,
                                kind: crate::ScalarKind::Sint,
                                convert: Some(4),
                            },
                            span,
                        );
                        function.body.extend(emitter.finish(&function.expressions));
                        handle
                    };
                    function.body.push(
                        crate::Statement::Store {
                            pointer: function
                                .expressions
                                .append(crate::Expression::GlobalVariable(lvar.handle), span),
                            value: load_expr,
                        },
                        span,
                    );

                    let mut arg = arg.clone();
                    if ep.stage == crate::ShaderStage::Fragment {
                        if let Some(ref mut binding) = arg.binding {
                            binding.apply_default_interpolation(&module.types[arg.ty].inner);
                        }
                    }
                    function.arguments.push(arg);
                }
            }
            // 2. call the wrapped function
            let fake_id = !(module.entry_points.len() as u32); // doesn't matter, as long as it's not a collision
            let dummy_handle = self.add_call(fake_id, fun_id);
            function.body.push(
                crate::Statement::Call {
                    function: dummy_handle,
                    arguments: Vec::new(),
                    result: None,
                },
                crate::Span::default(),
            );

            // 3. copy the outputs from privates to the result
            let mut members = Vec::new();
            let mut components = Vec::new();
            for &v_id in ep.variable_ids.iter() {
                let lvar = self.lookup_variable.lookup(v_id)?;
                if let super::Variable::Output(ref result) = lvar.inner {
                    let span = module.global_variables.get_span(lvar.handle);
                    let expr_handle = function
                        .expressions
                        .append(crate::Expression::GlobalVariable(lvar.handle), span);

                    // Cull problematic builtins of gl_PerVertex.
                    // See the docs for `Frontend::gl_per_vertex_builtin_access`.
                    {
                        let ty = &module.types[result.ty];
                        match ty.inner {
                            crate::TypeInner::Struct {
                                members: ref original_members,
                                span,
                            } if ty.name.as_deref() == Some("gl_PerVertex") => {
                                let mut new_members = original_members.clone();
                                for member in &mut new_members {
                                    if let Some(crate::Binding::BuiltIn(built_in)) = member.binding
                                    {
                                        if !self.gl_per_vertex_builtin_access.contains(&built_in) {
                                            member.binding = None
                                        }
                                    }
                                }
                                if &new_members != original_members {
                                    module.types.replace(
                                        result.ty,
                                        crate::Type {
                                            name: ty.name.clone(),
                                            inner: crate::TypeInner::Struct {
                                                members: new_members,
                                                span,
                                            },
                                        },
                                    );
                                }
                            }
                            _ => {}
                        }
                    }

                    match module.types[result.ty].inner {
                        // Struct outputs are flattened: one result member per
                        // bound struct member.
                        crate::TypeInner::Struct {
                            members: ref sub_members,
                            ..
                        } => {
                            for (index, sm) in sub_members.iter().enumerate() {
                                if sm.binding.is_none() {
                                    continue;
                                }
                                let mut sm = sm.clone();

                                if let Some(ref mut binding) = sm.binding {
                                    if ep.stage == crate::ShaderStage::Vertex {
                                        binding.apply_default_interpolation(
                                            &module.types[sm.ty].inner,
                                        );
                                    }
                                }

                                members.push(sm);

                                components.push(function.expressions.append(
                                    crate::Expression::AccessIndex {
                                        base: expr_handle,
                                        index: index as u32,
                                    },
                                    span,
                                ));
                            }
                        }
                        ref inner => {
                            let mut binding = result.binding.clone();
                            if let Some(ref mut binding) = binding {
                                if ep.stage == crate::ShaderStage::Vertex {
                                    binding.apply_default_interpolation(inner);
                                }
                            }

                            members.push(crate::StructMember {
                                name: None,
                                ty: result.ty,
                                binding,
                                offset: 0,
                            });
                            // populate just the globals first, then do `Load` in a
                            // separate step, so that we can get a range.
                            components.push(expr_handle);
                        }
                    }
                }
            }

            // Flip the Y coordinate of the built-in position output, if requested.
            for (member_index, member) in members.iter().enumerate() {
                match member.binding {
                    Some(crate::Binding::BuiltIn(crate::BuiltIn::Position { .. }))
                        if self.options.adjust_coordinate_space =>
                    {
                        let mut emitter = Emitter::default();
                        emitter.start(&function.expressions);
                        let global_expr = components[member_index];
                        let span = function.expressions.get_span(global_expr);
                        let access_expr = function.expressions.append(
                            crate::Expression::AccessIndex {
                                base: global_expr,
                                index: 1,
                            },
                            span,
                        );
                        let load_expr = function.expressions.append(
                            crate::Expression::Load {
                                pointer: access_expr,
                            },
                            span,
                        );
                        let neg_expr = function.expressions.append(
                            crate::Expression::Unary {
                                op: crate::UnaryOperator::Negate,
                                expr: load_expr,
                            },
                            span,
                        );
                        function.body.extend(emitter.finish(&function.expressions));
                        function.body.push(
                            crate::Statement::Store {
                                pointer: access_expr,
                                value: neg_expr,
                            },
                            span,
                        );
                    }
                    _ => {}
                }
            }

            // Now replace each global pointer in `components` with a `Load` of it.
            let mut emitter = Emitter::default();
            emitter.start(&function.expressions);
            for component in components.iter_mut() {
                let load_expr = crate::Expression::Load {
                    pointer: *component,
                };
                let span = function.expressions.get_span(*component);
                *component = function.expressions.append(load_expr, span);
            }

            match members[..] {
                // No outputs: the wrapper returns nothing.
                [] => {}
                // A single output is returned directly, without a wrapper struct.
                [ref member] => {
                    function.body.extend(emitter.finish(&function.expressions));
                    let span = function.expressions.get_span(components[0]);
                    function.body.push(
                        crate::Statement::Return {
                            value: components.first().cloned(),
                        },
                        span,
                    );
                    function.result = Some(crate::FunctionResult {
                        ty: member.ty,
                        binding: member.binding.clone(),
                    });
                }
                // Multiple outputs are composed into a fresh anonymous struct.
                _ => {
                    let span = crate::Span::total_span(
                        components.iter().map(|h| function.expressions.get_span(*h)),
                    );
                    let ty = module.types.insert(
                        crate::Type {
                            name: None,
                            inner: crate::TypeInner::Struct {
                                members,
                                span: 0xFFFF, // shouldn't matter
                            },
                        },
                        span,
                    );
                    let result_expr = function
                        .expressions
                        .append(crate::Expression::Compose { ty, components }, span);
                    function.body.extend(emitter.finish(&function.expressions));
                    function.body.push(
                        crate::Statement::Return {
                            value: Some(result_expr),
                        },
                        span,
                    );
                    function.result = Some(crate::FunctionResult { ty, binding: None });
                }
            }

            module.entry_points.push(crate::EntryPoint {
                name: ep.name,
                stage: ep.stage,
                early_depth_test: ep.early_depth_test,
                workgroup_size: ep.workgroup_size,
                function,
            });
        }

        Ok(())
    }
}
impl<'function> BlockContext<'function> {
    /// Bundles the type/constant arenas of this context into a
    /// [`crate::proc::GlobalCtx`] view.
    pub(super) fn gctx(&self) -> crate::proc::GlobalCtx {
        crate::proc::GlobalCtx {
            types: self.type_arena,
            constants: self.const_arena,
            const_expressions: self.const_expressions,
        }
    }

    /// Consumes the `BlockContext`, producing an IR [`Block`](crate::Block).
    ///
    /// Recursively walks the `Body` tree starting at body index 0 (the main
    /// body), splicing the statements of each referenced basic block and
    /// rebuilding structured `If`/`Loop`/`Switch`/`Break`/`Continue`
    /// statements from the recorded `BodyFragment`s.
    fn lower(mut self) -> crate::Block {
        /// Lowers one `Body` (by index) into a Naga block, recursing into
        /// nested bodies for control-flow constructs.
        fn lower_impl(
            blocks: &mut crate::FastHashMap<spirv::Word, crate::Block>,
            bodies: &[super::Body],
            body_idx: BodyIndex,
        ) -> crate::Block {
            let mut block = crate::Block::new();

            for item in bodies[body_idx].data.iter() {
                match *item {
                    // Move the statements of a parsed basic block in place.
                    super::BodyFragment::BlockId(id) => block.append(blocks.get_mut(&id).unwrap()),
                    super::BodyFragment::If {
                        condition,
                        accept,
                        reject,
                    } => {
                        let accept = lower_impl(blocks, bodies, accept);
                        let reject = lower_impl(blocks, bodies, reject);

                        block.push(
                            crate::Statement::If {
                                condition,
                                accept,
                                reject,
                            },
                            crate::Span::default(),
                        )
                    }
                    super::BodyFragment::Loop {
                        body,
                        continuing,
                        break_if,
                    } => {
                        let body = lower_impl(blocks, bodies, body);
                        let continuing = lower_impl(blocks, bodies, continuing);

                        block.push(
                            crate::Statement::Loop {
                                body,
                                continuing,
                                break_if,
                            },
                            crate::Span::default(),
                        )
                    }
                    super::BodyFragment::Switch {
                        selector,
                        ref cases,
                        default,
                    } => {
                        let mut ir_cases: Vec<_> = cases
                            .iter()
                            .map(|&(value, body_idx)| {
                                let body = lower_impl(blocks, bodies, body_idx);

                                // Handle simple cases that would make a fallthrough statement unreachable code
                                let fall_through = body.last().map_or(true, |s| !s.is_terminator());

                                crate::SwitchCase {
                                    value: crate::SwitchValue::I32(value),
                                    body,
                                    fall_through,
                                }
                            })
                            .collect();
                        ir_cases.push(crate::SwitchCase {
                            value: crate::SwitchValue::Default,
                            body: lower_impl(blocks, bodies, default),
                            fall_through: false,
                        });

                        block.push(
                            crate::Statement::Switch {
                                selector,
                                cases: ir_cases,
                            },
                            crate::Span::default(),
                        )
                    }
                    super::BodyFragment::Break => {
                        block.push(crate::Statement::Break, crate::Span::default())
                    }
                    super::BodyFragment::Continue => {
                        block.push(crate::Statement::Continue, crate::Span::default())
                    }
                }
            }

            block
        }

        lower_impl(&mut self.blocks, &self.bodies, 0)
    }
}

View file

@ -0,0 +1,742 @@
use crate::arena::{Arena, Handle, UniqueArena};
use super::{Error, LookupExpression, LookupHelper as _};
/// The image/sampler expression pair produced by an `OpSampledImage`,
/// keyed elsewhere by the instruction's result id.
#[derive(Clone, Debug)]
pub(super) struct LookupSampledImage {
    /// Expression yielding the image being sampled.
    image: Handle<crate::Expression>,
    /// Expression yielding the sampler to use.
    sampler: Handle<crate::Expression>,
}
bitflags::bitflags! {
    /// Flags describing sampling method.
    ///
    /// Tracked per image/sampler global and per function parameter; a
    /// global observed with *both* flags set cannot be represented by a
    /// single Naga type (see `patch_comparison_type`).
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct SamplingFlags: u32 {
        /// Regular sampling.
        const REGULAR = 0x1;
        /// Comparison (depth-reference) sampling.
        const COMPARISON = 0x2;
    }
}
impl<'function> super::BlockContext<'function> {
fn get_image_expr_ty(
&self,
handle: Handle<crate::Expression>,
) -> Result<Handle<crate::Type>, Error> {
match self.expressions[handle] {
crate::Expression::GlobalVariable(handle) => Ok(self.global_arena[handle].ty),
crate::Expression::FunctionArgument(i) => Ok(self.arguments[i as usize].ty),
ref other => Err(Error::InvalidImageExpression(other.clone())),
}
}
}
/// Options of a sampling operation.
#[derive(Debug)]
pub struct SamplingOptions {
    /// Projection sampling: the division by W is expected to happen
    /// in the texture unit.
    pub project: bool,
    /// Depth comparison sampling with a reference value (the `Dref`
    /// operand of the `OpImageSample*Dref*` instructions).
    pub compare: bool,
}
/// Meaning of the trailing component of a SPIR-V texture coordinate,
/// beyond the dimensional coordinates themselves.
enum ExtraCoordinate {
    /// The extra component selects the array layer.
    ArrayLayer,
    /// The extra component is the projection divisor (the W term).
    Projection,
    /// No meaningful extra component; any excess is truncated away.
    Garbage,
}
/// Return the texture coordinates separated from the array layer,
/// and/or divided by the projection term.
///
/// The Proj sampling ops expect an extra coordinate for the W.
/// The arrayed (can't be Proj!) images expect an extra coordinate for the layer.
///
/// Returns the (possibly rebuilt) coordinate expression, plus an optional
/// array-index expression (only produced for `ExtraCoordinate::ArrayLayer`).
fn extract_image_coordinates(
    image_dim: crate::ImageDimension,
    extra_coordinate: ExtraCoordinate,
    base: Handle<crate::Expression>,
    coordinate_ty: Handle<crate::Type>,
    ctx: &mut super::BlockContext,
) -> (Handle<crate::Expression>, Option<Handle<crate::Expression>>) {
    // Size and scalar kind of the coordinate the shader actually supplied.
    let (given_size, kind) = match ctx.type_arena[coordinate_ty].inner {
        crate::TypeInner::Scalar { kind, .. } => (None, kind),
        crate::TypeInner::Vector { size, kind, .. } => (Some(size), kind),
        ref other => unreachable!("Unexpected texture coordinate {:?}", other),
    };

    // Size (and pre-registered vector type) the image dimension requires.
    let required_size = image_dim.required_coordinate_size();
    let required_ty = required_size.map(|size| {
        ctx.type_arena
            .get(&crate::Type {
                name: None,
                inner: crate::TypeInner::Vector {
                    size,
                    kind,
                    width: 4,
                },
            })
            .expect("Required coordinate type should have been set up by `parse_type_image`!")
    });
    // Accessor for the one component beyond the required ones (layer or W).
    let extra_expr = crate::Expression::AccessIndex {
        base,
        index: required_size.map_or(1, |size| size as u32),
    };

    let base_span = ctx.expressions.get_span(base);

    match extra_coordinate {
        ExtraCoordinate::ArrayLayer => {
            // Rebuild the coordinate without the layer component...
            let extracted = match required_size {
                None => ctx
                    .expressions
                    .append(crate::Expression::AccessIndex { base, index: 0 }, base_span),
                Some(size) => {
                    let mut components = Vec::with_capacity(size as usize);
                    for index in 0..size as u32 {
                        let comp = ctx
                            .expressions
                            .append(crate::Expression::AccessIndex { base, index }, base_span);
                        components.push(comp);
                    }
                    ctx.expressions.append(
                        crate::Expression::Compose {
                            ty: required_ty.unwrap(),
                            components,
                        },
                        base_span,
                    )
                }
            };
            // ...and convert the layer component to a signed integer index.
            let array_index_f32 = ctx.expressions.append(extra_expr, base_span);
            let array_index = ctx.expressions.append(
                crate::Expression::As {
                    kind: crate::ScalarKind::Sint,
                    expr: array_index_f32,
                    convert: Some(4),
                },
                base_span,
            );
            (extracted, Some(array_index))
        }
        ExtraCoordinate::Projection => {
            // Divide every required component by the trailing W term.
            let projection = ctx.expressions.append(extra_expr, base_span);
            let divided = match required_size {
                None => {
                    let temp = ctx
                        .expressions
                        .append(crate::Expression::AccessIndex { base, index: 0 }, base_span);
                    ctx.expressions.append(
                        crate::Expression::Binary {
                            op: crate::BinaryOperator::Divide,
                            left: temp,
                            right: projection,
                        },
                        base_span,
                    )
                }
                Some(size) => {
                    let mut components = Vec::with_capacity(size as usize);
                    for index in 0..size as u32 {
                        let temp = ctx
                            .expressions
                            .append(crate::Expression::AccessIndex { base, index }, base_span);
                        let comp = ctx.expressions.append(
                            crate::Expression::Binary {
                                op: crate::BinaryOperator::Divide,
                                left: temp,
                                right: projection,
                            },
                            base_span,
                        );
                        components.push(comp);
                    }
                    ctx.expressions.append(
                        crate::Expression::Compose {
                            ty: required_ty.unwrap(),
                            components,
                        },
                        base_span,
                    )
                }
            };
            (divided, None)
        }
        // Already the right size: pass through untouched.
        ExtraCoordinate::Garbage if given_size == required_size => (base, None),
        ExtraCoordinate::Garbage => {
            // Truncate excess components via a swizzle (or index, for scalars).
            use crate::SwizzleComponent as Sc;
            let cut_expr = match required_size {
                None => crate::Expression::AccessIndex { base, index: 0 },
                Some(size) => crate::Expression::Swizzle {
                    size,
                    vector: base,
                    pattern: [Sc::X, Sc::Y, Sc::Z, Sc::W],
                },
            };
            (ctx.expressions.append(cut_expr, base_span), None)
        }
    }
}
/// Rewrites `var`'s type for comparison use, based on the observed
/// `SamplingFlags`.
///
/// Returns `true` when the variable's type is (now) consistent, and
/// `false` when the variable was seen with *both* regular and comparison
/// sampling, which a single Naga type cannot express.
pub(super) fn patch_comparison_type(
    flags: SamplingFlags,
    var: &mut crate::GlobalVariable,
    arena: &mut UniqueArena<crate::Type>,
) -> bool {
    // Never sampled with comparison: nothing to patch.
    if !flags.contains(SamplingFlags::COMPARISON) {
        return true;
    }
    // Sampled both ways: unrepresentable, report the inconsistency.
    if flags == SamplingFlags::all() {
        return false;
    }

    log::debug!("Flipping comparison for {:?}", var);
    let name = arena[var.ty].name.clone();
    let original_ty_span = arena.get_span(var.ty);
    // Map the sampled-image / sampler type to its comparison counterpart.
    let ty_inner = match arena[var.ty].inner {
        crate::TypeInner::Image {
            class: crate::ImageClass::Sampled { multi, .. },
            dim,
            arrayed,
        } => crate::TypeInner::Image {
            class: crate::ImageClass::Depth { multi },
            dim,
            arrayed,
        },
        crate::TypeInner::Sampler { .. } => crate::TypeInner::Sampler { comparison: true },
        ref other => unreachable!("Unexpected type for comparison mutation: {:?}", other),
    };

    var.ty = arena.insert(
        crate::Type {
            name,
            inner: ty_inner,
        },
        original_ty_span,
    );
    true
}
impl<I: Iterator<Item = u32>> super::Frontend<I> {
pub(super) fn parse_image_couple(&mut self) -> Result<(), Error> {
let _result_type_id = self.next()?;
let result_id = self.next()?;
let image_id = self.next()?;
let sampler_id = self.next()?;
let image_lexp = self.lookup_expression.lookup(image_id)?;
let sampler_lexp = self.lookup_expression.lookup(sampler_id)?;
self.lookup_sampled_image.insert(
result_id,
LookupSampledImage {
image: image_lexp.handle,
sampler: sampler_lexp.handle,
},
);
Ok(())
}
pub(super) fn parse_image_uncouple(&mut self, block_id: spirv::Word) -> Result<(), Error> {
let result_type_id = self.next()?;
let result_id = self.next()?;
let sampled_image_id = self.next()?;
self.lookup_expression.insert(
result_id,
LookupExpression {
handle: self.lookup_sampled_image.lookup(sampled_image_id)?.image,
type_id: result_type_id,
block_id,
},
);
Ok(())
}
pub(super) fn parse_image_write(
&mut self,
words_left: u16,
ctx: &mut super::BlockContext,
emitter: &mut crate::front::Emitter,
block: &mut crate::Block,
body_idx: usize,
) -> Result<crate::Statement, Error> {
let image_id = self.next()?;
let coordinate_id = self.next()?;
let value_id = self.next()?;
let image_ops = if words_left != 0 { self.next()? } else { 0 };
if image_ops != 0 {
let other = spirv::ImageOperands::from_bits_truncate(image_ops);
log::warn!("Unknown image write ops {:?}", other);
for _ in 1..words_left {
self.next()?;
}
}
let image_lexp = self.lookup_expression.lookup(image_id)?;
let image_ty = ctx.get_image_expr_ty(image_lexp.handle)?;
let coord_lexp = self.lookup_expression.lookup(coordinate_id)?;
let coord_handle =
self.get_expr_handle(coordinate_id, coord_lexp, ctx, emitter, block, body_idx);
let coord_type_handle = self.lookup_type.lookup(coord_lexp.type_id)?.handle;
let (coordinate, array_index) = match ctx.type_arena[image_ty].inner {
crate::TypeInner::Image {
dim,
arrayed,
class: _,
} => extract_image_coordinates(
dim,
if arrayed {
ExtraCoordinate::ArrayLayer
} else {
ExtraCoordinate::Garbage
},
coord_handle,
coord_type_handle,
ctx,
),
_ => return Err(Error::InvalidImage(image_ty)),
};
let value_lexp = self.lookup_expression.lookup(value_id)?;
let value = self.get_expr_handle(value_id, value_lexp, ctx, emitter, block, body_idx);
Ok(crate::Statement::ImageStore {
image: image_lexp.handle,
coordinate,
array_index,
value,
})
}
pub(super) fn parse_image_load(
&mut self,
mut words_left: u16,
ctx: &mut super::BlockContext,
emitter: &mut crate::front::Emitter,
block: &mut crate::Block,
block_id: spirv::Word,
body_idx: usize,
) -> Result<(), Error> {
let start = self.data_offset;
let result_type_id = self.next()?;
let result_id = self.next()?;
let image_id = self.next()?;
let coordinate_id = self.next()?;
let mut image_ops = if words_left != 0 {
words_left -= 1;
self.next()?
} else {
0
};
let mut sample = None;
let mut level = None;
while image_ops != 0 {
let bit = 1 << image_ops.trailing_zeros();
match spirv::ImageOperands::from_bits_truncate(bit) {
spirv::ImageOperands::LOD => {
let lod_expr = self.next()?;
let lod_lexp = self.lookup_expression.lookup(lod_expr)?;
let lod_handle =
self.get_expr_handle(lod_expr, lod_lexp, ctx, emitter, block, body_idx);
level = Some(lod_handle);
words_left -= 1;
}
spirv::ImageOperands::SAMPLE => {
let sample_expr = self.next()?;
let sample_handle = self.lookup_expression.lookup(sample_expr)?.handle;
sample = Some(sample_handle);
words_left -= 1;
}
other => {
log::warn!("Unknown image load op {:?}", other);
for _ in 0..words_left {
self.next()?;
}
break;
}
}
image_ops ^= bit;
}
// No need to call get_expr_handle here since only globals/arguments are
// allowed as images and they are always in the root scope
let image_lexp = self.lookup_expression.lookup(image_id)?;
let image_ty = ctx.get_image_expr_ty(image_lexp.handle)?;
let coord_lexp = self.lookup_expression.lookup(coordinate_id)?;
let coord_handle =
self.get_expr_handle(coordinate_id, coord_lexp, ctx, emitter, block, body_idx);
let coord_type_handle = self.lookup_type.lookup(coord_lexp.type_id)?.handle;
let (coordinate, array_index) = match ctx.type_arena[image_ty].inner {
crate::TypeInner::Image {
dim,
arrayed,
class: _,
} => extract_image_coordinates(
dim,
if arrayed {
ExtraCoordinate::ArrayLayer
} else {
ExtraCoordinate::Garbage
},
coord_handle,
coord_type_handle,
ctx,
),
_ => return Err(Error::InvalidImage(image_ty)),
};
let expr = crate::Expression::ImageLoad {
image: image_lexp.handle,
coordinate,
array_index,
sample,
level,
};
self.lookup_expression.insert(
result_id,
LookupExpression {
handle: ctx.expressions.append(expr, self.span_from_with_op(start)),
type_id: result_type_id,
block_id,
},
);
Ok(())
}
#[allow(clippy::too_many_arguments)]
pub(super) fn parse_image_sample(
&mut self,
mut words_left: u16,
options: SamplingOptions,
ctx: &mut super::BlockContext,
emitter: &mut crate::front::Emitter,
block: &mut crate::Block,
block_id: spirv::Word,
body_idx: usize,
) -> Result<(), Error> {
let start = self.data_offset;
let result_type_id = self.next()?;
let result_id = self.next()?;
let sampled_image_id = self.next()?;
let coordinate_id = self.next()?;
let dref_id = if options.compare {
Some(self.next()?)
} else {
None
};
let mut image_ops = if words_left != 0 {
words_left -= 1;
self.next()?
} else {
0
};
let mut level = crate::SampleLevel::Auto;
let mut offset = None;
while image_ops != 0 {
let bit = 1 << image_ops.trailing_zeros();
match spirv::ImageOperands::from_bits_truncate(bit) {
spirv::ImageOperands::BIAS => {
let bias_expr = self.next()?;
let bias_lexp = self.lookup_expression.lookup(bias_expr)?;
let bias_handle =
self.get_expr_handle(bias_expr, bias_lexp, ctx, emitter, block, body_idx);
level = crate::SampleLevel::Bias(bias_handle);
words_left -= 1;
}
spirv::ImageOperands::LOD => {
let lod_expr = self.next()?;
let lod_lexp = self.lookup_expression.lookup(lod_expr)?;
let lod_handle =
self.get_expr_handle(lod_expr, lod_lexp, ctx, emitter, block, body_idx);
level = if options.compare {
log::debug!("Assuming {:?} is zero", lod_handle);
crate::SampleLevel::Zero
} else {
crate::SampleLevel::Exact(lod_handle)
};
words_left -= 1;
}
spirv::ImageOperands::GRAD => {
let grad_x_expr = self.next()?;
let grad_x_lexp = self.lookup_expression.lookup(grad_x_expr)?;
let grad_x_handle = self.get_expr_handle(
grad_x_expr,
grad_x_lexp,
ctx,
emitter,
block,
body_idx,
);
let grad_y_expr = self.next()?;
let grad_y_lexp = self.lookup_expression.lookup(grad_y_expr)?;
let grad_y_handle = self.get_expr_handle(
grad_y_expr,
grad_y_lexp,
ctx,
emitter,
block,
body_idx,
);
level = if options.compare {
log::debug!(
"Assuming gradients {:?} and {:?} are not greater than 1",
grad_x_handle,
grad_y_handle
);
crate::SampleLevel::Zero
} else {
crate::SampleLevel::Gradient {
x: grad_x_handle,
y: grad_y_handle,
}
};
words_left -= 2;
}
spirv::ImageOperands::CONST_OFFSET => {
let offset_constant = self.next()?;
let offset_handle = self.lookup_constant.lookup(offset_constant)?.handle;
let offset_handle = ctx.const_expressions.append(
crate::Expression::Constant(offset_handle),
Default::default(),
);
offset = Some(offset_handle);
words_left -= 1;
}
other => {
log::warn!("Unknown image sample operand {:?}", other);
for _ in 0..words_left {
self.next()?;
}
break;
}
}
image_ops ^= bit;
}
let si_lexp = self.lookup_sampled_image.lookup(sampled_image_id)?;
let coord_lexp = self.lookup_expression.lookup(coordinate_id)?;
let coord_handle =
self.get_expr_handle(coordinate_id, coord_lexp, ctx, emitter, block, body_idx);
let coord_type_handle = self.lookup_type.lookup(coord_lexp.type_id)?.handle;
let sampling_bit = if options.compare {
SamplingFlags::COMPARISON
} else {
SamplingFlags::REGULAR
};
let image_ty = match ctx.expressions[si_lexp.image] {
crate::Expression::GlobalVariable(handle) => {
if let Some(flags) = self.handle_sampling.get_mut(&handle) {
*flags |= sampling_bit;
}
ctx.global_arena[handle].ty
}
crate::Expression::FunctionArgument(i) => {
ctx.parameter_sampling[i as usize] |= sampling_bit;
ctx.arguments[i as usize].ty
}
crate::Expression::Access { base, .. } => match ctx.expressions[base] {
crate::Expression::GlobalVariable(handle) => {
if let Some(flags) = self.handle_sampling.get_mut(&handle) {
*flags |= sampling_bit;
}
match ctx.type_arena[ctx.global_arena[handle].ty].inner {
crate::TypeInner::BindingArray { base, .. } => base,
_ => return Err(Error::InvalidGlobalVar(ctx.expressions[base].clone())),
}
}
ref other => return Err(Error::InvalidGlobalVar(other.clone())),
},
ref other => return Err(Error::InvalidGlobalVar(other.clone())),
};
match ctx.expressions[si_lexp.sampler] {
crate::Expression::GlobalVariable(handle) => {
*self.handle_sampling.get_mut(&handle).unwrap() |= sampling_bit;
}
crate::Expression::FunctionArgument(i) => {
ctx.parameter_sampling[i as usize] |= sampling_bit;
}
crate::Expression::Access { base, .. } => match ctx.expressions[base] {
crate::Expression::GlobalVariable(handle) => {
*self.handle_sampling.get_mut(&handle).unwrap() |= sampling_bit;
}
ref other => return Err(Error::InvalidGlobalVar(other.clone())),
},
ref other => return Err(Error::InvalidGlobalVar(other.clone())),
}
let ((coordinate, array_index), depth_ref) = match ctx.type_arena[image_ty].inner {
crate::TypeInner::Image {
dim,
arrayed,
class: _,
} => (
extract_image_coordinates(
dim,
if options.project {
ExtraCoordinate::Projection
} else if arrayed {
ExtraCoordinate::ArrayLayer
} else {
ExtraCoordinate::Garbage
},
coord_handle,
coord_type_handle,
ctx,
),
{
match dref_id {
Some(id) => {
let expr_lexp = self.lookup_expression.lookup(id)?;
let mut expr =
self.get_expr_handle(id, expr_lexp, ctx, emitter, block, body_idx);
if options.project {
let required_size = dim.required_coordinate_size();
let right = ctx.expressions.append(
crate::Expression::AccessIndex {
base: coord_handle,
index: required_size.map_or(1, |size| size as u32),
},
crate::Span::default(),
);
expr = ctx.expressions.append(
crate::Expression::Binary {
op: crate::BinaryOperator::Divide,
left: expr,
right,
},
crate::Span::default(),
)
};
Some(expr)
}
None => None,
}
},
),
_ => return Err(Error::InvalidImage(image_ty)),
};
let expr = crate::Expression::ImageSample {
image: si_lexp.image,
sampler: si_lexp.sampler,
gather: None, //TODO
coordinate,
array_index,
offset,
level,
depth_ref,
};
self.lookup_expression.insert(
result_id,
LookupExpression {
handle: ctx.expressions.append(expr, self.span_from_with_op(start)),
type_id: result_type_id,
block_id,
},
);
Ok(())
}
/// Parse an `OpImageQuerySize` / `OpImageQuerySizeLod` instruction.
///
/// When `at_level` is true an extra level-of-detail operand is consumed and
/// forwarded as the `level` of the resulting [`crate::ImageQuery::Size`].
/// The raw query result is converted to 32-bit signed integers before being
/// registered under the instruction's result id.
pub(super) fn parse_image_query_size(
    &mut self,
    at_level: bool,
    ctx: &mut super::BlockContext,
    emitter: &mut crate::front::Emitter,
    block: &mut crate::Block,
    block_id: spirv::Word,
    body_idx: usize,
) -> Result<(), Error> {
    let start = self.data_offset;
    let result_type_id = self.next()?;
    let result_id = self.next()?;
    let image_id = self.next()?;

    // The LOD operand is only present for the `Lod` flavor of the query.
    let level = match at_level {
        true => {
            let level_id = self.next()?;
            let lexp = self.lookup_expression.lookup(level_id)?;
            Some(self.get_expr_handle(level_id, lexp, ctx, emitter, block, body_idx))
        }
        false => None,
    };

    // No need to call get_expr_handle here since only globals/arguments are
    // allowed as images and they are always in the root scope
    //TODO: handle arrays and cubes
    let image_lexp = self.lookup_expression.lookup(image_id)?;

    let span = self.span_from_with_op(start);
    let query_handle = ctx.expressions.append(
        crate::Expression::ImageQuery {
            image: image_lexp.handle,
            query: crate::ImageQuery::Size { level },
        },
        span,
    );
    // Wrap the raw query in a conversion to 4-byte signed integers.
    let converted = ctx.expressions.append(
        crate::Expression::As {
            expr: query_handle,
            kind: crate::ScalarKind::Sint,
            convert: Some(4),
        },
        span,
    );
    self.lookup_expression.insert(
        result_id,
        LookupExpression {
            handle: converted,
            type_id: result_type_id,
            block_id,
        },
    );
    Ok(())
}
/// Parse an image query instruction that takes no operands beyond the image
/// itself (e.g. number of levels, layers, or samples).
///
/// The raw query result is converted to 32-bit signed integers before being
/// registered under the instruction's result id.
pub(super) fn parse_image_query_other(
    &mut self,
    query: crate::ImageQuery,
    expressions: &mut Arena<crate::Expression>,
    block_id: spirv::Word,
) -> Result<(), Error> {
    let start = self.data_offset;
    let result_type_id = self.next()?;
    let result_id = self.next()?;
    let image_id = self.next()?;

    // No need to call get_expr_handle here since only globals/arguments are
    // allowed as images and they are always in the root scope
    let image = self.lookup_expression.lookup(image_id)?.handle;

    let span = self.span_from_with_op(start);
    let raw_query = expressions.append(crate::Expression::ImageQuery { image, query }, span);
    // Wrap the raw query in a conversion to 4-byte signed integers.
    let converted = expressions.append(
        crate::Expression::As {
            expr: raw_query,
            kind: crate::ScalarKind::Sint,
            convert: Some(4),
        },
        span,
    );
    self.lookup_expression.insert(
        result_id,
        LookupExpression {
            handle: converted,
            type_id: result_type_id,
            block_id,
        },
    );
    Ok(())
}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,31 @@
use super::Error;
use crate::arena::{Arena, Handle};

/// Create a default value for an output built-in.
///
/// Appends the constant expression(s) for the built-in's default to
/// `const_expressions` (all tagged with `span`) and returns the handle of the
/// final expression. Built-ins without a dedicated default fall back to a
/// zero value of `ty`.
pub fn generate_default_built_in(
    built_in: Option<crate::BuiltIn>,
    ty: Handle<crate::Type>,
    const_expressions: &mut Arena<crate::Expression>,
    span: crate::Span,
) -> Result<Handle<crate::Expression>, Error> {
    let expr = match built_in {
        // Default clip-space position: (0, 0, 0, 1).
        Some(crate::BuiltIn::Position { .. }) => {
            let mut f32_literal = |value| {
                const_expressions.append(
                    crate::Expression::Literal(crate::Literal::F32(value)),
                    span,
                )
            };
            let zero = f32_literal(0.0);
            let one = f32_literal(1.0);
            crate::Expression::Compose {
                ty,
                components: vec![zero, zero, zero, one],
            }
        }
        Some(crate::BuiltIn::PointSize) => crate::Expression::Literal(crate::Literal::F32(1.0)),
        Some(crate::BuiltIn::FragDepth) => crate::Expression::Literal(crate::Literal::F32(0.0)),
        // All sample bits enabled by default.
        Some(crate::BuiltIn::SampleMask) => {
            crate::Expression::Literal(crate::Literal::U32(u32::MAX))
        }
        // Note: `crate::BuiltIn::ClipDistance` is intentionally left for the default path
        _ => crate::Expression::ZeroValue(ty),
    };
    Ok(const_expressions.append(expr, span))
}

View file

@ -0,0 +1,314 @@
/*!
Type generators.
*/
use crate::{arena::Handle, span::Span};
impl crate::Module {
    /// Build the struct type returned by an atomic compare-exchange operation:
    /// the previously stored value (`old_value`, of the given `kind`/`width`)
    /// plus an `exchanged` boolean success flag.
    ///
    /// The struct is named `__atomic_compare_exchange_result<kind,width>` so
    /// distinct scalar types get distinct result types.
    pub fn generate_atomic_compare_exchange_result(
        &mut self,
        kind: crate::ScalarKind,
        width: crate::Bytes,
    ) -> Handle<crate::Type> {
        let bool_ty = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    kind: crate::ScalarKind::Bool,
                    width: crate::BOOL_WIDTH,
                },
            },
            Span::UNDEFINED,
        );
        let scalar_ty = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar { kind, width },
            },
            Span::UNDEFINED,
        );
        // NOTE(review): the member offset (4) and struct span (8) assume a
        // 4-byte scalar; confirm this is never called with `width != 4`.
        self.types.insert(
            crate::Type {
                name: Some(format!(
                    "__atomic_compare_exchange_result<{kind:?},{width}>"
                )),
                inner: crate::TypeInner::Struct {
                    members: vec![
                        crate::StructMember {
                            name: Some("old_value".to_string()),
                            ty: scalar_ty,
                            binding: None,
                            offset: 0,
                        },
                        crate::StructMember {
                            name: Some("exchanged".to_string()),
                            ty: bool_ty,
                            binding: None,
                            offset: 4,
                        },
                    ],
                    span: 8,
                },
            },
            Span::UNDEFINED,
        )
    }

    /// Populate this module's [`SpecialTypes::ray_desc`] type.
    ///
    /// [`SpecialTypes::ray_desc`] is the type of the [`descriptor`] operand of
    /// an [`Initialize`] [`RayQuery`] statement. In WGSL, it is a struct type
    /// referred to as `RayDesc`.
    ///
    /// Backends consume values of this type to drive platform APIs, so if you
    /// change any of its fields, you must update the backends to match. Look
    /// for backend code dealing with [`RayQueryFunction::Initialize`].
    ///
    /// [`SpecialTypes::ray_desc`]: crate::SpecialTypes::ray_desc
    /// [`descriptor`]: crate::RayQueryFunction::Initialize::descriptor
    /// [`Initialize`]: crate::RayQueryFunction::Initialize
    /// [`RayQuery`]: crate::Statement::RayQuery
    /// [`RayQueryFunction::Initialize`]: crate::RayQueryFunction::Initialize
    pub fn generate_ray_desc_type(&mut self) -> Handle<crate::Type> {
        // The type is generated at most once; reuse the cached handle after that.
        if let Some(handle) = self.special_types.ray_desc {
            return handle;
        }
        let width = 4;
        let ty_flag = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    width,
                    kind: crate::ScalarKind::Uint,
                },
            },
            Span::UNDEFINED,
        );
        let ty_scalar = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    width,
                    kind: crate::ScalarKind::Float,
                },
            },
            Span::UNDEFINED,
        );
        let ty_vector = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Vector {
                    size: crate::VectorSize::Tri,
                    kind: crate::ScalarKind::Float,
                    width,
                },
            },
            Span::UNDEFINED,
        );
        // Member offsets place the two vec3s (`origin`, `dir`) on 16-byte
        // boundaries (offsets 16 and 32), giving a total struct span of 48.
        let handle = self.types.insert(
            crate::Type {
                name: Some("RayDesc".to_string()),
                inner: crate::TypeInner::Struct {
                    members: vec![
                        crate::StructMember {
                            name: Some("flags".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 0,
                        },
                        crate::StructMember {
                            name: Some("cull_mask".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 4,
                        },
                        crate::StructMember {
                            name: Some("tmin".to_string()),
                            ty: ty_scalar,
                            binding: None,
                            offset: 8,
                        },
                        crate::StructMember {
                            name: Some("tmax".to_string()),
                            ty: ty_scalar,
                            binding: None,
                            offset: 12,
                        },
                        crate::StructMember {
                            name: Some("origin".to_string()),
                            ty: ty_vector,
                            binding: None,
                            offset: 16,
                        },
                        crate::StructMember {
                            name: Some("dir".to_string()),
                            ty: ty_vector,
                            binding: None,
                            offset: 32,
                        },
                    ],
                    span: 48,
                },
            },
            Span::UNDEFINED,
        );
        // Cache the handle so subsequent calls return it directly.
        self.special_types.ray_desc = Some(handle);
        handle
    }

    /// Populate this module's [`SpecialTypes::ray_intersection`] type.
    ///
    /// [`SpecialTypes::ray_intersection`] is the type of a
    /// `RayQueryGetIntersection` expression. In WGSL, it is a struct type
    /// referred to as `RayIntersection`.
    ///
    /// Backends construct values of this type based on platform APIs, so if you
    /// change any of its fields, you must update the backends to match. Look
    /// for the backend's handling for [`Expression::RayQueryGetIntersection`].
    ///
    /// [`SpecialTypes::ray_intersection`]: crate::SpecialTypes::ray_intersection
    /// [`Expression::RayQueryGetIntersection`]: crate::Expression::RayQueryGetIntersection
    pub fn generate_ray_intersection_type(&mut self) -> Handle<crate::Type> {
        // The type is generated at most once; reuse the cached handle after that.
        if let Some(handle) = self.special_types.ray_intersection {
            return handle;
        }
        let width = 4;
        let ty_flag = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    width,
                    kind: crate::ScalarKind::Uint,
                },
            },
            Span::UNDEFINED,
        );
        let ty_scalar = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    width,
                    kind: crate::ScalarKind::Float,
                },
            },
            Span::UNDEFINED,
        );
        let ty_barycentrics = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Vector {
                    width,
                    size: crate::VectorSize::Bi,
                    kind: crate::ScalarKind::Float,
                },
            },
            Span::UNDEFINED,
        );
        let ty_bool = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Scalar {
                    width: crate::BOOL_WIDTH,
                    kind: crate::ScalarKind::Bool,
                },
            },
            Span::UNDEFINED,
        );
        // 4x3 float matrix used for the object/world transforms below.
        let ty_transform = self.types.insert(
            crate::Type {
                name: None,
                inner: crate::TypeInner::Matrix {
                    columns: crate::VectorSize::Quad,
                    rows: crate::VectorSize::Tri,
                    width,
                },
            },
            Span::UNDEFINED,
        );
        // NOTE(review): the two matrices sit at offsets 48 and 112 (64 bytes
        // apart) with a total span of 176; keep these in sync with the
        // backends if the layout ever changes.
        let handle = self.types.insert(
            crate::Type {
                name: Some("RayIntersection".to_string()),
                inner: crate::TypeInner::Struct {
                    members: vec![
                        crate::StructMember {
                            name: Some("kind".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 0,
                        },
                        crate::StructMember {
                            name: Some("t".to_string()),
                            ty: ty_scalar,
                            binding: None,
                            offset: 4,
                        },
                        crate::StructMember {
                            name: Some("instance_custom_index".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 8,
                        },
                        crate::StructMember {
                            name: Some("instance_id".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 12,
                        },
                        crate::StructMember {
                            name: Some("sbt_record_offset".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 16,
                        },
                        crate::StructMember {
                            name: Some("geometry_index".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 20,
                        },
                        crate::StructMember {
                            name: Some("primitive_index".to_string()),
                            ty: ty_flag,
                            binding: None,
                            offset: 24,
                        },
                        crate::StructMember {
                            name: Some("barycentrics".to_string()),
                            ty: ty_barycentrics,
                            binding: None,
                            offset: 28,
                        },
                        crate::StructMember {
                            name: Some("front_face".to_string()),
                            ty: ty_bool,
                            binding: None,
                            offset: 36,
                        },
                        crate::StructMember {
                            name: Some("object_to_world".to_string()),
                            ty: ty_transform,
                            binding: None,
                            offset: 48,
                        },
                        crate::StructMember {
                            name: Some("world_to_object".to_string()),
                            ty: ty_transform,
                            binding: None,
                            offset: 112,
                        },
                    ],
                    span: 176,
                },
            },
            Span::UNDEFINED,
        );
        // Cache the handle so subsequent calls return it directly.
        self.special_types.ray_intersection = Some(handle);
        handle
    }
}

View file

@ -0,0 +1,703 @@
use crate::front::wgsl::parse::lexer::Token;
use crate::proc::{Alignment, ResolveError};
use crate::{SourceLocation, Span};
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::files::SimpleFile;
use codespan_reporting::term;
use std::borrow::Cow;
use std::ops::Range;
use termcolor::{ColorChoice, NoColor, StandardStream};
use thiserror::Error;
/// A user-facing WGSL parse error, rendered with source annotations via
/// `codespan_reporting`.
#[derive(Clone, Debug)]
pub struct ParseError {
    // Top-level, human-readable description of the failure.
    message: String,
    // Source spans, each paired with a short label shown inline at that span.
    labels: Vec<(Span, Cow<'static, str>)>,
    // Free-form notes appended after the labels, each printed as "note: ...".
    notes: Vec<String>,
}
impl ParseError {
pub fn labels(&self) -> impl Iterator<Item = (Span, &str)> + ExactSizeIterator + '_ {
self.labels
.iter()
.map(|&(span, ref msg)| (span, msg.as_ref()))
}
pub fn message(&self) -> &str {
&self.message
}
fn diagnostic(&self) -> Diagnostic<()> {
let diagnostic = Diagnostic::error()
.with_message(self.message.to_string())
.with_labels(
self.labels
.iter()
.map(|label| {
Label::primary((), label.0.to_range().unwrap())
.with_message(label.1.to_string())
})
.collect(),
)
.with_notes(
self.notes
.iter()
.map(|note| format!("note: {note}"))
.collect(),
);
diagnostic
}
/// Emits a summary of the error to standard error stream.
pub fn emit_to_stderr(&self, source: &str) {
self.emit_to_stderr_with_path(source, "wgsl")
}
/// Emits a summary of the error to standard error stream.
pub fn emit_to_stderr_with_path(&self, source: &str, path: &str) {
let files = SimpleFile::new(path, source);
let config = codespan_reporting::term::Config::default();
let writer = StandardStream::stderr(ColorChoice::Auto);
term::emit(&mut writer.lock(), &config, &files, &self.diagnostic())
.expect("cannot write error");
}
/// Emits a summary of the error to a string.
pub fn emit_to_string(&self, source: &str) -> String {
self.emit_to_string_with_path(source, "wgsl")
}
/// Emits a summary of the error to a string.
pub fn emit_to_string_with_path(&self, source: &str, path: &str) -> String {
let files = SimpleFile::new(path, source);
let config = codespan_reporting::term::Config::default();
let mut writer = NoColor::new(Vec::new());
term::emit(&mut writer, &config, &files, &self.diagnostic()).expect("cannot write error");
String::from_utf8(writer.into_inner()).unwrap()
}
/// Returns a [`SourceLocation`] for the first label in the error message.
pub fn location(&self, source: &str) -> Option<SourceLocation> {
self.labels.get(0).map(|label| label.0.location(source))
}
}
impl std::fmt::Display for ParseError {
    /// Displays only the top-level message; use `emit_to_*` for the full,
    /// annotated rendering.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.message)
    }
}
impl std::error::Error for ParseError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
/// The kind (or class) of token the parser expected at the point of failure.
/// Rendered into an English description by `Error::as_parse_error`.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum ExpectedToken<'a> {
    /// One specific token.
    Token(Token<'a>),
    /// Any identifier.
    Identifier,
    /// A 32-bit signed integer literal.
    Number,
    /// An unsigned or signed integer literal.
    Integer,
    /// Expected: constant, parenthesized expression, identifier
    PrimaryExpression,
    /// Expected: assignment, increment/decrement expression
    Assignment,
    /// Expected: 'case', 'default', '}'
    SwitchItem,
    /// Expected: ',', ')'
    WorkgroupSizeSeparator,
    /// Expected: 'struct', 'let', 'var', 'type', ';', 'fn', eof
    GlobalItem,
    /// Expected a type.
    Type,
    /// Access of `var`, `let`, `const`.
    Variable,
    /// Access of a function
    Function,
}
/// Ways a numeric literal can fail to produce a value. The `#[error]` strings
/// double as the user-facing messages via `thiserror`.
#[derive(Clone, Copy, Debug, Error, PartialEq)]
pub enum NumberError {
    #[error("invalid numeric literal format")]
    Invalid,
    #[error("numeric literal not representable by target type")]
    NotRepresentable,
    #[error("unimplemented f16 type")]
    UnimplementedF16,
}
/// Why the left-hand side of an assignment was rejected; refines the
/// diagnostics produced for `Error::InvalidAssignment`.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum InvalidAssignmentType {
    /// Any rejection not covered by the more specific variants.
    Other,
    /// Assignment to a vector swizzle (unsupported in WGSL).
    Swizzle,
    /// Assignment to an immutable (`let`) binding; the span points at the
    /// binding's declaration.
    ImmutableBinding(Span),
}
/// A WGSL front-end error, carrying the spans needed to point at the
/// offending source. Converted into a user-facing [`ParseError`] by
/// [`Error::as_parse_error`].
#[derive(Clone, Debug)]
pub enum Error<'a> {
    /// Found something other than the expected token (class) at this span.
    Unexpected(Span, ExpectedToken<'a>),
    /// Components were given where none are allowed.
    UnexpectedComponents(Span),
    /// An operation that is not supported during const evaluation.
    UnexpectedOperationInConstContext(Span),
    /// A numeric literal that could not be parsed; see [`NumberError`].
    BadNumber(Span, NumberError),
    /// A negative signed integer literal in a place where both signed and
    /// unsigned, but only non-negative, literals are allowed.
    NegativeInt(Span),
    /// Expected an unsigned integer constant expression.
    BadU32Constant(Span),
    /// A matrix with a non-floating-point scalar type (kind, width).
    BadMatrixScalarKind(Span, crate::ScalarKind, u8),
    /// An invalid field accessor (e.g. a bad swizzle).
    BadAccessor(Span),
    /// The named expression is not an image.
    BadTexture(Span),
    /// A cast between incompatible types.
    BadTypeCast {
        span: Span,
        from_type: String,
        to_type: String,
    },
    /// A texture sample type other than f32/i32/u32.
    BadTextureSampleType {
        span: Span,
        kind: crate::ScalarKind,
        width: u8,
    },
    /// `++`/`--` applied to a reference that is not i32 or u32.
    BadIncrDecrReferenceType(Span),
    /// Type resolution failed; wraps the resolver's own error.
    InvalidResolve(ResolveError),
    /// A `for(;;)` initializer that is neither an assignment nor a call.
    InvalidForInitializer(Span),
    /// A break if appeared outside of a continuing block
    InvalidBreakIf(Span),
    /// A `textureGather` component outside 0..=3.
    InvalidGatherComponent(Span),
    /// A constructor component of the wrong type, with its index.
    InvalidConstructorComponentType(Span, i32),
    /// `_` used as an identifier.
    InvalidIdentifierUnderscore(Span),
    /// An identifier starting with a reserved prefix.
    ReservedIdentifierPrefix(Span),
    UnknownAddressSpace(Span),
    UnknownAttribute(Span),
    UnknownBuiltin(Span),
    UnknownAccess(Span),
    /// An identifier with no definition in scope.
    UnknownIdent(Span, &'a str),
    UnknownScalarType(Span),
    UnknownType(Span),
    UnknownStorageFormat(Span),
    UnknownConservativeDepth(Span),
    /// A `@size` attribute smaller than the member's natural size.
    SizeAttributeTooLow(Span, u32),
    /// An `@align` attribute smaller than the member's natural alignment.
    AlignAttributeTooLow(Span, Alignment),
    NonPowerOfTwoAlignAttribute(Span),
    /// Conflicting or incomplete input/output binding attributes.
    InconsistentBinding(Span),
    TypeNotConstructible(Span),
    /// The type of a declaration could not be inferred.
    TypeNotInferrable(Span),
    /// Initializer type mismatch: (name span, expected type, found type).
    InitializationTypeMismatch(Span, String, String),
    /// A declaration that needs an explicit type but has none.
    MissingType(Span),
    /// A declaration missing a required attribute, named by the `&str`.
    MissingAttribute(&'static str, Span),
    /// Atomic operation on a pointer to a non-atomic.
    InvalidAtomicPointer(Span),
    /// Atomic operand type inconsistent with the operation.
    InvalidAtomicOperandType(Span),
    /// Ray-query operation on a pointer to a non-ray-query.
    InvalidRayQueryPointer(Span),
    /// Something (named by the `&str`) must not be a pointer but is.
    Pointer(&'static str, Span),
    /// The operand of `*` is not a pointer.
    NotPointer(Span),
    /// Something (named by the `&str`) must be a reference but is not.
    NotReference(&'static str, Span),
    /// An invalid left-hand side of an assignment; `ty` says why.
    InvalidAssignment {
        span: Span,
        ty: InvalidAssignmentType,
    },
    /// A reserved keyword used as a name.
    ReservedKeyword(Span),
    /// Redefinition of an identifier (used for both module-scope and local redefinitions).
    Redefinition {
        /// Span of the identifier in the previous definition.
        previous: Span,
        /// Span of the identifier in the new definition.
        current: Span,
    },
    /// A declaration refers to itself directly.
    RecursiveDeclaration {
        /// The location of the name of the declaration.
        ident: Span,
        /// The point at which it is used.
        usage: Span,
    },
    /// A declaration refers to itself indirectly, through one or more other
    /// definitions.
    CyclicDeclaration {
        /// The location of the name of some declaration in the cycle.
        ident: Span,
        /// The edges of the cycle of references.
        ///
        /// Each `(decl, reference)` pair indicates that the declaration whose
        /// name is `decl` has an identifier at `reference` whose definition is
        /// the next declaration in the cycle. The last pair's `reference` is
        /// the same identifier as `ident`, above.
        path: Vec<(Span, Span)>,
    },
    /// A `switch` case value of the wrong signedness (`uint` says which was
    /// expected).
    InvalidSwitchValue {
        uint: bool,
        span: Span,
    },
    /// An entry point appearing as a callee.
    CalledEntryPoint(Span),
    /// A call with the wrong number of arguments.
    WrongArgumentCount {
        span: Span,
        expected: Range<u32>,
        found: u32,
    },
    /// A void function used where a value is required.
    FunctionReturnsVoid(Span),
    /// A non-workgroup-pointer argument to `workgroupUniformLoad`.
    InvalidWorkGroupUniformLoad(Span),
    /// Catch-all for errors with no span information.
    Other,
    /// An array element count that is not an integer scalar.
    ExpectedArraySize(Span),
    /// An array element count of zero or less.
    NonPositiveArrayLength(Span),
}
impl<'a> Error<'a> {
    /// Render this error as a user-facing [`ParseError`].
    ///
    /// `source` must be the same WGSL source the spans were produced from: the
    /// offending text is sliced out of it (`&source[span]`) to build messages,
    /// labels, and notes.
    pub(crate) fn as_parse_error(&self, source: &'a str) -> ParseError {
        match *self {
            Error::Unexpected(unexpected_span, expected) => {
                // Describe the expected token (class) in plain English.
                let expected_str = match expected {
                    ExpectedToken::Token(token) => {
                        match token {
                            Token::Separator(c) => format!("'{c}'"),
                            Token::Paren(c) => format!("'{c}'"),
                            Token::Attribute => "@".to_string(),
                            Token::Number(_) => "number".to_string(),
                            Token::Word(s) => s.to_string(),
                            Token::Operation(c) => format!("operation ('{c}')"),
                            Token::LogicalOperation(c) => format!("logical operation ('{c}')"),
                            Token::ShiftOperation(c) => format!("bitshift ('{c}{c}')"),
                            // '<' / '>' here stand for the shift-assign tokens `<<=` / `>>=`.
                            Token::AssignmentOperation(c) if c=='<' || c=='>' => format!("bitshift ('{c}{c}=')"),
                            Token::AssignmentOperation(c) => format!("operation ('{c}=')"),
                            Token::IncrementOperation => "increment operation".to_string(),
                            Token::DecrementOperation => "decrement operation".to_string(),
                            Token::Arrow => "->".to_string(),
                            Token::Unknown(c) => format!("unknown ('{c}')"),
                            Token::Trivia => "trivia".to_string(),
                            Token::End => "end".to_string(),
                        }
                    }
                    ExpectedToken::Identifier => "identifier".to_string(),
                    ExpectedToken::Number => "32-bit signed integer literal".to_string(),
                    ExpectedToken::Integer => "unsigned/signed integer literal".to_string(),
                    ExpectedToken::PrimaryExpression => "expression".to_string(),
                    ExpectedToken::Assignment => "assignment or increment/decrement".to_string(),
                    ExpectedToken::SwitchItem => "switch item ('case' or 'default') or a closing curly bracket to signify the end of the switch statement ('}')".to_string(),
                    ExpectedToken::WorkgroupSizeSeparator => "workgroup size separator (',') or a closing parenthesis".to_string(),
                    ExpectedToken::GlobalItem => "global item ('struct', 'const', 'var', 'alias', ';', 'fn') or the end of the file".to_string(),
                    ExpectedToken::Type => "type".to_string(),
                    ExpectedToken::Variable => "variable access".to_string(),
                    ExpectedToken::Function => "function name".to_string(),
                };
                ParseError {
                    message: format!(
                        "expected {}, found '{}'",
                        expected_str, &source[unexpected_span],
                    ),
                    labels: vec![(unexpected_span, format!("expected {expected_str}").into())],
                    notes: vec![],
                }
            }
            Error::UnexpectedComponents(bad_span) => ParseError {
                message: "unexpected components".to_string(),
                labels: vec![(bad_span, "unexpected components".into())],
                notes: vec![],
            },
            Error::UnexpectedOperationInConstContext(span) => ParseError {
                message: "this operation is not supported in a const context".to_string(),
                labels: vec![(span, "operation not supported here".into())],
                notes: vec![],
            },
            Error::BadNumber(bad_span, ref err) => ParseError {
                message: format!("{}: `{}`", err, &source[bad_span],),
                labels: vec![(bad_span, err.to_string().into())],
                notes: vec![],
            },
            Error::NegativeInt(bad_span) => ParseError {
                message: format!(
                    "expected non-negative integer literal, found `{}`",
                    &source[bad_span],
                ),
                labels: vec![(bad_span, "expected non-negative integer".into())],
                notes: vec![],
            },
            Error::BadU32Constant(bad_span) => ParseError {
                message: format!(
                    "expected unsigned integer constant expression, found `{}`",
                    &source[bad_span],
                ),
                labels: vec![(bad_span, "expected unsigned integer".into())],
                notes: vec![],
            },
            Error::BadMatrixScalarKind(span, kind, width) => ParseError {
                message: format!(
                    "matrix scalar type must be floating-point, but found `{}`",
                    kind.to_wgsl(width)
                ),
                labels: vec![(span, "must be floating-point (e.g. `f32`)".into())],
                notes: vec![],
            },
            Error::BadAccessor(accessor_span) => ParseError {
                message: format!("invalid field accessor `{}`", &source[accessor_span],),
                labels: vec![(accessor_span, "invalid accessor".into())],
                notes: vec![],
            },
            Error::UnknownIdent(ident_span, ident) => ParseError {
                message: format!("no definition in scope for identifier: '{ident}'"),
                labels: vec![(ident_span, "unknown identifier".into())],
                notes: vec![],
            },
            Error::UnknownScalarType(bad_span) => ParseError {
                message: format!("unknown scalar type: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown scalar type".into())],
                notes: vec!["Valid scalar types are f32, f64, i32, u32, bool".into()],
            },
            Error::BadTextureSampleType { span, kind, width } => ParseError {
                message: format!(
                    "texture sample type must be one of f32, i32 or u32, but found {}",
                    kind.to_wgsl(width)
                ),
                labels: vec![(span, "must be one of f32, i32 or u32".into())],
                notes: vec![],
            },
            Error::BadIncrDecrReferenceType(span) => ParseError {
                message:
                    "increment/decrement operation requires reference type to be one of i32 or u32"
                        .to_string(),
                labels: vec![(span, "must be a reference type of i32 or u32".into())],
                notes: vec![],
            },
            Error::BadTexture(bad_span) => ParseError {
                message: format!(
                    "expected an image, but found '{}' which is not an image",
                    &source[bad_span]
                ),
                labels: vec![(bad_span, "not an image".into())],
                notes: vec![],
            },
            Error::BadTypeCast {
                span,
                ref from_type,
                ref to_type,
            } => {
                let msg = format!("cannot cast a {from_type} to a {to_type}");
                ParseError {
                    message: msg.clone(),
                    labels: vec![(span, msg.into())],
                    notes: vec![],
                }
            }
            // No span available here; the resolver error stands on its own.
            Error::InvalidResolve(ref resolve_error) => ParseError {
                message: resolve_error.to_string(),
                labels: vec![],
                notes: vec![],
            },
            Error::InvalidForInitializer(bad_span) => ParseError {
                message: format!(
                    "for(;;) initializer is not an assignment or a function call: '{}'",
                    &source[bad_span]
                ),
                labels: vec![(bad_span, "not an assignment or function call".into())],
                notes: vec![],
            },
            Error::InvalidBreakIf(bad_span) => ParseError {
                message: "A break if is only allowed in a continuing block".to_string(),
                labels: vec![(bad_span, "not in a continuing block".into())],
                notes: vec![],
            },
            Error::InvalidGatherComponent(bad_span) => ParseError {
                message: format!(
                    "textureGather component '{}' doesn't exist, must be 0, 1, 2, or 3",
                    &source[bad_span]
                ),
                labels: vec![(bad_span, "invalid component".into())],
                notes: vec![],
            },
            Error::InvalidConstructorComponentType(bad_span, component) => ParseError {
                message: format!("invalid type for constructor component at index [{component}]"),
                labels: vec![(bad_span, "invalid component type".into())],
                notes: vec![],
            },
            Error::InvalidIdentifierUnderscore(bad_span) => ParseError {
                message: "Identifier can't be '_'".to_string(),
                labels: vec![(bad_span, "invalid identifier".into())],
                notes: vec![
                    "Use phony assignment instead ('_ =' notice the absence of 'let' or 'var')"
                        .to_string(),
                ],
            },
            Error::ReservedIdentifierPrefix(bad_span) => ParseError {
                message: format!(
                    "Identifier starts with a reserved prefix: '{}'",
                    &source[bad_span]
                ),
                labels: vec![(bad_span, "invalid identifier".into())],
                notes: vec![],
            },
            Error::UnknownAddressSpace(bad_span) => ParseError {
                message: format!("unknown address space: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown address space".into())],
                notes: vec![],
            },
            Error::UnknownAttribute(bad_span) => ParseError {
                message: format!("unknown attribute: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown attribute".into())],
                notes: vec![],
            },
            Error::UnknownBuiltin(bad_span) => ParseError {
                message: format!("unknown builtin: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown builtin".into())],
                notes: vec![],
            },
            Error::UnknownAccess(bad_span) => ParseError {
                message: format!("unknown access: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown access".into())],
                notes: vec![],
            },
            Error::UnknownStorageFormat(bad_span) => ParseError {
                message: format!("unknown storage format: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown storage format".into())],
                notes: vec![],
            },
            Error::UnknownConservativeDepth(bad_span) => ParseError {
                message: format!("unknown conservative depth: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown conservative depth".into())],
                notes: vec![],
            },
            Error::UnknownType(bad_span) => ParseError {
                message: format!("unknown type: '{}'", &source[bad_span]),
                labels: vec![(bad_span, "unknown type".into())],
                notes: vec![],
            },
            Error::SizeAttributeTooLow(bad_span, min_size) => ParseError {
                message: format!("struct member size must be at least {min_size}"),
                labels: vec![(bad_span, format!("must be at least {min_size}").into())],
                notes: vec![],
            },
            Error::AlignAttributeTooLow(bad_span, min_align) => ParseError {
                message: format!("struct member alignment must be at least {min_align}"),
                labels: vec![(bad_span, format!("must be at least {min_align}").into())],
                notes: vec![],
            },
            Error::NonPowerOfTwoAlignAttribute(bad_span) => ParseError {
                message: "struct member alignment must be a power of 2".to_string(),
                labels: vec![(bad_span, "must be a power of 2".into())],
                notes: vec![],
            },
            Error::InconsistentBinding(span) => ParseError {
                message: "input/output binding is not consistent".to_string(),
                labels: vec![(span, "input/output binding is not consistent".into())],
                notes: vec![],
            },
            Error::TypeNotConstructible(span) => ParseError {
                message: format!("type `{}` is not constructible", &source[span]),
                labels: vec![(span, "type is not constructible".into())],
                notes: vec![],
            },
            Error::TypeNotInferrable(span) => ParseError {
                message: "type can't be inferred".to_string(),
                labels: vec![(span, "type can't be inferred".into())],
                notes: vec![],
            },
            Error::InitializationTypeMismatch(name_span, ref expected_ty, ref got_ty) => {
                ParseError {
                    message: format!(
                        "the type of `{}` is expected to be `{}`, but got `{}`",
                        &source[name_span], expected_ty, got_ty,
                    ),
                    labels: vec![(
                        name_span,
                        format!("definition of `{}`", &source[name_span]).into(),
                    )],
                    notes: vec![],
                }
            }
            Error::MissingType(name_span) => ParseError {
                message: format!("variable `{}` needs a type", &source[name_span]),
                labels: vec![(
                    name_span,
                    format!("definition of `{}`", &source[name_span]).into(),
                )],
                notes: vec![],
            },
            Error::MissingAttribute(name, name_span) => ParseError {
                message: format!(
                    "variable `{}` needs a '{}' attribute",
                    &source[name_span], name
                ),
                labels: vec![(
                    name_span,
                    format!("definition of `{}`", &source[name_span]).into(),
                )],
                notes: vec![],
            },
            Error::InvalidAtomicPointer(span) => ParseError {
                message: "atomic operation is done on a pointer to a non-atomic".to_string(),
                labels: vec![(span, "atomic pointer is invalid".into())],
                notes: vec![],
            },
            Error::InvalidAtomicOperandType(span) => ParseError {
                message: "atomic operand type is inconsistent with the operation".to_string(),
                labels: vec![(span, "atomic operand type is invalid".into())],
                notes: vec![],
            },
            Error::InvalidRayQueryPointer(span) => ParseError {
                message: "ray query operation is done on a pointer to a non-ray-query".to_string(),
                labels: vec![(span, "ray query pointer is invalid".into())],
                notes: vec![],
            },
            Error::NotPointer(span) => ParseError {
                message: "the operand of the `*` operator must be a pointer".to_string(),
                labels: vec![(span, "expression is not a pointer".into())],
                notes: vec![],
            },
            Error::NotReference(what, span) => ParseError {
                message: format!("{what} must be a reference"),
                labels: vec![(span, "expression is not a reference".into())],
                notes: vec![],
            },
            Error::InvalidAssignment { span, ty } => {
                // The specific rejection reason may contribute an extra label
                // (pointing at the immutable binding) and tailored notes.
                let (extra_label, notes) = match ty {
                    InvalidAssignmentType::Swizzle => (
                        None,
                        vec![
                            "WGSL does not support assignments to swizzles".into(),
                            "consider assigning each component individually".into(),
                        ],
                    ),
                    InvalidAssignmentType::ImmutableBinding(binding_span) => (
                        Some((binding_span, "this is an immutable binding".into())),
                        vec![format!(
                            "consider declaring '{}' with `var` instead of `let`",
                            &source[binding_span]
                        )],
                    ),
                    InvalidAssignmentType::Other => (None, vec![]),
                };
                ParseError {
                    message: "invalid left-hand side of assignment".into(),
                    labels: std::iter::once((span, "cannot assign to this expression".into()))
                        .chain(extra_label)
                        .collect(),
                    notes,
                }
            }
            Error::Pointer(what, span) => ParseError {
                message: format!("{what} must not be a pointer"),
                labels: vec![(span, "expression is a pointer".into())],
                notes: vec![],
            },
            Error::ReservedKeyword(name_span) => ParseError {
                message: format!("name `{}` is a reserved keyword", &source[name_span]),
                labels: vec![(
                    name_span,
                    format!("definition of `{}`", &source[name_span]).into(),
                )],
                notes: vec![],
            },
            Error::Redefinition { previous, current } => ParseError {
                message: format!("redefinition of `{}`", &source[current]),
                labels: vec![
                    (
                        current,
                        format!("redefinition of `{}`", &source[current]).into(),
                    ),
                    (
                        previous,
                        format!("previous definition of `{}`", &source[previous]).into(),
                    ),
                ],
                notes: vec![],
            },
            Error::RecursiveDeclaration { ident, usage } => ParseError {
                message: format!("declaration of `{}` is recursive", &source[ident]),
                labels: vec![(ident, "".into()), (usage, "uses itself here".into())],
                notes: vec![],
            },
            Error::CyclicDeclaration { ident, ref path } => ParseError {
                message: format!("declaration of `{}` is cyclic", &source[ident]),
                // Each cycle edge contributes two labels: the declaration's
                // name and the use that leads to the next declaration; the
                // final edge is marked as closing the cycle.
                labels: path
                    .iter()
                    .enumerate()
                    .flat_map(|(i, &(ident, usage))| {
                        [
                            (ident, "".into()),
                            (
                                usage,
                                if i == path.len() - 1 {
                                    "ending the cycle".into()
                                } else {
                                    format!("uses `{}`", &source[ident]).into()
                                },
                            ),
                        ]
                    })
                    .collect(),
                notes: vec![],
            },
            Error::InvalidSwitchValue { uint, span } => ParseError {
                message: "invalid switch value".to_string(),
                labels: vec![(
                    span,
                    if uint {
                        "expected unsigned integer"
                    } else {
                        "expected signed integer"
                    }
                    .into(),
                )],
                // Suggest either adding or stripping the trailing `u` suffix
                // (the suffix is the last byte of the literal's span).
                notes: vec![if uint {
                    format!("suffix the integer with a `u`: '{}u'", &source[span])
                } else {
                    let span = span.to_range().unwrap();
                    format!(
                        "remove the `u` suffix: '{}'",
                        &source[span.start..span.end - 1]
                    )
                }],
            },
            Error::CalledEntryPoint(span) => ParseError {
                message: "entry point cannot be called".to_string(),
                labels: vec![(span, "entry point cannot be called".into())],
                notes: vec![],
            },
            Error::WrongArgumentCount {
                span,
                ref expected,
                found,
            } => ParseError {
                message: format!(
                    "wrong number of arguments: expected {}, found {}",
                    // A single-value range prints as "n"; otherwise "a..b".
                    if expected.len() < 2 {
                        format!("{}", expected.start)
                    } else {
                        format!("{}..{}", expected.start, expected.end)
                    },
                    found
                ),
                labels: vec![(span, "wrong number of arguments".into())],
                notes: vec![],
            },
            Error::FunctionReturnsVoid(span) => ParseError {
                message: "function does not return any value".to_string(),
                labels: vec![(span, "".into())],
                notes: vec![
                    "perhaps you meant to call the function in a separate statement?".into(),
                ],
            },
            Error::InvalidWorkGroupUniformLoad(span) => ParseError {
                message: "incorrect type passed to workgroupUniformLoad".into(),
                labels: vec![(span, "".into())],
                notes: vec!["passed type must be a workgroup pointer".into()],
            },
            Error::Other => ParseError {
                message: "other error".to_string(),
                labels: vec![],
                notes: vec![],
            },
            Error::ExpectedArraySize(span) => ParseError {
                message: "array element count must resolve to an integer scalar (u32 or i32)"
                    .to_string(),
                labels: vec![(span, "must resolve to u32/i32".into())],
                notes: vec![],
            },
            Error::NonPositiveArrayLength(span) => ParseError {
                message: "array element count must be greater than zero".to_string(),
                labels: vec![(span, "must be greater than zero".into())],
                notes: vec![],
            },
        }
    }
}

View file

@ -0,0 +1,193 @@
use super::Error;
use crate::front::wgsl::parse::ast;
use crate::{FastHashMap, Handle, Span};
/// A `GlobalDecl` list in which each definition occurs before all its uses.
pub struct Index<'a> {
    // Handles of all global declarations, topologically sorted so that a
    // declaration always appears after everything it refers to.
    dependency_order: Vec<Handle<ast::GlobalDecl<'a>>>,
}
impl<'a> Index<'a> {
    /// Generate an `Index` for the given translation unit.
    ///
    /// Perform a topological sort on `tu`'s global declarations, placing
    /// referents before the definitions that refer to them.
    ///
    /// Return an error if the graph of references between declarations contains
    /// any cycles.
    pub fn generate(tu: &ast::TranslationUnit<'a>) -> Result<Self, Error<'a>> {
        // Map every global declaration's name to its handle, rejecting any
        // name that is declared twice.
        let mut globals = FastHashMap::with_capacity_and_hasher(tu.decls.len(), Default::default());
        for (handle, decl) in tu.decls.iter() {
            let ident = decl_ident(decl);
            if let Some(previous) = globals.insert(ident.name, handle) {
                return Err(Error::Redefinition {
                    previous: decl_ident(&tu.decls[previous]).span,
                    current: ident.span,
                });
            }
        }

        // Run the depth-first dependency solver over every declaration.
        let decl_count = tu.decls.len();
        let solver = DependencySolver {
            globals: &globals,
            module: tu,
            visited: vec![false; decl_count],
            temp_visited: vec![false; decl_count],
            path: Vec::new(),
            out: Vec::with_capacity(decl_count),
        };
        Ok(Self {
            dependency_order: solver.solve()?,
        })
    }

    /// Iterate over `GlobalDecl`s, visiting each definition before all its uses.
    ///
    /// Produce handles for all of the `GlobalDecl`s of the `TranslationUnit`
    /// passed to `Index::generate`, ordered so that a given declaration is
    /// produced before any other declaration that uses it.
    pub fn visit_ordered(&self) -> impl Iterator<Item = Handle<ast::GlobalDecl<'a>>> + '_ {
        self.dependency_order.iter().copied()
    }
}
/// An edge from a reference to its referent in the current depth-first
/// traversal.
///
/// This is like `ast::Dependency`, except that we've determined which
/// `GlobalDecl` it refers to.
struct ResolvedDependency<'a> {
    /// The referent of some identifier used in the current declaration
    /// (a handle into the translation unit's `decls` arena).
    decl: Handle<ast::GlobalDecl<'a>>,
    /// Where that use occurs within the current declaration.
    usage: Span,
}
/// Local state for ordering a `TranslationUnit`'s module-scope declarations.
///
/// Values of this type are used temporarily by `Index::generate`
/// to perform a depth-first sort on the declarations.
/// Technically, what we want is a topological sort, but a depth-first sort
/// has one key benefit - it's much more efficient in storing
/// the path of each node for error generation.
struct DependencySolver<'source, 'temp> {
    /// A map from module-scope definitions' names to their handles.
    globals: &'temp FastHashMap<&'source str, Handle<ast::GlobalDecl<'source>>>,
    /// The translation unit whose declarations we're ordering.
    module: &'temp ast::TranslationUnit<'source>,
    /// For each handle, whether we have pushed it onto `out` yet.
    visited: Vec<bool>,
    /// For each handle, whether it is a predecessor in the current depth-first
    /// traversal. This is used to detect cycles in the reference graph.
    temp_visited: Vec<bool>,
    /// The current path in our depth-first traversal. Used for generating
    /// error messages for non-trivial reference cycles.
    path: Vec<ResolvedDependency<'source>>,
    /// The list of declaration handles, with declarations before uses.
    out: Vec<Handle<ast::GlobalDecl<'source>>>,
}
impl<'a> DependencySolver<'a, '_> {
    /// Produce the sorted list of declaration handles, and check for cycles.
    fn solve(mut self) -> Result<Vec<Handle<ast::GlobalDecl<'a>>>, Error<'a>> {
        // Start a depth-first search from every declaration not yet reached
        // through some earlier root.
        for (id, _) in self.module.decls.iter() {
            if self.visited[id.index()] {
                continue;
            }
            self.dfs(id)?;
        }
        Ok(self.out)
    }

    /// Ensure that all declarations used by `id` have been added to the
    /// ordering, and then append `id` itself.
    fn dfs(&mut self, id: Handle<ast::GlobalDecl<'a>>) -> Result<(), Error<'a>> {
        let decl = &self.module.decls[id];
        let id_usize = id.index();

        // Mark `id` as being on the current traversal path.
        self.temp_visited[id_usize] = true;
        for dep in decl.dependencies.iter() {
            if let Some(&dep_id) = self.globals.get(dep.ident) {
                self.path.push(ResolvedDependency {
                    decl: dep_id,
                    usage: dep.usage,
                });
                let dep_id_usize = dep_id.index();
                if self.temp_visited[dep_id_usize] {
                    // Found a cycle.
                    return if dep_id == id {
                        // A declaration refers to itself directly.
                        Err(Error::RecursiveDeclaration {
                            ident: decl_ident(decl).span,
                            usage: dep.usage,
                        })
                    } else {
                        // A declaration refers to itself indirectly, through
                        // one or more other definitions. Report the entire path
                        // of references.
                        //
                        // NOTE(review): `rev().enumerate()` makes `i` count
                        // from the END of `path`, but `start_at` is then used
                        // to slice from the front; verify the reported cycle
                        // path is the intended suffix.
                        let start_at = self
                            .path
                            .iter()
                            .rev()
                            .enumerate()
                            .find_map(|(i, dep)| (dep.decl == dep_id).then_some(i))
                            .unwrap_or(0);

                        Err(Error::CyclicDeclaration {
                            ident: decl_ident(&self.module.decls[dep_id]).span,
                            path: self.path[start_at..]
                                .iter()
                                .map(|curr_dep| {
                                    let curr_id = curr_dep.decl;
                                    let curr_decl = &self.module.decls[curr_id];

                                    (decl_ident(curr_decl).span, curr_dep.usage)
                                })
                                .collect(),
                        })
                    };
                } else if !self.visited[dep_id_usize] {
                    self.dfs(dep_id)?;
                }

                // Remove this edge from the current path.
                self.path.pop();
            }

            // Ignore unresolved identifiers; they may be predeclared objects.
        }

        // Remove this node from the current path.
        self.temp_visited[id_usize] = false;

        // Now everything this declaration uses has been visited, and is already
        // present in `out`. That means we can append this one to the
        // ordering, and mark it as visited.
        self.out.push(id);
        self.visited[id_usize] = true;
        Ok(())
    }
}
const fn decl_ident<'a>(decl: &ast::GlobalDecl<'a>) -> ast::Ident<'a> {
match decl.kind {
ast::GlobalDeclKind::Fn(ref f) => f.name,
ast::GlobalDeclKind::Var(ref v) => v.name,
ast::GlobalDeclKind::Const(ref c) => c.name,
ast::GlobalDeclKind::Struct(ref s) => s.name,
ast::GlobalDeclKind::Type(ref t) => t.name,
}
}

View file

@ -0,0 +1,606 @@
use std::num::NonZeroU32;
use crate::front::wgsl::parse::ast;
use crate::{Handle, Span};
use crate::front::wgsl::error::Error;
use crate::front::wgsl::lower::{ExpressionContext, Lowerer};
use crate::proc::TypeResolution;
/// The head of a constructor expression, with the built type held by handle.
///
/// The `Partial*` variants are constructor heads whose component type is left
/// implicit (e.g. `vec3(x, y, z)`), to be inferred from the arguments later.
enum ConcreteConstructorHandle {
    PartialVector {
        size: crate::VectorSize,
    },
    PartialMatrix {
        columns: crate::VectorSize,
        rows: crate::VectorSize,
    },
    PartialArray,
    /// A fully-specified constructor: the Naga type being built.
    Type(Handle<crate::Type>),
}
impl ConcreteConstructorHandle {
fn borrow<'a>(&self, module: &'a crate::Module) -> ConcreteConstructor<'a> {
match *self {
Self::PartialVector { size } => ConcreteConstructor::PartialVector { size },
Self::PartialMatrix { columns, rows } => {
ConcreteConstructor::PartialMatrix { columns, rows }
}
Self::PartialArray => ConcreteConstructor::PartialArray,
Self::Type(handle) => ConcreteConstructor::Type(handle, &module.types[handle].inner),
}
}
}
/// Borrowed counterpart of [`ConcreteConstructorHandle`].
///
/// Identical, except that the `Type` variant also carries the type's
/// `TypeInner` so that `construct` can pattern-match on it directly.
enum ConcreteConstructor<'a> {
    PartialVector {
        size: crate::VectorSize,
    },
    PartialMatrix {
        columns: crate::VectorSize,
        rows: crate::VectorSize,
    },
    PartialArray,
    Type(Handle<crate::Type>, &'a crate::TypeInner),
}
impl ConcreteConstructorHandle {
    /// Render this constructor head as WGSL-like text for diagnostics; the
    /// unknown component type of `Partial*` heads is shown as `?`.
    fn to_error_string(&self, ctx: ExpressionContext) -> String {
        match *self {
            Self::PartialVector { size } => format!("vec{}<?>", size as u32),
            Self::PartialMatrix { columns, rows } => {
                format!("mat{}x{}<?>", columns as u32, rows as u32)
            }
            Self::PartialArray => String::from("array<?, ?>"),
            Self::Type(ty) => ctx.format_type(ty),
        }
    }
}
/// The lowered arguments of a constructor call, with types as resolutions.
///
/// Only the first argument's type is kept, since that is all inference needs.
enum ComponentsHandle<'a> {
    None,
    One {
        component: Handle<crate::Expression>,
        span: Span,
        ty: &'a TypeResolution,
    },
    Many {
        components: Vec<Handle<crate::Expression>>,
        spans: Vec<Span>,
        first_component_ty: &'a TypeResolution,
    },
}
impl<'a> ComponentsHandle<'a> {
    /// Resolve the stored `TypeResolution`s against `module`'s type arena,
    /// producing a [`Components`] view with borrowed `TypeInner`s.
    fn borrow(self, module: &'a crate::Module) -> Components<'a> {
        match self {
            Self::None => Components::None,
            Self::One { component, span, ty } => {
                let ty_inner = ty.inner_with(&module.types);
                Components::One {
                    component,
                    span,
                    ty_inner,
                }
            }
            Self::Many {
                components,
                spans,
                first_component_ty,
            } => {
                let first_component_ty_inner = first_component_ty.inner_with(&module.types);
                Components::Many {
                    components,
                    spans,
                    first_component_ty_inner,
                }
            }
        }
    }
}
/// Borrowed counterpart of [`ComponentsHandle`]: argument types are direct
/// `TypeInner` references, ready for pattern matching in `construct`.
enum Components<'a> {
    None,
    One {
        component: Handle<crate::Expression>,
        span: Span,
        ty_inner: &'a crate::TypeInner,
    },
    Many {
        components: Vec<Handle<crate::Expression>>,
        spans: Vec<Span>,
        first_component_ty_inner: &'a crate::TypeInner,
    },
}
impl Components<'_> {
    /// Discard span and type metadata, keeping only the component expressions.
    fn into_components_vec(self) -> Vec<Handle<crate::Expression>> {
        match self {
            Self::Many { components, .. } => components,
            Self::One { component, .. } => vec![component],
            Self::None => Vec::new(),
        }
    }
}
impl<'source, 'temp> Lowerer<'source, 'temp> {
    /// Generate Naga IR for a type constructor expression.
    ///
    /// The `constructor` value represents the head of the constructor
    /// expression, which is at least a hint of which type is being built; if
    /// it's one of the `Partial` variants, we need to consider the argument
    /// types as well.
    ///
    /// This is used for [`Construct`] expressions, but also for [`Call`]
    /// expressions, once we've determined that the "callable" (in WGSL spec
    /// terms) is actually a type.
    ///
    /// [`Construct`]: ast::Expression::Construct
    /// [`Call`]: ast::Expression::Call
    pub fn construct(
        &mut self,
        span: Span,
        constructor: &ast::ConstructorType<'source>,
        ty_span: Span,
        components: &[Handle<ast::Expression<'source>>],
        mut ctx: ExpressionContext<'source, '_, '_>,
    ) -> Result<Handle<crate::Expression>, Error<'source>> {
        // Phase 1: lower the constructor head. `Partial*` heads keep the
        // component type open until we've seen the arguments below.
        let constructor_h = self.constructor(constructor, ctx.reborrow())?;

        // Phase 2: lower the arguments, recording each argument's span and
        // the resolved type of the first argument (the only one inference
        // needs).
        let components_h = match *components {
            [] => ComponentsHandle::None,
            [component] => {
                let span = ctx.ast_expressions.get_span(component);
                let component = self.expression(component, ctx.reborrow())?;
                ctx.grow_types(component)?;
                let ty = &ctx.typifier()[component];

                ComponentsHandle::One {
                    component,
                    span,
                    ty,
                }
            }
            [component, ref rest @ ..] => {
                let span = ctx.ast_expressions.get_span(component);
                let component = self.expression(component, ctx.reborrow())?;

                let components = std::iter::once(Ok(component))
                    .chain(
                        rest.iter()
                            .map(|&component| self.expression(component, ctx.reborrow())),
                    )
                    .collect::<Result<_, _>>()?;
                let spans = std::iter::once(span)
                    .chain(
                        rest.iter()
                            .map(|&component| ctx.ast_expressions.get_span(component)),
                    )
                    .collect();

                ctx.grow_types(component)?;
                let ty = &ctx.typifier()[component];

                ComponentsHandle::Many {
                    components,
                    spans,
                    first_component_ty: ty,
                }
            }
        };

        // Phase 3: borrow both sides so the dispatch below can match on
        // `TypeInner` values directly.
        let (components, constructor) = (
            components_h.borrow(ctx.module),
            constructor_h.borrow(ctx.module),
        );

        // Phase 4: dispatch on (arguments, constructor head) to pick the IR
        // expression. Guards distinguish conversions from error cases.
        let expr = match (components, constructor) {
            // Empty constructor
            (Components::None, dst_ty) => match dst_ty {
                ConcreteConstructor::Type(ty, _) => {
                    return Ok(ctx.interrupt_emitter(crate::Expression::ZeroValue(ty), span))
                }
                _ => return Err(Error::TypeNotInferrable(ty_span)),
            },

            // Scalar constructor & conversion (scalar -> scalar)
            (
                Components::One {
                    component,
                    ty_inner: &crate::TypeInner::Scalar { .. },
                    ..
                },
                ConcreteConstructor::Type(_, &crate::TypeInner::Scalar { kind, width }),
            ) => crate::Expression::As {
                expr: component,
                kind,
                convert: Some(width),
            },

            // Vector conversion (vector -> vector)
            (
                Components::One {
                    component,
                    ty_inner: &crate::TypeInner::Vector { size: src_size, .. },
                    ..
                },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Vector {
                        size: dst_size,
                        kind: dst_kind,
                        width: dst_width,
                    },
                ),
            ) if dst_size == src_size => crate::Expression::As {
                expr: component,
                kind: dst_kind,
                convert: Some(dst_width),
            },

            // Vector conversion (vector -> vector) - partial
            (
                Components::One {
                    component,
                    ty_inner:
                        &crate::TypeInner::Vector {
                            size: src_size,
                            kind: src_kind,
                            ..
                        },
                    ..
                },
                ConcreteConstructor::PartialVector { size: dst_size },
            ) if dst_size == src_size => crate::Expression::As {
                expr: component,
                kind: src_kind,
                convert: None,
            },

            // Matrix conversion (matrix -> matrix)
            (
                Components::One {
                    component,
                    ty_inner:
                        &crate::TypeInner::Matrix {
                            columns: src_columns,
                            rows: src_rows,
                            ..
                        },
                    ..
                },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Matrix {
                        columns: dst_columns,
                        rows: dst_rows,
                        width: dst_width,
                    },
                ),
            ) if dst_columns == src_columns && dst_rows == src_rows => crate::Expression::As {
                expr: component,
                kind: crate::ScalarKind::Float,
                convert: Some(dst_width),
            },

            // Matrix conversion (matrix -> matrix) - partial
            (
                Components::One {
                    component,
                    ty_inner:
                        &crate::TypeInner::Matrix {
                            columns: src_columns,
                            rows: src_rows,
                            ..
                        },
                    ..
                },
                ConcreteConstructor::PartialMatrix {
                    columns: dst_columns,
                    rows: dst_rows,
                },
            ) if dst_columns == src_columns && dst_rows == src_rows => crate::Expression::As {
                expr: component,
                kind: crate::ScalarKind::Float,
                convert: None,
            },

            // Vector constructor (splat) - infer type
            (
                Components::One {
                    component,
                    ty_inner: &crate::TypeInner::Scalar { .. },
                    ..
                },
                ConcreteConstructor::PartialVector { size },
            ) => crate::Expression::Splat {
                size,
                value: component,
            },

            // Vector constructor (splat)
            //
            // NOTE(review): this guard accepts a match on *either* kind or
            // width; a conversion-free splat would seem to require both —
            // confirm against the WGSL spec's value-constructor rules.
            (
                Components::One {
                    component,
                    ty_inner:
                        &crate::TypeInner::Scalar {
                            kind: src_kind,
                            width: src_width,
                            ..
                        },
                    ..
                },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Vector {
                        size,
                        kind: dst_kind,
                        width: dst_width,
                    },
                ),
            ) if dst_kind == src_kind || dst_width == src_width => crate::Expression::Splat {
                size,
                value: component,
            },

            // Vector constructor (by elements)
            (
                Components::Many {
                    components,
                    first_component_ty_inner:
                        &crate::TypeInner::Scalar { kind, width }
                        | &crate::TypeInner::Vector { kind, width, .. },
                    ..
                },
                ConcreteConstructor::PartialVector { size },
            )
            | (
                Components::Many {
                    components,
                    first_component_ty_inner:
                        &crate::TypeInner::Scalar { .. } | &crate::TypeInner::Vector { .. },
                    ..
                },
                ConcreteConstructor::Type(_, &crate::TypeInner::Vector { size, width, kind }),
            ) => {
                let inner = crate::TypeInner::Vector { size, kind, width };
                let ty = ctx.ensure_type_exists(inner);
                crate::Expression::Compose { ty, components }
            }

            // Matrix constructor (by elements)
            (
                Components::Many {
                    components,
                    first_component_ty_inner: &crate::TypeInner::Scalar { width, .. },
                    ..
                },
                ConcreteConstructor::PartialMatrix { columns, rows },
            )
            | (
                Components::Many {
                    components,
                    first_component_ty_inner: &crate::TypeInner::Scalar { .. },
                    ..
                },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Matrix {
                        columns,
                        rows,
                        width,
                    },
                ),
            ) => {
                // Group the flat scalar arguments into one column vector per
                // `rows` scalars, then compose the matrix from those columns.
                let vec_ty = ctx.ensure_type_exists(crate::TypeInner::Vector {
                    width,
                    kind: crate::ScalarKind::Float,
                    size: rows,
                });

                let components = components
                    .chunks(rows as usize)
                    .map(|vec_components| {
                        ctx.append_expression(
                            crate::Expression::Compose {
                                ty: vec_ty,
                                components: Vec::from(vec_components),
                            },
                            Default::default(),
                        )
                    })
                    .collect();

                let ty = ctx.ensure_type_exists(crate::TypeInner::Matrix {
                    columns,
                    rows,
                    width,
                });
                crate::Expression::Compose { ty, components }
            }

            // Matrix constructor (by columns)
            (
                Components::Many {
                    components,
                    first_component_ty_inner: &crate::TypeInner::Vector { width, .. },
                    ..
                },
                ConcreteConstructor::PartialMatrix { columns, rows },
            )
            | (
                Components::Many {
                    components,
                    first_component_ty_inner: &crate::TypeInner::Vector { .. },
                    ..
                },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Matrix {
                        columns,
                        rows,
                        width,
                    },
                ),
            ) => {
                let ty = ctx.ensure_type_exists(crate::TypeInner::Matrix {
                    columns,
                    rows,
                    width,
                });
                crate::Expression::Compose { ty, components }
            }

            // Array constructor - infer type
            (components, ConcreteConstructor::PartialArray) => {
                let components = components.into_components_vec();

                // Element type is taken from the first component; the length
                // is simply the argument count.
                let base = ctx.register_type(components[0])?;

                let inner = crate::TypeInner::Array {
                    base,
                    size: crate::ArraySize::Constant(
                        NonZeroU32::new(u32::try_from(components.len()).unwrap()).unwrap(),
                    ),
                    stride: {
                        self.layouter.update(ctx.module.to_ctx()).unwrap();
                        self.layouter[base].to_stride()
                    },
                };
                let ty = ctx.ensure_type_exists(inner);

                crate::Expression::Compose { ty, components }
            }

            // Array constructor
            (components, ConcreteConstructor::Type(ty, &crate::TypeInner::Array { .. })) => {
                let components = components.into_components_vec();
                crate::Expression::Compose { ty, components }
            }

            // Struct constructor
            (components, ConcreteConstructor::Type(ty, &crate::TypeInner::Struct { .. })) => {
                crate::Expression::Compose {
                    ty,
                    components: components.into_components_vec(),
                }
            }

            // ERRORS

            // Bad conversion (type cast)
            (Components::One { span, ty_inner, .. }, _) => {
                let from_type = ctx.format_typeinner(ty_inner);
                return Err(Error::BadTypeCast {
                    span,
                    from_type,
                    to_type: constructor_h.to_error_string(ctx.reborrow()),
                });
            }

            // Too many parameters for scalar constructor
            (
                Components::Many { spans, .. },
                ConcreteConstructor::Type(_, &crate::TypeInner::Scalar { .. }),
            ) => {
                // Highlight everything after the first (acceptable) argument.
                let span = spans[1].until(spans.last().unwrap());
                return Err(Error::UnexpectedComponents(span));
            }

            // Parameters are of the wrong type for vector or matrix constructor
            (
                Components::Many { spans, .. },
                ConcreteConstructor::Type(
                    _,
                    &crate::TypeInner::Vector { .. } | &crate::TypeInner::Matrix { .. },
                )
                | ConcreteConstructor::PartialVector { .. }
                | ConcreteConstructor::PartialMatrix { .. },
            ) => {
                return Err(Error::InvalidConstructorComponentType(spans[0], 0));
            }

            // Other types can't be constructed
            _ => return Err(Error::TypeNotConstructible(ty_span)),
        };

        let expr = ctx.append_expression(expr, span);
        Ok(expr)
    }

    /// Build a Naga IR [`Type`] for `constructor` if there is enough
    /// information to do so.
    ///
    /// For `Partial` variants of [`ast::ConstructorType`], we don't know the
    /// component type, so in that case we return the appropriate `Partial`
    /// variant of [`ConcreteConstructorHandle`].
    ///
    /// But for the other `ConstructorType` variants, we have everything we need
    /// to know to actually produce a Naga IR type. In this case we add to/find
    /// in [`ctx.module`] a suitable Naga `Type` and return a
    /// [`ConcreteConstructorHandle::Type`] value holding its handle.
    ///
    /// Note that constructing an [`Array`] type may require inserting
    /// [`Constant`]s as well as `Type`s into `ctx.module`, to represent the
    /// array's length.
    ///
    /// [`Type`]: crate::Type
    /// [`ctx.module`]: ExpressionContext::module
    /// [`Array`]: crate::TypeInner::Array
    /// [`Constant`]: crate::Constant
    fn constructor<'out>(
        &mut self,
        constructor: &ast::ConstructorType<'source>,
        mut ctx: ExpressionContext<'source, '_, 'out>,
    ) -> Result<ConcreteConstructorHandle, Error<'source>> {
        let c = match *constructor {
            ast::ConstructorType::Scalar { width, kind } => {
                let ty = ctx.ensure_type_exists(crate::TypeInner::Scalar { width, kind });
                ConcreteConstructorHandle::Type(ty)
            }
            ast::ConstructorType::PartialVector { size } => {
                ConcreteConstructorHandle::PartialVector { size }
            }
            ast::ConstructorType::Vector { size, kind, width } => {
                let ty = ctx.ensure_type_exists(crate::TypeInner::Vector { size, kind, width });
                ConcreteConstructorHandle::Type(ty)
            }
            ast::ConstructorType::PartialMatrix { rows, columns } => {
                ConcreteConstructorHandle::PartialMatrix { rows, columns }
            }
            ast::ConstructorType::Matrix {
                rows,
                columns,
                width,
            } => {
                let ty = ctx.ensure_type_exists(crate::TypeInner::Matrix {
                    columns,
                    rows,
                    width,
                });
                ConcreteConstructorHandle::Type(ty)
            }
            ast::ConstructorType::PartialArray => ConcreteConstructorHandle::PartialArray,
            ast::ConstructorType::Array { base, size } => {
                let base = self.resolve_ast_type(base, ctx.as_global())?;
                // Array lengths must be constant expressions; evaluate them
                // in a const context.
                let size = match size {
                    ast::ArraySize::Constant(expr) => {
                        let const_expr = self.expression(expr, ctx.as_const())?;
                        crate::ArraySize::Constant(ctx.array_length(const_expr)?)
                    }
                    ast::ArraySize::Dynamic => crate::ArraySize::Dynamic,
                };

                self.layouter.update(ctx.module.to_ctx()).unwrap();
                let ty = ctx.ensure_type_exists(crate::TypeInner::Array {
                    base,
                    size,
                    stride: self.layouter[base].to_stride(),
                });
                ConcreteConstructorHandle::Type(ty)
            }
            ast::ConstructorType::Type(ty) => ConcreteConstructorHandle::Type(ty),
        };

        Ok(c)
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,301 @@
/*!
Frontend for [WGSL][wgsl] (WebGPU Shading Language).
[wgsl]: https://gpuweb.github.io/gpuweb/wgsl.html
*/
mod error;
mod index;
mod lower;
mod parse;
#[cfg(test)]
mod tests;
use crate::front::wgsl::error::Error;
use crate::front::wgsl::parse::Parser;
use thiserror::Error;
pub use crate::front::wgsl::error::ParseError;
use crate::front::wgsl::lower::Lowerer;
/// WGSL front-end entry point: wraps a reusable [`Parser`].
pub struct Frontend {
    parser: Parser,
}
impl Frontend {
    /// Create a front-end with a fresh parser.
    pub const fn new() -> Self {
        Self {
            parser: Parser::new(),
        }
    }

    /// Parse `source` into a Naga [`crate::Module`], mapping any failure into
    /// a [`ParseError`] whose locations are resolved against `source`.
    pub fn parse(&mut self, source: &str) -> Result<crate::Module, ParseError> {
        self.inner(source).map_err(|err| err.as_parse_error(source))
    }

    /// Parse, index, and lower in sequence, keeping the borrowed error type.
    fn inner<'a>(&mut self, source: &'a str) -> Result<crate::Module, Error<'a>> {
        let translation_unit = self.parser.parse(source)?;
        let index = index::Index::generate(&translation_unit)?;
        let module = Lowerer::new(&index).lower(&translation_unit)?;
        Ok(module)
    }
}
/// Convenience wrapper: parse a WGSL string using a fresh [`Frontend`].
pub fn parse_str(source: &str) -> Result<crate::Module, ParseError> {
    let mut frontend = Frontend::new();
    frontend.parse(source)
}
impl crate::StorageFormat {
    /// Return the WGSL texel-format identifier for this storage format, as it
    /// appears inside `texture_storage_*<...>` types.
    const fn to_wgsl(self) -> &'static str {
        use crate::StorageFormat as Sf;
        match self {
            Sf::R8Unorm => "r8unorm",
            Sf::R8Snorm => "r8snorm",
            Sf::R8Uint => "r8uint",
            Sf::R8Sint => "r8sint",
            Sf::R16Uint => "r16uint",
            Sf::R16Sint => "r16sint",
            Sf::R16Float => "r16float",
            Sf::Rg8Unorm => "rg8unorm",
            Sf::Rg8Snorm => "rg8snorm",
            Sf::Rg8Uint => "rg8uint",
            Sf::Rg8Sint => "rg8sint",
            Sf::R32Uint => "r32uint",
            Sf::R32Sint => "r32sint",
            Sf::R32Float => "r32float",
            Sf::Rg16Uint => "rg16uint",
            Sf::Rg16Sint => "rg16sint",
            Sf::Rg16Float => "rg16float",
            Sf::Rgba8Unorm => "rgba8unorm",
            Sf::Rgba8Snorm => "rgba8snorm",
            Sf::Rgba8Uint => "rgba8uint",
            Sf::Rgba8Sint => "rgba8sint",
            Sf::Rgb10a2Unorm => "rgb10a2unorm",
            Sf::Rg11b10Float => "rg11b10float",
            Sf::Rg32Uint => "rg32uint",
            Sf::Rg32Sint => "rg32sint",
            Sf::Rg32Float => "rg32float",
            Sf::Rgba16Uint => "rgba16uint",
            Sf::Rgba16Sint => "rgba16sint",
            Sf::Rgba16Float => "rgba16float",
            Sf::Rgba32Uint => "rgba32uint",
            Sf::Rgba32Sint => "rgba32sint",
            Sf::Rgba32Float => "rgba32float",
            Sf::R16Unorm => "r16unorm",
            Sf::R16Snorm => "r16snorm",
            Sf::Rg16Unorm => "rg16unorm",
            Sf::Rg16Snorm => "rg16snorm",
            Sf::Rgba16Unorm => "rgba16unorm",
            Sf::Rgba16Snorm => "rgba16snorm",
        }
    }
}
impl crate::TypeInner {
    /// Formats the type as it is written in wgsl.
    ///
    /// For example `vec3<f32>`.
    ///
    /// Note: The names of a `TypeInner::Struct` is not known. Therefore this method will simply return "struct" for them.
    fn to_wgsl(&self, gctx: crate::proc::GlobalCtx) -> String {
        use crate::TypeInner as Ti;

        match *self {
            Ti::Scalar { kind, width } => kind.to_wgsl(width),
            Ti::Vector { size, kind, width } => {
                format!("vec{}<{}>", size as u32, kind.to_wgsl(width))
            }
            Ti::Matrix {
                columns,
                rows,
                width,
            } => {
                // Matrix elements are always floats in WGSL.
                format!(
                    "mat{}x{}<{}>",
                    columns as u32,
                    rows as u32,
                    crate::ScalarKind::Float.to_wgsl(width),
                )
            }
            Ti::Atomic { kind, width } => {
                format!("atomic<{}>", kind.to_wgsl(width))
            }
            Ti::Pointer { base, .. } => {
                // Address space is omitted; only the pointee's name is shown.
                let base = &gctx.types[base];
                let name = base.name.as_deref().unwrap_or("unknown");
                format!("ptr<{name}>")
            }
            Ti::ValuePointer { kind, width, .. } => {
                format!("ptr<{}>", kind.to_wgsl(width))
            }
            Ti::Array { base, size, .. } => {
                let member_type = &gctx.types[base];
                let base = member_type.name.as_deref().unwrap_or("unknown");
                match size {
                    crate::ArraySize::Constant(size) => format!("array<{base}, {size}>"),
                    crate::ArraySize::Dynamic => format!("array<{base}>"),
                }
            }
            Ti::Struct { .. } => {
                // TODO: Actually output the struct?
                "struct".to_string()
            }
            Ti::Image {
                dim,
                arrayed,
                class,
            } => {
                // Assemble `texture{class}{dim}{array}<...>` piece by piece.
                let dim_suffix = match dim {
                    crate::ImageDimension::D1 => "_1d",
                    crate::ImageDimension::D2 => "_2d",
                    crate::ImageDimension::D3 => "_3d",
                    crate::ImageDimension::Cube => "_cube",
                };
                let array_suffix = if arrayed { "_array" } else { "" };
                let class_suffix = match class {
                    crate::ImageClass::Sampled { multi: true, .. } => "_multisampled",
                    crate::ImageClass::Depth { multi: false } => "_depth",
                    crate::ImageClass::Depth { multi: true } => "_depth_multisampled",
                    crate::ImageClass::Sampled { multi: false, .. }
                    | crate::ImageClass::Storage { .. } => "",
                };
                let type_in_brackets = match class {
                    crate::ImageClass::Sampled { kind, .. } => {
                        // Note: The only valid widths are 4 bytes wide.
                        // The lexer has already verified this, so we can safely assume it here.
                        // https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type
                        let element_type = kind.to_wgsl(4);
                        format!("<{element_type}>")
                    }
                    crate::ImageClass::Depth { multi: _ } => String::new(),
                    crate::ImageClass::Storage { format, access } => {
                        if access.contains(crate::StorageAccess::STORE) {
                            format!("<{},write>", format.to_wgsl())
                        } else {
                            format!("<{}>", format.to_wgsl())
                        }
                    }
                };
                format!("texture{class_suffix}{dim_suffix}{array_suffix}{type_in_brackets}")
            }
            Ti::Sampler { .. } => "sampler".to_string(),
            Ti::AccelerationStructure => "acceleration_structure".to_string(),
            Ti::RayQuery => "ray_query".to_string(),
            Ti::BindingArray { base, size, .. } => {
                let member_type = &gctx.types[base];
                let base = member_type.name.as_deref().unwrap_or("unknown");
                match size {
                    crate::ArraySize::Constant(size) => format!("binding_array<{base}, {size}>"),
                    crate::ArraySize::Dynamic => format!("binding_array<{base}>"),
                }
            }
        }
    }
}
/// Tests for [`crate::TypeInner::to_wgsl`] formatting.
//
// `#[cfg(test)]` keeps this test-only module (and its dead-code warnings)
// out of non-test builds.
#[cfg(test)]
mod type_inner_tests {
    #[test]
    fn to_wgsl() {
        use std::num::NonZeroU32;

        let mut types = crate::UniqueArena::new();

        let mytype1 = types.insert(
            crate::Type {
                name: Some("MyType1".to_string()),
                inner: crate::TypeInner::Struct {
                    members: vec![],
                    span: 0,
                },
            },
            Default::default(),
        );
        let mytype2 = types.insert(
            crate::Type {
                name: Some("MyType2".to_string()),
                inner: crate::TypeInner::Struct {
                    members: vec![],
                    span: 0,
                },
            },
            Default::default(),
        );

        let gctx = crate::proc::GlobalCtx {
            types: &types,
            constants: &crate::Arena::new(),
            const_expressions: &crate::Arena::new(),
        };
        let array = crate::TypeInner::Array {
            base: mytype1,
            stride: 4,
            // Safe constructor instead of `new_unchecked`: the argument is a
            // non-zero literal, so `unwrap` cannot fail and no `unsafe` is
            // needed.
            size: crate::ArraySize::Constant(NonZeroU32::new(32).unwrap()),
        };
        assert_eq!(array.to_wgsl(gctx), "array<MyType1, 32>");

        let mat = crate::TypeInner::Matrix {
            rows: crate::VectorSize::Quad,
            columns: crate::VectorSize::Bi,
            width: 8,
        };
        assert_eq!(mat.to_wgsl(gctx), "mat2x4<f64>");

        let ptr = crate::TypeInner::Pointer {
            base: mytype2,
            space: crate::AddressSpace::Storage {
                access: crate::StorageAccess::default(),
            },
        };
        assert_eq!(ptr.to_wgsl(gctx), "ptr<MyType2>");

        let img1 = crate::TypeInner::Image {
            dim: crate::ImageDimension::D2,
            arrayed: false,
            class: crate::ImageClass::Sampled {
                kind: crate::ScalarKind::Float,
                multi: true,
            },
        };
        assert_eq!(img1.to_wgsl(gctx), "texture_multisampled_2d<f32>");

        let img2 = crate::TypeInner::Image {
            dim: crate::ImageDimension::Cube,
            arrayed: true,
            class: crate::ImageClass::Depth { multi: false },
        };
        assert_eq!(img2.to_wgsl(gctx), "texture_depth_cube_array");

        let img3 = crate::TypeInner::Image {
            dim: crate::ImageDimension::D2,
            arrayed: false,
            class: crate::ImageClass::Depth { multi: true },
        };
        assert_eq!(img3.to_wgsl(gctx), "texture_depth_multisampled_2d");

        let array = crate::TypeInner::BindingArray {
            base: mytype1,
            size: crate::ArraySize::Constant(NonZeroU32::new(32).unwrap()),
        };
        assert_eq!(array.to_wgsl(gctx), "binding_array<MyType1, 32>");
    }
}
impl crate::ScalarKind {
/// Format a scalar kind+width as a type is written in wgsl.
///
/// Examples: `f32`, `u64`, `bool`.
fn to_wgsl(self, width: u8) -> String {
let prefix = match self {
crate::ScalarKind::Sint => "i",
crate::ScalarKind::Uint => "u",
crate::ScalarKind::Float => "f",
crate::ScalarKind::Bool => return "bool".to_string(),
};
format!("{}{}", prefix, width * 8)
}
}

View file

@ -0,0 +1,486 @@
use crate::front::wgsl::parse::number::Number;
use crate::{Arena, FastHashSet, Handle, Span};
use std::hash::Hash;
/// The parse tree for an entire WGSL source file.
#[derive(Debug, Default)]
pub struct TranslationUnit<'a> {
    /// All module-scope declarations, in source order.
    pub decls: Arena<GlobalDecl<'a>>,
    /// The common expressions arena for the entire translation unit.
    ///
    /// All functions, global initializers, array lengths, etc. store their
    /// expressions here. We apportion these out to individual Naga
    /// [`Function`]s' expression arenas at lowering time. Keeping them all in a
    /// single arena simplifies handling of things like array lengths (which are
    /// effectively global and thus don't clearly belong to any function) and
    /// initializers (which can appear in both function-local and module-scope
    /// contexts).
    ///
    /// [`Function`]: crate::Function
    pub expressions: Arena<Expression<'a>>,
    /// Non-user-defined types, like `vec4<f32>` or `array<i32, 10>`.
    ///
    /// These are referred to by `Handle<ast::Type<'a>>` values.
    /// User-defined types are referred to by name until lowering.
    pub types: Arena<Type<'a>>,
}
/// A name appearing in WGSL source, together with its location.
#[derive(Debug, Clone, Copy)]
pub struct Ident<'a> {
    pub name: &'a str,
    pub span: Span,
}
/// What an identifier expression refers to.
#[derive(Debug)]
pub enum IdentExpr<'a> {
    /// A name not bound to any local; resolved at lowering time.
    Unresolved(&'a str),
    /// A reference to a local variable or function argument.
    Local(Handle<Local>),
}
/// A reference to a module-scope definition or predeclared object.
///
/// Each [`GlobalDecl`] holds a set of these values, to be resolved to
/// specific definitions later. To support de-duplication, `Eq` and
/// `Hash` on a `Dependency` value consider only the name, not the
/// source location at which the reference occurs.
#[derive(Debug)]
pub struct Dependency<'a> {
    /// The name referred to.
    pub ident: &'a str,
    /// The location at which the reference to that name occurs.
    /// Ignored by `Eq`/`Hash`; kept for diagnostics only.
    pub usage: Span,
}
// A dependency's identity is its name alone; `usage` spans are deliberately
// excluded so duplicate references de-duplicate in a `FastHashSet`.
impl Hash for Dependency<'_> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.ident.hash(state);
    }
}

impl PartialEq for Dependency<'_> {
    fn eq(&self, other: &Self) -> bool {
        self.ident == other.ident
    }
}

impl Eq for Dependency<'_> {}
/// A module-scope declaration.
#[derive(Debug)]
pub struct GlobalDecl<'a> {
    pub kind: GlobalDeclKind<'a>,

    /// Names of all module-scope or predeclared objects this
    /// declaration uses.
    pub dependencies: FastHashSet<Dependency<'a>>,
}
/// The payload of a module-scope declaration: function, variable, constant,
/// struct, or type alias.
#[derive(Debug)]
pub enum GlobalDeclKind<'a> {
    Fn(Function<'a>),
    Var(GlobalVariable<'a>),
    Const(Const<'a>),
    Struct(Struct<'a>),
    Type(TypeAlias<'a>),
}
/// A formal parameter of a [`Function`].
#[derive(Debug)]
pub struct FunctionArgument<'a> {
    pub name: Ident<'a>,
    pub ty: Handle<Type<'a>>,
    /// Shader-stage binding (`@location`, `@builtin`, ...), if any.
    pub binding: Option<crate::Binding>,
    /// The argument's slot in the function's [`Function::locals`] arena.
    pub handle: Handle<Local>,
}
/// The declared return type of a [`Function`], with its optional binding.
#[derive(Debug)]
pub struct FunctionResult<'a> {
    pub ty: Handle<Type<'a>>,
    pub binding: Option<crate::Binding>,
}
/// Entry-point attributes attached to a function (`@vertex`, `@fragment`,
/// `@compute @workgroup_size(...)`, ...).
#[derive(Debug)]
pub struct EntryPoint {
    pub stage: crate::ShaderStage,
    pub early_depth_test: Option<crate::EarlyDepthTest>,
    pub workgroup_size: [u32; 3],
}
#[cfg(doc)]
use crate::front::wgsl::lower::{RuntimeExpressionContext, StatementContext};
/// A parsed WGSL function declaration (entry point or helper).
#[derive(Debug)]
pub struct Function<'a> {
    /// Entry-point attributes, or `None` for an ordinary function.
    pub entry_point: Option<EntryPoint>,
    pub name: Ident<'a>,
    pub arguments: Vec<FunctionArgument<'a>>,
    pub result: Option<FunctionResult<'a>>,

    /// Local variable and function argument arena.
    ///
    /// Note that the `Local` here is actually a zero-sized type. The AST keeps
    /// all the detailed information about locals - names, types, etc. - in
    /// [`LocalDecl`] statements. For arguments, that information is kept in
    /// [`arguments`]. This `Arena`'s only role is to assign a unique `Handle`
    /// to each of them, and track their definitions' spans for use in
    /// diagnostics.
    ///
    /// In the AST, when an [`Ident`] expression refers to a local variable or
    /// argument, its [`IdentExpr`] holds the referent's `Handle<Local>` in this
    /// arena.
    ///
    /// During lowering, [`LocalDecl`] statements add entries to a per-function
    /// table that maps `Handle<Local>` values to their Naga representations,
    /// accessed via [`StatementContext::local_table`] and
    /// [`RuntimeExpressionContext::local_table`]. This table is then consulted when
    /// lowering subsequent [`Ident`] expressions.
    ///
    /// [`LocalDecl`]: StatementKind::LocalDecl
    /// [`arguments`]: Function::arguments
    /// [`Ident`]: Expression::Ident
    /// [`StatementContext::local_table`]: StatementContext::local_table
    /// [`RuntimeExpressionContext::local_table`]: RuntimeExpressionContext::local_table
    pub locals: Arena<Local>,

    pub body: Block<'a>,
}
/// A module-scope `var` declaration.
#[derive(Debug)]
pub struct GlobalVariable<'a> {
    pub name: Ident<'a>,
    pub space: crate::AddressSpace,
    /// `@group`/`@binding` resource binding, if any.
    pub binding: Option<crate::ResourceBinding>,
    pub ty: Handle<Type<'a>>,
    /// Optional initializer expression.
    pub init: Option<Handle<Expression<'a>>>,
}
/// A single member of a [`Struct`] declaration.
#[derive(Debug)]
pub struct StructMember<'a> {
    pub name: Ident<'a>,
    pub ty: Handle<Type<'a>>,
    pub binding: Option<crate::Binding>,
    /// Explicit `@align` attribute value, with its span for diagnostics.
    pub align: Option<(u32, Span)>,
    /// Explicit `@size` attribute value, with its span for diagnostics.
    pub size: Option<(u32, Span)>,
}
/// A `struct` declaration.
#[derive(Debug)]
pub struct Struct<'a> {
    pub name: Ident<'a>,
    pub members: Vec<StructMember<'a>>,
}
/// A `type`/`alias` declaration binding a name to a type.
#[derive(Debug)]
pub struct TypeAlias<'a> {
    pub name: Ident<'a>,
    pub ty: Handle<Type<'a>>,
}
/// A module-scope `const` declaration; the type may be inferred from `init`.
#[derive(Debug)]
pub struct Const<'a> {
    pub name: Ident<'a>,
    pub ty: Option<Handle<Type<'a>>>,
    pub init: Handle<Expression<'a>>,
}
/// The size of an [`Array`] or [`BindingArray`].
///
/// [`Array`]: Type::Array
/// [`BindingArray`]: Type::BindingArray
#[derive(Debug, Copy, Clone)]
pub enum ArraySize<'a> {
    /// The length as a constant expression.
    Constant(Handle<Expression<'a>>),
    /// A runtime-sized array (no declared length).
    Dynamic,
}
/// A type as it can be spelled in WGSL source.
///
/// Built-in forms are structural; user-defined names stay as [`Type::User`]
/// until lowering resolves them against module-scope declarations.
#[derive(Debug)]
pub enum Type<'a> {
    Scalar {
        kind: crate::ScalarKind,
        width: crate::Bytes,
    },
    Vector {
        size: crate::VectorSize,
        kind: crate::ScalarKind,
        width: crate::Bytes,
    },
    Matrix {
        columns: crate::VectorSize,
        rows: crate::VectorSize,
        width: crate::Bytes,
    },
    Atomic {
        kind: crate::ScalarKind,
        width: crate::Bytes,
    },
    Pointer {
        base: Handle<Type<'a>>,
        space: crate::AddressSpace,
    },
    Array {
        base: Handle<Type<'a>>,
        size: ArraySize<'a>,
    },
    Image {
        dim: crate::ImageDimension,
        arrayed: bool,
        class: crate::ImageClass,
    },
    Sampler {
        comparison: bool,
    },
    AccelerationStructure,
    RayQuery,
    RayDesc,
    RayIntersection,
    BindingArray {
        base: Handle<Type<'a>>,
        size: ArraySize<'a>,
    },

    /// A user-defined type, like a struct or a type alias.
    User(Ident<'a>),
}
/// A brace-delimited sequence of statements.
#[derive(Debug, Default)]
pub struct Block<'a> {
    pub stmts: Vec<Statement<'a>>,
}
/// A statement together with its source span.
#[derive(Debug)]
pub struct Statement<'a> {
    pub kind: StatementKind<'a>,
    pub span: Span,
}
/// The various kinds of WGSL statement.
#[derive(Debug)]
pub enum StatementKind<'a> {
    LocalDecl(LocalDecl<'a>),
    /// A nested block used as a statement.
    Block(Block<'a>),
    If {
        condition: Handle<Expression<'a>>,
        accept: Block<'a>,
        reject: Block<'a>,
    },
    Switch {
        selector: Handle<Expression<'a>>,
        cases: Vec<SwitchCase<'a>>,
    },
    Loop {
        body: Block<'a>,
        continuing: Block<'a>,
        /// Condition of a `break if`, if the continuing block ends in one.
        break_if: Option<Handle<Expression<'a>>>,
    },
    Break,
    Continue,
    Return {
        value: Option<Handle<Expression<'a>>>,
    },
    Kill,
    /// A function call used as a statement, with any result discarded.
    Call {
        function: Ident<'a>,
        arguments: Vec<Handle<Expression<'a>>>,
    },
    Assign {
        target: Handle<Expression<'a>>,
        /// For a compound assignment, the combined operator; `None` for plain `=`.
        op: Option<crate::BinaryOperator>,
        value: Handle<Expression<'a>>,
    },
    Increment(Handle<Expression<'a>>),
    Decrement(Handle<Expression<'a>>),
    /// An expression evaluated for its effects, with the value ignored.
    Ignore(Handle<Expression<'a>>),
}
/// The selector value of a single switch case.
#[derive(Debug)]
pub enum SwitchValue {
    I32(i32),
    U32(u32),
    /// The `default` case.
    Default,
}
/// One case of a [`StatementKind::Switch`].
#[derive(Debug)]
pub struct SwitchCase<'a> {
    pub value: SwitchValue,
    /// Span of the case's selector value.
    pub value_span: Span,
    pub body: Block<'a>,
    /// Whether control continues into the following case.
    pub fall_through: bool,
}
/// A type at the head of a [`Construct`] expression.
///
/// WGSL has two types of [`type constructor expressions`]:
///
/// - Those that fully specify the type being constructed, like
///   `vec3<f32>(x,y,z)`, which obviously constructs a `vec3<f32>`.
///
/// - Those that leave the component type of the composite being constructed
///   implicit, to be inferred from the argument types, like `vec3(x,y,z)`,
///   which constructs a `vec3<T>` where `T` is the type of `x`, `y`, and `z`.
///
/// This enum represents the head type of both cases. The `PartialFoo` variants
/// represent the second case, where the component type is implicit.
///
/// This does not cover structs or types referred to by type aliases. See the
/// documentation for [`Construct`] and [`Call`] expressions for details.
///
/// [`Construct`]: Expression::Construct
/// [`type constructor expressions`]: https://gpuweb.github.io/gpuweb/wgsl/#type-constructor-expr
/// [`Call`]: Expression::Call
#[derive(Debug)]
pub enum ConstructorType<'a> {
    /// A scalar type or conversion: `f32(1)`.
    Scalar {
        kind: crate::ScalarKind,
        width: crate::Bytes,
    },
    /// A vector construction whose component type is inferred from the
    /// argument: `vec3(1.0)`.
    PartialVector { size: crate::VectorSize },
    /// A vector construction whose component type is written out:
    /// `vec3<f32>(1.0)`.
    Vector {
        size: crate::VectorSize,
        kind: crate::ScalarKind,
        width: crate::Bytes,
    },
    /// A matrix construction whose component type is inferred from the
    /// argument: `mat2x2(1,2,3,4)`.
    PartialMatrix {
        columns: crate::VectorSize,
        rows: crate::VectorSize,
    },
    /// A matrix construction whose component type is written out:
    /// `mat2x2<f32>(1,2,3,4)`.
    Matrix {
        columns: crate::VectorSize,
        rows: crate::VectorSize,
        width: crate::Bytes,
    },
    /// An array whose component type and size are inferred from the arguments:
    /// `array(3,4,5)`.
    PartialArray,
    /// An array whose component type and size are written out:
    /// `array<u32, 4>(3,4,5)`.
    Array {
        base: Handle<Type<'a>>,
        size: ArraySize<'a>,
    },
    /// Constructing a value of a known Naga IR type.
    ///
    /// This variant is produced only during lowering, when we have Naga types
    /// available, never during parsing.
    Type(Handle<crate::Type>),
}
/// A literal value as written in WGSL source.
#[derive(Debug, Copy, Clone)]
pub enum Literal {
    Bool(bool),
    /// A numeric literal; see [`Number`] for the concrete kinds.
    Number(Number),
}
#[cfg(doc)]
use crate::front::wgsl::lower::Lowerer;
/// A WGSL expression, as parsed.
#[derive(Debug)]
pub enum Expression<'a> {
    Literal(Literal),
    /// A reference to a named value.
    Ident(IdentExpr<'a>),
    /// A type constructor expression.
    ///
    /// This is only used for expressions like `KEYWORD(EXPR...)` and
    /// `KEYWORD<PARAM>(EXPR...)`, where `KEYWORD` is a [type-defining keyword] like
    /// `vec3`. These keywords cannot be shadowed by user definitions, so we can
    /// tell that such an expression is a construction immediately.
    ///
    /// For ordinary identifiers, we can't tell whether an expression like
    /// `IDENTIFIER(EXPR, ...)` is a construction expression or a function call
    /// until we know `IDENTIFIER`'s definition, so we represent those as
    /// [`Call`] expressions.
    ///
    /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords
    /// [`Call`]: Expression::Call
    Construct {
        ty: ConstructorType<'a>,
        /// Span of the head type, for diagnostics.
        ty_span: Span,
        components: Vec<Handle<Expression<'a>>>,
    },
    Unary {
        op: crate::UnaryOperator,
        expr: Handle<Expression<'a>>,
    },
    /// `&expr`: take the address of `expr`.
    AddrOf(Handle<Expression<'a>>),
    /// `*expr`: dereference a pointer.
    Deref(Handle<Expression<'a>>),
    Binary {
        op: crate::BinaryOperator,
        left: Handle<Expression<'a>>,
        right: Handle<Expression<'a>>,
    },
    /// A function call or type constructor expression.
    ///
    /// We can't tell whether an expression like `IDENTIFIER(EXPR, ...)` is a
    /// construction expression or a function call until we know `IDENTIFIER`'s
    /// definition, so we represent everything of that form as one of these
    /// expressions until lowering. At that point, [`Lowerer::call`] has
    /// everything's definition in hand, and can decide whether to emit a Naga
    /// [`Constant`], [`As`], [`Splat`], or [`Compose`] expression.
    ///
    /// [`Lowerer::call`]: Lowerer::call
    /// [`Constant`]: crate::Expression::Constant
    /// [`As`]: crate::Expression::As
    /// [`Splat`]: crate::Expression::Splat
    /// [`Compose`]: crate::Expression::Compose
    Call {
        function: Ident<'a>,
        arguments: Vec<Handle<Expression<'a>>>,
    },
    /// `base[index]`
    Index {
        base: Handle<Expression<'a>>,
        index: Handle<Expression<'a>>,
    },
    /// `base.field`
    Member {
        base: Handle<Expression<'a>>,
        field: Ident<'a>,
    },
    /// A `bitcast` expression.
    Bitcast {
        expr: Handle<Expression<'a>>,
        to: Handle<Type<'a>>,
        /// Span of the target type, for diagnostics.
        ty_span: Span,
    },
}
/// A function-scope `var` declaration.
#[derive(Debug)]
pub struct LocalVariable<'a> {
    pub name: Ident<'a>,
    /// Explicit type, if one was written; otherwise inferred from `init`.
    pub ty: Option<Handle<Type<'a>>>,
    pub init: Option<Handle<Expression<'a>>>,
    /// The slot in [`Function::locals`] this declaration introduces.
    pub handle: Handle<Local>,
}
/// A function-scope `let` binding.
#[derive(Debug)]
pub struct Let<'a> {
    pub name: Ident<'a>,
    /// Explicit type, if one was written; otherwise inferred from `init`.
    pub ty: Option<Handle<Type<'a>>>,
    /// The initializer; a `let` always has one.
    pub init: Handle<Expression<'a>>,
    /// The slot in [`Function::locals`] this binding introduces.
    pub handle: Handle<Local>,
}
/// A declaration of a function-scope name: either a `var` or a `let`.
#[derive(Debug)]
pub enum LocalDecl<'a> {
    Var(LocalVariable<'a>),
    Let(Let<'a>),
}
#[derive(Debug)]
/// A placeholder for a local variable declaration.
///
/// See [`Function::locals`] for more information.
pub struct Local;

View file

@ -0,0 +1,236 @@
use super::Error;
use crate::Span;
pub fn map_address_space(word: &str, span: Span) -> Result<crate::AddressSpace, Error<'_>> {
match word {
"private" => Ok(crate::AddressSpace::Private),
"workgroup" => Ok(crate::AddressSpace::WorkGroup),
"uniform" => Ok(crate::AddressSpace::Uniform),
"storage" => Ok(crate::AddressSpace::Storage {
access: crate::StorageAccess::default(),
}),
"push_constant" => Ok(crate::AddressSpace::PushConstant),
"function" => Ok(crate::AddressSpace::Function),
_ => Err(Error::UnknownAddressSpace(span)),
}
}
pub fn map_built_in(word: &str, span: Span) -> Result<crate::BuiltIn, Error<'_>> {
Ok(match word {
"position" => crate::BuiltIn::Position { invariant: false },
// vertex
"vertex_index" => crate::BuiltIn::VertexIndex,
"instance_index" => crate::BuiltIn::InstanceIndex,
"view_index" => crate::BuiltIn::ViewIndex,
// fragment
"front_facing" => crate::BuiltIn::FrontFacing,
"frag_depth" => crate::BuiltIn::FragDepth,
"primitive_index" => crate::BuiltIn::PrimitiveIndex,
"sample_index" => crate::BuiltIn::SampleIndex,
"sample_mask" => crate::BuiltIn::SampleMask,
// compute
"global_invocation_id" => crate::BuiltIn::GlobalInvocationId,
"local_invocation_id" => crate::BuiltIn::LocalInvocationId,
"local_invocation_index" => crate::BuiltIn::LocalInvocationIndex,
"workgroup_id" => crate::BuiltIn::WorkGroupId,
"num_workgroups" => crate::BuiltIn::NumWorkGroups,
_ => return Err(Error::UnknownBuiltin(span)),
})
}
pub fn map_interpolation(word: &str, span: Span) -> Result<crate::Interpolation, Error<'_>> {
match word {
"linear" => Ok(crate::Interpolation::Linear),
"flat" => Ok(crate::Interpolation::Flat),
"perspective" => Ok(crate::Interpolation::Perspective),
_ => Err(Error::UnknownAttribute(span)),
}
}
pub fn map_sampling(word: &str, span: Span) -> Result<crate::Sampling, Error<'_>> {
match word {
"center" => Ok(crate::Sampling::Center),
"centroid" => Ok(crate::Sampling::Centroid),
"sample" => Ok(crate::Sampling::Sample),
_ => Err(Error::UnknownAttribute(span)),
}
}
/// Resolve a storage texel format name (e.g. `rgba8unorm`) to
/// [`crate::StorageFormat`], or report an `UnknownStorageFormat` error
/// at `span`.
pub fn map_storage_format(word: &str, span: Span) -> Result<crate::StorageFormat, Error<'_>> {
    use crate::StorageFormat as Sf;
    Ok(match word {
        "r8unorm" => Sf::R8Unorm,
        "r8snorm" => Sf::R8Snorm,
        "r8uint" => Sf::R8Uint,
        "r8sint" => Sf::R8Sint,
        "r16unorm" => Sf::R16Unorm,
        "r16snorm" => Sf::R16Snorm,
        "r16uint" => Sf::R16Uint,
        "r16sint" => Sf::R16Sint,
        "r16float" => Sf::R16Float,
        "rg8unorm" => Sf::Rg8Unorm,
        "rg8snorm" => Sf::Rg8Snorm,
        "rg8uint" => Sf::Rg8Uint,
        "rg8sint" => Sf::Rg8Sint,
        "r32uint" => Sf::R32Uint,
        "r32sint" => Sf::R32Sint,
        "r32float" => Sf::R32Float,
        "rg16unorm" => Sf::Rg16Unorm,
        "rg16snorm" => Sf::Rg16Snorm,
        "rg16uint" => Sf::Rg16Uint,
        "rg16sint" => Sf::Rg16Sint,
        "rg16float" => Sf::Rg16Float,
        "rgba8unorm" => Sf::Rgba8Unorm,
        "rgba8snorm" => Sf::Rgba8Snorm,
        "rgba8uint" => Sf::Rgba8Uint,
        "rgba8sint" => Sf::Rgba8Sint,
        // packed formats
        "rgb10a2unorm" => Sf::Rgb10a2Unorm,
        "rg11b10float" => Sf::Rg11b10Float,
        "rg32uint" => Sf::Rg32Uint,
        "rg32sint" => Sf::Rg32Sint,
        "rg32float" => Sf::Rg32Float,
        "rgba16unorm" => Sf::Rgba16Unorm,
        "rgba16snorm" => Sf::Rgba16Snorm,
        "rgba16uint" => Sf::Rgba16Uint,
        "rgba16sint" => Sf::Rgba16Sint,
        "rgba16float" => Sf::Rgba16Float,
        "rgba32uint" => Sf::Rgba32Uint,
        "rgba32sint" => Sf::Rgba32Sint,
        "rgba32float" => Sf::Rgba32Float,
        _ => return Err(Error::UnknownStorageFormat(span)),
    })
}
pub fn get_scalar_type(word: &str) -> Option<(crate::ScalarKind, crate::Bytes)> {
match word {
// "f16" => Some((crate::ScalarKind::Float, 2)),
"f32" => Some((crate::ScalarKind::Float, 4)),
"f64" => Some((crate::ScalarKind::Float, 8)),
"i32" => Some((crate::ScalarKind::Sint, 4)),
"u32" => Some((crate::ScalarKind::Uint, 4)),
"bool" => Some((crate::ScalarKind::Bool, crate::BOOL_WIDTH)),
_ => None,
}
}
/// Resolve a derivative builtin name (`dpdx`, `dpdyFine`, `fwidthCoarse`, ...)
/// to its axis and precision control, or `None` if unrecognized.
pub fn map_derivative(word: &str) -> Option<(crate::DerivativeAxis, crate::DerivativeControl)> {
    use crate::{DerivativeAxis as Axis, DerivativeControl as Ctrl};
    // Split off the optional precision suffix; a bare name means `Ctrl::None`.
    let (base, ctrl) = if let Some(base) = word.strip_suffix("Coarse") {
        (base, Ctrl::Coarse)
    } else if let Some(base) = word.strip_suffix("Fine") {
        (base, Ctrl::Fine)
    } else {
        (word, Ctrl::None)
    };
    let axis = match base {
        "dpdx" => Axis::X,
        "dpdy" => Axis::Y,
        "fwidth" => Axis::Width,
        _ => return None,
    };
    Some((axis, ctrl))
}
pub fn map_relational_fun(word: &str) -> Option<crate::RelationalFunction> {
match word {
"any" => Some(crate::RelationalFunction::Any),
"all" => Some(crate::RelationalFunction::All),
_ => None,
}
}
/// Resolve a WGSL builtin math-function name to [`crate::MathFunction`].
///
/// Returns `None` when `word` is not a recognized math builtin.
pub fn map_standard_fun(word: &str) -> Option<crate::MathFunction> {
    use crate::MathFunction as Mf;
    Some(match word {
        // comparison
        "abs" => Mf::Abs,
        "min" => Mf::Min,
        "max" => Mf::Max,
        "clamp" => Mf::Clamp,
        "saturate" => Mf::Saturate,
        // trigonometry
        "cos" => Mf::Cos,
        "cosh" => Mf::Cosh,
        "sin" => Mf::Sin,
        "sinh" => Mf::Sinh,
        "tan" => Mf::Tan,
        "tanh" => Mf::Tanh,
        "acos" => Mf::Acos,
        "acosh" => Mf::Acosh,
        "asin" => Mf::Asin,
        "asinh" => Mf::Asinh,
        "atan" => Mf::Atan,
        "atanh" => Mf::Atanh,
        "atan2" => Mf::Atan2,
        "radians" => Mf::Radians,
        "degrees" => Mf::Degrees,
        // decomposition
        "ceil" => Mf::Ceil,
        "floor" => Mf::Floor,
        "round" => Mf::Round,
        "fract" => Mf::Fract,
        "trunc" => Mf::Trunc,
        "modf" => Mf::Modf,
        "frexp" => Mf::Frexp,
        "ldexp" => Mf::Ldexp,
        // exponent
        "exp" => Mf::Exp,
        "exp2" => Mf::Exp2,
        "log" => Mf::Log,
        "log2" => Mf::Log2,
        "pow" => Mf::Pow,
        // geometry
        "dot" => Mf::Dot,
        "outerProduct" => Mf::Outer,
        "cross" => Mf::Cross,
        "distance" => Mf::Distance,
        "length" => Mf::Length,
        "normalize" => Mf::Normalize,
        "faceForward" => Mf::FaceForward,
        "reflect" => Mf::Reflect,
        "refract" => Mf::Refract,
        // computational
        "sign" => Mf::Sign,
        "fma" => Mf::Fma,
        "mix" => Mf::Mix,
        "step" => Mf::Step,
        "smoothstep" => Mf::SmoothStep,
        "sqrt" => Mf::Sqrt,
        "inverseSqrt" => Mf::InverseSqrt,
        "transpose" => Mf::Transpose,
        "determinant" => Mf::Determinant,
        // bits
        "countTrailingZeros" => Mf::CountTrailingZeros,
        "countLeadingZeros" => Mf::CountLeadingZeros,
        "countOneBits" => Mf::CountOneBits,
        "reverseBits" => Mf::ReverseBits,
        "extractBits" => Mf::ExtractBits,
        "insertBits" => Mf::InsertBits,
        // WGSL's names differ from the IR's for these two.
        "firstTrailingBit" => Mf::FindLsb,
        "firstLeadingBit" => Mf::FindMsb,
        // data packing
        "pack4x8snorm" => Mf::Pack4x8snorm,
        "pack4x8unorm" => Mf::Pack4x8unorm,
        "pack2x16snorm" => Mf::Pack2x16snorm,
        "pack2x16unorm" => Mf::Pack2x16unorm,
        "pack2x16float" => Mf::Pack2x16float,
        // data unpacking
        "unpack4x8snorm" => Mf::Unpack4x8snorm,
        "unpack4x8unorm" => Mf::Unpack4x8unorm,
        "unpack2x16snorm" => Mf::Unpack2x16snorm,
        "unpack2x16unorm" => Mf::Unpack2x16unorm,
        "unpack2x16float" => Mf::Unpack2x16float,
        _ => return None,
    })
}
pub fn map_conservative_depth(
word: &str,
span: Span,
) -> Result<crate::ConservativeDepth, Error<'_>> {
use crate::ConservativeDepth as Cd;
match word {
"greater_equal" => Ok(Cd::GreaterEqual),
"less_equal" => Ok(Cd::LessEqual),
"unchanged" => Ok(Cd::Unchanged),
_ => Err(Error::UnknownConservativeDepth(span)),
}
}

View file

@ -0,0 +1,723 @@
use super::{number::consume_number, Error, ExpectedToken};
use crate::front::wgsl::error::NumberError;
use crate::front::wgsl::parse::{conv, Number};
use crate::Span;
/// A token together with the source span it occupies.
type TokenSpan<'a> = (Token<'a>, Span);
/// A lexical token of WGSL source.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Token<'a> {
    /// `:`, `;`, `,`, or a `.` not starting a number.
    Separator(char),
    /// A bracket character; `<`/`>` also appear here when lexed in generic mode.
    Paren(char),
    /// `@`
    Attribute,
    /// A numeric literal, or the error produced while parsing it.
    Number(Result<Number, NumberError>),
    /// An identifier or keyword.
    Word(&'a str),
    /// A single-character operator, e.g. `+` or `~`.
    Operation(char),
    /// A two-character logical/comparison operator, tagged by its first char
    /// (`&&`, `||`, `==`, `!=`, `<=`, `>=`).
    LogicalOperation(char),
    /// `<<` or `>>`, tagged by the bracket character.
    ShiftOperation(char),
    /// A compound assignment, tagged by its operator char (e.g. `+=`, `<<=`).
    AssignmentOperation(char),
    /// `++`
    IncrementOperation,
    /// `--`
    DecrementOperation,
    /// `->`
    Arrow,
    /// A character with no meaning in WGSL.
    Unknown(char),
    /// Whitespace or a comment; skipped by the lexer.
    Trivia,
    /// End of input.
    End,
}
/// Split `input` at the end of its longest prefix whose characters all
/// satisfy `what`, returning `(prefix, rest)`.
fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) {
    let split_at = input
        .char_indices()
        .find(|&(_, c)| !what(c))
        .map_or(input.len(), |(index, _)| index);
    input.split_at(split_at)
}
/// Return the token at the start of `input`.
///
/// If `generic` is `false`, then the bit shift operators `>>` or `<<`
/// are valid lookahead tokens for the current parser state (see [§3.1
/// Parsing] in the WGSL specification). In other words:
///
/// - If `generic` is `true`, then we are expecting an angle bracket
///   around a generic type parameter, like the `<` and `>` in
///   `vec3<f32>`, so interpret `<` and `>` as `Token::Paren` tokens,
///   even if they're part of `<<` or `>>` sequences.
///
/// - Otherwise, interpret `<<` and `>>` as shift operators:
///   `Token::ShiftOperation` tokens.
///
/// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing
fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
    let mut chars = input.chars();
    let cur = match chars.next() {
        Some(c) => c,
        None => return (Token::End, ""),
    };
    match cur {
        ':' | ';' | ',' => (Token::Separator(cur), chars.as_str()),
        '.' => {
            let og_chars = chars.as_str();
            match chars.next() {
                // A `.` directly followed by a digit starts a float literal.
                Some('0'..='9') => consume_number(input),
                _ => (Token::Separator(cur), og_chars),
            }
        }
        '@' => (Token::Attribute, chars.as_str()),
        '(' | ')' | '{' | '}' | '[' | ']' => (Token::Paren(cur), chars.as_str()),
        // `<`/`>` may begin `<=`, `<<`, `<<=` (and the `>` analogues), or be a
        // plain angle bracket. In generic mode, always a `Paren`.
        '<' | '>' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('=') if !generic => (Token::LogicalOperation(cur), chars.as_str()),
                Some(c) if c == cur && !generic => {
                    let og_chars = chars.as_str();
                    match chars.next() {
                        Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                        _ => (Token::ShiftOperation(cur), og_chars),
                    }
                }
                _ => (Token::Paren(cur), og_chars),
            }
        }
        '0'..='9' => consume_number(input),
        '/' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('/') => {
                    // Line comment: skip up to (and including) the line break.
                    let _ = chars.position(is_comment_end);
                    (Token::Trivia, chars.as_str())
                }
                Some('*') => {
                    // Block comments nest, so track the nesting depth.
                    let mut depth = 1;
                    let mut prev = None;
                    for c in &mut chars {
                        match (prev, c) {
                            (Some('*'), '/') => {
                                prev = None;
                                depth -= 1;
                                if depth == 0 {
                                    return (Token::Trivia, chars.as_str());
                                }
                            }
                            (Some('/'), '*') => {
                                prev = None;
                                depth += 1;
                            }
                            _ => {
                                prev = Some(c);
                            }
                        }
                    }
                    // Unterminated block comment consumes the rest of the input.
                    (Token::End, "")
                }
                Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        '-' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('>') => (Token::Arrow, chars.as_str()),
                // A leading minus is folded into the numeric literal.
                Some('0'..='9' | '.') => consume_number(input),
                Some('-') => (Token::DecrementOperation, chars.as_str()),
                Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        '+' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('+') => (Token::IncrementOperation, chars.as_str()),
                Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        '*' | '%' | '^' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        '~' => (Token::Operation(cur), chars.as_str()),
        '=' | '!' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some('=') => (Token::LogicalOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        '&' | '|' => {
            let og_chars = chars.as_str();
            match chars.next() {
                Some(c) if c == cur => (Token::LogicalOperation(cur), chars.as_str()),
                Some('=') => (Token::AssignmentOperation(cur), chars.as_str()),
                _ => (Token::Operation(cur), og_chars),
            }
        }
        _ if is_blankspace(cur) => {
            let (_, rest) = consume_any(input, is_blankspace);
            (Token::Trivia, rest)
        }
        _ if is_word_start(cur) => {
            let (word, rest) = consume_any(input, is_word_part);
            (Token::Word(word), rest)
        }
        _ => (Token::Unknown(cur), chars.as_str()),
    }
}
/// Returns whether `c` terminates a line comment
/// (Unicode Pattern_White_Space excluding U+0020, U+0009, U+200E and U+200F).
const fn is_comment_end(c: char) -> bool {
    matches!(c, '\u{000a}'..='\u{000d}' | '\u{0085}' | '\u{2028}' | '\u{2029}')
}
/// Returns whether `c` counts as blankspace (Unicode Pattern_White_Space).
const fn is_blankspace(c: char) -> bool {
    matches!(
        c,
        '\u{0020}'
            | '\u{0009}'..='\u{000d}'
            | '\u{0085}'
            | '\u{200e}'
            | '\u{200f}'
            | '\u{2028}'
            | '\u{2029}'
    )
}
/// Returns whether or not a char is a word start (Unicode XID_Start + '_').
fn is_word_start(c: char) -> bool {
    c == '_' || unicode_xid::UnicodeXID::is_xid_start(c)
}
/// Returns whether or not a char is a word part (Unicode XID_Continue).
fn is_word_part(c: char) -> bool {
    unicode_xid::UnicodeXID::is_xid_continue(c)
}
/// Tokenizer over a WGSL source string.
///
/// `Clone` is cheap (two string slices and an offset); the peek methods rely
/// on cloning the lexer to look ahead without consuming input.
#[derive(Clone)]
pub(in crate::front::wgsl) struct Lexer<'a> {
    /// The unconsumed remainder of `source`.
    input: &'a str,
    /// The complete original source, kept so byte offsets can be computed.
    pub(in crate::front::wgsl) source: &'a str,
    // The byte offset of the end of the last non-trivia token.
    last_end_offset: usize,
}
impl<'a> Lexer<'a> {
    /// Create a lexer positioned at the start of `input`.
    pub(in crate::front::wgsl) const fn new(input: &'a str) -> Self {
        Lexer {
            input,
            source: input,
            last_end_offset: 0,
        }
    }
    /// Calls the function with a lexer and returns the result of the function as well as the span for everything the function parsed
    ///
    /// # Examples
    /// ```ignore
    /// let lexer = Lexer::new("5");
    /// let (value, span) = lexer.capture_span(Lexer::next_uint_literal);
    /// assert_eq!(value, 5);
    /// ```
    #[inline]
    pub fn capture_span<T, E>(
        &mut self,
        inner: impl FnOnce(&mut Self) -> Result<T, E>,
    ) -> Result<(T, Span), E> {
        let start = self.current_byte_offset();
        let res = inner(self)?;
        let end = self.current_byte_offset();
        Ok((res, Span::from(start..end)))
    }
    /// Skip any leading trivia and return the byte offset at which the next
    /// token will start.
    pub(in crate::front::wgsl) fn start_byte_offset(&mut self) -> usize {
        loop {
            // Eat all trivia because `next` doesn't eat trailing trivia.
            let (token, rest) = consume_token(self.input, false);
            if let Token::Trivia = token {
                self.input = rest;
            } else {
                return self.current_byte_offset();
            }
        }
    }
    /// Lex the next token on a clone of `self`, returning both the token and
    /// the input remaining after it, without advancing `self`.
    fn peek_token_and_rest(&mut self) -> (TokenSpan<'a>, &'a str) {
        let mut cloned = self.clone();
        let token = cloned.next();
        let rest = cloned.input;
        (token, rest)
    }
    /// The byte offset of the current position within `source`.
    const fn current_byte_offset(&self) -> usize {
        self.source.len() - self.input.len()
    }
    /// Build a span from `offset` to the end of the last non-trivia token.
    pub(in crate::front::wgsl) fn span_from(&self, offset: usize) -> Span {
        Span::from(offset..self.last_end_offset)
    }
    /// Return the next non-whitespace token from `self`.
    ///
    /// Assume we are a parse state where bit shift operators may
    /// occur, but not angle brackets.
    #[must_use]
    pub(in crate::front::wgsl) fn next(&mut self) -> TokenSpan<'a> {
        self.next_impl(false)
    }
    /// Return the next non-whitespace token from `self`.
    ///
    /// Assume we are in a parse state where angle brackets may occur,
    /// but not bit shift operators.
    #[must_use]
    pub(in crate::front::wgsl) fn next_generic(&mut self) -> TokenSpan<'a> {
        self.next_impl(true)
    }
    /// Return the next non-whitespace token from `self`, with a span.
    ///
    /// See [`consume_token`] for the meaning of `generic`.
    fn next_impl(&mut self, generic: bool) -> TokenSpan<'a> {
        let mut start_byte_offset = self.current_byte_offset();
        loop {
            let (token, rest) = consume_token(self.input, generic);
            self.input = rest;
            match token {
                // Skip trivia, moving the span start past it.
                Token::Trivia => start_byte_offset = self.current_byte_offset(),
                _ => {
                    self.last_end_offset = self.current_byte_offset();
                    return (token, self.span_from(start_byte_offset));
                }
            }
        }
    }
    /// Look at the next non-trivia token without consuming it.
    #[must_use]
    pub(in crate::front::wgsl) fn peek(&mut self) -> TokenSpan<'a> {
        let (token, _) = self.peek_token_and_rest();
        token
    }
    /// Consume the next token and require it to equal `expected`, returning
    /// its span on success.
    pub(in crate::front::wgsl) fn expect_span(
        &mut self,
        expected: Token<'a>,
    ) -> Result<Span, Error<'a>> {
        let next = self.next();
        if next.0 == expected {
            Ok(next.1)
        } else {
            Err(Error::Unexpected(next.1, ExpectedToken::Token(expected)))
        }
    }
    /// Consume the next token and require it to equal `expected`.
    pub(in crate::front::wgsl) fn expect(&mut self, expected: Token<'a>) -> Result<(), Error<'a>> {
        self.expect_span(expected)?;
        Ok(())
    }
    /// Consume the next token in generic (angle-bracket) mode and require it
    /// to be the paren `expected`.
    pub(in crate::front::wgsl) fn expect_generic_paren(
        &mut self,
        expected: char,
    ) -> Result<(), Error<'a>> {
        let next = self.next_generic();
        if next.0 == Token::Paren(expected) {
            Ok(())
        } else {
            Err(Error::Unexpected(
                next.1,
                ExpectedToken::Token(Token::Paren(expected)),
            ))
        }
    }
    /// If the next token matches it is skipped and true is returned
    pub(in crate::front::wgsl) fn skip(&mut self, what: Token<'_>) -> bool {
        let (peeked_token, rest) = self.peek_token_and_rest();
        if peeked_token.0 == what {
            self.input = rest;
            true
        } else {
            false
        }
    }
    /// Consume the next token as an identifier, rejecting a bare `_` and the
    /// reserved `__` prefix.
    pub(in crate::front::wgsl) fn next_ident_with_span(
        &mut self,
    ) -> Result<(&'a str, Span), Error<'a>> {
        match self.next() {
            (Token::Word(word), span) if word == "_" => {
                Err(Error::InvalidIdentifierUnderscore(span))
            }
            (Token::Word(word), span) if word.starts_with("__") => {
                Err(Error::ReservedIdentifierPrefix(span))
            }
            (Token::Word(word), span) => Ok((word, span)),
            other => Err(Error::Unexpected(other.1, ExpectedToken::Identifier)),
        }
    }
    /// Consume the next token as an identifier, additionally rejecting
    /// reserved WGSL keywords.
    pub(in crate::front::wgsl) fn next_ident(
        &mut self,
    ) -> Result<super::ast::Ident<'a>, Error<'a>> {
        let ident = self
            .next_ident_with_span()
            .map(|(name, span)| super::ast::Ident { name, span })?;
        if crate::keywords::wgsl::RESERVED.contains(&ident.name) {
            return Err(Error::ReservedKeyword(ident.span));
        }
        Ok(ident)
    }
    /// Parses a generic scalar type, for example `<f32>`.
    pub(in crate::front::wgsl) fn next_scalar_generic(
        &mut self,
    ) -> Result<(crate::ScalarKind, crate::Bytes), Error<'a>> {
        self.expect_generic_paren('<')?;
        let pair = match self.next() {
            (Token::Word(word), span) => {
                conv::get_scalar_type(word).ok_or(Error::UnknownScalarType(span))
            }
            (_, span) => Err(Error::UnknownScalarType(span)),
        }?;
        self.expect_generic_paren('>')?;
        Ok(pair)
    }
    /// Parses a generic scalar type, for example `<f32>`.
    ///
    /// Returns the span covering the inner type, excluding the brackets.
    pub(in crate::front::wgsl) fn next_scalar_generic_with_span(
        &mut self,
    ) -> Result<(crate::ScalarKind, crate::Bytes, Span), Error<'a>> {
        self.expect_generic_paren('<')?;
        let pair = match self.next() {
            (Token::Word(word), span) => conv::get_scalar_type(word)
                .map(|(a, b)| (a, b, span))
                .ok_or(Error::UnknownScalarType(span)),
            (_, span) => Err(Error::UnknownScalarType(span)),
        }?;
        self.expect_generic_paren('>')?;
        Ok(pair)
    }
    /// Parse a storage access mode: `read`, `write`, or `read_write`.
    pub(in crate::front::wgsl) fn next_storage_access(
        &mut self,
    ) -> Result<crate::StorageAccess, Error<'a>> {
        let (ident, span) = self.next_ident_with_span()?;
        match ident {
            "read" => Ok(crate::StorageAccess::LOAD),
            "write" => Ok(crate::StorageAccess::STORE),
            "read_write" => Ok(crate::StorageAccess::LOAD | crate::StorageAccess::STORE),
            _ => Err(Error::UnknownAccess(span)),
        }
    }
    /// Parse a `<format, access>` generic pair, as used by storage textures.
    pub(in crate::front::wgsl) fn next_format_generic(
        &mut self,
    ) -> Result<(crate::StorageFormat, crate::StorageAccess), Error<'a>> {
        self.expect(Token::Paren('<'))?;
        let (ident, ident_span) = self.next_ident_with_span()?;
        let format = conv::map_storage_format(ident, ident_span)?;
        self.expect(Token::Separator(','))?;
        let access = self.next_storage_access()?;
        self.expect(Token::Paren('>'))?;
        Ok((format, access))
    }
    /// Expect the `(` that opens an argument list.
    pub(in crate::front::wgsl) fn open_arguments(&mut self) -> Result<(), Error<'a>> {
        self.expect(Token::Paren('('))
    }
    /// Expect the `)` that closes an argument list, allowing a trailing comma.
    pub(in crate::front::wgsl) fn close_arguments(&mut self) -> Result<(), Error<'a>> {
        let _ = self.skip(Token::Separator(','));
        self.expect(Token::Paren(')'))
    }
    /// After an argument, consume either `,` or `)`.
    ///
    /// Returns `true` if another argument may follow; a `,` immediately
    /// followed by `)` (trailing comma) ends the list.
    pub(in crate::front::wgsl) fn next_argument(&mut self) -> Result<bool, Error<'a>> {
        let paren = Token::Paren(')');
        if self.skip(Token::Separator(',')) {
            Ok(!self.skip(paren))
        } else {
            self.expect(paren).map(|()| false)
        }
    }
}
/// Lex `source` and assert that the resulting token stream equals
/// `expected_tokens` followed by `Token::End`.
#[cfg(test)]
fn sub_test(source: &str, expected_tokens: &[Token]) {
    let mut lex = Lexer::new(source);
    for &expected in expected_tokens {
        let (actual, _span) = lex.next();
        assert_eq!(actual, expected);
    }
    let (last, _span) = lex.next();
    assert_eq!(last, Token::End);
}
/// Numeric-literal lexing, covering the WGSL spec examples and the
/// representable boundaries of i32/u32/f32.
#[test]
fn test_numbers() {
    // WGSL spec examples //
    // decimal integer
    sub_test(
        "0x123 0X123u 1u 123 0 0i 0x3f",
        &[
            Token::Number(Ok(Number::I32(291))),
            Token::Number(Ok(Number::U32(291))),
            Token::Number(Ok(Number::U32(1))),
            Token::Number(Ok(Number::I32(123))),
            Token::Number(Ok(Number::I32(0))),
            Token::Number(Ok(Number::I32(0))),
            Token::Number(Ok(Number::I32(63))),
        ],
    );
    // decimal floating point
    sub_test(
        "0.e+4f 01. .01 12.34 .0f 0h 1e-3 0xa.fp+2 0x1P+4f 0X.3 0x3p+2h 0X1.fp-4 0x3.2p+2h",
        &[
            Token::Number(Ok(Number::F32(0.))),
            Token::Number(Ok(Number::F32(1.))),
            Token::Number(Ok(Number::F32(0.01))),
            Token::Number(Ok(Number::F32(12.34))),
            Token::Number(Ok(Number::F32(0.))),
            Token::Number(Err(NumberError::UnimplementedF16)),
            Token::Number(Ok(Number::F32(0.001))),
            Token::Number(Ok(Number::F32(43.75))),
            Token::Number(Ok(Number::F32(16.))),
            Token::Number(Ok(Number::F32(0.1875))),
            Token::Number(Err(NumberError::UnimplementedF16)),
            Token::Number(Ok(Number::F32(0.12109375))),
            Token::Number(Err(NumberError::UnimplementedF16)),
        ],
    );
    // MIN / MAX //
    // min / max decimal signed integer
    sub_test(
        "-2147483648i 2147483647i -2147483649i 2147483648i",
        &[
            Token::Number(Ok(Number::I32(i32::MIN))),
            Token::Number(Ok(Number::I32(i32::MAX))),
            Token::Number(Err(NumberError::NotRepresentable)),
            Token::Number(Err(NumberError::NotRepresentable)),
        ],
    );
    // min / max decimal unsigned integer
    sub_test(
        "0u 4294967295u -1u 4294967296u",
        &[
            Token::Number(Ok(Number::U32(u32::MIN))),
            Token::Number(Ok(Number::U32(u32::MAX))),
            Token::Number(Err(NumberError::NotRepresentable)),
            Token::Number(Err(NumberError::NotRepresentable)),
        ],
    );
    // min / max hexadecimal signed integer
    sub_test(
        "-0x80000000i 0x7FFFFFFFi -0x80000001i 0x80000000i",
        &[
            Token::Number(Ok(Number::I32(i32::MIN))),
            Token::Number(Ok(Number::I32(i32::MAX))),
            Token::Number(Err(NumberError::NotRepresentable)),
            Token::Number(Err(NumberError::NotRepresentable)),
        ],
    );
    // min / max hexadecimal unsigned integer
    sub_test(
        "0x0u 0xFFFFFFFFu -0x1u 0x100000000u",
        &[
            Token::Number(Ok(Number::U32(u32::MIN))),
            Token::Number(Ok(Number::U32(u32::MAX))),
            Token::Number(Err(NumberError::NotRepresentable)),
            Token::Number(Err(NumberError::NotRepresentable)),
        ],
    );
    /// ≈ 2^-126 * 2^-23 (= 2^-149)
    const SMALLEST_POSITIVE_SUBNORMAL_F32: f32 = 1e-45;
    /// ≈ 2^-126 * (1 - 2^-23)
    const LARGEST_SUBNORMAL_F32: f32 = 1.1754942e-38;
    /// ≈ 2^-126
    const SMALLEST_POSITIVE_NORMAL_F32: f32 = f32::MIN_POSITIVE;
    /// ≈ 1 - 2^-24
    const LARGEST_F32_LESS_THAN_ONE: f32 = 0.99999994;
    /// ≈ 1 + 2^-23
    const SMALLEST_F32_LARGER_THAN_ONE: f32 = 1.0000001;
    /// ≈ -(2^127 * (2 - 2^-23))
    const SMALLEST_NORMAL_F32: f32 = f32::MIN;
    /// ≈ 2^127 * (2 - 2^-23)
    const LARGEST_NORMAL_F32: f32 = f32::MAX;
    // decimal floating point
    sub_test(
        "1e-45f 1.1754942e-38f 1.17549435e-38f 0.99999994f 1.0000001f -3.40282347e+38f 3.40282347e+38f",
        &[
            Token::Number(Ok(Number::F32(
                SMALLEST_POSITIVE_SUBNORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_SUBNORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_POSITIVE_NORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_F32_LESS_THAN_ONE,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_F32_LARGER_THAN_ONE,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_NORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_NORMAL_F32,
            ))),
        ],
    );
    sub_test(
        "-3.40282367e+38f 3.40282367e+38f",
        &[
            Token::Number(Err(NumberError::NotRepresentable)), // ≈ -2^128
            Token::Number(Err(NumberError::NotRepresentable)), // ≈ 2^128
        ],
    );
    // hexadecimal floating point
    sub_test(
        "0x1p-149f 0x7FFFFFp-149f 0x1p-126f 0xFFFFFFp-24f 0x800001p-23f -0xFFFFFFp+104f 0xFFFFFFp+104f",
        &[
            Token::Number(Ok(Number::F32(
                SMALLEST_POSITIVE_SUBNORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_SUBNORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_POSITIVE_NORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_F32_LESS_THAN_ONE,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_F32_LARGER_THAN_ONE,
            ))),
            Token::Number(Ok(Number::F32(
                SMALLEST_NORMAL_F32,
            ))),
            Token::Number(Ok(Number::F32(
                LARGEST_NORMAL_F32,
            ))),
        ],
    );
    sub_test(
        "-0x1p128f 0x1p128f 0x1.000001p0f",
        &[
            Token::Number(Err(NumberError::NotRepresentable)), // = -2^128
            Token::Number(Err(NumberError::NotRepresentable)), // = 2^128
            Token::Number(Err(NumberError::NotRepresentable)),
        ],
    );
}
/// General tokenization: identifiers (including non-ASCII XID words),
/// number/word boundaries, and comment lexing.
#[test]
fn test_tokens() {
    sub_test("id123_OK", &[Token::Word("id123_OK")]);
    sub_test(
        "92No",
        &[Token::Number(Ok(Number::I32(92))), Token::Word("No")],
    );
    sub_test(
        "2u3o",
        &[
            Token::Number(Ok(Number::U32(2))),
            Token::Number(Ok(Number::I32(3))),
            Token::Word("o"),
        ],
    );
    sub_test(
        "2.4f44po",
        &[
            Token::Number(Ok(Number::F32(2.4))),
            Token::Number(Ok(Number::I32(44))),
            Token::Word("po"),
        ],
    );
    // Identifiers from several scripts are all single words.
    sub_test(
        "Δέλτα réflexion Кызыл 𐰓𐰏𐰇 朝焼け سلام 검정 שָׁלוֹם गुलाबी փիրուզ",
        &[
            Token::Word("Δέλτα"),
            Token::Word("réflexion"),
            Token::Word("Кызыл"),
            Token::Word("𐰓𐰏𐰇"),
            Token::Word("朝焼け"),
            Token::Word("سلام"),
            Token::Word("검정"),
            Token::Word("שָׁלוֹם"),
            Token::Word("गुलाबी"),
            Token::Word("փիրուզ"),
        ],
    );
    sub_test("æNoø", &[Token::Word("æNoø")]);
    sub_test("No¾", &[Token::Word("No"), Token::Unknown('¾')]);
    sub_test("No好", &[Token::Word("No好")]);
    sub_test("_No", &[Token::Word("_No")]);
    // Nested block comments collapse to trivia; only the operators remain.
    sub_test(
        "*/*/***/*//=/*****//",
        &[
            Token::Operation('*'),
            Token::AssignmentOperation('/'),
            Token::Operation('/'),
        ],
    );
}
/// Tokenization of complete `var` declarations, including the `<...>` after
/// `var` and after type names (lexed as plain parens in non-generic mode).
#[test]
fn test_variable_decl() {
    sub_test(
        "@group(0 ) var< uniform> texture: texture_multisampled_2d <f32 >;",
        &[
            Token::Attribute,
            Token::Word("group"),
            Token::Paren('('),
            Token::Number(Ok(Number::I32(0))),
            Token::Paren(')'),
            Token::Word("var"),
            Token::Paren('<'),
            Token::Word("uniform"),
            Token::Paren('>'),
            Token::Word("texture"),
            Token::Separator(':'),
            Token::Word("texture_multisampled_2d"),
            Token::Paren('<'),
            Token::Word("f32"),
            Token::Paren('>'),
            Token::Separator(';'),
        ],
    );
    sub_test(
        "var<storage,read_write> buffer: array<u32>;",
        &[
            Token::Word("var"),
            Token::Paren('<'),
            Token::Word("storage"),
            Token::Separator(','),
            Token::Word("read_write"),
            Token::Paren('>'),
            Token::Word("buffer"),
            Token::Separator(':'),
            Token::Word("array"),
            Token::Paren('<'),
            Token::Word("u32"),
            Token::Paren('>'),
            Token::Separator(';'),
        ],
    );
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,443 @@
use std::borrow::Cow;
use crate::front::wgsl::error::NumberError;
use crate::front::wgsl::parse::lexer::Token;
/// A numeric literal's value.
///
/// When using this type assume no Abstract Int/Float for now
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Number {
    /// Abstract Int (-2^63 ≤ i < 2^63)
    AbstractInt(i64),
    /// Abstract Float (IEEE-754 binary64)
    AbstractFloat(f64),
    /// Concrete i32
    I32(i32),
    /// Concrete u32
    U32(u32),
    /// Concrete f32
    F32(f32),
}
impl Number {
    /// Convert abstract numbers to a plausible concrete counterpart.
    ///
    /// Concrete numbers pass through unchanged. An abstract int that does not
    /// fit in `i32`, or an abstract float that is not finite as `f32`, is a
    /// `NotRepresentable` error.
    fn abstract_to_concrete(self) -> Result<Number, NumberError> {
        match self {
            Number::AbstractInt(value) => match i32::try_from(value) {
                Ok(narrowed) => Ok(Number::I32(narrowed)),
                Err(_) => Err(NumberError::NotRepresentable),
            },
            Number::AbstractFloat(value) => {
                let narrowed = value as f32;
                if narrowed.is_finite() {
                    Ok(Number::F32(narrowed))
                } else {
                    Err(NumberError::NotRepresentable)
                }
            }
            concrete => Ok(concrete),
        }
    }
}
// TODO: when implementing Creation-Time Expressions, remove the ability to match the minus sign
/// Scan one numeric literal off the front of `input`, returning the
/// resulting token and the unconsumed remainder of `input`.
pub(in crate::front::wgsl) fn consume_number(input: &str) -> (Token<'_>, &str) {
    let (parsed, remainder) = parse(input);
    // abstract literals are concretized immediately for now
    let token = Token::Number(parsed.and_then(Number::abstract_to_concrete));
    (token, remainder)
}
/// Which family of concrete type a literal suffix requested.
enum Kind {
    Int(IntKind),
    Float(FloatKind),
}
/// Concrete integer type selected by an `i` or `u` suffix.
enum IntKind {
    I32,
    U32,
}
/// Concrete float type selected by an `f` or `h` suffix.
enum FloatKind {
    F32,
    F16,
}
// The following regexes (from the WGSL spec) will be matched:
// int_literal:
// | / 0 [iu]? /
// | / [1-9][0-9]* [iu]? /
// | / 0[xX][0-9a-fA-F]+ [iu]? /
// decimal_float_literal:
// | / 0 [fh] /
// | / [1-9][0-9]* [fh] /
// | / [0-9]* \.[0-9]+ ([eE][+-]?[0-9]+)? [fh]? /
// | / [0-9]+ \.[0-9]* ([eE][+-]?[0-9]+)? [fh]? /
// | / [0-9]+ [eE][+-]?[0-9]+ [fh]? /
// hex_float_literal:
// | / 0[xX][0-9a-fA-F]* \.[0-9a-fA-F]+ ([pP][+-]?[0-9]+ [fh]?)? /
// | / 0[xX][0-9a-fA-F]+ \.[0-9a-fA-F]* ([pP][+-]?[0-9]+ [fh]?)? /
// | / 0[xX][0-9a-fA-F]+ [pP][+-]?[0-9]+ [fh]? /
// You could visualize the regex below via https://debuggex.com to get a rough idea what `parse` is doing
// -?(?:0[xX](?:([0-9a-fA-F]+\.[0-9a-fA-F]*|[0-9a-fA-F]*\.[0-9a-fA-F]+)(?:([pP][+-]?[0-9]+)([fh]?))?|([0-9a-fA-F]+)([pP][+-]?[0-9]+)([fh]?)|([0-9a-fA-F]+)([iu]?))|((?:[0-9]+[eE][+-]?[0-9]+|(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:[eE][+-]?[0-9]+)?))([fh]?)|((?:[0-9]|[1-9][0-9]+))([iufh]?))
/// Scan a numeric literal from the start of `input`.
///
/// Returns the parsed number (or a [`NumberError`]) together with the
/// unconsumed tail of `input`.
fn parse(input: &str) -> (Result<Number, NumberError>, &str) {
    /// returns `true` and consumes `X` bytes from the given byte buffer
    /// if the given `X` nr of patterns are found at the start of the buffer
    macro_rules! consume {
        ($bytes:ident, $($pattern:pat),*) => {
            match $bytes {
                &[$($pattern),*, ref rest @ ..] => { $bytes = rest; true },
                _ => false,
            }
        };
    }
    /// consumes one byte from the given byte buffer
    /// if one of the given patterns are found at the start of the buffer
    /// returning the corresponding expr for the matched pattern
    macro_rules! consume_map {
        ($bytes:ident, [$($pattern:pat_param => $to:expr),*]) => {
            match $bytes {
                $( &[$pattern, ref rest @ ..] => { $bytes = rest; Some($to) }, )*
                _ => None,
            }
        };
    }
    /// consumes all consecutive bytes matched by the `0-9` pattern from the given byte buffer
    /// returning the number of consumed bytes
    macro_rules! consume_dec_digits {
        ($bytes:ident) => {{
            let start_len = $bytes.len();
            while let &[b'0'..=b'9', ref rest @ ..] = $bytes {
                $bytes = rest;
            }
            start_len - $bytes.len()
        }};
    }
    /// consumes all consecutive bytes matched by the `0-9 | a-f | A-F` pattern from the given byte buffer
    /// returning the number of consumed bytes
    macro_rules! consume_hex_digits {
        ($bytes:ident) => {{
            let start_len = $bytes.len();
            while let &[b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F', ref rest @ ..] = $bytes {
                $bytes = rest;
            }
            start_len - $bytes.len()
        }};
    }
    /// maps the given `&[u8]` (tail of the initial `input: &str`) to a `&str`
    macro_rules! rest_to_str {
        ($bytes:ident) => {
            &input[input.len() - $bytes.len()..]
        };
    }
    /// Remembers a start position inside `input` so a substring can be
    /// cut out once its end position is known.
    struct ExtractSubStr<'a>(&'a str);
    impl<'a> ExtractSubStr<'a> {
        /// given an `input` and a `start` (tail of the `input`)
        /// creates a new [`ExtractSubStr`](`Self`)
        fn start(input: &'a str, start: &'a [u8]) -> Self {
            let start = input.len() - start.len();
            Self(&input[start..])
        }
        /// given an `end` (tail of the initial `input`)
        /// returns a substring of `input`
        fn end(&self, end: &'a [u8]) -> &'a str {
            let end = self.0.len() - end.len();
            &self.0[..end]
        }
    }
    let mut bytes = input.as_bytes();
    let general_extract = ExtractSubStr::start(input, bytes);
    // see the TODO on `consume_number`: the minus sign is (for now) part of the literal
    let is_negative = consume!(bytes, b'-');
    if consume!(bytes, b'0', b'x' | b'X') {
        // hexadecimal literal (integer or float)
        let digits_extract = ExtractSubStr::start(input, bytes);
        let consumed = consume_hex_digits!(bytes);
        if consume!(bytes, b'.') {
            let consumed_after_period = consume_hex_digits!(bytes);
            // at least one digit must appear on some side of the period
            if consumed + consumed_after_period == 0 {
                return (Err(NumberError::Invalid), rest_to_str!(bytes));
            }
            let significand = general_extract.end(bytes);
            if consume!(bytes, b'p' | b'P') {
                consume!(bytes, b'+' | b'-');
                let consumed = consume_dec_digits!(bytes);
                // an exponent marker must be followed by at least one digit
                if consumed == 0 {
                    return (Err(NumberError::Invalid), rest_to_str!(bytes));
                }
                let number = general_extract.end(bytes);
                let kind = consume_map!(bytes, [b'f' => FloatKind::F32, b'h' => FloatKind::F16]);
                (parse_hex_float(number, kind), rest_to_str!(bytes))
            } else {
                (
                    parse_hex_float_missing_exponent(significand, None),
                    rest_to_str!(bytes),
                )
            }
        } else {
            if consumed == 0 {
                return (Err(NumberError::Invalid), rest_to_str!(bytes));
            }
            let significand = general_extract.end(bytes);
            let digits = digits_extract.end(bytes);
            let exp_extract = ExtractSubStr::start(input, bytes);
            if consume!(bytes, b'p' | b'P') {
                // hex float with an exponent but no period
                consume!(bytes, b'+' | b'-');
                let consumed = consume_dec_digits!(bytes);
                if consumed == 0 {
                    return (Err(NumberError::Invalid), rest_to_str!(bytes));
                }
                let exponent = exp_extract.end(bytes);
                let kind = consume_map!(bytes, [b'f' => FloatKind::F32, b'h' => FloatKind::F16]);
                (
                    parse_hex_float_missing_period(significand, exponent, kind),
                    rest_to_str!(bytes),
                )
            } else {
                // plain hex integer, optionally suffixed
                let kind = consume_map!(bytes, [b'i' => IntKind::I32, b'u' => IntKind::U32]);
                (
                    parse_hex_int(is_negative, digits, kind),
                    rest_to_str!(bytes),
                )
            }
        }
    } else {
        // decimal literal (integer or float)
        let is_first_zero = bytes.first() == Some(&b'0');
        let consumed = consume_dec_digits!(bytes);
        if consume!(bytes, b'.') {
            let consumed_after_period = consume_dec_digits!(bytes);
            if consumed + consumed_after_period == 0 {
                return (Err(NumberError::Invalid), rest_to_str!(bytes));
            }
            if consume!(bytes, b'e' | b'E') {
                consume!(bytes, b'+' | b'-');
                let consumed = consume_dec_digits!(bytes);
                if consumed == 0 {
                    return (Err(NumberError::Invalid), rest_to_str!(bytes));
                }
            }
            let number = general_extract.end(bytes);
            let kind = consume_map!(bytes, [b'f' => FloatKind::F32, b'h' => FloatKind::F16]);
            (parse_dec_float(number, kind), rest_to_str!(bytes))
        } else {
            if consumed == 0 {
                return (Err(NumberError::Invalid), rest_to_str!(bytes));
            }
            if consume!(bytes, b'e' | b'E') {
                // exponent-only decimal float, e.g. `1e5`
                consume!(bytes, b'+' | b'-');
                let consumed = consume_dec_digits!(bytes);
                if consumed == 0 {
                    return (Err(NumberError::Invalid), rest_to_str!(bytes));
                }
                let number = general_extract.end(bytes);
                let kind = consume_map!(bytes, [b'f' => FloatKind::F32, b'h' => FloatKind::F16]);
                (parse_dec_float(number, kind), rest_to_str!(bytes))
            } else {
                // make sure the multi-digit numbers don't start with zero
                if consumed > 1 && is_first_zero {
                    return (Err(NumberError::Invalid), rest_to_str!(bytes));
                }
                let digits_with_sign = general_extract.end(bytes);
                let kind = consume_map!(bytes, [
                    b'i' => Kind::Int(IntKind::I32),
                    b'u' => Kind::Int(IntKind::U32),
                    b'f' => Kind::Float(FloatKind::F32),
                    b'h' => Kind::Float(FloatKind::F16)
                ]);
                (
                    parse_dec(is_negative, digits_with_sign, kind),
                    rest_to_str!(bytes),
                )
            }
        }
    }
}
/// Complete a hex float that has a period but no exponent, then parse it.
///
/// `hexf_parse` requires an exponent, so a neutral `p0` is appended.
fn parse_hex_float_missing_exponent(
    // format: -?0[xX] ( [0-9a-fA-F]+\.[0-9a-fA-F]* | [0-9a-fA-F]*\.[0-9a-fA-F]+ )
    significand: &str,
    kind: Option<FloatKind>,
) -> Result<Number, NumberError> {
    // inline format args, consistent with `parse_hex_float_missing_period`
    let hexf_input = format!("{significand}p0");
    parse_hex_float(&hexf_input, kind)
}
/// Complete a hex float that has an exponent but no period, then parse it.
///
/// `hexf_parse` requires a period, so one is spliced in between the
/// significand and the exponent.
fn parse_hex_float_missing_period(
    // format: -?0[xX] [0-9a-fA-F]+
    significand: &str,
    // format: [pP][+-]?[0-9]+
    exponent: &str,
    kind: Option<FloatKind>,
) -> Result<Number, NumberError> {
    let mut hexf_input = String::with_capacity(significand.len() + 1 + exponent.len());
    hexf_input.push_str(significand);
    hexf_input.push('.');
    hexf_input.push_str(exponent);
    parse_hex_float(&hexf_input, kind)
}
/// Parse a hexadecimal integer literal, reattaching the sign the lexer
/// consumed separately.
fn parse_hex_int(
    is_negative: bool,
    // format: [0-9a-fA-F]+
    digits: &str,
    kind: Option<IntKind>,
) -> Result<Number, NumberError> {
    // borrow the digits unchanged when there is no sign to prepend
    let signed_digits: Cow<'_, str> = match is_negative {
        true => format!("-{digits}").into(),
        false => digits.into(),
    };
    parse_int(signed_digits.as_ref(), kind, 16, is_negative)
}
/// Parse a decimal literal, dispatching on the (optional) type suffix.
fn parse_dec(
    is_negative: bool,
    // format: -? ( [0-9] | [1-9][0-9]+ )
    digits_with_sign: &str,
    kind: Option<Kind>,
) -> Result<Number, NumberError> {
    match kind {
        Some(Kind::Float(float_kind)) => parse_dec_float(digits_with_sign, Some(float_kind)),
        Some(Kind::Int(int_kind)) => parse_int(digits_with_sign, Some(int_kind), 10, is_negative),
        // no suffix: stays an abstract integer
        None => parse_int(digits_with_sign, None, 10, is_negative),
    }
}
// Float parsing notes
// The following chapters of IEEE 754-2019 are relevant:
//
// 7.4 Overflow (largest finite number is exceeded by what would have been
// the rounded floating-point result were the exponent range unbounded)
//
// 7.5 Underflow (tiny non-zero result is detected;
// for decimal formats tininess is detected before rounding when a non-zero result
// computed as though both the exponent range and the precision were unbounded
// would lie strictly between ±2^emin, i.e. ±2^−126 for binary32)
//
// 7.6 Inexact (rounded result differs from what would have been computed
// were both exponent range and precision unbounded)
// The WGSL spec requires us to error:
// on overflow for decimal floating point literals
// on overflow and inexact for hexadecimal floating point literals
// (underflow is not mentioned)
// hexf_parse errors on overflow, underflow, inexact
// rust std lib float from str handles overflow, underflow, inexact transparently (rounds and will not error)
// Therefore we only check for overflow manually for decimal floating point literals
// input format: -?0[xX] ( [0-9a-fA-F]+\.[0-9a-fA-F]* | [0-9a-fA-F]*\.[0-9a-fA-F]+ ) [pP][+-]?[0-9]+
fn parse_hex_float(input: &str, kind: Option<FloatKind>) -> Result<Number, NumberError> {
    // any failure can only be ParseHexfErrorKind::Inexact, but we can't
    // check since the error kind is private to hexf_parse
    match kind {
        None => hexf_parse::parse_hexf64(input, false)
            .map(Number::AbstractFloat)
            .map_err(|_| NumberError::NotRepresentable),
        Some(FloatKind::F32) => hexf_parse::parse_hexf32(input, false)
            .map(Number::F32)
            .map_err(|_| NumberError::NotRepresentable),
        Some(FloatKind::F16) => Err(NumberError::UnimplementedF16),
    }
}
// input format: -? ( [0-9]+\.[0-9]* | [0-9]*\.[0-9]+ ) ([eE][+-]?[0-9]+)?
//             | -? [0-9]+ [eE][+-]?[0-9]+
fn parse_dec_float(input: &str, kind: Option<FloatKind>) -> Result<Number, NumberError> {
    match kind {
        None => {
            // the lexer already validated the digits, so parsing cannot fail
            let value = input.parse::<f64>().unwrap();
            if value.is_finite() {
                Ok(Number::AbstractFloat(value))
            } else {
                // overflowed to infinity
                Err(NumberError::NotRepresentable)
            }
        }
        Some(FloatKind::F32) => {
            // will never fail, same reasoning as above
            let value = input.parse::<f32>().unwrap();
            if value.is_finite() {
                Ok(Number::F32(value))
            } else {
                Err(NumberError::NotRepresentable)
            }
        }
        Some(FloatKind::F16) => Err(NumberError::UnimplementedF16),
    }
}
/// Parse an integer literal in the given radix, honoring the
/// (optional) suffix-selected width.
fn parse_int(
    input: &str,
    kind: Option<IntKind>,
    radix: u32,
    is_negative: bool,
) -> Result<Number, NumberError> {
    /// Overflow is the only error pre-validated digits can produce.
    fn map_err(e: core::num::ParseIntError) -> NumberError {
        match *e.kind() {
            core::num::IntErrorKind::PosOverflow | core::num::IntErrorKind::NegOverflow => {
                NumberError::NotRepresentable
            }
            _ => unreachable!(),
        }
    }
    match kind {
        // a `u` suffix on a negative literal can never be represented
        Some(IntKind::U32) if is_negative => Err(NumberError::NotRepresentable),
        Some(IntKind::U32) => u32::from_str_radix(input, radix)
            .map(Number::U32)
            .map_err(map_err),
        Some(IntKind::I32) => i32::from_str_radix(input, radix)
            .map(Number::I32)
            .map_err(map_err),
        None => i64::from_str_radix(input, radix)
            .map(Number::AbstractInt)
            .map_err(map_err),
    }
}

View file

@ -0,0 +1,483 @@
use super::parse_str;
// Line comments, including degenerate runs of slashes, must parse away
// cleanly.
#[test]
fn parse_comment() {
    parse_str(
        "//
////
///////////////////////////////////////////////////////// asda
//////////////////// dad ////////// /
/////////////////////////////////////////////////////////////////////////////////////////////////////
//
",
    )
    .unwrap();
}
// Declarations with various type names: unknown scalar names are
// rejected, texture types with generic arguments are accepted.
#[test]
fn parse_types() {
    parse_str("const a : i32 = 2;").unwrap();
    // `x32` is not a type
    assert!(parse_str("const a : x32 = 2;").is_err());
    parse_str("var t: texture_2d<f32>;").unwrap();
    parse_str("var t: texture_cube_array<i32>;").unwrap();
    parse_str("var t: texture_multisampled_2d<u32>;").unwrap();
    parse_str("var t: texture_storage_1d<rgba8uint,write>;").unwrap();
    parse_str("var t: texture_storage_3d<r32float,read>;").unwrap();
}
// Suffixed literals and inferred `let`/`var` types.
#[test]
fn parse_type_inference() {
    parse_str(
        "
fn foo() {
let a = 2u;
let b: u32 = a;
var x = 3.;
var y = vec2<f32>(1, 2);
}",
    )
    .unwrap();
    // a float literal cannot initialize an `i32`
    assert!(parse_str(
        "
fn foo() { let c : i32 = 2.0; }",
    )
    .is_err());
}
// Scalar and vector constructors used as type conversions.
#[test]
fn parse_type_cast() {
    parse_str(
        "
const a : i32 = 2;
fn main() {
var x: f32 = f32(a);
x = f32(i32(a + 1) / 2);
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
let x: vec2<f32> = vec2<f32>(1.0, 2.0);
let y: vec2<u32> = vec2<u32>(x);
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
let x: vec2<f32> = vec2<f32>(0.0);
}
",
    )
    .unwrap();
    // integer component for a vec2<f32> constructor is rejected
    assert!(parse_str(
        "
fn main() {
let x: vec2<f32> = vec2<f32>(0);
}
",
    )
    .is_err());
}
// Struct declarations: layout attributes, an empty struct, and a
// storage-buffer global of a struct type.
#[test]
fn parse_struct() {
    parse_str(
        "
struct Foo { x: i32 }
struct Bar {
@size(16) x: vec2<i32>,
@align(16) y: f32,
@size(32) @align(128) z: vec3<f32>,
};
struct Empty {}
var<storage,read_write> s: Foo;
",
    )
    .unwrap();
}
// Nested calls to built-in functions.
#[test]
fn parse_standard_fun() {
    parse_str(
        "
fn main() {
var x: i32 = min(max(1, 2), 3);
}
",
    )
    .unwrap();
}
// Empty statements, bare blocks, and plain calls as statements.
#[test]
fn parse_statement() {
    parse_str(
        "
fn main() {
;
{}
{;}
}
",
    )
    .unwrap();
    parse_str(
        "
fn foo() {}
fn bar() { foo(); }
",
    )
    .unwrap();
}
// `if`/`else if`/`else` chains without parenthesized conditions.
#[test]
fn parse_if() {
    parse_str(
        "
fn main() {
if true {
discard;
} else {}
if 0 != 1 {}
if false {
return;
} else if true {
return;
} else {}
}
",
    )
    .unwrap();
}
// The same chains with the legacy parenthesized conditions.
#[test]
fn parse_parentheses_if() {
    parse_str(
        "
fn main() {
if (true) {
discard;
} else {}
if (0 != 1) {}
if (false) {
return;
} else if (true) {
return;
} else {}
}
",
    )
    .unwrap();
}
// All loop forms: `loop`/`continuing`, `while`, `for`, and `for(;;)`.
#[test]
fn parse_loop() {
    parse_str(
        "
fn main() {
var i: i32 = 0;
loop {
if i == 1 { break; }
continuing { i = 1; }
}
loop {
if i == 0 { continue; }
break;
}
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
var found: bool = false;
var i: i32 = 0;
while !found {
if i == 10 {
found = true;
}
i = i + 1;
}
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
while true {
break;
}
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
var a: i32 = 0;
for(var i: i32 = 0; i < 4; i = i + 1) {
a = a + 2;
}
}
",
    )
    .unwrap();
    parse_str(
        "
fn main() {
for(;;) {
break;
}
}
",
    )
    .unwrap();
}
// A basic `switch` with multi-value cases and a default.
#[test]
fn parse_switch() {
    parse_str(
        "
fn main() {
var pos: f32;
switch (3) {
case 0, 1: { pos = 0.0; }
case 2: { pos = 1.0; }
default: { pos = 3.0; }
}
}
",
    )
    .unwrap();
}
// The colon after `case`/`default` is optional.
#[test]
fn parse_switch_optional_colon_in_case() {
    parse_str(
        "
fn main() {
var pos: f32;
switch (3) {
case 0, 1 { pos = 0.0; }
case 2 { pos = 1.0; }
default { pos = 3.0; }
}
}
",
    )
    .unwrap();
}
// `default` may appear inside a `case` selector list.
#[test]
fn parse_switch_default_in_case() {
    parse_str(
        "
fn main() {
var pos: f32;
switch (3) {
case 0, 1: { pos = 0.0; }
case 2: {}
case default, 3: { pos = 3.0; }
}
}
",
    )
    .unwrap();
}
// A switch selector expression without surrounding parentheses.
#[test]
fn parse_parentheses_switch() {
    parse_str(
        "
fn main() {
var pos: f32;
switch pos > 1.0 {
default: { pos = 3.0; }
}
}
",
    )
    .unwrap();
}
// `textureLoad` on sampled, multisampled-array, and storage textures.
#[test]
fn parse_texture_load() {
    parse_str(
        "
var t: texture_3d<u32>;
fn foo() {
let r: vec4<u32> = textureLoad(t, vec3<u32>(0.0, 1.0, 2.0), 1);
}
",
    )
    .unwrap();
    parse_str(
        "
var t: texture_multisampled_2d_array<i32>;
fn foo() {
let r: vec4<i32> = textureLoad(t, vec2<i32>(10, 20), 2, 3);
}
",
    )
    .unwrap();
    parse_str(
        "
var t: texture_storage_1d_array<r32float,read>;
fn foo() {
let r: vec4<f32> = textureLoad(t, 10, 2);
}
",
    )
    .unwrap();
}
// `textureStore` on a writable storage texture.
#[test]
fn parse_texture_store() {
    parse_str(
        "
var t: texture_storage_2d<rgba8unorm,write>;
fn foo() {
textureStore(t, vec2<i32>(10, 20), vec4<f32>(0.0, 1.0, 2.0, 3.0));
}
",
    )
    .unwrap();
}
// Texture query built-ins: dimensions, layer count, sample count.
#[test]
fn parse_texture_query() {
    parse_str(
        "
var t: texture_multisampled_2d_array<f32>;
fn foo() {
var dim: vec2<u32> = textureDimensions(t);
dim = textureDimensions(t, 0);
let layers: u32 = textureNumLayers(t);
let samples: u32 = textureNumSamples(t);
}
",
    )
    .unwrap();
}
// Chained swizzles and member access on call results.
#[test]
fn parse_postfix() {
    parse_str(
        "fn foo() {
let x: f32 = vec4<f32>(1.0, 2.0, 3.0, 4.0).xyz.rgbr.aaaa.wz.g;
let y: f32 = fract(vec2<f32>(0.5, x)).x;
}",
    )
    .unwrap();
}
// `select` (scalar and component-wise) and boolean negation.
#[test]
fn parse_expressions() {
    parse_str("fn foo() {
let x: f32 = select(0.0, 1.0, true);
let y: vec2<f32> = select(vec2<f32>(1.0, 1.0), vec2<f32>(x, x), vec2<bool>(x < 0.5, x > 0.5));
let z: bool = !(0.0 == 1.0);
}").unwrap();
}
// Taking an address with `&` and passing the pointer to a built-in.
#[test]
fn parse_pointers() {
    parse_str(
        "fn foo() {
var x: f32 = 1.0;
let px = &x;
let py = frexp(0.5, px);
}",
    )
    .unwrap();
}
// Constructing a struct value via its type name.
#[test]
fn parse_struct_instantiation() {
    parse_str(
        "
struct Foo {
a: f32,
b: vec3<f32>,
}
@fragment
fn fs_main() {
var foo: Foo = Foo(0.0, vec3<f32>(0.0, 1.0, 42.0));
}
",
    )
    .unwrap();
}
// `arrayLength` on a runtime-sized array, both as a struct member and
// as a bare binding.
#[test]
fn parse_array_length() {
    parse_str(
        "
struct Foo {
data: array<u32>
} // this is used as both input and output for convenience
@group(0) @binding(0)
var<storage> foo: Foo;
@group(0) @binding(1)
var<storage> bar: array<u32>;
fn baz() {
var x: u32 = arrayLength(foo.data);
var y: u32 = arrayLength(bar);
}
",
    )
    .unwrap();
}
// Storage buffer declarations with each access-mode spelling.
#[test]
fn parse_storage_buffers() {
    parse_str(
        "
@group(0) @binding(0)
var<storage> foo: array<u32>;
",
    )
    .unwrap();
    parse_str(
        "
@group(0) @binding(0)
var<storage,read> foo: array<u32>;
",
    )
    .unwrap();
    parse_str(
        "
@group(0) @binding(0)
var<storage,write> foo: array<u32>;
",
    )
    .unwrap();
    parse_str(
        "
@group(0) @binding(0)
var<storage,read_write> foo: array<u32>;
",
    )
    .unwrap();
}
// A type alias declaration.
#[test]
fn parse_alias() {
    parse_str(
        "
alias Vec4 = vec4<f32>;
",
    )
    .unwrap();
}