Vendor things

This commit is contained in:
John Doty 2024-03-08 11:03:01 -08:00
parent 5deceec006
commit 977e3c17e5
19434 changed files with 10682014 additions and 0 deletions

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,133 @@
#[cfg(feature = "validate")]
use crate::proc::TypeResolution;
use crate::arena::Handle;
/// Error produced when validating a `Compose` expression.
#[derive(Clone, Debug, thiserror::Error)]
#[cfg_attr(test, derive(PartialEq))]
pub enum ComposeError {
    /// The destination type cannot be built with a `Compose` at all.
    #[error("Composing of type {0:?} can't be done")]
    Type(Handle<crate::Type>),
    /// The number of components supplied does not match the destination type.
    #[error("Composing expects {expected} components but {given} were given")]
    ComponentCount { given: u32, expected: u32 },
    /// Component `index` has a type incompatible with the destination type.
    #[error("Composing {index}'s component type is not expected")]
    ComponentType { index: u32 },
}
/// Check that the components in `component_resolutions` can legally be
/// composed into a value of type `self_ty_handle`.
///
/// Returns a [`ComposeError`] describing the first mismatch found, or
/// `Ok(())` when the composition is well-formed.
#[cfg(feature = "validate")]
pub fn validate_compose(
    self_ty_handle: Handle<crate::Type>,
    gctx: crate::proc::GlobalCtx,
    component_resolutions: impl ExactSizeIterator<Item = TypeResolution>,
) -> Result<(), ComposeError> {
    use crate::TypeInner as Ti;

    match gctx.types[self_ty_handle].inner {
        // A vector is built from scalars and/or shorter vectors of the same
        // scalar kind and width; the component widths must sum to `size`.
        Ti::Vector { size, kind, width } => {
            let mut accumulated = 0;
            for (index, resolution) in component_resolutions.enumerate() {
                accumulated += match *resolution.inner_with(gctx.types) {
                    Ti::Scalar {
                        kind: comp_kind,
                        width: comp_width,
                    } if comp_kind == kind && comp_width == width => 1,
                    Ti::Vector {
                        size: comp_size,
                        kind: comp_kind,
                        width: comp_width,
                    } if comp_kind == kind && comp_width == width => comp_size as u32,
                    ref other => {
                        log::error!("Vector component[{}] type {:?}", index, other);
                        return Err(ComposeError::ComponentType {
                            index: index as u32,
                        });
                    }
                };
            }
            if accumulated != size as u32 {
                return Err(ComposeError::ComponentCount {
                    expected: size as u32,
                    given: accumulated,
                });
            }
        }
        // A matrix is built from exactly `columns` column vectors.
        Ti::Matrix {
            columns,
            rows,
            width,
        } => {
            let column_ty = Ti::Vector {
                size: rows,
                kind: crate::ScalarKind::Float,
                width,
            };
            if component_resolutions.len() != columns as usize {
                return Err(ComposeError::ComponentCount {
                    expected: columns as u32,
                    given: component_resolutions.len() as u32,
                });
            }
            for (index, resolution) in component_resolutions.enumerate() {
                if resolution.inner_with(gctx.types) != &column_ty {
                    log::error!("Matrix component[{}] type {:?}", index, resolution);
                    return Err(ComposeError::ComponentType {
                        index: index as u32,
                    });
                }
            }
        }
        // A fixed-size array takes exactly `count` elements of the base type.
        Ti::Array {
            base,
            size: crate::ArraySize::Constant(count),
            stride: _,
        } => {
            if component_resolutions.len() != count.get() as usize {
                return Err(ComposeError::ComponentCount {
                    expected: count.get(),
                    given: component_resolutions.len() as u32,
                });
            }
            // The element type is the same for every component, so look it
            // up once outside the loop.
            let base_inner = &gctx.types[base].inner;
            for (index, resolution) in component_resolutions.enumerate() {
                // We don't support arrays of pointers, but it seems best not to
                // embed that assumption here, so use `TypeInner::equivalent`.
                if !base_inner.equivalent(resolution.inner_with(gctx.types), gctx.types) {
                    log::error!("Array component[{}] type {:?}", index, resolution);
                    return Err(ComposeError::ComponentType {
                        index: index as u32,
                    });
                }
            }
        }
        // A struct takes one component per member, in member order.
        Ti::Struct { ref members, .. } => {
            if component_resolutions.len() != members.len() {
                return Err(ComposeError::ComponentCount {
                    given: component_resolutions.len() as u32,
                    expected: members.len() as u32,
                });
            }
            for (index, (member, resolution)) in
                members.iter().zip(component_resolutions).enumerate()
            {
                let member_inner = &gctx.types[member.ty].inner;
                // We don't support pointers in structs, but it seems best not to embed
                // that assumption here, so use `TypeInner::equivalent`.
                if !resolution
                    .inner_with(gctx.types)
                    .equivalent(member_inner, gctx.types)
                {
                    log::error!("Struct component[{}] type {:?}", index, resolution);
                    return Err(ComposeError::ComponentType {
                        index: index as u32,
                    });
                }
            }
        }
        // Every other type (scalars, pointers, images, ...) cannot be composed.
        ref other => {
            log::error!("Composing of {:?}", other);
            return Err(ComposeError::Type(self_ty_handle));
        }
    }
    Ok(())
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,712 @@
//! Implementation of `Validator::validate_module_handles`.
use crate::{
arena::{BadHandle, BadRangeError},
Handle,
};
#[cfg(feature = "validate")]
use crate::{Arena, UniqueArena};
#[cfg(feature = "validate")]
use super::ValidationError;
#[cfg(feature = "validate")]
use std::{convert::TryInto, hash::Hash, num::NonZeroU32};
#[cfg(feature = "validate")]
impl super::Validator {
/// Validates that all handles within `module` are:
///
/// * Valid, in the sense that they contain indices within each arena structure inside the
///   [`crate::Module`] type.
/// * No arena contents contain any items that have forward dependencies; that is, the value
///   associated with a handle only may contain references to handles in the same arena that
///   were constructed before it.
///
/// By validating the above conditions, we free up subsequent logic to assume that handle
/// accesses are infallible.
///
/// # Errors
///
/// Errors returned by this method are intentionally sparse, for simplicity of implementation.
/// It is expected that only buggy frontends or fuzzers should ever emit IR that fails this
/// validation pass.
pub(super) fn validate_module_handles(module: &crate::Module) -> Result<(), ValidationError> {
    // Exhaustive destructure: a new `Module` field is a compile error here,
    // forcing this pass to be updated alongside it.
    let &crate::Module {
        ref constants,
        ref entry_points,
        ref functions,
        ref global_variables,
        ref types,
        ref special_types,
        ref const_expressions,
    } = module;

    // NOTE: Types being first is important. All other forms of validation depend on this.
    for (this_handle, ty) in types.iter() {
        match ty.inner {
            // Leaf types: contain no handles to other types.
            crate::TypeInner::Scalar { .. }
            | crate::TypeInner::Vector { .. }
            | crate::TypeInner::Matrix { .. }
            | crate::TypeInner::ValuePointer { .. }
            | crate::TypeInner::Atomic { .. }
            | crate::TypeInner::Image { .. }
            | crate::TypeInner::Sampler { .. }
            | crate::TypeInner::AccelerationStructure
            | crate::TypeInner::RayQuery => (),
            // Composite types: their base/member types must precede them.
            crate::TypeInner::Pointer { base, space: _ } => {
                this_handle.check_dep(base)?;
            }
            crate::TypeInner::Array { base, .. }
            | crate::TypeInner::BindingArray { base, .. } => {
                this_handle.check_dep(base)?;
            }
            crate::TypeInner::Struct {
                ref members,
                span: _,
            } => {
                this_handle.check_dep_iter(members.iter().map(|m| m.ty))?;
            }
        }
    }

    for handle_and_expr in const_expressions.iter() {
        Self::validate_const_expression_handles(handle_and_expr, constants, types)?;
    }

    let validate_type = |handle| Self::validate_type_handle(handle, types);
    let validate_const_expr =
        |handle| Self::validate_expression_handle(handle, const_expressions);

    for (_handle, constant) in constants.iter() {
        let &crate::Constant {
            name: _,
            r#override: _,
            ty,
            init,
        } = constant;
        validate_type(ty)?;
        validate_const_expr(init)?;
    }

    for (_handle, global_variable) in global_variables.iter() {
        let &crate::GlobalVariable {
            name: _,
            space: _,
            binding: _,
            ty,
            init,
        } = global_variable;
        validate_type(ty)?;
        if let Some(init_expr) = init {
            validate_const_expr(init_expr)?;
        }
    }

    // Shared between entry points (`function_handle == None`) and
    // module-level functions (`Some(handle)`).
    let validate_function = |function_handle, function: &_| -> Result<_, InvalidHandleError> {
        let &crate::Function {
            name: _,
            ref arguments,
            ref result,
            ref local_variables,
            ref expressions,
            ref named_expressions,
            ref body,
        } = function;

        for arg in arguments.iter() {
            let &crate::FunctionArgument {
                name: _,
                ty,
                binding: _,
            } = arg;
            validate_type(ty)?;
        }

        if let &Some(crate::FunctionResult { ty, binding: _ }) = result {
            validate_type(ty)?;
        }

        for (_handle, local_variable) in local_variables.iter() {
            let &crate::LocalVariable { name: _, ty, init } = local_variable;
            validate_type(ty)?;
            if let Some(init_constant) = init {
                validate_const_expr(init_constant)?;
            }
        }

        for handle in named_expressions.keys().copied() {
            Self::validate_expression_handle(handle, expressions)?;
        }

        for handle_and_expr in expressions.iter() {
            Self::validate_expression_handles(
                handle_and_expr,
                constants,
                const_expressions,
                types,
                local_variables,
                global_variables,
                functions,
                function_handle,
            )?;
        }

        Self::validate_block_handles(body, expressions, functions)?;

        Ok(())
    };

    for entry_point in entry_points.iter() {
        validate_function(None, &entry_point.function)?;
    }

    for (function_handle, function) in functions.iter() {
        validate_function(Some(function_handle), function)?;
    }

    if let Some(ty) = special_types.ray_desc {
        validate_type(ty)?;
    }
    if let Some(ty) = special_types.ray_intersection {
        validate_type(ty)?;
    }

    Ok(())
}
/// Check that `handle` is a valid index into `types`.
fn validate_type_handle(
    handle: Handle<crate::Type>,
    types: &UniqueArena<crate::Type>,
) -> Result<(), InvalidHandleError> {
    // `check_valid_for_uniq` already returns `Result<(), InvalidHandleError>`,
    // so the former `.map(|_| ())` was a no-op and has been removed.
    handle.check_valid_for_uniq(types)
}
/// Check that `handle` is a valid index into `constants`.
fn validate_constant_handle(
    handle: Handle<crate::Constant>,
    constants: &Arena<crate::Constant>,
) -> Result<(), InvalidHandleError> {
    // `check_valid_for` already returns `Result<(), InvalidHandleError>`,
    // so the former `.map(|_| ())` was a no-op and has been removed.
    handle.check_valid_for(constants)
}
/// Check that `handle` is a valid index into `expressions`.
fn validate_expression_handle(
    handle: Handle<crate::Expression>,
    expressions: &Arena<crate::Expression>,
) -> Result<(), InvalidHandleError> {
    // `check_valid_for` already returns `Result<(), InvalidHandleError>`,
    // so the former `.map(|_| ())` was a no-op and has been removed.
    handle.check_valid_for(expressions)
}
/// Check that `handle` is a valid index into `functions`.
fn validate_function_handle(
    handle: Handle<crate::Function>,
    functions: &Arena<crate::Function>,
) -> Result<(), InvalidHandleError> {
    // `check_valid_for` already returns `Result<(), InvalidHandleError>`,
    // so the former `.map(|_| ())` was a no-op and has been removed.
    handle.check_valid_for(functions)
}
/// Validate all handles referenced by one constant expression, given as the
/// `(handle, expression)` pair yielded by iterating the arena.
fn validate_const_expression_handles(
    (handle, expression): (Handle<crate::Expression>, &crate::Expression),
    constants: &Arena<crate::Constant>,
    types: &UniqueArena<crate::Type>,
) -> Result<(), InvalidHandleError> {
    match *expression {
        crate::Expression::Constant(constant) => {
            Self::validate_constant_handle(constant, constants)?;
            // The referenced constant's initializer must have been built
            // before this expression — no forward references.
            handle.check_dep(constants[constant].init)?;
        }
        crate::Expression::ZeroValue(ty) => {
            Self::validate_type_handle(ty, types)?;
        }
        crate::Expression::Compose { ty, ref components } => {
            Self::validate_type_handle(ty, types)?;
            handle.check_dep_iter(components.iter().copied())?;
        }
        // Literals carry no handles; any other variant is not checked here.
        _ => {}
    }
    Ok(())
}
/// Validate all handles referenced by one function-body expression, given as
/// the `(handle, expression)` pair yielded by iterating the arena.
///
/// Checks both arena validity (types, constants, variables, functions) and
/// the no-forward-dependency rule for sub-expression handles.
#[allow(clippy::too_many_arguments)]
fn validate_expression_handles(
    (handle, expression): (Handle<crate::Expression>, &crate::Expression),
    constants: &Arena<crate::Constant>,
    const_expressions: &Arena<crate::Expression>,
    types: &UniqueArena<crate::Type>,
    local_variables: &Arena<crate::LocalVariable>,
    global_variables: &Arena<crate::GlobalVariable>,
    functions: &Arena<crate::Function>,
    // The handle of the current function or `None` if it's an entry point
    current_function: Option<Handle<crate::Function>>,
) -> Result<(), InvalidHandleError> {
    let validate_constant = |handle| Self::validate_constant_handle(handle, constants);
    let validate_const_expr =
        |handle| Self::validate_expression_handle(handle, const_expressions);
    let validate_type = |handle| Self::validate_type_handle(handle, types);

    match *expression {
        crate::Expression::Access { base, index } => {
            handle.check_dep(base)?.check_dep(index)?;
        }
        crate::Expression::AccessIndex { base, .. } => {
            handle.check_dep(base)?;
        }
        crate::Expression::Splat { value, .. } => {
            handle.check_dep(value)?;
        }
        crate::Expression::Swizzle { vector, .. } => {
            handle.check_dep(vector)?;
        }
        crate::Expression::Literal(_) => {}
        crate::Expression::Constant(constant) => {
            validate_constant(constant)?;
        }
        crate::Expression::ZeroValue(ty) => {
            validate_type(ty)?;
        }
        crate::Expression::Compose { ty, ref components } => {
            validate_type(ty)?;
            handle.check_dep_iter(components.iter().copied())?;
        }
        crate::Expression::FunctionArgument(_arg_idx) => (),
        crate::Expression::GlobalVariable(global_variable) => {
            global_variable.check_valid_for(global_variables)?;
        }
        crate::Expression::LocalVariable(local_variable) => {
            local_variable.check_valid_for(local_variables)?;
        }
        crate::Expression::Load { pointer } => {
            handle.check_dep(pointer)?;
        }
        crate::Expression::ImageSample {
            image,
            sampler,
            gather: _,
            coordinate,
            array_index,
            offset,
            level,
            depth_ref,
        } => {
            // `offset` lives in the const-expression arena, not this
            // function's expression arena.
            if let Some(offset) = offset {
                validate_const_expr(offset)?;
            }

            handle
                .check_dep(image)?
                .check_dep(sampler)?
                .check_dep(coordinate)?
                .check_dep_opt(array_index)?;

            match level {
                crate::SampleLevel::Auto | crate::SampleLevel::Zero => (),
                crate::SampleLevel::Exact(expr) => {
                    handle.check_dep(expr)?;
                }
                crate::SampleLevel::Bias(expr) => {
                    handle.check_dep(expr)?;
                }
                crate::SampleLevel::Gradient { x, y } => {
                    handle.check_dep(x)?.check_dep(y)?;
                }
            };

            handle.check_dep_opt(depth_ref)?;
        }
        crate::Expression::ImageLoad {
            image,
            coordinate,
            array_index,
            sample,
            level,
        } => {
            handle
                .check_dep(image)?
                .check_dep(coordinate)?
                .check_dep_opt(array_index)?
                .check_dep_opt(sample)?
                .check_dep_opt(level)?;
        }
        crate::Expression::ImageQuery { image, query } => {
            handle.check_dep(image)?;
            match query {
                crate::ImageQuery::Size { level } => {
                    handle.check_dep_opt(level)?;
                }
                crate::ImageQuery::NumLevels
                | crate::ImageQuery::NumLayers
                | crate::ImageQuery::NumSamples => (),
            };
        }
        crate::Expression::Unary {
            op: _,
            expr: operand,
        } => {
            handle.check_dep(operand)?;
        }
        crate::Expression::Binary { op: _, left, right } => {
            handle.check_dep(left)?.check_dep(right)?;
        }
        crate::Expression::Select {
            condition,
            accept,
            reject,
        } => {
            handle
                .check_dep(condition)?
                .check_dep(accept)?
                .check_dep(reject)?;
        }
        crate::Expression::Derivative { expr: argument, .. } => {
            handle.check_dep(argument)?;
        }
        crate::Expression::Relational { fun: _, argument } => {
            handle.check_dep(argument)?;
        }
        crate::Expression::Math {
            fun: _,
            arg,
            arg1,
            arg2,
            arg3,
        } => {
            handle
                .check_dep(arg)?
                .check_dep_opt(arg1)?
                .check_dep_opt(arg2)?
                .check_dep_opt(arg3)?;
        }
        crate::Expression::As {
            expr: input,
            kind: _,
            convert: _,
        } => {
            handle.check_dep(input)?;
        }
        crate::Expression::CallResult(function) => {
            Self::validate_function_handle(function, functions)?;
            // Entry points (`current_function == None`) cannot be called, so
            // the call-ordering check only applies to module functions.
            if let Some(handle) = current_function {
                handle.check_dep(function)?;
            }
        }
        // Result markers carry no handles to validate.
        crate::Expression::AtomicResult { .. }
        | crate::Expression::RayQueryProceedResult
        | crate::Expression::WorkGroupUniformLoadResult { .. } => (),
        crate::Expression::ArrayLength(array) => {
            handle.check_dep(array)?;
        }
        crate::Expression::RayQueryGetIntersection {
            query,
            committed: _,
        } => {
            handle.check_dep(query)?;
        }
    }
    Ok(())
}
/// Validate every expression and function handle referenced by the statements
/// of `block`, recursing into nested blocks (if/switch/loop bodies).
fn validate_block_handles(
    block: &crate::Block,
    expressions: &Arena<crate::Expression>,
    functions: &Arena<crate::Function>,
) -> Result<(), InvalidHandleError> {
    let validate_block = |block| Self::validate_block_handles(block, expressions, functions);
    let validate_expr = |handle| Self::validate_expression_handle(handle, expressions);
    let validate_expr_opt = |handle_opt| {
        if let Some(handle) = handle_opt {
            validate_expr(handle)?;
        }
        Ok(())
    };

    block.iter().try_for_each(|stmt| match *stmt {
        crate::Statement::Emit(ref expr_range) => {
            expr_range.check_valid_for(expressions)?;
            Ok(())
        }
        crate::Statement::Block(ref block) => {
            validate_block(block)?;
            Ok(())
        }
        crate::Statement::If {
            condition,
            ref accept,
            ref reject,
        } => {
            validate_expr(condition)?;
            validate_block(accept)?;
            validate_block(reject)?;
            Ok(())
        }
        crate::Statement::Switch {
            selector,
            ref cases,
        } => {
            validate_expr(selector)?;
            for &crate::SwitchCase {
                value: _,
                ref body,
                fall_through: _,
            } in cases
            {
                validate_block(body)?;
            }
            Ok(())
        }
        crate::Statement::Loop {
            ref body,
            ref continuing,
            break_if,
        } => {
            validate_block(body)?;
            validate_block(continuing)?;
            validate_expr_opt(break_if)?;
            Ok(())
        }
        crate::Statement::Return { value } => validate_expr_opt(value),
        crate::Statement::Store { pointer, value } => {
            validate_expr(pointer)?;
            validate_expr(value)?;
            Ok(())
        }
        crate::Statement::ImageStore {
            image,
            coordinate,
            array_index,
            value,
        } => {
            validate_expr(image)?;
            validate_expr(coordinate)?;
            validate_expr_opt(array_index)?;
            validate_expr(value)?;
            Ok(())
        }
        crate::Statement::Atomic {
            pointer,
            fun,
            value,
            result,
        } => {
            validate_expr(pointer)?;
            // Only `Exchange` carries an extra (optional) expression handle.
            match fun {
                crate::AtomicFunction::Add
                | crate::AtomicFunction::Subtract
                | crate::AtomicFunction::And
                | crate::AtomicFunction::ExclusiveOr
                | crate::AtomicFunction::InclusiveOr
                | crate::AtomicFunction::Min
                | crate::AtomicFunction::Max => (),
                crate::AtomicFunction::Exchange { compare } => validate_expr_opt(compare)?,
            };
            validate_expr(value)?;
            validate_expr(result)?;
            Ok(())
        }
        crate::Statement::WorkGroupUniformLoad { pointer, result } => {
            validate_expr(pointer)?;
            validate_expr(result)?;
            Ok(())
        }
        crate::Statement::Call {
            function,
            ref arguments,
            result,
        } => {
            Self::validate_function_handle(function, functions)?;
            for arg in arguments.iter().copied() {
                validate_expr(arg)?;
            }
            validate_expr_opt(result)?;
            Ok(())
        }
        crate::Statement::RayQuery { query, ref fun } => {
            validate_expr(query)?;
            match *fun {
                crate::RayQueryFunction::Initialize {
                    acceleration_structure,
                    descriptor,
                } => {
                    validate_expr(acceleration_structure)?;
                    validate_expr(descriptor)?;
                }
                crate::RayQueryFunction::Proceed { result } => {
                    validate_expr(result)?;
                }
                crate::RayQueryFunction::Terminate => {}
            }
            Ok(())
        }
        // These statements reference no handles at all.
        crate::Statement::Break
        | crate::Statement::Continue
        | crate::Statement::Kill
        | crate::Statement::Barrier(_) => Ok(()),
    })
}
}
// Let `?` lift a raw `BadHandle` into the validator's error type.
#[cfg(feature = "validate")]
impl From<BadHandle> for ValidationError {
    fn from(source: BadHandle) -> Self {
        Self::InvalidHandle(source.into())
    }
}
// Let `?` lift a forward-dependency error into the validator's error type.
#[cfg(feature = "validate")]
impl From<FwdDepError> for ValidationError {
    fn from(source: FwdDepError) -> Self {
        Self::InvalidHandle(source.into())
    }
}
// Let `?` lift a bad-range error into the validator's error type.
#[cfg(feature = "validate")]
impl From<BadRangeError> for ValidationError {
    fn from(source: BadRangeError) -> Self {
        Self::InvalidHandle(source.into())
    }
}
/// An invalid handle discovered during `validate_module_handles`.
///
/// Wraps the three distinct failure modes; `#[error(transparent)]` forwards
/// the inner error's message unchanged.
#[derive(Clone, Debug, thiserror::Error)]
pub enum InvalidHandleError {
    /// The handle's index is out of bounds for its arena.
    #[error(transparent)]
    BadHandle(#[from] BadHandle),
    /// The handle refers to an item constructed after its referrer.
    #[error(transparent)]
    ForwardDependency(#[from] FwdDepError),
    /// An expression range does not fit within its arena.
    #[error(transparent)]
    BadRange(#[from] BadRangeError),
}
/// A handle depends on another handle that appears later in the same arena,
/// violating the "no forward references" invariant.
#[derive(Clone, Debug, thiserror::Error)]
#[error(
    "{subject:?} of kind {subject_kind:?} depends on {depends_on:?} of kind {depends_on_kind}, \
     which has not been processed yet"
)]
pub struct FwdDepError {
    // This error is used for many `Handle` types, but there's no point in making this generic, so
    // we just flatten them all to `Handle<()>` here.
    subject: Handle<()>,
    // Human-readable type name of the referring item.
    subject_kind: &'static str,
    depends_on: Handle<()>,
    // Human-readable type name of the referenced item.
    depends_on_kind: &'static str,
}
// Validation helpers on handles themselves; `pub(self)` keeps them private to
// this module.
#[cfg(feature = "validate")]
impl<T> Handle<T> {
    /// Check that `self` is valid within `arena` using [`Arena::check_contains_handle`].
    pub(self) fn check_valid_for(self, arena: &Arena<T>) -> Result<(), InvalidHandleError> {
        arena.check_contains_handle(self)?;
        Ok(())
    }

    /// Check that `self` is valid within `arena` using [`UniqueArena::check_contains_handle`].
    pub(self) fn check_valid_for_uniq(
        self,
        arena: &UniqueArena<T>,
    ) -> Result<(), InvalidHandleError>
    where
        T: Eq + Hash,
    {
        arena.check_contains_handle(self)?;
        Ok(())
    }

    /// Check that `depends_on` was constructed before `self` by comparing handle indices.
    ///
    /// If `self` is a valid handle (i.e., it has been validated using [`Self::check_valid_for`])
    /// and this function returns [`Ok`], then it may be assumed that `depends_on` is also valid.
    /// In [`naga`](crate)'s current arena-based implementation, this is useful for validating
    /// recursive definitions of arena-based values in linear time.
    ///
    /// Note that a handle that is equal to `self` also fails the check, so
    /// self-references are rejected too.
    ///
    /// # Errors
    ///
    /// If `depends_on`'s handle is from the same [`Arena`] as `self`'s, but not constructed earlier
    /// than `self`'s, this function returns an error.
    pub(self) fn check_dep(self, depends_on: Self) -> Result<Self, FwdDepError> {
        if depends_on < self {
            Ok(self)
        } else {
            // Flatten both handles to `Handle<()>` for the error report.
            // `+ 1` guarantees a nonzero value, as `Handle::new` takes a
            // `NonZeroU32`.
            let erase_handle_type = |handle: Handle<_>| {
                Handle::new(NonZeroU32::new((handle.index() + 1).try_into().unwrap()).unwrap())
            };
            Err(FwdDepError {
                subject: erase_handle_type(self),
                subject_kind: std::any::type_name::<T>(),
                depends_on: erase_handle_type(depends_on),
                depends_on_kind: std::any::type_name::<T>(),
            })
        }
    }

    /// Like [`Self::check_dep`], except for [`Option`]al handle values.
    pub(self) fn check_dep_opt(self, depends_on: Option<Self>) -> Result<Self, FwdDepError> {
        self.check_dep_iter(depends_on.into_iter())
    }

    /// Like [`Self::check_dep`], except for [`Iterator`]s over handle values.
    pub(self) fn check_dep_iter(
        self,
        depends_on: impl Iterator<Item = Self>,
    ) -> Result<Self, FwdDepError> {
        for handle in depends_on {
            self.check_dep(handle)?;
        }
        Ok(self)
    }
}
#[cfg(feature = "validate")]
impl<T> crate::arena::Range<T> {
    /// Check that this range lies entirely within `arena`.
    pub(self) fn check_valid_for(&self, arena: &Arena<T>) -> Result<(), BadRangeError> {
        arena.check_contains_range(self)
    }
}
/// A constant whose `init` handle points into the wrong arena (here, a
/// function-expression arena) must be rejected by
/// `validate_const_expression_handles`.
#[test]
#[cfg(feature = "validate")]
fn constant_deps() {
    use crate::{Constant, Expression, Literal, Span, Type, TypeInner};

    let nowhere = Span::default();

    let mut types = UniqueArena::new();
    let mut const_exprs = Arena::new();
    let mut fun_exprs = Arena::new();
    let mut constants = Arena::new();

    let i32_handle = types.insert(
        Type {
            name: None,
            inner: TypeInner::Scalar {
                kind: crate::ScalarKind::Sint,
                width: 4,
            },
        },
        nowhere,
    );

    // Construct a self-referential constant by misusing a handle to
    // fun_exprs as a constant initializer.
    let fun_expr = fun_exprs.append(Expression::Literal(Literal::I32(42)), nowhere);
    let self_referential_const = constants.append(
        Constant {
            name: None,
            r#override: crate::Override::None,
            ty: i32_handle,
            init: fun_expr,
        },
        nowhere,
    );
    let _self_referential_expr =
        const_exprs.append(Expression::Constant(self_referential_const), nowhere);

    // The bogus `init` must surface as a handle-validation error.
    for handle_and_expr in const_exprs.iter() {
        assert!(super::Validator::validate_const_expression_handles(
            handle_and_expr,
            &constants,
            &types,
        )
        .is_err());
    }
}

View file

@ -0,0 +1,686 @@
use super::{
analyzer::{FunctionInfo, GlobalUse},
Capabilities, Disalignment, FunctionError, ModuleInfo,
};
use crate::arena::{Handle, UniqueArena};
use crate::span::{AddSpan as _, MapErrWithSpan as _, SpanProvider as _, WithSpan};
use bit_set::BitSet;
// Upper bound on a compute workgroup dimension (0x4000 = 16384).
// NOTE(review): the consuming check is outside this chunk — confirm usage.
#[cfg(feature = "validate")]
const MAX_WORKGROUP_SIZE: u32 = 0x4000;
/// Error produced while validating a module-level [`crate::GlobalVariable`].
#[derive(Clone, Debug, thiserror::Error)]
pub enum GlobalVariableError {
    #[error("Usage isn't compatible with address space {0:?}")]
    InvalidUsage(crate::AddressSpace),
    #[error("Type isn't compatible with address space {0:?}")]
    InvalidType(crate::AddressSpace),
    #[error("Type flags {seen:?} do not meet the required {required:?}")]
    MissingTypeFlags {
        required: super::TypeFlags,
        seen: super::TypeFlags,
    },
    #[error("Capability {0:?} is not supported")]
    UnsupportedCapability(Capabilities),
    #[error("Binding decoration is missing or not applicable")]
    InvalidBinding,
    #[error("Alignment requirements for address space {0:?} are not met by {1:?}")]
    Alignment(
        crate::AddressSpace,
        Handle<crate::Type>,
        #[source] Disalignment,
    ),
    #[error("Initializer doesn't match the variable type")]
    InitializerType,
}
/// Error produced while validating an entry point's inputs/outputs (varyings).
#[derive(Clone, Debug, thiserror::Error)]
pub enum VaryingError {
    #[error("The type {0:?} does not match the varying")]
    InvalidType(Handle<crate::Type>),
    #[error("The type {0:?} cannot be used for user-defined entry point inputs or outputs")]
    NotIOShareableType(Handle<crate::Type>),
    #[error("Interpolation is not valid")]
    InvalidInterpolation,
    #[error("Interpolation must be specified on vertex shader outputs and fragment shader inputs")]
    MissingInterpolation,
    #[error("Built-in {0:?} is not available at this stage")]
    InvalidBuiltInStage(crate::BuiltIn),
    #[error("Built-in type for {0:?} is invalid")]
    InvalidBuiltInType(crate::BuiltIn),
    #[error("Entry point arguments and return values must all have bindings")]
    MissingBinding,
    #[error("Struct member {0} is missing a binding")]
    MemberMissingBinding(u32),
    #[error("Multiple bindings at location {location} are present")]
    BindingCollision { location: u32 },
    #[error("Built-in {0:?} is present more than once")]
    DuplicateBuiltIn(crate::BuiltIn),
    #[error("Capability {0:?} is not supported")]
    UnsupportedCapability(Capabilities),
}
/// Error produced while validating an entry point as a whole.
#[derive(Clone, Debug, thiserror::Error)]
pub enum EntryPointError {
    #[error("Multiple conflicting entry points")]
    Conflict,
    #[error("Vertex shaders must return a `@builtin(position)` output value")]
    MissingVertexOutputPosition,
    #[error("Early depth test is not applicable")]
    UnexpectedEarlyDepthTest,
    #[error("Workgroup size is not applicable")]
    UnexpectedWorkgroupSize,
    #[error("Workgroup size is out of range")]
    OutOfRangeWorkgroupSize,
    #[error("Uses operations forbidden at this stage")]
    ForbiddenStageOperations,
    #[error("Global variable {0:?} is used incorrectly as {1:?}")]
    InvalidGlobalUsage(Handle<crate::GlobalVariable>, GlobalUse),
    #[error("Bindings for {0:?} conflict with other resource")]
    BindingCollision(Handle<crate::GlobalVariable>),
    #[error("Argument {0} varying error")]
    Argument(u32, #[source] VaryingError),
    #[error(transparent)]
    Result(#[from] VaryingError),
    #[error("Location {location} interpolation of an integer has to be flat")]
    InvalidIntegerInterpolation { location: u32 },
    #[error(transparent)]
    Function(#[from] FunctionError),
}
/// Translate a variable's declared storage access flags into the
/// corresponding [`GlobalUse`] set. `QUERY` is always included.
#[cfg(feature = "validate")]
fn storage_usage(access: crate::StorageAccess) -> GlobalUse {
    let mut usage = GlobalUse::QUERY;
    // Map each access flag onto its matching usage bit.
    for (flag, global) in [
        (crate::StorageAccess::LOAD, GlobalUse::READ),
        (crate::StorageAccess::STORE, GlobalUse::WRITE),
    ] {
        if access.contains(flag) {
            usage |= global;
        }
    }
    usage
}
/// Mutable state threaded through validation of one entry point's varyings
/// (its inputs or its outputs, selected by `output`).
struct VaryingContext<'a> {
    stage: crate::ShaderStage,
    // `true` when validating outputs, `false` for inputs.
    output: bool,
    types: &'a UniqueArena<crate::Type>,
    type_info: &'a Vec<super::r#type::TypeInfo>,
    // Tracks `@location` slots already claimed, to detect collisions.
    location_mask: &'a mut BitSet,
    // Tracks built-ins already seen, to detect duplicates.
    built_ins: &'a mut crate::FastHashSet<crate::BuiltIn>,
    capabilities: Capabilities,
    #[cfg(feature = "validate")]
    flags: super::ValidationFlags,
}
impl VaryingContext<'_> {
    /// Validate a single binding (built-in or `@location`) of type `ty`
    /// against the current stage/direction, capabilities, and prior bindings.
    fn validate_impl(
        &mut self,
        ty: Handle<crate::Type>,
        binding: &crate::Binding,
    ) -> Result<(), VaryingError> {
        use crate::{
            BuiltIn as Bi, ScalarKind as Sk, ShaderStage as St, TypeInner as Ti, VectorSize as Vs,
        };

        let ty_inner = &self.types[ty].inner;
        match *binding {
            crate::Binding::BuiltIn(built_in) => {
                // Ignore the `invariant` field for the sake of duplicate checks,
                // but use the original in error messages.
                let canonical = if let crate::BuiltIn::Position { .. } = built_in {
                    crate::BuiltIn::Position { invariant: false }
                } else {
                    built_in
                };

                if self.built_ins.contains(&canonical) {
                    return Err(VaryingError::DuplicateBuiltIn(built_in));
                }
                self.built_ins.insert(canonical);

                // Some built-ins are gated behind optional capabilities.
                let required = match built_in {
                    Bi::ClipDistance => Capabilities::CLIP_DISTANCE,
                    Bi::CullDistance => Capabilities::CULL_DISTANCE,
                    Bi::PrimitiveIndex => Capabilities::PRIMITIVE_INDEX,
                    Bi::ViewIndex => Capabilities::MULTIVIEW,
                    Bi::SampleIndex => Capabilities::MULTISAMPLED_SHADING,
                    _ => Capabilities::empty(),
                };
                if !self.capabilities.contains(required) {
                    return Err(VaryingError::UnsupportedCapability(required));
                }

                // Each built-in is checked for (a) visibility at this
                // stage/direction and (b) the exact type it must have.
                let width = 4;
                let (visible, type_good) = match built_in {
                    Bi::BaseInstance | Bi::BaseVertex | Bi::InstanceIndex | Bi::VertexIndex => (
                        self.stage == St::Vertex && !self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                    Bi::ClipDistance | Bi::CullDistance => (
                        self.stage == St::Vertex && self.output,
                        match *ty_inner {
                            Ti::Array { base, .. } => {
                                self.types[base].inner
                                    == Ti::Scalar {
                                        kind: Sk::Float,
                                        width,
                                    }
                            }
                            _ => false,
                        },
                    ),
                    Bi::PointSize => (
                        self.stage == St::Vertex && self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Float,
                                width,
                            },
                    ),
                    Bi::PointCoord => (
                        self.stage == St::Fragment && !self.output,
                        *ty_inner
                            == Ti::Vector {
                                size: Vs::Bi,
                                kind: Sk::Float,
                                width,
                            },
                    ),
                    Bi::Position { .. } => (
                        match self.stage {
                            St::Vertex => self.output,
                            St::Fragment => !self.output,
                            St::Compute => false,
                        },
                        *ty_inner
                            == Ti::Vector {
                                size: Vs::Quad,
                                kind: Sk::Float,
                                width,
                            },
                    ),
                    Bi::ViewIndex => (
                        match self.stage {
                            St::Vertex | St::Fragment => !self.output,
                            St::Compute => false,
                        },
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Sint,
                                width,
                            },
                    ),
                    Bi::FragDepth => (
                        self.stage == St::Fragment && self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Float,
                                width,
                            },
                    ),
                    Bi::FrontFacing => (
                        self.stage == St::Fragment && !self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Bool,
                                width: crate::BOOL_WIDTH,
                            },
                    ),
                    Bi::PrimitiveIndex => (
                        self.stage == St::Fragment && !self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                    Bi::SampleIndex => (
                        self.stage == St::Fragment && !self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                    // `SampleMask` is valid as both input and output.
                    Bi::SampleMask => (
                        self.stage == St::Fragment,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                    Bi::LocalInvocationIndex => (
                        self.stage == St::Compute && !self.output,
                        *ty_inner
                            == Ti::Scalar {
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                    Bi::GlobalInvocationId
                    | Bi::LocalInvocationId
                    | Bi::WorkGroupId
                    | Bi::WorkGroupSize
                    | Bi::NumWorkGroups => (
                        self.stage == St::Compute && !self.output,
                        *ty_inner
                            == Ti::Vector {
                                size: Vs::Tri,
                                kind: Sk::Uint,
                                width,
                            },
                    ),
                };

                if !visible {
                    return Err(VaryingError::InvalidBuiltInStage(built_in));
                }
                if !type_good {
                    log::warn!("Wrong builtin type: {:?}", ty_inner);
                    return Err(VaryingError::InvalidBuiltInType(built_in));
                }
            }
            crate::Binding::Location {
                location,
                interpolation,
                sampling,
            } => {
                // Only IO-shareable types may be stored in locations.
                if !self.type_info[ty.index()]
                    .flags
                    .contains(super::TypeFlags::IO_SHAREABLE)
                {
                    return Err(VaryingError::NotIOShareableType(ty));
                }
                // `insert` returns false when the location was already taken.
                if !self.location_mask.insert(location as usize) {
                    #[cfg(feature = "validate")]
                    if self.flags.contains(super::ValidationFlags::BINDINGS) {
                        return Err(VaryingError::BindingCollision { location });
                    }
                }

                let needs_interpolation = match self.stage {
                    crate::ShaderStage::Vertex => self.output,
                    crate::ShaderStage::Fragment => !self.output,
                    crate::ShaderStage::Compute => false,
                };

                // It doesn't make sense to specify a sampling when `interpolation` is `Flat`, but
                // SPIR-V and GLSL both explicitly tolerate such combinations of decorators /
                // qualifiers, so we won't complain about that here.
                let _ = sampling;

                let required = match sampling {
                    Some(crate::Sampling::Sample) => Capabilities::MULTISAMPLED_SHADING,
                    _ => Capabilities::empty(),
                };
                if !self.capabilities.contains(required) {
                    return Err(VaryingError::UnsupportedCapability(required));
                }

                match ty_inner.scalar_kind() {
                    Some(crate::ScalarKind::Float) => {
                        if needs_interpolation && interpolation.is_none() {
                            return Err(VaryingError::MissingInterpolation);
                        }
                    }
                    Some(_) => {
                        // Integer/boolean varyings must be flat-interpolated.
                        if needs_interpolation && interpolation != Some(crate::Interpolation::Flat)
                        {
                            return Err(VaryingError::InvalidInterpolation);
                        }
                    }
                    None => return Err(VaryingError::InvalidType(ty)),
                }
            }
        }

        Ok(())
    }

    /// Validate one entry-point argument or result of type `ty`.
    ///
    /// With no direct `binding`, the type must be a struct whose members each
    /// carry their own binding; those are validated individually.
    fn validate(
        &mut self,
        ty: Handle<crate::Type>,
        binding: Option<&crate::Binding>,
    ) -> Result<(), WithSpan<VaryingError>> {
        let span_context = self.types.get_span_context(ty);
        match binding {
            Some(binding) => self
                .validate_impl(ty, binding)
                .map_err(|e| e.with_span_context(span_context)),
            None => {
                match self.types[ty].inner {
                    crate::TypeInner::Struct { ref members, .. } => {
                        for (index, member) in members.iter().enumerate() {
                            let span_context = self.types.get_span_context(ty);
                            match member.binding {
                                None => {
                                    #[cfg(feature = "validate")]
                                    if self.flags.contains(super::ValidationFlags::BINDINGS) {
                                        return Err(VaryingError::MemberMissingBinding(
                                            index as u32,
                                        )
                                        .with_span_context(span_context));
                                    }
                                    #[cfg(not(feature = "validate"))]
                                    let _ = index;
                                }
                                Some(ref binding) => self
                                    .validate_impl(member.ty, binding)
                                    .map_err(|e| e.with_span_context(span_context))?,
                            }
                        }
                    }
                    _ =>
                    {
                        #[cfg(feature = "validate")]
                        if self.flags.contains(super::ValidationFlags::BINDINGS) {
                            return Err(VaryingError::MissingBinding.with_span());
                        }
                    }
                }
                Ok(())
            }
        }
    }
}
impl super::Validator {
    /// Check that global variable `var` is valid.
    ///
    /// Verifies that the variable's type carries the `TypeFlags` required by
    /// its address space, that resource variables have a binding (and only
    /// resources do), and that any initializer's type is equivalent to the
    /// declared type.
    #[cfg(feature = "validate")]
    pub(super) fn validate_global_var(
        &self,
        var: &crate::GlobalVariable,
        gctx: crate::proc::GlobalCtx,
        mod_info: &ModuleInfo,
    ) -> Result<(), GlobalVariableError> {
        use super::TypeFlags;
        log::debug!("var {:?}", var);
        let inner_ty = match gctx.types[var.ty].inner {
            // A binding array is (mostly) supposed to behave the same as a
            // series of individually bound resources, so we can (mostly)
            // validate a `binding_array<T>` as if it were just a plain `T`.
            crate::TypeInner::BindingArray { base, .. } => base,
            _ => var.ty,
        };
        let type_info = &self.types[inner_ty.index()];
        // For each address space, determine the `TypeFlags` the variable's
        // type must carry, and whether the variable is a bindable resource.
        let (required_type_flags, is_resource) = match var.space {
            // `function` space is for local variables, never globals.
            crate::AddressSpace::Function => {
                return Err(GlobalVariableError::InvalidUsage(var.space))
            }
            crate::AddressSpace::Storage { .. } => {
                if let Err((ty_handle, disalignment)) = type_info.storage_layout {
                    if self.flags.contains(super::ValidationFlags::STRUCT_LAYOUTS) {
                        return Err(GlobalVariableError::Alignment(
                            var.space,
                            ty_handle,
                            disalignment,
                        ));
                    }
                }
                (TypeFlags::DATA | TypeFlags::HOST_SHAREABLE, true)
            }
            crate::AddressSpace::Uniform => {
                if let Err((ty_handle, disalignment)) = type_info.uniform_layout {
                    if self.flags.contains(super::ValidationFlags::STRUCT_LAYOUTS) {
                        return Err(GlobalVariableError::Alignment(
                            var.space,
                            ty_handle,
                            disalignment,
                        ));
                    }
                }
                (
                    TypeFlags::DATA
                        | TypeFlags::COPY
                        | TypeFlags::SIZED
                        | TypeFlags::HOST_SHAREABLE,
                    true,
                )
            }
            crate::AddressSpace::Handle => {
                match gctx.types[inner_ty].inner {
                    crate::TypeInner::Image { class, .. } => match class {
                        // 16-bit normalized storage formats are gated behind
                        // a dedicated capability.
                        crate::ImageClass::Storage {
                            format:
                                crate::StorageFormat::R16Unorm
                                | crate::StorageFormat::R16Snorm
                                | crate::StorageFormat::Rg16Unorm
                                | crate::StorageFormat::Rg16Snorm
                                | crate::StorageFormat::Rgba16Unorm
                                | crate::StorageFormat::Rgba16Snorm,
                            ..
                        } => {
                            if !self
                                .capabilities
                                .contains(Capabilities::STORAGE_TEXTURE_16BIT_NORM_FORMATS)
                            {
                                return Err(GlobalVariableError::UnsupportedCapability(
                                    Capabilities::STORAGE_TEXTURE_16BIT_NORM_FORMATS,
                                ));
                            }
                        }
                        _ => {}
                    },
                    crate::TypeInner::Sampler { .. }
                    | crate::TypeInner::AccelerationStructure
                    | crate::TypeInner::RayQuery => {}
                    _ => {
                        return Err(GlobalVariableError::InvalidType(var.space));
                    }
                }
                // Handles carry no data flags but are still resources.
                (TypeFlags::empty(), true)
            }
            crate::AddressSpace::Private | crate::AddressSpace::WorkGroup => {
                (TypeFlags::DATA | TypeFlags::SIZED, false)
            }
            crate::AddressSpace::PushConstant => {
                if !self.capabilities.contains(Capabilities::PUSH_CONSTANT) {
                    return Err(GlobalVariableError::UnsupportedCapability(
                        Capabilities::PUSH_CONSTANT,
                    ));
                }
                (
                    TypeFlags::DATA
                        | TypeFlags::COPY
                        | TypeFlags::HOST_SHAREABLE
                        | TypeFlags::SIZED,
                    false,
                )
            }
        };
        if !type_info.flags.contains(required_type_flags) {
            return Err(GlobalVariableError::MissingTypeFlags {
                seen: type_info.flags,
                required: required_type_flags,
            });
        }
        // Resources must have a binding; non-resources must not.
        if is_resource != var.binding.is_some() {
            if self.flags.contains(super::ValidationFlags::BINDINGS) {
                return Err(GlobalVariableError::InvalidBinding);
            }
        }
        if let Some(init) = var.init {
            let decl_ty = &gctx.types[var.ty].inner;
            let init_ty = mod_info[init].inner_with(gctx.types);
            if !decl_ty.equivalent(init_ty, gctx.types) {
                return Err(GlobalVariableError::InitializerType);
            }
        }
        Ok(())
    }
    /// Validate entry point `ep`, returning its analysis info.
    ///
    /// Beyond the checks done for ordinary functions, this validates the
    /// stage-specific rules: early depth test, workgroup size, input/output
    /// varyings and built-ins, and per-stage global usage and bindings.
    pub(super) fn validate_entry_point(
        &mut self,
        ep: &crate::EntryPoint,
        module: &crate::Module,
        mod_info: &ModuleInfo,
    ) -> Result<FunctionInfo, WithSpan<EntryPointError>> {
        // `early_depth_test` requires the capability and a fragment stage.
        #[cfg(feature = "validate")]
        if ep.early_depth_test.is_some() {
            let required = Capabilities::EARLY_DEPTH_TEST;
            if !self.capabilities.contains(required) {
                return Err(
                    EntryPointError::Result(VaryingError::UnsupportedCapability(required))
                        .with_span(),
                );
            }
            if ep.stage != crate::ShaderStage::Fragment {
                return Err(EntryPointError::UnexpectedEarlyDepthTest.with_span());
            }
        }
        // Only compute entry points take a (non-zero, bounded) workgroup size.
        #[cfg(feature = "validate")]
        if ep.stage == crate::ShaderStage::Compute {
            if ep
                .workgroup_size
                .iter()
                .any(|&s| s == 0 || s > MAX_WORKGROUP_SIZE)
            {
                return Err(EntryPointError::OutOfRangeWorkgroupSize.with_span());
            }
        } else if ep.workgroup_size != [0; 3] {
            return Err(EntryPointError::UnexpectedWorkgroupSize.with_span());
        }
        let info = self
            .validate_function(&ep.function, module, mod_info, true)
            .map_err(WithSpan::into_other)?;
        // The function's operations must all be available in this stage.
        #[cfg(feature = "validate")]
        {
            use super::ShaderStages;
            let stage_bit = match ep.stage {
                crate::ShaderStage::Vertex => ShaderStages::VERTEX,
                crate::ShaderStage::Fragment => ShaderStages::FRAGMENT,
                crate::ShaderStage::Compute => ShaderStages::COMPUTE,
            };
            if !info.available_stages.contains(stage_bit) {
                return Err(EntryPointError::ForbiddenStageOperations.with_span());
            }
        }
        self.location_mask.clear();
        let mut argument_built_ins = crate::FastHashSet::default();
        // TODO: add span info to function arguments
        for (index, fa) in ep.function.arguments.iter().enumerate() {
            let mut ctx = VaryingContext {
                stage: ep.stage,
                output: false,
                types: &module.types,
                type_info: &self.types,
                location_mask: &mut self.location_mask,
                built_ins: &mut argument_built_ins,
                capabilities: self.capabilities,
                #[cfg(feature = "validate")]
                flags: self.flags,
            };
            ctx.validate(fa.ty, fa.binding.as_ref())
                .map_err_inner(|e| EntryPointError::Argument(index as u32, e).with_span())?;
        }
        // Locations are tracked separately for inputs and outputs.
        self.location_mask.clear();
        if let Some(ref fr) = ep.function.result {
            let mut result_built_ins = crate::FastHashSet::default();
            let mut ctx = VaryingContext {
                stage: ep.stage,
                output: true,
                types: &module.types,
                type_info: &self.types,
                location_mask: &mut self.location_mask,
                built_ins: &mut result_built_ins,
                capabilities: self.capabilities,
                #[cfg(feature = "validate")]
                flags: self.flags,
            };
            ctx.validate(fr.ty, fr.binding.as_ref())
                .map_err_inner(|e| EntryPointError::Result(e).with_span())?;
            // Vertex shaders must output the (non-invariant) position built-in.
            #[cfg(feature = "validate")]
            if ep.stage == crate::ShaderStage::Vertex
                && !result_built_ins.contains(&crate::BuiltIn::Position { invariant: false })
            {
                return Err(EntryPointError::MissingVertexOutputPosition.with_span());
            }
        } else if ep.stage == crate::ShaderStage::Vertex {
            #[cfg(feature = "validate")]
            return Err(EntryPointError::MissingVertexOutputPosition.with_span());
        }
        for bg in self.bind_group_masks.iter_mut() {
            bg.clear();
        }
        #[cfg(feature = "validate")]
        for (var_handle, var) in module.global_variables.iter() {
            let usage = info[var_handle];
            if usage.is_empty() {
                continue;
            }
            // What this entry point may do with a global depends on its
            // address space (and, for handles, the image class).
            let allowed_usage = match var.space {
                crate::AddressSpace::Function => unreachable!(),
                crate::AddressSpace::Uniform => GlobalUse::READ | GlobalUse::QUERY,
                crate::AddressSpace::Storage { access } => storage_usage(access),
                crate::AddressSpace::Handle => match module.types[var.ty].inner {
                    crate::TypeInner::BindingArray { base, .. } => match module.types[base].inner {
                        crate::TypeInner::Image {
                            class: crate::ImageClass::Storage { access, .. },
                            ..
                        } => storage_usage(access),
                        _ => GlobalUse::READ | GlobalUse::QUERY,
                    },
                    crate::TypeInner::Image {
                        class: crate::ImageClass::Storage { access, .. },
                        ..
                    } => storage_usage(access),
                    _ => GlobalUse::READ | GlobalUse::QUERY,
                },
                crate::AddressSpace::Private | crate::AddressSpace::WorkGroup => GlobalUse::all(),
                crate::AddressSpace::PushConstant => GlobalUse::READ,
            };
            if !allowed_usage.contains(usage) {
                log::warn!("\tUsage error for: {:?}", var);
                log::warn!(
                    "\tAllowed usage: {:?}, requested: {:?}",
                    allowed_usage,
                    usage
                );
                return Err(EntryPointError::InvalidGlobalUsage(var_handle, usage)
                    .with_span_handle(var_handle, &module.global_variables));
            }
            if let Some(ref bind) = var.binding {
                while self.bind_group_masks.len() <= bind.group as usize {
                    self.bind_group_masks.push(BitSet::new());
                }
                // Each (group, binding) pair may be claimed by at most one
                // global used by this entry point.
                if !self.bind_group_masks[bind.group as usize].insert(bind.binding as usize) {
                    if self.flags.contains(super::ValidationFlags::BINDINGS) {
                        return Err(EntryPointError::BindingCollision(var_handle)
                            .with_span_handle(var_handle, &module.global_variables));
                    }
                }
            }
        }
        Ok(info)
    }
}

495
third-party/vendor/naga/src/valid/mod.rs vendored Normal file
View file

@ -0,0 +1,495 @@
/*!
Shader validator.
*/
mod analyzer;
mod compose;
mod expression;
mod function;
mod handles;
mod interface;
mod r#type;
use crate::{
arena::Handle,
proc::{LayoutError, Layouter, TypeResolution},
FastHashSet,
};
use bit_set::BitSet;
use std::ops;
//TODO: analyze the model at the same time as we validate it,
// merge the corresponding matches over expressions and statements.
use crate::span::{AddSpan as _, WithSpan};
pub use analyzer::{ExpressionInfo, FunctionInfo, GlobalUse, Uniformity, UniformityRequirements};
pub use compose::ComposeError;
pub use expression::ExpressionError;
pub use function::{CallError, FunctionError, LocalVariableError};
pub use interface::{EntryPointError, GlobalVariableError, VaryingError};
pub use r#type::{Disalignment, TypeError, TypeFlags};
use self::handles::InvalidHandleError;
bitflags::bitflags! {
    /// Validation flags.
    ///
    /// If you are working with trusted shaders, then you may be able
    /// to save some time by skipping validation.
    ///
    /// If you do not perform full validation, invalid shaders may
    /// cause Naga to panic. If you do perform full validation and
    /// [`Validator::validate`] returns `Ok`, then Naga promises that
    /// code generation will either succeed or return an error; it
    /// should never panic.
    ///
    /// The default value for `ValidationFlags` is
    /// `ValidationFlags::all()`. If Naga's `"validate"` feature is
    /// enabled, this requests full validation; otherwise, this
    /// requests no validation. (The `"validate"` feature is disabled
    /// by default.)
    #[cfg_attr(feature = "serialize", derive(serde::Serialize))]
    #[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct ValidationFlags: u8 {
        /// Expressions.
        #[cfg(feature = "validate")]
        const EXPRESSIONS = 0x1;
        /// Statements and blocks of them.
        #[cfg(feature = "validate")]
        const BLOCKS = 0x2;
        /// Uniformity of control flow for operations that require it.
        #[cfg(feature = "validate")]
        const CONTROL_FLOW_UNIFORMITY = 0x4;
        /// Host-shareable structure layouts.
        #[cfg(feature = "validate")]
        const STRUCT_LAYOUTS = 0x8;
        /// Constants and constant expressions.
        #[cfg(feature = "validate")]
        const CONSTANTS = 0x10;
        /// Group, binding, and location attributes.
        #[cfg(feature = "validate")]
        const BINDINGS = 0x20;
    }
}
impl Default for ValidationFlags {
fn default() -> Self {
Self::all()
}
}
bitflags::bitflags! {
    /// Allowed IR capabilities.
    #[must_use]
    #[cfg_attr(feature = "serialize", derive(serde::Serialize))]
    #[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct Capabilities: u16 {
        /// Support for [`AddressSpace::PushConstant`].
        const PUSH_CONSTANT = 0x1;
        /// Float values with width = 8 (64-bit floats).
        const FLOAT64 = 0x2;
        /// Support for [`BuiltIn::PrimitiveIndex`].
        const PRIMITIVE_INDEX = 0x4;
        /// Support for non-uniform indexing of sampled textures and storage buffer arrays.
        const SAMPLED_TEXTURE_AND_STORAGE_BUFFER_ARRAY_NON_UNIFORM_INDEXING = 0x8;
        /// Support for non-uniform indexing of uniform buffers and storage texture arrays.
        const UNIFORM_BUFFER_AND_STORAGE_TEXTURE_ARRAY_NON_UNIFORM_INDEXING = 0x10;
        /// Support for non-uniform indexing of samplers.
        const SAMPLER_NON_UNIFORM_INDEXING = 0x20;
        /// Support for [`BuiltIn::ClipDistance`].
        const CLIP_DISTANCE = 0x40;
        /// Support for [`BuiltIn::CullDistance`].
        const CULL_DISTANCE = 0x80;
        /// Support for 16-bit normalized storage texture formats.
        const STORAGE_TEXTURE_16BIT_NORM_FORMATS = 0x100;
        /// Support for [`BuiltIn::ViewIndex`].
        const MULTIVIEW = 0x200;
        /// Support for `early_depth_test`.
        const EARLY_DEPTH_TEST = 0x400;
        /// Support for [`BuiltIn::SampleIndex`] and [`Sampling::Sample`].
        const MULTISAMPLED_SHADING = 0x800;
        /// Support for ray queries and acceleration structures.
        const RAY_QUERY = 0x1000;
    }
}
impl Default for Capabilities {
fn default() -> Self {
Self::MULTISAMPLED_SHADING
}
}
bitflags::bitflags! {
    /// A set of shader stages.
    ///
    /// Used to record which pipeline stages an operation is available in
    /// (see `FunctionInfo::available_stages`).
    #[cfg_attr(feature = "serialize", derive(serde::Serialize))]
    #[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct ShaderStages: u8 {
        const VERTEX = 0x1;
        const FRAGMENT = 0x2;
        const COMPUTE = 0x4;
    }
}
/// Analysis results for a validated module.
#[derive(Debug)]
#[cfg_attr(feature = "serialize", derive(serde::Serialize))]
#[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
pub struct ModuleInfo {
    /// Flags computed for each type, indexed by type handle.
    type_flags: Vec<TypeFlags>,
    /// Analysis info for each function, indexed by function handle.
    functions: Vec<FunctionInfo>,
    /// Analysis info for each entry point, in module order.
    entry_points: Vec<FunctionInfo>,
    /// Resolved type of each constant expression, indexed by handle.
    const_expression_types: Box<[TypeResolution]>,
}
impl ops::Index<Handle<crate::Type>> for ModuleInfo {
    type Output = TypeFlags;
    /// Look up the computed [`TypeFlags`] for a type handle.
    fn index(&self, handle: Handle<crate::Type>) -> &Self::Output {
        &self.type_flags[handle.index()]
    }
}
impl ops::Index<Handle<crate::Function>> for ModuleInfo {
    type Output = FunctionInfo;
    /// Look up the analysis info for a function handle.
    fn index(&self, handle: Handle<crate::Function>) -> &Self::Output {
        &self.functions[handle.index()]
    }
}
impl ops::Index<Handle<crate::Expression>> for ModuleInfo {
    type Output = TypeResolution;
    /// Look up the resolved type of a constant expression handle.
    fn index(&self, handle: Handle<crate::Expression>) -> &Self::Output {
        &self.const_expression_types[handle.index()]
    }
}
/// A validator for Naga IR modules; reusable across modules via [`Validator::reset`].
#[derive(Debug)]
pub struct Validator {
    /// Which validation passes to run.
    flags: ValidationFlags,
    /// IR capabilities the consumer supports.
    capabilities: Capabilities,
    /// Per-type validation info, filled by `validate_type`; indexed by type handle.
    types: Vec<r#type::TypeInfo>,
    /// Layout calculator, reused across validations.
    layouter: Layouter,
    /// Scratch set of IO locations seen; cleared per entry point.
    location_mask: BitSet,
    /// Per-group scratch sets of bindings seen; cleared per entry point.
    bind_group_masks: Vec<BitSet>,
    // Scratch state used by validation passes outside this chunk.
    #[allow(dead_code)]
    switch_values: FastHashSet<crate::SwitchValue>,
    valid_expression_list: Vec<Handle<crate::Expression>>,
    valid_expression_set: BitSet,
}
/// Errors arising while validating a constant expression.
#[derive(Clone, Debug, thiserror::Error)]
pub enum ConstExpressionError {
    #[error("The expression is not a constant expression")]
    NonConst,
    #[error(transparent)]
    Compose(#[from] ComposeError),
    #[error("Type resolution failed")]
    Type(#[from] crate::proc::ResolveError),
}
/// Errors arising while validating a constant declaration.
#[derive(Clone, Debug, thiserror::Error)]
pub enum ConstantError {
    #[error("The type doesn't match the constant")]
    InvalidType,
    #[error("The type is not constructible")]
    NonConstructibleType,
}
/// The top-level error type returned by [`Validator::validate`].
#[derive(Clone, Debug, thiserror::Error)]
pub enum ValidationError {
    #[error(transparent)]
    InvalidHandle(#[from] InvalidHandleError),
    #[error(transparent)]
    Layouter(#[from] LayoutError),
    #[error("Type {handle:?} '{name}' is invalid")]
    Type {
        handle: Handle<crate::Type>,
        name: String,
        source: TypeError,
    },
    #[error("Constant expression {handle:?} is invalid")]
    ConstExpression {
        handle: Handle<crate::Expression>,
        source: ConstExpressionError,
    },
    #[error("Constant {handle:?} '{name}' is invalid")]
    Constant {
        handle: Handle<crate::Constant>,
        name: String,
        source: ConstantError,
    },
    #[error("Global variable {handle:?} '{name}' is invalid")]
    GlobalVariable {
        handle: Handle<crate::GlobalVariable>,
        name: String,
        source: GlobalVariableError,
    },
    #[error("Function {handle:?} '{name}' is invalid")]
    Function {
        handle: Handle<crate::Function>,
        name: String,
        source: FunctionError,
    },
    #[error("Entry point {name} at {stage:?} is invalid")]
    EntryPoint {
        stage: crate::ShaderStage,
        name: String,
        source: EntryPointError,
    },
    #[error("Module is corrupted")]
    Corrupted,
}
impl crate::TypeInner {
    /// Whether this type is treated as sized by validation.
    ///
    /// Note that structs always report `true` here; a struct whose last
    /// member is a runtime-sized array is handled as a special case by
    /// struct validation.
    #[cfg(feature = "validate")]
    const fn is_sized(&self) -> bool {
        match *self {
            Self::Scalar { .. }
            | Self::Vector { .. }
            | Self::Matrix { .. }
            | Self::Array {
                size: crate::ArraySize::Constant(_),
                ..
            }
            | Self::Atomic { .. }
            | Self::Pointer { .. }
            | Self::ValuePointer { .. }
            | Self::Struct { .. } => true,
            Self::Array { .. }
            | Self::Image { .. }
            | Self::Sampler { .. }
            | Self::AccelerationStructure
            | Self::RayQuery
            | Self::BindingArray { .. } => false,
        }
    }
    /// Return the `ImageDimension` for which `self` is an appropriate coordinate.
    ///
    /// Only integer scalars (1D) and integer 2-/3-component vectors qualify.
    #[cfg(feature = "validate")]
    const fn image_storage_coordinates(&self) -> Option<crate::ImageDimension> {
        match *self {
            Self::Scalar {
                kind: crate::ScalarKind::Sint | crate::ScalarKind::Uint,
                ..
            } => Some(crate::ImageDimension::D1),
            Self::Vector {
                size: crate::VectorSize::Bi,
                kind: crate::ScalarKind::Sint | crate::ScalarKind::Uint,
                ..
            } => Some(crate::ImageDimension::D2),
            Self::Vector {
                size: crate::VectorSize::Tri,
                kind: crate::ScalarKind::Sint | crate::ScalarKind::Uint,
                ..
            } => Some(crate::ImageDimension::D3),
            _ => None,
        }
    }
}
impl Validator {
    /// Construct a new validator instance.
    pub fn new(flags: ValidationFlags, capabilities: Capabilities) -> Self {
        Validator {
            flags,
            capabilities,
            types: Vec::new(),
            layouter: Layouter::default(),
            location_mask: BitSet::new(),
            bind_group_masks: Vec::new(),
            switch_values: FastHashSet::default(),
            valid_expression_list: Vec::new(),
            valid_expression_set: BitSet::new(),
        }
    }
    /// Reset the validator internals so the instance can be reused.
    pub fn reset(&mut self) {
        self.types.clear();
        self.layouter.clear();
        self.location_mask.clear();
        self.bind_group_masks.clear();
        self.switch_values.clear();
        self.valid_expression_list.clear();
        self.valid_expression_set.clear();
    }
    /// Check that constant `handle` has a constructible type and that its
    /// initializer's type matches the declared type.
    #[cfg(feature = "validate")]
    fn validate_constant(
        &self,
        handle: Handle<crate::Constant>,
        gctx: crate::proc::GlobalCtx,
        mod_info: &ModuleInfo,
    ) -> Result<(), ConstantError> {
        let con = &gctx.constants[handle];
        let type_info = &self.types[con.ty.index()];
        if !type_info.flags.contains(TypeFlags::CONSTRUCTIBLE) {
            return Err(ConstantError::NonConstructibleType);
        }
        let decl_ty = &gctx.types[con.ty].inner;
        let init_ty = mod_info[con.init].inner_with(gctx.types);
        if !decl_ty.equivalent(init_ty, gctx.types) {
            return Err(ConstantError::InvalidType);
        }
        Ok(())
    }
    /// Check the given module to be valid.
    ///
    /// Validation order matters: handles first (everything below indexes
    /// arenas with them), then layouts, types, constant expressions,
    /// constants, globals, functions, and finally entry points.
    pub fn validate(
        &mut self,
        module: &crate::Module,
    ) -> Result<ModuleInfo, WithSpan<ValidationError>> {
        self.reset();
        self.reset_types(module.types.len());
        #[cfg(feature = "validate")]
        Self::validate_module_handles(module).map_err(|e| e.with_span())?;
        self.layouter.update(module.to_ctx()).map_err(|e| {
            let handle = e.ty;
            ValidationError::from(e).with_span_handle(handle, &module.types)
        })?;
        // Placeholder resolution for const-expressions, overwritten as they
        // are processed below.
        let placeholder = TypeResolution::Value(crate::TypeInner::Scalar {
            kind: crate::ScalarKind::Bool,
            width: 0,
        });
        let mut mod_info = ModuleInfo {
            type_flags: Vec::with_capacity(module.types.len()),
            functions: Vec::with_capacity(module.functions.len()),
            entry_points: Vec::with_capacity(module.entry_points.len()),
            const_expression_types: vec![placeholder; module.const_expressions.len()]
                .into_boxed_slice(),
        };
        for (handle, ty) in module.types.iter() {
            let ty_info = self
                .validate_type(handle, module.to_ctx())
                .map_err(|source| {
                    ValidationError::Type {
                        handle,
                        name: ty.name.clone().unwrap_or_default(),
                        source,
                    }
                    .with_span_handle(handle, &module.types)
                })?;
            mod_info.type_flags.push(ty_info.flags);
            self.types[handle.index()] = ty_info;
        }
        {
            let t = crate::Arena::new();
            let resolve_context = crate::proc::ResolveContext::with_locals(module, &t, &[]);
            for (handle, _) in module.const_expressions.iter() {
                mod_info
                    .process_const_expression(handle, &resolve_context, module.to_ctx())
                    .map_err(|source| {
                        ValidationError::ConstExpression { handle, source }
                            .with_span_handle(handle, &module.const_expressions)
                    })?
            }
        }
        // Constant and constant-expression validation is gated by `CONSTANTS`.
        #[cfg(feature = "validate")]
        if self.flags.contains(ValidationFlags::CONSTANTS) {
            for (handle, _) in module.const_expressions.iter() {
                self.validate_const_expression(handle, module.to_ctx(), &mut mod_info)
                    .map_err(|source| {
                        ValidationError::ConstExpression { handle, source }
                            .with_span_handle(handle, &module.const_expressions)
                    })?
            }
            for (handle, constant) in module.constants.iter() {
                self.validate_constant(handle, module.to_ctx(), &mod_info)
                    .map_err(|source| {
                        ValidationError::Constant {
                            handle,
                            name: constant.name.clone().unwrap_or_default(),
                            source,
                        }
                        .with_span_handle(handle, &module.constants)
                    })?
            }
        }
        #[cfg(feature = "validate")]
        for (var_handle, var) in module.global_variables.iter() {
            self.validate_global_var(var, module.to_ctx(), &mod_info)
                .map_err(|source| {
                    ValidationError::GlobalVariable {
                        handle: var_handle,
                        name: var.name.clone().unwrap_or_default(),
                        source,
                    }
                    .with_span_handle(var_handle, &module.global_variables)
                })?;
        }
        for (handle, fun) in module.functions.iter() {
            match self.validate_function(fun, module, &mod_info, false) {
                Ok(info) => mod_info.functions.push(info),
                Err(error) => {
                    return Err(error.and_then(|source| {
                        ValidationError::Function {
                            handle,
                            name: fun.name.clone().unwrap_or_default(),
                            source,
                        }
                        .with_span_handle(handle, &module.functions)
                    }))
                }
            }
        }
        // Entry point names must be unique per stage.
        let mut ep_map = FastHashSet::default();
        for ep in module.entry_points.iter() {
            if !ep_map.insert((ep.stage, &ep.name)) {
                return Err(ValidationError::EntryPoint {
                    stage: ep.stage,
                    name: ep.name.clone(),
                    source: EntryPointError::Conflict,
                }
                .with_span()); // TODO: keep some EP span information?
            }
            match self.validate_entry_point(ep, module, &mod_info) {
                Ok(info) => mod_info.entry_points.push(info),
                Err(error) => {
                    return Err(error.and_then(|source| {
                        ValidationError::EntryPoint {
                            stage: ep.stage,
                            name: ep.name.clone(),
                            source,
                        }
                        .with_span()
                    }));
                }
            }
        }
        Ok(mod_info)
    }
}
/// Check that `members` has the shape of an `atomic_compare_exchange`
/// result struct: exactly two members, an `old_value` whose type satisfies
/// `scalar_predicate`, followed by a boolean `exchanged`.
#[cfg(feature = "validate")]
fn validate_atomic_compare_exchange_struct(
    types: &crate::UniqueArena<crate::Type>,
    members: &[crate::StructMember],
    scalar_predicate: impl FnOnce(&crate::TypeInner) -> bool,
) -> bool {
    if members.len() != 2 {
        return false;
    }
    let (old_value, exchanged) = (&members[0], &members[1]);
    if old_value.name.as_deref() != Some("old_value") {
        return false;
    }
    if !scalar_predicate(&types[old_value.ty].inner) {
        return false;
    }
    if exchanged.name.as_deref() != Some("exchanged") {
        return false;
    }
    // The second member must be a plain boolean scalar.
    let expected_flag = crate::TypeInner::Scalar {
        kind: crate::ScalarKind::Bool,
        width: crate::BOOL_WIDTH,
    };
    types[exchanged.ty].inner == expected_flag
}

View file

@ -0,0 +1,596 @@
use super::Capabilities;
use crate::{arena::Handle, proc::Alignment};
bitflags::bitflags! {
    /// Flags associated with [`Type`]s by [`Validator`].
    ///
    /// [`Type`]: crate::Type
    /// [`Validator`]: crate::valid::Validator
    #[cfg_attr(feature = "serialize", derive(serde::Serialize))]
    #[cfg_attr(feature = "deserialize", derive(serde::Deserialize))]
    #[repr(transparent)]
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub struct TypeFlags: u8 {
        /// Can be used for data variables.
        ///
        /// This flag is required on types of local variables, function
        /// arguments, array elements, and struct members.
        ///
        /// This includes all types except `Image`, `Sampler`,
        /// and some `Pointer` types.
        const DATA = 0x1;
        /// The data type has a size known by pipeline creation time.
        ///
        /// Unsized types are quite restricted. The only unsized types permitted
        /// by Naga, other than the non-[`DATA`] types like [`Image`] and
        /// [`Sampler`], are dynamically-sized [`Array`]s, and [`Struct`]s whose
        /// last members are such arrays. See the documentation for those types
        /// for details.
        ///
        /// [`DATA`]: TypeFlags::DATA
        /// [`Image`]: crate::TypeInner::Image
        /// [`Sampler`]: crate::TypeInner::Sampler
        /// [`Array`]: crate::TypeInner::Array
        /// [`Struct`]: crate::TypeInner::Struct
        const SIZED = 0x2;
        /// The data can be copied around.
        const COPY = 0x4;
        /// Can be used for user-defined IO between pipeline stages.
        ///
        /// This covers anything that can be in [`Location`] binding:
        /// non-bool scalars and vectors, matrices, and structs and
        /// arrays containing only interface types.
        const IO_SHAREABLE = 0x8;
        /// Can be used for host-shareable structures.
        const HOST_SHAREABLE = 0x10;
        /// This type can be passed as a function argument.
        const ARGUMENT = 0x40;
        /// A WGSL [constructible] type.
        ///
        /// The constructible types are scalars, vectors, matrices, fixed-size
        /// arrays of constructible types, and structs whose members are all
        /// constructible.
        ///
        /// [constructible]: https://gpuweb.github.io/gpuweb/wgsl/#constructible
        const CONSTRUCTIBLE = 0x80;
    }
}
/// A reason why a type fails the layout rules of a host-shareable address space.
#[derive(Clone, Copy, Debug, thiserror::Error)]
pub enum Disalignment {
    #[error("The array stride {stride} is not a multiple of the required alignment {alignment}")]
    ArrayStride { stride: u32, alignment: Alignment },
    #[error("The struct span {span}, is not a multiple of the required alignment {alignment}")]
    StructSpan { span: u32, alignment: Alignment },
    #[error("The struct member[{index}] offset {offset} is not a multiple of the required alignment {alignment}")]
    MemberOffset {
        index: u32,
        offset: u32,
        alignment: Alignment,
    },
    #[error("The struct member[{index}] offset {offset} must be at least {expected}")]
    MemberOffsetAfterStruct {
        index: u32,
        offset: u32,
        expected: u32,
    },
    #[error("The struct member[{index}] is not statically sized")]
    UnsizedMember { index: u32 },
    #[error("The type is not host-shareable")]
    NonHostShareable,
}
/// Errors produced while validating a type declaration.
#[derive(Clone, Debug, thiserror::Error)]
pub enum TypeError {
    #[error("Capability {0:?} is required")]
    MissingCapability(Capabilities),
    #[error("The {0:?} scalar width {1} is not supported")]
    InvalidWidth(crate::ScalarKind, crate::Bytes),
    #[error("The {0:?} scalar width {1} is not supported for an atomic")]
    InvalidAtomicWidth(crate::ScalarKind, crate::Bytes),
    #[error("Invalid type for pointer target {0:?}")]
    InvalidPointerBase(Handle<crate::Type>),
    #[error("Unsized types like {base:?} must be in the `Storage` address space, not `{space:?}`")]
    InvalidPointerToUnsized {
        base: Handle<crate::Type>,
        space: crate::AddressSpace,
    },
    #[error("Expected data type, found {0:?}")]
    InvalidData(Handle<crate::Type>),
    #[error("Base type {0:?} for the array is invalid")]
    InvalidArrayBaseType(Handle<crate::Type>),
    #[error("The constant {0:?} is specialized, and cannot be used as an array size")]
    UnsupportedSpecializedArrayLength(Handle<crate::Constant>),
    #[error("Array stride {stride} does not match the expected {expected}")]
    InvalidArrayStride { stride: u32, expected: u32 },
    #[error("Field '{0}' can't be dynamically-sized, has type {1:?}")]
    InvalidDynamicArray(String, Handle<crate::Type>),
    #[error("The base handle {0:?} has to be a struct")]
    BindingArrayBaseTypeNotStruct(Handle<crate::Type>),
    #[error("Structure member[{index}] at {offset} overlaps the previous member")]
    MemberOverlap { index: u32, offset: u32 },
    #[error(
        "Structure member[{index}] at {offset} and size {size} crosses the structure boundary of size {span}"
    )]
    MemberOutOfBounds {
        index: u32,
        offset: u32,
        size: u32,
        span: u32,
    },
    #[error("Structure types must have at least one member")]
    EmptyStruct,
}
/// The alignment a type requires in a host-shareable address space, or the
/// offending type handle and the reason its layout is invalid.
///
/// Only makes sense if `flags.contains(HOST_SHAREABLE)`.
type LayoutCompatibility = Result<Alignment, (Handle<crate::Type>, Disalignment)>;
/// Fold one member's layout result into `accum`, the running layout result
/// for the struct `parent_handle`.
///
/// Any error (already in `accum`, or carried by `member_layout`) is sticky.
/// Otherwise the member's offset must be aligned for its own layout, and the
/// struct's required alignment grows to the maximum seen so far.
fn check_member_layout(
    accum: &mut LayoutCompatibility,
    member: &crate::StructMember,
    member_index: u32,
    member_layout: LayoutCompatibility,
    parent_handle: Handle<crate::Type>,
) {
    let updated = match (*accum, member_layout) {
        // An earlier failure, or a failing member, poisons the whole struct.
        (Err(e), _) | (_, Err(e)) => Err(e),
        (Ok(current), Ok(alignment)) if alignment.is_aligned(member.offset) => {
            Ok(current.max(alignment))
        }
        (Ok(_), Ok(alignment)) => Err((
            parent_handle,
            Disalignment::MemberOffset {
                index: member_index,
                offset: member.offset,
                alignment,
            },
        )),
    };
    *accum = updated;
}
/// Determine whether a pointer in `space` can be passed as an argument.
///
/// Pointers may only be passed to user-defined functions when they live in
/// the `Function`, `Private`, or `Workgroup` address space; those spaces
/// yield `TypeFlags::ARGUMENT`, and every other space yields
/// `TypeFlags::empty()`.
const fn ptr_space_argument_flag(space: crate::AddressSpace) -> TypeFlags {
    use crate::AddressSpace as As;
    match space {
        As::Function | As::Private | As::WorkGroup => TypeFlags::ARGUMENT,
        As::Handle | As::PushConstant | As::Storage { .. } | As::Uniform => TypeFlags::empty(),
    }
}
/// Validation info computed for a single type.
#[derive(Clone, Debug)]
pub(super) struct TypeInfo {
    pub flags: TypeFlags,
    /// Layout result for use in the uniform address space.
    pub uniform_layout: LayoutCompatibility,
    /// Layout result for use in the storage address space.
    pub storage_layout: LayoutCompatibility,
}
impl TypeInfo {
    /// Placeholder info used to pre-fill the type table before each type
    /// is validated: no flags, byte alignment.
    const fn dummy() -> Self {
        TypeInfo {
            flags: TypeFlags::empty(),
            uniform_layout: Ok(Alignment::ONE),
            storage_layout: Ok(Alignment::ONE),
        }
    }
    /// Info carrying `flags` and the same `alignment` for both the uniform
    /// and storage layouts.
    const fn new(flags: TypeFlags, alignment: Alignment) -> Self {
        TypeInfo {
            flags,
            uniform_layout: Ok(alignment),
            storage_layout: Ok(alignment),
        }
    }
}
impl super::Validator {
const fn require_type_capability(&self, capability: Capabilities) -> Result<(), TypeError> {
if self.capabilities.contains(capability) {
Ok(())
} else {
Err(TypeError::MissingCapability(capability))
}
}
pub(super) fn check_width(
&self,
kind: crate::ScalarKind,
width: crate::Bytes,
) -> Result<(), TypeError> {
let good = match kind {
crate::ScalarKind::Bool => width == crate::BOOL_WIDTH,
crate::ScalarKind::Float => {
if width == 8 {
self.require_type_capability(Capabilities::FLOAT64)?;
true
} else {
width == 4
}
}
crate::ScalarKind::Sint | crate::ScalarKind::Uint => width == 4,
};
if good {
Ok(())
} else {
Err(TypeError::InvalidWidth(kind, width))
}
}
pub(super) fn reset_types(&mut self, size: usize) {
self.types.clear();
self.types.resize(size, TypeInfo::dummy());
self.layouter.clear();
}
pub(super) fn validate_type(
&self,
handle: Handle<crate::Type>,
gctx: crate::proc::GlobalCtx,
) -> Result<TypeInfo, TypeError> {
use crate::TypeInner as Ti;
Ok(match gctx.types[handle].inner {
Ti::Scalar { kind, width } => {
self.check_width(kind, width)?;
let shareable = if kind.is_numeric() {
TypeFlags::IO_SHAREABLE | TypeFlags::HOST_SHAREABLE
} else {
TypeFlags::empty()
};
TypeInfo::new(
TypeFlags::DATA
| TypeFlags::SIZED
| TypeFlags::COPY
| TypeFlags::ARGUMENT
| TypeFlags::CONSTRUCTIBLE
| shareable,
Alignment::from_width(width),
)
}
Ti::Vector { size, kind, width } => {
self.check_width(kind, width)?;
let shareable = if kind.is_numeric() {
TypeFlags::IO_SHAREABLE | TypeFlags::HOST_SHAREABLE
} else {
TypeFlags::empty()
};
TypeInfo::new(
TypeFlags::DATA
| TypeFlags::SIZED
| TypeFlags::COPY
| TypeFlags::HOST_SHAREABLE
| TypeFlags::ARGUMENT
| TypeFlags::CONSTRUCTIBLE
| shareable,
Alignment::from(size) * Alignment::from_width(width),
)
}
Ti::Matrix {
columns: _,
rows,
width,
} => {
self.check_width(crate::ScalarKind::Float, width)?;
TypeInfo::new(
TypeFlags::DATA
| TypeFlags::SIZED
| TypeFlags::COPY
| TypeFlags::HOST_SHAREABLE
| TypeFlags::ARGUMENT
| TypeFlags::CONSTRUCTIBLE,
Alignment::from(rows) * Alignment::from_width(width),
)
}
Ti::Atomic { kind, width } => {
let good = match kind {
crate::ScalarKind::Bool | crate::ScalarKind::Float => false,
crate::ScalarKind::Sint | crate::ScalarKind::Uint => width == 4,
};
if !good {
return Err(TypeError::InvalidAtomicWidth(kind, width));
}
TypeInfo::new(
TypeFlags::DATA | TypeFlags::SIZED | TypeFlags::HOST_SHAREABLE,
Alignment::from_width(width),
)
}
Ti::Pointer { base, space } => {
use crate::AddressSpace as As;
let base_info = &self.types[base.index()];
if !base_info.flags.contains(TypeFlags::DATA) {
return Err(TypeError::InvalidPointerBase(base));
}
// Runtime-sized values can only live in the `Storage` storage
// space, so it's useless to have a pointer to such a type in
// any other space.
//
// Detecting this problem here prevents the definition of
// functions like:
//
// fn f(p: ptr<workgroup, UnsizedType>) -> ... { ... }
//
// which would otherwise be permitted, but uncallable. (They
// may also present difficulties in code generation).
if !base_info.flags.contains(TypeFlags::SIZED) {
match space {
As::Storage { .. } => {}
_ => {
return Err(TypeError::InvalidPointerToUnsized { base, space });
}
}
}
// `Validator::validate_function` actually checks the storage
// space of pointer arguments explicitly before checking the
// `ARGUMENT` flag, to give better error messages. But it seems
// best to set `ARGUMENT` accurately anyway.
let argument_flag = ptr_space_argument_flag(space);
// Pointers cannot be stored in variables, structure members, or
// array elements, so we do not mark them as `DATA`.
TypeInfo::new(
argument_flag | TypeFlags::SIZED | TypeFlags::COPY,
Alignment::ONE,
)
}
Ti::ValuePointer {
size: _,
kind,
width,
space,
} => {
// ValuePointer should be treated the same way as the equivalent
// Pointer / Scalar / Vector combination, so each step in those
// variants' match arms should have a counterpart here.
//
// However, some cases are trivial: All our implicit base types
// are DATA and SIZED, so we can never return
// `InvalidPointerBase` or `InvalidPointerToUnsized`.
self.check_width(kind, width)?;
// `Validator::validate_function` actually checks the storage
// space of pointer arguments explicitly before checking the
// `ARGUMENT` flag, to give better error messages. But it seems
// best to set `ARGUMENT` accurately anyway.
let argument_flag = ptr_space_argument_flag(space);
// Pointers cannot be stored in variables, structure members, or
// array elements, so we do not mark them as `DATA`.
TypeInfo::new(
argument_flag | TypeFlags::SIZED | TypeFlags::COPY,
Alignment::ONE,
)
}
            Ti::Array { base, size, stride } => {
                // Array elements must be storable (`DATA`) and of fixed size
                // (`SIZED`); a runtime-sized type can never be an element.
                let base_info = &self.types[base.index()];
                if !base_info.flags.contains(TypeFlags::DATA | TypeFlags::SIZED) {
                    return Err(TypeError::InvalidArrayBaseType(base));
                }
                let base_layout = self.layouter[base];
                let general_alignment = base_layout.alignment;
                // Uniform-address-space layout: the stride must be a multiple
                // of the element's uniform alignment, raised to at least
                // `Alignment::MIN_UNIFORM`. A failure here is recorded rather
                // than returned, so it only becomes a hard error if the type
                // is actually used in the uniform address space.
                let uniform_layout = match base_info.uniform_layout {
                    Ok(base_alignment) => {
                        let alignment = base_alignment
                            .max(general_alignment)
                            .max(Alignment::MIN_UNIFORM);
                        if alignment.is_aligned(stride) {
                            Ok(alignment)
                        } else {
                            Err((handle, Disalignment::ArrayStride { stride, alignment }))
                        }
                    }
                    // Element already had a uniform-layout problem; propagate it.
                    Err(e) => Err(e),
                };
                // Storage-address-space layout: same stride check, but without
                // the `MIN_UNIFORM` floor on the alignment.
                let storage_layout = match base_info.storage_layout {
                    Ok(base_alignment) => {
                        let alignment = base_alignment.max(general_alignment);
                        if alignment.is_aligned(stride) {
                            Ok(alignment)
                        } else {
                            Err((handle, Disalignment::ArrayStride { stride, alignment }))
                        }
                    }
                    Err(e) => Err(e),
                };
                let type_info_mask = match size {
                    // Fixed-size arrays can inherit nearly every capability
                    // from their element type.
                    crate::ArraySize::Constant(_) => {
                        TypeFlags::DATA
                            | TypeFlags::SIZED
                            | TypeFlags::COPY
                            | TypeFlags::HOST_SHAREABLE
                            | TypeFlags::ARGUMENT
                            | TypeFlags::CONSTRUCTIBLE
                    }
                    crate::ArraySize::Dynamic => {
                        // Non-SIZED types may only appear as the last element of a structure.
                        // This is enforced by checks for SIZED-ness for all compound types,
                        // and a special case for structs.
                        TypeFlags::DATA | TypeFlags::COPY | TypeFlags::HOST_SHAREABLE
                    }
                };
                TypeInfo {
                    // The array is only as capable as its element type: mask
                    // the element's flags with what this array size allows.
                    flags: base_info.flags & type_info_mask,
                    uniform_layout,
                    storage_layout,
                }
            }
            Ti::Struct { ref members, span } => {
                if members.is_empty() {
                    return Err(TypeError::EmptyStruct);
                }
                // Start with the full set of capabilities; each member can
                // only remove flags (via `ti.flags &= ...` in the loop below).
                let mut ti = TypeInfo::new(
                    TypeFlags::DATA
                        | TypeFlags::SIZED
                        | TypeFlags::COPY
                        | TypeFlags::HOST_SHAREABLE
                        | TypeFlags::IO_SHAREABLE
                        | TypeFlags::ARGUMENT
                        | TypeFlags::CONSTRUCTIBLE,
                    Alignment::ONE,
                );
                ti.uniform_layout = Ok(Alignment::MIN_UNIFORM);
                // Earliest offset at which the next member may legally begin
                // (i.e. the end of the previous member).
                let mut min_offset = 0;
                // `(span, offset)` of the previous member if it was itself a
                // struct; used for the uniform-layout spacing rule below.
                let mut prev_struct_data: Option<(u32, u32)> = None;
                for (i, member) in members.iter().enumerate() {
                    // Every member must be a storable type.
                    let base_info = &self.types[member.ty.index()];
                    if !base_info.flags.contains(TypeFlags::DATA) {
                        return Err(TypeError::InvalidData(member.ty));
                    }
                    // A non-host-shareable member doesn't invalidate the
                    // struct outright; it just poisons both layout results,
                    // which only matter if the struct is placed in the
                    // uniform or storage address space.
                    if !base_info.flags.contains(TypeFlags::HOST_SHAREABLE) {
                        if ti.uniform_layout.is_ok() {
                            ti.uniform_layout = Err((member.ty, Disalignment::NonHostShareable));
                        }
                        if ti.storage_layout.is_ok() {
                            ti.storage_layout = Err((member.ty, Disalignment::NonHostShareable));
                        }
                    }
                    // The struct is only as capable as its least capable member.
                    ti.flags &= base_info.flags;
                    if member.offset < min_offset {
                        // HACK: this could be nicer. We want to allow some structures
                        // to not bother with offsets/alignments if they are never
                        // used for host sharing.
                        if member.offset == 0 {
                            ti.flags.set(TypeFlags::HOST_SHAREABLE, false);
                        } else {
                            return Err(TypeError::MemberOverlap {
                                index: i as u32,
                                offset: member.offset,
                            });
                        }
                    }
                    // The next member may start no earlier than the end of
                    // this one, and this one must fit within the declared span.
                    let base_size = gctx.types[member.ty].inner.size(gctx);
                    min_offset = member.offset + base_size;
                    if min_offset > span {
                        return Err(TypeError::MemberOutOfBounds {
                            index: i as u32,
                            offset: member.offset,
                            size: base_size,
                            span,
                        });
                    }
                    // Fold this member's offset/alignment checks into the
                    // running uniform and storage layout results (helper
                    // defined elsewhere in this module).
                    check_member_layout(
                        &mut ti.uniform_layout,
                        member,
                        i as u32,
                        base_info.uniform_layout,
                        handle,
                    );
                    check_member_layout(
                        &mut ti.storage_layout,
                        member,
                        i as u32,
                        base_info.storage_layout,
                        handle,
                    );
                    // Validate rule: If a structure member itself has a structure type S,
                    // then the number of bytes between the start of that member and
                    // the start of any following member must be at least roundUp(16, SizeOf(S)).
                    if let Some((span, offset)) = prev_struct_data {
                        let diff = member.offset - offset;
                        let min = Alignment::MIN_UNIFORM.round_up(span);
                        if diff < min {
                            // Only the uniform layout is affected by this rule.
                            ti.uniform_layout = Err((
                                handle,
                                Disalignment::MemberOffsetAfterStruct {
                                    index: i as u32,
                                    offset: member.offset,
                                    expected: offset + min,
                                },
                            ));
                        }
                    };
                    prev_struct_data = match gctx.types[member.ty].inner {
                        crate::TypeInner::Struct { span, .. } => Some((span, member.offset)),
                        _ => None,
                    };
                    // The last field may be an unsized array.
                    if !base_info.flags.contains(TypeFlags::SIZED) {
                        let is_array = match gctx.types[member.ty].inner {
                            crate::TypeInner::Array { .. } => true,
                            _ => false,
                        };
                        // Only an array may be unsized, and only in the final
                        // member position; such a struct is never valid in the
                        // uniform address space.
                        if !is_array || i + 1 != members.len() {
                            let name = member.name.clone().unwrap_or_default();
                            return Err(TypeError::InvalidDynamicArray(name, member.ty));
                        }
                        if ti.uniform_layout.is_ok() {
                            ti.uniform_layout =
                                Err((handle, Disalignment::UnsizedMember { index: i as u32 }));
                        }
                    }
                }
                // The struct's total span must be a multiple of the alignment
                // the layouter computed for it; otherwise both layouts fail.
                let alignment = self.layouter[handle].alignment;
                if !alignment.is_aligned(span) {
                    ti.uniform_layout = Err((handle, Disalignment::StructSpan { span, alignment }));
                    ti.storage_layout = Err((handle, Disalignment::StructSpan { span, alignment }));
                }
                ti
            }
            // Handle types: images and samplers can only be passed around as
            // arguments/bindings, never stored as data, and occupy no host-
            // visible memory layout.
            Ti::Image { .. } | Ti::Sampler { .. } => {
                TypeInfo::new(TypeFlags::ARGUMENT, Alignment::ONE)
            }
            // Acceleration structures are gated on the RAY_QUERY capability
            // and, like other handle types, are argument-only.
            Ti::AccelerationStructure => {
                self.require_type_capability(Capabilities::RAY_QUERY)?;
                TypeInfo::new(TypeFlags::ARGUMENT, Alignment::ONE)
            }
            // Ray queries are also gated on RAY_QUERY, but unlike the handle
            // types above they are storable (`DATA`) and have a fixed size;
            // they are still not host-shareable or copyable.
            Ti::RayQuery => {
                self.require_type_capability(Capabilities::RAY_QUERY)?;
                TypeInfo::new(TypeFlags::DATA | TypeFlags::SIZED, Alignment::ONE)
            }
            Ti::BindingArray { base, size } => {
                // The base type must be an earlier handle (already validated,
                // so `self.types[base.index()]` below is populated); self- and
                // forward references are rejected.
                if base >= handle {
                    return Err(TypeError::InvalidArrayBaseType(base));
                }
                let type_info_mask = match size {
                    // Note: unlike `Ti::Array`, even a fixed-size binding
                    // array keeps only these bits; everything else is masked
                    // away from the base type's flags below.
                    crate::ArraySize::Constant(_) => TypeFlags::SIZED | TypeFlags::HOST_SHAREABLE,
                    crate::ArraySize::Dynamic => {
                        // Final type is non-sized
                        TypeFlags::HOST_SHAREABLE
                    }
                };
                let base_info = &self.types[base.index()];
                // `DATA` distinguishes storable base types from handle types
                // (images/samplers); handle bases are accepted as-is.
                if base_info.flags.contains(TypeFlags::DATA) {
                    // Currently Naga only supports binding arrays of structs for non-handle types.
                    match gctx.types[base].inner {
                        crate::TypeInner::Struct { .. } => {}
                        _ => return Err(TypeError::BindingArrayBaseTypeNotStruct(base)),
                    };
                }
                TypeInfo::new(base_info.flags & type_info_mask, Alignment::ONE)
            }
})
}
}