[wgsl-in,ir] Add support for parsing rust-style doc comments (#6364)

* [wgsl-in,ir] add support for parsing rust-style doc comments

* rename relevant items to `doc_comments` (or variations of it)

* address comments

* remove `next_until`

* rename `save_doc_comments` to `ignore_doc_comments`

* expand snapshot test and ignore blankspace when accumulating doc comments

* make tokenizer more straightforward

---------

Co-authored-by: teoxoy <28601907+teoxoy@users.noreply.github.com>
Thierry Berger 2025-06-05 15:13:11 +02:00 committed by GitHub
parent 00bc80da61
commit 28af245d51
46 changed files with 964 additions and 30 deletions
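
As orientation before the diffs, a minimal sketch of the new API in use, built only from names introduced below (`naga::front::wgsl::{Frontend, Options}`, the `parse_doc_comments` flag, and `Module::doc_comments`); the WGSL source and error handling are illustrative, not part of the change:

use naga::front::wgsl::{Frontend, Options};

fn main() {
    // Illustrative WGSL with a module doc comment and an item doc comment.
    let source = "
        //! Module doc comment.
        /// constant doc comment
        const test_c: u32 = 1;
    ";

    // Doc comment parsing is opt-in; `Options::new()` leaves it disabled.
    let mut frontend = Frontend::new_with_options(Options {
        parse_doc_comments: true,
    });
    let module = frontend.parse(source).expect("valid WGSL");

    // `Module::doc_comments` is an `Option<Box<DocComments>>`, populated
    // only when at least one doc comment was collected.
    if let Some(doc_comments) = module.doc_comments.as_ref() {
        println!("module docs: {:?}", doc_comments.module);
        for (handle, docs) in doc_comments.constants.iter() {
            println!("constant {handle:?}: {docs:?}");
        }
    }
}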

View File

@ -45,6 +45,7 @@ Bottom level categories:
#### Naga
- Added `no_std` support with default features disabled. By @Bushrat011899 in [#7585](https://github.com/gfx-rs/wgpu/pull/7585).
- [wgsl-in,ir] Add support for parsing rust-style doc comments via `naga::front::wgsl::Frontend::new_with_options`. By @Vrixyz in [#6364](https://github.com/gfx-rs/wgpu/pull/6364).
#### General

View File

@ -272,6 +272,42 @@ pub fn compact(module: &mut crate::Module) {
module_map.global_expressions.adjust(init);
}
}
// Adjust doc comments
if let Some(ref mut doc_comments) = module.doc_comments {
let crate::DocComments {
module: _,
types: ref mut doc_comments_for_types,
struct_members: ref mut doc_comments_for_struct_members,
entry_points: _,
functions: _,
constants: ref mut doc_comments_for_constants,
global_variables: _,
} = **doc_comments;
log::trace!("adjusting doc comments for types");
for (mut ty, doc_comment) in core::mem::take(doc_comments_for_types) {
if !module_map.types.used(ty) {
continue;
}
module_map.types.adjust(&mut ty);
doc_comments_for_types.insert(ty, doc_comment);
}
log::trace!("adjusting doc comments for struct members");
for ((mut ty, index), doc_comment) in core::mem::take(doc_comments_for_struct_members) {
if !module_map.types.used(ty) {
continue;
}
module_map.types.adjust(&mut ty);
doc_comments_for_struct_members.insert((ty, index), doc_comment);
}
log::trace!("adjusting doc comments for constants");
for (mut constant, doc_comment) in core::mem::take(doc_comments_for_constants) {
if !module_map.constants.used(constant) {
continue;
}
module_map.constants.adjust(&mut constant);
doc_comments_for_constants.insert(constant, doc_comment);
}
}
// Temporary storage to help us reuse allocations of existing
// named expression tables.

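Because compaction re-indexes every handle, the hunk above re-keys each doc-comment map. A hedged sketch of the round trip it protects, continuing the sketch after the commit header (`frontend` and `source` come from there; it assumes `compact` is reachable as `naga::compact::compact` with the one-argument signature shown above):

let mut module = frontend.parse(source).expect("valid WGSL");
naga::compact::compact(&mut module);

// `compact` rewrote every handle; the loops above re-key the maps in
// `DocComments` to match, so these lookups still hit the right items.
if let Some(doc_comments) = module.doc_comments.as_ref() {
    for (ty, docs) in doc_comments.types.iter() {
        // `*ty` is a post-compaction handle; a stale pre-compaction handle
        // here is exactly the bug the adjustment above prevents.
        let _ = &module.types[*ty];
        println!("{docs:?}");
    }
}
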
View File

@ -14,7 +14,7 @@ pub mod spv;
#[cfg(feature = "wgsl-in")]
pub mod wgsl;
use alloc::{vec, vec::Vec};
use alloc::{boxed::Box, vec, vec::Vec};
use core::ops;
use crate::{
@ -330,3 +330,10 @@ impl<Name: fmt::Debug, Var: fmt::Debug> fmt::Debug for SymbolTable<Name, Var> {
.finish()
}
}
impl crate::Module {
pub fn get_or_insert_default_doc_comments(&mut self) -> &mut Box<crate::DocComments> {
self.doc_comments
.get_or_insert_with(|| Box::new(crate::DocComments::default()))
}
}

View File

@ -469,6 +469,8 @@ impl<'a> Error<'a> {
Token::Arrow => "->".to_string(),
Token::Unknown(c) => format!("unknown (`{c}`)"),
Token::Trivia => "trivia".to_string(),
Token::DocComment(s) => format!("doc comment ('{s}')"),
Token::ModuleDocComment(s) => format!("module doc comment ('{s}')"),
Token::End => "end".to_string(),
},
ExpectedToken::Identifier => "identifier".to_string(),

View File

@ -1022,7 +1022,7 @@ enum LoweredGlobalDecl {
Const(Handle<ir::Constant>),
Override(Handle<ir::Override>),
Type(Handle<ir::Type>),
EntryPoint,
EntryPoint(usize),
}
enum Texture {
@ -1130,6 +1130,10 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
layouter: &mut proc::Layouter::default(),
global_expression_kind_tracker: &mut proc::ExpressionKindTracker::new(),
};
if !tu.doc_comments.is_empty() {
ctx.module.get_or_insert_default_doc_comments().module =
tu.doc_comments.iter().map(|s| s.to_string()).collect();
}
for decl_handle in self.index.visit_ordered() {
let span = tu.decls.get_span(decl_handle);
@ -1138,6 +1142,29 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
match decl.kind {
ast::GlobalDeclKind::Fn(ref f) => {
let lowered_decl = self.function(f, span, &mut ctx)?;
if !f.doc_comments.is_empty() {
match lowered_decl {
LoweredGlobalDecl::Function { handle, .. } => {
ctx.module
.get_or_insert_default_doc_comments()
.functions
.insert(
handle,
f.doc_comments.iter().map(|s| s.to_string()).collect(),
);
}
LoweredGlobalDecl::EntryPoint(index) => {
ctx.module
.get_or_insert_default_doc_comments()
.entry_points
.insert(
index,
f.doc_comments.iter().map(|s| s.to_string()).collect(),
);
}
_ => {}
}
}
ctx.globals.insert(f.name.name, lowered_decl);
}
ast::GlobalDeclKind::Var(ref v) => {
@ -1173,6 +1200,15 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
span,
);
if !v.doc_comments.is_empty() {
ctx.module
.get_or_insert_default_doc_comments()
.global_variables
.insert(
handle,
v.doc_comments.iter().map(|s| s.to_string()).collect(),
);
}
ctx.globals
.insert(v.name.name, LoweredGlobalDecl::Var(handle));
}
@ -1203,6 +1239,15 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
ctx.globals
.insert(c.name.name, LoweredGlobalDecl::Const(handle));
if !c.doc_comments.is_empty() {
ctx.module
.get_or_insert_default_doc_comments()
.constants
.insert(
handle,
c.doc_comments.iter().map(|s| s.to_string()).collect(),
);
}
}
ast::GlobalDeclKind::Override(ref o) => {
let explicit_ty =
@ -1249,6 +1294,15 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
let handle = self.r#struct(s, span, &mut ctx)?;
ctx.globals
.insert(s.name.name, LoweredGlobalDecl::Type(handle));
if !s.doc_comments.is_empty() {
ctx.module
.get_or_insert_default_doc_comments()
.types
.insert(
handle,
s.doc_comments.iter().map(|s| s.to_string()).collect(),
);
}
}
ast::GlobalDeclKind::Type(ref alias) => {
let ty = self.resolve_named_ast_type(
@ -1469,7 +1523,9 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
workgroup_size_overrides,
function,
});
Ok(LoweredGlobalDecl::EntryPoint)
Ok(LoweredGlobalDecl::EntryPoint(
ctx.module.entry_points.len() - 1,
))
} else {
let handle = ctx.module.functions.append(function, span);
Ok(LoweredGlobalDecl::Function {
@ -2086,7 +2142,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
}
LoweredGlobalDecl::Function { .. }
| LoweredGlobalDecl::Type(_)
| LoweredGlobalDecl::EntryPoint => {
| LoweredGlobalDecl::EntryPoint(_) => {
return Err(Box::new(Error::Unexpected(span, ExpectedToken::Variable)));
}
};
@ -2373,7 +2429,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
function_span,
ExpectedToken::Function,
))),
Some(&LoweredGlobalDecl::EntryPoint) => {
Some(&LoweredGlobalDecl::EntryPoint(_)) => {
Err(Box::new(Error::CalledEntryPoint(function_span)))
}
Some(&LoweredGlobalDecl::Function {
@ -3581,6 +3637,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
let mut struct_alignment = proc::Alignment::ONE;
let mut members = Vec::with_capacity(s.members.len());
let mut doc_comments: Vec<Option<Vec<String>>> = Vec::new();
for member in s.members.iter() {
let ty = self.resolve_ast_type(member.ty, &mut ctx.as_const())?;
@ -3623,6 +3681,11 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
offset = member_alignment.round_up(offset);
struct_alignment = struct_alignment.max(member_alignment);
if !member.doc_comments.is_empty() {
doc_comments.push(Some(
member.doc_comments.iter().map(|s| s.to_string()).collect(),
));
} else {
// Keep this vector index-aligned with `members`; the loop below
// enumerates it to attach comments by member index.
doc_comments.push(None);
}
members.push(ir::StructMember {
name: Some(member.name.name.to_owned()),
ty,
@ -3646,6 +3709,14 @@ impl<'source, 'temp> Lowerer<'source, 'temp> {
},
span,
);
for (i, c) in doc_comments.drain(..).enumerate() {
if let Some(comment) = c {
ctx.module
.get_or_insert_default_doc_comments()
.struct_members
.insert((handle, i), comment);
}
}
Ok(handle)
}

View File

@ -15,6 +15,7 @@ pub use crate::front::wgsl::error::ParseError;
pub use crate::front::wgsl::parse::directive::language_extension::{
ImplementedLanguageExtension, LanguageExtension, UnimplementedLanguageExtension,
};
pub use crate::front::wgsl::parse::Options;
use alloc::boxed::Box;
use thiserror::Error;
@ -31,12 +32,20 @@ pub(crate) type Result<'a, T> = core::result::Result<T, Box<Error<'a>>>;
pub struct Frontend {
parser: Parser,
options: Options,
}
impl Frontend {
pub const fn new() -> Self {
Self {
parser: Parser::new(),
options: Options::new(),
}
}
pub const fn new_with_options(options: Options) -> Self {
Self {
parser: Parser::new(),
options,
}
}
@ -45,7 +54,7 @@ impl Frontend {
}
fn inner<'a>(&mut self, source: &'a str) -> Result<'a, crate::Module> {
let tu = self.parser.parse(source)?;
let tu = self.parser.parse(source, &self.options)?;
let index = index::Index::generate(&tu)?;
let module = Lowerer::new(&index).lower(tu)?;

View File

@ -40,6 +40,10 @@ pub struct TranslationUnit<'a> {
/// See [`DiagnosticFilterNode`] for details on how the tree is represented and used in
/// validation.
pub diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
/// Doc comments appearing first in the file.
/// They serve as documentation for the whole `TranslationUnit`.
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug, Clone, Copy)]
@ -137,6 +141,7 @@ pub struct Function<'a> {
pub result: Option<FunctionResult<'a>>,
pub body: Block<'a>,
pub diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug)]
@ -163,6 +168,7 @@ pub struct GlobalVariable<'a> {
pub binding: Option<ResourceBinding<'a>>,
pub ty: Option<Handle<Type<'a>>>,
pub init: Option<Handle<Expression<'a>>>,
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug)]
@ -172,12 +178,14 @@ pub struct StructMember<'a> {
pub binding: Option<Binding<'a>>,
pub align: Option<Handle<Expression<'a>>>,
pub size: Option<Handle<Expression<'a>>>,
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug)]
pub struct Struct<'a> {
pub name: Ident<'a>,
pub members: Vec<StructMember<'a>>,
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug)]
@ -191,6 +199,7 @@ pub struct Const<'a> {
pub name: Ident<'a>,
pub ty: Option<Handle<Type<'a>>>,
pub init: Handle<Expression<'a>>,
pub doc_comments: Vec<&'a str>,
}
#[derive(Debug)]

View File

@ -5,7 +5,7 @@ use crate::front::wgsl::parse::{conv, Number};
use crate::front::wgsl::Scalar;
use crate::Span;
use alloc::boxed::Box;
use alloc::{boxed::Box, vec::Vec};
type TokenSpan<'a> = (Token<'a>, Span);
@ -25,6 +25,8 @@ pub enum Token<'a> {
Arrow,
Unknown(char),
Trivia,
DocComment(&'a str),
ModuleDocComment(&'a str),
End,
}
@ -47,8 +49,10 @@ fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) {
/// - Otherwise, interpret `<<` and `>>` as shift operators:
/// `Token::LogicalOperation` tokens.
///
/// If `ignore_doc_comments` is true, doc comments are treated as [`Token::Trivia`].
///
/// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing
fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
fn consume_token(input: &str, generic: bool, ignore_doc_comments: bool) -> (Token<'_>, &str) {
let mut chars = input.chars();
let cur = match chars.next() {
Some(c) => c,
@ -84,12 +88,37 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
let og_chars = chars.as_str();
match chars.next() {
Some('/') => {
let _ = chars.position(is_comment_end);
(Token::Trivia, chars.as_str())
let mut input_chars = input.char_indices();
let doc_comment_end = input_chars
.find_map(|(index, c)| is_comment_end(c).then_some(index))
.unwrap_or(input.len());
let token = match chars.next() {
Some('/') if !ignore_doc_comments => {
Token::DocComment(&input[..doc_comment_end])
}
Some('!') if !ignore_doc_comments => {
Token::ModuleDocComment(&input[..doc_comment_end])
}
_ => Token::Trivia,
};
(token, input_chars.as_str())
}
Some('*') => {
let next_c = chars.next();
enum CommentType {
Doc,
ModuleDoc,
Normal,
}
let comment_type = match next_c {
Some('*') if !ignore_doc_comments => CommentType::Doc,
Some('!') if !ignore_doc_comments => CommentType::ModuleDoc,
_ => CommentType::Normal,
};
let mut depth = 1;
let mut prev = None;
let mut prev = next_c;
for c in &mut chars {
match (prev, c) {
@ -97,7 +126,19 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
prev = None;
depth -= 1;
if depth == 0 {
return (Token::Trivia, chars.as_str());
let rest = chars.as_str();
let token = match comment_type {
CommentType::Doc => {
let doc_comment_end = input.len() - rest.len();
Token::DocComment(&input[..doc_comment_end])
}
CommentType::ModuleDoc => {
let doc_comment_end = input.len() - rest.len();
Token::ModuleDocComment(&input[..doc_comment_end])
}
CommentType::Normal => Token::Trivia,
};
return (token, rest);
}
}
(Some('/'), '*') => {
@ -170,6 +211,7 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
/// Returns whether or not a char is a comment end
/// (Unicode Pattern_White_Space excluding U+0020, U+0009, U+200E and U+200F)
/// <https://www.w3.org/TR/WGSL/#line-break>
const fn is_comment_end(c: char) -> bool {
match c {
'\u{000a}'..='\u{000d}' | '\u{0085}' | '\u{2028}' | '\u{2029}' => true,
@ -220,16 +262,21 @@ pub(in crate::front::wgsl) struct Lexer<'a> {
/// statements.
last_end_offset: usize,
/// Whether or not to ignore doc comments.
/// If `true`, doc comments are treated as [`Token::Trivia`].
ignore_doc_comments: bool,
pub(in crate::front::wgsl) enable_extensions: EnableExtensions,
}
impl<'a> Lexer<'a> {
pub(in crate::front::wgsl) const fn new(input: &'a str) -> Self {
pub(in crate::front::wgsl) const fn new(input: &'a str, ignore_doc_comments: bool) -> Self {
Lexer {
input,
source: input,
last_end_offset: 0,
enable_extensions: EnableExtensions::empty(),
ignore_doc_comments,
}
}
@ -255,7 +302,7 @@ impl<'a> Lexer<'a> {
pub(in crate::front::wgsl) fn start_byte_offset(&mut self) -> usize {
loop {
// Eat all trivia because `next` doesn't eat trailing trivia.
let (token, rest) = consume_token(self.input, false);
let (token, rest) = consume_token(self.input, false, true);
if let Token::Trivia = token {
self.input = rest;
} else {
@ -271,6 +318,40 @@ impl<'a> Lexer<'a> {
(token, rest)
}
/// Collect all module doc comments until a non-doc-comment token is found.
pub(in crate::front::wgsl) fn accumulate_module_doc_comments(&mut self) -> Vec<&'a str> {
let mut doc_comments = Vec::new();
loop {
// ignore blankspace
self.input = consume_any(self.input, is_blankspace).1;
let (token, rest) = consume_token(self.input, false, self.ignore_doc_comments);
if let Token::ModuleDocComment(doc_comment) = token {
self.input = rest;
doc_comments.push(doc_comment);
} else {
return doc_comments;
}
}
}
/// Collect all doc comments until a non-doc-comment token is found.
pub(in crate::front::wgsl) fn accumulate_doc_comments(&mut self) -> Vec<&'a str> {
let mut doc_comments = Vec::new();
loop {
// ignore blankspace
self.input = consume_any(self.input, is_blankspace).1;
let (token, rest) = consume_token(self.input, false, self.ignore_doc_comments);
if let Token::DocComment(doc_comment) = token {
self.input = rest;
doc_comments.push(doc_comment);
} else {
return doc_comments;
}
}
}
const fn current_byte_offset(&self) -> usize {
self.source.len() - self.input.len()
}
@ -285,7 +366,7 @@ impl<'a> Lexer<'a> {
/// occur, but not angle brackets.
#[must_use]
pub(in crate::front::wgsl) fn next(&mut self) -> TokenSpan<'a> {
self.next_impl(false)
self.next_impl(false, true)
}
/// Return the next non-whitespace token from `self`.
@ -294,16 +375,25 @@ impl<'a> Lexer<'a> {
/// but not bit shift operators.
#[must_use]
pub(in crate::front::wgsl) fn next_generic(&mut self) -> TokenSpan<'a> {
self.next_impl(true)
self.next_impl(true, true)
}
#[cfg(test)]
pub fn next_with_unignored_doc_comments(&mut self) -> TokenSpan<'a> {
self.next_impl(false, false)
}
/// Return the next non-whitespace token from `self`, with a span.
///
/// See [`consume_token`] for the meaning of `generic`.
fn next_impl(&mut self, generic: bool) -> TokenSpan<'a> {
fn next_impl(&mut self, generic: bool, ignore_doc_comments: bool) -> TokenSpan<'a> {
let mut start_byte_offset = self.current_byte_offset();
loop {
let (token, rest) = consume_token(self.input, generic);
let (token, rest) = consume_token(
self.input,
generic,
ignore_doc_comments || self.ignore_doc_comments,
);
self.input = rest;
match token {
Token::Trivia => start_byte_offset = self.current_byte_offset(),
@ -516,9 +606,31 @@ impl<'a> Lexer<'a> {
#[cfg(test)]
#[track_caller]
fn sub_test(source: &str, expected_tokens: &[Token]) {
let mut lex = Lexer::new(source);
sub_test_with(true, source, expected_tokens);
}
#[cfg(test)]
#[track_caller]
fn sub_test_with_and_without_doc_comments(source: &str, expected_tokens: &[Token]) {
sub_test_with(false, source, expected_tokens);
sub_test_with(
true,
source,
expected_tokens
.iter()
.filter(|v| !matches!(**v, Token::DocComment(_) | Token::ModuleDocComment(_)))
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}
#[cfg(test)]
#[track_caller]
fn sub_test_with(ignore_doc_comments: bool, source: &str, expected_tokens: &[Token]) {
let mut lex = Lexer::new(source, ignore_doc_comments);
for &token in expected_tokens {
assert_eq!(lex.next().0, token);
assert_eq!(lex.next_with_unignored_doc_comments().0, token);
}
assert_eq!(lex.next().0, Token::End);
}
@ -737,11 +849,13 @@ fn test_tokens() {
sub_test("No¾", &[Token::Word("No"), Token::Unknown('¾')]);
sub_test("No好", &[Token::Word("No好")]);
sub_test("_No", &[Token::Word("_No")]);
sub_test(
sub_test_with_and_without_doc_comments(
"*/*/***/*//=/*****//",
&[
Token::Operation('*'),
Token::AssignmentOperation('/'),
Token::DocComment("/*****/"),
Token::Operation('/'),
],
);
@ -807,3 +921,132 @@ fn test_variable_decl() {
],
);
}
#[test]
fn test_comments() {
sub_test("// Single comment", &[]);
sub_test(
"/* multi
line
comment */",
&[],
);
sub_test(
"/* multi
line
comment */
// and another",
&[],
);
}
#[test]
fn test_doc_comments() {
sub_test_with_and_without_doc_comments(
"/// Single comment",
&[Token::DocComment("/// Single comment")],
);
sub_test_with_and_without_doc_comments(
"/** multi
line
comment */",
&[Token::DocComment(
"/** multi
line
comment */",
)],
);
sub_test_with_and_without_doc_comments(
"/** multi
line
comment */
/// and another",
&[
Token::DocComment(
"/** multi
line
comment */",
),
Token::DocComment("/// and another"),
],
);
}
#[test]
fn test_doc_comment_nested() {
sub_test_with_and_without_doc_comments(
"/**
a comment with nested one /**
nested comment
*/
*/
const a : i32 = 2;",
&[
Token::DocComment(
"/**
a comment with nested one /**
nested comment
*/
*/",
),
Token::Word("const"),
Token::Word("a"),
Token::Separator(':'),
Token::Word("i32"),
Token::Operation('='),
Token::Number(Ok(Number::AbstractInt(2))),
Token::Separator(';'),
],
);
}
#[test]
fn test_doc_comment_long_character() {
sub_test_with_and_without_doc_comments(
"/// π/2
/// D(𝐡) = ───────────────────────────────────────────────────
/// παα_b((𝐡𝐭)² / αₜ²) + (𝐡𝐛)² / α_b² +`
const a : i32 = 2;",
&[
Token::DocComment("/// π/2"),
Token::DocComment("/// D(𝐡) = ───────────────────────────────────────────────────"),
Token::DocComment("/// παα_b((𝐡𝐭)² / αₜ²) + (𝐡𝐛)² / α_b² +`"),
Token::Word("const"),
Token::Word("a"),
Token::Separator(':'),
Token::Word("i32"),
Token::Operation('='),
Token::Number(Ok(Number::AbstractInt(2))),
Token::Separator(';'),
],
);
}
#[test]
fn test_doc_comments_module() {
sub_test_with_and_without_doc_comments(
"//! Comment Module
//! Another one.
/*! Different module comment */
/// Trying to break module comment
// Trying to break module comment again
//! After a regular comment is ok.
/*! Different module comment again */
//! After a break is supported.
const
//! After anything else is not.",
&[
Token::ModuleDocComment("//! Comment Module"),
Token::ModuleDocComment("//! Another one."),
Token::ModuleDocComment("/*! Different module comment */"),
Token::DocComment("/// Trying to break module comment"),
Token::ModuleDocComment("//! After a regular comment is ok."),
Token::ModuleDocComment("/*! Different module comment again */"),
Token::ModuleDocComment("//! After a break is supported."),
Token::Word("const"),
],
);
}

View File

@ -275,6 +275,21 @@ impl<'a> BindingParser<'a> {
}
}
/// Configuration for the whole parser run.
pub struct Options {
/// Controls whether the parser should parse doc comments.
pub parse_doc_comments: bool,
}
impl Options {
/// Creates a new [`Options`] with doc comment parsing disabled.
pub const fn new() -> Self {
Options {
parse_doc_comments: false,
}
}
}
pub struct Parser {
rules: Vec<(Rule, usize)>,
recursion_depth: u32,
@ -1326,6 +1341,7 @@ impl Parser {
binding: None,
ty,
init,
doc_comments: Vec::new(),
})
}
@ -1346,6 +1362,9 @@ impl Parser {
ExpectedToken::Token(Token::Separator(',')),
)));
}
let doc_comments = lexer.accumulate_doc_comments();
let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default());
self.push_rule_span(Rule::Attribute, lexer);
let mut bind_parser = BindingParser::default();
@ -1381,6 +1400,7 @@ impl Parser {
binding,
size: size.value,
align: align.value,
doc_comments,
});
if !member_names.insert(name.name) {
@ -2708,6 +2728,7 @@ impl Parser {
result,
body,
diagnostic_filter_leaf,
doc_comments: Vec::new(),
};
// done
@ -2750,6 +2771,8 @@ impl Parser {
lexer: &mut Lexer<'a>,
out: &mut ast::TranslationUnit<'a>,
) -> Result<'a, ()> {
let doc_comments = lexer.accumulate_doc_comments();
// read attributes
let mut binding = None;
let mut stage = ParsedAttribute::default();
@ -2893,7 +2916,12 @@ impl Parser {
let name = lexer.next_ident()?;
let members = self.struct_body(lexer, &mut ctx)?;
Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members }))
Some(ast::GlobalDeclKind::Struct(ast::Struct {
name,
members,
doc_comments,
}))
}
(Token::Word("alias"), _) => {
ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?;
@ -2921,7 +2949,12 @@ impl Parser {
let init = self.general_expression(lexer, &mut ctx)?;
lexer.expect(Token::Separator(';'))?;
Some(ast::GlobalDeclKind::Const(ast::Const { name, ty, init }))
Some(ast::GlobalDeclKind::Const(ast::Const {
name,
ty,
init,
doc_comments,
}))
}
(Token::Word("override"), _) => {
ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?;
@ -2954,6 +2987,7 @@ impl Parser {
let mut var = self.variable_decl(lexer, &mut ctx)?;
var.binding = binding.take();
var.doc_comments = doc_comments;
Some(ast::GlobalDeclKind::Var(var))
}
(Token::Word("fn"), _) => {
@ -2983,6 +3017,7 @@ impl Parser {
} else {
None
},
doc_comments,
..function
}))
}
@ -3030,14 +3065,21 @@ impl Parser {
}
}
pub fn parse<'a>(&mut self, source: &'a str) -> Result<'a, ast::TranslationUnit<'a>> {
pub fn parse<'a>(
&mut self,
source: &'a str,
options: &Options,
) -> Result<'a, ast::TranslationUnit<'a>> {
self.reset();
let mut lexer = Lexer::new(source);
let mut lexer = Lexer::new(source, !options.parse_doc_comments);
let mut tu = ast::TranslationUnit::default();
let mut enable_extensions = EnableExtensions::empty();
let mut diagnostic_filters = DiagnosticFilterMap::new();
// Parse module doc comments.
tu.doc_comments = lexer.accumulate_module_doc_comments();
// Parse directives.
while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() {
if let Some(kind) = DirectiveKind::from_ident(ident) {

View File

@ -221,7 +221,7 @@ An override expression can be evaluated at pipeline creation time.
mod block;
use alloc::{string::String, vec::Vec};
use alloc::{boxed::Box, string::String, vec::Vec};
#[cfg(feature = "arbitrary")]
use arbitrary::Arbitrary;
@ -2386,6 +2386,28 @@ pub enum RayQueryIntersection {
Aabb = 3,
}
/// Doc comments preceding items.
///
/// These can be used to generate automated documentation,
/// provide IDE hover information, or translate shaders while keeping their comments.
#[derive(Debug, Default, Clone)]
#[cfg_attr(feature = "serialize", derive(Serialize))]
#[cfg_attr(feature = "deserialize", derive(Deserialize))]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
pub struct DocComments {
pub types: FastIndexMap<Handle<Type>, Vec<String>>,
// The key is:
// - key.0: the handle to the Struct
// - key.1: the index of the `StructMember`.
pub struct_members: FastIndexMap<(Handle<Type>, usize), Vec<String>>,
pub entry_points: FastIndexMap<usize, Vec<String>>,
pub functions: FastIndexMap<Handle<Function>, Vec<String>>,
pub constants: FastIndexMap<Handle<Constant>, Vec<String>>,
pub global_variables: FastIndexMap<Handle<GlobalVariable>, Vec<String>>,
// Top-level comments, appearing first in the file before anything else.
pub module: Vec<String>,
}
/// Shader module.
///
/// A module is a set of constants, global variables and functions, as well as
@ -2471,4 +2493,6 @@ pub struct Module {
/// See [`DiagnosticFilterNode`] for details on how the tree is represented and used in
/// validation.
pub diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>,
/// Doc comments.
pub doc_comments: Option<Box<DocComments>>,
}

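To make the `(Handle<Type>, usize)` key above concrete, a hedged sketch of walking struct-member doc comments on a parsed `Module`, continuing the earlier sketches (only items defined in this file and the WGSL frontend are used):

// Resolve each struct member's doc comment via the
// `(type handle, member index)` key described above.
if let Some(doc_comments) = module.doc_comments.as_ref() {
    for (handle, ty) in module.types.iter() {
        if let naga::TypeInner::Struct { ref members, .. } = ty.inner {
            for (index, member) in members.iter().enumerate() {
                if let Some(docs) = doc_comments.struct_members.get(&(handle, index)) {
                    println!("{:?}.{:?}: {docs:?}", ty.name, member.name);
                }
            }
        }
    }
}
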
View File

@ -2,16 +2,15 @@
use core::{convert::TryInto, hash::Hash};
use super::ValidationError;
use super::{TypeError, ValidationError};
use crate::non_max_u32::NonMaxU32;
use crate::{
arena::{BadHandle, BadRangeError},
diagnostic_filter::DiagnosticFilterNode,
Handle,
EntryPoint, Handle,
};
use crate::{Arena, UniqueArena};
#[cfg(test)]
use alloc::string::ToString;
impl super::Validator {
@ -43,6 +42,7 @@ impl super::Validator {
ref global_expressions,
ref diagnostic_filters,
ref diagnostic_filter_leaf,
ref doc_comments,
} = module;
// Because types can refer to global expressions and vice versa, to
@ -257,6 +257,70 @@ impl super::Validator {
handle.check_valid_for(diagnostic_filters)?;
}
if let Some(doc_comments) = doc_comments.as_ref() {
let crate::DocComments {
module: _,
types: ref doc_comments_for_types,
struct_members: ref doc_comments_for_struct_members,
entry_points: ref doc_comments_for_entry_points,
functions: ref doc_comments_for_functions,
constants: ref doc_comments_for_constants,
global_variables: ref doc_comments_for_global_variables,
} = **doc_comments;
for (&ty, _) in doc_comments_for_types.iter() {
validate_type(ty)?;
}
for (&(ty, struct_member_index), _) in doc_comments_for_struct_members.iter() {
validate_type(ty)?;
let struct_type = types.get_handle(ty).unwrap();
match struct_type.inner {
crate::TypeInner::Struct {
ref members,
span: ref _span,
} => {
(0..members.len())
.contains(&struct_member_index)
.then_some(())
// TODO: what error should this be?
.ok_or_else(|| ValidationError::Type {
handle: ty,
name: struct_type.name.as_ref().map_or_else(
|| "members length incorrect".to_string(),
|name| name.to_string(),
),
source: TypeError::InvalidData(ty),
})?;
}
_ => {
// TODO: internal error? We should never get here:
// reaching this arm likely means a handle was not adjusted during compaction.
return Err(ValidationError::Type {
handle: ty,
name: struct_type
.name
.as_ref()
.map_or_else(|| "Unknown".to_string(), |name| name.to_string()),
source: TypeError::InvalidData(ty),
});
}
}
}
for (&function, _) in doc_comments_for_functions.iter() {
Self::validate_function_handle(function, functions)?;
}
for (&entry_point_index, _) in doc_comments_for_entry_points.iter() {
Self::validate_entry_point_index(entry_point_index, entry_points)?;
}
for (&constant, _) in doc_comments_for_constants.iter() {
Self::validate_constant_handle(constant, constants)?;
}
for (&global_variable, _) in doc_comments_for_global_variables.iter() {
Self::validate_global_variable_handle(global_variable, global_variables)?;
}
}
Ok(())
}
@ -274,6 +338,13 @@ impl super::Validator {
handle.check_valid_for(constants).map(|_| ())
}
fn validate_global_variable_handle(
handle: Handle<crate::GlobalVariable>,
global_variables: &Arena<crate::GlobalVariable>,
) -> Result<(), InvalidHandleError> {
handle.check_valid_for(global_variables).map(|_| ())
}
fn validate_override_handle(
handle: Handle<crate::Override>,
overrides: &Arena<crate::Override>,
@ -343,6 +414,22 @@ impl super::Validator {
Ok(max_expr)
}
fn validate_entry_point_index(
entry_point_index: usize,
entry_points: &[EntryPoint],
) -> Result<(), InvalidHandleError> {
(0..entry_points.len())
.contains(&entry_point_index)
.then_some(())
.ok_or_else(|| {
BadHandle {
kind: "EntryPoint",
index: entry_point_index,
}
.into()
})
}
/// Validate all handles that occur in `expression`, whose handle is `handle`.
///
/// If `expression` refers to any `Type`s, return the highest-indexed type

View File

@ -0,0 +1,2 @@
targets = "IR"
wgsl-in = { parse_doc_comments = true }

View File

@ -0,0 +1,27 @@
//! Module doc comment.
//! 2nd line of module doc comment.
/**
🍽 /* nested comment */
*/
@group(0) @binding(0) var<uniform> mvp_matrix: mat4x4<f32>;
/// workgroup var doc comment
/// 2nd line of workgroup var doc comment
var<workgroup> w_mem: mat2x2<f32>;
/// constant doc comment
const test_c: u32 = 1;
/// struct doc comment
struct TestS {
/// member doc comment
test_m: u32,
}
/// function doc comment
fn test_f() {}
/// entry point doc comment
@compute @workgroup_size(1)
fn test_ep() {}

View File

@ -78,6 +78,12 @@ where
Ok(map)
}
#[derive(Default, serde::Deserialize)]
#[serde(default)]
struct WgslInParameters {
parse_doc_comments: bool,
}
#[derive(Default, serde::Deserialize)]
#[serde(default)]
struct SpirvInParameters {
@ -117,6 +123,10 @@ struct Parameters {
// -- GOD MODE --
god_mode: bool,
// -- wgsl-in options --
#[serde(rename = "wgsl-in")]
wgsl_in: WgslInParameters,
// -- spirv-in options --
#[serde(rename = "spv-in")]
spv_in: SpirvInParameters,
@ -805,7 +815,13 @@ fn convert_snapshots_wgsl() {
let source = input.read_source();
// crlf will make the large split output different on different platform
let source = source.replace('\r', "");
match naga::front::wgsl::parse_str(&source) {
let params = input.read_parameters();
let WgslInParameters { parse_doc_comments } = params.wgsl_in;
let options = naga::front::wgsl::Options { parse_doc_comments };
let mut frontend = naga::front::wgsl::Frontend::new_with_options(options);
match frontend.parse(&source) {
Ok(mut module) => check_targets(&input, &mut module, Some(&source)),
Err(e) => panic!(
"{}",

View File

@ -198,4 +198,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -268,4 +268,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -1033,4 +1033,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -1311,4 +1311,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -615,4 +615,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -721,4 +721,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -2909,4 +2909,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -2909,4 +2909,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -336,4 +336,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -336,4 +336,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -36,4 +36,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -36,4 +36,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -90,4 +90,5 @@
),
],
diagnostic_filter_leaf: Some(0),
doc_comments: None,
)

View File

@ -90,4 +90,5 @@
),
],
diagnostic_filter_leaf: Some(0),
doc_comments: None,
)

View File

@ -467,4 +467,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -467,4 +467,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -102,4 +102,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -102,4 +102,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -203,4 +203,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -203,4 +203,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -130,4 +130,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -130,4 +130,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -265,4 +265,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -265,4 +265,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -223,4 +223,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -223,4 +223,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -317,4 +317,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -317,4 +317,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -192,4 +192,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -192,4 +192,5 @@
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: None,
)

View File

@ -0,0 +1,164 @@
(
types: [
(
name: None,
inner: Matrix(
columns: Quad,
rows: Quad,
scalar: (
kind: Float,
width: 4,
),
),
),
(
name: None,
inner: Matrix(
columns: Bi,
rows: Bi,
scalar: (
kind: Float,
width: 4,
),
),
),
(
name: None,
inner: Scalar((
kind: Uint,
width: 4,
)),
),
(
name: Some("TestS"),
inner: Struct(
members: [
(
name: Some("test_m"),
ty: 2,
binding: None,
offset: 0,
),
],
span: 4,
),
),
],
special_types: (
ray_desc: None,
ray_intersection: None,
ray_vertex_return: None,
predeclared_types: {},
),
constants: [
(
name: Some("test_c"),
ty: 2,
init: 0,
),
],
overrides: [],
global_variables: [
(
name: Some("mvp_matrix"),
space: Uniform,
binding: Some((
group: 0,
binding: 0,
)),
ty: 0,
init: None,
),
(
name: Some("w_mem"),
space: WorkGroup,
binding: None,
ty: 1,
init: None,
),
],
global_expressions: [
Literal(U32(1)),
],
functions: [
(
name: Some("test_f"),
arguments: [],
result: None,
local_variables: [],
expressions: [],
named_expressions: {},
body: [
Return(
value: None,
),
],
diagnostic_filter_leaf: None,
),
],
entry_points: [
(
name: "test_ep",
stage: Compute,
early_depth_test: None,
workgroup_size: (1, 1, 1),
workgroup_size_overrides: None,
function: (
name: Some("test_ep"),
arguments: [],
result: None,
local_variables: [],
expressions: [],
named_expressions: {},
body: [
Return(
value: None,
),
],
diagnostic_filter_leaf: None,
),
),
],
diagnostic_filters: [],
diagnostic_filter_leaf: None,
doc_comments: Some((
types: {
3: [
"/// struct doc comment",
],
},
struct_members: {
(3, 0): [
"/// member doc comment",
],
},
entry_points: {
0: [
"/// entry point doc comment",
],
},
functions: {
0: [
"/// function doc comment",
],
},
constants: {
0: [
"/// constant doc comment",
],
},
global_variables: {
0: [
"/**\n 🍽\u{fe0f} /* nested comment */\n */",
],
1: [
"/// workgroup var doc comment",
"/// 2nd line of workgroup var doc comment",
],
},
module: [
"//! Module doc comment.",
"//! 2nd line of module doc comment.",
],
)),
)
