From bbd976e7416e6446d1473db579e1eef11e24498f Mon Sep 17 00:00:00 2001
From: Henri Chataing <henrichataing@google.com>
Date: Wed, 16 Mar 2022 16:51:50 +0100
Subject: [PATCH] PDL: Import pdl-parser sources

The PDL parser re-implementation currently parses
input PDL source files, runs linter checks on
the parsed AST, and serializes the AST to JSON.

Test: m pdl + run against test/*.pdl
Change-Id: If86e0265582e1ee21f14c1341c9069d43b17f97a
---
 tools/pdl/Android.bp                |   17 +
 tools/pdl/src/ast.rs                |  301 +++++++
 tools/pdl/src/lint.rs               | 1265 +++++++++++++++++++++++++++
 tools/pdl/src/main.rs               |   51 ++
 tools/pdl/src/parser.rs             |  530 +++++++++++
 tools/pdl/src/pdl.pest              |  123 +++
 tools/pdl/test/array-field.pdl      |   39 +
 tools/pdl/test/checksum-field.pdl   |   22 +
 tools/pdl/test/count-field.pdl      |   25 +
 tools/pdl/test/decl-scope.pdl       |   26 +
 tools/pdl/test/example.pdl          |   78 ++
 tools/pdl/test/fixed-field.pdl      |   22 +
 tools/pdl/test/group-constraint.pdl |   39 +
 tools/pdl/test/packet.pdl           |   52 ++
 tools/pdl/test/recurse.pdl          |   38 +
 tools/pdl/test/size-field.pdl       |   58 ++
 tools/pdl/test/struct.pdl           |   52 ++
 tools/pdl/test/typedef-field.pdl    |   36 +
 18 files changed, 2774 insertions(+)
 create mode 100644 tools/pdl/Android.bp
 create mode 100644 tools/pdl/src/ast.rs
 create mode 100644 tools/pdl/src/lint.rs
 create mode 100644 tools/pdl/src/main.rs
 create mode 100644 tools/pdl/src/parser.rs
 create mode 100644 tools/pdl/src/pdl.pest
 create mode 100644 tools/pdl/test/array-field.pdl
 create mode 100644 tools/pdl/test/checksum-field.pdl
 create mode 100644 tools/pdl/test/count-field.pdl
 create mode 100644 tools/pdl/test/decl-scope.pdl
 create mode 100644 tools/pdl/test/example.pdl
 create mode 100644 tools/pdl/test/fixed-field.pdl
 create mode 100644 tools/pdl/test/group-constraint.pdl
 create mode 100644 tools/pdl/test/packet.pdl
 create mode 100644 tools/pdl/test/recurse.pdl
 create mode 100644 tools/pdl/test/size-field.pdl
 create mode 100644 tools/pdl/test/struct.pdl
 create mode 100644 tools/pdl/test/typedef-field.pdl

diff --git a/tools/pdl/Android.bp b/tools/pdl/Android.bp
new file mode 100644
index 00000000000..cbf8e538d44
--- /dev/null
+++ b/tools/pdl/Android.bp
@@ -0,0 +1,17 @@
+
+rust_binary_host {
+    name: "pdl",
+    srcs: [
+        "src/main.rs",
+    ],
+    rustlibs: [
+        "libpest",
+        "libserde",
+        "libserde_json",
+        "libstructopt",
+        "libcodespan_reporting",
+    ],
+    proc_macros: [
+        "libpest_derive",
+    ],
+}
diff --git a/tools/pdl/src/ast.rs b/tools/pdl/src/ast.rs
new file mode 100644
index 00000000000..e25b01a207b
--- /dev/null
+++ b/tools/pdl/src/ast.rs
@@ -0,0 +1,301 @@
+use codespan_reporting::diagnostic;
+use codespan_reporting::files;
+use serde::Serialize;
+use std::fmt;
+use std::ops;
+
+/// File identifier.
+/// References a source file in the source database.
+pub type FileId = usize;
+
+/// Source database.
+/// Stores the source file contents for reference.
+pub type SourceDatabase = files::SimpleFiles<String, String>;
+
+#[derive(Debug, Copy, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
+pub struct SourceLocation {
+    pub offset: usize,
+    pub line: usize,
+    pub column: usize,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct SourceRange {
+    pub file: FileId,
+    pub start: SourceLocation,
+    pub end: SourceLocation,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "comment")]
+pub struct Comment {
+    pub loc: SourceRange,
+    pub text: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum EndiannessValue {
+    LittleEndian,
+    BigEndian,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "endianness_declaration")]
+pub struct Endianness {
+    pub loc: SourceRange,
+    pub value: EndiannessValue,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Expr {
+    #[serde(rename = "identifier")]
+    Identifier { loc: SourceRange, name: String },
+    #[serde(rename = "integer")]
+    Integer { loc: SourceRange, value: usize },
+    #[serde(rename = "unary_expr")]
+    Unary { loc: SourceRange, op: String, operand: Box<Expr> },
+    #[serde(rename = "binary_expr")]
+    Binary { loc: SourceRange, op: String, operands: Box<(Expr, Expr)> },
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "tag")]
+pub struct Tag {
+    pub id: String,
+    pub loc: SourceRange,
+    pub value: usize,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "constraint")]
+pub struct Constraint {
+    pub id: String,
+    pub loc: SourceRange,
+    pub value: Expr,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Field {
+    #[serde(rename = "checksum_field")]
+    Checksum { loc: SourceRange, field_id: String },
+    #[serde(rename = "padding_field")]
+    Padding { loc: SourceRange, width: usize },
+    #[serde(rename = "size_field")]
+    Size { loc: SourceRange, field_id: String, width: usize },
+    #[serde(rename = "count_field")]
+    Count { loc: SourceRange, field_id: String, width: usize },
+    #[serde(rename = "body_field")]
+    Body { loc: SourceRange },
+    #[serde(rename = "payload_field")]
+    Payload { loc: SourceRange, size_modifier: Option<String> },
+    #[serde(rename = "fixed_field")]
+    Fixed {
+        loc: SourceRange,
+        width: Option<usize>,
+        value: Option<usize>,
+        enum_id: Option<String>,
+        tag_id: Option<String>,
+    },
+    #[serde(rename = "reserved_field")]
+    Reserved { loc: SourceRange, width: usize },
+    #[serde(rename = "array_field")]
+    Array {
+        loc: SourceRange,
+        id: String,
+        width: Option<usize>,
+        type_id: Option<String>,
+        size_modifier: Option<String>,
+        size: Option<usize>,
+    },
+    #[serde(rename = "scalar_field")]
+    Scalar { loc: SourceRange, id: String, width: usize },
+    #[serde(rename = "typedef_field")]
+    Typedef { loc: SourceRange, id: String, type_id: String },
+    #[serde(rename = "group_field")]
+    Group { loc: SourceRange, group_id: String, constraints: Vec<Constraint> },
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind", rename = "test_case")]
+pub struct TestCase {
+    pub loc: SourceRange,
+    pub input: String,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(tag = "kind")]
+pub enum Decl {
+    #[serde(rename = "checksum_declaration")]
+    Checksum { id: String, loc: SourceRange, function: String, width: usize },
+    #[serde(rename = "custom_field_declaration")]
+    CustomField { id: String, loc: SourceRange, width: Option<usize>, function: String },
+    #[serde(rename = "enum_declaration")]
+    Enum { id: String, loc: SourceRange, tags: Vec<Tag>, width: usize },
+    #[serde(rename = "packet_declaration")]
+    Packet {
+        id: String,
+        loc: SourceRange,
+        constraints: Vec<Constraint>,
+        fields: Vec<Field>,
+        parent_id: Option<String>,
+    },
+    #[serde(rename = "struct_declaration")]
+    Struct {
+        id: String,
+        loc: SourceRange,
+        constraints: Vec<Constraint>,
+        fields: Vec<Field>,
+        parent_id: Option<String>,
+    },
+    #[serde(rename = "group_declaration")]
+    Group { id: String, loc: SourceRange, fields: Vec<Field> },
+    #[serde(rename = "test_declaration")]
+    Test { loc: SourceRange, type_id: String, test_cases: Vec<TestCase> },
+}
+
+#[derive(Debug, Serialize)]
+pub struct Grammar {
+    pub version: String,
+    pub file: FileId,
+    pub comments: Vec<Comment>,
+    pub endianness: Option<Endianness>,
+    pub declarations: Vec<Decl>,
+}
+
+/// Implemented for all AST elements.
+pub trait Located<'d> {
+    fn loc(&'d self) -> &'d SourceRange;
+}
+
+/// Implemented for named AST elements.
+pub trait Named<'d> {
+    fn id(&'d self) -> Option<&'d String>;
+}
+
+impl SourceLocation {
+    pub fn new(offset: usize, line_starts: &[usize]) -> SourceLocation {
+        for (line, start) in line_starts.iter().enumerate().rev() { // last start <= offset
+            if *start <= offset {
+                return SourceLocation { offset, line, column: offset - start };
+            }
+        }
+        unreachable!()
+    }
+}
+
+impl SourceRange {
+    pub fn primary(&self) -> diagnostic::Label<FileId> {
+        diagnostic::Label::primary(self.file, self.start.offset..self.end.offset)
+    }
+    pub fn secondary(&self) -> diagnostic::Label<FileId> {
+        diagnostic::Label::secondary(self.file, self.start.offset..self.end.offset)
+    }
+}
+
+impl fmt::Display for SourceRange {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.start.line == self.end.line {
+            write!(f, "{}:{}-{}", self.start.line, self.start.column, self.end.column)
+        } else {
+            write!(
+                f,
+                "{}:{}-{}:{}",
+                self.start.line, self.start.column, self.end.line, self.end.column
+            )
+        }
+    }
+}
+
+impl ops::Add<SourceRange> for SourceRange {
+    type Output = SourceRange;
+
+    fn add(self, rhs: SourceRange) -> SourceRange {
+        assert!(self.file == rhs.file);
+        SourceRange {
+            file: self.file,
+            start: self.start.min(rhs.start),
+            end: self.end.max(rhs.end),
+        }
+    }
+}
+
+impl Grammar {
+    pub fn new(file: FileId) -> Grammar {
+        Grammar {
+            version: "1.0".to_owned(), // AST format version
+            comments: vec![],
+            endianness: None,
+            declarations: vec![],
+            file,
+        }
+    }
+}
+
+impl<'d> Located<'d> for Field {
+    fn loc(&'d self) -> &'d SourceRange {
+        match self {
+            Field::Checksum { loc, .. }
+            | Field::Padding { loc, .. }
+            | Field::Size { loc, .. }
+            | Field::Count { loc, .. }
+            | Field::Body { loc, .. }
+            | Field::Payload { loc, .. }
+            | Field::Fixed { loc, .. }
+            | Field::Reserved { loc, .. }
+            | Field::Array { loc, .. }
+            | Field::Scalar { loc, .. }
+            | Field::Typedef { loc, .. }
+            | Field::Group { loc, .. } => loc,
+        }
+    }
+}
+
+impl<'d> Located<'d> for Decl {
+    fn loc(&'d self) -> &'d SourceRange {
+        match self {
+            Decl::Checksum { loc, .. }
+            | Decl::CustomField { loc, .. }
+            | Decl::Enum { loc, .. }
+            | Decl::Packet { loc, .. }
+            | Decl::Struct { loc, .. }
+            | Decl::Group { loc, .. }
+            | Decl::Test { loc, .. } => loc,
+        }
+    }
+}
+
+impl<'d> Named<'d> for Field {
+    fn id(&'d self) -> Option<&'d String> {
+        match self {
+            Field::Checksum { .. }
+            | Field::Padding { .. }
+            | Field::Size { .. }
+            | Field::Count { .. }
+            | Field::Body { .. }
+            | Field::Payload { .. }
+            | Field::Fixed { .. }
+            | Field::Reserved { .. }
+            | Field::Group { .. } => None,
+            Field::Array { id, .. } | Field::Scalar { id, .. } | Field::Typedef { id, .. } => {
+                Some(id)
+            }
+        }
+    }
+}
+
+impl<'d> Named<'d> for Decl {
+    fn id(&'d self) -> Option<&'d String> {
+        match self {
+            Decl::Test { .. } => None,
+            Decl::Checksum { id, .. }
+            | Decl::CustomField { id, .. }
+            | Decl::Enum { id, .. }
+            | Decl::Packet { id, .. }
+            | Decl::Struct { id, .. }
+            | Decl::Group { id, .. } => Some(id),
+        }
+    }
+}
diff --git a/tools/pdl/src/lint.rs b/tools/pdl/src/lint.rs
new file mode 100644
index 00000000000..0b2d3bc62c0
--- /dev/null
+++ b/tools/pdl/src/lint.rs
@@ -0,0 +1,1265 @@
+use codespan_reporting::diagnostic::Diagnostic;
+use codespan_reporting::files;
+use codespan_reporting::term;
+use codespan_reporting::term::termcolor;
+use std::collections::HashMap;
+
+use crate::ast::*;
+
+/// Aggregate linter diagnostics.
+pub struct LintDiagnostics {
+    pub diagnostics: Vec<Diagnostic<FileId>>,
+}
+
+/// Implement lint checks for an AST element.
+pub trait Lintable {
+    /// Generate lint warnings and errors for the
+    /// input element.
+    fn lint(&self) -> LintDiagnostics;
+}
+
+/// Represents a chain of group expansion.
+/// Each field but the last in the chain is a typedef field of a group.
+/// The last field can also be a typedef field of a group if the chain is
+/// not fully expanded.
+type FieldPath<'d> = Vec<&'d Field>;
+
+/// Gather information about the full grammar declaration.
+struct Scope<'d> {
+    // Collection of Group declarations.
+    groups: HashMap<String, &'d Decl>,
+
+    // Collection of Packet declarations.
+    packets: HashMap<String, &'d Decl>,
+
+    // Collection of Enum, Struct, Checksum, and CustomField declarations.
+    // Packet and Group can not be referenced in a Typedef field and thus
+    // do not share the same namespace.
+    typedef: HashMap<String, &'d Decl>,
+
+    // Collection of Packet, Struct, and Group scope declarations.
+    scopes: HashMap<&'d Decl, PacketScope<'d>>,
+}
+
+/// Gather information about a Packet, Struct, or Group declaration.
+struct PacketScope<'d> {
+    // Checksum starts, indexed by the checksum field id.
+    checksums: HashMap<String, FieldPath<'d>>,
+
+    // Size or count fields, indexed by the field id.
+    sizes: HashMap<String, FieldPath<'d>>,
+
+    // Payload or body field.
+    payload: Option<FieldPath<'d>>,
+
+    // Typedef, scalar, array fields.
+    named: HashMap<String, FieldPath<'d>>,
+
+    // Group fields.
+    groups: HashMap<String, &'d Field>,
+
+    // Flattened field declarations.
+    // Contains field declarations from the original Packet, Struct, or Group,
+    // where Group fields have been substituted by their body.
+    // Constrained Scalar or Typedef Group fields are substituted by a Fixed
+    // field.
+    fields: Vec<FieldPath<'d>>,
+
+    // Constraint declarations gathered from Group inlining.
+    constraints: HashMap<String, &'d Constraint>,
+
+    // Local and inherited field declarations. Only named fields are preserved.
+    // Saved here for reference for parent constraint resolving.
+    all_fields: HashMap<String, &'d Field>,
+
+    // Local and inherited constraint declarations.
+    // Saved here for constraint conflict checks.
+    all_constraints: HashMap<String, &'d Constraint>,
+}
+
+impl std::cmp::Eq for &Decl {}
+impl<'d> std::cmp::PartialEq for &'d Decl {
+    fn eq(&self, other: &Self) -> bool {
+        std::ptr::eq(*self, *other)
+    }
+}
+
+impl<'d> std::hash::Hash for &'d Decl {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        std::ptr::hash(*self, state);
+    }
+}
+
+impl<'d> Located<'d> for FieldPath<'d> {
+    fn loc(&'d self) -> &'d SourceRange {
+        self.last().unwrap().loc()
+    }
+}
+
+impl LintDiagnostics {
+    fn new() -> LintDiagnostics {
+        LintDiagnostics { diagnostics: vec![] }
+    }
+
+    pub fn print(
+        &self,
+        sources: &SourceDatabase,
+        color: termcolor::ColorChoice,
+    ) -> Result<(), files::Error> {
+        let writer = termcolor::StandardStream::stderr(color);
+        let config = term::Config::default();
+        for d in self.diagnostics.iter() {
+            term::emit(&mut writer.lock(), &config, sources, d)?;
+        }
+        Ok(())
+    }
+
+    fn push(&mut self, diagnostic: Diagnostic<FileId>) {
+        self.diagnostics.push(diagnostic)
+    }
+
+    fn err_undeclared(&mut self, id: &str, loc: &SourceRange) {
+        self.diagnostics.push(
+            Diagnostic::error()
+                .with_message(format!("undeclared identifier `{}`", id))
+                .with_labels(vec![loc.primary()]),
+        )
+    }
+
+    fn err_redeclared(&mut self, id: &str, kind: &str, loc: &SourceRange, prev: &SourceRange) {
+        self.diagnostics.push(
+            Diagnostic::error()
+                .with_message(format!("redeclaration of {} identifier `{}`", kind, id))
+                .with_labels(vec![
+                    loc.primary(),
+                    prev.secondary().with_message(format!("`{}` is first declared here", id)),
+                ]),
+        )
+    }
+}
+
+fn bit_width(val: usize) -> usize {
+    usize::BITS as usize - val.leading_zeros() as usize
+}
+
+impl<'d> PacketScope<'d> {
+    /// Insert a field declaration into a packet scope.
+    fn insert(&mut self, field: &'d Field, result: &mut LintDiagnostics) {
+        match field {
+            Field::Checksum { loc, field_id, .. } => {
+                self.checksums.insert(field_id.clone(), vec![field]).map(|prev| {
+                    result.push(
+                        Diagnostic::error()
+                            .with_message(format!(
+                                "redeclaration of checksum start for `{}`",
+                                field_id
+                            ))
+                            .with_labels(vec![
+                                loc.primary(),
+                                prev.loc()
+                                    .secondary()
+                                    .with_message("checksum start is first declared here"),
+                            ]),
+                    )
+                })
+            }
+
+            Field::Padding { .. } | Field::Reserved { .. } | Field::Fixed { .. } => None,
+
+            Field::Size { loc, field_id, .. } | Field::Count { loc, field_id, .. } => {
+                self.sizes.insert(field_id.clone(), vec![field]).map(|prev| {
+                    result.push(
+                        Diagnostic::error()
+                            .with_message(format!(
+                                "redeclaration of size or count for `{}`",
+                                field_id
+                            ))
+                            .with_labels(vec![
+                                loc.primary(),
+                                prev.loc().secondary().with_message("size is first declared here"),
+                            ]),
+                    )
+                })
+            }
+
+            Field::Body { loc, .. } | Field::Payload { loc, .. } => {
+                if let Some(prev) = self.payload.as_ref() {
+                    result.push(
+                        Diagnostic::error()
+                            .with_message("redeclaration of payload or body field")
+                            .with_labels(vec![
+                                loc.primary(),
+                                prev.loc()
+                                    .secondary()
+                                    .with_message("payload is first declared here"),
+                            ]),
+                    )
+                }
+                self.payload = Some(vec![field]);
+                None
+            }
+
+            Field::Array { loc, id, .. }
+            | Field::Scalar { loc, id, .. }
+            | Field::Typedef { loc, id, .. } => self
+                .named
+                .insert(id.clone(), vec![field])
+                .map(|prev| result.err_redeclared(id, "field", loc, prev.loc())),
+
+            Field::Group { loc, group_id, .. } => {
+                self.groups.insert(group_id.clone(), field).map(|prev| {
+                    result.push(
+                        Diagnostic::error()
+                            .with_message(format!("duplicate group `{}` insertion", group_id))
+                            .with_labels(vec![
+                                loc.primary(),
+                                prev.loc()
+                                    .secondary()
+                                    .with_message(format!("`{}` is first used here", group_id)),
+                            ]),
+                    )
+                })
+            }
+        };
+    }
+
+    /// Add parent fields and constraints to the scope.
+    /// Only named fields are imported.
+    fn inherit(
+        &mut self,
+        scope: &Scope,
+        parent: &PacketScope<'d>,
+        constraints: impl Iterator<Item = &'d Constraint>,
+        result: &mut LintDiagnostics,
+    ) {
+        // Check constraints.
+        assert!(self.all_constraints.is_empty());
+        self.all_constraints = parent.all_constraints.clone();
+        for constraint in constraints {
+            lint_constraint(scope, parent, constraint, result);
+            let id = constraint.id.clone();
+            if let Some(prev) = self.all_constraints.insert(id, constraint) {
+                result.push(
+                    Diagnostic::error()
+                        .with_message(format!("duplicate constraint on field `{}`", constraint.id))
+                        .with_labels(vec![
+                            constraint.loc.primary(),
+                            prev.loc.secondary().with_message("the constraint is first set here"),
+                        ]),
+                )
+            }
+        }
+
+        // Merge group constraints into parent constraints,
+        // but generate no duplication warnings, the constraints
+        // do not apply to the same field set.
+        for (id, constraint) in self.constraints.iter() {
+            self.all_constraints.insert(id.clone(), constraint);
+        }
+
+        // Save parent fields.
+        self.all_fields = parent.all_fields.clone();
+    }
+
+    /// Insert group field declarations into a packet scope.
+    fn inline(
+        &mut self,
+        scope: &Scope,
+        packet_scope: &PacketScope<'d>,
+        group: &'d Field,
+        constraints: impl Iterator<Item = &'d Constraint>,
+        result: &mut LintDiagnostics,
+    ) {
+        fn err_redeclared_by_group(
+            result: &mut LintDiagnostics,
+            message: impl Into<String>,
+            loc: &SourceRange,
+            prev: &SourceRange,
+        ) {
+            result.push(Diagnostic::error().with_message(message).with_labels(vec![
+                loc.primary(),
+                prev.secondary().with_message("first declared here"),
+            ]))
+        }
+
+        for (id, field) in packet_scope.checksums.iter() {
+            if let Some(prev) = self.checksums.insert(id.clone(), field.clone()) {
+                err_redeclared_by_group(
+                    result,
+                    format!("inserted group redeclares checksum start for `{}`", id),
+                    group.loc(),
+                    prev.loc(),
+                )
+            }
+        }
+        for (id, field) in packet_scope.sizes.iter() {
+            if let Some(prev) = self.sizes.insert(id.clone(), field.clone()) {
+                err_redeclared_by_group(
+                    result,
+                    format!("inserted group redeclares size or count for `{}`", id),
+                    group.loc(),
+                    prev.loc(),
+                )
+            }
+        }
+        match (&self.payload, &packet_scope.payload) {
+            (Some(prev), Some(next)) => err_redeclared_by_group(
+                result,
+                "inserted group redeclares payload or body field",
+                next.loc(),
+                prev.loc(),
+            ),
+            (None, Some(payload)) => self.payload = Some(payload.clone()),
+            _ => (),
+        }
+        for (id, field) in packet_scope.named.iter() {
+            let mut path = vec![group];
+            path.extend(field.clone());
+            if let Some(prev) = self.named.insert(id.clone(), path) {
+                err_redeclared_by_group(
+                    result,
+                    format!("inserted group redeclares field `{}`", id),
+                    group.loc(),
+                    prev.loc(),
+                )
+            }
+        }
+
+        // Append group fields to the finalized fields.
+        for field in packet_scope.fields.iter() {
+            let mut path = vec![group];
+            path.extend(field.clone());
+            self.fields.push(path);
+        }
+
+        // Append group constraints to the caller packet_scope.
+        for (id, constraint) in packet_scope.constraints.iter() {
+            self.constraints.insert(id.clone(), constraint);
+        }
+
+        // Add constraints to the packet_scope, checking for duplicate constraints.
+        for constraint in constraints {
+            lint_constraint(scope, packet_scope, constraint, result);
+            let id = constraint.id.clone();
+            if let Some(prev) = self.constraints.insert(id, constraint) {
+                result.push(
+                    Diagnostic::error()
+                        .with_message(format!("duplicate constraint on field `{}`", constraint.id))
+                        .with_labels(vec![
+                            constraint.loc.primary(),
+                            prev.loc.secondary().with_message("the constraint is first set here"),
+                        ]),
+                )
+            }
+        }
+    }
+
+    /// Cleanup scope after processing all fields.
+    fn finalize(&mut self, result: &mut LintDiagnostics) {
+        // Check field shadowing.
+        for f in self.fields.iter().map(|f| f.last().unwrap()) {
+            if let Some(id) = f.id() {
+                if let Some(prev) = self.all_fields.insert(id.clone(), f) {
+                    result.push(
+                        Diagnostic::warning()
+                            .with_message(format!("declaration of `{}` shadows parent field", id))
+                            .with_labels(vec![
+                                f.loc().primary(),
+                                prev.loc()
+                                    .secondary()
+                                    .with_message(format!("`{}` is first declared here", id)),
+                            ]),
+                    )
+                }
+            }
+        }
+    }
+}
+
+/// Helper for linting value constraints over packet fields.
+fn lint_constraint(
+    scope: &Scope,
+    packet_scope: &PacketScope,
+    constraint: &Constraint,
+    result: &mut LintDiagnostics,
+) {
+    // Validate constraint value types.
+    match (packet_scope.all_fields.get(&constraint.id), &constraint.value) {
+        (
+            Some(Field::Scalar { loc: field_loc, width, .. }),
+            Expr::Integer { value, loc: value_loc, .. },
+        ) => {
+            if bit_width(*value) > *width {
+                result.push(
+                    Diagnostic::error().with_message("invalid integer literal").with_labels(vec![
+                        value_loc.primary().with_message(format!(
+                            "expected maximum value of `{}`",
+                            (1 << *width) - 1
+                        )),
+                        field_loc.secondary().with_message("the value is used here"),
+                    ]),
+                )
+            }
+        }
+
+        (Some(Field::Typedef { type_id, loc: field_loc, .. }), _) => {
+            match (scope.typedef.get(type_id), &constraint.value) {
+                (Some(Decl::Enum { tags, .. }), Expr::Identifier { name, loc: name_loc, .. }) => {
+                    if !tags.iter().any(|t| &t.id == name) {
+                        result.push(
+                            Diagnostic::error()
+                                .with_message(format!("undeclared enum tag `{}`", name))
+                                .with_labels(vec![
+                                    name_loc.primary(),
+                                    field_loc.secondary().with_message("the value is used here"),
+                                ]),
+                        )
+                    }
+                }
+                (Some(Decl::Enum { .. }), _) => result.push(
+                    Diagnostic::error().with_message("invalid literal type").with_labels(vec![
+                        constraint
+                            .loc
+                            .primary()
+                            .with_message(format!("expected `{}` tag identifier", type_id)),
+                        field_loc.secondary().with_message("the value is used here"),
+                    ]),
+                ),
+                (Some(decl), _) => result.push(
+                    Diagnostic::error().with_message("invalid constraint").with_labels(vec![
+                        constraint.loc.primary(),
+                        field_loc.secondary().with_message(format!(
+                            "`{}` has type {}, expected enum field",
+                            constraint.id,
+                            decl.kind()
+                        )),
+                    ]),
+                ),
+                // This error will be reported during field linting
+                (None, _) => (),
+            }
+        }
+
+        (Some(Field::Scalar { loc: field_loc, .. }), _) => {
+            result.push(Diagnostic::error().with_message("invalid literal type").with_labels(vec![
+                constraint.loc.primary().with_message("expected integer literal"),
+                field_loc.secondary().with_message("the value is used here"),
+            ]))
+        }
+        (Some(_), _) => unreachable!(),
+        (None, _) => result.push(
+            Diagnostic::error()
+                .with_message(format!("undeclared identifier `{}`", constraint.id))
+                .with_labels(vec![constraint.loc.primary()]),
+        ),
+    }
+}
+
+impl<'d> Scope<'d> {
+    // Sort Packet, Struct, and Group declarations by reverse topological
+    // order, and inline Group fields.
+    // Raises errors and warnings for:
+    //      - undeclared included Groups,
+    //      - undeclared Typedef fields,
+    //      - undeclared Packet or Struct parents,
+    //      - recursive Group insertion,
+    //      - recursive Packet or Struct inheritance.
+    fn finalize(&mut self, result: &mut LintDiagnostics) -> Vec<&'d Decl> {
+        // Marks used for cycle detection while traversing the
+        // declaration graph: Temporary marks declarations currently
+        // on the traversal stack, Permanent marks declarations whose
+        // scope has been fully computed.
+        enum Mark {
+            Temporary,
+            Permanent,
+        }
+        // Traversal state: `list` accumulates declarations in
+        // dependency order (groups and parents before their users),
+        // `visited` holds the cycle-detection marks, `scopes` the
+        // finalized scope of each processed declaration.
+        struct Context<'d> {
+            list: Vec<&'d Decl>,
+            visited: HashMap<&'d Decl, Mark>,
+            scopes: HashMap<&'d Decl, PacketScope<'d>>,
+        }
+
+        // Recursively compute the scope of `decl`, first visiting the
+        // groups it inserts, the structs it references, and its parent.
+        // Returns None for non Packet/Struct/Group declarations and
+        // when a recursive declaration is detected.
+        fn bfs<'s, 'd>(
+            decl: &'d Decl,
+            context: &'s mut Context<'d>,
+            scope: &Scope<'d>,
+            result: &mut LintDiagnostics,
+        ) -> Option<&'s PacketScope<'d>> {
+            match context.visited.get(&decl) {
+                // Already processed: return the cached scope.
+                Some(Mark::Permanent) => return context.scopes.get(&decl),
+                // Still on the traversal stack: the declaration
+                // depends on itself (recursive group insertion or
+                // recursive packet/struct inheritance).
+                Some(Mark::Temporary) => {
+                    result.push(
+                        Diagnostic::error()
+                            .with_message(format!(
+                                "recursive declaration of {} `{}`",
+                                decl.kind(),
+                                decl.id().unwrap()
+                            ))
+                            .with_labels(vec![decl.loc().primary()]),
+                    );
+                    return None;
+                }
+                _ => (),
+            }
+
+            // Select the namespace used to resolve the parent
+            // identifier; Group declarations have no parent.
+            let (parent_id, parent_namespace, fields) = match decl {
+                Decl::Packet { parent_id, fields, .. } => (parent_id, &scope.packets, fields),
+                Decl::Struct { parent_id, fields, .. } => (parent_id, &scope.typedef, fields),
+                Decl::Group { fields, .. } => (&None, &scope.groups, fields),
+                _ => return None,
+            };
+
+            context.visited.insert(decl, Mark::Temporary);
+            let mut lscope = decl.scope(result).unwrap();
+
+            // Iterate over Struct and Group fields.
+            for f in fields {
+                match f {
+                    Field::Group { group_id, constraints, .. } => {
+                        match scope.groups.get(group_id) {
+                            None => result.push(
+                                Diagnostic::error()
+                                    .with_message(format!(
+                                        "undeclared group identifier `{}`",
+                                        group_id
+                                    ))
+                                    .with_labels(vec![f.loc().primary()]),
+                            ),
+                            Some(group_decl) => {
+                                // Recurse to flatten the inserted group.
+                                if let Some(rscope) = bfs(group_decl, context, scope, result) {
+                                    // Inline the group fields and constraints into
+                                    // the current scope.
+                                    lscope.inline(scope, rscope, f, constraints.iter(), result)
+                                }
+                            }
+                        }
+                    }
+                    Field::Typedef { type_id, .. } => {
+                        lscope.fields.push(vec![f]);
+                        match scope.typedef.get(type_id) {
+                            None => result.push(
+                                Diagnostic::error()
+                                    .with_message(format!(
+                                        "undeclared typedef identifier `{}`",
+                                        type_id
+                                    ))
+                                    .with_labels(vec![f.loc().primary()]),
+                            ),
+                            // Recurse into referenced structs so that
+                            // recursive struct definitions are reported.
+                            Some(struct_decl @ Decl::Struct { .. }) => {
+                                bfs(struct_decl, context, scope, result);
+                            }
+                            Some(_) => (),
+                        }
+                    }
+                    _ => lscope.fields.push(vec![f]),
+                }
+            }
+
+            // Iterate over parent declaration.
+            for id in parent_id {
+                match parent_namespace.get(id) {
+                    None => result.push(
+                        Diagnostic::error()
+                            .with_message(format!("undeclared parent identifier `{}`", id))
+                            .with_labels(vec![decl.loc().primary()])
+                            .with_notes(vec![format!("hint: expected {} parent", decl.kind())]),
+                    ),
+                    Some(parent_decl) => {
+                        if let Some(rscope) = bfs(parent_decl, context, scope, result) {
+                            // Import the parent fields and constraints into the current scope.
+                            lscope.inherit(scope, rscope, decl.constraints(), result)
+                        }
+                    }
+                }
+            }
+
+            lscope.finalize(result);
+            context.list.push(decl);
+            context.visited.insert(decl, Mark::Permanent);
+            context.scopes.insert(decl, lscope);
+            context.scopes.get(&decl)
+        }
+
+        let mut context =
+            Context::<'d> { list: vec![], visited: HashMap::new(), scopes: HashMap::new() };
+
+        // Visit every top-level declaration; results are cached, so
+        // shared parents and groups are processed only once.
+        for decl in self.packets.values().chain(self.typedef.values()).chain(self.groups.values()) {
+            bfs(decl, &mut context, self, result);
+        }
+
+        self.scopes = context.scopes;
+        context.list
+    }
+}
+
+impl Field {
+    // Human-readable field kind, used in lint diagnostics
+    // (e.g. "`x` is declared as {kind} field").
+    fn kind(&self) -> &str {
+        match self {
+            // Fixed: this arm previously returned "payload"
+            // (copy-paste from the Payload arm), which produced
+            // misleading diagnostics for checksum fields.
+            Field::Checksum { .. } => "checksum",
+            Field::Padding { .. } => "padding",
+            Field::Size { .. } => "size",
+            Field::Count { .. } => "count",
+            Field::Body { .. } => "body",
+            Field::Payload { .. } => "payload",
+            Field::Fixed { .. } => "fixed",
+            Field::Reserved { .. } => "reserved",
+            Field::Group { .. } => "group",
+            Field::Array { .. } => "array",
+            Field::Scalar { .. } => "scalar",
+            Field::Typedef { .. } => "typedef",
+        }
+    }
+}
+
+// Helper for linting an enum declaration.
+// Checks that tag identifiers are unique within the enum and that
+// tag values fit within the declared bit width.
+fn lint_enum(tags: &[Tag], width: usize, result: &mut LintDiagnostics) {
+    let mut seen = HashMap::new();
+    for tag in tags {
+        // A tag identifier may only be declared once within the
+        // enum; report the previous declaration site on conflict.
+        if let Some(previous) = seen.insert(tag.id.clone(), tag) {
+            result.push(
+                Diagnostic::error()
+                    .with_message(format!("redeclaration of tag identifier `{}`", &tag.id))
+                    .with_labels(vec![
+                        tag.loc.primary(),
+                        previous.loc.secondary().with_message("first declared here"),
+                    ]),
+            )
+        }
+
+        // The tag value may not need more bits than the enum width.
+        if bit_width(tag.value) > width {
+            let max_value = (1 << width) - 1;
+            let label = tag
+                .loc
+                .primary()
+                .with_message(format!("expected maximum value of `{}`", max_value));
+            result.push(
+                Diagnostic::error().with_message("invalid literal value").with_labels(vec![label]),
+            )
+        }
+    }
+}
+
+// Helper for linting checksum fields.
+//
+// Validates a `_checksum_start_` field: the referenced field must be
+// a typedef with a checksum type, and must be declared after the
+// checksum start marker.
+fn lint_checksum(
+    scope: &Scope,
+    packet_scope: &PacketScope,
+    path: &FieldPath,
+    field_id: &str,
+    result: &mut LintDiagnostics,
+) {
+    // Checksum field must be declared before
+    // the checksum start. The field must be a typedef with
+    // a valid checksum type.
+    let checksum_loc = path.loc();
+    let field_decl = packet_scope.named.get(field_id);
+
+    match field_decl.and_then(|f| f.last()) {
+        Some(Field::Typedef { loc: field_loc, type_id, .. }) => {
+            // Check declaration type of checksum field.
+            match scope.typedef.get(type_id) {
+                // The typedef resolves to a checksum declaration: OK.
+                Some(Decl::Checksum { .. }) => (),
+                // The typedef resolves to some other declaration kind.
+                Some(decl) => result.push(
+                    Diagnostic::error()
+                        .with_message(format!("checksum start uses invalid field `{}`", field_id))
+                        .with_labels(vec![
+                            checksum_loc.primary(),
+                            field_loc.secondary().with_message(format!(
+                                "`{}` is declared with {} type `{}`, expected checksum_field",
+                                field_id,
+                                decl.kind(),
+                                type_id
+                            )),
+                        ]),
+                ),
+                // This error case will be reported when the field itself
+                // is checked.
+                None => (),
+            };
+            // Check declaration order of checksum field.
+            match field_decl.and_then(|f| f.first()) {
+                // The checksum field starts before the checksum start
+                // marker: the declaration order is invalid.
+                Some(decl) if decl.loc().start > checksum_loc.start => result.push(
+                    Diagnostic::error()
+                        .with_message("invalid checksum start declaration")
+                        .with_labels(vec![
+                            checksum_loc
+                                .primary()
+                                .with_message("checksum start precedes checksum field"),
+                            decl.loc().secondary().with_message("checksum field is declared here"),
+                        ]),
+                ),
+                _ => (),
+            }
+        }
+        // The referenced field exists but is not a typedef field.
+        Some(field) => result.push(
+            Diagnostic::error()
+                .with_message(format!("checksum start uses invalid field `{}`", field_id))
+                .with_labels(vec![
+                    checksum_loc.primary(),
+                    field.loc().secondary().with_message(format!(
+                        "`{}` is declared as {} field, expected typedef",
+                        field_id,
+                        field.kind()
+                    )),
+                ]),
+        ),
+        // The referenced field does not exist in the packet scope.
+        None => result.err_undeclared(field_id, checksum_loc),
+    }
+}
+
+// Helper for linting size fields.
+fn lint_size(
+    _scope: &Scope,
+    packet_scope: &PacketScope,
+    path: &FieldPath,
+    field_id: &str,
+    _width: usize,
+    result: &mut LintDiagnostics,
+) {
+    // Size fields should be declared before
+    // the sized field (body, payload, or array).
+    // The field must reference a valid body, payload or array
+    // field.
+
+    let size_loc = path.loc();
+
+    // Special case: `_size_(_payload_)` must match a declared
+    // `_payload_` field in the same packet scope.
+    if field_id == "_payload_" {
+        return match packet_scope.payload.as_ref().and_then(|f| f.last()) {
+            // The packet declares `_body_` instead of `_payload_`.
+            Some(Field::Body { .. }) => result.push(
+                Diagnostic::error()
+                    .with_message("size field uses undeclared payload field, did you mean _body_ ?")
+                    .with_labels(vec![size_loc.primary()]),
+            ),
+            Some(Field::Payload { .. }) => {
+                // Check declaration order: the size field must come
+                // before the payload field.
+                match packet_scope.payload.as_ref().and_then(|f| f.first()) {
+                    Some(field) if field.loc().start < size_loc.start => result.push(
+                        Diagnostic::error().with_message("invalid size field").with_labels(vec![
+                            size_loc
+                                .primary()
+                                .with_message("size field is declared after payload field"),
+                            field.loc().secondary().with_message("payload field is declared here"),
+                        ]),
+                    ),
+                    _ => (),
+                }
+            }
+            // `payload` only ever stores Body or Payload fields.
+            Some(_) => unreachable!(),
+            None => result.push(
+                Diagnostic::error()
+                    .with_message("size field uses undeclared payload field")
+                    .with_labels(vec![size_loc.primary()]),
+            ),
+        };
+    }
+    // Special case: `_size_(_body_)` must match a declared `_body_`
+    // field in the same packet scope.
+    if field_id == "_body_" {
+        return match packet_scope.payload.as_ref().and_then(|f| f.last()) {
+            // The packet declares `_payload_` instead of `_body_`.
+            Some(Field::Payload { .. }) => result.push(
+                Diagnostic::error()
+                    .with_message("size field uses undeclared body field, did you mean _payload_ ?")
+                    .with_labels(vec![size_loc.primary()]),
+            ),
+            Some(Field::Body { .. }) => {
+                // Check declaration order: the size field must come
+                // before the body field.
+                match packet_scope.payload.as_ref().and_then(|f| f.first()) {
+                    Some(field) if field.loc().start < size_loc.start => result.push(
+                        Diagnostic::error().with_message("invalid size field").with_labels(vec![
+                            size_loc
+                                .primary()
+                                .with_message("size field is declared after body field"),
+                            field.loc().secondary().with_message("body field is declared here"),
+                        ]),
+                    ),
+                    _ => (),
+                }
+            }
+            // `payload` only ever stores Body or Payload fields.
+            Some(_) => unreachable!(),
+            None => result.push(
+                Diagnostic::error()
+                    .with_message("size field uses undeclared body field")
+                    .with_labels(vec![size_loc.primary()]),
+            ),
+        };
+    }
+
+    // General case: the size field references a named array field.
+    let field = packet_scope.named.get(field_id);
+
+    match field.and_then(|f| f.last()) {
+        // A dynamic size for an array with a static size is redundant.
+        Some(Field::Array { size: Some(_), loc: array_loc, .. }) => result.push(
+            Diagnostic::warning()
+                .with_message(format!("size field uses array `{}` with static size", field_id))
+                .with_labels(vec![
+                    size_loc.primary(),
+                    array_loc.secondary().with_message(format!("`{}` is declared here", field_id)),
+                ]),
+        ),
+        Some(Field::Array { .. }) => (),
+        // The referenced field exists but is not an array.
+        Some(field) => result.push(
+            Diagnostic::error()
+                .with_message(format!("invalid `{}` field type", field_id))
+                .with_labels(vec![
+                    field.loc().primary().with_message(format!(
+                        "`{}` is declared as {}",
+                        field_id,
+                        field.kind()
+                    )),
+                    size_loc
+                        .secondary()
+                        .with_message(format!("`{}` is used here as array", field_id)),
+                ]),
+        ),
+
+        None => result.err_undeclared(field_id, size_loc),
+    };
+    // Check declaration order: the size field must come before the
+    // first fragment of the referenced field.
+    match field.and_then(|f| f.first()) {
+        Some(field) if field.loc().start < size_loc.start => {
+            result.push(Diagnostic::error().with_message("invalid size field").with_labels(vec![
+                    size_loc
+                        .primary()
+                        .with_message(format!("size field is declared after field `{}`", field_id)),
+                    field
+                        .loc()
+                        .secondary()
+                        .with_message(format!("`{}` is declared here", field_id)),
+                ]))
+        }
+        _ => (),
+    }
+}
+
+// Helper for linting count fields.
+// Count fields should be declared before the counted field, which
+// must be a valid array field; a warning is raised when the array
+// already has a static size.
+fn lint_count(
+    _scope: &Scope,
+    packet_scope: &PacketScope,
+    path: &FieldPath,
+    field_id: &str,
+    _width: usize,
+    result: &mut LintDiagnostics,
+) {
+    let count_loc = path.loc();
+    let field_decl = packet_scope.named.get(field_id);
+
+    // Validate the type of the referenced field.
+    match field_decl.and_then(|f| f.last()) {
+        None => result.err_undeclared(field_id, count_loc),
+        Some(Field::Array { size: None, .. }) => (),
+        // Counting an array whose size is statically known is
+        // redundant.
+        Some(Field::Array { size: Some(_), loc: array_loc, .. }) => result.push(
+            Diagnostic::warning()
+                .with_message(format!("count field uses array `{}` with static size", field_id))
+                .with_labels(vec![
+                    count_loc.primary(),
+                    array_loc.secondary().with_message(format!("`{}` is declared here", field_id)),
+                ]),
+        ),
+        // The referenced field exists but is not an array.
+        Some(other) => result.push(
+            Diagnostic::error()
+                .with_message(format!("invalid `{}` field type", field_id))
+                .with_labels(vec![
+                    other.loc().primary().with_message(format!(
+                        "`{}` is declared as {}",
+                        field_id,
+                        other.kind()
+                    )),
+                    count_loc
+                        .secondary()
+                        .with_message(format!("`{}` is used here as array", field_id)),
+                ]),
+        ),
+    };
+
+    // Validate the declaration order: the count field must appear
+    // before the first fragment of the referenced field.
+    if let Some(first) = field_decl.and_then(|f| f.first()) {
+        if first.loc().start < count_loc.start {
+            result.push(Diagnostic::error().with_message("invalid count field").with_labels(vec![
+                count_loc.primary().with_message(format!(
+                    "count field is declared after field `{}`",
+                    field_id
+                )),
+                first
+                    .loc()
+                    .secondary()
+                    .with_message(format!("`{}` is declared here", field_id)),
+            ]))
+        }
+    }
+}
+
+// Helper for linting fixed fields.
+#[allow(clippy::too_many_arguments)]
+fn lint_fixed(
+    scope: &Scope,
+    _packet_scope: &PacketScope,
+    path: &FieldPath,
+    width: &Option<usize>,
+    value: &Option<usize>,
+    enum_id: &Option<String>,
+    tag_id: &Option<String>,
+    result: &mut LintDiagnostics,
+) {
+    // By parsing constraint, we already have that either
+    // (width and value) or (enum_id and tag_id) are Some.
+
+    let fixed_loc = path.loc();
+
+    if width.is_some() {
+        // The value of a fixed field must fit within the declared
+        // bit width.
+        if bit_width(value.unwrap()) > width.unwrap() {
+            result.push(Diagnostic::error().with_message("invalid integer literal").with_labels(
+                vec![fixed_loc.primary().with_message(format!(
+                    "expected maximum value of `{}`",
+                    (1 << width.unwrap()) - 1
+                ))],
+            ))
+        }
+    } else {
+        // The fixed field should reference a valid enum id and tag id
+        // association.
+        match scope.typedef.get(enum_id.as_ref().unwrap()) {
+            Some(Decl::Enum { tags, .. }) => {
+                // The tag must be declared in the referenced enum.
+                match tags.iter().find(|t| &t.id == tag_id.as_ref().unwrap()) {
+                    Some(_) => (),
+                    None => result.push(
+                        Diagnostic::error()
+                            .with_message(format!(
+                                "undeclared enum tag `{}`",
+                                tag_id.as_ref().unwrap()
+                            ))
+                            .with_labels(vec![fixed_loc.primary()]),
+                    ),
+                }
+            }
+            // The typedef exists but is not an enum declaration.
+            Some(decl) => result.push(
+                Diagnostic::error()
+                    .with_message(format!(
+                        "fixed field uses invalid typedef `{}`",
+                        decl.id().unwrap()
+                    ))
+                    .with_labels(vec![fixed_loc.primary().with_message(format!(
+                        "{} has kind {}, expected enum",
+                        decl.id().unwrap(),
+                        decl.kind(),
+                    ))]),
+            ),
+            // The enum identifier is not declared at all.
+            None => result.push(
+                Diagnostic::error()
+                    .with_message(format!("undeclared enum type `{}`", enum_id.as_ref().unwrap()))
+                    .with_labels(vec![fixed_loc.primary()]),
+            ),
+        }
+    }
+}
+
+// Helper for linting array fields.
+#[allow(clippy::too_many_arguments)]
+fn lint_array(
+    scope: &Scope,
+    _packet_scope: &PacketScope,
+    path: &FieldPath,
+    _width: &Option<usize>,
+    type_id: &Option<String>,
+    _size_modifier: &Option<String>,
+    _size: &Option<usize>,
+    result: &mut LintDiagnostics,
+) {
+    // By parsing constraint, we have that width and type_id are mutually
+    // exclusive, as well as size_modifier and size.
+    // type_id must reference a valid enum or packet type.
+    // TODO(hchataing) unbounded arrays should have a matching size
+    // or count field
+
+    let array_loc = path.loc();
+
+    // Scalar element types (width) need no validation; only named
+    // element types are checked against the typedef namespace.
+    let element_type = match type_id {
+        Some(id) => id,
+        None => return,
+    };
+
+    match scope.typedef.get(element_type) {
+        // Enums, structs, and custom fields are the only valid
+        // array element types.
+        Some(Decl::Enum { .. }) | Some(Decl::Struct { .. }) | Some(Decl::CustomField { .. }) => (),
+        Some(decl) => result.push(
+            Diagnostic::error()
+                .with_message(format!(
+                    "array field uses invalid {} element type `{}`",
+                    decl.kind(),
+                    element_type
+                ))
+                .with_labels(vec![array_loc.primary()])
+                .with_notes(vec!["hint: expected enum, struct, custom_field".to_owned()]),
+        ),
+        None => result.push(
+            Diagnostic::error()
+                .with_message(format!(
+                    "array field uses undeclared element type `{}`",
+                    element_type
+                ))
+                .with_labels(vec![array_loc.primary()])
+                .with_notes(vec!["hint: expected enum, struct, custom_field".to_owned()]),
+        ),
+    }
+}
+
+// Helper for linting typedef fields.
+// The typedef field must reference a valid struct, enum,
+// custom_field, or checksum type.
+// TODO(hchataing) checksum fields should have a matching checksum start
+fn lint_typedef(
+    scope: &Scope,
+    _packet_scope: &PacketScope,
+    path: &FieldPath,
+    type_id: &str,
+    result: &mut LintDiagnostics,
+) {
+    let typedef_loc = path.loc();
+    // Shared hint note appended to both error diagnostics below.
+    let hint = || vec!["hint: expected enum, struct, custom_field, checksum".to_owned()];
+
+    match scope.typedef.get(type_id) {
+        // All four of these declaration kinds are valid typedef
+        // field types.
+        Some(
+            Decl::Enum { .. }
+            | Decl::Struct { .. }
+            | Decl::CustomField { .. }
+            | Decl::Checksum { .. },
+        ) => (),
+
+        Some(decl) => result.push(
+            Diagnostic::error()
+                .with_message(format!(
+                    "typedef field uses invalid {} element type `{}`",
+                    decl.kind(),
+                    type_id
+                ))
+                .with_labels(vec![typedef_loc.primary()])
+                .with_notes(hint()),
+        ),
+        None => result.push(
+            Diagnostic::error()
+                .with_message(format!("typedef field uses undeclared element type `{}`", type_id))
+                .with_labels(vec![typedef_loc.primary()])
+                .with_notes(hint()),
+        ),
+    }
+}
+
+// Helper for linting a field declaration.
+//
+// Dispatches to the lint helper specific to the field kind. `field`
+// is the path of the field within the packet scope; its last element
+// is the field itself. Group fields are flattened by Scope::finalize
+// before linting, hence unreachable here.
+fn lint_field(
+    scope: &Scope,
+    packet_scope: &PacketScope,
+    field: &FieldPath,
+    result: &mut LintDiagnostics,
+) {
+    match field.last().unwrap() {
+        Field::Checksum { field_id, .. } => {
+            lint_checksum(scope, packet_scope, field, field_id, result)
+        }
+        Field::Size { field_id, width, .. } => {
+            lint_size(scope, packet_scope, field, field_id, *width, result)
+        }
+        Field::Count { field_id, width, .. } => {
+            lint_count(scope, packet_scope, field, field_id, *width, result)
+        }
+        Field::Fixed { width, value, enum_id, tag_id, .. } => {
+            lint_fixed(scope, packet_scope, field, width, value, enum_id, tag_id, result)
+        }
+        Field::Array { width, type_id, size_modifier, size, .. } => {
+            lint_array(scope, packet_scope, field, width, type_id, size_modifier, size, result)
+        }
+        Field::Typedef { type_id, .. } => lint_typedef(scope, packet_scope, field, type_id, result),
+        // These field kinds need no additional validation.
+        Field::Padding { .. }
+        | Field::Reserved { .. }
+        | Field::Scalar { .. }
+        | Field::Body { .. }
+        | Field::Payload { .. } => (),
+        Field::Group { .. } => unreachable!(),
+    }
+}
+
+// Helper for linting a packet declaration.
+//
+// Warns when field constraints are present without a parent
+// declaration, then lints every field of the pre-computed packet
+// scope.
+fn lint_packet(
+    scope: &Scope,
+    decl: &Decl,
+    id: &str,
+    loc: &SourceRange,
+    constraints: &[Constraint],
+    parent_id: &Option<String>,
+    result: &mut LintDiagnostics,
+) {
+    // The parent declaration is checked by Scope::finalize.
+    // The local scope is also generated by Scope::finalize.
+    // TODO(hchataing) check parent payload size constraint: compute an upper
+    // bound of the payload size and check against the encoded maximum size.
+
+    if parent_id.is_none() && !constraints.is_empty() {
+        // Constraint list should be empty when there is
+        // no inheritance.
+        result.push(
+            Diagnostic::warning()
+                .with_message(format!(
+                    "packet `{}` has field constraints, but no parent declaration",
+                    id
+                ))
+                .with_labels(vec![loc.primary()])
+                .with_notes(vec!["hint: expected parent declaration".to_owned()]),
+        )
+    }
+
+    // Retrieve pre-computed packet scope.
+    // Scope validation was done before, so it must exist.
+    // `get` already returns a reference, so the previous extra `&`
+    // (which produced a needless `&&PacketScope`) was dropped.
+    let packet_scope = scope.scopes.get(&decl).unwrap();
+
+    for field in packet_scope.fields.iter() {
+        lint_field(scope, packet_scope, field, result)
+    }
+}
+
+// Helper for linting a struct declaration.
+//
+// Warns when field constraints are present without a parent
+// declaration, then lints every field of the pre-computed struct
+// scope.
+fn lint_struct(
+    scope: &Scope,
+    decl: &Decl,
+    id: &str,
+    loc: &SourceRange,
+    constraints: &[Constraint],
+    parent_id: &Option<String>,
+    result: &mut LintDiagnostics,
+) {
+    // The parent declaration is checked by Scope::finalize.
+    // The local scope is also generated by Scope::finalize.
+    // TODO(hchataing) check parent payload size constraint: compute an upper
+    // bound of the payload size and check against the encoded maximum size.
+
+    if parent_id.is_none() && !constraints.is_empty() {
+        // Constraint list should be empty when there is
+        // no inheritance.
+        result.push(
+            Diagnostic::warning()
+                .with_message(format!(
+                    "struct `{}` has field constraints, but no parent declaration",
+                    id
+                ))
+                .with_labels(vec![loc.primary()])
+                .with_notes(vec!["hint: expected parent declaration".to_owned()]),
+        )
+    }
+
+    // Retrieve pre-computed packet scope.
+    // Scope validation was done before, so it must exist.
+    // `get` already returns a reference, so the previous extra `&`
+    // (which produced a needless `&&PacketScope`) was dropped.
+    let packet_scope = scope.scopes.get(&decl).unwrap();
+
+    for field in packet_scope.fields.iter() {
+        lint_field(scope, packet_scope, field, result)
+    }
+}
+
+impl Decl {
+    // Returns an iterator over the constraints of a Packet or Struct
+    // declaration; empty for every other declaration kind.
+    fn constraints(&self) -> impl Iterator<Item = &Constraint> {
+        match self {
+            Decl::Packet { constraints, .. } | Decl::Struct { constraints, .. } => {
+                Some(constraints.iter())
+            }
+            _ => None,
+        }
+        .into_iter()
+        .flatten()
+    }
+
+    // Builds the local scope of a Packet, Struct, or Group
+    // declaration by inserting each of its fields. Returns None for
+    // declarations that have no fields.
+    fn scope<'d>(&'d self, result: &mut LintDiagnostics) -> Option<PacketScope<'d>> {
+        match self {
+            Decl::Packet { fields, .. }
+            | Decl::Struct { fields, .. }
+            | Decl::Group { fields, .. } => {
+                let mut scope = PacketScope {
+                    checksums: HashMap::new(),
+                    sizes: HashMap::new(),
+                    payload: None,
+                    named: HashMap::new(),
+                    groups: HashMap::new(),
+
+                    fields: Vec::new(),
+                    constraints: HashMap::new(),
+                    all_fields: HashMap::new(),
+                    all_constraints: HashMap::new(),
+                };
+                for field in fields {
+                    scope.insert(field, result)
+                }
+                Some(scope)
+            }
+            _ => None,
+        }
+    }
+
+    // Lints a single declaration against the fully resolved scope.
+    fn lint<'d>(&'d self, scope: &Scope<'d>, result: &mut LintDiagnostics) {
+        match self {
+            Decl::Checksum { .. } | Decl::CustomField { .. } => (),
+            Decl::Enum { tags, width, .. } => lint_enum(tags, *width, result),
+            Decl::Packet { id, loc, constraints, parent_id, .. } => {
+                lint_packet(scope, self, id, loc, constraints, parent_id, result)
+            }
+            Decl::Struct { id, loc, constraints, parent_id, .. } => {
+                lint_struct(scope, self, id, loc, constraints, parent_id, result)
+            }
+            // Groups are finalized before linting, to make sure
+            // potential errors are raised only once.
+            Decl::Group { .. } => (),
+            Decl::Test { .. } => (),
+        }
+    }
+
+    // Human-readable declaration kind, used in diagnostics.
+    fn kind(&self) -> &str {
+        match self {
+            Decl::Checksum { .. } => "checksum",
+            Decl::CustomField { .. } => "custom field",
+            Decl::Enum { .. } => "enum",
+            Decl::Packet { .. } => "packet",
+            Decl::Struct { .. } => "struct",
+            Decl::Group { .. } => "group",
+            Decl::Test { .. } => "test",
+        }
+    }
+}
+
+impl Grammar {
+    // Builds the global scope of the grammar: gathers top-level
+    // declarations into their namespaces (reporting redeclarations),
+    // computes the local scope of each declaration, then resolves
+    // group insertions and packet/struct inheritance via finalize.
+    fn scope<'d>(&'d self, result: &mut LintDiagnostics) -> Scope<'d> {
+        let mut scope = Scope {
+            groups: HashMap::new(),
+            packets: HashMap::new(),
+            typedef: HashMap::new(),
+            scopes: HashMap::new(),
+        };
+
+        // Gather top-level declarations.
+        // Validate the top-level scopes (Group, Packet, Typedef).
+        //
+        // TODO: switch to try_insert when stable
+        for decl in &self.declarations {
+            if let Some((id, namespace)) = match decl {
+                // Checksums, custom fields, structs, and enums all
+                // share the typedef namespace.
+                Decl::Checksum { id, .. }
+                | Decl::CustomField { id, .. }
+                | Decl::Struct { id, .. }
+                | Decl::Enum { id, .. } => Some((id, &mut scope.typedef)),
+                Decl::Group { id, .. } => Some((id, &mut scope.groups)),
+                Decl::Packet { id, .. } => Some((id, &mut scope.packets)),
+                _ => None,
+            } {
+                // A previous entry under the same identifier is a
+                // redeclaration error.
+                if let Some(prev) = namespace.insert(id.clone(), decl) {
+                    result.err_redeclared(id, decl.kind(), decl.loc(), prev.loc())
+                }
+            }
+            if let Some(lscope) = decl.scope(result) {
+                scope.scopes.insert(decl, lscope);
+            }
+        }
+
+        scope.finalize(result);
+        scope
+    }
+}
+
+impl Lintable for Grammar {
+    // Lints the whole grammar: builds the global scope first, then
+    // lints each declaration against it.
+    fn lint(&self) -> LintDiagnostics {
+        let mut result = LintDiagnostics::new();
+        let scope = self.scope(&mut result);
+        // Stop early if scope construction reported any diagnostics:
+        // per-declaration lints rely on the pre-computed scopes.
+        if !result.diagnostics.is_empty() {
+            return result;
+        }
+        for decl in &self.declarations {
+            decl.lint(&scope, &mut result)
+        }
+        result
+    }
+}
diff --git a/tools/pdl/src/main.rs b/tools/pdl/src/main.rs
new file mode 100644
index 00000000000..5f488fd61ed
--- /dev/null
+++ b/tools/pdl/src/main.rs
@@ -0,0 +1,51 @@
+//! PDL parser and linter.
+
+extern crate codespan_reporting;
+extern crate pest;
+#[macro_use]
+extern crate pest_derive;
+extern crate serde;
+extern crate serde_json;
+extern crate structopt;
+
+use codespan_reporting::term;
+use codespan_reporting::term::termcolor;
+use structopt::StructOpt;
+
+mod ast;
+mod lint;
+mod parser;
+
+use crate::lint::Lintable;
+
+// Command line options of the pdl-parser tool.
+// Note: plain `//` comments are used deliberately; `///` doc
+// comments on structopt fields would be picked up as help text.
+#[derive(Debug, StructOpt)]
+#[structopt(name = "pdl-parser", about = "Packet Description Language parser tool.")]
+struct Opt {
+    // NOTE(review): `long = "--version"` spells the dashes out; clap
+    // trims leading dashes from long names so this resolves to
+    // `--version`, but `long = "version"` would be clearer — confirm.
+    #[structopt(short, long = "--version", help = "Print tool version and exit.")]
+    version: bool,
+
+    // Path of the PDL source file to parse and lint.
+    #[structopt(name = "FILE", help = "Input file.")]
+    input_file: String,
+}
+
+// Program entry point: parses the input PDL file, runs the linter,
+// and serializes the parsed AST to JSON on stdout. Parse errors are
+// reported on stderr.
+fn main() {
+    let opt = Opt::from_args();
+
+    if opt.version {
+        println!("Packet Description Language parser version 1.0");
+        return;
+    }
+
+    let mut sources = ast::SourceDatabase::new();
+    match parser::parse_file(&mut sources, opt.input_file) {
+        Ok(grammar) => {
+            // Lint results are printed best-effort; the JSON AST is
+            // emitted regardless of lint warnings or errors.
+            let _ = grammar.lint().print(&sources, termcolor::ColorChoice::Always);
+            println!("{}", serde_json::to_string_pretty(&grammar).unwrap())
+        }
+        Err(err) => {
+            let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Always);
+            let config = term::Config::default();
+            // Use `let _ =` rather than bare `_ =` for consistency
+            // with the Ok branch and compatibility with toolchains
+            // older than Rust 1.59 (destructuring assignment).
+            let _ = term::emit(&mut writer.lock(), &config, &sources, &err);
+        }
+    }
+}
diff --git a/tools/pdl/src/parser.rs b/tools/pdl/src/parser.rs
new file mode 100644
index 00000000000..bea80e00309
--- /dev/null
+++ b/tools/pdl/src/parser.rs
@@ -0,0 +1,530 @@
+use super::ast;
+use codespan_reporting::diagnostic::Diagnostic;
+use codespan_reporting::files;
+use pest::iterators::{Pair, Pairs};
+use pest::{Parser, Token};
+use std::iter::{Filter, Peekable};
+
+// Generate the PDL parser.
+// TODO: use #[grammar = "pdl.pest"]
+// currently not possible because CARGO_MANIFEST_DIR is not set
+// in soong environment.
+/// Pest-generated parser for PDL source files.
+///
+/// NOTE: this inline grammar is a duplicate of `src/pdl.pest`; keep
+/// the two copies in sync until `#[grammar = ...]` can be used.
+#[derive(Parser)]
+#[grammar_inline = r#"
+WHITESPACE = _{ " " | "\n" }
+COMMENT = { block_comment | line_comment }
+
+block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
+line_comment = { "//" ~ (!"\n" ~ ANY)* }
+
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+hexdigit = { digit | 'a'..'f' | 'A'..'F' }
+alphanum = { alpha | digit | "_" }
+
+identifier = @{ alpha ~ alphanum* }
+payload_identifier = @{ "_payload_" }
+body_identifier = @{ "_body_" }
+intvalue = @{ digit+ }
+hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
+integer = @{ hexvalue | intvalue }
+string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
+size_modifier = @{
+    ("+"|"-"|"*"|"/") ~ (digit|"+"|"-"|"*"|"/")+
+}
+
+endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
+
+enum_tag = { identifier ~ "=" ~ integer }
+enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
+enum_declaration = {
+    "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
+        enum_tag_list ~
+    "}"
+}
+
+constraint = { identifier ~ "=" ~ (identifier|integer) }
+constraint_list = { constraint ~ ("," ~ constraint)* }
+
+checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
+padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
+size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier)  ~ ")" ~ ":" ~ integer }
+count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
+body_field = @{ "_body_" }
+payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
+fixed_field = { "_fixed_" ~ "=" ~ (
+    (integer ~ ":" ~ integer) |
+    (identifier ~ ":" ~ identifier)
+)}
+reserved_field = { "_reserved_" ~ ":" ~ integer }
+array_field = { identifier ~ ":" ~ (integer|identifier) ~
+    "[" ~ (size_modifier|integer)? ~ "]"
+}
+scalar_field = { identifier ~ ":" ~ integer }
+typedef_field = { identifier ~ ":" ~ identifier }
+group_field = { identifier ~ ("{" ~ constraint_list ~ "}")? }
+
+field = _{
+    checksum_field |
+    padding_field |
+    size_field |
+    count_field |
+    body_field |
+    payload_field |
+    fixed_field |
+    reserved_field |
+    array_field |
+    scalar_field |
+    typedef_field |
+    group_field
+}
+field_list = { field ~ ("," ~ field)* ~ ","? }
+
+packet_declaration = {
+   "packet" ~ identifier ~
+        (":" ~ identifier)? ~
+           ("(" ~ constraint_list ~ ")")? ~
+    "{" ~
+        field_list? ~
+    "}"
+}
+
+struct_declaration = {
+    "struct" ~ identifier ~
+        (":" ~ identifier)? ~
+           ("(" ~ constraint_list ~ ")")? ~
+    "{" ~
+        field_list? ~
+    "}"
+}
+
+group_declaration = {
+    "group" ~ identifier ~ "{" ~ field_list ~ "}"
+}
+
+checksum_declaration = {
+    "checksum" ~ identifier ~ ":" ~ integer ~ string
+}
+
+custom_field_declaration = {
+    "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
+}
+
+test_case = { string }
+test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
+test_declaration = {
+    "test" ~ identifier ~ "{" ~
+        test_case_list ~
+    "}"
+}
+
+declaration = _{
+    enum_declaration |
+    packet_declaration |
+    struct_declaration |
+    group_declaration |
+    checksum_declaration |
+    custom_field_declaration |
+    test_declaration
+}
+
+grammar = {
+    SOI ~
+    endianness_declaration? ~
+    declaration* ~
+    EOI
+}
+"#]
+pub struct PDLParser;
+
+// Convenience aliases over the pest parse-tree types.
+type Node<'i> = Pair<'i, Rule>;
+// Iterator over a node's children with COMMENT nodes filtered out.
+type NodeIterator<'i> = Peekable<Filter<Pairs<'i, Rule>, fn(&Node<'i>) -> bool>>;
+// (file id, line start offsets) pair used to translate byte offsets
+// into `ast::SourceLocation`s.
+type Context<'a> = (ast::FileId, &'a Vec<usize>);
+
+/// Helper accessors implemented on parse-tree nodes.
+trait Helpers<'i> {
+    /// Returns the node's children, skipping comment nodes.
+    fn children(self) -> NodeIterator<'i>;
+    /// Converts the node's span into a `SourceRange` for diagnostics.
+    fn as_loc(&self, context: &Context) -> ast::SourceRange;
+    /// Returns the node's matched text as an owned string.
+    fn as_string(&self) -> String;
+    /// Parses the node's text as a decimal or hexadecimal integer.
+    fn as_usize(&self) -> Result<usize, String>;
+}
+
+impl<'i> Helpers<'i> for Node<'i> {
+    /// Returns the node's children, skipping COMMENT nodes.
+    fn children(self) -> NodeIterator<'i> {
+        self.into_inner().filter((|n| n.as_rule() != Rule::COMMENT) as fn(&Self) -> bool).peekable()
+    }
+
+    /// Converts the node's span into a `SourceRange` for diagnostics.
+    fn as_loc(&self, context: &Context) -> ast::SourceRange {
+        let span = self.as_span();
+        ast::SourceRange {
+            file: context.0,
+            start: ast::SourceLocation::new(span.start_pos().pos(), context.1),
+            end: ast::SourceLocation::new(span.end_pos().pos(), context.1),
+        }
+    }
+
+    /// Returns the node's matched text as an owned string.
+    fn as_string(&self) -> String {
+        self.as_str().to_owned()
+    }
+
+    /// Parses the node's text as an integer literal.
+    fn as_usize(&self) -> Result<usize, String> {
+        let text = self.as_str();
+        // Bug fix: the grammar rule `hexvalue` accepts both "0x" and
+        // "0X" prefixes, but only "0x" was stripped here, so uppercase
+        // "0X.." literals fell through to the decimal branch and failed.
+        if let Some(num) = text.strip_prefix("0x").or_else(|| text.strip_prefix("0X")) {
+            usize::from_str_radix(num, 16)
+                .map_err(|_| format!("cannot convert '{}' to usize", text))
+        } else {
+            #[allow(clippy::from_str_radix_10)]
+            usize::from_str_radix(text, 10)
+                .map_err(|_| format!("cannot convert '{}' to usize", text))
+        }
+    }
+}
+
+/// Builds an error for a node whose rule does not match `expected`.
+fn err_unexpected_rule<T>(expected: Rule, found: Rule) -> Result<T, String> {
+    Err(format!("expected rule {:?}, got {:?}", expected, found))
+}
+
+/// Builds an error for a missing node where `expected` was required.
+fn err_missing_rule<T>(expected: Rule) -> Result<T, String> {
+    Err(format!("expected rule {:?}, got nothing", expected))
+}
+
+/// Consumes the next node and checks that it matches `rule`.
+fn expect<'i>(iter: &mut NodeIterator<'i>, rule: Rule) -> Result<Node<'i>, String> {
+    match iter.next() {
+        Some(node) if node.as_rule() == rule => Ok(node),
+        Some(node) => err_unexpected_rule(rule, node.as_rule()),
+        None => err_missing_rule(rule),
+    }
+}
+
+/// Consumes and returns the next node only if it matches `rule`.
+fn maybe<'i>(iter: &mut NodeIterator<'i>, rule: Rule) -> Option<Node<'i>> {
+    iter.next_if(|n| n.as_rule() == rule)
+}
+
+/// Consumes the next node as an identifier and returns its text.
+fn parse_identifier(iter: &mut NodeIterator<'_>) -> Result<String, String> {
+    expect(iter, Rule::identifier).map(|n| n.as_string())
+}
+
+/// Consumes the next node as an integer literal and returns its value.
+fn parse_integer(iter: &mut NodeIterator<'_>) -> Result<usize, String> {
+    expect(iter, Rule::integer).and_then(|n| n.as_usize())
+}
+
+/// Consumes an optional identifier node, returning `None` when absent.
+fn parse_identifier_opt(iter: &mut NodeIterator<'_>) -> Result<Option<String>, String> {
+    Ok(maybe(iter, Rule::identifier).map(|n| n.as_string()))
+}
+
+/// Consumes an optional integer node, returning `None` when absent.
+fn parse_integer_opt(iter: &mut NodeIterator<'_>) -> Result<Option<usize>, String> {
+    maybe(iter, Rule::integer).map(|n| n.as_usize()).transpose()
+}
+
+/// Consumes the next node, which must be either an identifier or an
+/// integer, and returns it as an (identifier, integer) pair with
+/// exactly one side populated.
+fn parse_identifier_or_integer(
+    iter: &mut NodeIterator<'_>,
+) -> Result<(Option<String>, Option<usize>), String> {
+    match iter.next() {
+        Some(n) if n.as_rule() == Rule::identifier => Ok((Some(n.as_string()), None)),
+        Some(n) if n.as_rule() == Rule::integer => Ok((None, Some(n.as_usize()?))),
+        Some(n) => Err(format!(
+            "expected rule {:?} or {:?}, got {:?}",
+            Rule::identifier,
+            Rule::integer,
+            n.as_rule()
+        )),
+        None => {
+            Err(format!("expected rule {:?} or {:?}, got nothing", Rule::identifier, Rule::integer))
+        }
+    }
+}
+
+/// Consumes the next node as a string literal and returns its text
+/// (surrounding quotes included, as matched by the `string` rule).
+fn parse_string(iter: &mut NodeIterator<'_>) -> Result<String, String> {
+    expect(iter, Rule::string).map(|n| n.as_string())
+}
+
+/// Parses an atomic expression node: an identifier or integer literal.
+fn parse_atomic_expr(iter: &mut NodeIterator<'_>, context: &Context) -> Result<ast::Expr, String> {
+    match iter.next() {
+        Some(n) if n.as_rule() == Rule::identifier => {
+            Ok(ast::Expr::Identifier { loc: n.as_loc(context), name: n.as_string() })
+        }
+        Some(n) if n.as_rule() == Rule::integer => {
+            Ok(ast::Expr::Integer { loc: n.as_loc(context), value: n.as_usize()? })
+        }
+        Some(n) => Err(format!(
+            "expected rule {:?} or {:?}, got {:?}",
+            Rule::identifier,
+            Rule::integer,
+            n.as_rule()
+        )),
+        None => {
+            Err(format!("expected rule {:?} or {:?}, got nothing", Rule::identifier, Rule::integer))
+        }
+    }
+}
+
+/// Consumes an optional size modifier node (e.g. "+2") when present.
+fn parse_size_modifier_opt(iter: &mut NodeIterator<'_>) -> Option<String> {
+    maybe(iter, Rule::size_modifier).map(|n| n.as_string())
+}
+
+/// Parses the endianness declaration found at the start of a PDL file.
+fn parse_endianness(node: Node<'_>, context: &Context) -> Result<ast::Endianness, String> {
+    if node.as_rule() != Rule::endianness_declaration {
+        return err_unexpected_rule(Rule::endianness_declaration, node.as_rule());
+    }
+    // The grammar rule only matches these two exact keywords.
+    let value = match node.as_str() {
+        "little_endian_packets" => ast::EndiannessValue::LittleEndian,
+        "big_endian_packets" => ast::EndiannessValue::BigEndian,
+        _ => unreachable!(),
+    };
+    Ok(ast::Endianness { loc: node.as_loc(context), value })
+}
+
+/// Parses a single constraint node of the form `id = value`.
+fn parse_constraint(node: Node<'_>, context: &Context) -> Result<ast::Constraint, String> {
+    if node.as_rule() != Rule::constraint {
+        err_unexpected_rule(Rule::constraint, node.as_rule())
+    } else {
+        let loc = node.as_loc(context);
+        let mut children = node.children();
+        let id = parse_identifier(&mut children)?;
+        let value = parse_atomic_expr(&mut children, context)?;
+        Ok(ast::Constraint { id, loc, value })
+    }
+}
+
+/// Parses an optional `constraint_list` node, returning an empty
+/// vector when the node is absent.
+fn parse_constraint_list_opt(
+    iter: &mut NodeIterator<'_>,
+    context: &Context,
+) -> Result<Vec<ast::Constraint>, String> {
+    maybe(iter, Rule::constraint_list)
+        .map_or(Ok(vec![]), |n| n.children().map(|n| parse_constraint(n, context)).collect())
+}
+
+/// Parses a single enum tag node of the form `name = value`.
+fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<ast::Tag, String> {
+    match node.as_rule() {
+        Rule::enum_tag => {
+            let loc = node.as_loc(context);
+            let mut children = node.children();
+            let id = parse_identifier(&mut children)?;
+            let value = parse_integer(&mut children)?;
+            Ok(ast::Tag { id, loc, value })
+        }
+        found => err_unexpected_rule(Rule::enum_tag, found),
+    }
+}
+
+/// Parses a required `enum_tag_list` node into a list of tags.
+fn parse_enum_tag_list(
+    iter: &mut NodeIterator<'_>,
+    context: &Context,
+) -> Result<Vec<ast::Tag>, String> {
+    expect(iter, Rule::enum_tag_list)
+        .and_then(|n| n.children().map(|n| parse_enum_tag(n, context)).collect())
+}
+
+/// Converts a `*_field` parse node into the corresponding `ast::Field`
+/// variant. Returns an error for any node that is not a field rule.
+fn parse_field(node: Node<'_>, context: &Context) -> Result<ast::Field, String> {
+    let loc = node.as_loc(context);
+    let rule = node.as_rule();
+    let mut children = node.children();
+    Ok(match rule {
+        Rule::checksum_field => {
+            let field_id = parse_identifier(&mut children)?;
+            ast::Field::Checksum { loc, field_id }
+        }
+        Rule::padding_field => {
+            let width = parse_integer(&mut children)?;
+            ast::Field::Padding { loc, width }
+        }
+        Rule::size_field => {
+            // The size target may be a named field, `_payload_`, or
+            // `_body_`; all three are stored as their literal text.
+            let field_id = match children.next() {
+                Some(n) if n.as_rule() == Rule::identifier => n.as_string(),
+                Some(n) if n.as_rule() == Rule::payload_identifier => n.as_string(),
+                Some(n) if n.as_rule() == Rule::body_identifier => n.as_string(),
+                Some(n) => err_unexpected_rule(Rule::identifier, n.as_rule())?,
+                None => err_missing_rule(Rule::identifier)?,
+            };
+            let width = parse_integer(&mut children)?;
+            ast::Field::Size { loc, field_id, width }
+        }
+        Rule::count_field => {
+            let field_id = parse_identifier(&mut children)?;
+            let width = parse_integer(&mut children)?;
+            ast::Field::Count { loc, field_id, width }
+        }
+        Rule::body_field => ast::Field::Body { loc },
+        Rule::payload_field => {
+            let size_modifier = parse_size_modifier_opt(&mut children);
+            ast::Field::Payload { loc, size_modifier }
+        }
+        Rule::fixed_field => {
+            // Either `integer : integer` (value and width) or
+            // `identifier : identifier` (tag and enum type); each
+            // helper call fills exactly one side of the pair.
+            let (tag_id, value) = parse_identifier_or_integer(&mut children)?;
+            let (enum_id, width) = parse_identifier_or_integer(&mut children)?;
+            ast::Field::Fixed { loc, enum_id, tag_id, width, value }
+        }
+        Rule::reserved_field => {
+            let width = parse_integer(&mut children)?;
+            ast::Field::Reserved { loc, width }
+        }
+        Rule::array_field => {
+            let id = parse_identifier(&mut children)?;
+            let (type_id, width) = parse_identifier_or_integer(&mut children)?;
+            // The optional bracket content is either a fixed element
+            // count or a size modifier; absence means an unsized array.
+            let (size, size_modifier) = match children.next() {
+                Some(n) if n.as_rule() == Rule::integer => (Some(n.as_usize()?), None),
+                Some(n) if n.as_rule() == Rule::size_modifier => (None, Some(n.as_string())),
+                Some(n) => {
+                    return Err(format!(
+                        "expected rule {:?} or {:?}, got {:?}",
+                        Rule::integer,
+                        Rule::size_modifier,
+                        n.as_rule()
+                    ))
+                }
+                None => (None, None),
+            };
+            ast::Field::Array { loc, id, type_id, width, size, size_modifier }
+        }
+        Rule::scalar_field => {
+            let id = parse_identifier(&mut children)?;
+            let width = parse_integer(&mut children)?;
+            ast::Field::Scalar { loc, id, width }
+        }
+        Rule::typedef_field => {
+            let id = parse_identifier(&mut children)?;
+            let type_id = parse_identifier(&mut children)?;
+            ast::Field::Typedef { loc, id, type_id }
+        }
+        Rule::group_field => {
+            let group_id = parse_identifier(&mut children)?;
+            let constraints = parse_constraint_list_opt(&mut children, context)?;
+            ast::Field::Group { loc, group_id, constraints }
+        }
+        _ => return Err(format!("expected rule *_field, got {:?}", rule)),
+    })
+}
+
+/// Parses a required `field_list` node into a list of fields.
+// The explicit `<'i>` lifetime parameters were unnecessary (clippy
+// `needless_lifetimes`) and inconsistent with the other helpers,
+// which all take `&mut NodeIterator<'_>`.
+fn parse_field_list(
+    iter: &mut NodeIterator<'_>,
+    context: &Context,
+) -> Result<Vec<ast::Field>, String> {
+    expect(iter, Rule::field_list)
+        .and_then(|n| n.children().map(|n| parse_field(n, context)).collect())
+}
+
+/// Parses an optional `field_list` node, returning an empty list when
+/// the node is absent.
+fn parse_field_list_opt(
+    iter: &mut NodeIterator<'_>,
+    context: &Context,
+) -> Result<Vec<ast::Field>, String> {
+    maybe(iter, Rule::field_list)
+        .map_or(Ok(vec![]), |n| n.children().map(|n| parse_field(n, context)).collect())
+}
+
+/// Builds an `ast::Grammar` from the root parse node: collects source
+/// comments from the token stream, then converts each top-level
+/// declaration in order.
+fn parse_grammar(root: Node<'_>, context: &Context) -> Result<ast::Grammar, String> {
+    let mut grammar = ast::Grammar::new(context.0);
+
+    // Comments are matched by the COMMENT rule and therefore do not
+    // appear as ordinary children; recover them by scanning the raw
+    // token stream for COMMENT start/end token pairs.
+    // (The previous version also kept an always-empty
+    // `toplevel_comments` vector and appended it at the end — dead
+    // code, removed.)
+    let mut comment_start = vec![];
+    for token in root.clone().tokens() {
+        match token {
+            Token::Start { rule: Rule::COMMENT, pos } => comment_start.push(pos),
+            Token::End { rule: Rule::COMMENT, pos } => {
+                let start_pos = comment_start.pop().unwrap();
+                grammar.comments.push(ast::Comment {
+                    loc: ast::SourceRange {
+                        file: context.0,
+                        start: ast::SourceLocation::new(start_pos.pos(), context.1),
+                        end: ast::SourceLocation::new(pos.pos(), context.1),
+                    },
+                    text: start_pos.span(&pos).as_str().to_owned(),
+                })
+            }
+            _ => (),
+        }
+    }
+
+    for node in root.children() {
+        let loc = node.as_loc(context);
+        let rule = node.as_rule();
+        match rule {
+            Rule::endianness_declaration => {
+                grammar.endianness = Some(parse_endianness(node, context)?)
+            }
+            Rule::checksum_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let width = parse_integer(&mut children)?;
+                let function = parse_string(&mut children)?;
+                grammar.declarations.push(ast::Decl::Checksum { id, loc, function, width })
+            }
+            Rule::custom_field_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let width = parse_integer_opt(&mut children)?;
+                let function = parse_string(&mut children)?;
+                grammar.declarations.push(ast::Decl::CustomField { id, loc, function, width })
+            }
+            Rule::enum_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let width = parse_integer(&mut children)?;
+                let tags = parse_enum_tag_list(&mut children, context)?;
+                grammar.declarations.push(ast::Decl::Enum { id, loc, width, tags })
+            }
+            Rule::packet_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let parent_id = parse_identifier_opt(&mut children)?;
+                let constraints = parse_constraint_list_opt(&mut children, context)?;
+                let fields = parse_field_list_opt(&mut children, context)?;
+                grammar.declarations.push(ast::Decl::Packet {
+                    id,
+                    loc,
+                    parent_id,
+                    constraints,
+                    fields,
+                })
+            }
+            Rule::struct_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let parent_id = parse_identifier_opt(&mut children)?;
+                let constraints = parse_constraint_list_opt(&mut children, context)?;
+                let fields = parse_field_list_opt(&mut children, context)?;
+                grammar.declarations.push(ast::Decl::Struct {
+                    id,
+                    loc,
+                    parent_id,
+                    constraints,
+                    fields,
+                })
+            }
+            Rule::group_declaration => {
+                let mut children = node.children();
+                let id = parse_identifier(&mut children)?;
+                let fields = parse_field_list(&mut children, context)?;
+                grammar.declarations.push(ast::Decl::Group { id, loc, fields })
+            }
+            // Test declarations are accepted by the grammar but not yet
+            // represented in the AST.
+            Rule::test_declaration => {}
+            Rule::EOI => (),
+            _ => unreachable!(),
+        }
+    }
+    Ok(grammar)
+}
+
+/// Parse a new source file.
+/// The source file is fully read and added to the compilation database.
+/// Returns the constructed AST, or a descriptive error message in case
+/// of syntax error.
+pub fn parse_file(
+    sources: &mut ast::SourceDatabase,
+    name: String,
+) -> Result<ast::Grammar, Diagnostic<ast::FileId>> {
+    let source = std::fs::read_to_string(&name).map_err(|e| {
+        Diagnostic::error().with_message(format!("failed to read input file '{}': {}", &name, e))
+    })?;
+    let root = PDLParser::parse(Rule::grammar, &source)
+        .map_err(|e| {
+            Diagnostic::error()
+                .with_message(format!("failed to parse input file '{}': {}", &name, e))
+        })?
+        .next()
+        .unwrap();
+    let line_starts: Vec<_> = files::line_starts(&source).collect();
+    // The clone is required: `root` still borrows `source`, so the
+    // string cannot be moved into the source database directly.
+    let file = sources.add(name, source.clone());
+    parse_grammar(root, &(file, &line_starts)).map_err(|e| Diagnostic::error().with_message(e))
+}
diff --git a/tools/pdl/src/pdl.pest b/tools/pdl/src/pdl.pest
new file mode 100644
index 00000000000..43b5095f88d
--- /dev/null
+++ b/tools/pdl/src/pdl.pest
@@ -0,0 +1,123 @@
+WHITESPACE = _{ " " | "\n" }
+COMMENT = { block_comment | line_comment }
+
+block_comment = { "/*" ~ (!"*/" ~ ANY)* ~ "*/" }
+line_comment = { "//" ~ (!"\n" ~ ANY)* }
+
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+hexdigit = { digit | 'a'..'f' | 'A'..'F' }
+alphanum = { alpha | digit | "_" }
+
+identifier = @{ alpha ~ alphanum* }
+payload_identifier = @{ "_payload_" }
+body_identifier = @{ "_body_" }
+intvalue = @{ digit+ }
+hexvalue = @{ ("0x"|"0X") ~ hexdigit+ }
+integer = @{ hexvalue | intvalue }
+string = @{ "\"" ~ (!"\"" ~ ANY)* ~ "\"" }
+size_modifier = @{
+    ("+"|"-"|"*"|"/") ~ (digit|"+"|"-"|"*"|"/")+
+}
+
+endianness_declaration = { "little_endian_packets" | "big_endian_packets" }
+
+enum_tag = { identifier ~ "=" ~ integer }
+enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
+enum_declaration = {
+    "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
+        enum_tag_list ~
+    "}"
+}
+
+constraint = { identifier ~ "=" ~ (identifier|integer) }
+constraint_list = { constraint ~ ("," ~ constraint)* }
+
+checksum_field = { "_checksum_start_" ~ "(" ~ identifier ~ ")" }
+padding_field = { "_padding_" ~ "[" ~ integer ~ "]" }
+size_field = { "_size_" ~ "(" ~ (identifier|payload_identifier|body_identifier)  ~ ")" ~ ":" ~ integer }
+count_field = { "_count_" ~ "(" ~ identifier ~ ")" ~ ":" ~ integer }
+body_field = @{ "_body_" }
+payload_field = { "_payload_" ~ (":" ~ "[" ~ size_modifier ~ "]")? }
+fixed_field = { "_fixed_" ~ "=" ~ (
+    (integer ~ ":" ~ integer) |
+    (identifier ~ ":" ~ identifier)
+)}
+reserved_field = { "_reserved_" ~ ":" ~ integer }
+array_field = { identifier ~ ":" ~ (integer|identifier) ~
+    "[" ~ (size_modifier|integer)? ~ "]"
+}
+scalar_field = { identifier ~ ":" ~ integer }
+typedef_field = { identifier ~ ":" ~ identifier }
+group_field = { identifier ~ ("{" ~ constraint_list ~ "}")? }
+
+field = _{
+    checksum_field |
+    padding_field |
+    size_field |
+    count_field |
+    body_field |
+    payload_field |
+    fixed_field |
+    reserved_field |
+    array_field |
+    scalar_field |
+    typedef_field |
+    group_field
+}
+field_list = { field ~ ("," ~ field)* ~ ","? }
+
+packet_declaration = {
+   "packet" ~ identifier ~
+        (":" ~ identifier)? ~
+           ("(" ~ constraint_list ~ ")")? ~
+    "{" ~
+        field_list? ~
+    "}"
+}
+
+struct_declaration = {
+    "struct" ~ identifier ~
+        (":" ~ identifier)? ~
+           ("(" ~ constraint_list ~ ")")? ~
+    "{" ~
+        field_list? ~
+    "}"
+}
+
+group_declaration = {
+    "group" ~ identifier ~ "{" ~ field_list ~ "}"
+}
+
+checksum_declaration = {
+    "checksum" ~ identifier ~ ":" ~ integer ~ string
+}
+
+custom_field_declaration = {
+    "custom_field" ~ identifier ~ (":" ~ integer)? ~ string
+}
+
+test_case = { string }
+test_case_list = _{ test_case ~ ("," ~ test_case)* ~ ","? }
+test_declaration = {
+    "test" ~ identifier ~ "{" ~
+        test_case_list ~
+    "}"
+}
+
+declaration = _{
+    enum_declaration |
+    packet_declaration |
+    struct_declaration |
+    group_declaration |
+    checksum_declaration |
+    custom_field_declaration |
+    test_declaration
+}
+
+grammar = {
+    SOI ~
+    endianness_declaration? ~
+    declaration* ~
+    EOI
+}
diff --git a/tools/pdl/test/array-field.pdl b/tools/pdl/test/array-field.pdl
new file mode 100644
index 00000000000..070a6cc5324
--- /dev/null
+++ b/tools/pdl/test/array-field.pdl
@@ -0,0 +1,39 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+struct Struct {
+    a: 1,
+}
+
+packet Packet {
+    a: 1,
+}
+
+group Group {
+    a: 1,
+}
+
+packet InvalidKind {
+    array_0: Group[],
+    array_1: Packet[],
+    array_2: checksum[],
+}
+
+packet UndeclaredType {
+    array: Unknown[],
+}
+
+packet Correct {
+    array_0: custom[],
+    array_1: Enum[],
+    array_2: Struct[],
+    array_3: 1[],
+    array_4: 1[42],
+    array_5: 1[+2],
+}
diff --git a/tools/pdl/test/checksum-field.pdl b/tools/pdl/test/checksum-field.pdl
new file mode 100644
index 00000000000..0e1a98b2665
--- /dev/null
+++ b/tools/pdl/test/checksum-field.pdl
@@ -0,0 +1,22 @@
+little_endian_packets
+
+checksum crc16: 16 "crc16"
+
+packet Undefined {
+    _checksum_start_ (crc16),
+}
+
+packet InvalidType {
+    crc16: 16,
+    _checksum_start_ (crc16),
+}
+
+packet InvalidOrder {
+    _checksum_start_ (crc16),
+    crc16: crc16,
+}
+
+packet Correct {
+    crc16: crc16,
+    _checksum_start_ (crc16),
+}
diff --git a/tools/pdl/test/count-field.pdl b/tools/pdl/test/count-field.pdl
new file mode 100644
index 00000000000..a88cccdec8c
--- /dev/null
+++ b/tools/pdl/test/count-field.pdl
@@ -0,0 +1,25 @@
+little_endian_packets
+
+packet Undefined {
+    _count_ (array): 8,
+}
+
+packet InvalidType {
+    _count_ (array): 8,
+    array: 16,
+}
+
+packet InvalidOrder {
+    array: 16[],
+    _count_ (array): 8,
+}
+
+packet InvalidSize {
+    _count_ (array): 8,
+    array: 16[32],
+}
+
+packet Correct {
+    _count_ (array): 8,
+    array: 16[],
+}
diff --git a/tools/pdl/test/decl-scope.pdl b/tools/pdl/test/decl-scope.pdl
new file mode 100644
index 00000000000..c1391ab3f2b
--- /dev/null
+++ b/tools/pdl/test/decl-scope.pdl
@@ -0,0 +1,26 @@
+
+// Clashes with custom_field, struct, enum
+checksum decl_name: 16 "crc16"
+
+// Clashes with checksum, struct, enum
+custom_field decl_name: 1 "custom"
+
+// Clashes with checksum, custom_field, struct
+enum decl_name : 1 {
+    A = 1,
+}
+
+// Clashes with checksum, custom_field, enum
+struct decl_name {
+    a: 1,
+}
+
+// OK
+group decl_name {
+    a: 1,
+}
+
+// OK
+packet decl_name {
+    a: 1,
+}
diff --git a/tools/pdl/test/example.pdl b/tools/pdl/test/example.pdl
new file mode 100644
index 00000000000..b34d1400dbf
--- /dev/null
+++ b/tools/pdl/test/example.pdl
@@ -0,0 +1,78 @@
+// line comment
+/* block comment */
+
+little_endian_packets
+
+/* stuff */
+enum FourBits : 4 {
+  ONE = 1,
+  TWO = 2,
+  THREE = 3,
+  FIVE = 5,
+  TEN = 10,
+  LAZY_ME = 15,
+}
+
+/* other stuff */
+enum FourBits : 4 {
+  ONE = 1,
+  TWO = 2,
+  THREE = 3,
+  FIVE = 5,
+  TEN = 10,
+  LAZY_ME = 15
+}
+
+packet Test {
+    /* Checksum */
+    _checksum_start_ (crc16),
+    /* Padding */
+    _padding_ [1],
+    /* Size */
+    _size_ (_payload_) : 1,
+    _size_ (_body_) : 1,
+    _size_ (id) : 1,
+    /* Body */
+    _body_,
+    /* Payload */
+    _payload_,
+    _payload_ : [+1],
+    /* Fixed */
+    _fixed_ = 1:1,
+    _fixed_ = id:id,
+    /* Reserved */
+    _reserved_ : 1,
+    /* Array */
+    id: 1[+1],
+    id: id[+1],
+    id: 1[1],
+    id: id[1],
+    id: 1[],
+    id: id[],
+    /* Scalar */
+    id: 1,
+    /* Typedef */
+    id : id,
+    /* Group */
+    id { a=1, b=2 },
+    id,
+}
+
+packet TestChild : Test {
+}
+
+packet TestChild (a=1, b=2) {
+}
+
+packet TestChild : Test (a=1, b=2) {
+}
+
+checksum id: 1 "id"
+
+custom_field id : 1 "id"
+custom_field id "id"
+
+test Test {
+    "1111",
+    "2222",
+}
diff --git a/tools/pdl/test/fixed-field.pdl b/tools/pdl/test/fixed-field.pdl
new file mode 100644
index 00000000000..e69fc7e37fa
--- /dev/null
+++ b/tools/pdl/test/fixed-field.pdl
@@ -0,0 +1,22 @@
+little_endian_packets
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+packet InvalidValue {
+    _fixed_ = 1: 256,
+}
+
+packet UndeclaredEnum {
+    _fixed_ = tag : InvalidEnum,
+}
+
+packet UndeclaredTag {
+    _fixed_ = invalid_tag : Enum,
+}
+
+packet Correct {
+    _fixed_ = 1: 256,
+    _fixed_ = tag: Enum,
+}
diff --git a/tools/pdl/test/group-constraint.pdl b/tools/pdl/test/group-constraint.pdl
new file mode 100644
index 00000000000..1f4e10d39d7
--- /dev/null
+++ b/tools/pdl/test/group-constraint.pdl
@@ -0,0 +1,39 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+group Group {
+    a: 4,
+    b: Enum,
+    c: custom,
+    d: checksum,
+}
+
+struct Undeclared {
+    Group { e=1 },
+}
+
+struct Redeclared {
+    Group { a=1, a=2 },
+}
+
+struct TypeMismatch {
+    Group { a=tag, b=1, c=1, d=1 },
+}
+
+struct InvalidLiteral {
+    Group { a=42 },
+}
+
+struct UndeclaredTag {
+    Group { b=undeclared_tag },
+}
+
+struct Correct {
+    Group { a=1, b=tag },
+}
diff --git a/tools/pdl/test/packet.pdl b/tools/pdl/test/packet.pdl
new file mode 100644
index 00000000000..9b9ca201d65
--- /dev/null
+++ b/tools/pdl/test/packet.pdl
@@ -0,0 +1,52 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+packet Packet {
+    a: 4,
+    b: Enum,
+    c: custom,
+    d: checksum,
+}
+
+struct Struct {
+    a: 4,
+}
+
+packet RecursivePacket_0 : RecursivePacket_1 {
+}
+
+packet RecursivePacket_1 : RecursivePacket_0 {
+}
+
+packet InvalidParent : Struct {
+}
+
+packet UndeclaredParent : FooBar {
+}
+
+packet UnnecessaryConstraints (a=1) {
+}
+
+packet Undeclared : Packet (c=1) {
+}
+
+packet Redeclared : Packet (a=1, a=2) {
+}
+
+packet TypeMismatch : Packet (a=tag, b=1, c=1, d=1) {
+}
+
+packet InvalidLiteral : Packet (a=42) {
+}
+
+packet UndeclaredTag : Packet (b=undeclared_tag) {
+}
+
+packet Correct : Packet (a=1, b=tag) {
+}
diff --git a/tools/pdl/test/recurse.pdl b/tools/pdl/test/recurse.pdl
new file mode 100644
index 00000000000..ad3a2009815
--- /dev/null
+++ b/tools/pdl/test/recurse.pdl
@@ -0,0 +1,38 @@
+
+struct Struct_0: Struct_1 {
+}
+
+struct Struct_1: Struct_0 {
+}
+
+
+struct Packet_0: Packet_1 {
+}
+
+struct Packet_1: Packet_0 {
+}
+
+
+group Group_0 {
+    Group_1
+}
+
+struct Struct_2 {
+    Group_0
+}
+
+group Group_1 {
+    a: Struct_2
+}
+
+
+struct Struct_3: Struct_4 {
+}
+
+struct Struct_4 {
+    Group_2
+}
+
+group Group_2 {
+    a: Struct_3
+}
diff --git a/tools/pdl/test/size-field.pdl b/tools/pdl/test/size-field.pdl
new file mode 100644
index 00000000000..dfa9ad7f5b4
--- /dev/null
+++ b/tools/pdl/test/size-field.pdl
@@ -0,0 +1,58 @@
+little_endian_packets
+
+packet Undefined {
+    _size_ (array): 8,
+}
+
+packet UndefinedPayloadWithBody {
+    _size_ (_payload_): 8,
+    _body_,
+}
+
+packet UndefinedPayload {
+    _size_ (_payload_): 8,
+}
+
+packet UndefinedBodyWithPayload {
+    _size_ (_body_): 8,
+    _payload_,
+}
+
+packet UndefinedBody {
+    _size_ (_body_): 8,
+}
+
+packet InvalidType {
+    _size_ (array): 8,
+    array: 16,
+}
+
+packet InvalidArrayOrder {
+    array: 16[],
+    _size_ (array): 8,
+}
+
+packet InvalidPayloadOrder {
+    _payload_,
+    _size_ (_payload_): 8,
+}
+
+packet InvalidBodyOrder {
+    _body_,
+    _size_ (_body_): 8,
+}
+
+packet CorrectArray {
+    _size_ (array): 8,
+    array: 16[],
+}
+
+packet CorrectPayload {
+    _size_ (_payload_): 8,
+    _payload_,
+}
+
+packet CorrectBody {
+    _size_ (_body_): 8,
+    _body_,
+}
diff --git a/tools/pdl/test/struct.pdl b/tools/pdl/test/struct.pdl
new file mode 100644
index 00000000000..d8ed439a2e1
--- /dev/null
+++ b/tools/pdl/test/struct.pdl
@@ -0,0 +1,52 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+struct Struct {
+    a: 4,
+    b: Enum,
+    c: custom,
+    d: checksum,
+}
+
+packet Packet {
+    a: 4,
+}
+
+struct RecursiveStruct_0 : RecursiveStruct_1 {
+}
+
+struct RecursiveStruct_1 : RecursiveStruct_0 {
+}
+
+struct InvalidParent : Packet {
+}
+
+struct UndeclaredParent : FooBar {
+}
+
+struct UnnecessaryConstraints (a=1) {
+}
+
+struct Undeclared : Struct (c=1) {
+}
+
+struct Redeclared : Struct (a=1, a=2) {
+}
+
+struct TypeMismatch : Struct (a=tag, b=1, c=1, d=1) {
+}
+
+struct InvalidLiteral : Struct (a=42) {
+}
+
+struct UndeclaredTag : Struct (b=undeclared_tag) {
+}
+
+struct Correct : Struct (a=1, b=tag) {
+}
diff --git a/tools/pdl/test/typedef-field.pdl b/tools/pdl/test/typedef-field.pdl
new file mode 100644
index 00000000000..2e566765583
--- /dev/null
+++ b/tools/pdl/test/typedef-field.pdl
@@ -0,0 +1,36 @@
+little_endian_packets
+
+custom_field custom: 1 "custom"
+checksum checksum: 1 "checksum"
+
+enum Enum : 1 {
+    tag = 0,
+}
+
+struct Struct {
+    a: 1,
+}
+
+packet Packet {
+    a: 1,
+}
+
+group Group {
+    a: 1,
+}
+
+packet InvalidKind {
+    typedef_0: Group,
+    typedef_1: Packet,
+}
+
+packet UndeclaredType {
+    typedef: Unknown,
+}
+
+packet Correct {
+    typedef_0: custom,
+    typedef_1: checksum,
+    typedef_2: Enum,
+    typedef_3: Struct,
+}
-- 
GitLab