Skip to content

Commit

Permalink
implement #include support
Browse files Browse the repository at this point in the history
  • Loading branch information
rcgoodfellow committed Sep 2, 2022
1 parent c02cf0b commit 95aa344
Show file tree
Hide file tree
Showing 18 changed files with 349 additions and 424 deletions.
48 changes: 34 additions & 14 deletions lang/p4-macro/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
use std::fs;
use std::sync::Arc;

use p4::check::Diagnostics;
use p4::{check, error, error::SemanticError, lexer, parser, preprocessor};
use p4::{
ast::AST, check, error, error::SemanticError, lexer, parser, preprocessor,
};
use proc_macro::TokenStream;
use serde::Deserialize;
use serde_tokenstream::ParseWrapper;
Expand Down Expand Up @@ -58,25 +61,42 @@ fn generate_rs(
) -> Result<TokenStream, syn::Error> {
//TODO graceful error handling

let contents = fs::read_to_string(filename).unwrap();
let mut ast = AST::default();
process_file(Arc::new(filename), &mut ast, &settings)?;

let ppr = preprocessor::run(&contents).unwrap();
let lines: Vec<&str> = ppr.lines.iter().map(|x| x.as_str()).collect();
let lxr = lexer::Lexer::new(lines.clone());
let mut psr = parser::Parser::new(lxr);
let mut ast = psr.run().unwrap();
let (hlir, diags) = check::all(&ast);
check(&lines, &diags);
p4_rust::sanitize(&mut ast);
let tokens = p4_rust::emit_tokens(
let (hlir, _) = check::all(&ast);

let tokens: TokenStream = p4_rust::emit_tokens(
&ast,
&hlir,
p4_rust::Settings {
pipeline_name: settings.pipeline_name,
pipeline_name: settings.pipeline_name.clone(),
},
);
)
.into();

Ok(tokens)
}

Ok(tokens.into())
/// Read, preprocess, and parse a P4 source file into `ast`, recursively
/// processing any `#include`d files first (depth-first), so that included
/// definitions are available before this file's own contents are parsed.
///
/// `filename` is shared via `Arc` because the lexer and preprocessor retain
/// it for error reporting.
///
/// NOTE(review): file reads and preprocessor/parser failures currently
/// `unwrap()` (see the "gracefull error handling" TODO in the caller) —
/// a bad include path will panic rather than surface a `syn::Error`.
fn process_file(
filename: Arc<String>,
ast: &mut AST,
settings: &GenerationSettings,
) -> Result<(), syn::Error> {
let contents = fs::read_to_string(&*filename).unwrap();
let ppr = preprocessor::run(&contents, filename.clone()).unwrap();
// Recurse into includes first so their declarations land in `ast`
// before this file is parsed.
for included in &ppr.elements.includes {
process_file(Arc::new(included.clone()), ast, settings)?;
}

// NOTE(review): this semantic check runs over the AST accumulated from
// the includes only — this file's contents are lexed/parsed below,
// after the check. Confirm this ordering is intended and not a latent
// bug (the current file's definitions are never checked here).
let (_, diags) = check::all(ast);
let lines: Vec<&str> = ppr.lines.iter().map(|x| x.as_str()).collect();
check(&lines, &diags);
let lxr = lexer::Lexer::new(lines.clone(), filename);
let mut psr = parser::Parser::new(lxr);
// Parser appends this file's top-level declarations into the shared AST.
psr.run(ast).unwrap();
p4_rust::sanitize(ast);
Ok(())
}

// TODO copy pasta from x4c
Expand Down
2 changes: 2 additions & 0 deletions p4/examples/bad/parser/badness-included.p4
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
#include <p4/examples/bad/checker/undefined_type_ref_parser_arg.p4>
#include <p4/examples/bad/checker/parser-no-start-state.p4>
2 changes: 1 addition & 1 deletion p4/examples/codegen/hub.p4
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#include <softnpu.p4>
#include <p4/examples/codegen/softnpu.p4>

SoftNPU(
parse(),
Expand Down
22 changes: 7 additions & 15 deletions p4/examples/codegen/softnpu.p4
Original file line number Diff line number Diff line change
@@ -1,23 +1,15 @@
struct IngressMetadata {
bit<8> port;
bool nat;
bit<16> nat_id;
}

struct EgressMetadata {
bit<8> port;
bit<128> nexthop;
bool drop;
}

parser NpuParser<H>(
packet_in pkt,
out H parsed_headers
);

control NpuIngress<H>(
inout H hdr,
inout IngressMetadata ingress_meta,
inout EgressMetadata egress_meta,
);

package SoftNPU<H>(
NpuParser<H> p,
NpuIngress<H> ingress,
);
extern Checksum {
bit<16> run<T>(in T data);
}
2 changes: 2 additions & 0 deletions p4/src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -723,6 +723,7 @@ impl Lvalue {
kind: self.token.kind.clone(),
line: self.token.line,
col: self.token.col + parts[0].len() + 1,
file: self.token.file.clone(),
},
}
}
Expand All @@ -734,6 +735,7 @@ impl Lvalue {
kind: self.token.kind.clone(),
line: self.token.line,
col: self.token.col,
file: self.token.file.clone(),
},
}
}
Expand Down
134 changes: 68 additions & 66 deletions p4/src/error.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::lexer::Token;
use crate::lexer::{Kind, Lexer, Token};
use colored::Colorize;
use std::fmt;
use std::sync::Arc;

#[derive(Debug)]
pub struct SemanticError {
Expand All @@ -16,31 +17,19 @@ pub struct SemanticError {

impl fmt::Display for SemanticError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let loc = format!("\n[{}:{}]", self.at.line + 1, self.at.col + 1)
let loc = format!("[{}:{}]", self.at.line + 1, self.at.col + 1)
.as_str()
.bright_red();
writeln!(f, "{} {}\n", loc, self.message.bright_white())?;
writeln!(
f,
"{}\n{} {}\n",
self.message.bright_white(),
loc,
*self.at.file,
)?;
writeln!(f, " {}", self.source)?;

// The presence of tabs makes presenting error indicators purely based
// on column position impossible, so here we iterate over the existing
// string and mask out the non whitespace text inserting the error
// indicators and preserving any tab/space mixture.
let carat_line: String = self
.source
.chars()
.enumerate()
.map(|(i, x)| {
if i == self.at.col {
return '^';
}
if x.is_whitespace() {
x
} else {
' '
}
})
.collect();
let carat_line = carat_line(&self.source, &self.at);
write!(f, " {}", carat_line.bright_red())
}
}
Expand All @@ -64,28 +53,16 @@ impl fmt::Display for ParserError {
let loc = format!("[{}:{}]", self.at.line + 1, self.at.col + 1)
.as_str()
.bright_red();
writeln!(f, "{} {}\n", loc, self.message.bright_white())?;
writeln!(
f,
"{}\n{} {}\n",
self.message.bright_white(),
loc,
*self.at.file,
)?;
writeln!(f, " {}", self.source)?;

// The presence of tabs makes presenting error indicators purely based
// on column position impossible, so here we iterate over the existing
// string and mask out the non whitespace text inserting the error
// indicators and preserving any tab/space mixture.
let carat_line: String = self
.source
.chars()
.enumerate()
.map(|(i, x)| {
if i == self.at.col {
return '^';
}
if x.is_whitespace() {
x
} else {
' '
}
})
.collect();
let carat_line = carat_line(&self.source, &self.at);
write!(f, " {}", carat_line.bright_red())
}
}
Expand All @@ -105,35 +82,32 @@ pub struct TokenError {

/// The source line the token error occurred on.
pub source: String,

/// The source file where the token error was encountered.
pub file: Arc<String>,
}

impl fmt::Display for TokenError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let loc = format!("[{}:{}]", self.line + 1, self.col + 1)
.as_str()
.bright_red();
writeln!(f, "{} {}\n", loc, "unrecognized token".bright_white())?;
writeln!(
f,
"{}\n{} {}\n",
"unrecognized token".bright_white(),
loc,
*self.file,
)?;
writeln!(f, " {}", self.source)?;

// The presence of tabs makes presenting error indicators purely based
// on column position impossible, so here we iterate over the existing
// string and mask out the non whitespace text inserting the error
// indicators and preserving any tab/space mixture.
let carat_line: String = self
.source
.chars()
.enumerate()
.map(|(i, x)| {
if i >= self.col && i < self.col + self.len {
return '^';
}
if x.is_whitespace() {
x
} else {
' '
}
})
.collect();
let at = Token {
kind: Kind::Eof,
line: self.line,
col: self.col,
file: Arc::new(self.source.clone()),
};
let carat_line = carat_line(&self.source, &at);
write!(f, " {}", carat_line.bright_red())
}
}
Expand All @@ -153,9 +127,11 @@ impl fmt::Display for Error {
Self::Lexer(e) => e.fmt(f),
Self::Parser(e) => e.fmt(f),
Self::Semantic(errors) => {
for e in errors {
for e in &errors[..errors.len() - 1] {
e.fmt(f)?;
writeln!(f)?;
}
errors[errors.len() - 1].fmt(f)?;
Ok(())
}
}
Expand Down Expand Up @@ -192,20 +168,46 @@ pub struct PreprocessorError {

/// The source line the token error occurred on.
pub source: String,

/// The source file where the token error was encountered.
pub file: Arc<String>,
}

impl fmt::Display for PreprocessorError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let loc = format!("[{}]", self.line + 1).as_str().bright_red();
writeln!(
f,
"{} {}: {}\n",
loc,
"preporcessor error".bright_white(),
"{}\n{} {}\n",
self.message.bright_white(),
loc,
*self.file,
)?;
writeln!(f, " {}", self.source)
}
}

impl std::error::Error for PreprocessorError {}

/// Build an error-indicator line for `line`, underlining the token at `at`.
///
/// Everything before the error column is masked out — whitespace is kept
/// verbatim so tab/space mixtures preserve their on-screen width, while any
/// visible character becomes a plain space. From the error column onward,
/// one `^` is emitted per character until the first whitespace character or
/// lexer separator.
///
/// NOTE(review): `at.col` is used as a byte offset into `line`; slicing
/// will panic if it is not on a UTF-8 character boundary — confirm columns
/// are byte-based for non-ASCII source.
fn carat_line(line: &str, at: &Token) -> String {
    // Mask the prefix: whitespace survives, anything visible becomes ' '.
    let padding: String = line[..at.col]
        .chars()
        .map(|c| if c.is_whitespace() { c } else { ' ' })
        .collect();

    // Underline the offending token: one caret per character, stopping at
    // the first whitespace or separator.
    let marks: String = line[at.col..]
        .chars()
        .take_while(|&c| !(c.is_whitespace() || Lexer::is_separator(c)))
        .map(|_| '^')
        .collect();

    padding + &marks
}
Loading

0 comments on commit 95aa344

Please sign in to comment.