Skip to content

Commit

Permalink
Renamed "decoded [string]" to "data literal", mainly as variations …
Browse files Browse the repository at this point in the history
…of that replacement
  • Loading branch information
Tamschi committed Jul 15, 2021
1 parent 2a59c78 commit 8719f58
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 21 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,13 @@

<!-- markdownlint-disable no-trailing-punctuation -->

## next

TODO: Date

* **Breaking:**
* `Decoded` is now `DataLiteral` and so on.

## 0.0.9

2021-07-15
Expand Down
8 changes: 4 additions & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@ mod token;
pub use parsing::parse;
pub use token::Token;

/// Shared variant payload data structure for decoded strings (`<…:…>`).
/// Shared variant payload data structure for data literals (`<…:…>`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Decoded<'a, Position> {
pub struct DataLiteral<'a, Position> {
pub encoding: Cow<'a, String, str>,
pub encoding_span: Range<Position>,
pub decoded: Cow<'a, String, str>,
pub decoded_span: Range<Position>,
pub unencoded_data: Cow<'a, String, str>,
pub unencoded_data_span: Range<Position>,
}
8 changes: 4 additions & 4 deletions src/parsing.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::{
diagnostics::{Diagnostic, DiagnosticLabel, DiagnosticLabelPriority, DiagnosticType, Reporter},
token::Token as lexerToken,
Decoded,
DataLiteral,
};
use cervine::Cow;
use debugless_unwrap::DebuglessUnwrap as _;
Expand Down Expand Up @@ -79,7 +79,7 @@ impl<'a, Position> Taml<'a, Position> {
#[derive(Debug, Clone)]
pub enum TamlValue<'a, Position> {
String(Cow<'a, String, str>),
Decoded(Decoded<'a, Position>),
DataLiteral(DataLiteral<'a, Position>),
Integer(&'a str),
Float(&'a str),
List(List<'a, Position>),
Expand Down Expand Up @@ -1280,8 +1280,8 @@ fn parse_value<'a, Position: Debug + Clone + PartialEq>(
value: TamlValue::String(str),
span,
},
(lexerToken::Decoded(decoded), span) => Taml {
value: TamlValue::Decoded(decoded),
(lexerToken::DataLiteral(data_literal), span) => Taml {
value: TamlValue::DataLiteral(data_literal),
span,
},
(lexerToken::Float(str), span) => Taml {
Expand Down
28 changes: 15 additions & 13 deletions src/token.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::Decoded;
use crate::DataLiteral;
use cervine::Cow;
use gnaw::Unshift as _;
use lazy_transform_str::{
Expand Down Expand Up @@ -128,24 +128,24 @@ pub enum Token<'a, Position> {
String(Cow<'a, String, str>),

#[regex(r#"<[a-zA-Z_][a-zA-Z\-_0-9]*:([^\\>]|\\\\|\\>)*>"#, |lex| {
let (encoding, decoded) = lex.slice()[1..lex.slice().len() - 1].split_once(':').unwrap();
Decoded {
let (encoding, unencoded_data) = lex.slice()[1..lex.slice().len() - 1].split_once(':').unwrap();
DataLiteral {
encoding: Cow::Borrowed(encoding),
encoding_span: lex.span().start + 1..lex.span().start + 1 + encoding.len(),
decoded: unescape_backslashed_verbatim(decoded),
decoded_span: lex.span().end - 1 - decoded.len()..lex.span().end - 1,
unencoded_data: unescape_backslashed_verbatim(unencoded_data),
unencoded_data_span: lex.span().end - 1 - unencoded_data.len()..lex.span().end - 1,
}
})]
#[regex(r#"<`([^\\`]|\\\\|\\`)*`:([^\\>]|\\\\|\\>)*>"#, |lex| {
let (encoding, decoded) = lex.slice()[1..lex.slice().len() - 1].split_once(':').unwrap();
Decoded {
let (encoding, unencoded_data) = lex.slice()[1..lex.slice().len() - 1].split_once(':').unwrap();
DataLiteral {
encoding: unescape_quoted_identifier(encoding),
encoding_span: lex.span().start + 1..lex.span().start + 1 + encoding.len(),
decoded: unescape_backslashed_verbatim(decoded),
decoded_span: lex.span().end - 1 - decoded.len()..lex.span().end - 1,
unencoded_data: unescape_backslashed_verbatim(unencoded_data),
unencoded_data_span: lex.span().end - 1 - unencoded_data.len()..lex.span().end - 1,
}
})]
Decoded(Decoded<'a, Position>),
DataLiteral(DataLiteral<'a, Position>),

#[regex(r"-?\d+\.\d+", |lex| trim_trailing_0s(trim_leading_0s(lex.slice())))]
Float(&'a str),
Expand Down Expand Up @@ -181,10 +181,12 @@ impl<'a, Position> Display for Token<'a, Position> {
Token::Thesis => write!(f, ")"),
Token::Comma => write!(f, ","),
Token::Period => write!(f, "."),
Token::Decoded(Decoded {
encoding, decoded, ..
Token::DataLiteral(DataLiteral {
encoding,
unencoded_data,
..
}) => {
write!(f, "<{}:{}>", encoding, escape_greater(decoded))
write!(f, "<{}:{}>", encoding, escape_greater(unencoded_data))
}
Token::String(str) => write!(f, r#""{}""#, escape_double_quotes(str)),
Token::Float(str) | Token::Integer(str) => write!(f, "{}", str),
Expand Down

0 comments on commit 8719f58

Please sign in to comment.