@@ -497,7 +497,7 @@ RefPtr<Selector> Parser::parse_single_selector(TokenStream<T>& tokens, bool is_r

         tokens.skip_whitespace();

-        auto current_value = tokens.peek_token();
+        auto& current_value = tokens.peek_token();
         if (current_value.is(Token::Type::Delim)) {
             auto delim = ((Token)current_value).delim();
             if (delim == ">") {
@@ -512,7 +512,7 @@ RefPtr<Selector> Parser::parse_single_selector(TokenStream<T>& tokens, bool is_r
             } else if (delim == "|") {
                 tokens.next_token();

-                auto next = tokens.peek_token();
+                auto& next = tokens.peek_token();
                 if (next.is(Token::Type::EndOfFile))
                     return {};

@@ -528,7 +528,7 @@ RefPtr<Selector> Parser::parse_single_selector(TokenStream<T>& tokens, bool is_r
         Vector<Selector::SimpleSelector> simple_selectors;

         for (;;) {
-            auto current_value = tokens.peek_token();
+            auto& current_value = tokens.peek_token();
             if (current_value.is(Token::Type::EndOfFile) || current_value.is(Token::Type::Whitespace))
                 break;

@@ -546,7 +546,7 @@ RefPtr<Selector> Parser::parse_single_selector(TokenStream<T>& tokens, bool is_r
     };

     for (;;) {
-        auto current_value = tokens.peek_token();
+        auto& current_value = tokens.peek_token();
         if (current_value.is(Token::Type::EndOfFile))
             break;

@@ -577,7 +577,7 @@ NonnullRefPtrVector<StyleRule> Parser::consume_a_list_of_rules(TokenStream<T>& t
     NonnullRefPtrVector<StyleRule> rules;

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();

         if (token.is(Token::Type::Whitespace)) {
             continue;
@@ -627,14 +627,14 @@ NonnullRefPtr<StyleRule> Parser::consume_an_at_rule(TokenStream<T>& tokens)
 {
     dbgln_if(CSS_PARSER_TRACE, "Parser::consume_an_at_rule");

-    auto name_ident = tokens.next_token();
+    auto& name_ident = tokens.next_token();
     VERIFY(name_ident.is(Token::Type::AtKeyword));

     NonnullRefPtr<StyleRule> rule = create<StyleRule>(StyleRule::Type::At);
     rule->m_name = ((Token)name_ident).at_keyword();

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();
         if (token.is(Token::Type::Semicolon)) {
             return rule;
         }
@@ -670,7 +670,7 @@ RefPtr<StyleRule> Parser::consume_a_qualified_rule(TokenStream<T>& tokens)
     NonnullRefPtr<StyleRule> rule = create<StyleRule>(StyleRule::Type::Qualified);

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();

         if (token.is(Token::Type::EndOfFile)) {
             log_parse_error();
@@ -705,7 +705,7 @@ StyleComponentValueRule Parser::consume_a_component_value(TokenStream<T>& tokens
 {
     dbgln_if(CSS_PARSER_TRACE, "Parser::consume_a_component_value");

-    auto token = tokens.next_token();
+    auto& token = tokens.next_token();

     if (token.is(Token::Type::OpenCurly) || token.is(Token::Type::OpenSquare) || token.is(Token::Type::OpenParen))
         return StyleComponentValueRule(consume_a_simple_block(tokens));
@@ -737,7 +737,7 @@ NonnullRefPtr<StyleBlockRule> Parser::consume_a_simple_block(TokenStream<T>& tok
     block->m_token = tokens.current_token();

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();

         if (token.is(ending_token)) {
             return block;
@@ -769,7 +769,7 @@ NonnullRefPtr<StyleFunctionRule> Parser::consume_a_function(TokenStream<T>& toke
     NonnullRefPtr<StyleFunctionRule> function = create<StyleFunctionRule>(((Token)name_ident).m_value.to_string());

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();
         if (token.is(Token::Type::CloseParen)) {
             return function;
         }
@@ -797,16 +797,16 @@ Optional<StyleDeclarationRule> Parser::consume_a_declaration(TokenStream<T>& tok
 {
     dbgln_if(CSS_PARSER_TRACE, "Parser::consume_a_declaration");

-    auto token = tokens.next_token();
+    auto& token = tokens.next_token();

     StyleDeclarationRule declaration;
     VERIFY(token.is(Token::Type::Ident));
     declaration.m_name = ((Token)token).ident();

     tokens.skip_whitespace();

-    auto colon = tokens.next_token();
-    if (!colon.is(Token::Type::Colon)) {
+    auto& maybe_colon = tokens.next_token();
+    if (!maybe_colon.is(Token::Type::Colon)) {
         log_parse_error();
         return {};
     }
@@ -862,7 +862,7 @@ Vector<DeclarationOrAtRule> Parser::consume_a_list_of_declarations(TokenStream<T
     Vector<DeclarationOrAtRule> list;

     for (;;) {
-        auto token = tokens.next_token();
+        auto& token = tokens.next_token();
         if (token.is(Token::Type::Whitespace) || token.is(Token::Type::Semicolon)) {
             continue;
         }
@@ -882,7 +882,7 @@ Vector<DeclarationOrAtRule> Parser::consume_a_list_of_declarations(TokenStream<T
         temp.append(token);

         for (;;) {
-            auto peek = tokens.peek_token();
+            auto& peek = tokens.peek_token();
             if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile)) {
                 break;
             }
@@ -899,11 +899,13 @@ Vector<DeclarationOrAtRule> Parser::consume_a_list_of_declarations(TokenStream<T

         log_parse_error();
         tokens.reconsume_current_input_token();
-        auto peek = tokens.peek_token();
-        while (!(peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile))) {
+
+        for (;;) {
+            auto& peek = tokens.peek_token();
+            if (peek.is(Token::Type::Semicolon) || peek.is(Token::Type::EndOfFile))
+                break;
             dbgln("Discarding token: '{}'", peek.to_debug_string());
             (void)consume_a_component_value(tokens);
-            peek = tokens.peek_token();
         }
     }

@@ -924,7 +926,7 @@ RefPtr<CSSRule> Parser::parse_as_rule(TokenStream<T>& tokens)

     tokens.skip_whitespace();

-    auto token = tokens.peek_token();
+    auto& token = tokens.peek_token();

     if (token.is(Token::Type::EndOfFile)) {
         return {};
@@ -941,7 +943,7 @@ RefPtr<CSSRule> Parser::parse_as_rule(TokenStream<T>& tokens)

     tokens.skip_whitespace();

-    auto maybe_eof = tokens.peek_token();
+    auto& maybe_eof = tokens.peek_token();
     if (maybe_eof.is(Token::Type::EndOfFile)) {
         return rule;
     }
@@ -983,7 +985,7 @@ Optional<StyleProperty> Parser::parse_as_declaration(TokenStream<T>& tokens)

     tokens.skip_whitespace();

-    auto token = tokens.peek_token();
+    auto& token = tokens.peek_token();

     if (!token.is(Token::Type::Ident)) {
         return {};
@@ -1045,7 +1047,7 @@ Optional<StyleComponentValueRule> Parser::parse_as_component_value(TokenStream<T

     tokens.skip_whitespace();

-    auto token = tokens.peek_token();
+    auto& token = tokens.peek_token();

     if (token.is(Token::Type::EndOfFile)) {
         return {};
@@ -1055,7 +1057,7 @@ Optional<StyleComponentValueRule> Parser::parse_as_component_value(TokenStream<T

     tokens.skip_whitespace();

-    auto maybe_eof = tokens.peek_token();
+    auto& maybe_eof = tokens.peek_token();
     if (maybe_eof.is(Token::Type::EndOfFile)) {
         return value;
     }
@@ -1100,7 +1102,7 @@ Vector<Vector<StyleComponentValueRule>> Parser::parse_as_comma_separated_list_of
     lists.append({});

     for (;;) {
-        auto next = tokens.next_token();
+        auto& next = tokens.next_token();

         if (next.is(Token::Type::Comma)) {
             lists.append({});
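
Every hunk above makes the same change: the token returned by `TokenStream<T>::peek_token()` / `next_token()` is now bound with `auto&` instead of plain `auto`, so each call site works with the stream's own token rather than a copy of it. Below is a minimal sketch of why that distinction matters, using hypothetical simplified `Token` and `TokenStream` types (not the actual LibWeb classes) whose accessors return references, mirroring the calls in the diff:

#include <cstddef>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for a parser token; the real LibWeb Token carries more state.
struct Token {
    std::string value;
};

// Simplified token stream; only the two accessors used in the diff are sketched.
class TokenStream {
public:
    explicit TokenStream(std::vector<Token> tokens)
        : m_tokens(std::move(tokens))
    {
    }

    // Hands out a reference to the next token and advances; the EOF sentinel is
    // returned once the input is exhausted.
    Token& next_token()
    {
        if (m_index < m_tokens.size())
            return m_tokens[m_index++];
        return m_eof;
    }

    // Hands out a reference to the upcoming token without consuming it.
    Token& peek_token()
    {
        if (m_index < m_tokens.size())
            return m_tokens[m_index];
        return m_eof;
    }

private:
    std::vector<Token> m_tokens;
    std::size_t m_index { 0 };
    Token m_eof {};
};

int main()
{
    TokenStream tokens({ Token { "div" }, Token { ">" }, Token { "p" } });

    auto copied = tokens.peek_token();   // copies the Token (and the string it owns)
    auto& aliased = tokens.peek_token(); // no copy; refers to the stream's storage

    (void)copied;
    (void)aliased;
}

Because only the binding at the call sites changes, behaviour stays the same; the parser simply stops copying each token as it walks the stream.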