[mowyw] introduced [% ... %] as tag delimiter

git-svn-id: svn+ssh://faui2k3.org/var/lib/svn/moritz/mowyw@579 addfbb1e-f4f9-0310-b6f0-bccd0f9b8dc6
commit 519560800f601b3a1622f2e1290eeb0d53be4971 (1 parent: 9564f19)
moritz authored
Showing with 43 additions and 4 deletions.
  1. +6 −0 Changelog
  2. +6 −0 README
  3. +5 −1 example/source/foo2.shtml
  4. +26 −3 mowyw
Changelog
@@ -1,3 +1,9 @@
+mowyw (0.4.0)
+ * mowyw: Added support for new delimiters: [% ... %] instead of [[[
+ ... ]]]. Currently they are interchangeable, so [% .. ]]] works, but
+ this is considered a bug rather than a feature.
+ * README: update accordingly
+ * example/source/foo2.shtml: added example [% ... %] tags
mowyw (0.3.0)
* mowyw: Added support for syntax highlighting, variables and comments
* README: update accordingly
README
@@ -81,11 +81,17 @@ Just use the line
[[[include foo]]]
+or
+
+[% include foo %]
+
in your files, and the line will be replaced with the content of the file
`includes/foo'. It works pretty much like #include "includes/foo" with a C
preprocessor or <!-- #include virtual="includes/foo"--> with Server Side Includes
(SSI).
+Note that the [[[ ... ]]] delimiters can be replaced by [% ... %] everywhere.
+
MENUS:
Suppose you want to write a navigation menu in your html files that look like
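The README excerpt above describes the include directive; as a small usage sketch (the file names below are made up, not part of this commit), a source page might contain

    [% include header %]
    <p>Some page content.</p>
    [[[include footer]]]

and mowyw should then substitute the contents of `includes/header' and `includes/footer' in place of the tags. As the Changelog notes, the two delimiter styles are currently interchangeable, so mixing them like this works, even though that is considered a bug.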
example/source/foo2.shtml
@@ -1,10 +1,14 @@
<title>Mowyw example: page foo1</title>
-[[[include head2]]]
+[% include head2 %]
<h2>foo2 is another sub item of foo</h2>
<p>And now I'm running out of ideas what to write</p>
+<p>New style markup (if you don't see anything it means all works fine):
+ [% setvar mytest <p>All worked fine!</p> %]</p>
+[% readvar mytest %]
+
</div>
[[[menu nav foo foo2]]]
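(For context: setvar stores its argument under the given name and readvar emits it again, so, assuming the page above is processed correctly, the generated foo2 page should presumably contain the line

    <p>All worked fine!</p>

at the point of the readvar tag, while the setvar tag itself produces no visible output.)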
mowyw
@@ -65,8 +65,10 @@ my $result = GetOptions(
my @input_tokens = (
[ 'TAG_START', qr/\[\[\[\s*/],
- [ 'KEYWORD', qr/(?:include|menu|option|item|endverbatim|verbatim|comment|setvar|readvar|syntax|endsyntax)/],
+ [ 'TAG_START', qr/\[\%\s*/],
+ [ 'KEYWORD', qr/(?:include|menu|option|item|endverbatim|verbatim|comment|setvar|readvar|syntaxfile|syntax|endsyntax)/],
[ 'TAG_END', qr/\s*\]\]\]/],
+ [ 'TAG_END', qr/\s*\%\]/],
[ 'BRACES_START', qr/{{/],
[ 'BRACES_END', qr/}}/],
);
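The tokenizer change above simply registers a second pattern for each of TAG_START and TAG_END, so both delimiter styles produce the same token stream and the rest of the parser never needs to know which one was used. A minimal, self-contained sketch of that idea (this is not mowyw's actual tokenizer loop; the names, the shortened keyword list, and the per-character fallback are illustrative):

    #!/usr/bin/perl
    use strict;
    use warnings;

    # Two surface syntaxes, one token type: whichever pattern matches first
    # is emitted as TAG_START / TAG_END, so later stages see no difference.
    my @token_defs = (
        [ 'TAG_START', qr/\[\[\[\s*/ ],
        [ 'TAG_START', qr/\[%\s*/    ],
        [ 'KEYWORD',   qr/(?:include|setvar|readvar)/ ],
        [ 'TAG_END',   qr/\s*\]\]\]/ ],
        [ 'TAG_END',   qr/\s*%\]/    ],
    );

    sub tokenize {
        my ($text) = @_;
        my @tokens;
        POS: while (length $text) {
            for my $def (@token_defs) {
                my ($type, $re) = @$def;
                if ($text =~ s/\A($re)//) {
                    push @tokens, [ $type, $1 ];
                    next POS;
                }
            }
            # No rule matched: consume one character as plain (unmatched) text.
            push @tokens, [ 'UNMATCHED', substr($text, 0, 1, '') ];
        }
        return @tokens;
    }

    for my $src ('[[[include foo]]]', '[% include foo %]') {
        print "$src  =>  ", join(' ', map { $_->[0] } tokenize($src)), "\n";
    }

Run as-is, this prints the identical token sequence for both spellings of the include tag, which is exactly the property the two extra table entries provide.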
@@ -204,6 +206,7 @@ sub parse_tokens {
if ($tokens->[0]->[0] eq "TAG_START"){
p_expect($tokens, "TAG_START", $meta);
my $key = p_expect($tokens, 'KEYWORD', $meta);
+# warn "Found keyword $key\n";
my %dispatch = (
include => \&p_include,
menu => \&p_menu,
@@ -220,7 +223,8 @@ sub parse_tokens {
setvar => \&p_setvar,
readvar => \&p_readvar,
syntax => \&p_syntax,
- endvsyntax => sub {
+ syntaxfile => \&p_syntaxfile,
+ endsyntax => sub {
# p_syntax reads until it finds endsyntax, so it
# may never occur here
my ($tokens, $meta) = @_;
@@ -284,6 +288,7 @@ sub p_include {
# a keyword as well as file name
if ($tokens->[0]->[0] eq "UNMATCHED" or $tokens->[0]->[0] eq "KEYWORD"){
my $fn = strip_ws($tokens->[0]->[1]);
+# warn "Including file $fn\n";
$fn = $includes_prefix . $fn . $postfix;
shift @$tokens;
p_expect($tokens, "TAG_END", $meta);
@@ -292,7 +297,7 @@ sub p_include {
unshift @{$m->{FILES}}, $fn;
return parse_file($fn, $m);
} else {
- cloak("Expected: File name. Got: $tokens->[0]->[1]");
+ confess("Expected: File name. Got: $tokens->[0]->[1]");
}
}
@@ -446,6 +451,19 @@ sub p_readvar {
}
}
+sub p_syntaxfile {
+ my $tokens = shift;
+ my $meta = shift;
+ my $tag_content = shift @$tokens;
+ $tag_content = strip_ws($tag_content->[1]);
+ p_expect($tokens, "TAG_END", $meta);
+ my @t = split m/\s+/, $tag_content;
+ if (scalar @t != 2){
+ parse_error("Usage of syntaxfile tag: [[[syntaxfile <filename> <language>]]]", @{$meta->{FILES}});
+ }
+
+}
+
sub p_syntax {
my $tokens = shift;
my $meta = shift;
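The new p_syntaxfile handler above only validates that it received exactly two arguments; judging from the usage message, a tag is expected to look roughly like

    [% syntaxfile somescript.pl perl %]

(or the same with [[[ ... ]]] delimiters). The file name here is invented for illustration; actually reading and highlighting the named file is not implemented in this hunk.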
@@ -461,6 +479,11 @@ sub p_syntax {
p_expect($tokens, "KEYWORD", $meta);
p_expect($tokens, "TAG_END", $meta);
+ return do_hilight($str, $lang);
+}
+
+sub do_hilight {
+ my ($str, $lang) = @_;
eval {
no warnings "all";
require Text::VimColor;
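The final hunk factors the highlighting itself out of p_syntax into a new do_hilight helper; the diff is truncated before the end of the eval block. As a rough stand-alone sketch of the shape such a helper typically takes with Text::VimColor, assuming the module is used the same way as before and falling back to the unhighlighted text when it cannot be loaded:

    use strict;
    use warnings;

    # Hypothetical stand-alone variant: highlight $str as $lang, or return it
    # unchanged if Text::VimColor (or a working vim) is not available.
    sub do_hilight {
        my ($str, $lang) = @_;
        my $html = eval {
            no warnings 'all';
            require Text::VimColor;
            Text::VimColor->new(
                string   => $str,
                filetype => $lang,
            )->html;
        };
        return defined $html ? $html : $str;
    }

    print do_hilight('my $x = 42;', 'perl'), "\n";

The example call at the end is only there to make the sketch runnable; mowyw itself presumably passes the text collected between the syntax and endsyntax tags.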