#!/usr/bin/env lua
-- build-utils/gentokens.lua - gen tokenize lib
--
-- Copyright © 2010 Mason Larobina <mason.larobina@gmail.com>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
-- C header template for common/tokenize.h.
-- The single %s placeholder receives the generated enum member list.
-- Declared local: this is a standalone script, and an undeclared assignment
-- would silently create a global.
local tokenize_h = [[
/* This file is autogenerated by build-utils/gentokens.lua */
#ifndef LUAKIT_COMMON_TOKENIZE_H
#define LUAKIT_COMMON_TOKENIZE_H
#include <glib/gtypes.h>
typedef enum luakit_token_t {
L_TK_UNKNOWN=0, /* (luakit_token_t) 0 == L_TK_UNKNOWN */
%s
} luakit_token_t;
__attribute__((pure)) enum luakit_token_t l_tokenize(const gchar *);
__attribute__((pure)) const gchar * token_tostring(luakit_token_t);
#endif
]]
-- C source template for common/tokenize.c.
-- The single %s placeholder receives the generated { enum, "name" }
-- initializer rows for tokens_table.
-- Declared local: this is a standalone script, and an undeclared assignment
-- would silently create a global.
local tokenize_c = [[
/* This file is autogenerated by build-utils/gentokens.lua */
#include <glib/ghash.h>
#include "common/tokenize.h"
typedef struct {
luakit_token_t tok;
const gchar *name;
} token_map_t;
token_map_t tokens_table[] = {
%s
{ 0, NULL },
};
luakit_token_t
l_tokenize(const gchar *s)
{
static GHashTable *tokens = NULL;
if (!tokens) {
tokens = g_hash_table_new(g_str_hash, g_str_equal);
for (token_map_t *t = tokens_table; t->name; t++)
g_hash_table_insert(tokens, (gpointer) t->name, (gpointer) t->tok);
}
return (luakit_token_t) g_hash_table_lookup(tokens, s);
}
const gchar *
token_tostring(luakit_token_t tok)
{
if (tok == L_TK_UNKNOWN)
return NULL;
return tokens_table[((gint)tok) - 1].name;
}
]]
-- Require exactly two CLI arguments: the token list file and the output path.
local argc = #arg
if argc ~= 2 then
    error("invalid args, usage: gentokens.lua [tokens.list] [out.c/out.h]")
end
-- Load list of tokens: one name per non-empty line of the input file.
-- tokens maps the C enum name ("L_TK_FOO") to { enum = ..., token = ... }.
local tokens = {}
for token in io.lines(arg[1]) do
    if #token > 0 then
        -- Token names become C identifiers, so restrict them to [%w_].
        if not string.match(token, "^[%w_]+$") then
            error(string.format("invalid token: %q", token))
        end
        -- 'local' added: the original leaked 'enum' as a global.
        local enum = "L_TK_"..string.upper(token)
        tokens[enum] = { enum = enum, token = token }
    end
end
-- Sort the enum names so the generated output is deterministic.
local order = {}
for enum_name in pairs(tokens) do
    order[#order + 1] = enum_name
end
table.sort(order)
-- Emit the header or the source file, selected by the output extension.
if string.match(arg[2], "%.h$") then
    -- Gen list of tokens: one "NAME," enum member per line, in sorted order.
    -- ipairs replaces pairs: pairs() iteration order is unspecified, and the
    -- generated file must follow the sorted sequence in 'order'.
    local enums = {}
    for _, k in ipairs(order) do
        table.insert(enums, string.format("%s,", k))
    end
    -- Write header file; assert() makes an unwritable path fail with a clear
    -- message instead of an "attempt to index nil" error. 'local' added: the
    -- original leaked 'fh' as a global.
    local fh = assert(io.open(arg[2], "w"))
    fh:write(string.format(tokenize_h, table.concat(enums, "\n ")))
    fh:close()
elseif string.match(arg[2], "%.c$") then
    -- Gen table of { token, "literal" } initializer rows, in sorted order.
    local tokmap = {}
    for _, k in ipairs(order) do
        local t = tokens[k]
        table.insert(tokmap, string.format('{ %s, %q },', t.enum, t.token))
    end
    -- Write source file (same open/error handling as the header branch).
    local fh = assert(io.open(arg[2], "w"))
    fh:write(string.format(tokenize_c, table.concat(tokmap, "\n ")))
    fh:close()
else
    error("Unknown action for file: " .. arg[2])
end