5 changes: 4 additions & 1 deletion Makefile
@@ -2,7 +2,7 @@ build/counter: examples/counter.c coroutine.h build/coroutine.a
 	gcc -I. -Wall -Wextra -ggdb -o build/counter examples/counter.c build/coroutine.a
 
 .PHONY: examples
-examples: build/counter build/counter_cpp build/counter_c3 build/counter_jai build/echo
+examples: build/counter build/counter_cpp build/counter_c3 build/counter_jai build/echo build/lexer
 
 build/echo: examples/echo.c3 coroutine.c3 build/coroutine.a
 	c3c compile -l build/coroutine.a -o build/echo examples/echo.c3 coroutine.c3
@@ -16,6 +16,9 @@ build/counter_c3: examples/counter.c3 coroutine.c3 build/coroutine.a
 build/counter_jai: examples/counter.jai build/coroutine.a build/coroutine.so
 	jai-linux examples/counter.jai
 
+build/lexer: examples/lexer.c coroutine.h build/coroutine.a
+	gcc -I. -Wall -Wextra -ggdb -o build/lexer examples/lexer.c build/coroutine.a
+
 build/coroutine.so: coroutine.c
 	mkdir -p build
 	gcc -Wall -Wextra -ggdb -shared -fPIC -o build/coroutine.so coroutine.c
85 changes: 85 additions & 0 deletions examples/lexer.c
@@ -0,0 +1,85 @@
// A lexer is a classic example of a use case for coroutines.
// This is a *very* simple and basic lexer that
// can lex single-digit integers, + and -.
// The example would be better if we could return values
// when we yield (kind of like a generator). But it is what it is.

#include <stdio.h>
#include <assert.h>
#include <stdbool.h>

#include <coroutine.h>

typedef enum {
TK_INT,
TK_OP,
TK_EOF
} TokenKind;

typedef union {
char tk_op;
int tk_int;
} TokenValue;

TokenKind token_kind = TK_EOF;
TokenValue token_value = {0};

void lex(void* input_void) {
if (input_void == NULL) return;

const char* input = input_void;

while(true) {
switch(*input) {
// Numba
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9': {
token_kind = TK_INT;
token_value.tk_int = *input - '0';
} break;

// Operators
case '+': case '-': {
token_kind = TK_OP;
token_value.tk_op = *input;
} break;

default: {
token_kind = TK_EOF;
return;
}
}
input++;

// For every token we produce, we yield control back to the caller (a parser, I guess).
coroutine_yield();
}
}

int main(int argc, char* argv[]){
if (argc != 2) {
printf("Usage: %s <input-text>\n", argv[0]);
return 1;
}

coroutine_init();
{
coroutine_go(lex, argv[1]);

// Consume those tokens
bool quit = false;
while(!quit && coroutine_alive() > 1){
// Yield control to the lexer.
// It will lex and yield control back to here.
coroutine_yield();
switch(token_kind){
case TK_INT: { printf("TK_INT: %d\n", token_value.tk_int); } break;
case TK_OP: { printf("TK_OP: %c\n", token_value.tk_op); } break;
default: { printf("Done!\n"); quit = true; } break;
}
}
}
coroutine_finish();

return 0;
}
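
Not part of this PR, but as a sketch of the "parser, I guess" side that the comment in lex() alludes to: since coroutine_yield() cannot carry a value, the token_kind/token_value globals act as a one-slot channel between the lexer coroutine and its consumer. The hypothetical eval.c below pulls tokens through that channel and evaluates expressions like "1+2-3" instead of just printing them. It assumes only the coroutine.h API already used above (coroutine_init, coroutine_go, coroutine_yield, coroutine_alive, coroutine_finish) and copies the lexer body so the file stands alone; the file name and next_token() helper are made up for this illustration.

// eval.c (hypothetical, not part of this PR): evaluate "1+2-3"-style input
// by consuming tokens from the lex() coroutine via the shared globals.

#include <stdio.h>
#include <stdbool.h>

#include <coroutine.h>

typedef enum {
    TK_INT,
    TK_OP,
    TK_EOF
} TokenKind;

typedef union {
    char tk_op;
    int tk_int;
} TokenValue;

// One-slot "channel" between the lexer coroutine and the consumer.
TokenKind token_kind = TK_EOF;
TokenValue token_value = {0};

// Same lexer coroutine as in examples/lexer.c.
void lex(void* input_void) {
    if (input_void == NULL) return;

    const char* input = input_void;

    while (true) {
        switch (*input) {
        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
            token_kind = TK_INT;
            token_value.tk_int = *input - '0';
            break;

        case '+': case '-':
            token_kind = TK_OP;
            token_value.tk_op = *input;
            break;

        default:
            token_kind = TK_EOF;
            return;
        }
        input++;

        coroutine_yield(); // hand the token over to the consumer
    }
}

// Resume the lexer and report whether it produced another token.
static bool next_token(void) {
    if (coroutine_alive() <= 1) return false;
    coroutine_yield();
    return token_kind != TK_EOF;
}

int main(int argc, char* argv[]) {
    if (argc != 2) {
        printf("Usage: %s <input-text>\n", argv[0]);
        return 1;
    }

    coroutine_init();
    {
        coroutine_go(lex, argv[1]);

        int result = 0;
        char pending_op = '+'; // treat the leading integer as "+ <int>"

        while (next_token()) {
            if (token_kind == TK_INT) {
                if (pending_op == '+') result += token_value.tk_int;
                else                   result -= token_value.tk_int;
            } else { // TK_OP
                pending_op = token_value.tk_op;
            }
        }

        printf("%s = %d\n", argv[1], result);
    }
    coroutine_finish();

    return 0;
}

Built the same way as build/lexer, running it as ./eval "1+2-3" would print "1+2-3 = 0".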