1: #import <flx.flxh>
2:
3: open List;
4:
// Token type produced by the lexer.
5: union token_t =
6: | TOK_EOF          // end of input (returned forever once the string is exhausted)
7: | TOK_PLUS         // the '+' operator
8: | TOK_INT of int   // a decimal integer literal, carrying its value
9: ;
10:
// Lex exactly one token from the range [start, finish).
// Returns the iterator positioned just past the consumed lexeme, paired
// with the recognised token.
// NOTE(review): only "+" and digit runs have rules here — behaviour on any
// other character (including whitespace) depends on reglex's no-match
// handling; TODO confirm against the Felix reglex documentation.
11: fun lexit(start:iterator) (finish:iterator):iterator*token_t =>
12:   reglex start to finish with
13:   | "+" => TOK_PLUS
14:   | ["0"-"9"]+ => TOK_INT $ int $ string_between(lexeme_start, lexeme_end)  // convert the matched digit span to int
15:   endmatch
16: ;
17:
// Generator: each call yields the next token lexed from s; once the whole
// string has been consumed it returns TOK_EOF (and, being past the final
// return, presumably keeps producing TOK_EOF on subsequent calls — TODO
// confirm Felix generator resumption semantics).
18: gen get_token (var s:string) ():token_t = {
19:   print s; endl;  // NOTE(review): debug trace of the raw input — consider removing for production use
20:   val first = Lexer::start_iterator s;   // iterator at the start of s
21:   val finish = Lexer::end_iterator s;    // one-past-the-end iterator
22:   var current = first;                   // scan position, advanced after each lexeme
23: start:>
24:   if current == finish do
25:     goto stop;                           // input exhausted
26:   done;
27:   val next, tok = lexit current finish;  // lex one token, get the new position
28:   current = next;
29:   yield tok;                             // suspend here; resume continues at the goto below
30:   goto start;
31: stop:>
32:   return TOK_EOF;
33: }
34:
// Parse-tree value for an expression: currently just an integer result.
// (Sic: "Integr" is spelled without the 'e'; the name is referenced by the
// grammar and the driver, so correcting it would require coordinated edits.)
35: union expr_t =
36: | Integr of int
37: ;
38:
// Grammar nonterminal for '+'-separated integer sums, e.g. "1+2+3".
// Left-recursive: eexpr TOK_PLUS TOK_INT folds each new integer into the
// running total, so the result is the sum of all the literals.
39: nonterm eexpr : expr_t =
40:   | xx:eexpr TOK_PLUS y:TOK_INT =>
41:     match xx with
42:     | Integr ?i => Integr (i+y)   // accumulate: previous sum plus new literal
43:     endmatch
44:
45:   | y:TOK_INT => Integr y          // base case: a single integer literal
46: ;
47:
// Drive the parser over the literal input "1+2+3" and print the result.
// Given the eexpr grammar above, a successful parse should print 6.
48: proc try_parse() {
49:   // z : 1 + int is a sum of unit and int (an option-like type);
50:   // presumably case 0 (unit) marks a failed parse and case 1 carries
51:   // the parsed value — TODO confirm against Felix `parse` docs.
52:   var z : 1 + int =
53:     parse get_token "1+2+3" with
54:     | e: eexpr => match e with | Integr ?i => i endmatch  // unwrap expr_t to the raw int
55:     endmatch
56:   ;
57:
58:   match z with
59:   | case 0 => { print "Error"; }  // no value: parse failed
60:   | case 1 (?i) => { print i; }   // success: print the computed sum
61:   endmatch;
62:   endl;
63: }
61:
// Script entry point: run the demo parse.
62: try_parse();