Commit 82bfcf017ae337b90b1d721a379309ca49aae764
1 parent
e70f9a3c
-
Showing
13 changed files
with
83 additions
and
54 deletions
Show diff stats
anubis_dev/compiler/src/predef.c
| ... | ... | @@ -185,7 +185,7 @@ void do_predefinitions1(void) |
| 185 | 185 | |
| 186 | 186 | void do_predefinitions2(void) |
| 187 | 187 | { |
| 188 | - /* include the following file produced by: 'anubis -predef' */ | |
| 188 | + /* include the following file produced by: 'myanubis -predef' */ | |
| 189 | 189 | #ifdef _no_predef_dat_ |
| 190 | 190 | #include "predef_npd.aux" |
| 191 | 191 | #endif | ... | ... |
anubis_dev/library/lexical_analysis/fast_lexer.anubis
| ... | ... | @@ -1397,7 +1397,7 @@ define LexerOutput($Token) |
| 1397 | 1397 | |
| 1398 | 1398 | /* almost the same thing for accepted */ |
| 1399 | 1399 | accepted(s,a,start,end) then |
| 1400 | - print("low level accepted start = "+to_decimal(start)+" end = "+to_decimal(end)+"\n"); | |
| 1400 | + //print("low level accepted start = "+to_decimal(start)+" end = "+to_decimal(end)+"\n"); | |
| 1401 | 1401 | last_accept_v <- last(s,end); |
| 1402 | 1402 | current_v <- end; |
| 1403 | 1403 | if a is | ... | ... |
anubis_dev/library/test/predefined/fast_lexer.unit_test.anubis
| ... | ... | @@ -106,6 +106,9 @@ define One |
| 106 | 106 | assertIsSame(ut, |
| 107 | 107 | test_fast_lexer(lexer_1,to_byte_array(" gggabuzobof"),none,0,0,0), |
| 108 | 108 | accepted(6,not_at_end_of_input,2,10), "6ter"); |
| 109 | + assertIsSame(ut, | |
| 110 | + test_fast_lexer(lexer_1,to_byte_array(" "),none,0,0,0), | |
| 111 | + ignored(7,3), "7"); | |
| 109 | 112 | |
| 110 | 113 | unique. |
| 111 | 114 | ... | ... |
anubis_distrib/library/examples/OOCounter.anubis.cpp
anubis_distrib/library/examples/logic/peano.anubis
| ... | ... | @@ -3,11 +3,11 @@ |
| 3 | 3 | |
| 4 | 4 | *Title* Playing with Peano numbers. |
| 5 | 5 | |
| 6 | - *Copyright* Copyright (c) Alain Prouté 2005. | |
| 6 | + *Copyright* Copyright (c) Alain Prouté 2005. | |
| 7 | 7 | |
| 8 | 8 | *Released* |
| 9 | 9 | |
| 10 | - *Author* Alain Prouté | |
| 10 | + *Author* Alain Prouté | |
| 11 | 11 | |
| 12 | 12 | |
| 13 | 13 | This file contains several classical exercices on Peano numbers and recursive |
| ... | ... | @@ -325,7 +325,7 @@ global define One |
| 325 | 325 | ( |
| 326 | 326 | List(String) args |
| 327 | 327 | ) = |
| 328 | - with primes = first_primes(_100), | |
| 328 | + with primes = first_primes(_500), | |
| 329 | 329 | forget(map((Peano p) |-> print(to_string(p)+" "),reverse(primes))); |
| 330 | 330 | print("\n"). |
| 331 | 331 | ... | ... |
anubis_distrib/library/examples/mathematics/runge_kutta.anubis
anubis_distrib/library/lexical_analysis/fast_lexer.anubis
| ... | ... | @@ -1404,7 +1404,7 @@ define LexerOutput($Token) |
| 1404 | 1404 | { |
| 1405 | 1405 | not_at_end_of_input then |
| 1406 | 1406 | /* the lexeme just read must be accepted: the action is applied */ |
| 1407 | - update_start_line_col(*buffer_v,start,start_v,line_v,col_v); | |
| 1407 | + update_start_line_col(*buffer_v,end,start_v,line_v,col_v); | |
| 1408 | 1408 | last_accept_v <- none; |
| 1409 | 1409 | (*actions(word32(s,0)))(extract(*buffer_v,start,end),*line_v,*col_v), |
| 1410 | 1410 | ... | ... |
anubis_distrib/library/lexical_analysis/fast_lexer_example_1.anubis
| ... | ... | @@ -125,7 +125,7 @@ define Maybe(LexingStream) |
| 125 | 125 | failure then failure, |
| 126 | 126 | success(f) then make_lexing_stream("", /* no preambule */ |
| 127 | 127 | f, /* the opened file */ |
| 128 | - 200, /* size of buffer for the lexing stream */ | |
| 128 | + 1000, /* size of buffer for the lexing stream */ | |
| 129 | 129 | 100) /* timeout (seconds) */ |
| 130 | 130 | }. |
| 131 | 131 | ... | ... |
anubis_distrib/library/predefined.anubis
| ... | ... | @@ -4402,16 +4402,15 @@ public type FastLexerState: |
| 4402 | 4402 | transition('z',1)]), |
| 4403 | 4403 | |
| 4404 | 4404 | /* state 1 */ |
| 4405 | - accepting([transition(' ',2),transition('\t',2),transition('\n',2), | |
| 4406 | - transition('a',1), | |
| 4405 | + accepting([transition('a',1), | |
| 4407 | 4406 | ... |
| 4408 | 4407 | transition('z',1)]), |
| 4409 | 4408 | /* state 2 */ |
| 4410 | 4409 | ignoring([]) |
| 4411 | 4410 | ] |
| 4412 | 4411 | |
| 4413 | - Notice that a state is identified by its rank in the list. The starting state is always | |
| 4414 | - state 0. Also notice that ignoring states may have transitions. Indeed, if we want to | |
| 4412 | + Notice that a state is identified by its rank in the list. The starting state is always state 0. | |
| 4413 | + Also notice that ignoring states may have transitions. Indeed, if we want to | |
| 4415 | 4414 | detect sequences of at least two 'a' and ignore anything else, we may have an automaton |
| 4416 | 4415 | like this one: |
| 4417 | 4416 | |
| ... | ... | @@ -4446,9 +4445,8 @@ public type AtEndOfInput: |
| 4446 | 4445 | not_at_end_of_input, |
| 4447 | 4446 | at_end_of_input. |
| 4448 | 4447 | |
| 4449 | - Each (low level) fast lexer is actually a function taking a byte array as a source of | |
| 4450 | - bytes, a position (of type 'Int') within this byte array, and the name of a starting | |
| 4451 | - state, and returning a result of the following type: | |
| 4448 | + Each (low level) fast lexer is actually a function taking several arguments (see below) | |
| 4449 | + and returning a result of the following type: | |
| 4452 | 4450 | |
| 4453 | 4451 | public type FastLexerOutput: |
| 4454 | 4452 | rejected(Word16 where, AtEndOfInput, Int start, Int end), |
| ... | ... | @@ -4466,7 +4464,7 @@ public type FastLexerOutput: |
| 4466 | 4464 | the last accepted token within the input byte array. |
| 4467 | 4465 | |
| 4468 | 4466 | (2) there is no last accepted position: it returns: rejected(w,a,s,e) where |
| 4469 | - 's' and 'e' are the starting and end positions of the recognized prefix. | |
| 4467 | + 's' and 'e' are the starting and end positions of what has been read. | |
| 4470 | 4468 | |
| 4471 | 4469 | If 'a' is 'not_at_end_of_input', the above must be interpreted textually, i.e. |
| 4472 | 4470 | 'accepted' means accepted, and 'rejected' means rejected. However, if 'a' is | ... | ... |
anubis_distrib/library/test/predefined/fast_lexer.unit_test.anubis
| ... | ... | @@ -21,41 +21,54 @@ define FastLexerOutput |
| 21 | 21 | List(FastLexerState) lexer, |
| 22 | 22 | ByteArray input, |
| 23 | 23 | FastLexerLastAccepted last_accepted, |
| 24 | - Int position, | |
| 24 | + Int position, | |
| 25 | + Int start, | |
| 25 | 26 | Word16 state |
| 26 | 27 | ) = |
| 27 | 28 | if make_fast_lexer(lexer) is |
| 28 | 29 | { |
| 29 | - unknown_state(n) then print("fast lexer unknown state: "+to_decimal(n)+"\n"); alert, | |
| 30 | - too_many_states then print("too many states.\n"); alert, | |
| 31 | - ok(fl) then with result = fl(input,last_accepted,position,state), | |
| 32 | - | |
| 30 | + unknown_state(n) then print("fast lexer unknown state: "+to_decimal(n)+"\n"); should_not_happen(ignored(0,0)), | |
| 31 | + too_many_states then print("too many states.\n"); should_not_happen(ignored(0,0)), | |
| 32 | + ok(fl) then with result = fl(input,last_accepted,position,start,state), | |
| 33 | + print("Result = "); | |
| 33 | 34 | if result is |
| 34 | 35 | { |
| 35 | - rejected(w,e,a) then | |
| 36 | - print("\nrejected("+to_decimal(w)+","+abs_to_decimal(e)+","+format(a)+")\n"), | |
| 37 | - accepted(w,e,a,_) then | |
| 38 | - print("\naccepted("+to_decimal(w)+","+abs_to_decimal(e)+","+format(a)+")\n") | |
| 36 | + rejected(w,eoi,s,e) then | |
| 37 | + print("rejected("+to_decimal(w)+"," | |
| 38 | + +format(eoi)+"," | |
| 39 | + +abs_to_decimal(s)+"," | |
| 40 | + +abs_to_decimal(e) | |
| 41 | + +")\n"), | |
| 42 | + accepted(w,eoi,s,e) then | |
| 43 | + print("accepted("+to_decimal(w)+"," | |
| 44 | + +format(eoi)+"," | |
| 45 | + +abs_to_decimal(s)+"," | |
| 46 | + +abs_to_decimal(e) | |
| 47 | + +")\n"), | |
| 48 | + ignored(w,s) then | |
| 49 | + print("ignored("+to_decimal(w)+"," | |
| 50 | + +abs_to_decimal(s) | |
| 51 | + +")\n") | |
| 39 | 52 | }; |
| 40 | - | |
| 41 | 53 | result |
| 42 | 54 | }. |
| 43 | 55 | |
| 44 | 56 | |
| 45 | 57 | |
| 46 | - A lexer accepting "g+abu", "g+abuzo", | |
| 58 | + A lexer accepting "g+abu", "g+abuzo", and ignoring blanks | |
| 47 | 59 | |
| 48 | 60 | define List(FastLexerState) |
| 49 | 61 | lexer_1 |
| 50 | 62 | = |
| 51 | 63 | [ |
| 52 | - /* state 0 */ rejecting([transition('g',1)]), | |
| 64 | + /* state 0 */ rejecting([transition('g',1),transition(' ',7)]), | |
| 53 | 65 | /* state 1 */ rejecting([transition('a',2),transition('g',1)]), |
| 54 | 66 | /* state 2 */ rejecting([transition('b',3)]), |
| 55 | 67 | /* state 3 */ rejecting([transition('u',4)]), |
| 56 | 68 | /* state 4 */ accepting([transition('z',5)]), |
| 57 | 69 | /* state 5 */ rejecting([transition('o',6)]), |
| 58 | - /* state 6 */ accepting([ ]) | |
| 70 | + /* state 6 */ accepting([ ]), | |
| 71 | + /* state 7 */ ignoring([ ]) | |
| 59 | 72 | ]. |
| 60 | 73 | |
| 61 | 74 | |
| ... | ... | @@ -67,32 +80,39 @@ define One |
| 67 | 80 | |
| 68 | 81 | /* starting in state 0, position 0 */ |
| 69 | 82 | assertIsSame(ut, |
| 70 | - test_fast_lexer(lexer_1,to_byte_array("gab"),none,0,0), | |
| 71 | - rejected(3,3,at_end_of_input), "1"); | |
| 72 | - unique. | |
| 73 | - | |
| 74 | - | |
| 83 | + test_fast_lexer(lexer_1,to_byte_array("gab"),none,0,0,0), | |
| 84 | + rejected(3,at_end_of_input,0,3), "1"); | |
| 75 | 85 | assertIsSame(ut, |
| 76 | - test_fast_lexer(lexer_1,to_byte_array("gabu"),none,bol,neol,0,0), | |
| 77 | - accepted(4,4,at_end_of_input), "2"); | |
| 86 | + test_fast_lexer(lexer_1,to_byte_array("gabu"),none,0,0,0), | |
| 87 | + accepted(4,at_end_of_input,0,4), "2"); | |
| 78 | 88 | assertIsSame(ut, |
| 79 | - test_fast_lexer(lexer_1,to_byte_array("ggggabu"),none,bol,neol,0,0), | |
| 80 | - accepted(4,7,at_end_of_input), "2bis"); | |
| 89 | + test_fast_lexer(lexer_1,to_byte_array("ggggabu"),none,0,0,0), | |
| 90 | + accepted(4,at_end_of_input,0,7), "2bis"); | |
| 81 | 91 | assertIsSame(ut, |
| 82 | - test_fast_lexer(lexer_1,to_byte_array("gabuz"),none,bol,neol,0,0), | |
| 83 | - accepted(4,4,at_end_of_input), "3"); | |
| 92 | + test_fast_lexer(lexer_1,to_byte_array("gabuz"),none,0,0,0), | |
| 93 | + accepted(5,at_end_of_input,0,4), "3"); | |
| 84 | 94 | assertIsSame(ut, |
| 85 | - test_fast_lexer(lexer_1,to_byte_array("gmbuz"),none,bol,neol,0,0), | |
| 86 | - rejected(1,1,not_at_end_of_input),"4"); | |
| 95 | + test_fast_lexer(lexer_1,to_byte_array("gmbuz"),none,0,0,0), | |
| 96 | + rejected(1,not_at_end_of_input,0,2),"4"); | |
| 87 | 97 | assertIsSame(ut, |
| 88 | - test_fast_lexer(lexer_1,to_byte_array("gabuzo"),none,bol,neol,0,0), | |
| 89 | - accepted(6,6,at_end_of_input), "5"); | |
| 98 | + test_fast_lexer(lexer_1,to_byte_array("gabuzo"),none,0,0,0), | |
| 99 | + accepted(6,at_end_of_input,0,6), "5"); | |
| 90 | 100 | assertIsSame(ut, |
| 91 | - test_fast_lexer(lexer_1,to_byte_array("gabuzobof"),none,bol,neol,0,0), | |
| 92 | - accepted(6,6,not_at_end_of_input), "6"); | |
| 101 | + test_fast_lexer(lexer_1,to_byte_array("gabuzobof"),none,0,0,0), | |
| 102 | + accepted(6,not_at_end_of_input,0,6), "6"); | |
| 93 | 103 | assertIsSame(ut, |
| 94 | - test_fast_lexer(lexer_1,to_byte_array("gggggabuzobof"),none,bol,neol,0,0), | |
| 95 | - accepted(6,10,not_at_end_of_input), "6bis"); | |
| 104 | + test_fast_lexer(lexer_1,to_byte_array("gggggabuzobof"),none,0,0,0), | |
| 105 | + accepted(6,not_at_end_of_input,0,10), "6bis"); | |
| 106 | + assertIsSame(ut, | |
| 107 | + test_fast_lexer(lexer_1,to_byte_array(" gggabuzobof"),none,0,0,0), | |
| 108 | + accepted(6,not_at_end_of_input,2,10), "6ter"); | |
| 109 | + assertIsSame(ut, | |
| 110 | + test_fast_lexer(lexer_1,to_byte_array(" "),none,0,0,0), | |
| 111 | + ignored(7,3), "7"); | |
| 112 | + | |
| 113 | + unique. | |
| 114 | + | |
| 115 | + | |
| 96 | 116 | |
| 97 | 117 | /* restarting from some other state (with or without an already accepted position) */ |
| 98 | 118 | |
| ... | ... | @@ -128,3 +148,9 @@ public define UnitTestSuite |
| 128 | 148 | ]). |
| 129 | 149 | |
| 130 | 150 | |
| 151 | +global define One | |
| 152 | + fast_lexer_tests | |
| 153 | + (List(String) args) = | |
| 154 | + execute_tests([make_fast_lexer_test_suite],args). | |
| 155 | + | |
| 156 | + | ... | ... |
anubis_distrib/library/web/making_a_web_site.anubis
anubis_distrib/library/web/mime.anubis
| 1 | - | |
| 1 | + | |
| 2 | 2 | *Project* The Anubis Project |
| 3 | 3 | |
| 4 | 4 | *Title* MIME Types definition. |
| ... | ... | @@ -68,5 +68,6 @@ public define String |
| 68 | 68 | String charset, |
| 69 | 69 | String text |
| 70 | 70 | )= |
| 71 | - with text1 = "=?"+charset+"?B?"+to_string(base64_encode(to_byte_array(text)))+"?=", | |
| 71 | +// with text1 = "=?"+charset+"?B?"+to_string(base64_encode(to_byte_array(text)))+"?=", | |
| 72 | + with text1 = text, | |
| 72 | 73 | find_and_replace(text1, implode([13,10]), implode([13,10,32])). | ... | ... |
anubis_distrib/library/web/multihost_http_server.anubis
| 1 | - | |
| 1 | + | |
| 2 | 2 | *Project* The Anubis Project |
| 3 | 3 | |
| 4 | 4 | *Title* A Multi Host HTTP/HTTPS Server |
| ... | ... | @@ -851,11 +851,12 @@ define String |
| 851 | 851 | Web_Site_Description desc, |
| 852 | 852 | List(HTTP_header) headers, |
| 853 | 853 | ) = |
| 854 | + with all = member(journal_headers(desc),"*"), | |
| 854 | 855 | if headers is |
| 855 | 856 | { |
| 856 | 857 | [ ] then "", |
| 857 | 858 | [h . t] then if h is http_header(name,value) then |
| 858 | - if member(journal_headers(desc),name) | |
| 859 | + if (all|member(journal_headers(desc),name)) | |
| 859 | 860 | then " | "+name+": "+value+"\n"+show_format(desc,t) |
| 860 | 861 | else show_format(desc,t) |
| 861 | 862 | }. | ... | ... |