lex.c
/* $Id: lex.c,v 1.11 2002/08/29 19:38:52 dijkstra Exp $ */
/*
 * Copyright (c) 2001-2002 Willem Dijkstra
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above
 *   copyright notice, this list of conditions and the following
 *   disclaimer in the documentation and/or other materials provided
 *   with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */
/*
 * This lexical analyser was written to be smaller than flex and with
 * fewer features. Its attributes, in no particular order: capable of
 * multiple instances, one token lookahead, strings delimited by ' or ",
 * comments starting anywhere with # and lasting until end of line,
 * maximum token size = _POSIX2_LINE_MAX. Tokens are defined in lex.h;
 * the mapping of tokens to ascii happens here.
 *
 * Usage:
 *
 *     l = open_lex(filename);
 *     while (lex_nexttoken(l)) {
 *         use l->token, l->op, l->value
 *     }
 *     close_lex(l);
 */
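/*
 * A slightly fuller usage sketch (illustrative only; the file name
 * "symon.conf" and the handling of a failed open are assumptions, not
 * part of this module):
 *
 *     struct lex *l;
 *
 *     if ((l = open_lex("symon.conf")) == NULL)
 *         fatal("could not open configuration file");
 *
 *     while (lex_nexttoken(l)) {
 *         switch (l->op) {
 *         case LXT_MUX:
 *             ... consume the mux arguments with further lex_nexttoken() calls ...
 *             break;
 *         default:
 *             parse_error(l, "mux");
 *         }
 *     }
 *     close_lex(l);
 */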
#include <sys/types.h>

#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "xmalloc.h"
#include "lex.h"
#include "error.h"

static struct {
    const char *name;
    int opcode;
} keywords[] = {
    { "{", LXT_BEGIN },
    { "}", LXT_END },
    { "(", LXT_OPEN },
    { ")", LXT_CLOSE },
    { ",", LXT_COMMA },
    { ":", LXT_COLON },
    { "accept", LXT_ACCEPT },
    { "cpu", LXT_CPU },
    { "if", LXT_IF },
    { "in", LXT_IN },
    { "io", LXT_IO },
    { "mem", LXT_MEM },
    { "monitor", LXT_MONITOR },
    { "mux", LXT_MUX },
    { "pf", LXT_PF },
    { "port", LXT_PORT },
    { "source", LXT_SOURCE },
    { "stream", LXT_STREAM },
    { "to", LXT_TO },
    { "write", LXT_WRITE },
    { NULL, 0 }
};

#define KW_OPS "{},():"
/* Return the opcode of the token pointed to by cp, or LXT_BADTOKEN */
int
parse_token(const char *cp)
{
    u_int i;

    for (i = 0; keywords[i].name; i++)
        if (strcasecmp(cp, keywords[i].name) == 0)
            return keywords[i].opcode;

    return LXT_BADTOKEN;
}

/* Return the ascii representation of an opcode */
const char *
parse_opcode(const int op)
{
    u_int i;

    for (i = 0; keywords[i].name; i++)
        if (keywords[i].opcode == op)
            return keywords[i].name;

    return NULL;
}
/* Read a line and increase buffer if needed */
int
lex_readline(struct lex *l)
{
    char *bp;

    bp = l->buffer;

    if (l->buffer) {
        if ((l->curpos < l->endpos) &&
            ((l->bsize - l->endpos) < _POSIX2_LINE_MAX)) {
            /* unread data left, but not enough room for another line: grow */
            l->bsize += _POSIX2_LINE_MAX;
            l->buffer = xrealloc(l->buffer, l->bsize);
            /* recompute bp from the (possibly moved) buffer */
            bp = l->buffer + l->endpos;
        } else {
            /* everything consumed; reuse the buffer from the start */
            l->curpos = 0;
            l->endpos = 0;
        }
    } else {
        l->bsize = _POSIX2_LINE_MAX;
        l->buffer = xmalloc(l->bsize);
        bp = l->buffer;
    }

    if (!fgets(bp, (l->buffer + l->bsize) - bp, l->fh))
        return 0;
    else {
        l->endpos += strlen(bp) - 1;
        return 1;
    }
}
/* Copy char out of input stream */
void
lex_copychar(struct lex *l)
{
    l->token[l->tokpos] = l->buffer[l->curpos];

    if (++l->tokpos >= _POSIX2_LINE_MAX) {
        l->token[_POSIX2_LINE_MAX - 1] = '\0';
        fatal("%s:%d: parse error at '%s'", l->filename, l->cline, l->token);
        /* NOT REACHED */
    }
}

/* Get next char, read next line if needed */
int
lex_nextchar(struct lex *l)
{
    l->curpos++;

    if (l->curpos > l->endpos)
        if (!lex_readline(l))
            return 0;

    if (l->buffer[l->curpos] == '\n')
        l->cline++;

    return 1;
}

/* Close off the current token with a '\0' */
void
lex_termtoken(struct lex *l)
{
    l->token[l->tokpos] = l->token[_POSIX2_LINE_MAX - 1] = '\0';
    l->tokpos = 0;
}
/* Unget token; the lexer allows one token of lookahead */
void
lex_ungettoken(struct lex *l)
{
    l->unget = 1;
}
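/*
 * Sketch of the one-token lookahead this enables (illustrative; the
 * LXT_PORT check is just an example of a caller peeking at a token):
 *
 *     lex_nexttoken(l);
 *     if (l->op != LXT_PORT)
 *         lex_ungettoken(l);
 *
 * After the unget, the next lex_nexttoken() call returns the same token
 * again instead of consuming new input.
 */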
/* Get the next token in lex->token. Return 0 if no more tokens found. */
int
lex_nexttoken(struct lex *l)
{
    /* return same token as last time if it has been pushed back */
    if (l->unget) {
        l->unget = 0;
        return 1;
    }

    l->op = LXT_BADTOKEN;
    l->value = 0;
    l->type = LXY_UNKNOWN;

    /* find first non whitespace */
    while (l->buffer[l->curpos] == ' ' ||
           l->buffer[l->curpos] == '\t' ||
           l->buffer[l->curpos] == '\r' ||
           l->buffer[l->curpos] == '\n' ||
           l->buffer[l->curpos] == '\0' ||
           l->buffer[l->curpos] == '#') {
        /* flush rest of line if comment */
        if (l->buffer[l->curpos] == '#') {
            while (l->buffer[l->curpos] != '\n')
                if (!lex_nextchar(l))
                    return 0;
        } else
            if (!lex_nextchar(l))
                return 0;
    }

    l->type = LXY_STRING;

    /* "delimited string" */
    if (l->buffer[l->curpos] == '"') {
        if (!lex_nextchar(l)) {
            warning("%s:%d: unbalanced '\"'", l->filename, l->cline);
            return 0;
        }
        while (l->buffer[l->curpos] != '"') {
            lex_copychar(l);
            if (!lex_nextchar(l)) {
                warning("%s:%d: unbalanced '\"'", l->filename, l->cline);
                return 0;
            }
        }
        lex_termtoken(l);
        lex_nextchar(l);
        return 1;
    }

    /* 'delimited string' */
    if (l->buffer[l->curpos] == '\'') {
        if (!lex_nextchar(l)) {
            warning("%s:%d: unbalanced \"\'\"", l->filename, l->cline);
            return 0;
        }
        while (l->buffer[l->curpos] != '\'') {
            lex_copychar(l);
            if (!lex_nextchar(l)) {
                warning("%s:%d: unbalanced \"\'\"", l->filename, l->cline);
                return 0;
            }
        }
        lex_termtoken(l);
        lex_nextchar(l);
        return 1;
    }

    /* one char keyword */
    if (strchr(KW_OPS, l->buffer[l->curpos])) {
        lex_copychar(l);
        lex_termtoken(l);
        l->op = parse_token(l->token);
        lex_nextchar(l);
        return 1;
    }

    /* single keyword */
    while (l->buffer[l->curpos] != ' ' &&
           l->buffer[l->curpos] != '\t' &&
           l->buffer[l->curpos] != '\r' &&
           l->buffer[l->curpos] != '\n' &&
           l->buffer[l->curpos] != '\0' &&
           l->buffer[l->curpos] != '#' &&
           (strchr(KW_OPS, l->buffer[l->curpos]) == NULL)) {
        lex_copychar(l);
        if (!lex_nextchar(l))
            break;
    }
    lex_termtoken(l);
    l->op = parse_token(l->token);

    /* number */
    if (l->token[0] >= '0' && l->token[0] <= '9') {
        if (strlen(l->token) == strspn(l->token, "0123456789")) {
            l->type = LXY_NUMBER;
            l->value = strtol(l->token, NULL, 10);
        }
    }

    return 1;
}
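/*
 * Illustrative example (the input line is made up, not taken from the
 * configuration grammar): for a line like
 *
 *     mux "127.0.0.1" port 2100
 *
 * successive lex_nexttoken() calls yield "mux" (op = LXT_MUX), the quoted
 * string "127.0.0.1" (op = LXT_BADTOKEN, type = LXY_STRING), "port"
 * (op = LXT_PORT) and "2100" (type = LXY_NUMBER, value = 2100).
 */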
/* Create and initialize a lexical analyser */
struct lex *
open_lex(const char *filename)
{
    struct lex *l;

    l = xmalloc(sizeof(struct lex));
    l->buffer = NULL;
    l->cline = 1;
    l->curpos = 0;
    l->endpos = 0;
    l->filename = filename;
    l->op = LXT_BADTOKEN;
    l->token = xmalloc(_POSIX2_LINE_MAX);
    l->tokpos = 0;
    l->type = LXY_UNKNOWN;
    l->unget = 0;
    l->value = 0;

    if ((l->fh = fopen(l->filename, "r")) == NULL) {
        warning("could not open file '%s':%s",
                l->filename, strerror(errno));
        xfree(l->token);    /* avoid leaking the token buffer */
        xfree(l);
        return NULL;
    }

    lex_nextchar(l);
    return l;
}
/* Destroy a lexical analyser */
void
close_lex(struct lex *l)
{
    if (l == NULL)
        return;

    if (l->fh)
        fclose(l->fh);
    if (l->buffer)
        xfree(l->buffer);
    if (l->token)
        xfree(l->token);

    xfree(l);
}

/* Signal a parse error */
void
parse_error(struct lex *l, const char *s)
{
    warning("%s:%d: expected %s (found '%.8s')",
            l->filename, l->cline, s, l->token);
}