/* ntp_scanner.c
 *
 * The source code for a simple lexical analyzer.
 *
 * Written By:	Sachin Kamboj
 *		University of Delaware
 *		Newark, DE 19711
 * Copyright (c) 2006
 */

#ifdef HAVE_CONFIG_H
# include <config.h>
#endif

#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>

#include "ntpd.h"
#include "ntp_config.h"
#include "ntpsim.h"
#include "ntp_scanner.h"
#include "ntp_parser.h"

/* ntp_keyword.h declares finite state machine and token text */
#include "ntp_keyword.h"



/* SCANNER GLOBAL VARIABLES
 * ------------------------
 */

#define MAX_LEXEME (1024 + 1)	/* The maximum size of a lexeme */
char yytext[MAX_LEXEME];	/* Buffer for storing the input text/lexeme */
u_int32 conf_file_sum;		/* Simple sum of characters read */

static struct FILE_INFO * lex_stack = NULL;



/* CONSTANTS
 * ---------
 */
const char special_chars[] = "{}(),;|=";


/* FUNCTIONS
 * ---------
 */

static int is_keyword(char *lexeme, follby *pfollowedby);


/*
 * keyword() - Return the keyword associated with token T_ identifier.
 *	       See also token_name() for the string-ized T_ identifier.
 *	       Example: keyword(T_Server) returns "server"
 *			token_name(T_Server) returns "T_Server"
 */
const char *
keyword(
	int token
	)
{
	size_t i;
	const char *text;

	i = token - LOWEST_KEYWORD_ID;

	if (i < COUNTOF(keyword_text))
		text = keyword_text[i];
	else
		text = NULL;

	return (text != NULL)
	    ? text
	    : "(keyword not found)";
}


/* FILE & STRING BUFFER INTERFACE
 * ------------------------------
 *
 * This started out as a couple of wrapper functions around the
 * standard C fgetc and ungetc functions in order to include positional
 * bookkeeping. Alas, this is no longer a good solution with nested
 * input files and the possibility to send configuration commands via
 * 'ntpdc' and 'ntpq'.
 *
 * Now there are a few functions to maintain a stack of nested input
 * sources (though nesting is only allowed for disk files) and from the
 * scanner / parser point of view there's no difference between the two
 * types of sources.
 *
 * The 'fgetc()' / 'ungetc()' replacements now operate on a FILE_INFO
 * structure. Instead of trying different 'ungetc()' strategies for file
 * and buffer based parsing, we keep the backup char in our own
 * FILE_INFO structure. This is sufficient, as the parser does *not*
 * jump around via 'seek' or the like, and there's no need to
 * check/clear the backup store in other places than 'lex_getch()'.
 */
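/*
 * Editorial sketch (not part of the original source): regardless of
 * whether the active input source is a disk file or the remote config
 * buffer, the scanner below drives it through the same two calls, with
 * at most one character of push-back.  'want_more' and 'collect' are
 * hypothetical helpers used only for illustration:
 *
 *	int ch;
 *	while (EOF != (ch = lex_getch(lex_stack)) && want_more(ch))
 *		collect(ch);
 *	lex_ungetch(ch, lex_stack);	// give back the terminating char
 */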
/*
 * Allocate an info structure and attach it to a file.
 *
 * Note: When 'mode' is NULL, then the INFO block will be set up to
 * contain a NULL file pointer, as suited for remote config command
 * parsing. Otherwise having a NULL file pointer is considered an error,
 * and a NULL info block pointer is returned to indicate failure!
 *
 * Note: We use a variable-sized structure to hold a copy of the file
 * name (or, more properly, the input source description). This is more
 * secure than keeping a reference to some other storage that might go
 * out of scope.
 */
static struct FILE_INFO *
lex_open(
	const char *path,
	const char *mode
	)
{
	struct FILE_INFO *stream;
	size_t nnambuf;

	nnambuf = strlen(path);
	stream = emalloc_zero(sizeof(*stream) + nnambuf);
	stream->curpos.nline = 1;
	stream->backch = EOF;
	/* copy name with memcpy -- trailing NUL already there! */
	memcpy(stream->fname, path, nnambuf);

	if (NULL != mode) {
		stream->fpi = fopen(path, mode);
		if (NULL == stream->fpi) {
			free(stream);
			stream = NULL;
		}
	}
	return stream;
}

/* Get the next character from buffer or file. This will return any
 * putback character first; it will also make sure the last line is at
 * least virtually terminated with a '\n'.
 */
static int
lex_getch(
	struct FILE_INFO *stream
	)
{
	int ch;

	if (NULL == stream || stream->force_eof)
		return EOF;

	if (EOF != stream->backch) {
		ch = stream->backch;
		stream->backch = EOF;
		if (stream->fpi)
			conf_file_sum += ch;
		stream->curpos.ncol++;
	} else if (stream->fpi) {
		/* fetch next 7-bit ASCII char (or EOF) from file */
		while ((ch = fgetc(stream->fpi)) != EOF && ch > SCHAR_MAX)
			stream->curpos.ncol++;
		if (EOF != ch) {
			conf_file_sum += ch;
			stream->curpos.ncol++;
		}
	} else {
		/* fetch next 7-bit ASCII char from buffer */
		const char * scan;

		scan = &remote_config.buffer[remote_config.pos];
		while ((ch = (u_char)*scan) > SCHAR_MAX) {
			scan++;
			stream->curpos.ncol++;
		}
		if ('\0' != ch) {
			scan++;
			stream->curpos.ncol++;
		} else {
			ch = EOF;
		}
		remote_config.pos = (int)(scan - remote_config.buffer);
	}

	/* If the last line ends without '\n', generate one. This
	 * happens most likely on Windows, where editors often have a
	 * sloppy concept of a line.
	 */
	if (EOF == ch && stream->curpos.ncol != 0)
		ch = '\n';

	/* update scan position tallies */
	if (ch == '\n') {
		stream->bakpos = stream->curpos;
		stream->curpos.nline++;
		stream->curpos.ncol = 0;
	}

	return ch;
}

/* Note: lex_ungetch will fail to track more than one line of push
 * back. But since it guarantees only one char of back storage anyway,
 * this should not be a problem.
 */
static int
lex_ungetch(
	int ch,
	struct FILE_INFO *stream
	)
{
	/* check preconditions */
	if (NULL == stream || stream->force_eof)
		return EOF;
	if (EOF != stream->backch || EOF == ch)
		return EOF;

	/* keep for later reference and update checksum */
	stream->backch = (u_char)ch;
	if (stream->fpi)
		conf_file_sum -= stream->backch;

	/* update position */
	if (stream->backch == '\n') {
		stream->curpos = stream->bakpos;
		stream->bakpos.ncol = -1;
	}
	stream->curpos.ncol--;
	return stream->backch;
}

/* Dispose of an input structure. If the file pointer is not NULL,
 * close the file. This function does not check the result of
 * 'fclose()'.
 */
static void
lex_close(
	struct FILE_INFO *stream
	)
{
	if (NULL != stream) {
		if (NULL != stream->fpi)
			fclose(stream->fpi);
		free(stream);
	}
}
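/*
 * Editorial example (hedged): lex_ungetch() above keeps at most one
 * character.  A second push-back without an intervening lex_getch() is
 * rejected:
 *
 *	ch = lex_getch(stream);
 *	lex_ungetch(ch, stream);	// accepted, returns ch
 *	lex_ungetch('x', stream);	// rejected, returns EOF
 *
 * One slot is all the hand-written scanner in yylex() ever needs.
 */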
/* INPUT STACK
 * -----------
 *
 * Nested input sources are a bit tricky at first glance. We deal with
 * this problem using a stack of input sources, that is, a forward
 * linked list of FILE_INFO structs.
 *
 * This stack is never empty during parsing; while an encounter with EOF
 * can and will remove nested input sources, removing the last element
 * in the stack will not work during parsing, and the EOF condition of
 * the outermost input file remains until the parser folds up.
 */

static struct FILE_INFO *
_drop_stack_do(
	struct FILE_INFO * head
	)
{
	struct FILE_INFO * tail;

	while (NULL != head) {
		tail = head->st_next;
		lex_close(head);
		head = tail;
	}
	return head;
}



/* Create a singleton input source on an empty lexer stack. This will
 * fail if there is already an input source, or if the underlying disk
 * file cannot be opened.
 *
 * Returns TRUE if a new input object was successfully created.
 */
int/*BOOL*/
lex_init_stack(
	const char * path,
	const char * mode
	)
{
	if (NULL != lex_stack || NULL == path)
		return FALSE;

	lex_stack = lex_open(path, mode);
	return (NULL != lex_stack);
}

/* This removes *all* input sources from the stack, leaving the head
 * pointer as NULL. Any attempt to parse in that state is likely to bomb
 * with segmentation faults or the like.
 *
 * In other words: use this to clean up after parsing, and do not parse
 * anything until the next 'lex_init_stack()' has succeeded.
 */
void
lex_drop_stack()
{
	lex_stack = _drop_stack_do(lex_stack);
}

/* Flush the lexer input stack: this drops all nested input objects on
 * the stack (but keeps the current top-of-stack) and marks the
 * top-of-stack as inactive. Any further calls to lex_getch yield only
 * EOF, and it's no longer possible to push something back.
 *
 * Returns TRUE if there is a head element (top-of-stack) that was not
 * in the force-eof mode before this call.
 */
int/*BOOL*/
lex_flush_stack()
{
	int retv = FALSE;

	if (NULL != lex_stack) {
		retv = !lex_stack->force_eof;
		lex_stack->force_eof = TRUE;
		lex_stack->st_next = _drop_stack_do(
					lex_stack->st_next);
	}
	return retv;
}

/* Push another file on the parsing stack. If the mode is NULL, create a
 * FILE_INFO suitable for in-memory parsing; otherwise, create a
 * FILE_INFO that is bound to a local/disk file. Note that 'path' must
 * not be NULL, or the function will fail.
 *
 * Returns TRUE if a new info record was pushed onto the stack.
 */
int/*BOOL*/ lex_push_file(
	const char * path,
	const char * mode
	)
{
	struct FILE_INFO * next = NULL;

	if (NULL != path) {
		next = lex_open(path, mode);
		if (NULL != next) {
			next->st_next = lex_stack;
			lex_stack = next;
		}
	}
	return (NULL != next);
}

/* Pop, close & free the top of the include stack, unless the stack
 * contains only a singleton input object. In that case the function
 * fails, because the parser does not expect the input stack to be
 * empty.
 *
 * Returns TRUE if an object was successfully popped from the stack.
 */
int/*BOOL*/
lex_pop_file(void)
{
	struct FILE_INFO * head = lex_stack;
	struct FILE_INFO * tail = NULL;

	if (NULL != head) {
		tail = head->st_next;
		if (NULL != tail) {
			lex_stack = tail;
			lex_close(head);
		}
	}
	return (NULL != tail);
}
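/*
 * Hedged usage sketch for the stack interface above (editorial; the
 * file names are hypothetical):
 *
 *	if (!lex_init_stack("/etc/ntp.conf", "r"))
 *		return;			// could not open the main file
 *	// ... the parser runs, pulling tokens via yylex() ...
 *	// an include directive pushes a nested source:
 *	lex_push_file("/etc/ntp.d/extra.conf", "r");
 *	// yylex() calls lex_pop_file() once the included file hits EOF
 *	lex_drop_stack();		// cleanup when parsing is done
 */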
/* Get include nesting level. This currently loops over the stack and
 * counts elements; but since this is of concern only with an include
 * statement and the nesting depth has a small limit, there's no
 * bottleneck expected here.
 *
 * Returns the nesting level of includes, that is, the current depth of
 * the lexer input stack.
 */
size_t
lex_level(void)
{
	size_t cnt = 0;
	struct FILE_INFO *ipf = lex_stack;

	while (NULL != ipf) {
		cnt++;
		ipf = ipf->st_next;
	}
	return cnt;
}

/* check if the current input is from a file */
int/*BOOL*/
lex_from_file(void)
{
	return (NULL != lex_stack) && (NULL != lex_stack->fpi);
}

struct FILE_INFO *
lex_current()
{
	/* this became so simple, it could be a macro. But then,
	 * lex_stack would need to be global...
	 */
	return lex_stack;
}


/* STATE MACHINES
 * --------------
 */

/* Keywords */
static int
is_keyword(
	char *lexeme,
	follby *pfollowedby
	)
{
	follby fb;
	int curr_s;		/* current state index */
	int token;
	int i;

	curr_s = SCANNER_INIT_S;
	token = 0;

	for (i = 0; lexeme[i]; i++) {
		while (curr_s && (lexeme[i] != SS_CH(sst[curr_s])))
			curr_s = SS_OTHER_N(sst[curr_s]);

		if (curr_s && (lexeme[i] == SS_CH(sst[curr_s]))) {
			if ('\0' == lexeme[i + 1]
			    && FOLLBY_NON_ACCEPTING
			       != SS_FB(sst[curr_s])) {
				fb = SS_FB(sst[curr_s]);
				*pfollowedby = fb;
				token = curr_s;
				break;
			}
			curr_s = SS_MATCH_N(sst[curr_s]);
		} else
			break;
	}

	return token;
}


/* Integer */
static int
is_integer(
	char *lexeme
	)
{
	int i;
	int is_neg;
	u_int u_val;

	i = 0;

	/* Allow a leading minus sign */
	if (lexeme[i] == '-') {
		i++;
		is_neg = TRUE;
	} else {
		is_neg = FALSE;
	}

	/* Check that all the remaining characters are digits */
	for (; lexeme[i] != '\0'; i++) {
		if (!isdigit((u_char)lexeme[i]))
			return FALSE;
	}

	if (is_neg)
		return TRUE;

	/* Reject numbers that fit in unsigned but not in signed int */
	if (1 == sscanf(lexeme, "%u", &u_val))
		return (u_val <= INT_MAX);
	else
		return FALSE;
}


/* U_int -- assumes is_integer() has returned FALSE */
static int
is_u_int(
	char *lexeme
	)
{
	int i;
	int is_hex;

	i = 0;
	if ('0' == lexeme[i] && 'x' == tolower((u_char)lexeme[i + 1])) {
		i += 2;
		is_hex = TRUE;
	} else {
		is_hex = FALSE;
	}

	/* Check that all the remaining characters are digits */
	for (; lexeme[i] != '\0'; i++) {
		if (is_hex && !isxdigit((u_char)lexeme[i]))
			return FALSE;
		if (!is_hex && !isdigit((u_char)lexeme[i]))
			return FALSE;
	}

	return TRUE;
}
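/*
 * Hedged examples of how the two numeric classifiers above divide
 * their input (editorial, assuming a 32-bit int):
 *
 *	"123"		is_integer() -> TRUE
 *	"-500"		is_integer() -> TRUE   (any all-digit negative)
 *	"3000000000"	is_integer() -> FALSE, is_u_int() -> TRUE
 *	"0x1f"		is_integer() -> FALSE, is_u_int() -> TRUE
 *	"12.5"		both FALSE; left for is_double() below
 */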
/* Double */
static int
is_double(
	char *lexeme
	)
{
	u_int num_digits = 0;	/* Number of digits read */
	u_int i;

	i = 0;

	/* Check for an optional '+' or '-' */
	if ('+' == lexeme[i] || '-' == lexeme[i])
		i++;

	/* Read the integer part */
	for (; lexeme[i] && isdigit((u_char)lexeme[i]); i++)
		num_digits++;

	/* Check for the optional decimal point */
	if ('.' == lexeme[i]) {
		i++;
		/* Check for any digits after the decimal point */
		for (; lexeme[i] && isdigit((u_char)lexeme[i]); i++)
			num_digits++;
	}

	/*
	 * The combined number of digits in the integer part and the
	 * fraction part must not be zero at this point
	 */
	if (!num_digits)
		return 0;

	/* Check if we are done */
	if (!lexeme[i])
		return 1;

	/* There is still more input, read the exponent */
	if ('e' == tolower((u_char)lexeme[i]))
		i++;
	else
		return 0;

	/* Read an optional sign */
	if ('+' == lexeme[i] || '-' == lexeme[i])
		i++;

	/* Now read the exponent part */
	while (lexeme[i] && isdigit((u_char)lexeme[i]))
		i++;

	/* Check if we are done */
	if (!lexeme[i])
		return 1;
	else
		return 0;
}
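/*
 * Hedged examples for is_double() above (editorial):
 *
 *	"1.5"		-> 1	digits before and after the '.'
 *	".5"		-> 1	fraction digits alone are enough
 *	"-2e10"		-> 1	optional sign and exponent
 *	"+3.25e-2"	-> 1
 *	"."		-> 0	no digits at all
 *	"1.5x"		-> 0	trailing junk after the number
 */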
/* is_special() - Test whether a character is one of the special
 * single-character tokens.
 */
static inline int
is_special(
	int ch
	)
{
	return strchr(special_chars, ch) != NULL;
}


static int
is_EOC(
	int ch
	)
{
	if ((old_config_style && (ch == '\n')) ||
	    (!old_config_style && (ch == ';')))
		return 1;
	return 0;
}


char *
quote_if_needed(char *str)
{
	char *ret;
	size_t len;
	size_t octets;

	len = strlen(str);
	octets = len + 2 + 1;
	ret = emalloc(octets);
	if ('"' != str[0]
	    && (strcspn(str, special_chars) < len
		|| strchr(str, ' ') != NULL)) {
		snprintf(ret, octets, "\"%s\"", str);
	} else
		strlcpy(ret, str, octets);

	return ret;
}


static int
create_string_token(
	char *lexeme
	)
{
	char *pch;

	/*
	 * ignore end of line whitespace
	 */
	pch = lexeme;
	while (*pch && isspace((u_char)*pch))
		pch++;

	if (!*pch) {
		yylval.Integer = T_EOC;
		return yylval.Integer;
	}

	yylval.String = estrdup(lexeme);
	return T_String;
}


/*
 * yylex() - function that does the actual scanning.
 * Bison expects this function to be called yylex and for it to take no
 * input and return an int.
 * Conceptually yylex "returns" yylval as well as the actual return
 * value representing the token or type.
 */
int
yylex(void)
{
	static follby followedby = FOLLBY_TOKEN;
	size_t i;
	int instring;
	int yylval_was_set;
	int converted;
	int token;		/* The return value */
	int ch;

	instring = FALSE;
	yylval_was_set = FALSE;

	do {
		/* Ignore whitespace at the beginning */
		while (EOF != (ch = lex_getch(lex_stack)) &&
		       isspace(ch) &&
		       !is_EOC(ch))
			; /* Null Statement */

		if (EOF == ch) {

			if (!lex_pop_file())
				return 0;
			token = T_EOC;
			goto normal_return;

		} else if (is_EOC(ch)) {

			/* end FOLLBY_STRINGS_TO_EOC effect */
			followedby = FOLLBY_TOKEN;
			token = T_EOC;
			goto normal_return;

		} else if (is_special(ch) && FOLLBY_TOKEN == followedby) {
			/* special chars are their own token values */
			token = ch;
			/*
			 * '=' outside simulator configuration implies
			 * a single string following as in:
			 * setvar Owner = "The Boss" default
			 */
			if ('=' == ch && old_config_style)
				followedby = FOLLBY_STRING;
			yytext[0] = (char)ch;
			yytext[1] = '\0';
			goto normal_return;
		} else
			lex_ungetch(ch, lex_stack);

		/* save the position of start of the token */
		lex_stack->tokpos = lex_stack->curpos;

		/* Read in the lexeme */
		i = 0;
		while (EOF != (ch = lex_getch(lex_stack))) {

			yytext[i] = (char)ch;

			/* Break on whitespace or a special character */
			if (isspace(ch) || is_EOC(ch)
			    || '"' == ch
			    || (FOLLBY_TOKEN == followedby
				&& is_special(ch)))
				break;

			/* Read the rest of the line on reading a start
			   of comment character */
			if ('#' == ch) {
				while (EOF != (ch = lex_getch(lex_stack))
				       && '\n' != ch)
					; /* Null Statement */
				break;
			}

			i++;
			if (i >= COUNTOF(yytext))
				goto lex_too_long;
		}
		/* Pick up all of the string between " marks, to the end
		 * of the line. If we make it to EOL without a
		 * terminating " assume it for them.
		 *
		 * XXX - HMS: I'm not sure we want to assume the closing "
		 */
		if ('"' == ch) {
			instring = TRUE;
			while (EOF != (ch = lex_getch(lex_stack)) &&
			       ch != '"' && ch != '\n') {
				yytext[i++] = (char)ch;
				if (i >= COUNTOF(yytext))
					goto lex_too_long;
			}
			/*
			 * yytext[i] will be pushed back as not part of
			 * this lexeme, but any closing quote should
			 * not be pushed back, so we read another char.
			 */
			if ('"' == ch)
				ch = lex_getch(lex_stack);
		}
		/* Push back the last character read that is not a part
		 * of this lexeme. This fails silently if ch is EOF, but
		 * then the EOF condition persists and is handled on the
		 * next turn by the include stack mechanism.
		 */
		lex_ungetch(ch, lex_stack);

		yytext[i] = '\0';
	} while (i == 0);

	/* Now return the desired token */

	/* First make sure that the parser is *not* expecting a string
	 * as the next token (based on the previous token that was
	 * returned) and that we haven't read a string.
	 */
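	/*
	 * Editorial summary (hedged, inferred from the code below): the
	 * checks are tried in order -- keyword, signed integer,
	 * unsigned integer, double, and finally plain string.  For
	 * example, the lexeme "server" matches the keyword table, "128"
	 * becomes T_Integer, and a hostname such as "time.example.com"
	 * falls through to T_String.
	 */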
	if (followedby == FOLLBY_TOKEN && !instring) {
		token = is_keyword(yytext, &followedby);
		if (token) {
			/*
			 * T_Server is exceptional as it forces the
			 * following token to be a string in the
			 * non-simulator parts of the configuration,
			 * but in the simulator configuration section,
			 * "server" is followed by "=" which must be
			 * recognized as a token, not a string.
			 */
			if (T_Server == token && !old_config_style)
				followedby = FOLLBY_TOKEN;
			goto normal_return;
		} else if (is_integer(yytext)) {
			yylval_was_set = TRUE;
			errno = 0;
			if ((yylval.Integer = strtol(yytext, NULL, 10)) == 0
			    && ((errno == EINVAL) || (errno == ERANGE))) {
				msyslog(LOG_ERR,
					"Integer cannot be represented: %s",
					yytext);
				if (lex_from_file()) {
					exit(1);
				} else {
					/* force end of parsing */
					yylval.Integer = 0;
					return 0;
				}
			}
			token = T_Integer;
			goto normal_return;
		} else if (is_u_int(yytext)) {
			yylval_was_set = TRUE;
			if ('0' == yytext[0] &&
			    'x' == tolower((u_char)yytext[1]))
				converted = sscanf(&yytext[2], "%x",
						   &yylval.U_int);
			else
				converted = sscanf(yytext, "%u",
						   &yylval.U_int);
			if (1 != converted) {
				msyslog(LOG_ERR,
					"U_int cannot be represented: %s",
					yytext);
				if (lex_from_file()) {
					exit(1);
				} else {
					/* force end of parsing */
					yylval.Integer = 0;
					return 0;
				}
			}
			token = T_U_int;
			goto normal_return;
		} else if (is_double(yytext)) {
			yylval_was_set = TRUE;
			errno = 0;
			if ((yylval.Double = atof(yytext)) == 0 && errno == ERANGE) {
				msyslog(LOG_ERR,
					"Double too large to represent: %s",
					yytext);
				exit(1);
			} else {
				token = T_Double;
				goto normal_return;
			}
		} else {
			/* Default: everything is a string */
			yylval_was_set = TRUE;
			token = create_string_token(yytext);
			goto normal_return;
		}
	}

	/*
	 * Either followedby is not FOLLBY_TOKEN or this lexeme is part
	 * of a string. Hence, we need to return T_String.
	 *
	 * _Except_ we might have a -4 or -6 flag on an association
	 * configuration line (server, peer, pool, etc.).
	 *
	 * This is a terrible hack, but the grammar is ambiguous so we
	 * don't have a choice. [SK]
	 *
	 * The ambiguity is in the keyword scanner, not ntp_parser.y.
	 * We do not require server addresses to be quoted in ntp.conf,
	 * complicating the scanner's job. To avoid trying (and
	 * failing) to match an IP address or DNS name to a keyword,
	 * the association keywords use FOLLBY_STRING in the keyword
	 * table, which tells the scanner to force the next token to be
	 * a T_String, so it does not try to match a keyword but rather
	 * expects a string when -4/-6 modifiers to server, peer, etc.
	 * are encountered.
	 * restrict -4 and restrict -6 parsing works correctly without
	 * this hack, as restrict uses FOLLBY_TOKEN. [DH]
	 */
	if ('-' == yytext[0]) {
		if ('4' == yytext[1]) {
			token = T_Ipv4_flag;
			goto normal_return;
		} else if ('6' == yytext[1]) {
			token = T_Ipv6_flag;
			goto normal_return;
		}
	}

	if (FOLLBY_STRING == followedby)
		followedby = FOLLBY_TOKEN;

	yylval_was_set = TRUE;
	token = create_string_token(yytext);

  normal_return:
	if (T_EOC == token)
		DPRINTF(4, ("\t<end of command>\n"));
	else
		DPRINTF(4, ("yylex: lexeme '%s' -> %s\n", yytext,
			    token_name(token)));

	if (!yylval_was_set)
		yylval.Integer = token;

	return token;

  lex_too_long:
	yytext[min(sizeof(yytext) - 1, 50)] = 0;
	msyslog(LOG_ERR,
		"configuration item on line %d longer than limit of %lu, began with '%s'",
		lex_stack->curpos.nline, (u_long)min(sizeof(yytext) - 1, 50),
		yytext);
	/*
	 * If we hit the length limit reading the startup configuration
	 * file, abort.
	 */
	if (lex_from_file())
		exit(sizeof(yytext) - 1);

	/*
	 * If it's runtime configuration via ntpq :config treat it as
	 * if the configuration text ended before the too-long lexeme,
	 * hostname, or string.
	 */
	yylval.Integer = 0;
	return 0;
}