/*
 * keyword-gen.c -- generate keyword scanner finite state machine and
 *		    keyword_text array.
 *
 * This program is run to generate ntp_keyword.h.
 * After making a change here, two output files should be committed at
 * the same time as keyword-gen.c:
 *	ntp_keyword.h
 *	keyword-gen-utd
 *
 * keyword-gen-utd is a sentinel used by Makefile.am to avoid compiling
 * keyword-gen.c and generating ntp_keyword.h if the input keyword-gen.c
 * has not changed.  This is not solely an optimization; it also breaks
 * a dependency chain that otherwise would cause programs to be compiled
 * when running "make dist" or "make distdir".  We want these to package
 * the existing source without building anything but a tarball.  See
 * [Bug 1470].
 */
#include <config.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#include <ntp_stdlib.h>
#include <ntp_config.h>
#include "ntp_scanner.h"
#include "ntp_parser.h"


/* Define a structure to hold a (keyword, token) pair */
struct key_tok {
	char *	key;		/* Keyword */
	u_short	token;		/* Associated Token */
	follby	followedby;	/* nonzero indicates the next token(s)
				   forced to be string(s) */
};
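
/*
 * For illustration (example and path below are arbitrary, not part of
 * the original table documentation): "driftfile" is tagged
 * FOLLBY_STRING so that in a line such as
 *	driftfile /var/db/ntpd.drift
 * the pathname is handed back as T_String instead of being run through
 * the keyword scanner, while FOLLBY_STRINGS_TO_EOC (e.g. "phone")
 * forces every remaining token up to the end of the command to be a
 * string.
 */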

struct key_tok ntp_keywords[] = {
{ "...",		T_Ellipsis,		FOLLBY_TOKEN },
{ "allpeers",		T_Allpeers,		FOLLBY_TOKEN },
{ "automax",		T_Automax,		FOLLBY_TOKEN },
{ "broadcast",		T_Broadcast,		FOLLBY_STRING },
{ "broadcastclient",	T_Broadcastclient,	FOLLBY_TOKEN },
{ "broadcastdelay",	T_Broadcastdelay,	FOLLBY_TOKEN },
{ "checkhash",		T_Checkhash,		FOLLBY_TOKEN },
{ "ctl",		T_Ctl,			FOLLBY_TOKEN },
{ "delrestrict",	T_Delrestrict,		FOLLBY_TOKEN },
{ "device",		T_Device,		FOLLBY_STRING },
{ "disable",		T_Disable,		FOLLBY_TOKEN },
{ "driftfile",		T_Driftfile,		FOLLBY_STRING },
{ "dscp",		T_Dscp,			FOLLBY_TOKEN },
{ "enable",		T_Enable,		FOLLBY_TOKEN },
{ "end",		T_End,			FOLLBY_TOKEN },
{ "filegen",		T_Filegen,		FOLLBY_TOKEN },
{ "fudge",		T_Fudge,		FOLLBY_STRING },
{ "ignorehash",		T_Ignorehash,		FOLLBY_TOKEN },
{ "io",			T_Io,			FOLLBY_TOKEN },
{ "includefile",	T_Includefile,		FOLLBY_STRING },
{ "leapfile",		T_Leapfile,		FOLLBY_STRING },
{ "leapsmearinterval",	T_Leapsmearinterval,	FOLLBY_TOKEN },
{ "logconfig",		T_Logconfig,		FOLLBY_STRINGS_TO_EOC },
{ "logfile",		T_Logfile,		FOLLBY_STRING },
{ "manycastclient",	T_Manycastclient,	FOLLBY_STRING },
{ "manycastserver",	T_Manycastserver,	FOLLBY_STRINGS_TO_EOC },
{ "mem",		T_Mem,			FOLLBY_TOKEN },
{ "multicastclient",	T_Multicastclient,	FOLLBY_STRINGS_TO_EOC },
{ "peer",		T_Peer,			FOLLBY_STRING },
{ "phone",		T_Phone,		FOLLBY_STRINGS_TO_EOC },
{ "pidfile",		T_Pidfile,		FOLLBY_STRING },
{ "pollskewlist",	T_PollSkewList,		FOLLBY_TOKEN },
{ "pool",		T_Pool,			FOLLBY_STRING },
{ "discard",		T_Discard,		FOLLBY_TOKEN },
{ "reset",		T_Reset,		FOLLBY_TOKEN },
{ "restrict",		T_Restrict,		FOLLBY_TOKEN },
{ "rlimit",		T_Rlimit,		FOLLBY_TOKEN },
{ "server",		T_Server,		FOLLBY_STRING },
{ "serverresponse",	T_Serverresponse,	FOLLBY_TOKEN },
{ "fuzz",		T_Fuzz,			FOLLBY_TOKEN },
{ "poll",		T_Poll,			FOLLBY_TOKEN },
{ "setvar",		T_Setvar,		FOLLBY_STRING },
{ "statistics",		T_Statistics,		FOLLBY_TOKEN },
{ "statsdir",		T_Statsdir,		FOLLBY_STRING },
{ "sys",		T_Sys,			FOLLBY_TOKEN },
{ "tick",		T_Tick,			FOLLBY_TOKEN },
{ "timer",		T_Timer,		FOLLBY_TOKEN },
{ "tinker",		T_Tinker,		FOLLBY_TOKEN },
{ "tos",		T_Tos,			FOLLBY_TOKEN },
{ "trap",		T_Trap,			FOLLBY_STRING },
{ "unconfig",		T_Unconfig,		FOLLBY_STRING },
{ "unpeer",		T_Unpeer,		FOLLBY_STRING },
{ "xmtnonce",		T_Xmtnonce,		FOLLBY_TOKEN },
/* authentication_command */
{ "controlkey",		T_ControlKey,		FOLLBY_TOKEN },
{ "crypto",		T_Crypto,		FOLLBY_TOKEN },
{ "keys",		T_Keys,			FOLLBY_STRING },
{ "keysdir",		T_Keysdir,		FOLLBY_STRING },
{ "ntpsigndsocket",	T_NtpSignDsocket,	FOLLBY_STRING },
{ "requestkey",		T_Requestkey,		FOLLBY_TOKEN },
{ "revoke",		T_Revoke,		FOLLBY_TOKEN },
{ "trustedkey",		T_Trustedkey,		FOLLBY_TOKEN },
/* IPv4/IPv6 protocol override flag */
{ "-4",			T_Ipv4_flag,		FOLLBY_TOKEN },
{ "-6",			T_Ipv6_flag,		FOLLBY_TOKEN },
/* option */
{ "autokey",		T_Autokey,		FOLLBY_TOKEN },
{ "burst",		T_Burst,		FOLLBY_TOKEN },
{ "iburst",		T_Iburst,		FOLLBY_TOKEN },
{ "key",		T_Key,			FOLLBY_TOKEN },
{ "maxpoll",		T_Maxpoll,		FOLLBY_TOKEN },
{ "mdnstries",		T_Mdnstries,		FOLLBY_TOKEN },
{ "minpoll",		T_Minpoll,		FOLLBY_TOKEN },
{ "mode",		T_Mode,			FOLLBY_TOKEN },
{ "noselect",		T_Noselect,		FOLLBY_TOKEN },
{ "preempt",		T_Preempt,		FOLLBY_TOKEN },
{ "true",		T_True,			FOLLBY_TOKEN },
{ "prefer",		T_Prefer,		FOLLBY_TOKEN },
{ "ttl",		T_Ttl,			FOLLBY_TOKEN },
{ "version",		T_Version,		FOLLBY_TOKEN },
{ "xleave",		T_Xleave,		FOLLBY_TOKEN },
/* crypto_command */
{ "host",		T_Host,			FOLLBY_STRING },
{ "ident",		T_Ident,		FOLLBY_STRING },
{ "pw",			T_Pw,			FOLLBY_STRING },
{ "randfile",		T_Randfile,		FOLLBY_STRING },
{ "digest",		T_Digest,		FOLLBY_STRING },
/*** MONITORING COMMANDS ***/
/* stat */
{ "clockstats",		T_Clockstats,		FOLLBY_TOKEN },
{ "cryptostats",	T_Cryptostats,		FOLLBY_TOKEN },
{ "loopstats",		T_Loopstats,		FOLLBY_TOKEN },
{ "peerstats",		T_Peerstats,		FOLLBY_TOKEN },
{ "rawstats",		T_Rawstats,		FOLLBY_TOKEN },
{ "sysstats",		T_Sysstats,		FOLLBY_TOKEN },
{ "protostats",		T_Protostats,		FOLLBY_TOKEN },
{ "timingstats",	T_Timingstats,		FOLLBY_TOKEN },
/* filegen_option */
{ "file",		T_File,			FOLLBY_STRING },
{ "link",		T_Link,			FOLLBY_TOKEN },
{ "nolink",		T_Nolink,		FOLLBY_TOKEN },
{ "type",		T_Type,			FOLLBY_TOKEN },
/* filegen_type */
{ "age",		T_Age,			FOLLBY_TOKEN },
{ "day",		T_Day,			FOLLBY_TOKEN },
{ "month",		T_Month,		FOLLBY_TOKEN },
{ "none",		T_None,			FOLLBY_TOKEN },
{ "pid",		T_Pid,			FOLLBY_TOKEN },
{ "week",		T_Week,			FOLLBY_TOKEN },
{ "year",		T_Year,			FOLLBY_TOKEN },
/*** ORPHAN MODE COMMANDS ***/
/* tos_option */
{ "minclock",		T_Minclock,		FOLLBY_TOKEN },
{ "maxclock",		T_Maxclock,		FOLLBY_TOKEN },
{ "minsane",		T_Minsane,		FOLLBY_TOKEN },
{ "floor",		T_Floor,		FOLLBY_TOKEN },
{ "ceiling",		T_Ceiling,		FOLLBY_TOKEN },
{ "cohort",		T_Cohort,		FOLLBY_TOKEN },
{ "mindist",		T_Mindist,		FOLLBY_TOKEN },
{ "maxdist",		T_Maxdist,		FOLLBY_TOKEN },
{ "bcpollbstep",	T_Bcpollbstep,		FOLLBY_TOKEN },
{ "beacon",		T_Beacon,		FOLLBY_TOKEN },
{ "orphan",		T_Orphan,		FOLLBY_TOKEN },
{ "orphanwait",		T_Orphanwait,		FOLLBY_TOKEN },
{ "nonvolatile",	T_Nonvolatile,		FOLLBY_TOKEN },
{ "basedate",		T_Basedate,		FOLLBY_STRING },
/* access_control_flag */
{ "default",		T_Default,		FOLLBY_TOKEN },
{ "source",		T_Source,		FOLLBY_TOKEN },
{ "epeer",		T_Epeer,		FOLLBY_TOKEN },
{ "noepeer",		T_Noepeer,		FOLLBY_TOKEN },
{ "flake",		T_Flake,		FOLLBY_TOKEN },
{ "ignore",		T_Ignore,		FOLLBY_TOKEN },
{ "ippeerlimit",	T_Ippeerlimit,		FOLLBY_TOKEN },
{ "limited",		T_Limited,		FOLLBY_TOKEN },
{ "mssntp",		T_Mssntp,		FOLLBY_TOKEN },
{ "kod",		T_Kod,			FOLLBY_TOKEN },
{ "lowpriotrap",	T_Lowpriotrap,		FOLLBY_TOKEN },
{ "mask",		T_Mask,			FOLLBY_TOKEN },
{ "nomodify",		T_Nomodify,		FOLLBY_TOKEN },
{ "nomrulist",		T_Nomrulist,		FOLLBY_TOKEN },
{ "nopeer",		T_Nopeer,		FOLLBY_TOKEN },
{ "noquery",		T_Noquery,		FOLLBY_TOKEN },
{ "noserve",		T_Noserve,		FOLLBY_TOKEN },
{ "notrap",		T_Notrap,		FOLLBY_TOKEN },
{ "notrust",		T_Notrust,		FOLLBY_TOKEN },
{ "ntpport",		T_Ntpport,		FOLLBY_TOKEN },
/* discard_option */
{ "average",		T_Average,		FOLLBY_TOKEN },
{ "minimum",		T_Minimum,		FOLLBY_TOKEN },
{ "monitor",		T_Monitor,		FOLLBY_TOKEN },
/* mru_option */
{ "incalloc",		T_Incalloc,		FOLLBY_TOKEN },
{ "incmem",		T_Incmem,		FOLLBY_TOKEN },
{ "initalloc",		T_Initalloc,		FOLLBY_TOKEN },
{ "initmem",		T_Initmem,		FOLLBY_TOKEN },
{ "mindepth",		T_Mindepth,		FOLLBY_TOKEN },
{ "maxage",		T_Maxage,		FOLLBY_TOKEN },
{ "maxdepth",		T_Maxdepth,		FOLLBY_TOKEN },
{ "maxmem",		T_Maxmem,		FOLLBY_TOKEN },
{ "mru",		T_Mru,			FOLLBY_TOKEN },
/* fudge_factor */
{ "abbrev",		T_Abbrev,		FOLLBY_STRING },
{ "flag1",		T_Flag1,		FOLLBY_TOKEN },
{ "flag2",		T_Flag2,		FOLLBY_TOKEN },
{ "flag3",		T_Flag3,		FOLLBY_TOKEN },
{ "flag4",		T_Flag4,		FOLLBY_TOKEN },
{ "refid",		T_Refid,		FOLLBY_STRING },
{ "stratum",		T_Stratum,		FOLLBY_TOKEN },
{ "time1",		T_Time1,		FOLLBY_TOKEN },
{ "time2",		T_Time2,		FOLLBY_TOKEN },
{ "minjitter",		T_Minjitter,		FOLLBY_TOKEN },
/* device spec */
{ "ppsdata",		T_PpsData,		FOLLBY_STRING },
{ "timedata",		T_TimeData,		FOLLBY_STRING },
/* system_option */
{ "auth",		T_Auth,			FOLLBY_TOKEN },
{ "bclient",		T_Bclient,		FOLLBY_TOKEN },
{ "calibrate",		T_Calibrate,		FOLLBY_TOKEN },
{ "kernel",		T_Kernel,		FOLLBY_TOKEN },
{ "mode7",		T_Mode7,		FOLLBY_TOKEN },
{ "ntp",		T_Ntp,			FOLLBY_TOKEN },
{ "peer_clear_digest_early",	T_PCEdigest,	FOLLBY_TOKEN },
{ "stats",		T_Stats,		FOLLBY_TOKEN },
{ "unpeer_crypto_early",	T_UEcrypto,	FOLLBY_TOKEN },
{ "unpeer_crypto_nak_early",	T_UEcryptonak,	FOLLBY_TOKEN },
{ "unpeer_digest_early",	T_UEdigest,	FOLLBY_TOKEN },
/* rlimit_option */
{ "memlock",		T_Memlock,		FOLLBY_TOKEN },
{ "stacksize",		T_Stacksize,		FOLLBY_TOKEN },
{ "filenum",		T_Filenum,		FOLLBY_TOKEN },
/* tinker_option */
{ "step",		T_Step,			FOLLBY_TOKEN },
{ "stepback",		T_Stepback,		FOLLBY_TOKEN },
{ "stepfwd",		T_Stepfwd,		FOLLBY_TOKEN },
{ "panic",		T_Panic,		FOLLBY_TOKEN },
{ "dispersion",		T_Dispersion,		FOLLBY_TOKEN },
{ "stepout",		T_Stepout,		FOLLBY_TOKEN },
{ "allan",		T_Allan,		FOLLBY_TOKEN },
{ "huffpuff",		T_Huffpuff,		FOLLBY_TOKEN },
{ "freq",		T_Freq,			FOLLBY_TOKEN },
/* miscellaneous_command */
{ "port",		T_Port,			FOLLBY_TOKEN },
{ "interface",		T_Interface,		FOLLBY_TOKEN },
{ "saveconfigdir",	T_Saveconfigdir,	FOLLBY_STRING },
/* interface_command (ignore and interface already defined) */
{ "nic",		T_Nic,			FOLLBY_TOKEN },
{ "all",		T_All,			FOLLBY_TOKEN },
{ "ipv4",		T_Ipv4,			FOLLBY_TOKEN },
{ "ipv6",		T_Ipv6,			FOLLBY_TOKEN },
{ "wildcard",		T_Wildcard,		FOLLBY_TOKEN },
{ "listen",		T_Listen,		FOLLBY_TOKEN },
{ "drop",		T_Drop,			FOLLBY_TOKEN },
/* simulator commands */
{ "simulate",		T_Simulate,		FOLLBY_TOKEN },
{ "simulation_duration",T_Sim_Duration,		FOLLBY_TOKEN },
{ "beep_delay",		T_Beep_Delay,		FOLLBY_TOKEN },
{ "duration",		T_Duration,		FOLLBY_TOKEN },
{ "server_offset",	T_Server_Offset,	FOLLBY_TOKEN },
{ "freq_offset",	T_Freq_Offset,		FOLLBY_TOKEN },
{ "wander",		T_Wander,		FOLLBY_TOKEN },
{ "jitter",		T_Jitter,		FOLLBY_TOKEN },
{ "prop_delay",		T_Prop_Delay,		FOLLBY_TOKEN },
{ "proc_delay",		T_Proc_Delay,		FOLLBY_TOKEN },
};

typedef struct big_scan_state_tag {
	char	ch;		/* Character this state matches on */
	char	followedby;	/* Forces next token(s) to T_String */
	u_short	finishes_token;	/* nonzero ID if last keyword char */
	u_short	match_next_s;	/* next state to check matching ch */
	u_short	other_next_s;	/* next state to check if not ch */
} big_scan_state;

/*
 * Note: to increase MAXSTATES beyond 2048, be aware that a state
 * index is currently crammed into 11 bits in scan_state form.
 * Raising to 4096 would be relatively easy by storing the followedby
 * value in a separate array with one entry per token, and shrinking
 * the char value to 7 bits to free a bit for accepting/non-accepting.
 * More than 4096 states will require expanding scan_state beyond 32
 * bits each.
 */
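/*
 * Rough bit budget of a packed scan_state, as implied by the range
 * checks in generate_fsm() below (the authoritative layout lives in
 * ntp_scanner.h):
 *	 8 bits  ch
 *	 2 bits  followedby
 *	11 bits  match_next_s
 *	11 bits  other_next_s
 * which totals 32 bits and is where the 2048 (2^11) state ceiling
 * above comes from.
 */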
#define MAXSTATES	2048
#define MAX_TOK_LEN	63

const char *	current_keyword;/* for error reporting */
big_scan_state	sst[MAXSTATES];	/* scanner FSM state entries */
u_short		sst_highwater;	/* next entry index to consider */
char *		symb[1024];	/* map token ID to symbolic name */

/* for libntp */
const char *	progname = "keyword-gen";

int		main			(int, char **);
static void	generate_preamble	(void);
static void	generate_fsm		(void);
static void	generate_token_text	(void);
static u_short	create_keyword_scanner	(void);
static u_short	create_scan_states	(char *, u_short, follby, u_short);
int		compare_key_tok_id	(const void *, const void *);
int		compare_key_tok_text	(const void *, const void *);
void		populate_symb		(char *);
const char *	symbname		(u_short);


int main(int argc, char **argv)
{
	if (argc < 2) {
		fprintf(stderr, "Usage:\n%s t_header.h\n", argv[0]);
		exit(1);
	}
	debug = 1;

	populate_symb(argv[1]);

	generate_preamble();
	generate_token_text();
	generate_fsm();

	return 0;
}
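
/*
 * All output is written to stdout; the build redirects it into
 * ntp_keyword.h.  An illustrative invocation (not the literal
 * Makefile.am rule) is:
 *	./keyword-gen ntp_parser.h > ntp_keyword.h
 * where ntp_parser.h is the bison-generated header containing the
 * T_* token #defines that populate_symb() reads.
 */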


static void
generate_preamble(void)
{
	time_t now;
	char timestamp[128];
	char preamble[] =
"/*\n"
" * ntp_keyword.h\n"
" * \n"
" * NOTE: edit this file with caution, it is generated by keyword-gen.c\n"
" *\t Generated %s UTC	  diff_ignore_line\n"
" *\n"
" */\n"
"#include \"ntp_scanner.h\"\n"
"#include \"ntp_parser.h\"\n"
"\n";

	time(&now);
	if (!strftime(timestamp, sizeof(timestamp),
		      "%Y-%m-%d %H:%M:%S", gmtime(&now)))
		timestamp[0] = '\0';

	printf(preamble, timestamp);
}


static void
generate_fsm(void)
{
	char rprefix[MAX_TOK_LEN + 1];
	char prefix[MAX_TOK_LEN + 1];
	char token_id_comment[16 + MAX_TOK_LEN + 1];
	size_t prefix_len;
	char *p;
	char *r;
	u_short initial_state;
	u_short this_state;
	u_short state;
	u_short i;
	u_short token;

	/*
	 * Sort ntp_keywords in alphabetical keyword order.  This is
	 * not necessary, but minimizes nonfunctional changes in the
	 * generated finite state machine when keywords are modified.
	 */
	qsort(ntp_keywords, COUNTOF(ntp_keywords),
	      sizeof(ntp_keywords[0]), compare_key_tok_text);

	/*
	 * To save space, reserve the state array entry matching each
	 * token number for its terminal state, so the token identifier
	 * does not need to be stored in each state, but can be
	 * recovered trivially.  To mark the entry reserved,
	 * finishes_token is nonzero.
	 */
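	/*
	 * For example (token values here are illustrative only): if
	 * T_Age happened to be 258, sst[258] would be reserved now and
	 * later become the accepting state for "age", so an accepting
	 * state index maps straight back to its token ID.
	 */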

	for (i = 0; i < COUNTOF(ntp_keywords); i++) {
		token = ntp_keywords[i].token;
		if (1 > token || token >= COUNTOF(sst)) {
			fprintf(stderr,
				"keyword-gen sst[%u] too small "
				"for keyword '%s' id %d\n",
				(int)COUNTOF(sst),
				ntp_keywords[i].key,
				token);
			exit(4);
		}
		sst[token].finishes_token = token;
	}

	initial_state = create_keyword_scanner();

	fprintf(stderr,
		"%d keywords consumed %d states of %d max.\n",
		(int)COUNTOF(ntp_keywords),
		sst_highwater - 1,
		(int)COUNTOF(sst) - 1);

	printf("#define SCANNER_INIT_S %d\n\n", initial_state);

	printf("const scan_state sst[%d] = {\n"
	       "/*SS_T( ch,\tf-by, match, other ),\t\t\t\t */\n"
	       "  0,\t\t\t\t      /* %5d %-17s */\n",
	       sst_highwater,
	       0, "");

	for (i = 1; i < sst_highwater; i++) {

		/* verify fields will fit */
		if (sst[i].followedby & ~0x3) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].followedby %d too big\n",
				i, sst[i].followedby);
			exit(7);
		}

		if (sst_highwater <= sst[i].match_next_s
		    || sst[i].match_next_s & ~0x7ff) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].match_next_s %d too big\n",
				i, sst[i].match_next_s);
			exit(8);
		}

		if (sst_highwater <= sst[i].other_next_s
		    || sst[i].other_next_s & ~0x7ff) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].other_next_s %d too big\n",
				i, sst[i].other_next_s);
			exit(9);
		}

		if (sst[i].finishes_token) {
			snprintf(token_id_comment,
				 sizeof(token_id_comment), "%5d %-17s",
				 i, symbname(sst[i].finishes_token));
			if (i != sst[i].finishes_token) {
				fprintf(stderr,
					"keyword-gen internal error "
					"entry %d finishes token %d\n",
					i, sst[i].finishes_token);
				exit(5);
			}
		} else {
		/*
		 * Determine the keyword prefix that leads to this
		 * state.  This is expensive, but keyword-gen is run
		 * only when keyword-gen.c changes.  Distributing
		 * keyword-gen-utd ensures that, which is why it must
		 * be committed at the same time as keyword-gen.c and
		 * ntp_keyword.h.
		 *
		 * Scan the state array iteratively looking for a state
		 * which leads to the current one, collecting matching
		 * characters along the way.  There is only one such
		 * path back to the starting state, given the way our
		 * scanner state machine is built and the practice of
		 * using the spelling of the keyword as its T_* token
		 * identifier, so no two spellings map to the same T_*
		 * value.
		 */
			prefix_len = 0;
			this_state = i;
			do {
				for (state = 1; state < sst_highwater; state++)
					if (sst[state].other_next_s == this_state) {
						this_state = state;
						break;
					} else if (sst[state].match_next_s == this_state) {
						this_state = state;
						rprefix[prefix_len] = sst[state].ch;
						prefix_len++;
						break;
					}
			} while (this_state != initial_state);

			if (prefix_len) {
				/* reverse rprefix into prefix */
				p = prefix + prefix_len;
				r = rprefix;
				while (r < rprefix + prefix_len)
					*--p = *r++;
			}
			prefix[prefix_len] = '\0';

			snprintf(token_id_comment,
				 sizeof(token_id_comment), "%5d %-17s",
				 i, (initial_state == i)
					? "[initial state]"
					: prefix);
		}

		printf("  S_ST( '%c',\t%d,    %5u, %5u )%s /* %s */\n",
		       sst[i].ch,
		       sst[i].followedby,
		       sst[i].match_next_s,
		       sst[i].other_next_s,
		       (i + 1 < sst_highwater)
			   ? ","
			   : " ",
		       token_id_comment);
	}

	printf("};\n\n");
}
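
/*
 * Shape of the generated output in ntp_keyword.h (abridged; the
 * trailing comment column is omitted and all numbers below are made
 * up, only the layout follows the printf formats above):
 *
 *	#define SCANNER_INIT_S 900
 *
 *	const scan_state sst[950] = {
 *	  0,
 *	  S_ST( 'a',	3,	500,	0 ),
 *	  ...
 *	};
 */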


/* Define a function to create the states of the scanner. This function
 * is used by the create_keyword_scanner function below.
 *
 * This function takes a suffix of a keyword, the token to be returned on
 * recognizing the complete keyword, and any pre-existing state that exists
 * for some other keyword that has the same prefix as the current one.
 */
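/*
 * Sketch of the resulting trie (keywords taken from the table above;
 * concrete state numbers depend on insertion order): inserting
 * "maxage" and then "maxdepth" shares the 'm', 'a', 'x' states, which
 * are linked through match_next_s; at the fourth character the 'a'
 * and 'd' states become siblings chained through other_next_s.
 */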
static u_short
create_scan_states(
	char *	text,
	u_short	token,
	follby	followedby,
	u_short	prev_state
	)
{
	u_short my_state;
	u_short return_state;
	u_short prev_char_s;
	u_short curr_char_s;

	return_state = prev_state;
	curr_char_s = prev_state;
	prev_char_s = 0;

	/* Find the correct position to insert the state.
	 * All states should be in alphabetical order.
	 */
	while (curr_char_s && (text[0] < sst[curr_char_s].ch)) {
		prev_char_s = curr_char_s;
		curr_char_s = sst[curr_char_s].other_next_s;
	}

	/*
	 * Check if a previously seen keyword has the same prefix as
	 * the current keyword.  If so, simply use the state for that
	 * keyword as my_state; otherwise, allocate a new state.
	 */
	if (curr_char_s && (text[0] == sst[curr_char_s].ch)) {
		my_state = curr_char_s;
		if ('\0' == text[1]) {
			fprintf(stderr,
				"Duplicate entries for keyword '%s' in"
				" keyword-gen.c ntp_keywords[].\n",
				current_keyword);
			exit(2);
		}
	} else {
		do
			my_state = sst_highwater++;
		while (my_state < COUNTOF(sst)
		       && sst[my_state].finishes_token);
		if (my_state >= COUNTOF(sst)) {
			fprintf(stderr,
				"fatal, keyword scanner state array "
				"sst[%d] is too small, modify\n"
				"keyword-gen.c to increase.\n",
				(int)COUNTOF(sst));
			exit(3);
		}
		/* Store the next character of the keyword */
		sst[my_state].ch = text[0];
		sst[my_state].other_next_s = curr_char_s;
		sst[my_state].followedby = FOLLBY_NON_ACCEPTING;

		if (prev_char_s)
			sst[prev_char_s].other_next_s = my_state;
		else
			return_state = my_state;
	}

	/* Check if the next character is '\0'.
	 * If yes, we are done with the recognition and this is an accepting
	 * state.
	 * If not, we need to continue scanning
	 */
	if ('\0' == text[1]) {
		sst[my_state].finishes_token = (u_short)token;
		sst[my_state].followedby = (char)followedby;

		if (sst[token].finishes_token != (u_short)token) {
			fprintf(stderr,
				"fatal, sst[%d] not reserved for %s.\n",
				token, symbname(token));
			exit(6);
		}
		/* relocate so token id is sst[] index */
		if (my_state != token) {
			sst[token] = sst[my_state];
			ZERO(sst[my_state]);
			do
				sst_highwater--;
			while (sst[sst_highwater].finishes_token);
			my_state = token;
			if (prev_char_s)
				sst[prev_char_s].other_next_s = my_state;
			else
				return_state = my_state;
		}
	} else
		sst[my_state].match_next_s =
		    create_scan_states(
			&text[1],
			token,
			followedby,
			sst[my_state].match_next_s);

	return return_state;
}


/* Define a function that takes a list of (keyword, token) values and
 * creates a keyword scanner out of it.
 */

static u_short
create_keyword_scanner(void)
{
	u_short scanner;
	u_short i;

	sst_highwater = 1;	/* index 0 invalid, unused */
	scanner = 0;

	for (i = 0; i < COUNTOF(ntp_keywords); i++) {
		current_keyword = ntp_keywords[i].key;
		scanner =
		    create_scan_states(
			ntp_keywords[i].key,
			ntp_keywords[i].token,
			ntp_keywords[i].followedby,
			scanner);
	}

	return scanner;
}


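/*
 * generate_token_text() emits the keyword_text[] array, which is
 * indexed by token ID minus LOWEST_KEYWORD_ID; token IDs in that
 * range that have no keyword get NULL entries.
 */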
static void
generate_token_text(void)
{
	u_short lowest_id;
	u_short highest_id;
	u_short id_count;
	u_short id;
	u_short i;

	/* sort ntp_keywords in token ID order */
	qsort(ntp_keywords, COUNTOF(ntp_keywords),
	      sizeof(ntp_keywords[0]), compare_key_tok_id);

	lowest_id = ntp_keywords[0].token;
	highest_id = ntp_keywords[COUNTOF(ntp_keywords) - 1].token;
	id_count = highest_id - lowest_id + 1;

	printf("#define LOWEST_KEYWORD_ID %d\n\n", lowest_id);

	printf("const char * const keyword_text[%d] = {", id_count);

	id = lowest_id;
	i = 0;
	while (i < COUNTOF(ntp_keywords)) {
		while (id < ntp_keywords[i].token) {
			printf(",\n\t/* %-5d %5d %20s */\tNULL",
			       id - lowest_id, id, symbname(id));
			id++;
		}
		if (i > 0)
			printf(",");
		printf("\n\t/* %-5d %5d %20s */\t\"%s\"",
		       id - lowest_id, id, symbname(id),
		       ntp_keywords[i].key);
		i++;
		id++;
	}

	printf("\n};\n\n");
}


int
compare_key_tok_id(
	const void *a1,
	const void *a2
	)
{
	const struct key_tok *p1 = a1;
	const struct key_tok *p2 = a2;

	if (p1->token == p2->token)
		return 0;

	if (p1->token < p2->token)
		return -1;
	else
		return 1;
}


int
compare_key_tok_text(
	const void *a1,
	const void *a2
	)
{
	const struct key_tok *p1 = a1;
	const struct key_tok *p2 = a2;

	return strcmp(p1->key, p2->key);
}


/*
 * populate_symb() - populate symb[] lookup array with symbolic token
 *		     names such that symb[T_Age] == "T_Age", etc.
 */
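/*
 * The header is expected to contain old-style token #defines such as
 * (the value is illustrative):
 *	#define T_Abbrev 258
 * Lines that do not match "#define T_<name> <number>" are ignored.
 */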
void
populate_symb(
	char *header_file
	)
{
	FILE *	yh;
	char	line[2 * MAX_TOK_LEN];
	char	name[2 * MAX_TOK_LEN];
	int	token;

	yh = fopen(header_file, "r");
	if (NULL == yh) {
		perror("unable to open yacc/bison header file");
		exit(4);
	}

	while (NULL != fgets(line, sizeof(line), yh))
		if (2 == sscanf(line, "#define %s %d", name, &token)
		    && 'T' == name[0] && '_' == name[1] && token >= 0
		    && token < COUNTOF(symb)) {

			symb[token] = estrdup(name);
			if (strlen(name) > MAX_TOK_LEN) {
				fprintf(stderr,
					"MAX_TOK_LEN %d too small for '%s'\n"
					"Edit keyword-gen.c to raise.\n",
					MAX_TOK_LEN, name);
				exit(10);
			}
		}
	fclose(yh);
}


const char *
symbname(
	u_short token
	)
{
	char *name;

	if (token < COUNTOF(symb) && symb[token] != NULL) {
		name = symb[token];
	} else {
		LIB_GETBUF(name);
		snprintf(name, LIB_BUFLENGTH, "%d", token);
	}

	return name;
}
777