Subversion Repositories tendra.SVN

Rev

Go to most recent revision | Details | Last modification | View Log | RSS feed

Rev Author Line No. Line
2 7u83 1
/*
2
    		 Crown Copyright (c) 1997
3
 
4
    This TenDRA(r) Computer Program is subject to Copyright
5
    owned by the United Kingdom Secretary of State for Defence
6
    acting through the Defence Evaluation and Research Agency
7
    (DERA).  It is made available to Recipients with a
8
    royalty-free licence for its use, reproduction, transfer
9
    to other parties and amendment for any purpose not excluding
10
    product development provided that any such use et cetera
11
    shall be deemed to be acceptance of the following conditions:-
12
 
13
        (1) Its Recipients shall ensure that this Notice is
14
        reproduced upon any copies or amended versions of it;
15
 
16
        (2) Any amended version of it shall be clearly marked to
17
        show both the nature of and the organisation responsible
18
        for the relevant amendment or amendments;
19
 
20
        (3) Its onward transfer from a recipient to another
21
        party shall be deemed to be that party's acceptance of
22
        these conditions;
23
 
24
        (4) DERA gives no warranty or assurance as to its
25
        quality or suitability for any purpose and DERA accepts
26
        no liability whatsoever in relation to any use to which
27
        it may be put.
28
*/
29
 
30
 
31
#include "config.h"
32
#include "c_types.h"
33
#include "hashid_ops.h"
34
#include "id_ops.h"
35
#include "tok_ops.h"
36
#include "error.h"
37
#include "catalog.h"
38
#include "option.h"
39
#include "char.h"
40
#include "declare.h"
41
#include "dump.h"
42
#include "file.h"
43
#include "lex.h"
44
#include "hash.h"
45
#include "macro.h"
46
#include "parse.h"
47
#include "pragma.h"
48
#include "predict.h"
49
#include "preproc.h"
50
#include "psyntax.h"
51
#include "redeclare.h"
52
#include "statement.h"
53
#include "symbols.h"
54
#include "tok.h"
55
#include "token.h"
56
#include "ustring.h"
57
#include "variable.h"
58
 
59
 
60
/*
61
    CHECK THAT PRAGMA SYNTAX AND MAIN SYNTAX ARE IN STEP
62
 
63
    By including both psyntax.h and syntax.h, the compiler will check
64
    that the lex_* macros defined in each are consistent.  Note that
65
    because the #pragma syntax is the same for both C and C++, this
66
    also checks the consistency of the C and C++ syntaxes.  This is only
67
    done in development mode.
68
*/
69
 
70
#ifdef DEBUG
71
#include "syntax.h"
72
#endif
73
 
74
 
75
/*
76
    READ A PREPROCESSING TOKEN
77
 
78
    This routine reads the next preprocessing token from the input file.
79
*/
80
 
81
static PPTOKEN *get_token
82
    PROTO_Z ()
83
{
84
    PPTOKEN *p = new_pptok () ;
85
    int t = read_token () ;
86
    update_column () ;
87
    p->tok = t ;
88
    p->pp_space = WHITE_SPACE ;
89
    p->next = NULL ;
90
    if ( t <= LAST_COMPLEX_TOKEN ) token_parts ( t, p ) ;
91
    return ( p ) ;
92
}
93
 
94
 
95
/*
96
    FIND A KEYWORD LEXICAL TOKEN NUMBER
97
 
98
    This routine finds the lexical token corresponding to the identifier id.
99
    If id does not represent an underlying keyword then an error is reported
100
    and lex_identifier is returned.
101
*/
102
 
103
int find_keyword
104
    PROTO_N ( ( id ) )
105
    PROTO_T ( IDENTIFIER id )
106
{
107
    HASHID nm = DEREF_hashid ( id_name ( id ) ) ;
108
    int t = find_hashid ( nm ) ;
109
    if ( t == lex_identifier ) {
110
	report ( preproc_loc, ERR_pragma_keyword_bad ( nm ) ) ;
111
    }
112
    return ( t ) ;
113
}
114
 
115
 
116
/*
117
    DEFINE A KEYWORD
118
 
119
    This routine defines id to be a keyword with lexical token number t.
120
    Certain language extensions are implemented by tokens disguised as
121
    keywords.
122
*/
123
 
124
void define_keyword
125
    PROTO_N ( ( id, t ) )
126
    PROTO_T ( IDENTIFIER id X int t )
127
{
128
    switch ( t ) {
129
	case lex_identifier : {
130
	    break ;
131
	}
132
	case lex_representation : {
133
	    TOKEN tok = make_sort ( "ZZZ", 1 ) ;
134
	    id = make_token_decl ( tok, 0, id, id ) ;
135
	    token_interface ( id, lex_no_Hdef ) ;
136
	    COPY_int ( tok_proc_key ( tok ), t ) ;
137
	    break ;
138
	}
139
	case lex_typeof : {
140
	    TOKEN tok = make_sort ( "SE", 1 ) ;
141
	    id = make_token_decl ( tok, 0, id, id ) ;
142
	    token_interface ( id, lex_no_Hdef ) ;
143
	    COPY_int ( tok_proc_key ( tok ), t ) ;
144
	    break ;
145
	}
146
	default : {
147
	    HASHID nm = DEREF_hashid ( id_name ( id ) ) ;
148
	    IGNORE make_keyword ( nm, t, NULL_id ) ;
149
	    break ;
150
	}
151
    }
152
    return ;
153
}
154
 
155
 
156
/*
    UNDEFINE A KEYWORD

    This routine undefines the keyword id.  It walks the chain of meanings
    hanging off the identifier's hash table entry and unlinks the first
    keyword-like meaning it finds, also clearing the hash table's cached
    lookup so the next lookup re-walks the chain.
*/

void undef_keyword
    PROTO_N ( ( id ) )
    PROTO_T ( IDENTIFIER id )
{
    unsigned tag ;
    HASHID nm = DEREF_hashid ( id_name ( id ) ) ;
    /* ptr points at the link holding the current chain entry, so the
       entry can be overwritten in place when it is unlinked */
    PTR ( IDENTIFIER ) ptr = hashid_id ( nm ) ;
    do {
	IDENTIFIER pid = DEREF_id ( ptr ) ;
	tag = TAG_id ( pid ) ;
	switch ( tag ) {
	    case id_keyword_tag :
	    case id_iso_keyword_tag :
	    case id_reserved_tag : {
		/* Undefine a keyword */
		if ( do_keyword ) dump_undefine ( pid, &preproc_loc, 1 ) ;
		/* Replace this chain entry by its alias (the next
		   meaning), then invalidate the cached lookup */
		pid = DEREF_id ( id_alias ( pid ) ) ;
		COPY_id ( ptr, pid ) ;
		COPY_id ( hashid_cache ( nm ), NULL_id ) ;
		return ;
	    }
	}
	ptr = id_alias ( pid ) ;
	/* The chain is terminated by a dummy entry */
    } while ( tag != id_dummy_tag ) ;
    return ;
}
188
 
189
 
190
/*
191
    RESCAN A PRAGMA STATEMENT
192
 
193
    The routine read_tendra replaces all identifier tokens within a '#pragma'
194
    command by their corresponding underlying keywords.  This routine restores
195
    these keywords, except that given by s, to identifiers.
196
*/
197
 
198
void rescan_pragma
199
    PROTO_N ( ( s ) )
200
    PROTO_T ( int s )
201
{
202
    PPTOKEN *p ;
203
    for ( p = crt_token ; p != NULL ; p = p->next ) {
204
	int t = p->tok ;
205
	if ( t >= FIRST_KEYWORD && t <= LAST_KEYWORD && t != s ) {
206
	    p->tok = lex_identifier ;
207
	}
208
    }
209
    return ;
210
}
211
 
212
 
213
/*
214
    SET A TOKEN TO A KEYWORD
215
 
216
    This routine sets the preprocessing token p to the keyword corresponding
217
    to the lexical token number t.
218
*/
219
 
220
static void set_token
221
    PROTO_N ( ( p, t ) )
222
    PROTO_T ( PPTOKEN *p X int t )
223
{
224
    HASHID nm = KEYWORD ( t ) ;
225
    p->tok = t ;
226
    p->pp_data.id.hash = nm ;
227
    p->pp_data.id.use = DEREF_id ( hashid_id ( nm ) ) ;
228
    return ;
229
}
230
 
231
 
232
/*
    PATCH A PRAGMA STATEMENT

    This routine is used by the preprocessor to preserve a '#pragma'
    statement.  The arguments are as in parse_pragma.  It rewrites the
    leading token(s) of the cleaned token list so that the preserved
    output uses the appropriate external spelling, then hands the list
    to the preprocessing-directive patcher.
*/

static void patch_pragma
    PROTO_N ( ( p, tendra ) )
    PROTO_T ( PPTOKEN *p X int tendra )
{
    p = clean_tok_list ( p ) ;
    switch ( p->tok ) {
	case lex_interface : {
	    /* Output the current interface mode rather than the literal
	       'interface' keyword */
	    int t = crt_interface ;
	    if ( t == lex_ignore && tendra ) t = lex_reject ;
	    set_token ( p, t ) ;
	    break ;
	}
	case lex_member : {
	    if ( !tendra ) {
		/* NOTE(review): p->next is dereferenced unchecked here -
		   presumably the parser guarantees a following token;
		   confirm before restructuring */
		if ( p->next->tok == lex_definition ) {
		    /* 'member definition' becomes 'DEFINE MEMBER' in the
		       non-TenDRA form */
		    set_token ( p, lex_define_Hcap ) ;
		    set_token ( p->next, lex_member_Hcap ) ;
		}
	    }
	    break ;
	}
	case lex_promoted : {
	    /* Non-TenDRA spelling is 'promote' */
	    if ( !tendra ) set_token ( p, lex_promote ) ;
	    break ;
	}
	case lex_reject : {
	    /* Non-TenDRA spelling is 'ignore' */
	    if ( !tendra ) set_token ( p, lex_ignore ) ;
	    break ;
	}
    }
    patch_preproc_dir ( p ) ;
    if ( tendra ) {
	/* Re-insert the 'TenDRA' (and for tendra == 2, '++') prefix
	   that read_pragma consumed */
	p = patch_tokens ( tendra ) ;
	set_token ( p, lex_tendra ) ;
	if ( tendra == 2 ) p->next->tok = lex_plus_Hplus ;
    }
    return ;
}
277
 
278
 
279
/*
    PARSE A PRAGMA STATEMENT

    This routine parses the '#pragma' statement given by the preprocessing
    tokens p.  tendra is 1 for '#pragma TenDRA' statements, 2 for '#pragma
    TenDRA++' statements, and 0 otherwise.  The current parser state is
    saved around the parse; the return value is the lexical token to hand
    back to the caller (lex_hash_Hpragma when only preprocessing, so the
    statement is preserved in the output).
*/

static int parse_pragma
    PROTO_N ( ( p, tendra ) )
    PROTO_T ( PPTOKEN *p X int tendra )
{
    int nt ;
    PPTOKEN *pt ;
    PARSE_STATE s ;
    int pp = preproc_only ;
    int tok = lex_ignore_token ;

    /* Parsing action: save the current parser state and run the pragma
       grammar over the token list p */
    new_linkage = crt_linkage ;
    save_state ( &s, 0 ) ;
    init_parser ( p ) ;
    crt_loc = preproc_loc ;
    crt_line_changed = 1 ;
    ADVANCE_LEXER ;
    if ( pp ) {
	/* Preprocessor-only mode uses the reduced grammar; any result
	   other than lex_ignore_token is treated as a syntax error */
	parse_preproc ( &tok ) ;
	if ( tok != lex_ignore_token ) {
	    tok = lex_ignore_token ;
	    have_syntax_error = 1 ;
	}
    } else {
	parse_tendra ( &tok ) ;
    }
    nt = crt_lex_token ;
    pt = crt_token ;
    p = restore_parser () ;

    /* Check for end of input */
    switch ( tok ) {
	case lex_set :
	case lex_unused : {
	    /* Patch inset pragma statements: split the line into an
	       inset start/end pair with a fresh trailing newline */
	    PPTOKEN *q = clean_tok_list ( p ) ;
	    PPTOKEN *r = new_pptok () ;
	    p = q ;
	    p->tok = tok ;
	    while ( q->tok != lex_newline ) q = q->next ;
	    q->tok = lex_inset_Hend ;
	    r->tok = lex_newline ;
	    r->next = q->next ;
	    q->next = r ;
	    patch_preproc_dir ( p ) ;
	    tok = lex_inset_Hstart ;
	    /* List has been handed off; don't free or patch it below */
	    p = NULL ;
	    break ;
	}
	default : {
	    /* Should have reached the end of the line */
	    if ( nt != lex_newline && !have_syntax_error ) {
		ERROR err = ERR_lex_parse ( pt ) ;
		err = concat_error ( err, ERR_cpp_end ( lex_pragma ) ) ;
		report ( preproc_loc, err ) ;
	    }
	    break ;
	}
    }

    /* Preprocessing action: either preserve the statement in the output
       or discard the token list */
    if ( pp ) {
	patch_pragma ( p, tendra ) ;
	tok = lex_hash_Hpragma ;
    } else {
	free_tok_list ( p ) ;
    }
    restore_state ( &s ) ;
    crt_linkage = new_linkage ;
    return ( tok ) ;
}
358
 
359
 
360
/*
361
    SKIP TO NEXT COLON
362
 
363
    This routine scans along the list of preprocessing tokens p until it
364
    finds the first colon.  It then returns the following token.  The null
365
    token is returned if there is no colon in p.
366
*/
367
 
368
static PPTOKEN *skip_to_colon
369
    PROTO_N ( ( p ) )
370
    PROTO_T ( PPTOKEN *p )
371
{
372
    while ( p ) {
373
	PPTOKEN *q = p->next ;
374
	if ( p->tok == lex_colon ) return ( q ) ;
375
	p = q ;
376
    }
377
    return ( NULL ) ;
378
}
379
 
380
 
381
/*
    MARK A TOKEN PARAMETER

    This routine marks the TDF token parameter given by the preprocessing
    tokens p.  This consists of an optional 'TAG' followed by an identifier,
    which is optional if n is false.  Macro expansion of this identifier
    is inhibited.  macro is true if the identifier is declared in the
    token namespace.  Returns the token following the parameter, or NULL
    if a required parameter is missing.
*/

static PPTOKEN *mark_tdf_param
    PROTO_N ( ( p, n, macro ) )
    PROTO_T ( PPTOKEN *p X int n X int macro )
{
    if ( p && p->tok == lex_identifier ) {
	int t = find_hashid ( p->pp_data.id.hash ) ;
	if ( t == lex_tag_Hcap ) {
	    if ( p->next && p->next->tok == lex_identifier ) {
		/* Have 'TAG id' */
		p->tok = t ;
		p = p->next ;
		/* Clearing the use field inhibits macro expansion */
		p->pp_data.id.use = NULL_id ;
	    } else {
		/* Have 'TAG' */
		if ( n == 0 ) {
		    /* Interpret as 'TAG' */
		    p->tok = t ;
		} else {
		    /* Interpret as 'id' where id = TAG */
		    p->pp_data.id.use = NULL_id ;
		}
	    }
	} else {
	    /* Have 'id' */
	    if ( macro && preproc_only ) {
		/* Mark macro names when preprocessing */
		HASHID nm = p->pp_data.id.hash ;
		unsigned c = check_macro ( nm, 0 ) ;
		IDENTIFIER id = underlying_id ( p->pp_data.id.use ) ;
		DECL_SPEC ds = DEREF_dspec ( id_storage ( id ) ) ;
		ds |= dspec_token ;
		COPY_dspec ( id_storage ( id ), ds ) ;
		if ( c == PP_TRUE ) {
		    /* Token previously defined as macro: splice in the
		       tokens recording that prior definition */
		    PPTOKEN *q ;
		    token_macro = DEREF_id ( hashid_id ( nm ) ) ;
		    IGNORE patch_cond ( lex_hash_Hop, lex_define ) ;
		    q = patch_tokens ( 2 ) ;
		    q->tok = lex_builtin_Hfile ;
		    q->next->tok = lex_hash_Hop ;
		}
	    }
	    /* Inhibit macro expansion of the parameter name */
	    p->pp_data.id.use = NULL_id ;
	}
	return ( p->next ) ;
    }
    /* No identifier: only acceptable when the parameter is optional */
    if ( n ) return ( NULL ) ;
    return ( p ) ;
}
440
 
441
 
442
/*
    MARK A TOKEN PROGRAM PARAMETER

    This routine marks the TDF token program parameter given by the
    preprocessing tokens p, returning the token following the parameter
    (or the delimiter that ends it), or NULL if the parameter is not
    recognised.
*/

static PPTOKEN *mark_prog_param
    PROTO_N ( ( p ) )
    PROTO_T ( PPTOKEN *p )
{
    if ( p && p->tok == lex_identifier ) {
	int t = find_hashid ( p->pp_data.id.hash ) ;
	switch ( t ) {
	    case lex_exp_Hcap :
	    case lex_nat_Hcap :
	    case lex_int_Hcap :
	    case lex_stmt_Hcap :
	    case lex_proc_Hcap : {
		/* Have the form 'EXP id' etc: step over keyword and name */
		p->tok = t ;
		p = p->next ;
		if ( p ) p = p->next ;
		return ( p ) ;
	    }
	    case lex_type_Hcap : {
		/* Have the form 'TYPE t': scan to the comma or closing
		   brace that ends the type, tracking bracket nesting */
		int depth = 0 ;
		p->tok = t ;
		while ( p ) {
		    /* Step over type */
		    switch ( p->tok ) {
			case lex_open_Hround :
			case lex_open_Hsquare_H1 :
			case lex_open_Hsquare_H2 : {
			    depth++ ;
			    break ;
			}
			case lex_close_Hround :
			case lex_close_Hsquare_H1 :
			case lex_close_Hsquare_H2 : {
			    if ( depth > 0 ) depth-- ;
			    break ;
			}
			case lex_comma : {
			    /* Only a top-level comma ends the type */
			    if ( depth == 0 ) return ( p ) ;
			    break ;
			}
			case lex_close_Hbrace_H1 :
			case lex_close_Hbrace_H2 : {
			    return ( p ) ;
			}
		    }
		    p = p->next ;
		}
		break ;
	    }
	    case lex_member_Hcap : {
		/* Have the form 'MEMBER t : id' */
		p->tok = t ;
		p = skip_to_colon ( p->next ) ;
		if ( p ) p = p->next ;
		return ( p ) ;
	    }
	}
    }
    return ( NULL ) ;
}
510
 
511
 
512
/*
513
    MARK A PRAGMA TOKEN STATEMENT
514
 
515
    The macro expansion of '#pragma token' statements is rather complex.
516
    The token syntax skeleton is not subject to macro expansion, while
517
    any types etc. involved in the specification are.  This routine marks
518
    the skeleton keywords in the list of preprocessing tokens p, returning
519
    the token immediately following the token specification.  It also sets
520
    macro to true if the declared token lies in the macro namespace.
521
*/
522
 
523
static PPTOKEN *mark_tdf_token
524
    PROTO_N ( ( p, macro ) )
525
    PROTO_T ( PPTOKEN *p X int *macro )
526
{
527
    if ( p == NULL ) return ( NULL ) ;
528
    if ( p->tok == lex_identifier ) {
529
	int t = find_hashid ( p->pp_data.id.hash ) ;
530
	switch ( t ) {
531
 
532
	    case lex_exp_Hcap : {
533
		/* Expression token specifiers */
534
		*macro = 1 ;
535
		p->tok = t ;
536
		p = p->next ;
537
		if ( p->tok == lex_identifier ) {
538
		    /* Check for lvalue or rvalue qualifier */
539
		    t = find_hashid ( p->pp_data.id.hash ) ;
540
		    if ( t == lex_lvalue || t == lex_rvalue ||
541
			 t == lex_const ) {
542
			p->tok = t ;
543
		    }
544
		    p = p->next ;
545
		}
546
		if ( p->tok == lex_colon ) {
547
		    /* Step over type name */
548
		    p = skip_to_colon ( p->next ) ;
549
		}
550
		return ( p ) ;
551
	    }
552
 
553
	    case lex_func_Hcap : {
554
		/* Function token specifiers */
555
		*macro = 1 ;
556
		p->tok = t ;
557
		p = skip_to_colon ( p->next ) ;
558
		return ( p ) ;
559
	    }
560
 
561
	    case lex_member_Hcap : {
562
		/* Member token specifiers */
563
		p->tok = t ;
564
		p = p->next ;
565
		if ( p->tok == lex_identifier ) {
566
		    /* Check for access specifier */
567
		    t = find_hashid ( p->pp_data.id.hash ) ;
568
		    if ( t == lex_public || t == lex_protected ||
569
			 t == lex_private ) {
570
			p->tok = t ;
571
			p = p->next ;
572
		    }
573
		}
574
		p = skip_to_colon ( p ) ;
575
		p = skip_to_colon ( p ) ;
576
		return ( p ) ;
577
	    }
578
 
579
	    case lex_proc_Hcap : {
580
		/* Procedure token specifiers */
581
		*macro = 1 ;
582
		p->tok = t ;
583
		p = p->next ;
584
		if ( p == NULL ) return ( NULL ) ;
585
		t = p->tok ;
586
		if ( t == lex_open_Hbrace_H1 || t == lex_open_Hbrace_H2 ) {
587
		    /* General procedure parameters */
588
		    p = p->next ;
589
		    for ( ; ; ) {
590
			if ( p == NULL ) break ;
591
			t = p->tok ;
592
			if ( t == lex_or_H1 ) {
593
			    p = p->next ;
594
			    break ;
595
			}
596
			p = mark_tdf_token ( p, macro ) ;
597
			p = mark_tdf_param ( p, 1, 0 ) ;
598
			if ( p == NULL ) break ;
599
			if ( p->tok == lex_comma ) p = p->next ;
600
		    }
601
		    for ( ; ; ) {
602
			if ( p == NULL ) break ;
603
			t = p->tok ;
604
			if ( t == lex_close_Hbrace_H1 ||
605
			     t == lex_close_Hbrace_H2 ) {
606
			    p = p->next ;
607
			    break ;
608
			}
609
			p = mark_prog_param ( p ) ;
610
			if ( p == NULL ) break ;
611
			if ( p->tok == lex_comma ) p = p->next ;
612
		    }
613
		} else if ( t == lex_open_Hround ) {
614
		    /* Simple procedure parameters */
615
		    p = p->next ;
616
		    for ( ; ; ) {
617
			if ( p == NULL ) break ;
618
			t = p->tok ;
619
			if ( t == lex_close_Hround ) {
620
			    p = p->next ;
621
			    break ;
622
			}
623
			p = mark_tdf_token ( p, macro ) ;
624
			p = mark_tdf_param ( p, 0, 0 ) ;
625
			if ( p == NULL ) break ;
626
			if ( p->tok == lex_comma ) p = p->next ;
627
		    }
628
		}
629
		p = mark_tdf_token ( p, macro ) ;
630
		return ( p ) ;
631
	    }
632
 
633
	    case lex_variety_Hcap : {
634
		/* Integral type token specifiers */
635
		p->tok = t ;
636
		p = p->next ;
637
		if ( p->tok == lex_identifier ) {
638
		    /* Check for signed or unsigned qualifier */
639
		    t = find_hashid ( p->pp_data.id.hash ) ;
640
		    if ( t == lex_signed || t == lex_unsigned ) {
641
			p->tok = t ;
642
			p = p->next ;
643
		    }
644
		}
645
		return ( p ) ;
646
	    }
647
 
648
	    case lex_nat_Hcap :
649
	    case lex_int_Hcap :
650
	    case lex_stmt_Hcap : {
651
		/* Simple token specifiers */
652
		*macro = 1 ;
653
		p->tok = t ;
654
		return ( p->next ) ;
655
	    }
656
 
657
	    case lex_arith_Hcap :
658
	    case lex_class_Hcap :
659
	    case lex_float_Hcap :
660
	    case lex_scalar_Hcap :
661
	    case lex_struct_Hcap :
662
	    case lex_type_Hcap :
663
	    case lex_union_Hcap : {
664
		/* Type token specifiers */
665
		p->tok = t ;
666
		return ( p->next ) ;
667
	    }
668
	}
669
    }
670
    return ( p->next ) ;
671
}
672
 
673
 
674
/*
675
    READ AN EXTERNAL TOKEN NAME
676
 
677
    This routine processes an external token name.  This consists of a
678
    hash symbol, pointed to by p, followed a list of preprocessing
679
    tokens.  Note that the result is an extended identifier, unless it
680
    is a simple identifier.
681
*/
682
 
683
static PPTOKEN *quote_token_name
684
    PROTO_N ( ( p ) )
685
    PROTO_T ( PPTOKEN *p )
686
{
687
    PPTOKEN *q = p->next ;
688
    if ( q != NULL ) {
689
	/* All following tokens are quoted */
690
	string s ;
691
	unsigned long sp = q->pp_space ;
692
	IGNORE quote_tok_list ( q, 0, char_quote ) ;
693
	free_tok_list ( q ) ;
694
	q = new_pptok () ;
695
	q->pp_space = sp ;
696
	q->next = NULL ;
697
	s = token_buff.start ;
698
	if ( ustrseq ( s, "-" ) ) {
699
	    /* Special form '-' */
700
	    q->tok = lex_minus ;
701
	} else {
702
	    /* Create an identifier */
703
	    unsigned long h = hash ( s ) ;
704
	    token_hashid = lookup_name ( s, h, 1, lex_unknown ) ;
705
	    q->tok = lex_identifier ;
706
	    token_parts ( lex_identifier, q ) ;
707
	}
708
	p->next = q ;
709
	p = q ;
710
    }
711
    return ( p ) ;
712
}
713
 
714
 
715
/*
    READ A PRAGMA TOKEN STATEMENT

    This routine processes a '#pragma token' statement.  p gives the first
    preprocessing token (i.e. 'token').  tendra is as in parse_pragma.
    The rest of the line is read, the token-specification skeleton is
    marked to inhibit its macro expansion, any external token name is
    quoted, and the resulting line is macro expanded and parsed.
*/

static int read_tdf_token
    PROTO_N ( ( p, tendra ) )
    PROTO_T ( PPTOKEN *p X int tendra )
{
    /* Read and macro expand the rest of the line */
    int t ;
    PPTOKEN *q = p ;
    while ( q->next ) q = q->next ;
    if ( q->tok != lex_newline ) {
	int macro = 0 ;
	PPTOKEN *r = read_line ( lex_ignore_token, lex_ignore_token ) ;
	if ( r ) r->pp_space = WHITE_SPACE ;
	q->next = r ;
	/* Mark the specification skeleton and the token name so they
	   are not macro expanded */
	r = mark_tdf_token ( r, &macro ) ;
	r = mark_tdf_param ( r, 1, macro ) ;
	while ( r ) {
	    /* Find token name */
	    t = r->tok ;
	    if ( t == lex_hash_H1 || t == lex_hash_H2 ) {
		/* External token name follows a hash */
		q = quote_token_name ( r ) ;
		break ;
	    }
	    q = r ;
	    r = r->next ;
	}

	/* Add newline token */
	while ( q->next ) q = q->next ;
	q->next = new_pptok () ;
	q->next->tok = lex_newline ;
	q->next->next = NULL ;
    }
    q = expand_tok_list ( p ) ;
    free_tok_list ( p ) ;

    /* Parse the line, flagging that a token declaration is in progress */
    in_token_decl = 1 ;
    decl_loc = preproc_loc ;
    t = parse_pragma ( q, tendra ) ;
    in_token_decl = 0 ;
    return ( t ) ;
}
764
 
765
 
766
/*
767
    READ A PRAGMA TENDRA STATEMENT
768
 
769
    This routine processes a '#pragma TenDRA' statement, returning the
770
    corresponding lexical token.  One or two tokens from the line will
771
    have already been read into p.  tendra is as in parse_pragma.
772
*/
773
 
774
static int read_tendra
775
    PROTO_N ( ( p, tendra ) )
776
    PROTO_T ( PPTOKEN *p X int tendra )
777
{
778
    /* Read and macro expand the rest of the line */
779
    PPTOKEN *q = p ;
780
    while ( q->next ) q = q->next ;
781
    if ( q->tok != lex_newline ) {
782
	PPTOKEN *r = read_line ( lex_ignore_token, lex_newline ) ;
783
	if ( r ) r->pp_space = WHITE_SPACE ;
784
	q->next = r ;
785
    }
786
    q = expand_tok_list ( p ) ;
787
    free_tok_list ( p ) ;
788
    p = q ;
789
 
790
    /* Replace any identifiers by keywords */
791
    for ( q = p ; q != NULL ; q = q->next ) {
792
	int t = q->tok ;
793
	if ( t == lex_identifier ) {
794
	    t = find_hashid ( q->pp_data.id.hash ) ;
795
	    q->tok = t ;
796
	}
797
    }
798
 
799
    /* Parse the line */
800
    return ( parse_pragma ( p, tendra ) ) ;
801
}
802
 
803
 
804
/*
    READ A PRAGMA INTERFACE STATEMENT

    This routine processes a '#pragma interface' statement, returning the
    corresponding lexical token.  One or two tokens from the line will
    have already been read into p.  tendra is as in parse_pragma.
*/

static int read_interface
    PROTO_N ( ( p, tendra ) )
    PROTO_T ( PPTOKEN *p X int tendra )
{
    /* Read and macro expand the rest of the line */
    int tok ;
    int nl = 0 ;
    PPTOKEN *q = p ;
    PPTOKEN *r = p ;
    while ( q->next ) q = q->next ;
    if ( q->tok != lex_newline ) {
	PPTOKEN *s = read_line ( lex_ignore_token, lex_ignore_token ) ;
	if ( s ) s->pp_space = WHITE_SPACE ;
	q->next = s ;
	/* Remember that a terminating newline must be appended */
	nl = 1 ;
    }
    for ( q = p ; q != NULL ; q = q->next ) {
	/* Suppress expansion of 'TAG' */
	int t = q->tok ;
	if ( t == lex_identifier ) {
	    t = find_hashid ( q->pp_data.id.hash ) ;
	    if ( t == lex_tag_Hcap ) q->tok = t ;
	} else if ( t == lex_hash_H1 || t == lex_hash_H2 ) {
	    /* External token name: quote the remainder of the line */
	    r = quote_token_name ( q ) ;
	    break ;
	}
	/* r tracks the last token examined */
	r = q ;
    }

    /* Add newline token */
    if ( nl ) {
	while ( r->next ) r = r->next ;
	r->next = new_pptok () ;
	r->next->tok = lex_newline ;
	r->next->next = NULL ;
    }

    /* Parse the line */
    tok = parse_pragma ( p, tendra ) ;
    return ( tok ) ;
}
853
 
854
 
855
/*
    PROCESS A NON-TENDRA PRAGMA STATEMENT

    This routine processes a '#pragma' statement in which there is no
    TenDRA keyword, or such a keyword is optional.  The argument p gives
    the first preprocessing token of the statement, while tendra is as
    in parse_pragma.  The corresponding lexical token value is returned
    if the statement is recognised, otherwise lex_unknown is returned.
    The non-TenDRA and TenDRA forms may differ as follows:

	token				TenDRA token
	extend interface		TenDRA extend
	implement interface		TenDRA implement
	define				TenDRA define
	no_def				TenDRA no_def
	ignore				TenDRA reject
	interface			TenDRA interface
	promote				TenDRA promoted
	compute promote			TenDRA compute promote (?)
	integer literal			TenDRA integer literal
	external volatile		TenDRA external volatile
	DEFINE MEMBER			TenDRA member definition

    Note that any look-ahead token read here is linked onto p->next so
    that it is freed (or reported) along with p by the caller.
*/

static int read_non_tendra
    PROTO_N ( ( p, tendra ) )
    PROTO_T ( PPTOKEN *p X int tendra )
{
    if ( p->tok == lex_identifier ) {
	int t = find_hashid ( p->pp_data.id.hash ) ;
	switch ( t ) {

	    case lex_token : {
		/* Token syntax */
		p->tok = t ;
		return ( read_tdf_token ( p, tendra ) ) ;
	    }

	    case lex_extend :
	    case lex_implement : {
		/* Interface inclusion */
		int pp ;
		if ( !tendra ) {
		    /* Need 'interface' for non-TenDRA form */
		    PPTOKEN *q = get_token () ;
		    p->next = q ;
		    if ( q->tok == lex_identifier ) {
			int s = find_hashid ( q->pp_data.id.hash ) ;
			if ( s != lex_interface ) break ;
		    }
		}
		pp = read_include ( 1, lex_pragma ) ;
		if ( pp == lex_included ) {
		    /* Update current interface flag */
		    if ( t == lex_implement ) {
			crt_interface = lex_define ;
		    } else {
			if ( crt_interface != lex_no_Hdef ) {
			    crt_interface = lex_ignore ;
			}
		    }
		}
		return ( pp ) ;
	    }

	    case lex_define :
	    case lex_no_Hdef :
	    case lex_interface : {
		/* Interface listing */
		p->tok = t ;
		return ( read_interface ( p, tendra ) ) ;
	    }

	    case lex_reject : {
		/* Interface listing (TenDRA form) */
		if ( tendra ) {
		    p->tok = t ;
		    return ( read_interface ( p, tendra ) ) ;
		}
		break ;
	    }

	    case lex_ignore : {
		/* Interface listing (non-TenDRA form) */
		if ( !tendra ) {
		    /* 'ignore' is the external spelling of 'reject' */
		    p->tok = lex_reject ;
		    return ( read_interface ( p, tendra ) ) ;
		}
		break ;
	    }

	    case lex_undef : {
		/* Token undefining */
		PPTOKEN *q = get_token () ;
		p->next = q ;
		if ( q->tok == lex_identifier ) {
		    int s = find_hashid ( q->pp_data.id.hash ) ;
		    if ( s == lex_token ) {
			p->tok = t ;
			q->tok = s ;
			return ( read_interface ( p, tendra ) ) ;
		    }
		}
		break ;
	    }

	    case lex_promote : {
		/* Promotion specification (non-TenDRA form) */
		if ( !tendra ) {
		    set_token ( p, lex_promoted ) ;
		    return ( read_tendra ( p, tendra ) ) ;
		}
		break ;
	    }

	    case lex_promoted : {
		/* Promotion specification (TenDRA form) */
		if ( tendra ) {
		    set_token ( p, lex_promoted ) ;
		    return ( read_tendra ( p, tendra ) ) ;
		}
		break ;
	    }

	    case lex_compute : {
		/* Computed promotion specification */
		PPTOKEN *q = get_token () ;
		p->next = q ;
		if ( q->tok == lex_identifier ) {
		    int s = find_hashid ( q->pp_data.id.hash ) ;
		    if ( s == lex_promote ) {
			set_token ( p, t ) ;
			set_token ( q, s ) ;
			return ( read_tendra ( p, tendra ) ) ;
		    }
		}
		break ;
	    }

	    case lex_integer : {
		/* Integer literal specification */
		PPTOKEN *q = get_token () ;
		p->next = q ;
		if ( q->tok == lex_identifier ) {
		    int s = find_hashid ( q->pp_data.id.hash ) ;
		    if ( s == lex_lit ) {
			set_token ( p, t ) ;
			set_token ( q, s ) ;
			return ( read_tendra ( p, tendra ) ) ;
		    }
		}
		break ;
	    }

	    case lex_define_Hcap : {
		/* Member definition (non-TenDRA form) */
		if ( !tendra ) {
		    PPTOKEN *q = get_token () ;
		    p->next = q ;
		    if ( q->tok == lex_identifier ) {
			int s = find_hashid ( q->pp_data.id.hash ) ;
			if ( s == lex_member_Hcap ) {
			    /* 'DEFINE MEMBER' maps to 'member definition' */
			    set_token ( p, lex_member ) ;
			    set_token ( q, lex_definition ) ;
			    return ( read_tendra ( p, tendra ) ) ;
			}
		    }
		}
		break ;
	    }

	    case lex_member : {
		/* Member definition (TenDRA form) */
		if ( tendra ) {
		    PPTOKEN *q = get_token () ;
		    p->next = q ;
		    if ( q->tok == lex_identifier ) {
			int s = find_hashid ( q->pp_data.id.hash ) ;
			if ( s == lex_definition ) {
			    set_token ( p, lex_member ) ;
			    set_token ( q, lex_definition ) ;
			    return ( read_tendra ( p, tendra ) ) ;
			}
		    }
		}
		break ;
	    }

	    case lex_accept : {
		/* Conversion tokens (non-TenDRA form) */
		if ( !tendra ) {
		    PPTOKEN *q = get_token () ;
		    p->next = q ;
		    if ( q->tok == lex_ellipsis ) {
			return ( read_tendra ( p, tendra ) ) ;
		    }
		    if ( q->tok == lex_identifier ) {
			int s = find_hashid ( q->pp_data.id.hash ) ;
			if ( s == lex_conversion ) {
			    return ( read_tendra ( p, tendra ) ) ;
			}
		    }
		}
		break ;
	    }

	    case lex_external : {
		/* External volatility (non-TenDRA form) */
		if ( !tendra ) {
		    PPTOKEN *q = get_token () ;
		    p->next = q ;
		    if ( q->tok == lex_identifier ) {
			int s = find_hashid ( q->pp_data.id.hash ) ;
			if ( s == lex_volatile ) {
			    return ( read_tendra ( p, tendra ) ) ;
			}
		    }
		}
		break ;
	    }

	    case lex_preserve : {
		/* Preserve statics (non-TenDRA form) */
		return ( read_tendra ( p, tendra ) ) ;
	    }
	}
    }
    return ( lex_unknown ) ;
}
1084
 
1085
 
1086
/*
    PROCESS A PRAGMA STATEMENT

    This routine analyses a '#pragma' statement, returning the corresponding
    lexical token.  It is called from read_preproc_dir (q.v.) immediately
    after the '#pragma' directive has been identified.  It is not called for
    skipped '#pragma' directives.  The calling routine will skip to the end
    of the preprocessing directive if necessary.
*/

int read_pragma
    PROTO_Z ()
{
    int t ;
    PPTOKEN *p = get_token () ;
    if ( p->tok == lex_identifier ) {
	t = find_hashid ( p->pp_data.id.hash ) ;
	switch ( t ) {

	    case lex_tendra : {
		/* Deal with '#pragma TenDRA' */
		int tendra = 1 ;
		/* Return the consumed 'TenDRA' token to the free list */
		p->next = free_tokens ;
		free_tokens = p ;
		p = get_token () ;
		if ( p->tok == lex_plus_Hplus ) {
		    /* Allow for TenDRA++ */
#if LANGUAGE_CPP
		    p->next = free_tokens ;
		    free_tokens = p ;
		    p = get_token () ;
		    tendra = 2 ;
#else
		    /* TenDRA++ pragmas are ignored by the C compiler */
		    return ( lex_ignore_token ) ;
#endif
		}

		/* Allow for optional TenDRA pragmas */
		t = read_non_tendra ( p, tendra ) ;
		if ( t != lex_unknown ) return ( t ) ;

		/* Deal with TenDRA pragmas */
		t = read_tendra ( p, tendra ) ;
		return ( t ) ;
	    }

	    case lex_ident : {
		/* Deal with '#pragma ident' */
		if ( !option ( OPT_ppdir_ident_ignore ) ) {
		    read_ident ( lex_pragma ) ;
		}
		return ( lex_ignore_token ) ;
	    }

	    case lex_weak : {
		/* Deal with '#pragma weak' */
		if ( !option ( OPT_ppdir_weak_ignore ) ) {
		    read_weak ( lex_pragma ) ;
		}
		return ( lex_ignore_token ) ;
	    }
	}
    }

    /* Deal with non-TenDRA pragmas */
    t = read_non_tendra ( p, 0 ) ;
    if ( t != lex_unknown ) return ( t ) ;

    /* Report unknown pragmas */
    free_tok_list ( p ) ;
    report ( preproc_loc, ERR_cpp_pragma_unknown ( lex_pragma ) ) ;
    return ( lex_ignore_token ) ;
}
1159
 
1160
 
1161
/*
1162
    ANALYSE A LINT COMMENT
1163
 
1164
    This routine analyses the comment built up in token_buff for lint
1165
    format comments.  These are interpreted locally.  This can go slightly
1166
    wrong with look-ahead, in which case the more structured keyword
1167
    approach should be used.  Other fairly standard lint comments include
1168
    VARARGS, LINTLIBRARY, CONSTANTCONDITION (CONSTCOND), EMPTY, LINTED,
1169
    PROTOLIB, PRINTFLIKE and SCANFLIKE.
1170
*/
1171
 
1172
int lint_comment
1173
    PROTO_Z ()
1174
{
1175
    string t ;
1176
    size_t sz ;
1177
    character c ;
1178
    string s = token_buff.start ;
1179
    while ( c = *s, is_white_char ( ( unsigned long ) c ) ) {
1180
	/* Step over initial white space */
1181
	s++ ;
1182
    }
1183
    t = s ;
1184
    while ( c = *s, is_alpha_char ( ( unsigned long ) c ) ) {
1185
	/* Scan to end of identifier */
1186
	s++ ;
1187
    }
1188
 
1189
    /* Check identifier */
1190
    sz = ( size_t ) ( s - t ) ;
1191
    switch ( sz ) {
1192
	case 8 : {
1193
	    if ( strncmp ( strlit ( t ), "ARGSUSED", sz ) == 0 ) {
1194
		/* Indicate unused variables */
1195
		suppress_variable = 1 ;
1196
	    } else if ( strncmp ( strlit ( t ), "FALLTHRU", sz ) == 0 ) {
1197
		/* Suppress fall through errors */
1198
		suppress_fall = 1 ;
1199
	    }
1200
	    break ;
1201
	}
1202
	case 10 : {
1203
	    if ( strncmp ( strlit ( t ), "NOTREACHED", sz ) == 0 ) {
1204
		/* Suppress unreached code errors */
1205
		unreached_last = 1 ;
1206
	    }
1207
	    break ;
1208
	}
1209
	case 11 : {
1210
	    if ( strncmp ( strlit ( t ), "FALLTHROUGH", sz ) == 0 ) {
1211
		/* Suppress fall through errors */
1212
		suppress_fall = 1 ;
1213
	    }
1214
	    break ;
1215
	}
1216
    }
1217
 
1218
    /* Rest of comment is ignored */
1219
    return ( lex_ignore_token ) ;
1220
}