X-Git-Url: https://oss.titaniummirror.com/gitweb?a=blobdiff_plain;f=gcc%2Ftree.c;fp=gcc%2Ftree.c;h=a33596d6be3f1e4ce0415ff0842a44c2ae85dbb3;hb=6fed43773c9b0ce596dca5686f37ac3fc0fa11c0;hp=5c933720c34ca39a819a4433f81fc1e0f6588d24;hpb=27b11d56b743098deb193d510b337ba22dc52e5c;p=msp430-gcc.git diff --git a/gcc/tree.c b/gcc/tree.c index 5c933720..a33596d6 100644 --- a/gcc/tree.c +++ b/gcc/tree.c @@ -1,12 +1,13 @@ /* Language-independent node constructors for parse phase of GNU compiler. Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, - 1999, 2000, 2001, 2002 Free Software Foundation, Inc. + 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 + Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free -Software Foundation; either version 2, or (at your option) any later +Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY @@ -15,9 +16,8 @@ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +along with GCC; see the file COPYING3. If not see +. */ /* This file contains the low level primitives for operating on tree nodes, including allocation, list operations, interning of identifiers, @@ -27,16 +27,15 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA nodes of that code. It is intended to be language-independent, but occasionally - calls language-dependent routines defined (for C) in typecheck.c. - - The low-level allocation routines oballoc and permalloc - are used also for allocating many other kinds of objects - by all passes of the compiler. */ + calls language-dependent routines defined (for C) in typecheck.c. */ #include "config.h" #include "system.h" +#include "coretypes.h" +#include "tm.h" #include "flags.h" #include "tree.h" +#include "real.h" #include "tm_p.h" #include "function.h" #include "obstack.h" @@ -46,72 +45,79 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "output.h" #include "target.h" #include "langhooks.h" +#include "tree-iterator.h" +#include "basic-block.h" +#include "tree-flow.h" +#include "params.h" +#include "pointer-set.h" +#include "fixed-value.h" -#define obstack_chunk_alloc xmalloc -#define obstack_chunk_free free -/* obstack.[ch] explicitly declined to prototype this. */ -extern int _obstack_allocated_p PARAMS ((struct obstack *h, PTR obj)); - -static void unsave_expr_now_r PARAMS ((tree)); - -/* Objects allocated on this obstack last forever. */ - -struct obstack permanent_obstack; - -/* Table indexed by tree code giving a string containing a character - classifying the tree code. Possibilities are - t, d, s, c, r, <, 1, 2 and e. See tree.def for details. */ +/* Tree code classes. */ #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE, +#define END_OF_BASE_TREE_CODES tcc_exceptional, -char tree_code_type[MAX_TREE_CODES] = { -#include "tree.def" +const enum tree_code_class tree_code_type[] = { +#include "all-tree.def" }; + #undef DEFTREECODE +#undef END_OF_BASE_TREE_CODES /* Table indexed by tree code giving number of expression operands beyond the fixed part of the node structure. Not used for types or decls. 
*/ #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH, +#define END_OF_BASE_TREE_CODES 0, -int tree_code_length[MAX_TREE_CODES] = { -#include "tree.def" +const unsigned char tree_code_length[] = { +#include "all-tree.def" }; + #undef DEFTREECODE +#undef END_OF_BASE_TREE_CODES /* Names of tree components. Used for printing out the tree and error messages. */ #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME, +#define END_OF_BASE_TREE_CODES "@dummy", -const char *tree_code_name[MAX_TREE_CODES] = { -#include "tree.def" +const char *const tree_code_name[] = { +#include "all-tree.def" }; + #undef DEFTREECODE +#undef END_OF_BASE_TREE_CODES + +/* Each tree code class has an associated string representation. + These must correspond to the tree_code_class entries. */ + +const char *const tree_code_class_strings[] = +{ + "exceptional", + "constant", + "type", + "declaration", + "reference", + "comparison", + "unary", + "binary", + "statement", + "vl_exp", + "expression" +}; + +/* obstack.[ch] explicitly declined to prototype this. */ +extern int _obstack_allocated_p (struct obstack *h, void *obj); +#ifdef GATHER_STATISTICS /* Statistics-gathering stuff. */ -typedef enum -{ - d_kind, - t_kind, - b_kind, - s_kind, - r_kind, - e_kind, - c_kind, - id_kind, - perm_list_kind, - temp_list_kind, - vec_kind, - x_kind, - lang_decl, - lang_type, - all_kinds -} tree_node_kind; int tree_node_counts[(int) all_kinds]; int tree_node_sizes[(int) all_kinds]; +/* Keep in sync with tree.h:enum tree_node_kind. */ static const char * const tree_node_kind_names[] = { "decls", "types", @@ -124,20 +130,25 @@ static const char * const tree_node_kind_names[] = { "perm_tree_lists", "temp_tree_lists", "vecs", + "binfos", + "ssa names", + "constructors", "random kinds", "lang_decl kinds", - "lang_type kinds" + "lang_type kinds", + "omp clauses", }; +#endif /* GATHER_STATISTICS */ /* Unique id for next decl created. */ -static int next_decl_uid; +static GTY(()) int next_decl_uid; /* Unique id for next type created. */ -static int next_type_uid = 1; +static GTY(()) int next_type_uid = 1; /* Since we cannot rehash a type after it is in the table, we have to keep the hash code. */ -struct type_hash +struct type_hash GTY(()) { unsigned long hash; tree type; @@ -153,299 +164,548 @@ struct type_hash same table, they are completely independent, and the hash code is computed differently for each of these. */ -htab_t type_hash_table; - -static void build_real_from_int_cst_1 PARAMS ((PTR)); -static void set_type_quals PARAMS ((tree, int)); -static void append_random_chars PARAMS ((char *)); -static int type_hash_eq PARAMS ((const void*, const void*)); -static unsigned int type_hash_hash PARAMS ((const void*)); -static void print_type_hash_statistics PARAMS((void)); -static void finish_vector_type PARAMS((tree)); -static tree make_vector PARAMS ((enum machine_mode, tree, int)); -static int type_hash_marked_p PARAMS ((const void *)); -static void type_hash_mark PARAMS ((const void *)); -static int mark_tree_hashtable_entry PARAMS((void **, void *)); - -/* If non-null, these are language-specific helper functions for - unsave_expr_now. If present, LANG_UNSAVE is called before its - argument (an UNSAVE_EXPR) is to be unsaved, and all other - processing in unsave_expr_now is aborted. LANG_UNSAVE_EXPR_NOW is - called from unsave_expr_1 for language-specific tree codes. */ -void (*lang_unsave) PARAMS ((tree *)); -void (*lang_unsave_expr_now) PARAMS ((tree)); - -/* If non-null, these are language-specific helper functions for - unsafe_for_reeval. 
Return negative to not handle some tree. */ -int (*lang_unsafe_for_reeval) PARAMS ((tree)); - -/* Set the DECL_ASSEMBLER_NAME for a node. If it is the sort of thing - that the assembler should talk about, set DECL_ASSEMBLER_NAME to an - appropriate IDENTIFIER_NODE. Otherwise, set it to the - ERROR_MARK_NODE to ensure that the assembler does not talk about - it. */ -void (*lang_set_decl_assembler_name) PARAMS ((tree)); - +static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash))) + htab_t type_hash_table; + +/* Hash table and temporary node for larger integer const values. */ +static GTY (()) tree int_cst_node; +static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node))) + htab_t int_cst_hash_table; + +/* Hash table for optimization flags and target option flags. Use the same + hash table for both sets of options. Nodes for building the current + optimization and target option nodes. The assumption is most of the time + the options created will already be in the hash table, so we avoid + allocating and freeing up a node repeatably. */ +static GTY (()) tree cl_optimization_node; +static GTY (()) tree cl_target_option_node; +static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node))) + htab_t cl_option_hash_table; + +/* General tree->tree mapping structure for use in hash tables. */ + + +static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map))) + htab_t debug_expr_for_decl; + +static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map))) + htab_t value_expr_for_decl; + +static GTY ((if_marked ("tree_priority_map_marked_p"), + param_is (struct tree_priority_map))) + htab_t init_priority_for_decl; + +static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map))) + htab_t restrict_base_for_decl; + +static void set_type_quals (tree, int); +static int type_hash_eq (const void *, const void *); +static hashval_t type_hash_hash (const void *); +static hashval_t int_cst_hash_hash (const void *); +static int int_cst_hash_eq (const void *, const void *); +static hashval_t cl_option_hash_hash (const void *); +static int cl_option_hash_eq (const void *, const void *); +static void print_type_hash_statistics (void); +static void print_debug_expr_statistics (void); +static void print_value_expr_statistics (void); +static int type_hash_marked_p (const void *); +static unsigned int type_hash_list (const_tree, hashval_t); +static unsigned int attribute_hash_list (const_tree, hashval_t); + tree global_trees[TI_MAX]; tree integer_types[itk_none]; + +unsigned char tree_contains_struct[MAX_TREE_CODES][64]; + +/* Number of operands for each OpenMP clause. */ +unsigned const char omp_clause_num_ops[] = +{ + 0, /* OMP_CLAUSE_ERROR */ + 1, /* OMP_CLAUSE_PRIVATE */ + 1, /* OMP_CLAUSE_SHARED */ + 1, /* OMP_CLAUSE_FIRSTPRIVATE */ + 2, /* OMP_CLAUSE_LASTPRIVATE */ + 4, /* OMP_CLAUSE_REDUCTION */ + 1, /* OMP_CLAUSE_COPYIN */ + 1, /* OMP_CLAUSE_COPYPRIVATE */ + 1, /* OMP_CLAUSE_IF */ + 1, /* OMP_CLAUSE_NUM_THREADS */ + 1, /* OMP_CLAUSE_SCHEDULE */ + 0, /* OMP_CLAUSE_NOWAIT */ + 0, /* OMP_CLAUSE_ORDERED */ + 0, /* OMP_CLAUSE_DEFAULT */ + 3, /* OMP_CLAUSE_COLLAPSE */ + 0 /* OMP_CLAUSE_UNTIED */ +}; + +const char * const omp_clause_code_name[] = +{ + "error_clause", + "private", + "shared", + "firstprivate", + "lastprivate", + "reduction", + "copyin", + "copyprivate", + "if", + "num_threads", + "schedule", + "nowait", + "ordered", + "default", + "collapse", + "untied" +}; -/* Set the DECL_ASSEMBLER_NAME for DECL. 
*/ -void -set_decl_assembler_name (decl) - tree decl; -{ - /* The language-independent code should never use the - DECL_ASSEMBLER_NAME for lots of DECLs. Only FUNCTION_DECLs and - VAR_DECLs for variables with static storage duration need a real - DECL_ASSEMBLER_NAME. */ - if (TREE_CODE (decl) == FUNCTION_DECL - || (TREE_CODE (decl) == VAR_DECL - && (TREE_STATIC (decl) - || DECL_EXTERNAL (decl) - || TREE_PUBLIC (decl)))) - /* By default, assume the name to use in assembly code is the - same as that used in the source language. (That's correct - for C, and GCC used to set DECL_ASSEMBLER_NAME to the same - value as DECL_NAME in build_decl, so this choice provides - backwards compatibility with existing front-ends. */ - SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl)); - else - /* Nobody should ever be asking for the DECL_ASSEMBLER_NAME of - these DECLs -- unless they're in language-dependent code, in - which case lang_set_decl_assembler_name should handle things. */ - abort (); -} - -/* Init the principal obstacks. */ +/* Init tree.c. */ void -init_obstacks () +init_ttree (void) { - gcc_obstack_init (&permanent_obstack); - /* Initialize the hash table of types. */ - type_hash_table = htab_create (TYPE_HASH_INITIAL_SIZE, type_hash_hash, - type_hash_eq, 0); - ggc_add_deletable_htab (type_hash_table, type_hash_marked_p, - type_hash_mark); - ggc_add_tree_root (global_trees, TI_MAX); - ggc_add_tree_root (integer_types, itk_none); + type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash, + type_hash_eq, 0); + + debug_expr_for_decl = htab_create_ggc (512, tree_map_hash, + tree_map_eq, 0); + + value_expr_for_decl = htab_create_ggc (512, tree_map_hash, + tree_map_eq, 0); + init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash, + tree_priority_map_eq, 0); + restrict_base_for_decl = htab_create_ggc (256, tree_map_hash, + tree_map_eq, 0); + + int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash, + int_cst_hash_eq, NULL); + + int_cst_node = make_node (INTEGER_CST); + + cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash, + cl_option_hash_eq, NULL); + + cl_optimization_node = make_node (OPTIMIZATION_NODE); + cl_target_option_node = make_node (TARGET_OPTION_NODE); + + tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON] = 1; + tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_NON_COMMON] = 1; + tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON] = 1; + - /* Set lang_set_decl_set_assembler_name to a default value. 
*/ - lang_set_decl_assembler_name = set_decl_assembler_name; + tree_contains_struct[CONST_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[VAR_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[PARM_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[RESULT_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[TYPE_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[LABEL_DECL][TS_DECL_COMMON] = 1; + tree_contains_struct[FIELD_DECL][TS_DECL_COMMON] = 1; + + + tree_contains_struct[CONST_DECL][TS_DECL_WRTL] = 1; + tree_contains_struct[VAR_DECL][TS_DECL_WRTL] = 1; + tree_contains_struct[PARM_DECL][TS_DECL_WRTL] = 1; + tree_contains_struct[RESULT_DECL][TS_DECL_WRTL] = 1; + tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL] = 1; + tree_contains_struct[LABEL_DECL][TS_DECL_WRTL] = 1; + + tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[NAME_MEMORY_TAG][TS_DECL_MINIMAL] = 1; + tree_contains_struct[SYMBOL_MEMORY_TAG][TS_DECL_MINIMAL] = 1; + tree_contains_struct[MEMORY_PARTITION_TAG][TS_DECL_MINIMAL] = 1; + + tree_contains_struct[NAME_MEMORY_TAG][TS_MEMORY_TAG] = 1; + tree_contains_struct[SYMBOL_MEMORY_TAG][TS_MEMORY_TAG] = 1; + tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_TAG] = 1; + + tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_PARTITION_TAG] = 1; + + tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS] = 1; + tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS] = 1; + tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS] = 1; + tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_WITH_VIS] = 1; + + tree_contains_struct[VAR_DECL][TS_VAR_DECL] = 1; + tree_contains_struct[FIELD_DECL][TS_FIELD_DECL] = 1; + tree_contains_struct[PARM_DECL][TS_PARM_DECL] = 1; + tree_contains_struct[LABEL_DECL][TS_LABEL_DECL] = 1; + tree_contains_struct[RESULT_DECL][TS_RESULT_DECL] = 1; + tree_contains_struct[CONST_DECL][TS_CONST_DECL] = 1; + tree_contains_struct[TYPE_DECL][TS_TYPE_DECL] = 1; + tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL] = 1; + tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL] = 1; + tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON] = 1; + + lang_hooks.init_ts (); } -/* Allocate SIZE bytes in the permanent obstack - and return a pointer to them. */ +/* The name of the object as the assembler will see it (but before any + translations made by ASM_OUTPUT_LABELREF). Often this is the same + as DECL_NAME. It is an IDENTIFIER_NODE. */ +tree +decl_assembler_name (tree decl) +{ + if (!DECL_ASSEMBLER_NAME_SET_P (decl)) + lang_hooks.set_decl_assembler_name (decl); + return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name; +} + +/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. 
*/ -char * -permalloc (size) - int size; +bool +decl_assembler_name_equal (tree decl, const_tree asmname) { - return (char *) obstack_alloc (&permanent_obstack, size); + tree decl_asmname = DECL_ASSEMBLER_NAME (decl); + const char *decl_str; + const char *asmname_str; + bool test = false; + + if (decl_asmname == asmname) + return true; + + decl_str = IDENTIFIER_POINTER (decl_asmname); + asmname_str = IDENTIFIER_POINTER (asmname); + + + /* If the target assembler name was set by the user, things are trickier. + We have a leading '*' to begin with. After that, it's arguable what + is the correct thing to do with -fleading-underscore. Arguably, we've + historically been doing the wrong thing in assemble_alias by always + printing the leading underscore. Since we're not changing that, make + sure user_label_prefix follows the '*' before matching. */ + if (decl_str[0] == '*') + { + size_t ulp_len = strlen (user_label_prefix); + + decl_str ++; + + if (ulp_len == 0) + test = true; + else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0) + decl_str += ulp_len, test=true; + else + decl_str --; + } + if (asmname_str[0] == '*') + { + size_t ulp_len = strlen (user_label_prefix); + + asmname_str ++; + + if (ulp_len == 0) + test = true; + else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0) + asmname_str += ulp_len, test=true; + else + asmname_str --; + } + + if (!test) + return false; + return strcmp (decl_str, asmname_str) == 0; } -/* Allocate NELEM items of SIZE bytes in the permanent obstack - and return a pointer to them. The storage is cleared before - returning the value. */ +/* Hash asmnames ignoring the user specified marks. */ -char * -perm_calloc (nelem, size) - int nelem; - long size; +hashval_t +decl_assembler_name_hash (const_tree asmname) { - char *rval = (char *) obstack_alloc (&permanent_obstack, nelem * size); - memset (rval, 0, nelem * size); - return rval; + if (IDENTIFIER_POINTER (asmname)[0] == '*') + { + const char *decl_str = IDENTIFIER_POINTER (asmname) + 1; + size_t ulp_len = strlen (user_label_prefix); + + if (ulp_len == 0) + ; + else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0) + decl_str += ulp_len; + + return htab_hash_string (decl_str); + } + + return htab_hash_string (IDENTIFIER_POINTER (asmname)); } -/* Compute the number of bytes occupied by 'node'. This routine only - looks at TREE_CODE and, if the code is TREE_VEC, TREE_VEC_LENGTH. */ +/* Compute the number of bytes occupied by a tree with code CODE. + This function cannot be used for nodes that have variable sizes, + including TREE_VEC, STRING_CST, and CALL_EXPR. 
*/ size_t -tree_size (node) - tree node; +tree_code_size (enum tree_code code) { - enum tree_code code = TREE_CODE (node); - switch (TREE_CODE_CLASS (code)) { - case 'd': /* A decl node */ - return sizeof (struct tree_decl); + case tcc_declaration: /* A decl node */ + { + switch (code) + { + case FIELD_DECL: + return sizeof (struct tree_field_decl); + case PARM_DECL: + return sizeof (struct tree_parm_decl); + case VAR_DECL: + return sizeof (struct tree_var_decl); + case LABEL_DECL: + return sizeof (struct tree_label_decl); + case RESULT_DECL: + return sizeof (struct tree_result_decl); + case CONST_DECL: + return sizeof (struct tree_const_decl); + case TYPE_DECL: + return sizeof (struct tree_type_decl); + case FUNCTION_DECL: + return sizeof (struct tree_function_decl); + case NAME_MEMORY_TAG: + case SYMBOL_MEMORY_TAG: + return sizeof (struct tree_memory_tag); + case MEMORY_PARTITION_TAG: + return sizeof (struct tree_memory_partition_tag); + default: + return sizeof (struct tree_decl_non_common); + } + } - case 't': /* a type node */ + case tcc_type: /* a type node */ return sizeof (struct tree_type); - case 'b': /* a lexical block node */ - return sizeof (struct tree_block); - - case 'r': /* a reference */ - case 'e': /* an expression */ - case 's': /* an expression with side effects */ - case '<': /* a comparison expression */ - case '1': /* a unary arithmetic expression */ - case '2': /* a binary arithmetic expression */ + case tcc_reference: /* a reference */ + case tcc_expression: /* an expression */ + case tcc_statement: /* an expression with side effects */ + case tcc_comparison: /* a comparison expression */ + case tcc_unary: /* a unary arithmetic expression */ + case tcc_binary: /* a binary arithmetic expression */ return (sizeof (struct tree_exp) - + TREE_CODE_LENGTH (code) * sizeof (char *) - sizeof (char *)); - - case 'c': /* a constant */ - /* We can't use TREE_CODE_LENGTH for INTEGER_CST, since the number of - words is machine-dependent due to varying length of HOST_WIDE_INT, - which might be wider than a pointer (e.g., long long). Similarly - for REAL_CST, since the number of words is machine-dependent due - to varying size and alignment of `double'. */ - if (code == INTEGER_CST) - return sizeof (struct tree_int_cst); - else if (code == REAL_CST) - return sizeof (struct tree_real_cst); - else - return (sizeof (struct tree_common) - + TREE_CODE_LENGTH (code) * sizeof (char *)); + + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree)); - case 'x': /* something random, like an identifier. */ - { - size_t length; - length = (sizeof (struct tree_common) - + TREE_CODE_LENGTH (code) * sizeof (char *)); - if (code == TREE_VEC) - length += TREE_VEC_LENGTH (node) * sizeof (char *) - sizeof (char *); - return length; - } + case tcc_constant: /* a constant */ + switch (code) + { + case INTEGER_CST: return sizeof (struct tree_int_cst); + case REAL_CST: return sizeof (struct tree_real_cst); + case FIXED_CST: return sizeof (struct tree_fixed_cst); + case COMPLEX_CST: return sizeof (struct tree_complex); + case VECTOR_CST: return sizeof (struct tree_vector); + case STRING_CST: gcc_unreachable (); + default: + return lang_hooks.tree_size (code); + } + + case tcc_exceptional: /* something random, like an identifier. 
*/ + switch (code) + { + case IDENTIFIER_NODE: return lang_hooks.identifier_size; + case TREE_LIST: return sizeof (struct tree_list); + + case ERROR_MARK: + case PLACEHOLDER_EXPR: return sizeof (struct tree_common); + + case TREE_VEC: + case OMP_CLAUSE: gcc_unreachable (); + + case SSA_NAME: return sizeof (struct tree_ssa_name); + + case STATEMENT_LIST: return sizeof (struct tree_statement_list); + case BLOCK: return sizeof (struct tree_block); + case CONSTRUCTOR: return sizeof (struct tree_constructor); + case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option); + case TARGET_OPTION_NODE: return sizeof (struct tree_target_option); + + default: + return lang_hooks.tree_size (code); + } + + default: + gcc_unreachable (); + } +} + +/* Compute the number of bytes occupied by NODE. This routine only + looks at TREE_CODE, except for those nodes that have variable sizes. */ +size_t +tree_size (const_tree node) +{ + const enum tree_code code = TREE_CODE (node); + switch (code) + { + case TREE_BINFO: + return (offsetof (struct tree_binfo, base_binfos) + + VEC_embedded_size (tree, BINFO_N_BASE_BINFOS (node))); + + case TREE_VEC: + return (sizeof (struct tree_vec) + + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree)); + + case STRING_CST: + return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1; + + case OMP_CLAUSE: + return (sizeof (struct tree_omp_clause) + + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1) + * sizeof (tree)); default: - abort (); + if (TREE_CODE_CLASS (code) == tcc_vl_exp) + return (sizeof (struct tree_exp) + + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree)); + else + return tree_code_size (code); } } -/* Return a newly allocated node of code CODE. - For decl and type nodes, some other fields are initialized. - The rest of the node is initialized to zero. +/* Return a newly allocated node of code CODE. For decl and type + nodes, some other fields are initialized. The rest of the node is + initialized to zero. This function cannot be used for TREE_VEC or + OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size. Achoo! I got a code in the node. */ tree -make_node (code) - enum tree_code code; +make_node_stat (enum tree_code code MEM_STAT_DECL) { tree t; - int type = TREE_CODE_CLASS (code); - size_t length; + enum tree_code_class type = TREE_CODE_CLASS (code); + size_t length = tree_code_size (code); #ifdef GATHER_STATISTICS tree_node_kind kind; -#endif - struct tree_common ttmp; - - /* We can't allocate a TREE_VEC without knowing how many elements - it will have. 
*/ - if (code == TREE_VEC) - abort (); - - TREE_SET_CODE ((tree)&ttmp, code); - length = tree_size ((tree)&ttmp); -#ifdef GATHER_STATISTICS switch (type) { - case 'd': /* A decl node */ + case tcc_declaration: /* A decl node */ kind = d_kind; break; - case 't': /* a type node */ + case tcc_type: /* a type node */ kind = t_kind; break; - case 'b': /* a lexical block */ - kind = b_kind; - break; - - case 's': /* an expression with side effects */ + case tcc_statement: /* an expression with side effects */ kind = s_kind; break; - case 'r': /* a reference */ + case tcc_reference: /* a reference */ kind = r_kind; break; - case 'e': /* an expression */ - case '<': /* a comparison expression */ - case '1': /* a unary arithmetic expression */ - case '2': /* a binary arithmetic expression */ + case tcc_expression: /* an expression */ + case tcc_comparison: /* a comparison expression */ + case tcc_unary: /* a unary arithmetic expression */ + case tcc_binary: /* a binary arithmetic expression */ kind = e_kind; break; - case 'c': /* a constant */ + case tcc_constant: /* a constant */ kind = c_kind; break; - case 'x': /* something random, like an identifier. */ - if (code == IDENTIFIER_NODE) - kind = id_kind; - else if (code == TREE_VEC) - kind = vec_kind; - else - kind = x_kind; - break; + case tcc_exceptional: /* something random, like an identifier. */ + switch (code) + { + case IDENTIFIER_NODE: + kind = id_kind; + break; + + case TREE_VEC: + kind = vec_kind; + break; + + case TREE_BINFO: + kind = binfo_kind; + break; + + case SSA_NAME: + kind = ssa_name_kind; + break; + + case BLOCK: + kind = b_kind; + break; + + case CONSTRUCTOR: + kind = constr_kind; + break; + default: + kind = x_kind; + break; + } + break; + default: - abort (); + gcc_unreachable (); } tree_node_counts[(int) kind]++; tree_node_sizes[(int) kind] += length; #endif - t = ggc_alloc_tree (length); + if (code == IDENTIFIER_NODE) + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_id_zone); + else + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); - memset ((PTR) t, 0, length); + memset (t, 0, length); TREE_SET_CODE (t, code); switch (type) { - case 's': + case tcc_statement: TREE_SIDE_EFFECTS (t) = 1; - TREE_TYPE (t) = void_type_node; break; - case 'd': - if (code != FUNCTION_DECL) - DECL_ALIGN (t) = 1; - DECL_USER_ALIGN (t) = 0; - DECL_IN_SYSTEM_HEADER (t) = in_system_header; - DECL_SOURCE_LINE (t) = lineno; - DECL_SOURCE_FILE (t) = - (input_filename) ? input_filename : ""; + case tcc_declaration: + if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) + { + if (code == FUNCTION_DECL) + { + DECL_ALIGN (t) = FUNCTION_BOUNDARY; + DECL_MODE (t) = FUNCTION_MODE; + } + else + DECL_ALIGN (t) = 1; + /* We have not yet computed the alias set for this declaration. */ + DECL_POINTER_ALIAS_SET (t) = -1; + } + DECL_SOURCE_LOCATION (t) = input_location; DECL_UID (t) = next_decl_uid++; - /* We have not yet computed the alias set for this declaration. */ - DECL_POINTER_ALIAS_SET (t) = -1; break; - case 't': + case tcc_type: TYPE_UID (t) = next_type_uid++; - TYPE_ALIGN (t) = char_type_node ? TYPE_ALIGN (char_type_node) : 0; + TYPE_ALIGN (t) = BITS_PER_UNIT; TYPE_USER_ALIGN (t) = 0; TYPE_MAIN_VARIANT (t) = t; + TYPE_CANONICAL (t) = t; /* Default to no attributes for type, but let target change that. */ TYPE_ATTRIBUTES (t) = NULL_TREE; - (*targetm.set_default_type_attributes) (t); + targetm.set_default_type_attributes (t); /* We have not yet computed the alias set for this type. 
*/ TYPE_ALIAS_SET (t) = -1; break; - case 'c': + case tcc_constant: TREE_CONSTANT (t) = 1; break; - case 'e': + case tcc_expression: switch (code) { case INIT_EXPR: case MODIFY_EXPR: case VA_ARG_EXPR: - case RTL_EXPR: case PREDECREMENT_EXPR: case PREINCREMENT_EXPR: case POSTDECREMENT_EXPR: @@ -459,47 +719,57 @@ make_node (code) break; } break; + + default: + /* Other classes need no special treatment. */ + break; } return t; } - -/* A front-end can reset this to an appropriate function if types need - special handling. */ - -tree (*make_lang_type_fn) PARAMS ((enum tree_code)) = make_node; - -/* Return a new type (with the indicated CODE), doing whatever - language-specific processing is required. */ - -tree -make_lang_type (code) - enum tree_code code; -{ - return (*make_lang_type_fn) (code); -} /* Return a new node with the same contents as NODE except that its TREE_CHAIN is zero and it has a fresh uid. */ tree -copy_node (node) - tree node; +copy_node_stat (tree node MEM_STAT_DECL) { tree t; enum tree_code code = TREE_CODE (node); size_t length; + gcc_assert (code != STATEMENT_LIST); + length = tree_size (node); - t = ggc_alloc_tree (length); + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); memcpy (t, node, length); TREE_CHAIN (t) = 0; TREE_ASM_WRITTEN (t) = 0; + TREE_VISITED (t) = 0; + t->base.ann = 0; - if (TREE_CODE_CLASS (code) == 'd') - DECL_UID (t) = next_decl_uid++; - else if (TREE_CODE_CLASS (code) == 't') + if (TREE_CODE_CLASS (code) == tcc_declaration) + { + DECL_UID (t) = next_decl_uid++; + if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL) + && DECL_HAS_VALUE_EXPR_P (node)) + { + SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node)); + DECL_HAS_VALUE_EXPR_P (t) = 1; + } + if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node)) + { + SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node)); + DECL_HAS_INIT_PRIORITY_P (t) = 1; + } + if (TREE_CODE (node) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (node)) + { + SET_DECL_RESTRICT_BASE (t, DECL_GET_RESTRICT_BASE (node)); + DECL_BASED_ON_RESTRICT_P (t) = 1; + } + } + else if (TREE_CODE_CLASS (code) == tcc_type) { TYPE_UID (t) = next_type_uid++; /* The following is so that the debug code for @@ -509,6 +779,13 @@ copy_node (node) but the optimizer should catch that. */ TYPE_SYMTAB_POINTER (t) = 0; TYPE_SYMTAB_ADDRESS (t) = 0; + + /* Do not copy the values cache. */ + if (TYPE_CACHED_VALUES_P(t)) + { + TYPE_CACHED_VALUES_P (t) = 0; + TYPE_CACHED_VALUES (t) = NULL_TREE; + } } return t; @@ -518,8 +795,7 @@ copy_node (node) For example, this can copy a list made of TREE_LIST nodes. */ tree -copy_list (list) - tree list; +copy_list (tree list) { tree head; tree prev, next; @@ -539,227 +815,461 @@ copy_list (list) } -/* Return a newly constructed INTEGER_CST node whose constant value - is specified by the two ints LOW and HI. - The TREE_TYPE is set to `int'. - - This function should be used via the `build_int_2' macro. */ +/* Create an INT_CST node with a LOW value sign extended. */ tree -build_int_2_wide (low, hi) - unsigned HOST_WIDE_INT low; - HOST_WIDE_INT hi; +build_int_cst (tree type, HOST_WIDE_INT low) { - tree t = make_node (INTEGER_CST); + /* Support legacy code. */ + if (!type) + type = integer_type_node; - TREE_INT_CST_LOW (t) = low; - TREE_INT_CST_HIGH (t) = hi; - TREE_TYPE (t) = integer_type_node; - return t; + return build_int_cst_wide (type, low, low < 0 ? -1 : 0); } -/* Return a new VECTOR_CST node whose type is TYPE and whose values - are in a list pointed by VALS. 
*/ +/* Create an INT_CST node with a LOW value zero extended. */ tree -build_vector (type, vals) - tree type, vals; +build_int_cstu (tree type, unsigned HOST_WIDE_INT low) { - tree v = make_node (VECTOR_CST); - int over1 = 0, over2 = 0; - tree link; + return build_int_cst_wide (type, low, 0); +} - TREE_VECTOR_CST_ELTS (v) = vals; - TREE_TYPE (v) = type; +/* Create an INT_CST node with a LOW value in TYPE. The value is sign extended + if it is negative. This function is similar to build_int_cst, but + the extra bits outside of the type precision are cleared. Constants + with these extra bits may confuse the fold so that it detects overflows + even in cases when they do not occur, and in general should be avoided. + We cannot however make this a default behavior of build_int_cst without + more intrusive changes, since there are parts of gcc that rely on the extra + precision of the integer constants. */ - /* Iterate through elements and check for overflow. */ - for (link = vals; link; link = TREE_CHAIN (link)) - { - tree value = TREE_VALUE (link); +tree +build_int_cst_type (tree type, HOST_WIDE_INT low) +{ + unsigned HOST_WIDE_INT low1; + HOST_WIDE_INT hi; - over1 |= TREE_OVERFLOW (value); - over2 |= TREE_CONSTANT_OVERFLOW (value); - } - - TREE_OVERFLOW (v) = over1; - TREE_CONSTANT_OVERFLOW (v) = over2; + gcc_assert (type); - return v; + fit_double_type (low, low < 0 ? -1 : 0, &low1, &hi, type); + + return build_int_cst_wide (type, low1, hi); } -/* Return a new REAL_CST node whose type is TYPE and value is D. */ +/* Create an INT_CST node of TYPE and value HI:LOW. The value is truncated + and sign extended according to the value range of TYPE. */ tree -build_real (type, d) - tree type; - REAL_VALUE_TYPE d; +build_int_cst_wide_type (tree type, + unsigned HOST_WIDE_INT low, HOST_WIDE_INT high) { - tree v; - int overflow = 0; - - /* Check for valid float value for this type on this target machine; - if not, can print error message and store a valid value in D. */ -#ifdef CHECK_FLOAT_VALUE - CHECK_FLOAT_VALUE (TYPE_MODE (type), d, overflow); -#endif - - v = make_node (REAL_CST); - TREE_TYPE (v) = type; - TREE_REAL_CST (v) = d; - TREE_OVERFLOW (v) = TREE_CONSTANT_OVERFLOW (v) = overflow; - return v; + fit_double_type (low, high, &low, &high, type); + return build_int_cst_wide (type, low, high); } -/* Return a new REAL_CST node whose type is TYPE - and whose value is the integer value of the INTEGER_CST node I. */ +/* These are the hash table functions for the hash table of INTEGER_CST + nodes of a sizetype. */ -#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC) +/* Return the hash code code X, an INTEGER_CST. */ -REAL_VALUE_TYPE -real_value_from_int_cst (type, i) - tree type ATTRIBUTE_UNUSED, i; +static hashval_t +int_cst_hash_hash (const void *x) { - REAL_VALUE_TYPE d; + const_tree const t = (const_tree) x; -#ifdef REAL_ARITHMETIC - /* Clear all bits of the real value type so that we can later do - bitwise comparisons to see if two values are the same. */ - memset ((char *) &d, 0, sizeof d); + return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t) + ^ htab_hash_pointer (TREE_TYPE (t))); +} - if (! TREE_UNSIGNED (TREE_TYPE (i))) - REAL_VALUE_FROM_INT (d, TREE_INT_CST_LOW (i), TREE_INT_CST_HIGH (i), - TYPE_MODE (type)); - else - REAL_VALUE_FROM_UNSIGNED_INT (d, TREE_INT_CST_LOW (i), - TREE_INT_CST_HIGH (i), TYPE_MODE (type)); -#else /* not REAL_ARITHMETIC */ - /* Some 386 compilers mishandle unsigned int to float conversions, - so introduce a temporary variable E to avoid those bugs. 
*/ - if (TREE_INT_CST_HIGH (i) < 0 && ! TREE_UNSIGNED (TREE_TYPE (i))) - { - REAL_VALUE_TYPE e; - - d = (double) (~TREE_INT_CST_HIGH (i)); - e = ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))); - d *= e; - e = (double) (~TREE_INT_CST_LOW (i)); - d += e; - d = (- d - 1.0); - } - else - { - REAL_VALUE_TYPE e; +/* Return nonzero if the value represented by *X (an INTEGER_CST tree node) + is the same as that given by *Y, which is the same. */ - d = (double) (unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (i); - e = ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))); - d *= e; - e = (double) TREE_INT_CST_LOW (i); - d += e; - } -#endif /* not REAL_ARITHMETIC */ - return d; +static int +int_cst_hash_eq (const void *x, const void *y) +{ + const_tree const xt = (const_tree) x; + const_tree const yt = (const_tree) y; + + return (TREE_TYPE (xt) == TREE_TYPE (yt) + && TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt) + && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)); } -/* Args to pass to and from build_real_from_int_cst_1. */ +/* Create an INT_CST node of TYPE and value HI:LOW. + The returned node is always shared. For small integers we use a + per-type vector cache, for larger ones we use a single hash table. */ -struct brfic_args +tree +build_int_cst_wide (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi) { - tree type; /* Input: type to conver to. */ - tree i; /* Input: operand to convert. */ - REAL_VALUE_TYPE d; /* Output: floating point value. */ -}; + tree t; + int ix = -1; + int limit = 0; -/* Convert an integer to a floating point value while protected by a floating - point exception handler. */ + gcc_assert (type); -static void -build_real_from_int_cst_1 (data) - PTR data; -{ - struct brfic_args *args = (struct brfic_args *) data; + switch (TREE_CODE (type)) + { + case POINTER_TYPE: + case REFERENCE_TYPE: + /* Cache NULL pointer. */ + if (!hi && !low) + { + limit = 1; + ix = 0; + } + break; -#ifdef REAL_ARITHMETIC - args->d = real_value_from_int_cst (args->type, args->i); -#else - args->d - = REAL_VALUE_TRUNCATE (TYPE_MODE (args->type), - real_value_from_int_cst (args->type, args->i)); -#endif -} + case BOOLEAN_TYPE: + /* Cache false or true. */ + limit = 2; + if (!hi && low < 2) + ix = low; + break; -/* Given a tree representing an integer constant I, return a tree - representing the same value as a floating-point constant of type TYPE. - We cannot perform this operation if there is no way of doing arithmetic - on floating-point values. */ + case INTEGER_TYPE: + case OFFSET_TYPE: + if (TYPE_UNSIGNED (type)) + { + /* Cache 0..N */ + limit = INTEGER_SHARE_LIMIT; + if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT) + ix = low; + } + else + { + /* Cache -1..N */ + limit = INTEGER_SHARE_LIMIT + 1; + if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT) + ix = low + 1; + else if (hi == -1 && low == -(unsigned HOST_WIDE_INT)1) + ix = 0; + } + break; -tree -build_real_from_int_cst (type, i) - tree type; - tree i; -{ - tree v; - int overflow = TREE_OVERFLOW (i); - REAL_VALUE_TYPE d; - struct brfic_args args; + case ENUMERAL_TYPE: + break; - v = make_node (REAL_CST); - TREE_TYPE (v) = type; + default: + gcc_unreachable (); + } - /* Setup input for build_real_from_int_cst_1() */ - args.type = type; - args.i = i; + if (ix >= 0) + { + /* Look for it in the type's vector of small shared ints. 
*/ + if (!TYPE_CACHED_VALUES_P (type)) + { + TYPE_CACHED_VALUES_P (type) = 1; + TYPE_CACHED_VALUES (type) = make_tree_vec (limit); + } - if (do_float_handler (build_real_from_int_cst_1, (PTR) &args)) - /* Receive output from build_real_from_int_cst_1() */ - d = args.d; + t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix); + if (t) + { + /* Make sure no one is clobbering the shared constant. */ + gcc_assert (TREE_TYPE (t) == type); + gcc_assert (TREE_INT_CST_LOW (t) == low); + gcc_assert (TREE_INT_CST_HIGH (t) == hi); + } + else + { + /* Create a new shared int. */ + t = make_node (INTEGER_CST); + + TREE_INT_CST_LOW (t) = low; + TREE_INT_CST_HIGH (t) = hi; + TREE_TYPE (t) = type; + + TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t; + } + } else { - /* We got an exception from build_real_from_int_cst_1() */ - d = dconst0; - overflow = 1; - } + /* Use the cache of larger shared ints. */ + void **slot; - /* Check for valid float value for this type on this target machine. */ + TREE_INT_CST_LOW (int_cst_node) = low; + TREE_INT_CST_HIGH (int_cst_node) = hi; + TREE_TYPE (int_cst_node) = type; -#ifdef CHECK_FLOAT_VALUE - CHECK_FLOAT_VALUE (TYPE_MODE (type), d, overflow); -#endif + slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT); + t = (tree) *slot; + if (!t) + { + /* Insert this one into the hash table. */ + t = int_cst_node; + *slot = t; + /* Make a new node for next time round. */ + int_cst_node = make_node (INTEGER_CST); + } + } - TREE_REAL_CST (v) = d; - TREE_OVERFLOW (v) = TREE_CONSTANT_OVERFLOW (v) = overflow; - return v; + return t; } -#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */ - -/* Return a newly constructed STRING_CST node whose value is - the LEN characters at STR. - The TREE_TYPE is not initialized. */ +/* Builds an integer constant in TYPE such that lowest BITS bits are ones + and the rest are zeros. */ tree -build_string (len, str) - int len; - const char *str; +build_low_bits_mask (tree type, unsigned bits) { - tree s = make_node (STRING_CST); - - TREE_STRING_LENGTH (s) = len; - TREE_STRING_POINTER (s) = ggc_alloc_string (str, len); + unsigned HOST_WIDE_INT low; + HOST_WIDE_INT high; + unsigned HOST_WIDE_INT all_ones = ~(unsigned HOST_WIDE_INT) 0; - return s; -} + gcc_assert (bits <= TYPE_PRECISION (type)); -/* Return a newly constructed COMPLEX_CST node whose value is - specified by the real and imaginary parts REAL and IMAG. - Both REAL and IMAG should be constant nodes. TYPE, if specified, + if (bits == TYPE_PRECISION (type) + && !TYPE_UNSIGNED (type)) + { + /* Sign extended all-ones mask. */ + low = all_ones; + high = -1; + } + else if (bits <= HOST_BITS_PER_WIDE_INT) + { + low = all_ones >> (HOST_BITS_PER_WIDE_INT - bits); + high = 0; + } + else + { + bits -= HOST_BITS_PER_WIDE_INT; + low = all_ones; + high = all_ones >> (HOST_BITS_PER_WIDE_INT - bits); + } + + return build_int_cst_wide (type, low, high); +} + +/* Checks that X is integer constant that can be expressed in (unsigned) + HOST_WIDE_INT without loss of precision. */ + +bool +cst_and_fits_in_hwi (const_tree x) +{ + if (TREE_CODE (x) != INTEGER_CST) + return false; + + if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT) + return false; + + return (TREE_INT_CST_HIGH (x) == 0 + || TREE_INT_CST_HIGH (x) == -1); +} + +/* Return a new VECTOR_CST node whose type is TYPE and whose values + are in a list pointed to by VALS. 
*/ + +tree +build_vector (tree type, tree vals) +{ + tree v = make_node (VECTOR_CST); + int over = 0; + tree link; + + TREE_VECTOR_CST_ELTS (v) = vals; + TREE_TYPE (v) = type; + + /* Iterate through elements and check for overflow. */ + for (link = vals; link; link = TREE_CHAIN (link)) + { + tree value = TREE_VALUE (link); + + /* Don't crash if we get an address constant. */ + if (!CONSTANT_CLASS_P (value)) + continue; + + over |= TREE_OVERFLOW (value); + } + + TREE_OVERFLOW (v) = over; + return v; +} + +/* Return a new VECTOR_CST node whose type is TYPE and whose values + are extracted from V, a vector of CONSTRUCTOR_ELT. */ + +tree +build_vector_from_ctor (tree type, VEC(constructor_elt,gc) *v) +{ + tree list = NULL_TREE; + unsigned HOST_WIDE_INT idx; + tree value; + + FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value) + list = tree_cons (NULL_TREE, value, list); + return build_vector (type, nreverse (list)); +} + +/* Return a new CONSTRUCTOR node whose type is TYPE and whose values + are in the VEC pointed to by VALS. */ +tree +build_constructor (tree type, VEC(constructor_elt,gc) *vals) +{ + tree c = make_node (CONSTRUCTOR); + TREE_TYPE (c) = type; + CONSTRUCTOR_ELTS (c) = vals; + return c; +} + +/* Build a CONSTRUCTOR node made of a single initializer, with the specified + INDEX and VALUE. */ +tree +build_constructor_single (tree type, tree index, tree value) +{ + VEC(constructor_elt,gc) *v; + constructor_elt *elt; + tree t; + + v = VEC_alloc (constructor_elt, gc, 1); + elt = VEC_quick_push (constructor_elt, v, NULL); + elt->index = index; + elt->value = value; + + t = build_constructor (type, v); + TREE_CONSTANT (t) = TREE_CONSTANT (value); + return t; +} + + +/* Return a new CONSTRUCTOR node whose type is TYPE and whose values + are in a list pointed to by VALS. */ +tree +build_constructor_from_list (tree type, tree vals) +{ + tree t, val; + VEC(constructor_elt,gc) *v = NULL; + bool constant_p = true; + + if (vals) + { + v = VEC_alloc (constructor_elt, gc, list_length (vals)); + for (t = vals; t; t = TREE_CHAIN (t)) + { + constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL); + val = TREE_VALUE (t); + elt->index = TREE_PURPOSE (t); + elt->value = val; + if (!TREE_CONSTANT (val)) + constant_p = false; + } + } + + t = build_constructor (type, v); + TREE_CONSTANT (t) = constant_p; + return t; +} + +/* Return a new FIXED_CST node whose type is TYPE and value is F. */ + +tree +build_fixed (tree type, FIXED_VALUE_TYPE f) +{ + tree v; + FIXED_VALUE_TYPE *fp; + + v = make_node (FIXED_CST); + fp = GGC_NEW (FIXED_VALUE_TYPE); + memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE)); + + TREE_TYPE (v) = type; + TREE_FIXED_CST_PTR (v) = fp; + return v; +} + +/* Return a new REAL_CST node whose type is TYPE and value is D. */ + +tree +build_real (tree type, REAL_VALUE_TYPE d) +{ + tree v; + REAL_VALUE_TYPE *dp; + int overflow = 0; + + /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE. + Consider doing it via real_convert now. */ + + v = make_node (REAL_CST); + dp = GGC_NEW (REAL_VALUE_TYPE); + memcpy (dp, &d, sizeof (REAL_VALUE_TYPE)); + + TREE_TYPE (v) = type; + TREE_REAL_CST_PTR (v) = dp; + TREE_OVERFLOW (v) = overflow; + return v; +} + +/* Return a new REAL_CST node whose type is TYPE + and whose value is the integer value of the INTEGER_CST node I. */ + +REAL_VALUE_TYPE +real_value_from_int_cst (const_tree type, const_tree i) +{ + REAL_VALUE_TYPE d; + + /* Clear all bits of the real value type so that we can later do + bitwise comparisons to see if two values are the same. 
*/ + memset (&d, 0, sizeof d); + + real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, + TREE_INT_CST_LOW (i), TREE_INT_CST_HIGH (i), + TYPE_UNSIGNED (TREE_TYPE (i))); + return d; +} + +/* Given a tree representing an integer constant I, return a tree + representing the same value as a floating-point constant of type TYPE. */ + +tree +build_real_from_int_cst (tree type, const_tree i) +{ + tree v; + int overflow = TREE_OVERFLOW (i); + + v = build_real (type, real_value_from_int_cst (type, i)); + + TREE_OVERFLOW (v) |= overflow; + return v; +} + +/* Return a newly constructed STRING_CST node whose value is + the LEN characters at STR. + The TREE_TYPE is not initialized. */ + +tree +build_string (int len, const char *str) +{ + tree s; + size_t length; + + /* Do not waste bytes provided by padding of struct tree_string. */ + length = len + offsetof (struct tree_string, str) + 1; + +#ifdef GATHER_STATISTICS + tree_node_counts[(int) c_kind]++; + tree_node_sizes[(int) c_kind] += length; +#endif + + s = ggc_alloc_tree (length); + + memset (s, 0, sizeof (struct tree_common)); + TREE_SET_CODE (s, STRING_CST); + TREE_CONSTANT (s) = 1; + TREE_STRING_LENGTH (s) = len; + memcpy (s->string.str, str, len); + s->string.str[len] = '\0'; + + return s; +} + +/* Return a newly constructed COMPLEX_CST node whose value is + specified by the real and imaginary parts REAL and IMAG. + Both REAL and IMAG should be constant nodes. TYPE, if specified, will be the type of the COMPLEX_CST; otherwise a new type will be made. */ tree -build_complex (type, real, imag) - tree type; - tree real, imag; +build_complex (tree type, tree real, tree imag) { tree t = make_node (COMPLEX_CST); @@ -767,28 +1277,98 @@ build_complex (type, real, imag) TREE_IMAGPART (t) = imag; TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real)); TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag); - TREE_CONSTANT_OVERFLOW (t) - = TREE_CONSTANT_OVERFLOW (real) | TREE_CONSTANT_OVERFLOW (imag); return t; } +/* Return a constant of arithmetic type TYPE which is the + multiplicative identity of the set TYPE. */ + +tree +build_one_cst (tree type) +{ + switch (TREE_CODE (type)) + { + case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: + case POINTER_TYPE: case REFERENCE_TYPE: + case OFFSET_TYPE: + return build_int_cst (type, 1); + + case REAL_TYPE: + return build_real (type, dconst1); + + case FIXED_POINT_TYPE: + /* We can only generate 1 for accum types. */ + gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); + return build_fixed (type, FCONST1(TYPE_MODE (type))); + + case VECTOR_TYPE: + { + tree scalar, cst; + int i; + + scalar = build_one_cst (TREE_TYPE (type)); + + /* Create 'vect_cst_ = {cst,cst,...,cst}' */ + cst = NULL_TREE; + for (i = TYPE_VECTOR_SUBPARTS (type); --i >= 0; ) + cst = tree_cons (NULL_TREE, scalar, cst); + + return build_vector (type, cst); + } + + case COMPLEX_TYPE: + return build_complex (type, + build_one_cst (TREE_TYPE (type)), + fold_convert (TREE_TYPE (type), integer_zero_node)); + + default: + gcc_unreachable (); + } +} + +/* Build a BINFO with LEN language slots. 
*/ + +tree +make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL) +{ + tree t; + size_t length = (offsetof (struct tree_binfo, base_binfos) + + VEC_embedded_size (tree, base_binfos)); + +#ifdef GATHER_STATISTICS + tree_node_counts[(int) binfo_kind]++; + tree_node_sizes[(int) binfo_kind] += length; +#endif + + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); + + memset (t, 0, offsetof (struct tree_binfo, base_binfos)); + + TREE_SET_CODE (t, TREE_BINFO); + + VEC_embedded_init (tree, BINFO_BASE_BINFOS (t), base_binfos); + + return t; +} + + /* Build a newly constructed TREE_VEC node of length LEN. */ tree -make_tree_vec (len) - int len; +make_tree_vec_stat (int len MEM_STAT_DECL) { tree t; - int length = (len-1) * sizeof (tree) + sizeof (struct tree_vec); + int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); #ifdef GATHER_STATISTICS - tree_node_counts[(int)vec_kind]++; - tree_node_sizes[(int)vec_kind] += length; + tree_node_counts[(int) vec_kind]++; + tree_node_sizes[(int) vec_kind] += length; #endif - t = ggc_alloc_tree (length); + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); + + memset (t, 0, length); - memset ((PTR) t, 0, length); TREE_SET_CODE (t, TREE_VEC); TREE_VEC_LENGTH (t) = len; @@ -799,13 +1379,11 @@ make_tree_vec (len) of zero. */ int -integer_zerop (expr) - tree expr; +integer_zerop (const_tree expr) { STRIP_NOPS (expr); return ((TREE_CODE (expr) == INTEGER_CST - && ! TREE_CONSTANT_OVERFLOW (expr) && TREE_INT_CST_LOW (expr) == 0 && TREE_INT_CST_HIGH (expr) == 0) || (TREE_CODE (expr) == COMPLEX_CST @@ -817,13 +1395,11 @@ integer_zerop (expr) complex constant. */ int -integer_onep (expr) - tree expr; +integer_onep (const_tree expr) { STRIP_NOPS (expr); return ((TREE_CODE (expr) == INTEGER_CST - && ! TREE_CONSTANT_OVERFLOW (expr) && TREE_INT_CST_LOW (expr) == 1 && TREE_INT_CST_HIGH (expr) == 0) || (TREE_CODE (expr) == COMPLEX_CST @@ -835,8 +1411,7 @@ integer_onep (expr) it contains. Likewise for the corresponding complex constant. */ int -integer_all_onesp (expr) - tree expr; +integer_all_onesp (const_tree expr) { int prec; int uns; @@ -848,14 +1423,15 @@ integer_all_onesp (expr) && integer_zerop (TREE_IMAGPART (expr))) return 1; - else if (TREE_CODE (expr) != INTEGER_CST - || TREE_CONSTANT_OVERFLOW (expr)) + else if (TREE_CODE (expr) != INTEGER_CST) return 0; - uns = TREE_UNSIGNED (TREE_TYPE (expr)); + uns = TYPE_UNSIGNED (TREE_TYPE (expr)); + if (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0 + && TREE_INT_CST_HIGH (expr) == -1) + return 1; if (!uns) - return (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0 - && TREE_INT_CST_HIGH (expr) == -1); + return 0; /* Note that using TYPE_PRECISION here is wrong. We care about the actual bits, not the (arbitrary) range of the type. */ @@ -867,10 +1443,9 @@ integer_all_onesp (expr) shift_amount = prec - HOST_BITS_PER_WIDE_INT; - if (shift_amount > HOST_BITS_PER_WIDE_INT) - /* Can not handle precisions greater than twice the host int size. */ - abort (); - else if (shift_amount == HOST_BITS_PER_WIDE_INT) + /* Can not handle precisions greater than twice the host int size. */ + gcc_assert (shift_amount <= HOST_BITS_PER_WIDE_INT); + if (shift_amount == HOST_BITS_PER_WIDE_INT) /* Shifting by the host word size is undefined according to the ANSI standard, so we must handle this as a special case. */ high_value = -1; @@ -888,8 +1463,7 @@ integer_all_onesp (expr) one bit on). 
*/ int -integer_pow2p (expr) - tree expr; +integer_pow2p (const_tree expr) { int prec; HOST_WIDE_INT high, low; @@ -901,7 +1475,7 @@ integer_pow2p (expr) && integer_zerop (TREE_IMAGPART (expr))) return 1; - if (TREE_CODE (expr) != INTEGER_CST || TREE_CONSTANT_OVERFLOW (expr)) + if (TREE_CODE (expr) != INTEGER_CST) return 0; prec = (POINTER_TYPE_P (TREE_TYPE (expr)) @@ -930,12 +1504,36 @@ integer_pow2p (expr) || (low == 0 && (high & (high - 1)) == 0)); } +/* Return 1 if EXPR is an integer constant other than zero or a + complex constant other than zero. */ + +int +integer_nonzerop (const_tree expr) +{ + STRIP_NOPS (expr); + + return ((TREE_CODE (expr) == INTEGER_CST + && (TREE_INT_CST_LOW (expr) != 0 + || TREE_INT_CST_HIGH (expr) != 0)) + || (TREE_CODE (expr) == COMPLEX_CST + && (integer_nonzerop (TREE_REALPART (expr)) + || integer_nonzerop (TREE_IMAGPART (expr))))); +} + +/* Return 1 if EXPR is the fixed-point constant zero. */ + +int +fixed_zerop (const_tree expr) +{ + return (TREE_CODE (expr) == FIXED_CST + && double_int_zero_p (TREE_FIXED_CST (expr).data)); +} + /* Return the power of two represented by a tree node known to be a power of two. */ int -tree_log2 (expr) - tree expr; +tree_log2 (const_tree expr) { int prec; HOST_WIDE_INT high, low; @@ -973,8 +1571,7 @@ tree_log2 (expr) than or equal to EXPR. */ int -tree_floor_log2 (expr) - tree expr; +tree_floor_log2 (const_tree expr) { int prec; HOST_WIDE_INT high, low; @@ -1009,63 +1606,78 @@ tree_floor_log2 (expr) : floor_log2 (low)); } -/* Return 1 if EXPR is the real constant zero. */ +/* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for + decimal float constants, so don't return 1 for them. */ int -real_zerop (expr) - tree expr; +real_zerop (const_tree expr) { STRIP_NOPS (expr); return ((TREE_CODE (expr) == REAL_CST - && ! TREE_CONSTANT_OVERFLOW (expr) - && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)) + && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0) + && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))))) || (TREE_CODE (expr) == COMPLEX_CST && real_zerop (TREE_REALPART (expr)) && real_zerop (TREE_IMAGPART (expr)))); } -/* Return 1 if EXPR is the real constant one in real or complex form. */ +/* Return 1 if EXPR is the real constant one in real or complex form. + Trailing zeroes matter for decimal float constants, so don't return + 1 for them. */ int -real_onep (expr) - tree expr; +real_onep (const_tree expr) { STRIP_NOPS (expr); return ((TREE_CODE (expr) == REAL_CST - && ! TREE_CONSTANT_OVERFLOW (expr) - && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)) + && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1) + && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))))) || (TREE_CODE (expr) == COMPLEX_CST && real_onep (TREE_REALPART (expr)) && real_zerop (TREE_IMAGPART (expr)))); } -/* Return 1 if EXPR is the real constant two. */ +/* Return 1 if EXPR is the real constant two. Trailing zeroes matter + for decimal float constants, so don't return 1 for them. */ int -real_twop (expr) - tree expr; +real_twop (const_tree expr) { STRIP_NOPS (expr); return ((TREE_CODE (expr) == REAL_CST - && ! TREE_CONSTANT_OVERFLOW (expr) - && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2)) + && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2) + && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))))) || (TREE_CODE (expr) == COMPLEX_CST && real_twop (TREE_REALPART (expr)) && real_zerop (TREE_IMAGPART (expr)))); } +/* Return 1 if EXPR is the real constant minus one. 
Trailing zeroes + matter for decimal float constants, so don't return 1 for them. */ + +int +real_minus_onep (const_tree expr) +{ + STRIP_NOPS (expr); + + return ((TREE_CODE (expr) == REAL_CST + && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1) + && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))))) + || (TREE_CODE (expr) == COMPLEX_CST + && real_minus_onep (TREE_REALPART (expr)) + && real_zerop (TREE_IMAGPART (expr)))); +} + /* Nonzero if EXP is a constant or a cast of a constant. */ int -really_constant_p (exp) - tree exp; +really_constant_p (const_tree exp) { /* This is not quite the same as STRIP_NOPS. It does more. */ - while (TREE_CODE (exp) == NOP_EXPR - || TREE_CODE (exp) == CONVERT_EXPR + while (CONVERT_EXPR_P (exp) || TREE_CODE (exp) == NON_LVALUE_EXPR) exp = TREE_OPERAND (exp, 0); return TREE_CONSTANT (exp); @@ -1075,8 +1687,7 @@ really_constant_p (exp) Return 0 if ELEM is not in LIST. */ tree -value_member (elem, list) - tree elem, list; +value_member (tree elem, tree list) { while (list) { @@ -1091,8 +1702,7 @@ value_member (elem, list) Return 0 if ELEM is not in LIST. */ tree -purpose_member (elem, list) - tree elem, list; +purpose_member (const_tree elem, tree list) { while (list) { @@ -1103,27 +1713,10 @@ purpose_member (elem, list) return NULL_TREE; } -/* Return first list element whose BINFO_TYPE is ELEM. - Return 0 if ELEM is not in LIST. */ - -tree -binfo_member (elem, list) - tree elem, list; -{ - while (list) - { - if (elem == BINFO_TYPE (list)) - return list; - list = TREE_CHAIN (list); - } - return NULL_TREE; -} - /* Return nonzero if ELEM is part of the chain CHAIN. */ int -chain_member (elem, chain) - tree elem, chain; +chain_member (const_tree elem, const_tree chain) { while (chain) { @@ -1135,54 +1728,29 @@ chain_member (elem, chain) return 0; } -/* Return nonzero if ELEM is equal to TREE_VALUE (CHAIN) for any piece of - chain CHAIN. This and the next function are currently unused, but - are retained for completeness. */ - -int -chain_member_value (elem, chain) - tree elem, chain; -{ - while (chain) - { - if (elem == TREE_VALUE (chain)) - return 1; - chain = TREE_CHAIN (chain); - } - - return 0; -} - -/* Return nonzero if ELEM is equal to TREE_PURPOSE (CHAIN) - for any piece of chain CHAIN. */ - -int -chain_member_purpose (elem, chain) - tree elem, chain; -{ - while (chain) - { - if (elem == TREE_PURPOSE (chain)) - return 1; - chain = TREE_CHAIN (chain); - } - - return 0; -} - /* Return the length of a chain of nodes chained through TREE_CHAIN. We expect a null pointer to mark the end of the chain. This is the Lisp primitive `length'. */ int -list_length (t) - tree t; +list_length (const_tree t) { - tree tail; + const_tree p = t; +#ifdef ENABLE_TREE_CHECKING + const_tree q = t; +#endif int len = 0; - for (tail = t; tail; tail = TREE_CHAIN (tail)) - len++; + while (p) + { + p = TREE_CHAIN (p); +#ifdef ENABLE_TREE_CHECKING + if (len % 2) + q = TREE_CHAIN (q); + gcc_assert (p != q); +#endif + len++; + } return len; } @@ -1190,8 +1758,7 @@ list_length (t) /* Returns the number of FIELD_DECLs in TYPE. */ int -fields_length (type) - tree type; +fields_length (const_tree type) { tree t = TYPE_FIELDS (type); int count = 0; @@ -1208,36 +1775,34 @@ fields_length (type) This is the Lisp primitive `nconc'. 
*/ tree -chainon (op1, op2) - tree op1, op2; +chainon (tree op1, tree op2) { + tree t1; - if (op1) - { - tree t1; -#ifdef ENABLE_TREE_CHECKING - tree t2; -#endif + if (!op1) + return op2; + if (!op2) + return op1; + + for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1)) + continue; + TREE_CHAIN (t1) = op2; - for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1)) - ; - TREE_CHAIN (t1) = op2; #ifdef ENABLE_TREE_CHECKING - for (t2 = op2; t2; t2 = TREE_CHAIN (t2)) - if (t2 == t1) - abort (); /* Circularity created. */ + { + tree t2; + for (t2 = op2; t2; t2 = TREE_CHAIN (t2)) + gcc_assert (t2 != t1); + } #endif - return op1; - } - else - return op2; + + return op1; } /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */ tree -tree_last (chain) - tree chain; +tree_last (tree chain) { tree next; if (chain) @@ -1250,8 +1815,7 @@ tree_last (chain) and return the new head of the chain (old last element). */ tree -nreverse (t) - tree t; +nreverse (tree t) { tree prev = 0, decl, next; for (decl = t; decl; decl = next) @@ -1262,56 +1826,29 @@ nreverse (t) } return prev; } - -/* Given a chain CHAIN of tree nodes, - construct and return a list of those nodes. */ - -tree -listify (chain) - tree chain; -{ - tree result = NULL_TREE; - tree in_tail = chain; - tree out_tail = NULL_TREE; - - while (in_tail) - { - tree next = tree_cons (NULL_TREE, in_tail, NULL_TREE); - if (out_tail) - TREE_CHAIN (out_tail) = next; - else - result = next; - out_tail = next; - in_tail = TREE_CHAIN (in_tail); - } - - return result; -} /* Return a newly created TREE_LIST node whose purpose and value fields are PARM and VALUE. */ tree -build_tree_list (parm, value) - tree parm, value; +build_tree_list_stat (tree parm, tree value MEM_STAT_DECL) { - tree t = make_node (TREE_LIST); + tree t = make_node_stat (TREE_LIST PASS_MEM_STAT); TREE_PURPOSE (t) = parm; TREE_VALUE (t) = value; return t; } /* Return a newly created TREE_LIST node whose - purpose and value fields are PARM and VALUE + purpose and value fields are PURPOSE and VALUE and whose TREE_CHAIN is CHAIN. */ tree -tree_cons (purpose, value, chain) - tree purpose, value, chain; +tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL) { tree node; - node = ggc_alloc_tree (sizeof (struct tree_list)); + node = (tree) ggc_alloc_zone_pass_stat (sizeof (struct tree_list), &tree_zone); memset (node, 0, sizeof (struct tree_common)); @@ -1327,6 +1864,24 @@ tree_cons (purpose, value, chain) return node; } +/* Return the elements of a CONSTRUCTOR as a TREE_LIST. */ + +tree +ctor_to_list (tree ctor) +{ + tree list = NULL_TREE; + tree *p = &list; + unsigned ix; + tree purpose, val; + + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), ix, purpose, val) + { + *p = build_tree_list (purpose, val); + p = &TREE_CHAIN (*p); + } + + return list; +} /* Return the size nominally occupied by an object of type TYPE when it resides in memory. The value is measured in units of bytes, @@ -1335,8 +1890,7 @@ tree_cons (purpose, value, chain) make_unsigned_type). */ tree -size_in_bytes (type) - tree type; +size_in_bytes (const_tree type) { tree t; @@ -1348,13 +1902,10 @@ size_in_bytes (type) if (t == 0) { - incomplete_type_error (NULL_TREE, type); + lang_hooks.types.incomplete_type_error (NULL_TREE, type); return size_zero_node; } - if (TREE_CODE (t) == INTEGER_CST) - force_fit_type (t, 0); - return t; } @@ -1362,8 +1913,7 @@ size_in_bytes (type) or return -1 if the size can vary or is larger than an integer. 
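ctor_to_list above builds its TREE_LIST in constructor order by keeping a pointer to the tail's chain slot, so no nreverse-style pass is needed afterwards. The same idiom on a standalone list type; struct tnode and make_counting_list are illustrative, not GCC's.

#include <stdlib.h>

struct tnode { int value; struct tnode *chain; };

/* Build the chain 0, 1, ..., n-1 in order.  P always points at the
   slot where the next node must be hooked in, so each new node is
   appended in O(1) and the list never has to be reversed.  */
static struct tnode *
make_counting_list (int n)
{
  struct tnode *list = NULL;
  struct tnode **p = &list;
  int i;

  for (i = 0; i < n; i++)
    {
      struct tnode *node = calloc (1, sizeof *node);
      node->value = i;
      *p = node;
      p = &node->chain;
    }
  return list;
}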
*/ HOST_WIDE_INT -int_size_in_bytes (type) - tree type; +int_size_in_bytes (const_tree type) { tree t; @@ -1374,7 +1924,6 @@ int_size_in_bytes (type) t = TYPE_SIZE_UNIT (type); if (t == 0 || TREE_CODE (t) != INTEGER_CST - || TREE_OVERFLOW (t) || TREE_INT_CST_HIGH (t) != 0 /* If the result would appear negative, it's too big to represent. */ || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0) @@ -1382,26 +1931,56 @@ int_size_in_bytes (type) return TREE_INT_CST_LOW (t); } + +/* Return the maximum size of TYPE (in bytes) as a wide integer + or return -1 if the size can vary or is larger than an integer. */ + +HOST_WIDE_INT +max_int_size_in_bytes (const_tree type) +{ + HOST_WIDE_INT size = -1; + tree size_tree; + + /* If this is an array type, check for a possible MAX_SIZE attached. */ + + if (TREE_CODE (type) == ARRAY_TYPE) + { + size_tree = TYPE_ARRAY_MAX_SIZE (type); + + if (size_tree && host_integerp (size_tree, 1)) + size = tree_low_cst (size_tree, 1); + } + + /* If we still haven't been able to get a size, see if the language + can compute a maximum size. */ + + if (size == -1) + { + size_tree = lang_hooks.types.max_size (type); + + if (size_tree && host_integerp (size_tree, 1)) + size = tree_low_cst (size_tree, 1); + } + + return size; +} /* Return the bit position of FIELD, in bits from the start of the record. This is a tree of type bitsizetype. */ tree -bit_position (field) - tree field; +bit_position (const_tree field) { - return bit_from_pos (DECL_FIELD_OFFSET (field), DECL_FIELD_BIT_OFFSET (field)); } -/* Likewise, but return as an integer. Abort if it cannot be represented - in that way (since it could be a signed value, we don't have the option - of returning -1 like int_size_in_byte can. */ +/* Likewise, but return as an integer. It must be representable in + that way (since it could be a signed value, we don't have the + option of returning -1 like int_size_in_byte can. */ HOST_WIDE_INT -int_bit_position (field) - tree field; +int_bit_position (const_tree field) { return tree_low_cst (bit_position (field), 0); } @@ -1410,20 +1989,18 @@ int_bit_position (field) This is a tree of type sizetype. */ tree -byte_position (field) - tree field; +byte_position (const_tree field) { return byte_from_pos (DECL_FIELD_OFFSET (field), DECL_FIELD_BIT_OFFSET (field)); } -/* Likewise, but return as an integer. Abort if it cannot be represented - in that way (since it could be a signed value, we don't have the option - of returning -1 like int_size_in_byte can. */ +/* Likewise, but return as an integer. It must be representable in + that way (since it could be a signed value, we don't have the + option of returning -1 like int_size_in_byte can. */ HOST_WIDE_INT -int_byte_position (field) - tree field; +int_byte_position (const_tree field) { return tree_low_cst (byte_position (field), 0); } @@ -1431,14 +2008,13 @@ int_byte_position (field) /* Return the strictest alignment, in bits, that T is known to have. */ unsigned int -expr_align (t) - tree t; +expr_align (const_tree t) { unsigned int align0, align1; switch (TREE_CODE (t)) { - case NOP_EXPR: case CONVERT_EXPR: case NON_LVALUE_EXPR: + CASE_CONVERT: case NON_LVALUE_EXPR: /* If we have conversions, we know that the alignment of the object must meet each of the alignments of the types. 
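int_size_in_bytes and max_int_size_in_bytes above return -1 whenever the size cannot be represented as a non-negative host integer: a nonzero high word, or a low word whose sign bit is set, is rejected. The same test on a standalone two-word value; struct wide_size and size_as_host_int are illustrative stand-ins for TREE_INT_CST_HIGH and TREE_INT_CST_LOW.

/* A size held as two host words, low bits unsigned, high bits signed.  */
struct wide_size { unsigned long long low; long long high; };

/* Return the size as a signed host integer, or -1 if it does not fit:
   any bits in the high word, or a low word that would read as negative,
   mean the value is too big to hand back.  */
static long long
size_as_host_int (struct wide_size s)
{
  if (s.high != 0 || (long long) s.low < 0)
    return -1;
  return (long long) s.low;
}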
*/ align0 = expr_align (TREE_OPERAND (t, 0)); @@ -1447,7 +2023,7 @@ expr_align (t) case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR: case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR: - case WITH_RECORD_EXPR: case CLEANUP_POINT_EXPR: case UNSAVE_EXPR: + case CLEANUP_POINT_EXPR: /* These don't change the alignment of an object. */ return expr_align (TREE_OPERAND (t, 0)); @@ -1458,14 +2034,13 @@ expr_align (t) align1 = expr_align (TREE_OPERAND (t, 2)); return MIN (align0, align1); + /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set + meaningfully, it's always 1. */ case LABEL_DECL: case CONST_DECL: case VAR_DECL: case PARM_DECL: case RESULT_DECL: - if (DECL_ALIGN (t) != 0) - return DECL_ALIGN (t); - break; - case FUNCTION_DECL: - return FUNCTION_BOUNDARY; + gcc_assert (DECL_ALIGN (t) != 0); + return DECL_ALIGN (t); default: break; @@ -1479,8 +2054,7 @@ expr_align (t) ARRAY_TYPE) minus one. This counts only elements of the top array. */ tree -array_type_nelts (type) - tree type; +array_type_nelts (const_tree type) { tree index_type, min, max; @@ -1495,67 +2069,217 @@ array_type_nelts (type) return (integer_zerop (min) ? max - : fold (build (MINUS_EXPR, TREE_TYPE (max), max, min))); + : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min)); } -/* Return nonzero if arg is static -- a reference to an object in - static storage. This is not the same as the C meaning of `static'. */ +/* If arg is static -- a reference to an object in static storage -- then + return the object. This is not the same as the C meaning of `static'. + If arg isn't static, return NULL. */ -int -staticp (arg) - tree arg; +tree +staticp (tree arg) { switch (TREE_CODE (arg)) { case FUNCTION_DECL: - /* Nested functions aren't static, since taking their address - involves a trampoline. */ - return (decl_function_context (arg) == 0 || DECL_NO_STATIC_CHAIN (arg)) - && ! DECL_NON_ADDR_CONST_P (arg); + /* Nested functions are static, even though taking their address will + involve a trampoline as we unnest the nested function and create + the trampoline on the tree level. */ + return arg; case VAR_DECL: - return (TREE_STATIC (arg) || DECL_EXTERNAL (arg)) - && ! DECL_NON_ADDR_CONST_P (arg); + return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) + && ! DECL_THREAD_LOCAL_P (arg) + && ! DECL_DLLIMPORT_P (arg) + ? arg : NULL); + + case CONST_DECL: + return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg)) + ? arg : NULL); case CONSTRUCTOR: - return TREE_STATIC (arg); + return TREE_STATIC (arg) ? arg : NULL; case LABEL_DECL: case STRING_CST: - return 1; + return arg; + + case COMPONENT_REF: + /* If the thing being referenced is not a field, then it is + something language specific. */ + if (TREE_CODE (TREE_OPERAND (arg, 1)) != FIELD_DECL) + return (*lang_hooks.staticp) (arg); /* If we are referencing a bitfield, we can't evaluate an ADDR_EXPR at compile time and so it isn't a constant. */ - case COMPONENT_REF: - return (! DECL_BIT_FIELD (TREE_OPERAND (arg, 1)) - && staticp (TREE_OPERAND (arg, 0))); + if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1))) + return NULL; + + return staticp (TREE_OPERAND (arg, 0)); case BIT_FIELD_REF: - return 0; + return NULL; -#if 0 - /* This case is technically correct, but results in setting - TREE_CONSTANT on ADDR_EXPRs that cannot be evaluated at - compile time. */ + case MISALIGNED_INDIRECT_REF: + case ALIGN_INDIRECT_REF: case INDIRECT_REF: - return TREE_CONSTANT (TREE_OPERAND (arg, 0)); -#endif + return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? 
arg : NULL; case ARRAY_REF: case ARRAY_RANGE_REF: if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST) return staticp (TREE_OPERAND (arg, 0)); + else + return false; default: if ((unsigned int) TREE_CODE (arg) >= (unsigned int) LAST_AND_UNUSED_TREE_CODE) - return (*lang_hooks.staticp) (arg); + return lang_hooks.staticp (arg); else - return 0; + return NULL; } } + + + +/* Return whether OP is a DECL whose address is function-invariant. */ + +bool +decl_address_invariant_p (const_tree op) +{ + /* The conditions below are slightly less strict than the one in + staticp. */ + + switch (TREE_CODE (op)) + { + case PARM_DECL: + case RESULT_DECL: + case LABEL_DECL: + case FUNCTION_DECL: + return true; + + case VAR_DECL: + if (((TREE_STATIC (op) || DECL_EXTERNAL (op)) + && !DECL_DLLIMPORT_P (op)) + || DECL_THREAD_LOCAL_P (op) + || DECL_CONTEXT (op) == current_function_decl + || decl_function_context (op) == current_function_decl) + return true; + break; + + case CONST_DECL: + if ((TREE_STATIC (op) || DECL_EXTERNAL (op)) + || decl_function_context (op) == current_function_decl) + return true; + break; + + default: + break; + } + + return false; +} + +/* Return whether OP is a DECL whose address is interprocedural-invariant. */ + +bool +decl_address_ip_invariant_p (const_tree op) +{ + /* The conditions below are slightly less strict than the one in + staticp. */ + + switch (TREE_CODE (op)) + { + case LABEL_DECL: + case FUNCTION_DECL: + case STRING_CST: + return true; + + case VAR_DECL: + if (((TREE_STATIC (op) || DECL_EXTERNAL (op)) + && !DECL_DLLIMPORT_P (op)) + || DECL_THREAD_LOCAL_P (op)) + return true; + break; + + case CONST_DECL: + if ((TREE_STATIC (op) || DECL_EXTERNAL (op))) + return true; + break; + + default: + break; + } + + return false; +} + + +/* Return true if T is function-invariant (internal function, does + not handle arithmetic; that's handled in skip_simple_arithmetic and + tree_invariant_p). */ + +static bool tree_invariant_p (tree t); + +static bool +tree_invariant_p_1 (tree t) +{ + tree op; + + if (TREE_CONSTANT (t) + || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t))) + return true; + + switch (TREE_CODE (t)) + { + case SAVE_EXPR: + return true; + + case ADDR_EXPR: + op = TREE_OPERAND (t, 0); + while (handled_component_p (op)) + { + switch (TREE_CODE (op)) + { + case ARRAY_REF: + case ARRAY_RANGE_REF: + if (!tree_invariant_p (TREE_OPERAND (op, 1)) + || TREE_OPERAND (op, 2) != NULL_TREE + || TREE_OPERAND (op, 3) != NULL_TREE) + return false; + break; + + case COMPONENT_REF: + if (TREE_OPERAND (op, 2) != NULL_TREE) + return false; + break; + + default:; + } + op = TREE_OPERAND (op, 0); + } + + return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op); + + default: + break; + } + + return false; +} + +/* Return true if T is function-invariant. */ + +static bool +tree_invariant_p (tree t) +{ + tree inner = skip_simple_arithmetic (t); + return tree_invariant_p_1 (inner); +} + /* Wrap a SAVE_EXPR around EXPR, if appropriate. Do this to any expression which may be used in more than one place, but must be evaluated only once. @@ -1579,39 +2303,24 @@ staticp (arg) are used for. */ tree -save_expr (expr) - tree expr; +save_expr (tree expr) { tree t = fold (expr); tree inner; - /* We don't care about whether this can be used as an lvalue in this - context. 
*/ - while (TREE_CODE (t) == NON_LVALUE_EXPR) - t = TREE_OPERAND (t, 0); - - /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and - a constant, it will be more efficient to not make another SAVE_EXPR since - it will allow better simplification and GCSE will be able to merge the - computations if they actualy occur. */ - for (inner = t; - (TREE_CODE_CLASS (TREE_CODE (inner)) == '1' - || (TREE_CODE_CLASS (TREE_CODE (inner)) == '2' - && TREE_CONSTANT (TREE_OPERAND (inner, 1)))); - inner = TREE_OPERAND (inner, 0)) - ; - /* If the tree evaluates to a constant, then we don't want to hide that fact (i.e. this allows further folding, and direct checks for constants). However, a read-only object that has side effects cannot be bypassed. Since it is no problem to reevaluate literals, we just return the literal node. */ - if (TREE_CONSTANT (inner) - || (TREE_READONLY (inner) && ! TREE_SIDE_EFFECTS (inner)) - || TREE_CODE (inner) == SAVE_EXPR || TREE_CODE (inner) == ERROR_MARK) + inner = skip_simple_arithmetic (t); + if (TREE_CODE (inner) == ERROR_MARK) + return inner; + + if (tree_invariant_p_1 (inner)) return t; - /* If T contains a PLACEHOLDER_EXPR, we must evaluate it each time, since + /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since it means that the size or offset of some field of an object depends on the value within another field. @@ -1620,330 +2329,184 @@ save_expr (expr) evaluated more than once. Front-ends must assure this case cannot happen by surrounding any such subexpressions in their own SAVE_EXPR and forcing evaluation at the proper time. */ - if (contains_placeholder_p (t)) + if (contains_placeholder_p (inner)) return t; - t = build (SAVE_EXPR, TREE_TYPE (expr), t, current_function_decl, NULL_TREE); + t = build1 (SAVE_EXPR, TREE_TYPE (expr), t); /* This expression might be placed ahead of a jump to ensure that the value was computed on both sides of the jump. So make sure it isn't eliminated as dead. */ TREE_SIDE_EFFECTS (t) = 1; - TREE_READONLY (t) = 1; return t; } -/* Arrange for an expression to be expanded multiple independent - times. This is useful for cleanup actions, as the backend can - expand them multiple times in different places. */ +/* Look inside EXPR and into any simple arithmetic operations. Return + the innermost non-arithmetic node. */ tree -unsave_expr (expr) - tree expr; -{ - tree t; - - /* If this is already protected, no sense in protecting it again. */ - if (TREE_CODE (expr) == UNSAVE_EXPR) - return expr; - - t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr); - TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr); - return t; -} - -/* Returns the index of the first non-tree operand for CODE, or the number - of operands if all are trees. */ - -int -first_rtl_op (code) - enum tree_code code; +skip_simple_arithmetic (tree expr) { - switch (code) - { - case SAVE_EXPR: - return 2; - case GOTO_SUBROUTINE_EXPR: - case RTL_EXPR: - return 0; - case WITH_CLEANUP_EXPR: - return 2; - case METHOD_CALL_EXPR: - return 3; - default: - return TREE_CODE_LENGTH (code); - } -} + tree inner; -/* Perform any modifications to EXPR required when it is unsaved. Does - not recurse into EXPR's subtrees. */ + /* We don't care about whether this can be used as an lvalue in this + context. 
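The rewritten save_expr above returns either the operand itself, when skip_simple_arithmetic peels it down to something invariant, or a single SAVE_EXPR wrapper; a caller that needs the value twice builds both uses from the one returned node. A minimal sketch of that pattern; square_once is a hypothetical helper, while save_expr, fold_build2, MULT_EXPR and TREE_TYPE are the declarations already used elsewhere in this file.

/* Square EXPR while guaranteeing it is evaluated at most once, even if
   it has side effects.  If EXPR is invariant, save_expr hands it back
   unchanged and no SAVE_EXPR node is created at all.  */
static tree
square_once (tree expr)
{
  tree val = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (expr), val, val);
}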
*/ + while (TREE_CODE (expr) == NON_LVALUE_EXPR) + expr = TREE_OPERAND (expr, 0); -void -unsave_expr_1 (expr) - tree expr; -{ - switch (TREE_CODE (expr)) + /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and + a constant, it will be more efficient to not make another SAVE_EXPR since + it will allow better simplification and GCSE will be able to merge the + computations if they actually occur. */ + inner = expr; + while (1) { - case SAVE_EXPR: - if (! SAVE_EXPR_PERSISTENT_P (expr)) - SAVE_EXPR_RTL (expr) = 0; - break; - - case TARGET_EXPR: - /* Don't mess with a TARGET_EXPR that hasn't been expanded. - It's OK for this to happen if it was part of a subtree that - isn't immediately expanded, such as operand 2 of another - TARGET_EXPR. */ - if (TREE_OPERAND (expr, 1)) + if (UNARY_CLASS_P (inner)) + inner = TREE_OPERAND (inner, 0); + else if (BINARY_CLASS_P (inner)) + { + if (tree_invariant_p (TREE_OPERAND (inner, 1))) + inner = TREE_OPERAND (inner, 0); + else if (tree_invariant_p (TREE_OPERAND (inner, 0))) + inner = TREE_OPERAND (inner, 1); + else + break; + } + else break; - - TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3); - TREE_OPERAND (expr, 3) = NULL_TREE; - break; - - case RTL_EXPR: - /* I don't yet know how to emit a sequence multiple times. */ - if (RTL_EXPR_SEQUENCE (expr) != 0) - abort (); - break; - - default: - if (lang_unsave_expr_now != 0) - (*lang_unsave_expr_now) (expr); - break; } + + return inner; } -/* Helper function for unsave_expr_now. */ +/* Return which tree structure is used by T. */ -static void -unsave_expr_now_r (expr) - tree expr; +enum tree_node_structure_enum +tree_node_structure (const_tree t) { - enum tree_code code; - - /* There's nothing to do for NULL_TREE. */ - if (expr == 0) - return; - - unsave_expr_1 (expr); + const enum tree_code code = TREE_CODE (t); - code = TREE_CODE (expr); switch (TREE_CODE_CLASS (code)) - { - case 'c': /* a constant */ - case 't': /* a type node */ - case 'd': /* A decl node */ - case 'b': /* A block node */ - break; - - case 'x': /* miscellaneous: e.g., identifier, TREE_LIST or ERROR_MARK. */ - if (code == TREE_LIST) - { - unsave_expr_now_r (TREE_VALUE (expr)); - unsave_expr_now_r (TREE_CHAIN (expr)); - } - break; - - case 'e': /* an expression */ - case 'r': /* a reference */ - case 's': /* an expression with side effects */ - case '<': /* a comparison expression */ - case '2': /* a binary arithmetic expression */ - case '1': /* a unary arithmetic expression */ + { + case tcc_declaration: { - int i; - - for (i = first_rtl_op (code) - 1; i >= 0; i--) - unsave_expr_now_r (TREE_OPERAND (expr, i)); + switch (code) + { + case FIELD_DECL: + return TS_FIELD_DECL; + case PARM_DECL: + return TS_PARM_DECL; + case VAR_DECL: + return TS_VAR_DECL; + case LABEL_DECL: + return TS_LABEL_DECL; + case RESULT_DECL: + return TS_RESULT_DECL; + case CONST_DECL: + return TS_CONST_DECL; + case TYPE_DECL: + return TS_TYPE_DECL; + case FUNCTION_DECL: + return TS_FUNCTION_DECL; + case SYMBOL_MEMORY_TAG: + case NAME_MEMORY_TAG: + case MEMORY_PARTITION_TAG: + return TS_MEMORY_TAG; + default: + return TS_DECL_NON_COMMON; + } } + case tcc_type: + return TS_TYPE; + case tcc_reference: + case tcc_comparison: + case tcc_unary: + case tcc_binary: + case tcc_expression: + case tcc_statement: + case tcc_vl_exp: + return TS_EXP; + default: /* tcc_constant and tcc_exceptional */ break; - - default: - abort (); } -} - -/* Modify a tree in place so that all the evaluate only once things - are cleared out. Return the EXPR given. 
*/ - -tree -unsave_expr_now (expr) - tree expr; -{ - if (lang_unsave!= 0) - (*lang_unsave) (&expr); - else - unsave_expr_now_r (expr); - - return expr; -} - -/* Return 0 if it is safe to evaluate EXPR multiple times, - return 1 if it is safe if EXPR is unsaved afterward, or - return 2 if it is completely unsafe. - - This assumes that CALL_EXPRs and TARGET_EXPRs are never replicated in - an expression tree, so that it safe to unsave them and the surrounding - context will be correct. - - SAVE_EXPRs basically *only* appear replicated in an expression tree, - occasionally across the whole of a function. It is therefore only - safe to unsave a SAVE_EXPR if you know that all occurrences appear - below the UNSAVE_EXPR. - - RTL_EXPRs consume their rtl during evaluation. It is therefore - never possible to unsave them. */ - -int -unsafe_for_reeval (expr) - tree expr; -{ - int unsafeness = 0; - enum tree_code code; - int i, tmp, tmp2; - tree exp; - int first_rtl; - - if (expr == NULL_TREE) - return 1; - - code = TREE_CODE (expr); - first_rtl = first_rtl_op (code); - switch (code) { - case SAVE_EXPR: - case RTL_EXPR: - return 2; - - case TREE_LIST: - for (exp = expr; exp != 0; exp = TREE_CHAIN (exp)) - { - tmp = unsafe_for_reeval (TREE_VALUE (exp)); - unsafeness = MAX (tmp, unsafeness); - } - - return unsafeness; - - case CALL_EXPR: - tmp2 = unsafe_for_reeval (TREE_OPERAND (expr, 0)); - tmp = unsafe_for_reeval (TREE_OPERAND (expr, 1)); - return MAX (MAX (tmp, 1), tmp2); - - case TARGET_EXPR: - unsafeness = 1; - break; - - default: - if (lang_unsafe_for_reeval != 0) - { - tmp = (*lang_unsafe_for_reeval) (expr); - if (tmp >= 0) - return tmp; - } - break; - } - - switch (TREE_CODE_CLASS (code)) - { - case 'c': /* a constant */ - case 't': /* a type node */ - case 'x': /* something random, like an identifier or an ERROR_MARK. */ - case 'd': /* A decl node */ - case 'b': /* A block node */ - return 0; - - case 'e': /* an expression */ - case 'r': /* a reference */ - case 's': /* an expression with side effects */ - case '<': /* a comparison expression */ - case '2': /* a binary arithmetic expression */ - case '1': /* a unary arithmetic expression */ - for (i = first_rtl - 1; i >= 0; i--) - { - tmp = unsafe_for_reeval (TREE_OPERAND (expr, i)); - unsafeness = MAX (tmp, unsafeness); - } - - return unsafeness; + /* tcc_constant cases. */ + case INTEGER_CST: return TS_INT_CST; + case REAL_CST: return TS_REAL_CST; + case FIXED_CST: return TS_FIXED_CST; + case COMPLEX_CST: return TS_COMPLEX; + case VECTOR_CST: return TS_VECTOR; + case STRING_CST: return TS_STRING; + /* tcc_exceptional cases. */ + case ERROR_MARK: return TS_COMMON; + case IDENTIFIER_NODE: return TS_IDENTIFIER; + case TREE_LIST: return TS_LIST; + case TREE_VEC: return TS_VEC; + case SSA_NAME: return TS_SSA_NAME; + case PLACEHOLDER_EXPR: return TS_COMMON; + case STATEMENT_LIST: return TS_STATEMENT_LIST; + case BLOCK: return TS_BLOCK; + case CONSTRUCTOR: return TS_CONSTRUCTOR; + case TREE_BINFO: return TS_BINFO; + case OMP_CLAUSE: return TS_OMP_CLAUSE; + case OPTIMIZATION_NODE: return TS_OPTIMIZATION; + case TARGET_OPTION_NODE: return TS_TARGET_OPTION; default: - return 2; + gcc_unreachable (); } } /* Return 1 if EXP contains a PLACEHOLDER_EXPR; i.e., if it represents a size or offset that depends on a field within a record. 
*/ -int -contains_placeholder_p (exp) - tree exp; +bool +contains_placeholder_p (const_tree exp) { enum tree_code code; - int result; if (!exp) return 0; - /* If we have a WITH_RECORD_EXPR, it "cancels" any PLACEHOLDER_EXPR - in it since it is supplying a value for it. */ code = TREE_CODE (exp); - if (code == WITH_RECORD_EXPR) - return 0; - else if (code == PLACEHOLDER_EXPR) + if (code == PLACEHOLDER_EXPR) return 1; switch (TREE_CODE_CLASS (code)) { - case 'r': + case tcc_reference: /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit position computations since they will be converted into a WITH_RECORD_EXPR involving the reference, which will assume here will be valid. */ - return contains_placeholder_p (TREE_OPERAND (exp, 0)); + return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); - case 'x': + case tcc_exceptional: if (code == TREE_LIST) - return (contains_placeholder_p (TREE_VALUE (exp)) - || (TREE_CHAIN (exp) != 0 - && contains_placeholder_p (TREE_CHAIN (exp)))); + return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp)) + || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp))); break; - case '1': - case '2': case '<': - case 'e': + case tcc_unary: + case tcc_binary: + case tcc_comparison: + case tcc_expression: switch (code) { case COMPOUND_EXPR: /* Ignoring the first operand isn't quite right, but works best. */ - return contains_placeholder_p (TREE_OPERAND (exp, 1)); - - case RTL_EXPR: - case CONSTRUCTOR: - return 0; + return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)); case COND_EXPR: - return (contains_placeholder_p (TREE_OPERAND (exp, 0)) - || contains_placeholder_p (TREE_OPERAND (exp, 1)) - || contains_placeholder_p (TREE_OPERAND (exp, 2))); + return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) + || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)) + || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2))); case SAVE_EXPR: - /* If we already know this doesn't have a placeholder, don't - check again. */ - if (SAVE_EXPR_NOPLACEHOLDER (exp) || SAVE_EXPR_RTL (exp) != 0) - return 0; - - SAVE_EXPR_NOPLACEHOLDER (exp) = 1; - result = contains_placeholder_p (TREE_OPERAND (exp, 0)); - if (result) - SAVE_EXPR_NOPLACEHOLDER (exp) = 0; - - return result; - - case CALL_EXPR: - return (TREE_OPERAND (exp, 1) != 0 - && contains_placeholder_p (TREE_OPERAND (exp, 1))); + /* The save_expr function never wraps anything containing + a PLACEHOLDER_EXPR. */ + return 0; default: break; @@ -1952,10 +2515,26 @@ contains_placeholder_p (exp) switch (TREE_CODE_LENGTH (code)) { case 1: - return contains_placeholder_p (TREE_OPERAND (exp, 0)); + return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)); case 2: - return (contains_placeholder_p (TREE_OPERAND (exp, 0)) - || contains_placeholder_p (TREE_OPERAND (exp, 1))); + return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0)) + || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))); + default: + return 0; + } + + case tcc_vl_exp: + switch (code) + { + case CALL_EXPR: + { + const_tree arg; + const_call_expr_arg_iterator iter; + FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp) + if (CONTAINS_PLACEHOLDER_P (arg)) + return 1; + return 0; + } default: return 0; } @@ -1966,63 +2545,92 @@ contains_placeholder_p (exp) return 0; } -/* Return 1 if EXP contains any expressions that produce cleanups for an - outer scope to deal with. Used by fold. */ +/* Return true if any part of the computation of TYPE involves a + PLACEHOLDER_EXPR. This includes size, bounds, qualifiers + (for QUAL_UNION_TYPE) and field positions. 
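contains_placeholder_p above now walks CALL_EXPR arguments with the const_call_expr_arg_iterator instead of chasing a TREE_LIST hung off operand 1. The same iteration pattern in a hypothetical helper; count_constant_call_args is not a GCC function, but the iterator macros are the ones used above.

/* Count how many arguments of the CALL_EXPR EXP are constants.  */
static int
count_constant_call_args (const_tree exp)
{
  const_tree arg;
  const_call_expr_arg_iterator iter;
  int n = 0;

  FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
    if (TREE_CONSTANT (arg))
      n++;

  return n;
}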
*/ -int -has_cleanups (exp) - tree exp; +static bool +type_contains_placeholder_1 (const_tree type) { - int i, nops, cmp; - - if (! TREE_SIDE_EFFECTS (exp)) - return 0; + /* If the size contains a placeholder or the parent type (component type in + the case of arrays) type involves a placeholder, this type does. */ + if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)) + || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type)) + || (TREE_TYPE (type) != 0 + && type_contains_placeholder_p (TREE_TYPE (type)))) + return true; - switch (TREE_CODE (exp)) + /* Now do type-specific checks. Note that the last part of the check above + greatly limits what we have to do below. */ + switch (TREE_CODE (type)) { - case TARGET_EXPR: - case GOTO_SUBROUTINE_EXPR: - case WITH_CLEANUP_EXPR: - return 1; + case VOID_TYPE: + case COMPLEX_TYPE: + case ENUMERAL_TYPE: + case BOOLEAN_TYPE: + case POINTER_TYPE: + case OFFSET_TYPE: + case REFERENCE_TYPE: + case METHOD_TYPE: + case FUNCTION_TYPE: + case VECTOR_TYPE: + return false; + + case INTEGER_TYPE: + case REAL_TYPE: + case FIXED_POINT_TYPE: + /* Here we just check the bounds. */ + return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type)) + || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type))); + + case ARRAY_TYPE: + /* We're already checked the component type (TREE_TYPE), so just check + the index type. */ + return type_contains_placeholder_p (TYPE_DOMAIN (type)); + + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: + { + tree field; - case CLEANUP_POINT_EXPR: - return 0; + for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) + if (TREE_CODE (field) == FIELD_DECL + && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field)) + || (TREE_CODE (type) == QUAL_UNION_TYPE + && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field))) + || type_contains_placeholder_p (TREE_TYPE (field)))) + return true; - case CALL_EXPR: - for (exp = TREE_OPERAND (exp, 1); exp; exp = TREE_CHAIN (exp)) - { - cmp = has_cleanups (TREE_VALUE (exp)); - if (cmp) - return cmp; - } - return 0; + return false; + } default: - break; + gcc_unreachable (); } +} - /* This general rule works for most tree codes. All exceptions should be - handled above. If this is a language-specific tree code, we can't - trust what might be in the operand, so say we don't know - the situation. */ - if ((int) TREE_CODE (exp) >= (int) LAST_AND_UNUSED_TREE_CODE) - return -1; +bool +type_contains_placeholder_p (tree type) +{ + bool result; - nops = first_rtl_op (TREE_CODE (exp)); - for (i = 0; i < nops; i++) - if (TREE_OPERAND (exp, i) != 0) - { - int type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i))); - if (type == 'e' || type == '<' || type == '1' || type == '2' - || type == 'r' || type == 's') - { - cmp = has_cleanups (TREE_OPERAND (exp, i)); - if (cmp) - return cmp; - } - } + /* If the contains_placeholder_bits field has been initialized, + then we know the answer. */ + if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0) + return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1; - return 0; + /* Indicate that we've seen this type node, and the answer is false. + This is what we want to return if we run into recursion via fields. */ + TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1; + + /* Compute the real value. */ + result = type_contains_placeholder_1 (type); + + /* Store the real value. 
*/ + TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1; + + return result; } /* Given a tree EXP, a FIELD_DECL F, and a replacement value R, @@ -2032,165 +2640,294 @@ has_cleanups (exp) PLACEHOLDER_EXPR occurring only in its arglist. */ tree -substitute_in_expr (exp, f, r) - tree exp; - tree f; - tree r; +substitute_in_expr (tree exp, tree f, tree r) { enum tree_code code = TREE_CODE (exp); - tree op0, op1, op2; - tree new; - tree inner; + tree op0, op1, op2, op3; + tree new_tree, inner; - switch (TREE_CODE_CLASS (code)) + /* We handle TREE_LIST and COMPONENT_REF separately. */ + if (code == TREE_LIST) { - case 'c': - case 'd': - return exp; + op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r); + op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r); + if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) + return exp; - case 'x': - if (code == PLACEHOLDER_EXPR) + return tree_cons (TREE_PURPOSE (exp), op1, op0); + } + else if (code == COMPONENT_REF) + { + /* If this expression is getting a value from a PLACEHOLDER_EXPR + and it is the right field, replace it with R. */ + for (inner = TREE_OPERAND (exp, 0); + REFERENCE_CLASS_P (inner); + inner = TREE_OPERAND (inner, 0)) + ; + if (TREE_CODE (inner) == PLACEHOLDER_EXPR + && TREE_OPERAND (exp, 1) == f) + return r; + + /* If this expression hasn't been completed let, leave it alone. */ + if (TREE_CODE (inner) == PLACEHOLDER_EXPR && TREE_TYPE (inner) == 0) + return exp; + + op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); + if (op0 == TREE_OPERAND (exp, 0)) + return exp; + + new_tree = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), + op0, TREE_OPERAND (exp, 1), NULL_TREE); + } + else + switch (TREE_CODE_CLASS (code)) + { + case tcc_constant: + case tcc_declaration: return exp; - else if (code == TREE_LIST) - { - op0 = (TREE_CHAIN (exp) == 0 - ? 0 : substitute_in_expr (TREE_CHAIN (exp), f, r)); - op1 = substitute_in_expr (TREE_VALUE (exp), f, r); - if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) + + case tcc_exceptional: + case tcc_unary: + case tcc_binary: + case tcc_comparison: + case tcc_expression: + case tcc_reference: + switch (TREE_CODE_LENGTH (code)) + { + case 0: return exp; - return tree_cons (TREE_PURPOSE (exp), op1, op0); - } + case 1: + op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); + if (op0 == TREE_OPERAND (exp, 0)) + return exp; - abort (); + new_tree = fold_build1 (code, TREE_TYPE (exp), op0); + break; - case '1': - case '2': - case '<': - case 'e': - switch (TREE_CODE_LENGTH (code)) - { - case 1: - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - if (op0 == TREE_OPERAND (exp, 0)) - return exp; + case 2: + op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); + op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); - if (code == NON_LVALUE_EXPR) - return op0; + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) + return exp; - new = fold (build1 (code, TREE_TYPE (exp), op0)); - break; + new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1); + break; - case 2: - /* An RTL_EXPR cannot contain a PLACEHOLDER_EXPR; a CONSTRUCTOR - could, but we don't support it. 
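type_contains_placeholder_p above caches its answer in a small per-type field: 0 means not yet computed, otherwise the stored value is the answer plus one, and "false" is seeded before recursing so self-referential types terminate. A standalone sketch of the same memoization shape on a hypothetical struct gnode; none of these names are GCC's.

struct gnode
{
  unsigned cache : 2;          /* 0 = unknown, 1 = false, 2 = true */
  int interesting;
  struct gnode *child[2];
};

static int
node_is_interesting (struct gnode *n)
{
  int result, i;

  if (n == 0)
    return 0;

  /* If the cache field is set, it holds the answer plus one.  */
  if (n->cache > 0)
    return n->cache - 1;

  /* Seed "false" so that a cycle back to N terminates with that answer
     instead of recursing forever.  */
  n->cache = 1;

  result = (n->interesting != 0);
  for (i = 0; i < 2 && !result; i++)
    result = node_is_interesting (n->child[i]);

  /* Store the real answer, again offset by one.  */
  n->cache = result + 1;
  return result;
}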
*/ - if (code == RTL_EXPR) - return exp; - else if (code == CONSTRUCTOR) - abort (); + case 3: + op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); + op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); + op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - op1 = substitute_in_expr (TREE_OPERAND (exp, 1), f, r); - if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) - return exp; + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) + && op2 == TREE_OPERAND (exp, 2)) + return exp; - new = fold (build (code, TREE_TYPE (exp), op0, op1)); - break; + new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); + break; - case 3: - /* It cannot be that anything inside a SAVE_EXPR contains a - PLACEHOLDER_EXPR. */ - if (code == SAVE_EXPR) - return exp; + case 4: + op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r); + op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r); + op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r); + op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r); - else if (code == CALL_EXPR) - { - op1 = substitute_in_expr (TREE_OPERAND (exp, 1), f, r); - if (op1 == TREE_OPERAND (exp, 1)) - return exp; + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) + && op2 == TREE_OPERAND (exp, 2) + && op3 == TREE_OPERAND (exp, 3)) + return exp; - return build (code, TREE_TYPE (exp), - TREE_OPERAND (exp, 0), op1, NULL_TREE); - } + new_tree = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3)); + break; - else if (code != COND_EXPR) - abort (); + default: + gcc_unreachable (); + } + break; - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - op1 = substitute_in_expr (TREE_OPERAND (exp, 1), f, r); - op2 = substitute_in_expr (TREE_OPERAND (exp, 2), f, r); - if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) - && op2 == TREE_OPERAND (exp, 2)) - return exp; + case tcc_vl_exp: + { + tree copy = NULL_TREE; + int i; - new = fold (build (code, TREE_TYPE (exp), op0, op1, op2)); - break; + for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) + { + tree op = TREE_OPERAND (exp, i); + tree new_op = SUBSTITUTE_IN_EXPR (op, f, r); + if (new_op != op) + { + if (!copy) + copy = copy_node (exp); + TREE_OPERAND (copy, i) = new_op; + } + } - default: - abort (); + if (copy) + new_tree = fold (copy); + else + return exp; } + break; - break; + default: + gcc_unreachable (); + } - case 'r': - switch (code) - { - case COMPONENT_REF: - /* If this expression is getting a value from a PLACEHOLDER_EXPR - and it is the right field, replace it with R. */ - for (inner = TREE_OPERAND (exp, 0); - TREE_CODE_CLASS (TREE_CODE (inner)) == 'r'; - inner = TREE_OPERAND (inner, 0)) - ; - if (TREE_CODE (inner) == PLACEHOLDER_EXPR - && TREE_OPERAND (exp, 1) == f) - return r; - - /* If this expression hasn't been completed let, leave it - alone. */ - if (TREE_CODE (inner) == PLACEHOLDER_EXPR - && TREE_TYPE (inner) == 0) - return exp; + TREE_READONLY (new_tree) = TREE_READONLY (exp); + return new_tree; +} - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - if (op0 == TREE_OPERAND (exp, 0)) - return exp; +/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement + for it within OBJ, a tree that is an object or a chain of references. 
*/ - new = fold (build (code, TREE_TYPE (exp), op0, - TREE_OPERAND (exp, 1))); - break; +tree +substitute_placeholder_in_expr (tree exp, tree obj) +{ + enum tree_code code = TREE_CODE (exp); + tree op0, op1, op2, op3; - case BIT_FIELD_REF: - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - op1 = substitute_in_expr (TREE_OPERAND (exp, 1), f, r); - op2 = substitute_in_expr (TREE_OPERAND (exp, 2), f, r); - if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) - && op2 == TREE_OPERAND (exp, 2)) - return exp; + /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type + in the chain of OBJ. */ + if (code == PLACEHOLDER_EXPR) + { + tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp)); + tree elt; + + for (elt = obj; elt != 0; + elt = ((TREE_CODE (elt) == COMPOUND_EXPR + || TREE_CODE (elt) == COND_EXPR) + ? TREE_OPERAND (elt, 1) + : (REFERENCE_CLASS_P (elt) + || UNARY_CLASS_P (elt) + || BINARY_CLASS_P (elt) + || VL_EXP_CLASS_P (elt) + || EXPRESSION_CLASS_P (elt)) + ? TREE_OPERAND (elt, 0) : 0)) + if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) + return elt; + + for (elt = obj; elt != 0; + elt = ((TREE_CODE (elt) == COMPOUND_EXPR + || TREE_CODE (elt) == COND_EXPR) + ? TREE_OPERAND (elt, 1) + : (REFERENCE_CLASS_P (elt) + || UNARY_CLASS_P (elt) + || BINARY_CLASS_P (elt) + || VL_EXP_CLASS_P (elt) + || EXPRESSION_CLASS_P (elt)) + ? TREE_OPERAND (elt, 0) : 0)) + if (POINTER_TYPE_P (TREE_TYPE (elt)) + && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) + == need_type)) + return fold_build1 (INDIRECT_REF, need_type, elt); + + /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it + survives until RTL generation, there will be an error. */ + return exp; + } - new = fold (build (code, TREE_TYPE (exp), op0, op1, op2)); - break; + /* TREE_LIST is special because we need to look at TREE_VALUE + and TREE_CHAIN, not TREE_OPERANDS. 
*/ + else if (code == TREE_LIST) + { + op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj); + op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj); + if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp)) + return exp; + + return tree_cons (TREE_PURPOSE (exp), op1, op0); + } + else + switch (TREE_CODE_CLASS (code)) + { + case tcc_constant: + case tcc_declaration: + return exp; - case INDIRECT_REF: - case BUFFER_REF: - op0 = substitute_in_expr (TREE_OPERAND (exp, 0), f, r); - if (op0 == TREE_OPERAND (exp, 0)) + case tcc_exceptional: + case tcc_unary: + case tcc_binary: + case tcc_comparison: + case tcc_expression: + case tcc_reference: + case tcc_statement: + switch (TREE_CODE_LENGTH (code)) + { + case 0: return exp; - new = fold (build1 (code, TREE_TYPE (exp), op0)); - break; + case 1: + op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); + if (op0 == TREE_OPERAND (exp, 0)) + return exp; + else + return fold_build1 (code, TREE_TYPE (exp), op0); + + case 2: + op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); + op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); + + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) + return exp; + else + return fold_build2 (code, TREE_TYPE (exp), op0, op1); + + case 3: + op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); + op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); + op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); + + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) + && op2 == TREE_OPERAND (exp, 2)) + return exp; + else + return fold_build3 (code, TREE_TYPE (exp), op0, op1, op2); + + case 4: + op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj); + op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj); + op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj); + op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj); + + if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1) + && op2 == TREE_OPERAND (exp, 2) + && op3 == TREE_OPERAND (exp, 3)) + return exp; + else + return fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3)); + + default: + gcc_unreachable (); + } + break; - default: - abort (); - } - break; + case tcc_vl_exp: + { + tree copy = NULL_TREE; + int i; - default: - abort (); - } + for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++) + { + tree op = TREE_OPERAND (exp, i); + tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj); + if (new_op != op) + { + if (!copy) + copy = copy_node (exp); + TREE_OPERAND (copy, i) = new_op; + } + } + + if (copy) + return fold (copy); + else + return exp; + } - TREE_READONLY (new) = TREE_READONLY (exp); - return new; + default: + gcc_unreachable (); + } } /* Stabilize a reference so that we can use it any number of times @@ -2202,8 +2939,7 @@ substitute_in_expr (exp, f, r) Any other kind of expression is returned unchanged. */ tree -stabilize_reference (ref) - tree ref; +stabilize_reference (tree ref) { tree result; enum tree_code code = TREE_CODE (ref); @@ -2216,13 +2952,9 @@ stabilize_reference (ref) /* No action is needed in this case. 
*/ return ref; - case NOP_EXPR: - case CONVERT_EXPR: + CASE_CONVERT: case FLOAT_EXPR: case FIX_TRUNC_EXPR: - case FIX_FLOOR_EXPR: - case FIX_ROUND_EXPR: - case FIX_CEIL_EXPR: result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0))); break; @@ -2234,7 +2966,7 @@ stabilize_reference (ref) case COMPONENT_REF: result = build_nt (COMPONENT_REF, stabilize_reference (TREE_OPERAND (ref, 0)), - TREE_OPERAND (ref, 1)); + TREE_OPERAND (ref, 1), NULL_TREE); break; case BIT_FIELD_REF: @@ -2247,13 +2979,15 @@ stabilize_reference (ref) case ARRAY_REF: result = build_nt (ARRAY_REF, stabilize_reference (TREE_OPERAND (ref, 0)), - stabilize_reference_1 (TREE_OPERAND (ref, 1))); + stabilize_reference_1 (TREE_OPERAND (ref, 1)), + TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); break; case ARRAY_RANGE_REF: result = build_nt (ARRAY_RANGE_REF, stabilize_reference (TREE_OPERAND (ref, 0)), - stabilize_reference_1 (TREE_OPERAND (ref, 1))); + stabilize_reference_1 (TREE_OPERAND (ref, 1)), + TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3)); break; case COMPOUND_EXPR: @@ -2262,13 +2996,6 @@ stabilize_reference (ref) volatiles. */ return stabilize_reference_1 (ref); - case RTL_EXPR: - result = build1 (INDIRECT_REF, TREE_TYPE (ref), - save_expr (build1 (ADDR_EXPR, - build_pointer_type (TREE_TYPE (ref)), - ref))); - break; - /* If arg isn't a kind of lvalue we recognize, make no change. Caller should recognize the error for an invalid lvalue. */ default: @@ -2300,8 +3027,7 @@ stabilize_reference (ref) multiple utterances of the same expression should that prove fruitful. */ tree -stabilize_reference_1 (e) - tree e; +stabilize_reference_1 (tree e) { tree result; enum tree_code code = TREE_CODE (e); @@ -2311,19 +3037,19 @@ stabilize_reference_1 (e) ignore things that are actual constant or that already have been handled by this function. */ - if (TREE_CONSTANT (e) || code == SAVE_EXPR) + if (tree_invariant_p (e)) return e; switch (TREE_CODE_CLASS (code)) { - case 'x': - case 't': - case 'd': - case 'b': - case '<': - case 's': - case 'e': - case 'r': + case tcc_exceptional: + case tcc_type: + case tcc_declaration: + case tcc_comparison: + case tcc_statement: + case tcc_expression: + case tcc_reference: + case tcc_vl_exp: /* If the expression has side-effects, then encase it in a SAVE_EXPR so that it will only be evaluated once. */ /* The reference (r) and comparison (<) classes could be handled as @@ -2332,12 +3058,12 @@ stabilize_reference_1 (e) return save_expr (e); return e; - case 'c': + case tcc_constant: /* Constants need no processing. In fact, we should never reach here. */ return e; - case '2': + case tcc_binary: /* Division is slow and tends to be compiled with jumps, especially the division by powers of 2 that is often found inside of an array reference. So do it just once. */ @@ -2351,13 +3077,13 @@ stabilize_reference_1 (e) stabilize_reference_1 (TREE_OPERAND (e, 1))); break; - case '1': + case tcc_unary: /* Recursively stabilize each operand. */ result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0))); break; default: - abort (); + gcc_unreachable (); } TREE_TYPE (result) = TREE_TYPE (e); @@ -2370,413 +3096,699 @@ stabilize_reference_1 (e) /* Low-level constructors for expressions. */ -/* Build an expression of code CODE, data type TYPE, - and operands as specified by the arguments ARG1 and following arguments. - Expressions and reference nodes can be created this way. - Constants, decls, types and misc nodes cannot be. */ +/* A helper function for build1 and constant folders. 
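stabilize_reference and stabilize_reference_1 above exist so a reference can be written and re-read with its address-computing subexpressions, including slow divisions inside array indices, evaluated only once. A minimal sketch of the intended use; build_increment_once is hypothetical, build2, MODIFY_EXPR, PLUS_EXPR and TREE_TYPE appear in this file, and build_int_cst is assumed from tree.h.

/* Build `ref = ref + 1' so that the index and offset arithmetic inside
   REF is computed a single time and shared by the load and the store.  */
static tree
build_increment_once (tree ref)
{
  tree stable = stabilize_reference (ref);

  return build2 (MODIFY_EXPR, TREE_TYPE (ref), stable,
                 build2 (PLUS_EXPR, TREE_TYPE (ref), stable,
                         build_int_cst (TREE_TYPE (ref), 1)));
}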
Set TREE_CONSTANT, + and TREE_SIDE_EFFECTS for an ADDR_EXPR. */ -tree -build VPARAMS ((enum tree_code code, tree tt, ...)) +void +recompute_tree_invariant_for_addr_expr (tree t) { - tree t; - int length; - int i; - int fro; - int constant; - - VA_OPEN (p, tt); - VA_FIXEDARG (p, enum tree_code, code); - VA_FIXEDARG (p, tree, tt); + tree node; + bool tc = true, se = false; - t = make_node (code); - length = TREE_CODE_LENGTH (code); - TREE_TYPE (t) = tt; + /* We started out assuming this address is both invariant and constant, but + does not have side effects. Now go down any handled components and see if + any of them involve offsets that are either non-constant or non-invariant. + Also check for side-effects. - /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the - result based on those same flags for the arguments. But if the - arguments aren't really even `tree' expressions, we shouldn't be trying - to do this. */ - fro = first_rtl_op (code); + ??? Note that this code makes no attempt to deal with the case where + taking the address of something causes a copy due to misalignment. */ - /* Expressions without side effects may be constant if their - arguments are as well. */ - constant = (TREE_CODE_CLASS (code) == '<' - || TREE_CODE_CLASS (code) == '1' - || TREE_CODE_CLASS (code) == '2' - || TREE_CODE_CLASS (code) == 'c'); +#define UPDATE_FLAGS(NODE) \ +do { tree _node = (NODE); \ + if (_node && !TREE_CONSTANT (_node)) tc = false; \ + if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0) - if (length == 2) + for (node = TREE_OPERAND (t, 0); handled_component_p (node); + node = TREE_OPERAND (node, 0)) { - /* This is equivalent to the loop below, but faster. */ - tree arg0 = va_arg (p, tree); - tree arg1 = va_arg (p, tree); - - TREE_OPERAND (t, 0) = arg0; - TREE_OPERAND (t, 1) = arg1; - TREE_READONLY (t) = 1; - if (arg0 && fro > 0) + /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus + array reference (probably made temporarily by the G++ front end), + so ignore all the operands. */ + if ((TREE_CODE (node) == ARRAY_REF + || TREE_CODE (node) == ARRAY_RANGE_REF) + && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE) { - if (TREE_SIDE_EFFECTS (arg0)) - TREE_SIDE_EFFECTS (t) = 1; - if (!TREE_READONLY (arg0)) - TREE_READONLY (t) = 0; - if (!TREE_CONSTANT (arg0)) - constant = 0; + UPDATE_FLAGS (TREE_OPERAND (node, 1)); + if (TREE_OPERAND (node, 2)) + UPDATE_FLAGS (TREE_OPERAND (node, 2)); + if (TREE_OPERAND (node, 3)) + UPDATE_FLAGS (TREE_OPERAND (node, 3)); } - - if (arg1 && fro > 1) + /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a + FIELD_DECL, apparently. The G++ front end can put something else + there, at least temporarily. */ + else if (TREE_CODE (node) == COMPONENT_REF + && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL) { - if (TREE_SIDE_EFFECTS (arg1)) - TREE_SIDE_EFFECTS (t) = 1; - if (!TREE_READONLY (arg1)) - TREE_READONLY (t) = 0; - if (!TREE_CONSTANT (arg1)) - constant = 0; + if (TREE_OPERAND (node, 2)) + UPDATE_FLAGS (TREE_OPERAND (node, 2)); } + else if (TREE_CODE (node) == BIT_FIELD_REF) + UPDATE_FLAGS (TREE_OPERAND (node, 2)); } - else if (length == 1) - { - tree arg0 = va_arg (p, tree); - /* The only one-operand cases we handle here are those with side-effects. - Others are handled with build1. So don't bother checked if the - arg has side-effects since we'll already have set it. + node = lang_hooks.expr_to_decl (node, &tc, &se); - ??? This really should use build1 too. 
*/ - if (TREE_CODE_CLASS (code) != 's') - abort (); - TREE_OPERAND (t, 0) = arg0; - } + /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from + the address, since &(*a)->b is a form of addition. If it's a constant, the + address is constant too. If it's a decl, its address is constant if the + decl is static. Everything else is not constant and, furthermore, + taking the address of a volatile variable is not volatile. */ + if (TREE_CODE (node) == INDIRECT_REF) + UPDATE_FLAGS (TREE_OPERAND (node, 0)); + else if (CONSTANT_CLASS_P (node)) + ; + else if (DECL_P (node)) + tc &= (staticp (node) != NULL_TREE); else { - for (i = 0; i < length; i++) - { - tree operand = va_arg (p, tree); - - TREE_OPERAND (t, i) = operand; - if (operand && fro > i) - { - if (TREE_SIDE_EFFECTS (operand)) - TREE_SIDE_EFFECTS (t) = 1; - if (!TREE_CONSTANT (operand)) - constant = 0; - } - } + tc = false; + se |= TREE_SIDE_EFFECTS (node); } - VA_CLOSE (p); - TREE_CONSTANT (t) = constant; - return t; + + TREE_CONSTANT (t) = tc; + TREE_SIDE_EFFECTS (t) = se; +#undef UPDATE_FLAGS } -/* Same as above, but only builds for unary operators. - Saves lions share of calls to `build'; cuts down use - of varargs, which is expensive for RISC machines. */ +/* Build an expression of code CODE, data type TYPE, and operands as + specified. Expressions and reference nodes can be created this way. + Constants, decls, types and misc nodes cannot be. + + We define 5 non-variadic functions, from 0 to 4 arguments. This is + enough for all extant tree codes. */ tree -build1 (code, type, node) - enum tree_code code; - tree type; - tree node; +build0_stat (enum tree_code code, tree tt MEM_STAT_DECL) { - int length; -#ifdef GATHER_STATISTICS - tree_node_kind kind; -#endif tree t; -#ifdef GATHER_STATISTICS - if (TREE_CODE_CLASS (code) == 'r') - kind = r_kind; - else - kind = e_kind; -#endif - -#ifdef ENABLE_CHECKING - if (TREE_CODE_CLASS (code) == '2' - || TREE_CODE_CLASS (code) == '<' - || TREE_CODE_LENGTH (code) != 1) - abort (); -#endif /* ENABLE_CHECKING */ + gcc_assert (TREE_CODE_LENGTH (code) == 0); - length = sizeof (struct tree_exp); + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; - t = ggc_alloc_tree (length); + return t; +} - memset ((PTR) t, 0, sizeof (struct tree_common)); +tree +build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL) +{ + int length = sizeof (struct tree_exp); +#ifdef GATHER_STATISTICS + tree_node_kind kind; +#endif + tree t; #ifdef GATHER_STATISTICS + switch (TREE_CODE_CLASS (code)) + { + case tcc_statement: /* an expression with side effects */ + kind = s_kind; + break; + case tcc_reference: /* a reference */ + kind = r_kind; + break; + default: + kind = e_kind; + break; + } + tree_node_counts[(int) kind]++; tree_node_sizes[(int) kind] += length; #endif + gcc_assert (TREE_CODE_LENGTH (code) == 1); + + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); + + memset (t, 0, sizeof (struct tree_common)); + TREE_SET_CODE (t, code); TREE_TYPE (t) = type; - TREE_COMPLEXITY (t) = 0; + SET_EXPR_LOCATION (t, UNKNOWN_LOCATION); TREE_OPERAND (t, 0) = node; - if (node && first_rtl_op (code) != 0) + TREE_BLOCK (t) = NULL_TREE; + if (node && !TYPE_P (node)) { TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node); TREE_READONLY (t) = TREE_READONLY (node); } - switch (code) + if (TREE_CODE_CLASS (code) == tcc_statement) + TREE_SIDE_EFFECTS (t) = 1; + else switch (code) { - case INIT_EXPR: - case MODIFY_EXPR: case VA_ARG_EXPR: - case RTL_EXPR: - case PREDECREMENT_EXPR: - case 
PREINCREMENT_EXPR: - case POSTDECREMENT_EXPR: - case POSTINCREMENT_EXPR: /* All of these have side-effects, no matter what their operands are. */ TREE_SIDE_EFFECTS (t) = 1; TREE_READONLY (t) = 0; break; + case MISALIGNED_INDIRECT_REF: + case ALIGN_INDIRECT_REF: case INDIRECT_REF: /* Whether a dereference is readonly has nothing to do with whether its operand is readonly. */ TREE_READONLY (t) = 0; break; + case ADDR_EXPR: + if (node) + recompute_tree_invariant_for_addr_expr (t); + break; + default: - if (TREE_CODE_CLASS (code) == '1' && node && TREE_CONSTANT (node)) + if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR) + && node && !TYPE_P (node) + && TREE_CONSTANT (node)) TREE_CONSTANT (t) = 1; + if (TREE_CODE_CLASS (code) == tcc_reference + && node && TREE_THIS_VOLATILE (node)) + TREE_THIS_VOLATILE (t) = 1; break; } return t; } -/* Similar except don't specify the TREE_TYPE - and leave the TREE_SIDE_EFFECTS as 0. - It is permissible for arguments to be null, - or even garbage if their values do not matter. */ +#define PROCESS_ARG(N) \ + do { \ + TREE_OPERAND (t, N) = arg##N; \ + if (arg##N &&!TYPE_P (arg##N)) \ + { \ + if (TREE_SIDE_EFFECTS (arg##N)) \ + side_effects = 1; \ + if (!TREE_READONLY (arg##N)) \ + read_only = 0; \ + if (!TREE_CONSTANT (arg##N)) \ + constant = 0; \ + } \ + } while (0) tree -build_nt VPARAMS ((enum tree_code code, ...)) +build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL) { + bool constant, read_only, side_effects; tree t; - int length; - int i; - VA_OPEN (p, code); - VA_FIXEDARG (p, enum tree_code, code); + gcc_assert (TREE_CODE_LENGTH (code) == 2); - t = make_node (code); - length = TREE_CODE_LENGTH (code); + if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR) + && arg0 && arg1 && tt && POINTER_TYPE_P (tt) + /* When sizetype precision doesn't match that of pointers + we need to be able to build explicit extensions or truncations + of the offset argument. */ + && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt)) + gcc_assert (TREE_CODE (arg0) == INTEGER_CST + && TREE_CODE (arg1) == INTEGER_CST); - for (i = 0; i < length; i++) - TREE_OPERAND (t, i) = va_arg (p, tree); + if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt) + gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0)) + && INTEGRAL_TYPE_P (TREE_TYPE (arg1)) + && useless_type_conversion_p (sizetype, TREE_TYPE (arg1))); + + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; + + /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the + result based on those same flags for the arguments. But if the + arguments aren't really even `tree' expressions, we shouldn't be trying + to do this. */ + + /* Expressions without side effects may be constant if their + arguments are as well. */ + constant = (TREE_CODE_CLASS (code) == tcc_comparison + || TREE_CODE_CLASS (code) == tcc_binary); + read_only = 1; + side_effects = TREE_SIDE_EFFECTS (t); + + PROCESS_ARG(0); + PROCESS_ARG(1); + + TREE_READONLY (t) = read_only; + TREE_CONSTANT (t) = constant; + TREE_SIDE_EFFECTS (t) = side_effects; + TREE_THIS_VOLATILE (t) + = (TREE_CODE_CLASS (code) == tcc_reference + && arg0 && TREE_THIS_VOLATILE (arg0)); - VA_CLOSE (p); return t; } - -/* Create a DECL_... node of code CODE, name NAME and data type TYPE. - We do NOT enter this node in any sort of symbol table. - layout_decl is used to set up the decl's storage layout. - Other slots are initialized to 0 or null pointers. 
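PROCESS_ARG above lets each operand only weaken the flags presumed for the new node: side effects accumulate, while read-only and constant are cleared as soon as one operand lacks them (build2_stat additionally presumes constness only for binary and comparison codes, which this sketch ignores). The same accumulation on a hypothetical standalone node type; struct xnode and make_binary are illustrative only.

struct xnode
{
  unsigned constant : 1, read_only : 1, side_effects : 1;
  struct xnode *op[2];
};

/* Combine two operands the way PROCESS_ARG does: start from
   "constant, read-only, no side effects" and let each present operand
   only downgrade those assumptions.  */
static struct xnode
make_binary (struct xnode *a, struct xnode *b)
{
  struct xnode *ops[2];
  struct xnode n;
  int i;

  n.constant = 1;
  n.read_only = 1;
  n.side_effects = 0;
  n.op[0] = ops[0] = a;
  n.op[1] = ops[1] = b;

  for (i = 0; i < 2; i++)
    if (ops[i])
      {
        if (ops[i]->side_effects)
          n.side_effects = 1;
        if (!ops[i]->read_only)
          n.read_only = 0;
        if (!ops[i]->constant)
          n.constant = 0;
      }

  return n;
}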
*/ tree -build_decl (code, name, type) - enum tree_code code; - tree name, type; +build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1, + tree arg2 MEM_STAT_DECL) { + bool constant, read_only, side_effects; tree t; - t = make_node (code); + gcc_assert (TREE_CODE_LENGTH (code) == 3); + gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); -/* if (type == error_mark_node) - type = integer_type_node; */ -/* That is not done, deliberately, so that having error_mark_node - as the type can suppress useless errors in the use of this variable. */ + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; - DECL_NAME (t) = name; - TREE_TYPE (t) = type; + /* As a special exception, if COND_EXPR has NULL branches, we + assume that it is a gimple statement and always consider + it to have side effects. */ + if (code == COND_EXPR + && tt == void_type_node + && arg1 == NULL_TREE + && arg2 == NULL_TREE) + side_effects = true; + else + side_effects = TREE_SIDE_EFFECTS (t); - if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) - layout_decl (t, 0); - else if (code == FUNCTION_DECL) - DECL_MODE (t) = FUNCTION_MODE; + PROCESS_ARG(0); + PROCESS_ARG(1); + PROCESS_ARG(2); + + TREE_SIDE_EFFECTS (t) = side_effects; + TREE_THIS_VOLATILE (t) + = (TREE_CODE_CLASS (code) == tcc_reference + && arg0 && TREE_THIS_VOLATILE (arg0)); return t; } - -/* BLOCK nodes are used to represent the structure of binding contours - and declarations, once those contours have been exited and their contents - compiled. This information is used for outputting debugging info. */ tree -build_block (vars, tags, subblocks, supercontext, chain) - tree vars, tags ATTRIBUTE_UNUSED, subblocks, supercontext, chain; +build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1, + tree arg2, tree arg3 MEM_STAT_DECL) { - tree block = make_node (BLOCK); + bool constant, read_only, side_effects; + tree t; - BLOCK_VARS (block) = vars; - BLOCK_SUBBLOCKS (block) = subblocks; - BLOCK_SUPERCONTEXT (block) = supercontext; - BLOCK_CHAIN (block) = chain; - return block; + gcc_assert (TREE_CODE_LENGTH (code) == 4); + + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; + + side_effects = TREE_SIDE_EFFECTS (t); + + PROCESS_ARG(0); + PROCESS_ARG(1); + PROCESS_ARG(2); + PROCESS_ARG(3); + + TREE_SIDE_EFFECTS (t) = side_effects; + TREE_THIS_VOLATILE (t) + = (TREE_CODE_CLASS (code) == tcc_reference + && arg0 && TREE_THIS_VOLATILE (arg0)); + + return t; } -/* EXPR_WITH_FILE_LOCATION are used to keep track of the exact - location where an expression or an identifier were encountered. It - is necessary for languages where the frontend parser will handle - recursively more than one file (Java is one of them). 
*/ +tree +build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1, + tree arg2, tree arg3, tree arg4 MEM_STAT_DECL) +{ + bool constant, read_only, side_effects; + tree t; + + gcc_assert (TREE_CODE_LENGTH (code) == 5); + + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; + + side_effects = TREE_SIDE_EFFECTS (t); + + PROCESS_ARG(0); + PROCESS_ARG(1); + PROCESS_ARG(2); + PROCESS_ARG(3); + PROCESS_ARG(4); + + TREE_SIDE_EFFECTS (t) = side_effects; + TREE_THIS_VOLATILE (t) + = (TREE_CODE_CLASS (code) == tcc_reference + && arg0 && TREE_THIS_VOLATILE (arg0)); + + return t; +} tree -build_expr_wfl (node, file, line, col) - tree node; - const char *file; - int line, col; +build7_stat (enum tree_code code, tree tt, tree arg0, tree arg1, + tree arg2, tree arg3, tree arg4, tree arg5, + tree arg6 MEM_STAT_DECL) { - static const char *last_file = 0; - static tree last_filenode = NULL_TREE; - tree wfl = make_node (EXPR_WITH_FILE_LOCATION); + bool constant, read_only, side_effects; + tree t; - EXPR_WFL_NODE (wfl) = node; - EXPR_WFL_SET_LINECOL (wfl, line, col); - if (file != last_file) - { - last_file = file; - last_filenode = file ? get_identifier (file) : NULL_TREE; - } + gcc_assert (code == TARGET_MEM_REF); + + t = make_node_stat (code PASS_MEM_STAT); + TREE_TYPE (t) = tt; + + side_effects = TREE_SIDE_EFFECTS (t); + + PROCESS_ARG(0); + PROCESS_ARG(1); + PROCESS_ARG(2); + PROCESS_ARG(3); + PROCESS_ARG(4); + PROCESS_ARG(5); + PROCESS_ARG(6); + + TREE_SIDE_EFFECTS (t) = side_effects; + TREE_THIS_VOLATILE (t) = 0; + + return t; +} + +/* Similar except don't specify the TREE_TYPE + and leave the TREE_SIDE_EFFECTS as 0. + It is permissible for arguments to be null, + or even garbage if their values do not matter. */ + +tree +build_nt (enum tree_code code, ...) +{ + tree t; + int length; + int i; + va_list p; + + gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); + + va_start (p, code); + + t = make_node (code); + length = TREE_CODE_LENGTH (code); + + for (i = 0; i < length; i++) + TREE_OPERAND (t, i) = va_arg (p, tree); + + va_end (p); + return t; +} + +/* Similar to build_nt, but for creating a CALL_EXPR object with + ARGLIST passed as a list. */ + +tree +build_nt_call_list (tree fn, tree arglist) +{ + tree t; + int i; + + t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3); + CALL_EXPR_FN (t) = fn; + CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE; + for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++) + CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist); + return t; +} + +/* Create a DECL_... node of code CODE, name NAME and data type TYPE. + We do NOT enter this node in any sort of symbol table. - EXPR_WFL_FILENAME_NODE (wfl) = last_filenode; - if (node) + layout_decl is used to set up the decl's storage layout. + Other slots are initialized to 0 or null pointers. */ + +tree +build_decl_stat (enum tree_code code, tree name, tree type MEM_STAT_DECL) +{ + tree t; + + t = make_node_stat (code PASS_MEM_STAT); + +/* if (type == error_mark_node) + type = integer_type_node; */ +/* That is not done, deliberately, so that having error_mark_node + as the type can suppress useless errors in the use of this variable. */ + + DECL_NAME (t) = name; + TREE_TYPE (t) = type; + + if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) + layout_decl (t, 0); + + return t; +} + +/* Builds and returns function declaration with NAME and TYPE. 
*/ + +tree +build_fn_decl (const char *name, tree type) +{ + tree id = get_identifier (name); + tree decl = build_decl (FUNCTION_DECL, id, type); + + DECL_EXTERNAL (decl) = 1; + TREE_PUBLIC (decl) = 1; + DECL_ARTIFICIAL (decl) = 1; + TREE_NOTHROW (decl) = 1; + + return decl; +} + + +/* BLOCK nodes are used to represent the structure of binding contours + and declarations, once those contours have been exited and their contents + compiled. This information is used for outputting debugging info. */ + +tree +build_block (tree vars, tree subblocks, tree supercontext, tree chain) +{ + tree block = make_node (BLOCK); + + BLOCK_VARS (block) = vars; + BLOCK_SUBBLOCKS (block) = subblocks; + BLOCK_SUPERCONTEXT (block) = supercontext; + BLOCK_CHAIN (block) = chain; + return block; +} + +expanded_location +expand_location (source_location loc) +{ + expanded_location xloc; + if (loc == 0) { - TREE_SIDE_EFFECTS (wfl) = TREE_SIDE_EFFECTS (node); - TREE_TYPE (wfl) = TREE_TYPE (node); + xloc.file = NULL; + xloc.line = 0; + xloc.column = 0; + xloc.sysp = 0; } + else + { + const struct line_map *map = linemap_lookup (line_table, loc); + xloc.file = map->to_file; + xloc.line = SOURCE_LINE (map, loc); + xloc.column = SOURCE_COLUMN (map, loc); + xloc.sysp = map->sysp != 0; + }; + return xloc; +} + + +/* Source location accessor functions. */ + + +void +set_expr_locus (tree node, source_location *loc) +{ + if (loc == NULL) + EXPR_CHECK (node)->exp.locus = UNKNOWN_LOCATION; + else + EXPR_CHECK (node)->exp.locus = *loc; +} + +/* Like SET_EXPR_LOCATION, but make sure the tree can have a location. - return wfl; + LOC is the location to use in tree T. */ + +void protected_set_expr_location (tree t, location_t loc) +{ + if (t && CAN_HAVE_LOCATION_P (t)) + SET_EXPR_LOCATION (t, loc); } /* Return a declaration like DDECL except that its DECL_ATTRIBUTES is ATTRIBUTE. */ tree -build_decl_attribute_variant (ddecl, attribute) - tree ddecl, attribute; +build_decl_attribute_variant (tree ddecl, tree attribute) { DECL_ATTRIBUTES (ddecl) = attribute; return ddecl; } +/* Borrowed from hashtab.c iterative_hash implementation. */ +#define mix(a,b,c) \ +{ \ + a -= b; a -= c; a ^= (c>>13); \ + b -= c; b -= a; b ^= (a<< 8); \ + c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \ + a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \ + b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \ + c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \ + a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \ + b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \ + c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \ +} + + +/* Produce good hash value combining VAL and VAL2. */ +hashval_t +iterative_hash_hashval_t (hashval_t val, hashval_t val2) +{ + /* the golden ratio; an arbitrary value. */ + hashval_t a = 0x9e3779b9; + + mix (a, val, val2); + return val2; +} + +/* Produce good hash value combining PTR and VAL2. */ +static inline hashval_t +iterative_hash_pointer (const void *ptr, hashval_t val2) +{ + if (sizeof (ptr) == sizeof (hashval_t)) + return iterative_hash_hashval_t ((size_t) ptr, val2); + else + { + hashval_t a = (hashval_t) (size_t) ptr; + /* Avoid warnings about shifting of more than the width of the type on + hosts that won't execute this path. */ + int zero = 0; + hashval_t b = (hashval_t) ((size_t) ptr >> (sizeof (hashval_t) * 8 + zero)); + mix (a, b, val2); + return val2; + } +} + +/* Produce good hash value combining VAL and VAL2. 
*/ +static inline hashval_t +iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2) +{ + if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t)) + return iterative_hash_hashval_t (val, val2); + else + { + hashval_t a = (hashval_t) val; + /* Avoid warnings about shifting of more than the width of the type on + hosts that won't execute this path. */ + int zero = 0; + hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero)); + mix (a, b, val2); + if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t)) + { + hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero)); + hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero)); + mix (a, b, val2); + } + return val2; + } +} + /* Return a type like TTYPE except that its TYPE_ATTRIBUTE - is ATTRIBUTE. + is ATTRIBUTE and its qualifiers are QUALS. Record such modified types already made so we don't make duplicates. */ -tree -build_type_attribute_variant (ttype, attribute) - tree ttype, attribute; +static tree +build_type_attribute_qual_variant (tree ttype, tree attribute, int quals) { - if ( ! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute)) + if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute)) { - unsigned int hashcode; + hashval_t hashcode = 0; tree ntype; + enum tree_code code = TREE_CODE (ttype); + + /* Building a distinct copy of a tagged type is inappropriate; it + causes breakage in code that expects there to be a one-to-one + relationship between a struct and its fields. + build_duplicate_type is another solution (as used in + handle_transparent_union_attribute), but that doesn't play well + with the stronger C++ type identity model. */ + if (TREE_CODE (ttype) == RECORD_TYPE + || TREE_CODE (ttype) == UNION_TYPE + || TREE_CODE (ttype) == QUAL_UNION_TYPE + || TREE_CODE (ttype) == ENUMERAL_TYPE) + { + warning (OPT_Wattributes, + "ignoring attributes applied to %qT after definition", + TYPE_MAIN_VARIANT (ttype)); + return build_qualified_type (ttype, quals); + } - ntype = copy_node (ttype); + ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED); + ntype = build_distinct_type_copy (ttype); - TYPE_POINTER_TO (ntype) = 0; - TYPE_REFERENCE_TO (ntype) = 0; TYPE_ATTRIBUTES (ntype) = attribute; - /* Create a new main variant of TYPE. 
*/ - TYPE_MAIN_VARIANT (ntype) = ntype; - TYPE_NEXT_VARIANT (ntype) = 0; - set_type_quals (ntype, TYPE_UNQUALIFIED); - - hashcode = (TYPE_HASH (TREE_CODE (ntype)) - + TYPE_HASH (TREE_TYPE (ntype)) - + attribute_hash_list (attribute)); + hashcode = iterative_hash_object (code, hashcode); + if (TREE_TYPE (ntype)) + hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)), + hashcode); + hashcode = attribute_hash_list (attribute, hashcode); switch (TREE_CODE (ntype)) { case FUNCTION_TYPE: - hashcode += TYPE_HASH (TYPE_ARG_TYPES (ntype)); + hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode); break; case ARRAY_TYPE: - hashcode += TYPE_HASH (TYPE_DOMAIN (ntype)); + if (TYPE_DOMAIN (ntype)) + hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)), + hashcode); break; case INTEGER_TYPE: - hashcode += TYPE_HASH (TYPE_MAX_VALUE (ntype)); + hashcode = iterative_hash_object + (TREE_INT_CST_LOW (TYPE_MAX_VALUE (ntype)), hashcode); + hashcode = iterative_hash_object + (TREE_INT_CST_HIGH (TYPE_MAX_VALUE (ntype)), hashcode); break; case REAL_TYPE: - hashcode += TYPE_HASH (TYPE_PRECISION (ntype)); + case FIXED_POINT_TYPE: + { + unsigned int precision = TYPE_PRECISION (ntype); + hashcode = iterative_hash_object (precision, hashcode); + } break; default: break; } ntype = type_hash_canon (hashcode, ntype); - ttype = build_qualified_type (ntype, TYPE_QUALS (ttype)); - } - - return ttype; -} - -/* Default value of targetm.comp_type_attributes that always returns 1. */ -int -default_comp_type_attributes (type1, type2) - tree type1 ATTRIBUTE_UNUSED; - tree type2 ATTRIBUTE_UNUSED; -{ - return 1; -} + /* If the target-dependent attributes make NTYPE different from + its canonical type, we will need to use structural equality + checks for this type. */ + if (TYPE_STRUCTURAL_EQUALITY_P (ttype) + || !targetm.comp_type_attributes (ntype, ttype)) + SET_TYPE_STRUCTURAL_EQUALITY (ntype); + else if (TYPE_CANONICAL (ntype) == ntype) + TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype); -/* Default version of targetm.set_default_type_attributes that always does - nothing. */ + ttype = build_qualified_type (ntype, quals); + } + else if (TYPE_QUALS (ttype) != quals) + ttype = build_qualified_type (ttype, quals); -void -default_set_default_type_attributes (type) - tree type ATTRIBUTE_UNUSED; -{ + return ttype; } -/* Default version of targetm.insert_attributes that always does nothing. */ -void -default_insert_attributes (decl, attr_ptr) - tree decl ATTRIBUTE_UNUSED; - tree *attr_ptr ATTRIBUTE_UNUSED; -{ -} -/* Default value of targetm.attribute_table that is empty. */ -const struct attribute_spec default_target_attribute_table[] = -{ - { NULL, 0, 0, false, false, false, NULL } -}; +/* Return a type like TTYPE except that its TYPE_ATTRIBUTE + is ATTRIBUTE. -/* Default value of targetm.function_attribute_inlinable_p that always - returns false. */ -bool -default_function_attribute_inlinable_p (fndecl) - tree fndecl ATTRIBUTE_UNUSED; -{ - /* By default, functions with machine attributes cannot be inlined. */ - return false; -} + Record such modified types already made so we don't make duplicates. */ -/* Default value of targetm.ms_bitfield_layout_p that always returns - false. */ -bool -default_ms_bitfield_layout_p (record) - tree record ATTRIBUTE_UNUSED; +tree +build_type_attribute_variant (tree ttype, tree attribute) { - /* By default, GCC does not use the MS VC++ bitfield layout rules. 
*/ - return false; + return build_type_attribute_qual_variant (ttype, attribute, + TYPE_QUALS (ttype)); } -/* Return non-zero if IDENT is a valid name for attribute ATTR, +/* Return nonzero if IDENT is a valid name for attribute ATTR, or zero if not. We try both `text' and `__text__', ATTR may be either one. */ @@ -2784,31 +3796,28 @@ default_ms_bitfield_layout_p (record) `text'. One might then also require attribute lists to be stored in their canonicalized form. */ -int -is_attribute_p (attr, ident) - const char *attr; - tree ident; +static int +is_attribute_with_length_p (const char *attr, int attr_len, const_tree ident) { - int ident_len, attr_len; + int ident_len; const char *p; if (TREE_CODE (ident) != IDENTIFIER_NODE) return 0; - - if (strcmp (attr, IDENTIFIER_POINTER (ident)) == 0) - return 1; - + p = IDENTIFIER_POINTER (ident); - ident_len = strlen (p); - attr_len = strlen (attr); + ident_len = IDENTIFIER_LENGTH (ident); + + if (ident_len == attr_len + && strcmp (attr, p) == 0) + return 1; /* If ATTR is `__text__', IDENT must be `text'; and vice versa. */ if (attr[0] == '_') { - if (attr[1] != '_' - || attr[attr_len - 2] != '_' - || attr[attr_len - 1] != '_') - abort (); + gcc_assert (attr[1] == '_'); + gcc_assert (attr[attr_len - 2] == '_'); + gcc_assert (attr[attr_len - 1] == '_'); if (ident_len == attr_len - 4 && strncmp (attr + 2, p, attr_len - 4) == 0) return 1; @@ -2825,6 +3834,17 @@ is_attribute_p (attr, ident) return 0; } +/* Return nonzero if IDENT is a valid name for attribute ATTR, + or zero if not. + + We try both `text' and `__text__', ATTR may be either one. */ + +int +is_attribute_p (const char *attr, const_tree ident) +{ + return is_attribute_with_length_p (attr, strlen (attr), ident); +} + /* Given an attribute name and a list of attributes, return a pointer to the attribute's list element if the attribute is part of the list, or NULL_TREE if not found. If the attribute appears more than once, this only @@ -2832,28 +3852,46 @@ is_attribute_p (attr, ident) be passed back in if further occurrences are wanted. */ tree -lookup_attribute (attr_name, list) - const char *attr_name; - tree list; +lookup_attribute (const char *attr_name, tree list) { tree l; + size_t attr_len = strlen (attr_name); for (l = list; l; l = TREE_CHAIN (l)) { - if (TREE_CODE (TREE_PURPOSE (l)) != IDENTIFIER_NODE) - abort (); - if (is_attribute_p (attr_name, TREE_PURPOSE (l))) + gcc_assert (TREE_CODE (TREE_PURPOSE (l)) == IDENTIFIER_NODE); + if (is_attribute_with_length_p (attr_name, attr_len, TREE_PURPOSE (l))) return l; } - return NULL_TREE; } +/* Remove any instances of attribute ATTR_NAME in LIST and return the + modified list. */ + +tree +remove_attribute (const char *attr_name, tree list) +{ + tree *p; + size_t attr_len = strlen (attr_name); + + for (p = &list; *p; ) + { + tree l = *p; + gcc_assert (TREE_CODE (TREE_PURPOSE (l)) == IDENTIFIER_NODE); + if (is_attribute_with_length_p (attr_name, attr_len, TREE_PURPOSE (l))) + *p = TREE_CHAIN (l); + else + p = &TREE_CHAIN (l); + } + + return list; +} + /* Return an attribute list that is the union of a1 and a2. 
*/ tree -merge_attributes (a1, a2) - tree a1, a2; +merge_attributes (tree a1, tree a2) { tree attributes; @@ -2884,7 +3922,17 @@ merge_attributes (a1, a2) a = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (a2)), TREE_CHAIN (a))) { - if (simple_cst_equal (TREE_VALUE (a), TREE_VALUE (a2)) == 1) + if (TREE_VALUE (a) != NULL + && TREE_CODE (TREE_VALUE (a)) == TREE_LIST + && TREE_VALUE (a2) != NULL + && TREE_CODE (TREE_VALUE (a2)) == TREE_LIST) + { + if (simple_cst_list_equal (TREE_VALUE (a), + TREE_VALUE (a2)) == 1) + break; + } + else if (simple_cst_equal (TREE_VALUE (a), + TREE_VALUE (a2)) == 1) break; } if (a == NULL_TREE) @@ -2903,8 +3951,7 @@ merge_attributes (a1, a2) the result. */ tree -merge_type_attributes (t1, t2) - tree t1, t2; +merge_type_attributes (tree t1, tree t2) { return merge_attributes (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)); @@ -2914,14 +3961,13 @@ merge_type_attributes (t1, t2) the result. */ tree -merge_decl_attributes (olddecl, newdecl) - tree olddecl, newdecl; +merge_decl_attributes (tree olddecl, tree newdecl) { return merge_attributes (DECL_ATTRIBUTES (olddecl), DECL_ATTRIBUTES (newdecl)); } -#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES +#if TARGET_DLLIMPORT_DECL_ATTRIBUTES /* Specialization of merge_decl_attributes for various Windows targets. @@ -2933,36 +3979,68 @@ merge_decl_attributes (olddecl, newdecl) The second instance of `foo' nullifies the dllimport. */ tree -merge_dllimport_decl_attributes (old, new) - tree old; - tree new; +merge_dllimport_decl_attributes (tree old, tree new_tree) { tree a; - int delete_dllimport_p; - - old = DECL_ATTRIBUTES (old); - new = DECL_ATTRIBUTES (new); + int delete_dllimport_p = 1; /* What we need to do here is remove from `old' dllimport if it doesn't appear in `new'. dllimport behaves like extern: if a declaration is marked dllimport and a definition appears later, then the object - is not dllimport'd. */ - if (lookup_attribute ("dllimport", old) != NULL_TREE - && lookup_attribute ("dllimport", new) == NULL_TREE) - delete_dllimport_p = 1; + is not dllimport'd. We also remove a `new' dllimport if the old list + contains dllexport: dllexport always overrides dllimport, regardless + of the order of declaration. */ + if (!VAR_OR_FUNCTION_DECL_P (new_tree)) + delete_dllimport_p = 0; + else if (DECL_DLLIMPORT_P (new_tree) + && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old))) + { + DECL_DLLIMPORT_P (new_tree) = 0; + warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: " + "dllimport ignored", new_tree); + } + else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree)) + { + /* Warn about overriding a symbol that has already been used, e.g.: + extern int __attribute__ ((dllimport)) foo; + int* bar () {return &foo;} + int foo; + */ + if (TREE_USED (old)) + { + warning (0, "%q+D redeclared without dllimport attribute " + "after being referenced with dll linkage", new_tree); + /* If we have used a variable's address with dllimport linkage, + keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the + decl may already have had TREE_CONSTANT computed. + We still remove the attribute so that assembler code refers + to '&foo rather than '_imp__foo'. */ + if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old)) + DECL_DLLIMPORT_P (new_tree) = 1; + } + + /* Let an inline definition silently override the external reference, + but otherwise warn about attribute inconsistency. 
*/ + else if (TREE_CODE (new_tree) == VAR_DECL + || !DECL_DECLARED_INLINE_P (new_tree)) + warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: " + "previous dllimport ignored", new_tree); + } else delete_dllimport_p = 0; - a = merge_attributes (old, new); + a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree)); - if (delete_dllimport_p) + if (delete_dllimport_p) { tree prev, t; - + const size_t attr_len = strlen ("dllimport"); + /* Scan the list for dllimport and delete it. */ for (prev = NULL_TREE, t = a; t; prev = t, t = TREE_CHAIN (t)) { - if (is_attribute_p ("dllimport", TREE_PURPOSE (t))) + if (is_attribute_with_length_p ("dllimport", attr_len, + TREE_PURPOSE (t))) { if (prev == NULL_TREE) a = TREE_CHAIN (a); @@ -2976,49 +4054,188 @@ merge_dllimport_decl_attributes (old, new) return a; } -#endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */ - -/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask - of the various TYPE_QUAL values. */ - -static void -set_type_quals (type, type_quals) - tree type; - int type_quals; -{ - TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0; - TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0; - TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0; -} - -/* Return a version of the TYPE, qualified as indicated by the - TYPE_QUALS, if one exists. If no qualified version exists yet, - return NULL_TREE. */ +/* Handle a "dllimport" or "dllexport" attribute; arguments as in + struct attribute_spec.handler. */ tree -get_qualified_type (type, type_quals) - tree type; - int type_quals; +handle_dll_attribute (tree * pnode, tree name, tree args, int flags, + bool *no_add_attrs) { - tree t; + tree node = *pnode; - /* Search the chain of variants to see if there is already one there just - like the one we need to have. If so, use that existing one. We must - preserve the TYPE_NAME, since there is code that depends on this. */ - for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) - if (TYPE_QUALS (t) == type_quals && TYPE_NAME (t) == TYPE_NAME (type)) - return t; + /* These attributes may apply to structure and union types being created, + but otherwise should pass to the declaration involved. */ + if (!DECL_P (node)) + { + if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT + | (int) ATTR_FLAG_ARRAY_NEXT)) + { + *no_add_attrs = true; + return tree_cons (name, args, NULL_TREE); + } + if (TREE_CODE (node) == RECORD_TYPE + || TREE_CODE (node) == UNION_TYPE) + { + node = TYPE_NAME (node); + if (!node) + return NULL_TREE; + } + else + { + warning (OPT_Wattributes, "%qs attribute ignored", + IDENTIFIER_POINTER (name)); + *no_add_attrs = true; + return NULL_TREE; + } + } - return NULL_TREE; -} + if (TREE_CODE (node) != FUNCTION_DECL + && TREE_CODE (node) != VAR_DECL + && TREE_CODE (node) != TYPE_DECL) + { + *no_add_attrs = true; + warning (OPT_Wattributes, "%qs attribute ignored", + IDENTIFIER_POINTER (name)); + return NULL_TREE; + } -/* Like get_qualified_type, but creates the type if it does not - exist. This function never returns NULL_TREE. */ + if (TREE_CODE (node) == TYPE_DECL + && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE + && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE) + { + *no_add_attrs = true; + warning (OPT_Wattributes, "%qs attribute ignored", + IDENTIFIER_POINTER (name)); + return NULL_TREE; + } -tree -build_qualified_type (type, type_quals) - tree type; - int type_quals; + /* Report error on dllimport ambiguities seen now before they cause + any damage. 
*/ + else if (is_attribute_p ("dllimport", name)) + { + /* Honor any target-specific overrides. */ + if (!targetm.valid_dllimport_attribute_p (node)) + *no_add_attrs = true; + + else if (TREE_CODE (node) == FUNCTION_DECL + && DECL_DECLARED_INLINE_P (node)) + { + warning (OPT_Wattributes, "inline function %q+D declared as " + " dllimport: attribute ignored", node); + *no_add_attrs = true; + } + /* Like MS, treat definition of dllimported variables and + non-inlined functions on declaration as syntax errors. */ + else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node)) + { + error ("function %q+D definition is marked dllimport", node); + *no_add_attrs = true; + } + + else if (TREE_CODE (node) == VAR_DECL) + { + if (DECL_INITIAL (node)) + { + error ("variable %q+D definition is marked dllimport", + node); + *no_add_attrs = true; + } + + /* `extern' needn't be specified with dllimport. + Specify `extern' now and hope for the best. Sigh. */ + DECL_EXTERNAL (node) = 1; + /* Also, implicitly give dllimport'd variables declared within + a function global scope, unless declared static. */ + if (current_function_decl != NULL_TREE && !TREE_STATIC (node)) + TREE_PUBLIC (node) = 1; + } + + if (*no_add_attrs == false) + DECL_DLLIMPORT_P (node) = 1; + } + + /* Report error if symbol is not accessible at global scope. */ + if (!TREE_PUBLIC (node) + && (TREE_CODE (node) == VAR_DECL + || TREE_CODE (node) == FUNCTION_DECL)) + { + error ("external linkage required for symbol %q+D because of " + "%qs attribute", node, IDENTIFIER_POINTER (name)); + *no_add_attrs = true; + } + + /* A dllexport'd entity must have default visibility so that other + program units (shared libraries or the main executable) can see + it. A dllimport'd entity must have default visibility so that + the linker knows that undefined references within this program + unit can be resolved by the dynamic linker. */ + if (!*no_add_attrs) + { + if (DECL_VISIBILITY_SPECIFIED (node) + && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT) + error ("%qs implies default visibility, but %qD has already " + "been declared with a different visibility", + IDENTIFIER_POINTER (name), node); + DECL_VISIBILITY (node) = VISIBILITY_DEFAULT; + DECL_VISIBILITY_SPECIFIED (node) = 1; + } + + return NULL_TREE; +} + +#endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */ + +/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask + of the various TYPE_QUAL values. */ + +static void +set_type_quals (tree type, int type_quals) +{ + TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0; + TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0; + TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0; +} + +/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */ + +bool +check_qualified_type (const_tree cand, const_tree base, int type_quals) +{ + return (TYPE_QUALS (cand) == type_quals + && TYPE_NAME (cand) == TYPE_NAME (base) + /* Apparently this is needed for Objective-C. */ + && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base) + && attribute_list_equal (TYPE_ATTRIBUTES (cand), + TYPE_ATTRIBUTES (base))); +} + +/* Return a version of the TYPE, qualified as indicated by the + TYPE_QUALS, if one exists. If no qualified version exists yet, + return NULL_TREE. */ + +tree +get_qualified_type (tree type, int type_quals) +{ + tree t; + + if (TYPE_QUALS (type) == type_quals) + return type; + + /* Search the chain of variants to see if there is already one there just + like the one we need to have. If so, use that existing one. 
We must + preserve the TYPE_NAME, since there is code that depends on this. */ + for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) + if (check_qualified_type (t, type, type_quals)) + return t; + + return NULL_TREE; +} + +/* Like get_qualified_type, but creates the type if it does not + exist. This function never returns NULL_TREE. */ + +tree +build_qualified_type (tree type, int type_quals) { tree t; @@ -3028,34 +4245,322 @@ build_qualified_type (type, type_quals) /* If not, build it. */ if (!t) { - t = build_type_copy (type); + t = build_variant_type_copy (type); set_type_quals (t, type_quals); + + if (TYPE_STRUCTURAL_EQUALITY_P (type)) + /* Propagate structural equality. */ + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (type) != type) + /* Build the underlying canonical type, since it is different + from TYPE. */ + TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type), + type_quals); + else + /* T is its own canonical type. */ + TYPE_CANONICAL (t) = t; + } return t; } -/* Create a new variant of TYPE, equivalent but distinct. - This is so the caller can modify it. */ +/* Create a new distinct copy of TYPE. The new type is made its own + MAIN_VARIANT. If TYPE requires structural equality checks, the + resulting type requires structural equality checks; otherwise, its + TYPE_CANONICAL points to itself. */ tree -build_type_copy (type) - tree type; +build_distinct_type_copy (tree type) { - tree t, m = TYPE_MAIN_VARIANT (type); - - t = copy_node (type); - + tree t = copy_node (type); + TYPE_POINTER_TO (t) = 0; TYPE_REFERENCE_TO (t) = 0; - /* Add this type to the chain of variants of TYPE. */ + /* Set the canonical type either to a new equivalence class, or + propagate the need for structural equality checks. */ + if (TYPE_STRUCTURAL_EQUALITY_P (type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else + TYPE_CANONICAL (t) = t; + + /* Make it its own variant. */ + TYPE_MAIN_VARIANT (t) = t; + TYPE_NEXT_VARIANT (t) = 0; + + /* Note that it is now possible for TYPE_MIN_VALUE to be a value + whose TREE_TYPE is not t. This can also happen in the Ada + frontend when using subtypes. */ + + return t; +} + +/* Create a new variant of TYPE, equivalent but distinct. This is so + the caller can modify it. TYPE_CANONICAL for the return type will + be equivalent to TYPE_CANONICAL of TYPE, indicating that the types + are considered equal by the language itself (or that both types + require structural equality checks). */ + +tree +build_variant_type_copy (tree type) +{ + tree t, m = TYPE_MAIN_VARIANT (type); + + t = build_distinct_type_copy (type); + + /* Since we're building a variant, assume that it is a non-semantic + variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */ + TYPE_CANONICAL (t) = TYPE_CANONICAL (type); + + /* Add the new type to the chain of variants of TYPE. */ TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m); TYPE_NEXT_VARIANT (m) = t; + TYPE_MAIN_VARIANT (t) = m; return t; } +/* Return true if the from tree in both tree maps are equal. */ + +int +tree_map_base_eq (const void *va, const void *vb) +{ + const struct tree_map_base *const a = (const struct tree_map_base *) va, + *const b = (const struct tree_map_base *) vb; + return (a->from == b->from); +} + +/* Hash a from tree in a tree_map. */ + +unsigned int +tree_map_base_hash (const void *item) +{ + return htab_hash_pointer (((const struct tree_map_base *)item)->from); +} + +/* Return true if this tree map structure is marked for garbage collection + purposes. 
We simply return true if the from tree is marked, so that this + structure goes away when the from tree goes away. */ + +int +tree_map_base_marked_p (const void *p) +{ + return ggc_marked_p (((const struct tree_map_base *) p)->from); +} + +unsigned int +tree_map_hash (const void *item) +{ + return (((const struct tree_map *) item)->hash); +} + +/* Return the initialization priority for DECL. */ + +priority_type +decl_init_priority_lookup (tree decl) +{ + struct tree_priority_map *h; + struct tree_map_base in; + + gcc_assert (VAR_OR_FUNCTION_DECL_P (decl)); + in.from = decl; + h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in); + return h ? h->init : DEFAULT_INIT_PRIORITY; +} + +/* Return the finalization priority for DECL. */ + +priority_type +decl_fini_priority_lookup (tree decl) +{ + struct tree_priority_map *h; + struct tree_map_base in; + + gcc_assert (TREE_CODE (decl) == FUNCTION_DECL); + in.from = decl; + h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in); + return h ? h->fini : DEFAULT_INIT_PRIORITY; +} + +/* Return the initialization and finalization priority information for + DECL. If there is no previous priority information, a freshly + allocated structure is returned. */ + +static struct tree_priority_map * +decl_priority_info (tree decl) +{ + struct tree_priority_map in; + struct tree_priority_map *h; + void **loc; + + in.base.from = decl; + loc = htab_find_slot (init_priority_for_decl, &in, INSERT); + h = (struct tree_priority_map *) *loc; + if (!h) + { + h = GGC_CNEW (struct tree_priority_map); + *loc = h; + h->base.from = decl; + h->init = DEFAULT_INIT_PRIORITY; + h->fini = DEFAULT_INIT_PRIORITY; + } + + return h; +} + +/* Set the initialization priority for DECL to PRIORITY. */ + +void +decl_init_priority_insert (tree decl, priority_type priority) +{ + struct tree_priority_map *h; + + gcc_assert (VAR_OR_FUNCTION_DECL_P (decl)); + h = decl_priority_info (decl); + h->init = priority; +} + +/* Set the finalization priority for DECL to PRIORITY. */ + +void +decl_fini_priority_insert (tree decl, priority_type priority) +{ + struct tree_priority_map *h; + + gcc_assert (TREE_CODE (decl) == FUNCTION_DECL); + h = decl_priority_info (decl); + h->fini = priority; +} + +/* Look up a restrict qualified base decl for FROM. */ + +tree +decl_restrict_base_lookup (tree from) +{ + struct tree_map *h; + struct tree_map in; + + in.base.from = from; + h = (struct tree_map *) htab_find_with_hash (restrict_base_for_decl, &in, + htab_hash_pointer (from)); + return h ? h->to : NULL_TREE; +} + +/* Record the restrict qualified base TO for FROM. */ + +void +decl_restrict_base_insert (tree from, tree to) +{ + struct tree_map *h; + void **loc; + + h = GGC_NEW (struct tree_map); + h->hash = htab_hash_pointer (from); + h->base.from = from; + h->to = to; + loc = htab_find_slot_with_hash (restrict_base_for_decl, h, h->hash, INSERT); + *(struct tree_map **) loc = h; +} + +/* Print out the statistics for the DECL_DEBUG_EXPR hash table. */ + +static void +print_debug_expr_statistics (void) +{ + fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n", + (long) htab_size (debug_expr_for_decl), + (long) htab_elements (debug_expr_for_decl), + htab_collisions (debug_expr_for_decl)); +} + +/* Print out the statistics for the DECL_VALUE_EXPR hash table. 
*/ + +static void +print_value_expr_statistics (void) +{ + fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n", + (long) htab_size (value_expr_for_decl), + (long) htab_elements (value_expr_for_decl), + htab_collisions (value_expr_for_decl)); +} + +/* Print out statistics for the RESTRICT_BASE_FOR_DECL hash table, but + don't print anything if the table is empty. */ + +static void +print_restrict_base_statistics (void) +{ + if (htab_elements (restrict_base_for_decl) != 0) + fprintf (stderr, + "RESTRICT_BASE hash: size %ld, %ld elements, %f collisions\n", + (long) htab_size (restrict_base_for_decl), + (long) htab_elements (restrict_base_for_decl), + htab_collisions (restrict_base_for_decl)); +} + +/* Lookup a debug expression for FROM, and return it if we find one. */ + +tree +decl_debug_expr_lookup (tree from) +{ + struct tree_map *h, in; + in.base.from = from; + + h = (struct tree_map *) htab_find_with_hash (debug_expr_for_decl, &in, + htab_hash_pointer (from)); + if (h) + return h->to; + return NULL_TREE; +} + +/* Insert a mapping FROM->TO in the debug expression hashtable. */ + +void +decl_debug_expr_insert (tree from, tree to) +{ + struct tree_map *h; + void **loc; + + h = GGC_NEW (struct tree_map); + h->hash = htab_hash_pointer (from); + h->base.from = from; + h->to = to; + loc = htab_find_slot_with_hash (debug_expr_for_decl, h, h->hash, INSERT); + *(struct tree_map **) loc = h; +} + +/* Lookup a value expression for FROM, and return it if we find one. */ + +tree +decl_value_expr_lookup (tree from) +{ + struct tree_map *h, in; + in.base.from = from; + + h = (struct tree_map *) htab_find_with_hash (value_expr_for_decl, &in, + htab_hash_pointer (from)); + if (h) + return h->to; + return NULL_TREE; +} + +/* Insert a mapping FROM->TO in the value expression hashtable. */ + +void +decl_value_expr_insert (tree from, tree to) +{ + struct tree_map *h; + void **loc; + + h = GGC_NEW (struct tree_map); + h->hash = htab_hash_pointer (from); + h->base.from = from; + h->to = to; + loc = htab_find_slot_with_hash (value_expr_for_decl, h, h->hash, INSERT); + *(struct tree_map **) loc = h; +} + /* Hashing of types so that we don't make duplicates. The entry point is `type_hash_canon'. */ @@ -3063,58 +4568,129 @@ build_type_copy (type) with types in the TREE_VALUE slots), by adding the hash codes of the individual types. */ -unsigned int -type_hash_list (list) - tree list; +static unsigned int +type_hash_list (const_tree list, hashval_t hashcode) { - unsigned int hashcode; - tree tail; + const_tree tail; - for (hashcode = 0, tail = list; tail; tail = TREE_CHAIN (tail)) - hashcode += TYPE_HASH (TREE_VALUE (tail)); + for (tail = list; tail; tail = TREE_CHAIN (tail)) + if (TREE_VALUE (tail) != error_mark_node) + hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)), + hashcode); return hashcode; } /* These are the Hashtable callback functions. */ -/* Returns true if the types are equal. */ +/* Returns true iff the types are equivalent. 
*/ static int -type_hash_eq (va, vb) - const void *va; - const void *vb; -{ - const struct type_hash *a = va, *b = vb; - if (a->hash == b->hash - && TREE_CODE (a->type) == TREE_CODE (b->type) - && TREE_TYPE (a->type) == TREE_TYPE (b->type) - && attribute_list_equal (TYPE_ATTRIBUTES (a->type), - TYPE_ATTRIBUTES (b->type)) - && TYPE_ALIGN (a->type) == TYPE_ALIGN (b->type) - && (TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type) - || tree_int_cst_equal (TYPE_MAX_VALUE (a->type), - TYPE_MAX_VALUE (b->type))) - && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type) - || tree_int_cst_equal (TYPE_MIN_VALUE (a->type), - TYPE_MIN_VALUE (b->type))) - /* Note that TYPE_DOMAIN is TYPE_ARG_TYPES for FUNCTION_TYPE. */ - && (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type) - || (TYPE_DOMAIN (a->type) - && TREE_CODE (TYPE_DOMAIN (a->type)) == TREE_LIST - && TYPE_DOMAIN (b->type) - && TREE_CODE (TYPE_DOMAIN (b->type)) == TREE_LIST - && type_list_equal (TYPE_DOMAIN (a->type), - TYPE_DOMAIN (b->type))))) - return 1; - return 0; +type_hash_eq (const void *va, const void *vb) +{ + const struct type_hash *const a = (const struct type_hash *) va, + *const b = (const struct type_hash *) vb; + + /* First test the things that are the same for all types. */ + if (a->hash != b->hash + || TREE_CODE (a->type) != TREE_CODE (b->type) + || TREE_TYPE (a->type) != TREE_TYPE (b->type) + || !attribute_list_equal (TYPE_ATTRIBUTES (a->type), + TYPE_ATTRIBUTES (b->type)) + || TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type) + || TYPE_MODE (a->type) != TYPE_MODE (b->type) + || (TREE_CODE (a->type) != COMPLEX_TYPE + && TYPE_NAME (a->type) != TYPE_NAME (b->type))) + return 0; + + switch (TREE_CODE (a->type)) + { + case VOID_TYPE: + case COMPLEX_TYPE: + case POINTER_TYPE: + case REFERENCE_TYPE: + return 1; + + case VECTOR_TYPE: + return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type); + + case ENUMERAL_TYPE: + if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type) + && !(TYPE_VALUES (a->type) + && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST + && TYPE_VALUES (b->type) + && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST + && type_list_equal (TYPE_VALUES (a->type), + TYPE_VALUES (b->type)))) + return 0; + + /* ... fall through ... 
*/ + + case INTEGER_TYPE: + case REAL_TYPE: + case BOOLEAN_TYPE: + return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type) + || tree_int_cst_equal (TYPE_MAX_VALUE (a->type), + TYPE_MAX_VALUE (b->type))) + && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type) + || tree_int_cst_equal (TYPE_MIN_VALUE (a->type), + TYPE_MIN_VALUE (b->type)))); + + case FIXED_POINT_TYPE: + return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type); + + case OFFSET_TYPE: + return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type); + + case METHOD_TYPE: + return (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type) + && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) + || (TYPE_ARG_TYPES (a->type) + && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST + && TYPE_ARG_TYPES (b->type) + && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST + && type_list_equal (TYPE_ARG_TYPES (a->type), + TYPE_ARG_TYPES (b->type))))); + + case ARRAY_TYPE: + return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type); + + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: + return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type) + || (TYPE_FIELDS (a->type) + && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST + && TYPE_FIELDS (b->type) + && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST + && type_list_equal (TYPE_FIELDS (a->type), + TYPE_FIELDS (b->type)))); + + case FUNCTION_TYPE: + if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type) + || (TYPE_ARG_TYPES (a->type) + && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST + && TYPE_ARG_TYPES (b->type) + && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST + && type_list_equal (TYPE_ARG_TYPES (a->type), + TYPE_ARG_TYPES (b->type)))) + break; + return 0; + + default: + return 0; + } + + if (lang_hooks.types.type_hash_eq != NULL) + return lang_hooks.types.type_hash_eq (a->type, b->type); + + return 1; } /* Return the cached hash value. */ -static unsigned int -type_hash_hash (item) - const void *item; +static hashval_t +type_hash_hash (const void *item) { return ((const struct type_hash *) item)->hash; } @@ -3123,9 +4699,7 @@ type_hash_hash (item) If one is found, return it. Otherwise return 0. */ tree -type_hash_lookup (hashcode, type) - unsigned int hashcode; - tree type; +type_hash_lookup (hashval_t hashcode, tree type) { struct type_hash *h, in; @@ -3136,7 +4710,8 @@ type_hash_lookup (hashcode, type) in.hash = hashcode; in.type = type; - h = htab_find_with_hash (type_hash_table, &in, hashcode); + h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in, + hashcode); if (h) return h->type; return NULL_TREE; @@ -3146,42 +4721,37 @@ type_hash_lookup (hashcode, type) for a type TYPE whose hash code is HASHCODE. */ void -type_hash_add (hashcode, type) - unsigned int hashcode; - tree type; +type_hash_add (hashval_t hashcode, tree type) { struct type_hash *h; void **loc; - h = (struct type_hash *) ggc_alloc (sizeof (struct type_hash)); + h = GGC_NEW (struct type_hash); h->hash = hashcode; h->type = type; loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT); - *(struct type_hash **) loc = h; + *loc = (void *)h; } /* Given TYPE, and HASHCODE its hash code, return the canonical object for an identical type if one already exists. - Otherwise, return TYPE, and record it as the canonical object - if it is a permanent object. + Otherwise, return TYPE, and record it as the canonical object. To use this function, first create a type of the sort you want. 
Then compute its hash code from the fields of the type that make it different from other similar types. - Then call this function and use the value. - This function frees the type you pass in if it is a duplicate. */ - -/* Set to 1 to debug without canonicalization. Never set by program. */ -int debug_no_type_hash = 0; + Then call this function and use the value. */ tree -type_hash_canon (hashcode, type) - unsigned int hashcode; - tree type; +type_hash_canon (unsigned int hashcode, tree type) { tree t1; - if (debug_no_type_hash) + /* The hash table only contains main variants, so ensure that's what we're + being passed. */ + gcc_assert (TYPE_MAIN_VARIANT (type) == type); + + if (!lang_hooks.types.hash_types) return type; /* See if the type is in the hash table already. If so, return it. @@ -3209,51 +4779,15 @@ type_hash_canon (hashcode, type) the number of garbage collections. */ static int -type_hash_marked_p (p) - const void *p; +type_hash_marked_p (const void *p) { - tree type = ((struct type_hash *) p)->type; + const_tree const type = ((const struct type_hash *) p)->type; return ggc_marked_p (type) || TYPE_SYMTAB_POINTER (type); } -/* Mark the entry in the type hash table the type it points to is marked. - Also mark the type in case we are considering this entry "marked" by - virtue of TYPE_SYMTAB_POINTER being set. */ - -static void -type_hash_mark (p) - const void *p; -{ - ggc_mark (p); - ggc_mark_tree (((struct type_hash *) p)->type); -} - -/* Mark the hashtable slot pointed to by ENTRY (which is really a - `tree**') for GC. */ - -static int -mark_tree_hashtable_entry (entry, data) - void **entry; - void *data ATTRIBUTE_UNUSED; -{ - ggc_mark_tree ((tree) *entry); - return 1; -} - -/* Mark ARG (which is really a htab_t whose slots are trees) for - GC. */ - -void -mark_tree_hashtable (arg) - void *arg; -{ - htab_t t = *(htab_t *) arg; - htab_traverse (t, mark_tree_hashtable_entry, 0); -} - static void -print_type_hash_statistics () +print_type_hash_statistics (void) { fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n", (long) htab_size (type_hash_table), @@ -3265,16 +4799,15 @@ print_type_hash_statistics () with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots), by adding the hash codes of the individual attributes. */ -unsigned int -attribute_hash_list (list) - tree list; +static unsigned int +attribute_hash_list (const_tree list, hashval_t hashcode) { - unsigned int hashcode; - tree tail; + const_tree tail; - for (hashcode = 0, tail = list; tail; tail = TREE_CHAIN (tail)) + for (tail = list; tail; tail = TREE_CHAIN (tail)) /* ??? Do we want to add in TREE_VALUE too? */ - hashcode += TYPE_HASH (TREE_PURPOSE (tail)); + hashcode = iterative_hash_object + (IDENTIFIER_HASH_VALUE (TREE_PURPOSE (tail)), hashcode); return hashcode; } @@ -3282,11 +4815,10 @@ attribute_hash_list (list) equivalent to l1. */ int -attribute_list_equal (l1, l2) - tree l1, l2; +attribute_list_equal (const_tree l1, const_tree l2) { - return attribute_list_contained (l1, l2) - && attribute_list_contained (l2, l1); + return attribute_list_contained (l1, l2) + && attribute_list_contained (l2, l1); } /* Given two lists of attributes, return true if list L2 is @@ -3298,10 +4830,9 @@ attribute_list_equal (l1, l2) correctly. */ int -attribute_list_contained (l1, l2) - tree l1, l2; +attribute_list_contained (const_tree l1, const_tree l2) { - tree t1, t2; + const_tree t1, t2; /* First check the obvious, maybe the lists are identical. 
*/ if (l1 == l2) @@ -3320,21 +4851,31 @@ attribute_list_contained (l1, l2) for (; t2 != 0; t2 = TREE_CHAIN (t2)) { - tree attr; - for (attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)), l1); + const_tree attr; + /* This CONST_CAST is okay because lookup_attribute does not + modify its argument and the return value is assigned to a + const_tree. */ + for (attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)), + CONST_CAST_TREE(l1)); attr != NULL_TREE; attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)), TREE_CHAIN (attr))) { - if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) == 1) + if (TREE_VALUE (t2) != NULL + && TREE_CODE (TREE_VALUE (t2)) == TREE_LIST + && TREE_VALUE (attr) != NULL + && TREE_CODE (TREE_VALUE (attr)) == TREE_LIST) + { + if (simple_cst_list_equal (TREE_VALUE (t2), + TREE_VALUE (attr)) == 1) + break; + } + else if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) == 1) break; } if (attr == 0) return 0; - - if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) != 1) - return 0; } return 1; @@ -3346,10 +4887,9 @@ attribute_list_contained (l1, l2) Also, the TREE_PURPOSEs must match. */ int -type_list_equal (l1, l2) - tree l1, l2; +type_list_equal (const_tree l1, const_tree l2) { - tree t1, t2; + const_tree t1, t2; for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) if (TREE_VALUE (t1) != TREE_VALUE (t2) @@ -3367,8 +4907,7 @@ type_list_equal (l1, l2) then this function counts only the ordinary arguments. */ int -type_num_arguments (type) - tree type; +type_num_arguments (const_tree type) { int i = 0; tree t; @@ -3388,8 +4927,7 @@ type_num_arguments (type) represent the same constant value. */ int -tree_int_cst_equal (t1, t2) - tree t1, t2; +tree_int_cst_equal (const_tree t1, const_tree t2) { if (t1 == t2) return 1; @@ -3410,13 +4948,12 @@ tree_int_cst_equal (t1, t2) The precise way of comparison depends on their data type. */ int -tree_int_cst_lt (t1, t2) - tree t1, t2; +tree_int_cst_lt (const_tree t1, const_tree t2) { if (t1 == t2) return 0; - if (TREE_UNSIGNED (TREE_TYPE (t1)) != TREE_UNSIGNED (TREE_TYPE (t2))) + if (TYPE_UNSIGNED (TREE_TYPE (t1)) != TYPE_UNSIGNED (TREE_TYPE (t2))) { int t1_sgn = tree_int_cst_sgn (t1); int t2_sgn = tree_int_cst_sgn (t2); @@ -3429,7 +4966,7 @@ tree_int_cst_lt (t1, t2) unsigned just in case one of them would overflow a signed type. */ } - else if (! TREE_UNSIGNED (TREE_TYPE (t1))) + else if (!TYPE_UNSIGNED (TREE_TYPE (t1))) return INT_CST_LT (t1, t2); return INT_CST_LT_UNSIGNED (t1, t2); @@ -3438,58 +4975,50 @@ tree_int_cst_lt (t1, t2) /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */ int -tree_int_cst_compare (t1, t2) - tree t1; - tree t2; +tree_int_cst_compare (const_tree t1, const_tree t2) { if (tree_int_cst_lt (t1, t2)) return -1; else if (tree_int_cst_lt (t2, t1)) return 1; - else + else return 0; } /* Return 1 if T is an INTEGER_CST that can be manipulated efficiently on the host. If POS is zero, the value can be represented in a single - HOST_WIDE_INT. If POS is nonzero, the value must be positive and can + HOST_WIDE_INT. If POS is nonzero, the value must be non-negative and can be represented in a single unsigned HOST_WIDE_INT. */ int -host_integerp (t, pos) - tree t; - int pos; +host_integerp (const_tree t, int pos) { return (TREE_CODE (t) == INTEGER_CST - && ! TREE_OVERFLOW (t) && ((TREE_INT_CST_HIGH (t) == 0 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0) || (! pos && TREE_INT_CST_HIGH (t) == -1 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0 - && ! 
TREE_UNSIGNED (TREE_TYPE (t))) + && (!TYPE_UNSIGNED (TREE_TYPE (t)) + || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (TREE_TYPE (t))))) || (pos && TREE_INT_CST_HIGH (t) == 0))); } /* Return the HOST_WIDE_INT least significant bits of T if it is an INTEGER_CST and there is no overflow. POS is nonzero if the result must - be positive. Abort if we cannot satisfy the above conditions. */ + be non-negative. We must be able to satisfy the above conditions. */ HOST_WIDE_INT -tree_low_cst (t, pos) - tree t; - int pos; +tree_low_cst (const_tree t, int pos) { - if (host_integerp (t, pos)) - return TREE_INT_CST_LOW (t); - else - abort (); + gcc_assert (host_integerp (t, pos)); + return TREE_INT_CST_LOW (t); } /* Return the most significant bit of the integer constant T. */ int -tree_int_cst_msb (t) - tree t; +tree_int_cst_msb (const_tree t) { int prec; HOST_WIDE_INT h; @@ -3505,15 +5034,14 @@ tree_int_cst_msb (t) /* Return an indication of the sign of the integer constant T. The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0. - Note that -1 will never be returned it T's type is unsigned. */ + Note that -1 will never be returned if T's type is unsigned. */ int -tree_int_cst_sgn (t) - tree t; +tree_int_cst_sgn (const_tree t) { if (TREE_INT_CST_LOW (t) == 0 && TREE_INT_CST_HIGH (t) == 0) return 0; - else if (TREE_UNSIGNED (TREE_TYPE (t))) + else if (TYPE_UNSIGNED (TREE_TYPE (t))) return 1; else if (TREE_INT_CST_HIGH (t) < 0) return -1; @@ -3521,12 +5049,38 @@ tree_int_cst_sgn (t) return 1; } +/* Return the minimum number of bits needed to represent VALUE in a + signed or unsigned type, UNSIGNEDP says which. */ + +unsigned int +tree_int_cst_min_precision (tree value, bool unsignedp) +{ + int log; + + /* If the value is negative, compute its negative minus 1. The latter + adjustment is because the absolute value of the largest negative value + is one larger than the largest positive value. This is equivalent to + a bit-wise negation, so use that operation instead. */ + + if (tree_int_cst_sgn (value) < 0) + value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value); + + /* Return the number of bits needed, taking into account the fact + that we need one more bit for a signed than unsigned type. */ + + if (integer_zerop (value)) + log = 0; + else + log = tree_floor_log2 (value); + + return log + 1 + !unsignedp; +} + /* Compare two constructor-element-type constants. Return 1 if the lists are known to be equal; otherwise return 0. */ int -simple_cst_list_equal (l1, l2) - tree l1, l2; +simple_cst_list_equal (const_tree l1, const_tree l2) { while (l1 != NULL_TREE && l2 != NULL_TREE) { @@ -3547,8 +5101,7 @@ simple_cst_list_equal (l1, l2) this function. 
*/ int -simple_cst_equal (t1, t2) - tree t1, t2; +simple_cst_equal (const_tree t1, const_tree t2) { enum tree_code code1, code2; int cmp; @@ -3562,16 +5115,16 @@ simple_cst_equal (t1, t2) code1 = TREE_CODE (t1); code2 = TREE_CODE (t2); - if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR) + if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR) { - if (code2 == NOP_EXPR || code2 == CONVERT_EXPR + if (CONVERT_EXPR_CODE_P (code2) || code2 == NON_LVALUE_EXPR) return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); else return simple_cst_equal (TREE_OPERAND (t1, 0), t2); } - else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR + else if (CONVERT_EXPR_CODE_P (code2) || code2 == NON_LVALUE_EXPR) return simple_cst_equal (t1, TREE_OPERAND (t2, 0)); @@ -3587,26 +5140,55 @@ simple_cst_equal (t1, t2) case REAL_CST: return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2)); + case FIXED_CST: + return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); + case STRING_CST: return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), TREE_STRING_LENGTH (t1))); case CONSTRUCTOR: - if (CONSTRUCTOR_ELTS (t1) == CONSTRUCTOR_ELTS (t2)) - return 1; - else - abort (); + { + unsigned HOST_WIDE_INT idx; + VEC(constructor_elt, gc) *v1 = CONSTRUCTOR_ELTS (t1); + VEC(constructor_elt, gc) *v2 = CONSTRUCTOR_ELTS (t2); + + if (VEC_length (constructor_elt, v1) != VEC_length (constructor_elt, v2)) + return false; + + for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx) + /* ??? Should we handle also fields here? */ + if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx)->value, + VEC_index (constructor_elt, v2, idx)->value)) + return false; + return true; + } case SAVE_EXPR: return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); case CALL_EXPR: - cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); + cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)); if (cmp <= 0) return cmp; - return - simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); + if (call_expr_nargs (t1) != call_expr_nargs (t2)) + return 0; + { + const_tree arg1, arg2; + const_call_expr_arg_iterator iter1, iter2; + for (arg1 = first_const_call_expr_arg (t1, &iter1), + arg2 = first_const_call_expr_arg (t2, &iter2); + arg1 && arg2; + arg1 = next_const_call_expr_arg (&iter1), + arg2 = next_const_call_expr_arg (&iter2)) + { + cmp = simple_cst_equal (arg1, arg2); + if (cmp <= 0) + return cmp; + } + return arg1 == arg2; + } case TARGET_EXPR: /* Special case: if either target is an unallocated VAR_DECL, @@ -3660,12 +5242,12 @@ simple_cst_equal (t1, t2) switch (TREE_CODE_CLASS (code1)) { - case '1': - case '2': - case '<': - case 'e': - case 'r': - case 's': + case tcc_unary: + case tcc_binary: + case tcc_comparison: + case tcc_expression: + case tcc_reference: + case tcc_statement: cmp = 1; for (i = 0; i < TREE_CODE_LENGTH (code1); i++) { @@ -3686,9 +5268,7 @@ simple_cst_equal (t1, t2) than U, respectively. */ int -compare_tree_int (t, u) - tree t; - unsigned HOST_WIDE_INT u; +compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u) { if (tree_int_cst_sgn (t) < 0) return -1; @@ -3701,82 +5281,366 @@ compare_tree_int (t, u) else return 1; } + +/* Return true if CODE represents an associative tree code. Otherwise + return false. 
*/ +bool +associative_tree_code (enum tree_code code) +{ + switch (code) + { + case BIT_IOR_EXPR: + case BIT_AND_EXPR: + case BIT_XOR_EXPR: + case PLUS_EXPR: + case MULT_EXPR: + case MIN_EXPR: + case MAX_EXPR: + return true; + + default: + break; + } + return false; +} + +/* Return true if CODE represents a commutative tree code. Otherwise + return false. */ +bool +commutative_tree_code (enum tree_code code) +{ + switch (code) + { + case PLUS_EXPR: + case MULT_EXPR: + case MIN_EXPR: + case MAX_EXPR: + case BIT_IOR_EXPR: + case BIT_XOR_EXPR: + case BIT_AND_EXPR: + case NE_EXPR: + case EQ_EXPR: + case UNORDERED_EXPR: + case ORDERED_EXPR: + case UNEQ_EXPR: + case LTGT_EXPR: + case TRUTH_AND_EXPR: + case TRUTH_XOR_EXPR: + case TRUTH_OR_EXPR: + return true; + + default: + break; + } + return false; +} + +/* Generate a hash value for an expression. This can be used iteratively + by passing a previous result as the VAL argument. + + This function is intended to produce the same hash for expressions which + would compare equal using operand_equal_p. */ + +hashval_t +iterative_hash_expr (const_tree t, hashval_t val) +{ + int i; + enum tree_code code; + char tclass; + + if (t == NULL_TREE) + return iterative_hash_pointer (t, val); + + code = TREE_CODE (t); + + switch (code) + { + /* Alas, constants aren't shared, so we can't rely on pointer + identity. */ + case INTEGER_CST: + val = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), val); + return iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), val); + case REAL_CST: + { + unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t)); + + return iterative_hash_hashval_t (val2, val); + } + case FIXED_CST: + { + unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t)); + + return iterative_hash_hashval_t (val2, val); + } + case STRING_CST: + return iterative_hash (TREE_STRING_POINTER (t), + TREE_STRING_LENGTH (t), val); + case COMPLEX_CST: + val = iterative_hash_expr (TREE_REALPART (t), val); + return iterative_hash_expr (TREE_IMAGPART (t), val); + case VECTOR_CST: + return iterative_hash_expr (TREE_VECTOR_CST_ELTS (t), val); + + case SSA_NAME: + /* we can just compare by pointer. */ + return iterative_hash_pointer (t, val); + + case TREE_LIST: + /* A list of expressions, for a CALL_EXPR or as the elements of a + VECTOR_CST. */ + for (; t; t = TREE_CHAIN (t)) + val = iterative_hash_expr (TREE_VALUE (t), val); + return val; + case CONSTRUCTOR: + { + unsigned HOST_WIDE_INT idx; + tree field, value; + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value) + { + val = iterative_hash_expr (field, val); + val = iterative_hash_expr (value, val); + } + return val; + } + case FUNCTION_DECL: + /* When referring to a built-in FUNCTION_DECL, use the + __builtin__ form. Otherwise nodes that compare equal + according to operand_equal_p might get different + hash codes. */ + if (DECL_BUILT_IN (t)) + { + val = iterative_hash_pointer (built_in_decls[DECL_FUNCTION_CODE (t)], + val); + return val; + } + /* else FALL THROUGH */ + default: + tclass = TREE_CODE_CLASS (code); + + if (tclass == tcc_declaration) + { + /* DECL's have a unique ID */ + val = iterative_hash_host_wide_int (DECL_UID (t), val); + } + else + { + gcc_assert (IS_EXPR_CODE_CLASS (tclass)); + + val = iterative_hash_object (code, val); + + /* Don't hash the type, that can lead to having nodes which + compare equal according to operand_equal_p, but which + have different hash codes. 
*/ + if (CONVERT_EXPR_CODE_P (code) + || code == NON_LVALUE_EXPR) + { + /* Make sure to include signness in the hash computation. */ + val += TYPE_UNSIGNED (TREE_TYPE (t)); + val = iterative_hash_expr (TREE_OPERAND (t, 0), val); + } + + else if (commutative_tree_code (code)) + { + /* It's a commutative expression. We want to hash it the same + however it appears. We do this by first hashing both operands + and then rehashing based on the order of their independent + hashes. */ + hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0); + hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0); + hashval_t t; + + if (one > two) + t = one, one = two, two = t; + + val = iterative_hash_hashval_t (one, val); + val = iterative_hash_hashval_t (two, val); + } + else + for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i) + val = iterative_hash_expr (TREE_OPERAND (t, i), val); + } + return val; + break; + } +} + +/* Generate a hash value for a pair of expressions. This can be used + iteratively by passing a previous result as the VAL argument. + + The same hash value is always returned for a given pair of expressions, + regardless of the order in which they are presented. This is useful in + hashing the operands of commutative functions. */ + +hashval_t +iterative_hash_exprs_commutative (const_tree t1, + const_tree t2, hashval_t val) +{ + hashval_t one = iterative_hash_expr (t1, 0); + hashval_t two = iterative_hash_expr (t2, 0); + hashval_t t; + + if (one > two) + t = one, one = two, two = t; + val = iterative_hash_hashval_t (one, val); + val = iterative_hash_hashval_t (two, val); + + return val; +} /* Constructors for pointer, array and function types. (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are constructed by language-dependent code, not here.) */ -/* Construct, lay out and return the type of pointers to TO_TYPE. - If such a type has already been constructed, reuse it. */ +/* Construct, lay out and return the type of pointers to TO_TYPE with + mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can + reference all of memory. If such a type has already been + constructed, reuse it. */ tree -build_pointer_type (to_type) - tree to_type; +build_pointer_type_for_mode (tree to_type, enum machine_mode mode, + bool can_alias_all) { - tree t = TYPE_POINTER_TO (to_type); + tree t; - /* First, if we already have a type for pointers to TO_TYPE, use it. */ + if (to_type == error_mark_node) + return error_mark_node; - if (t != 0) - return t; + /* If the pointed-to type has the may_alias attribute set, force + a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ + if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) + can_alias_all = true; + + /* In some cases, languages will have things that aren't a POINTER_TYPE + (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO. + In that case, return that type without regard to the rest of our + operands. + + ??? This is a kludge, but consistent with the way this function has + always operated and there doesn't seem to be a good way to avoid this + at the moment. */ + if (TYPE_POINTER_TO (to_type) != 0 + && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE) + return TYPE_POINTER_TO (to_type); + + /* First, if we already have a type for pointers to TO_TYPE and it's + the proper mode, use it. */ + for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t)) + if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) + return t; - /* We need a new one. 
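/* Editor's note (standalone analogue, not part of the patch): the
   commutative branch of iterative_hash_expr and
   iterative_hash_exprs_commutative above hash each operand on its own
   and then feed the two sub-hashes in sorted order, so that "a OP b"
   and "b OP a" always receive the same hash value.  */
static unsigned
hash_commutative_pair (unsigned ha, unsigned hb, unsigned val)
{
  if (ha > hb)
    {
      unsigned tmp = ha;
      ha = hb;
      hb = tmp;
    }
  /* The order-sensitive mixing step now always sees a canonical order.  */
  val = (val ^ ha) * 0x9e3779b1u;
  val = (val ^ hb) * 0x9e3779b1u;
  return val;
}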
*/ t = make_node (POINTER_TYPE); TREE_TYPE (t) = to_type; - - /* Record this type as the pointer to TO_TYPE. */ + SET_TYPE_MODE (t, mode); + TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; + TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type); TYPE_POINTER_TO (to_type) = t; + if (TYPE_STRUCTURAL_EQUALITY_P (to_type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (to_type) != to_type) + TYPE_CANONICAL (t) + = build_pointer_type_for_mode (TYPE_CANONICAL (to_type), + mode, can_alias_all); + /* Lay out the type. This function has many callers that are concerned - with expression-construction, and this simplifies them all. - Also, it guarantees the TYPE_SIZE is in the same obstack as the type. */ + with expression-construction, and this simplifies them all. */ layout_type (t); return t; } -/* Build the node for the type of references-to-TO_TYPE. */ +/* By default build pointers in ptr_mode. */ tree -build_reference_type (to_type) - tree to_type; +build_pointer_type (tree to_type) { - tree t = TYPE_REFERENCE_TO (to_type); + return build_pointer_type_for_mode (to_type, ptr_mode, false); +} - /* First, if we already have a type for pointers to TO_TYPE, use it. */ +/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */ - if (t) - return t; +tree +build_reference_type_for_mode (tree to_type, enum machine_mode mode, + bool can_alias_all) +{ + tree t; + + if (to_type == error_mark_node) + return error_mark_node; + + /* If the pointed-to type has the may_alias attribute set, force + a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */ + if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type))) + can_alias_all = true; + + /* In some cases, languages will have things that aren't a REFERENCE_TYPE + (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO. + In that case, return that type without regard to the rest of our + operands. + + ??? This is a kludge, but consistent with the way this function has + always operated and there doesn't seem to be a good way to avoid this + at the moment. */ + if (TYPE_REFERENCE_TO (to_type) != 0 + && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE) + return TYPE_REFERENCE_TO (to_type); + + /* First, if we already have a type for pointers to TO_TYPE and it's + the proper mode, use it. */ + for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t)) + if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all) + return t; - /* We need a new one. */ t = make_node (REFERENCE_TYPE); TREE_TYPE (t) = to_type; - - /* Record this type as the pointer to TO_TYPE. */ + SET_TYPE_MODE (t, mode); + TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all; + TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type); TYPE_REFERENCE_TO (to_type) = t; + if (TYPE_STRUCTURAL_EQUALITY_P (to_type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (to_type) != to_type) + TYPE_CANONICAL (t) + = build_reference_type_for_mode (TYPE_CANONICAL (to_type), + mode, can_alias_all); + layout_type (t); return t; } + +/* Build the node for the type of references-to-TO_TYPE by default + in ptr_mode. */ + +tree +build_reference_type (tree to_type) +{ + return build_reference_type_for_mode (to_type, ptr_mode, false); +} + /* Build a type that is compatible with t but has no cv quals anywhere in its type, thus const char *const *const * -> char ***. 
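/* Editor's note (toy model with invented names, not part of the patch):
   build_pointer_type_for_mode and build_reference_type_for_mode above
   cache pointer variants on a chain hanging off the pointee, keyed by
   (mode, can_alias_all).  The lookup loop is a plain linked-list search
   that falls back to building a new node and prepending it.  */

#include <stdlib.h>

struct ptr_variant
{
  int mode;
  int can_alias_all;
  struct ptr_variant *next;       /* plays the role of TYPE_NEXT_PTR_TO */
};

struct pointee
{
  struct ptr_variant *pointer_to; /* plays the role of TYPE_POINTER_TO */
};

static struct ptr_variant *
get_pointer_variant (struct pointee *to, int mode, int can_alias_all)
{
  struct ptr_variant *p;

  /* Reuse an existing variant with the same key, if any.  */
  for (p = to->pointer_to; p; p = p->next)
    if (p->mode == mode && p->can_alias_all == can_alias_all)
      return p;

  /* Otherwise build one and make it the new head of the chain.  */
  p = (struct ptr_variant *) calloc (1, sizeof *p);
  if (!p)
    abort ();
  p->mode = mode;
  p->can_alias_all = can_alias_all;
  p->next = to->pointer_to;
  to->pointer_to = p;
  return p;
}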
*/ tree -build_type_no_quals (t) - tree t; +build_type_no_quals (tree t) { switch (TREE_CODE (t)) { case POINTER_TYPE: - return build_pointer_type (build_type_no_quals (TREE_TYPE (t))); + return build_pointer_type_for_mode (build_type_no_quals (TREE_TYPE (t)), + TYPE_MODE (t), + TYPE_REF_CAN_ALIAS_ALL (t)); case REFERENCE_TYPE: - return build_reference_type (build_type_no_quals (TREE_TYPE (t))); + return + build_reference_type_for_mode (build_type_no_quals (TREE_TYPE (t)), + TYPE_MODE (t), + TYPE_REF_CAN_ALIAS_ALL (t)); default: return TYPE_MAIN_VARIANT (t); } @@ -3792,16 +5656,15 @@ build_type_no_quals (t) sizes that use more than one HOST_WIDE_INT. */ tree -build_index_type (maxval) - tree maxval; +build_index_type (tree maxval) { tree itype = make_node (INTEGER_TYPE); TREE_TYPE (itype) = sizetype; TYPE_PRECISION (itype) = TYPE_PRECISION (sizetype); TYPE_MIN_VALUE (itype) = size_zero_node; - TYPE_MAX_VALUE (itype) = convert (sizetype, maxval); - TYPE_MODE (itype) = TYPE_MODE (sizetype); + TYPE_MAX_VALUE (itype) = fold_convert (sizetype, maxval); + SET_TYPE_MODE (itype, TYPE_MODE (sizetype)); TYPE_SIZE (itype) = TYPE_SIZE (sizetype); TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (sizetype); TYPE_ALIGN (itype) = TYPE_ALIGN (sizetype); @@ -3810,17 +5673,42 @@ build_index_type (maxval) if (host_integerp (maxval, 1)) return type_hash_canon (tree_low_cst (maxval, 1), itype); else - return itype; + { + /* Since we cannot hash this type, we need to compare it using + structural equality checks. */ + SET_TYPE_STRUCTURAL_EQUALITY (itype); + return itype; + } +} + +/* Builds a signed or unsigned integer type of precision PRECISION. + Used for C bitfields whose precision does not match that of + built-in target types. */ +tree +build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision, + int unsignedp) +{ + tree itype = make_node (INTEGER_TYPE); + + TYPE_PRECISION (itype) = precision; + + if (unsignedp) + fixup_unsigned_type (itype); + else + fixup_signed_type (itype); + + if (host_integerp (TYPE_MAX_VALUE (itype), 1)) + return type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype); + + return itype; } /* Create a range of some discrete type TYPE (an INTEGER_TYPE, - ENUMERAL_TYPE, BOOLEAN_TYPE, or CHAR_TYPE), with - low bound LOWVAL and high bound HIGHVAL. - if TYPE==NULL_TREE, sizetype is used. */ + ENUMERAL_TYPE or BOOLEAN_TYPE), with low bound LOWVAL and + high bound HIGHVAL. If TYPE is NULL, sizetype is used. */ tree -build_range_type (type, lowval, highval) - tree type, lowval, highval; +build_range_type (tree type, tree lowval, tree highval) { tree itype = make_node (INTEGER_TYPE); @@ -3828,11 +5716,11 @@ build_range_type (type, lowval, highval) if (type == NULL_TREE) type = sizetype; - TYPE_MIN_VALUE (itype) = convert (type, lowval); - TYPE_MAX_VALUE (itype) = highval ? convert (type, highval) : NULL; + TYPE_MIN_VALUE (itype) = fold_convert (type, lowval); + TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL; TYPE_PRECISION (itype) = TYPE_PRECISION (type); - TYPE_MODE (itype) = TYPE_MODE (type); + SET_TYPE_MODE (itype, TYPE_MODE (type)); TYPE_SIZE (itype) = TYPE_SIZE (type); TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type); TYPE_ALIGN (itype) = TYPE_ALIGN (type); @@ -3850,52 +5738,20 @@ build_range_type (type, lowval, highval) of just highval (maxval). 
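/* Editor's note (arithmetic illustration, not part of the patch): the
   fixup_signed_type / fixup_unsigned_type calls in
   build_nonstandard_integer_type above give an N-bit type the usual
   two's-complement bounds -- [0, 2^N - 1] when unsigned, and
   [-2^(N-1), 2^(N-1) - 1] when signed.  For example a 5-bit unsigned
   bit-field spans 0..31 and a 5-bit signed one spans -16..15.  */

#include <stdio.h>

static void
print_nbit_bounds (unsigned n, int unsignedp)  /* assumes 1 <= n <= 63 */
{
  if (unsignedp)
    printf ("%u-bit unsigned: [0, %llu]\n", n, (1ULL << n) - 1);
  else
    printf ("%u-bit signed:   [%lld, %lld]\n", n,
            -(1LL << (n - 1)), (1LL << (n - 1)) - 1);
}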
*/ tree -build_index_2_type (lowval, highval) - tree lowval, highval; +build_index_2_type (tree lowval, tree highval) { return build_range_type (sizetype, lowval, highval); } -/* Return nonzero iff ITYPE1 and ITYPE2 are equal (in the LISP sense). - Needed because when index types are not hashed, equal index types - built at different times appear distinct, even though structurally, - they are not. */ - -int -index_type_equal (itype1, itype2) - tree itype1, itype2; -{ - if (TREE_CODE (itype1) != TREE_CODE (itype2)) - return 0; - - if (TREE_CODE (itype1) == INTEGER_TYPE) - { - if (TYPE_PRECISION (itype1) != TYPE_PRECISION (itype2) - || TYPE_MODE (itype1) != TYPE_MODE (itype2) - || simple_cst_equal (TYPE_SIZE (itype1), TYPE_SIZE (itype2)) != 1 - || TYPE_ALIGN (itype1) != TYPE_ALIGN (itype2)) - return 0; - - if (1 == simple_cst_equal (TYPE_MIN_VALUE (itype1), - TYPE_MIN_VALUE (itype2)) - && 1 == simple_cst_equal (TYPE_MAX_VALUE (itype1), - TYPE_MAX_VALUE (itype2))) - return 1; - } - - return 0; -} - /* Construct, lay out and return the type of arrays of elements with ELT_TYPE and number of elements specified by the range of values of INDEX_TYPE. If such a type has already been constructed, reuse it. */ tree -build_array_type (elt_type, index_type) - tree elt_type, index_type; +build_array_type (tree elt_type, tree index_type) { tree t; - unsigned int hashcode; + hashval_t hashcode = 0; if (TREE_CODE (elt_type) == FUNCTION_TYPE) { @@ -3903,43 +5759,136 @@ build_array_type (elt_type, index_type) elt_type = integer_type_node; } - /* Make sure TYPE_POINTER_TO (elt_type) is filled in. */ - build_pointer_type (elt_type); - - /* Allocate the array after the pointer type, - in case we free it in type_hash_canon. */ t = make_node (ARRAY_TYPE); TREE_TYPE (t) = elt_type; TYPE_DOMAIN (t) = index_type; - + if (index_type == 0) { + tree save = t; + hashcode = iterative_hash_object (TYPE_HASH (elt_type), hashcode); + t = type_hash_canon (hashcode, t); + if (save == t) + layout_type (t); + + if (TYPE_CANONICAL (t) == t) + { + if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (elt_type) != elt_type) + TYPE_CANONICAL (t) + = build_array_type (TYPE_CANONICAL (elt_type), index_type); + } + return t; } - hashcode = TYPE_HASH (elt_type) + TYPE_HASH (index_type); + hashcode = iterative_hash_object (TYPE_HASH (elt_type), hashcode); + hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode); t = type_hash_canon (hashcode, t); if (!COMPLETE_TYPE_P (t)) layout_type (t); + + if (TYPE_CANONICAL (t) == t) + { + if (TYPE_STRUCTURAL_EQUALITY_P (elt_type) + || TYPE_STRUCTURAL_EQUALITY_P (index_type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (elt_type) != elt_type + || TYPE_CANONICAL (index_type) != index_type) + TYPE_CANONICAL (t) + = build_array_type (TYPE_CANONICAL (elt_type), + TYPE_CANONICAL (index_type)); + } + return t; } -/* Return the TYPE of the elements comprising - the innermost dimension of ARRAY. */ +/* Recursively examines the array elements of TYPE, until a non-array + element type is found. */ tree -get_inner_array_type (array) - tree array; +strip_array_types (tree type) { - tree type = TREE_TYPE (array); - while (TREE_CODE (type) == ARRAY_TYPE) type = TREE_TYPE (type); return type; } +/* Computes the canonical argument types from the argument type list + ARGTYPES. + + Upon return, *ANY_STRUCTURAL_P will be true iff either it was true + on entry to this function, or if any of the ARGTYPES are + structural. 
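/* Editor's note (standalone sketch of the hash-consing pattern used by
   build_array_type above; all names below are invented): the real code
   builds a candidate node, hashes its ingredients, and lets
   type_hash_canon hand back the canonical copy, so structurally
   identical requests share one node and only a genuinely new node gets
   laid out.  A minimal interning table shows the same idea.  */

#include <stddef.h>
#include <stdlib.h>

struct toy_array_type { int elt_id; int index_id; };

#define INTERN_MAX 64
static struct toy_array_type interned[INTERN_MAX];
static size_t n_interned;

/* Return the canonical toy_array_type for (elt_id, index_id); equal
   requests always return the same slot.  */
static struct toy_array_type *
intern_array_type (int elt_id, int index_id)
{
  size_t i;

  for (i = 0; i < n_interned; i++)
    if (interned[i].elt_id == elt_id && interned[i].index_id == index_id)
      return &interned[i];              /* reuse the old one */

  if (n_interned == INTERN_MAX)
    abort ();                           /* sketch only: fixed capacity */

  interned[n_interned].elt_id = elt_id; /* "lay out" the new one */
  interned[n_interned].index_id = index_id;
  return &interned[n_interned++];
}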
+ + Upon return, *ANY_NONCANONICAL_P will be true iff either it was + true on entry to this function, or if any of the ARGTYPES are + non-canonical. + + Returns a canonical argument list, which may be ARGTYPES when the + canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is + true) or would not differ from ARGTYPES. */ + +static tree +maybe_canonicalize_argtypes(tree argtypes, + bool *any_structural_p, + bool *any_noncanonical_p) +{ + tree arg; + bool any_noncanonical_argtypes_p = false; + + for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg)) + { + if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node) + /* Fail gracefully by stating that the type is structural. */ + *any_structural_p = true; + else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg))) + *any_structural_p = true; + else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg) + || TREE_PURPOSE (arg)) + /* If the argument has a default argument, we consider it + non-canonical even though the type itself is canonical. + That way, different variants of function and method types + with default arguments will all point to the variant with + no defaults as their canonical type. */ + any_noncanonical_argtypes_p = true; + } + + if (*any_structural_p) + return argtypes; + + if (any_noncanonical_argtypes_p) + { + /* Build the canonical list of argument types. */ + tree canon_argtypes = NULL_TREE; + bool is_void = false; + + for (arg = argtypes; arg; arg = TREE_CHAIN (arg)) + { + if (arg == void_list_node) + is_void = true; + else + canon_argtypes = tree_cons (NULL_TREE, + TYPE_CANONICAL (TREE_VALUE (arg)), + canon_argtypes); + } + + canon_argtypes = nreverse (canon_argtypes); + if (is_void) + canon_argtypes = chainon (canon_argtypes, void_list_node); + + /* There is a non-canonical type. */ + *any_noncanonical_p = true; + return canon_argtypes; + } + + /* The canonical argument types are the same as ARGTYPES. */ + return argtypes; +} + /* Construct, lay out and return the type of functions returning type VALUE_TYPE given arguments of types ARG_TYPES. @@ -3948,11 +5897,12 @@ get_inner_array_type (array) If such a type has already been constructed, reuse it. */ tree -build_function_type (value_type, arg_types) - tree value_type, arg_types; +build_function_type (tree value_type, tree arg_types) { tree t; - unsigned int hashcode; + hashval_t hashcode = 0; + bool any_structural_p, any_noncanonical_p; + tree canon_argtypes; if (TREE_CODE (value_type) == FUNCTION_TYPE) { @@ -3965,63 +5915,261 @@ build_function_type (value_type, arg_types) TREE_TYPE (t) = value_type; TYPE_ARG_TYPES (t) = arg_types; - /* If we already have such a type, use the old one and free this one. */ - hashcode = TYPE_HASH (value_type) + type_hash_list (arg_types); + /* If we already have such a type, use the old one. */ + hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode); + hashcode = type_hash_list (arg_types, hashcode); t = type_hash_canon (hashcode, t); + /* Set up the canonical type. 
*/ + any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type); + any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type; + canon_argtypes = maybe_canonicalize_argtypes (arg_types, + &any_structural_p, + &any_noncanonical_p); + if (any_structural_p) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (any_noncanonical_p) + TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type), + canon_argtypes); + if (!COMPLETE_TYPE_P (t)) layout_type (t); return t; } -/* Construct, lay out and return the type of methods belonging to class - BASETYPE and whose arguments and values are described by TYPE. - If that type exists already, reuse it. - TYPE must be a FUNCTION_TYPE node. */ +/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP. */ + +tree +build_function_type_skip_args (tree orig_type, bitmap args_to_skip) +{ + tree new_type = NULL; + tree args, new_args = NULL, t; + tree new_reversed; + int i = 0; + + for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node; + args = TREE_CHAIN (args), i++) + if (!bitmap_bit_p (args_to_skip, i)) + new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args); + + new_reversed = nreverse (new_args); + if (args) + { + if (new_reversed) + TREE_CHAIN (new_args) = void_list_node; + else + new_reversed = void_list_node; + } + + /* Use copy_node to preserve as much as possible from original type + (debug info, attribute lists etc.) + Exception is METHOD_TYPEs must have THIS argument. + When we are asked to remove it, we need to build new FUNCTION_TYPE + instead. */ + if (TREE_CODE (orig_type) != METHOD_TYPE + || !bitmap_bit_p (args_to_skip, 0)) + { + new_type = copy_node (orig_type); + TYPE_ARG_TYPES (new_type) = new_reversed; + } + else + { + new_type + = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type), + new_reversed)); + TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type); + } + + /* This is a new type, not a copy of an old type. Need to reassociate + variants. We can handle everything except the main variant lazily. */ + t = TYPE_MAIN_VARIANT (orig_type); + if (orig_type != t) + { + TYPE_MAIN_VARIANT (new_type) = t; + TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t); + TYPE_NEXT_VARIANT (t) = new_type; + } + else + { + TYPE_MAIN_VARIANT (new_type) = new_type; + TYPE_NEXT_VARIANT (new_type) = NULL; + } + return new_type; +} + +/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP. + + Arguments from DECL_ARGUMENTS list can't be removed now, since they are + linked by TREE_CHAIN directly. It is caller responsibility to eliminate + them when they are being duplicated (i.e. copy_arguments_for_versioning). */ + +tree +build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip) +{ + tree new_decl = copy_node (orig_decl); + tree new_type; + + new_type = TREE_TYPE (orig_decl); + if (prototype_p (new_type)) + new_type = build_function_type_skip_args (new_type, args_to_skip); + TREE_TYPE (new_decl) = new_type; + + /* For declarations setting DECL_VINDEX (i.e. methods) + we expect first argument to be THIS pointer. */ + if (bitmap_bit_p (args_to_skip, 0)) + DECL_VINDEX (new_decl) = NULL_TREE; + return new_decl; +} + +/* Build a function type. The RETURN_TYPE is the type returned by the + function. If VAARGS is set, no void_type_node is appended to the + the list. ARGP muse be alway be terminated be a NULL_TREE. 
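/* Editor's note (standalone analogue, not part of the patch):
   build_function_type_skip_args above keeps exactly those argument
   positions whose bit is clear in ARGS_TO_SKIP.  The same filtering
   over a plain array, with a simple unsigned long standing in for the
   bitmap:  */

#include <stddef.h>

static size_t
filter_skipped (const int *args, size_t n, unsigned long args_to_skip,
                int *out)
{
  size_t i, kept = 0;

  for (i = 0; i < n; i++)
    if (!((args_to_skip >> i) & 1UL))   /* keep args whose bit is clear */
      out[kept++] = args[i];
  return kept;
}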
*/ + +static tree +build_function_type_list_1 (bool vaargs, tree return_type, va_list argp) +{ + tree t, args, last; + + t = va_arg (argp, tree); + for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree)) + args = tree_cons (NULL_TREE, t, args); + + if (vaargs) + { + last = args; + if (args != NULL_TREE) + args = nreverse (args); + gcc_assert (args != NULL_TREE && last != void_list_node); + } + else if (args == NULL_TREE) + args = void_list_node; + else + { + last = args; + args = nreverse (args); + TREE_CHAIN (last) = void_list_node; + } + args = build_function_type (return_type, args); + + return args; +} + +/* Build a function type. The RETURN_TYPE is the type returned by the + function. If additional arguments are provided, they are + additional argument types. The list of argument types must always + be terminated by NULL_TREE. */ tree -build_method_type (basetype, type) - tree basetype, type; +build_function_type_list (tree return_type, ...) +{ + tree args; + va_list p; + + va_start (p, return_type); + args = build_function_type_list_1 (false, return_type, p); + va_end (p); + return args; +} + +/* Build a variable argument function type. The RETURN_TYPE is the + type returned by the function. If additional arguments are provided, + they are additional argument types. The list of argument types must + always be terminated by NULL_TREE. */ + +tree +build_varargs_function_type_list (tree return_type, ...) +{ + tree args; + va_list p; + + va_start (p, return_type); + args = build_function_type_list_1 (true, return_type, p); + va_end (p); + + return args; +} + +/* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE) + and ARGTYPES (a TREE_LIST) are the return type and arguments types + for the method. An implicit additional parameter (of type + pointer-to-BASETYPE) is added to the ARGTYPES. */ + +tree +build_method_type_directly (tree basetype, + tree rettype, + tree argtypes) { tree t; - unsigned int hashcode; + tree ptype; + int hashcode = 0; + bool any_structural_p, any_noncanonical_p; + tree canon_argtypes; /* Make a node of the sort we want. */ t = make_node (METHOD_TYPE); - if (TREE_CODE (type) != FUNCTION_TYPE) - abort (); - TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); - TREE_TYPE (t) = TREE_TYPE (type); + TREE_TYPE (t) = rettype; + ptype = build_pointer_type (basetype); /* The actual arglist for this function includes a "hidden" argument which is "this". Put it into the list of argument types. */ + argtypes = tree_cons (NULL_TREE, ptype, argtypes); + TYPE_ARG_TYPES (t) = argtypes; - TYPE_ARG_TYPES (t) - = tree_cons (NULL_TREE, - build_pointer_type (basetype), TYPE_ARG_TYPES (type)); - - /* If we already have such a type, use the old one and free this one. */ - hashcode = TYPE_HASH (basetype) + TYPE_HASH (type); + /* If we already have such a type, use the old one. */ + hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode); + hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode); + hashcode = type_hash_list (argtypes, hashcode); t = type_hash_canon (hashcode, t); + /* Set up the canonical type. 
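/* Editor's note (usage sketch, not part of the patch): a caller inside
   the compiler, after the global type nodes are initialized, would use
   the two entry points above roughly as follows.  The fixed-argument
   form is terminated by NULL_TREE and gets void_list_node appended for
   it; the varargs form leaves the list open to mark the ellipsis.  */
static tree
sample_signature_types (void)
{
  /* double f (int, long).  */
  tree fixed = build_function_type_list (double_type_node,
                                         integer_type_node,
                                         long_integer_type_node,
                                         NULL_TREE);

  /* int g (const void *, ...).  */
  tree variadic = build_varargs_function_type_list (integer_type_node,
                                                    const_ptr_type_node,
                                                    NULL_TREE);

  return tree_cons (NULL_TREE, fixed,
                    tree_cons (NULL_TREE, variadic, NULL_TREE));
}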
*/ + any_structural_p + = (TYPE_STRUCTURAL_EQUALITY_P (basetype) + || TYPE_STRUCTURAL_EQUALITY_P (rettype)); + any_noncanonical_p + = (TYPE_CANONICAL (basetype) != basetype + || TYPE_CANONICAL (rettype) != rettype); + canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes), + &any_structural_p, + &any_noncanonical_p); + if (any_structural_p) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (any_noncanonical_p) + TYPE_CANONICAL (t) + = build_method_type_directly (TYPE_CANONICAL (basetype), + TYPE_CANONICAL (rettype), + canon_argtypes); if (!COMPLETE_TYPE_P (t)) layout_type (t); return t; } +/* Construct, lay out and return the type of methods belonging to class + BASETYPE and whose arguments and values are described by TYPE. + If that type exists already, reuse it. + TYPE must be a FUNCTION_TYPE node. */ + +tree +build_method_type (tree basetype, tree type) +{ + gcc_assert (TREE_CODE (type) == FUNCTION_TYPE); + + return build_method_type_directly (basetype, + TREE_TYPE (type), + TYPE_ARG_TYPES (type)); +} + /* Construct, lay out and return the type of offsets to a value of type TYPE, within an object of type BASETYPE. If a suitable offset type exists already, reuse it. */ tree -build_offset_type (basetype, type) - tree basetype, type; +build_offset_type (tree basetype, tree type) { tree t; - unsigned int hashcode; + hashval_t hashcode = 0; /* Make a node of the sort we want. */ t = make_node (OFFSET_TYPE); @@ -4029,42 +6177,64 @@ build_offset_type (basetype, type) TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); TREE_TYPE (t) = type; - /* If we already have such a type, use the old one and free this one. */ - hashcode = TYPE_HASH (basetype) + TYPE_HASH (type); + /* If we already have such a type, use the old one. */ + hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode); + hashcode = iterative_hash_object (TYPE_HASH (type), hashcode); t = type_hash_canon (hashcode, t); if (!COMPLETE_TYPE_P (t)) layout_type (t); + if (TYPE_CANONICAL (t) == t) + { + if (TYPE_STRUCTURAL_EQUALITY_P (basetype) + || TYPE_STRUCTURAL_EQUALITY_P (type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype + || TYPE_CANONICAL (type) != type) + TYPE_CANONICAL (t) + = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)), + TYPE_CANONICAL (type)); + } + return t; } /* Create a complex type whose components are COMPONENT_TYPE. */ tree -build_complex_type (component_type) - tree component_type; +build_complex_type (tree component_type) { tree t; - unsigned int hashcode; + hashval_t hashcode; + + gcc_assert (INTEGRAL_TYPE_P (component_type) + || SCALAR_FLOAT_TYPE_P (component_type) + || FIXED_POINT_TYPE_P (component_type)); /* Make a node of the sort we want. */ t = make_node (COMPLEX_TYPE); TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type); - set_type_quals (t, TYPE_QUALS (component_type)); - /* If we already have such a type, use the old one and free this one. */ - hashcode = TYPE_HASH (component_type); + /* If we already have such a type, use the old one. */ + hashcode = iterative_hash_object (TYPE_HASH (component_type), 0); t = type_hash_canon (hashcode, t); if (!COMPLETE_TYPE_P (t)) layout_type (t); - /* If we are writing Dwarf2 output we need to create a name, - since complex is a fundamental type. */ - if ((write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG) - && ! 
TYPE_NAME (t)) + if (TYPE_CANONICAL (t) == t) + { + if (TYPE_STRUCTURAL_EQUALITY_P (component_type)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (component_type) != component_type) + TYPE_CANONICAL (t) + = build_complex_type (TYPE_CANONICAL (component_type)); + } + + /* We need to create a name, since complex is a fundamental type. */ + if (! TYPE_NAME (t)) { const char *name; if (component_type == char_type_node) @@ -4093,10 +6263,10 @@ build_complex_type (component_type) name = 0; if (name != 0) - TYPE_NAME (t) = get_identifier (name); + TYPE_NAME (t) = build_decl (TYPE_DECL, get_identifier (name), t); } - return t; + return build_qualified_type (t, TYPE_QUALS (component_type)); } /* Return OP, stripped of any conversions to wider types as much as is safe. @@ -4105,11 +6275,6 @@ build_complex_type (component_type) If FOR_TYPE is nonzero, we return a value which, if converted to type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE. - If FOR_TYPE is nonzero, unaligned bit-field references may be changed to the - narrowest type that can hold the value, even if they don't exactly fit. - Otherwise, bit-field references are changed to a narrower type - only if they can be fetched directly from memory in that type. - OP must have integer, real or enumeral type. Pointers are not allowed! There are some cases where the obvious value we could return @@ -4123,9 +6288,7 @@ build_complex_type (component_type) is different from (int) OP. */ tree -get_unwidened (op, for_type) - tree op; - tree for_type; +get_unwidened (tree op, tree for_type) { /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */ tree type = TREE_TYPE (op); @@ -4134,14 +6297,21 @@ get_unwidened (op, for_type) int uns = (for_type != 0 && for_type != type && final_prec > TYPE_PRECISION (type) - && TREE_UNSIGNED (type)); + && TYPE_UNSIGNED (type)); tree win = op; - while (TREE_CODE (op) == NOP_EXPR) + while (CONVERT_EXPR_P (op)) { - int bitschange - = TYPE_PRECISION (TREE_TYPE (op)) - - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))); + int bitschange; + + /* TYPE_PRECISION on vector types has different meaning + (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions, + so avoid them here. */ + if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE) + break; + + bitschange = TYPE_PRECISION (TREE_TYPE (op)) + - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))); /* Truncations are many-one so cannot be removed. Unless we are later going to truncate down even farther. */ @@ -4164,11 +6334,12 @@ get_unwidened (op, for_type) { if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op))) win = op; - /* TREE_UNSIGNED says whether this is a zero-extension. + /* TYPE_UNSIGNED says whether this is a zero-extension. Let's avoid computing it if it does not affect WIN and if UNS will not be needed again. */ - if ((uns || TREE_CODE (op) == NOP_EXPR) - && TREE_UNSIGNED (TREE_TYPE (op))) + if ((uns + || CONVERT_EXPR_P (op)) + && TYPE_UNSIGNED (TREE_TYPE (op))) { uns = 1; win = op; @@ -4176,37 +6347,6 @@ get_unwidened (op, for_type) } } - if (TREE_CODE (op) == COMPONENT_REF - /* Since type_for_size always gives an integer type. */ - && TREE_CODE (type) != REAL_TYPE - /* Don't crash if field not laid out yet. 
*/ - && DECL_SIZE (TREE_OPERAND (op, 1)) != 0 - && host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1)) - { - unsigned int innerprec - = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1); - - type = type_for_size (innerprec, TREE_UNSIGNED (TREE_OPERAND (op, 1))); - - /* We can get this structure field in the narrowest type it fits in. - If FOR_TYPE is 0, do this only for a field that matches the - narrower type exactly and is aligned for it - The resulting extension to its nominal type (a fullword type) - must fit the same conditions as for other extensions. */ - - if (innerprec < TYPE_PRECISION (TREE_TYPE (op)) - && (for_type || ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))) - && (! uns || final_prec <= innerprec - || TREE_UNSIGNED (TREE_OPERAND (op, 1))) - && type != 0) - { - win = build (COMPONENT_REF, type, TREE_OPERAND (op, 0), - TREE_OPERAND (op, 1)); - TREE_SIDE_EFFECTS (win) = TREE_SIDE_EFFECTS (op); - TREE_THIS_VOLATILE (win) = TREE_THIS_VOLATILE (op); - } - } - return win; } @@ -4216,13 +6356,12 @@ get_unwidened (op, for_type) or 0 if the value should be sign-extended. */ tree -get_narrower (op, unsignedp_ptr) - tree op; - int *unsignedp_ptr; +get_narrower (tree op, int *unsignedp_ptr) { int uns = 0; int first = 1; tree win = op; + bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op)); while (TREE_CODE (op) == NOP_EXPR) { @@ -4243,11 +6382,11 @@ get_narrower (op, unsignedp_ptr) /* An extension: the outermost one can be stripped, but remember whether it is zero or sign extension. */ if (first) - uns = TREE_UNSIGNED (TREE_TYPE (op)); + uns = TYPE_UNSIGNED (TREE_TYPE (op)); /* Otherwise, if a sign extension has been stripped, only sign extensions can now be stripped; if a zero extension has been stripped, only zero-extensions. */ - else if (uns != TREE_UNSIGNED (TREE_TYPE (op))) + else if (uns != TYPE_UNSIGNED (TREE_TYPE (op))) break; first = 0; } @@ -4256,9 +6395,13 @@ get_narrower (op, unsignedp_ptr) /* A change in nominal type can always be stripped, but we must preserve the unsignedness. */ if (first) - uns = TREE_UNSIGNED (TREE_TYPE (op)); + uns = TYPE_UNSIGNED (TREE_TYPE (op)); first = 0; op = TREE_OPERAND (op, 0); + /* Keep trying to narrow, but don't assign op to win if it + would turn an integral type into something else. */ + if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p) + continue; } win = op; @@ -4267,12 +6410,16 @@ get_narrower (op, unsignedp_ptr) if (TREE_CODE (op) == COMPONENT_REF /* Since type_for_size always gives an integer type. */ && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE + && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE /* Ensure field is laid out already. */ - && DECL_SIZE (TREE_OPERAND (op, 1)) != 0) + && DECL_SIZE (TREE_OPERAND (op, 1)) != 0 + && host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1)) { unsigned HOST_WIDE_INT innerprec = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1); - tree type = type_for_size (innerprec, TREE_UNSIGNED (op)); + int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1)) + || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1)))); + tree type = lang_hooks.types.type_for_size (innerprec, unsignedp); /* We can get this structure field in a narrower type that fits it, but the resulting extension to its nominal type (a fullword type) @@ -4284,17 +6431,15 @@ get_narrower (op, unsignedp_ptr) if (innerprec < TYPE_PRECISION (TREE_TYPE (op)) && ! 
DECL_BIT_FIELD (TREE_OPERAND (op, 1)) - && (first || uns == TREE_UNSIGNED (TREE_OPERAND (op, 1))) + && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1))) && type != 0) { if (first) - uns = TREE_UNSIGNED (TREE_OPERAND (op, 1)); - win = build (COMPONENT_REF, type, TREE_OPERAND (op, 0), - TREE_OPERAND (op, 1)); - TREE_SIDE_EFFECTS (win) = TREE_SIDE_EFFECTS (op); - TREE_THIS_VOLATILE (win) = TREE_THIS_VOLATILE (op); + uns = DECL_UNSIGNED (TREE_OPERAND (op, 1)); + win = fold_convert (type, op); } } + *unsignedp_ptr = uns; return win; } @@ -4303,105 +6448,311 @@ get_narrower (op, unsignedp_ptr) for type TYPE (an INTEGER_TYPE). */ int -int_fits_type_p (c, type) - tree c, type; -{ - /* If the bounds of the type are integers, we can check ourselves. - If not, but this type is a subtype, try checking against that. - Otherwise, use force_fit_type, which checks against the precision. */ - if (TYPE_MAX_VALUE (type) != NULL_TREE - && TYPE_MIN_VALUE (type) != NULL_TREE - && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST +int_fits_type_p (const_tree c, const_tree type) +{ + tree type_low_bound, type_high_bound; + bool ok_for_low_bound, ok_for_high_bound, unsc; + double_int dc, dd; + + dc = tree_to_double_int (c); + unsc = TYPE_UNSIGNED (TREE_TYPE (c)); + + if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (TREE_TYPE (c)) + && unsc) + /* So c is an unsigned integer whose type is sizetype and type is not. + sizetype'd integers are sign extended even though they are + unsigned. If the integer value fits in the lower end word of c, + and if the higher end word has all its bits set to 1, that + means the higher end bits are set to 1 only for sign extension. + So let's convert c into an equivalent zero extended unsigned + integer. */ + dc = double_int_zext (dc, TYPE_PRECISION (TREE_TYPE (c))); + +retry: + type_low_bound = TYPE_MIN_VALUE (type); + type_high_bound = TYPE_MAX_VALUE (type); + + /* If at least one bound of the type is a constant integer, we can check + ourselves and maybe make a decision. If no such decision is possible, but + this type is a subtype, try checking against that. Otherwise, use + fit_double_type, which checks against the precision. + + Compute the status for each possibly constant bound, and return if we see + one does not match. Use ok_for_xxx_bound for this purpose, assigning -1 + for "unknown if constant fits", 0 for "constant known *not* to fit" and 1 + for "constant known to fit". */ + + /* Check if c >= type_low_bound. */ + if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST) + { + dd = tree_to_double_int (type_low_bound); + if (TREE_CODE (type) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (type) + && TYPE_UNSIGNED (type)) + dd = double_int_zext (dd, TYPE_PRECISION (type)); + if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_low_bound))) + { + int c_neg = (!unsc && double_int_negative_p (dc)); + int t_neg = (unsc && double_int_negative_p (dd)); + + if (c_neg && !t_neg) + return 0; + if ((c_neg || !t_neg) && double_int_ucmp (dc, dd) < 0) + return 0; + } + else if (double_int_cmp (dc, dd, unsc) < 0) + return 0; + ok_for_low_bound = true; + } + else + ok_for_low_bound = false; + + /* Check if c <= type_high_bound. 
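/* Editor's note (simplified analogue, not the double_int code above):
   once the bounds are known, int_fits_type_p reduces to
   "low <= c && c <= high", with extra care when the constant and the
   target type differ in signedness -- in particular, a negative
   constant never fits an unsigned type.  A 64-bit toy version of the
   same decision:  */
static int
fits_signed_range (long long c, long long lo, long long hi)
{
  return lo <= c && c <= hi;
}

static int
fits_unsigned_range (long long c, unsigned long long hi)
{
  /* Negative constants never fit an unsigned type.  */
  if (c < 0)
    return 0;
  return (unsigned long long) c <= hi;
}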
*/ + if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST) + { + dd = tree_to_double_int (type_high_bound); + if (TREE_CODE (type) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (type) + && TYPE_UNSIGNED (type)) + dd = double_int_zext (dd, TYPE_PRECISION (type)); + if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_high_bound))) + { + int c_neg = (!unsc && double_int_negative_p (dc)); + int t_neg = (unsc && double_int_negative_p (dd)); + + if (t_neg && !c_neg) + return 0; + if ((t_neg || !c_neg) && double_int_ucmp (dc, dd) > 0) + return 0; + } + else if (double_int_cmp (dc, dd, unsc) > 0) + return 0; + ok_for_high_bound = true; + } + else + ok_for_high_bound = false; + + /* If the constant fits both bounds, the result is known. */ + if (ok_for_low_bound && ok_for_high_bound) + return 1; + + /* Perform some generic filtering which may allow making a decision + even if the bounds are not constant. First, negative integers + never fit in unsigned types, */ + if (TYPE_UNSIGNED (type) && !unsc && double_int_negative_p (dc)) + return 0; + + /* Second, narrower types always fit in wider ones. */ + if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c))) + return 1; + + /* Third, unsigned integers with top bit set never fit signed types. */ + if (! TYPE_UNSIGNED (type) && unsc) + { + int prec = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (c))) - 1; + if (prec < HOST_BITS_PER_WIDE_INT) + { + if (((((unsigned HOST_WIDE_INT) 1) << prec) & dc.low) != 0) + return 0; + } + else if (((((unsigned HOST_WIDE_INT) 1) + << (prec - HOST_BITS_PER_WIDE_INT)) & dc.high) != 0) + return 0; + } + + /* If we haven't been able to decide at this point, there nothing more we + can check ourselves here. Look at the base type if we have one and it + has the same precision. */ + if (TREE_CODE (type) == INTEGER_TYPE + && TREE_TYPE (type) != 0 + && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type))) + { + type = TREE_TYPE (type); + goto retry; + } + + /* Or to fit_double_type, if nothing else. */ + return !fit_double_type (dc.low, dc.high, &dc.low, &dc.high, type); +} + +/* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant + bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be + represented (assuming two's-complement arithmetic) within the bit + precision of the type are returned instead. */ + +void +get_type_static_bounds (const_tree type, mpz_t min, mpz_t max) +{ + if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type) && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST) + mpz_set_double_int (min, tree_to_double_int (TYPE_MIN_VALUE (type)), + TYPE_UNSIGNED (type)); + else { - if (TREE_UNSIGNED (type)) - return (! INT_CST_LT_UNSIGNED (TYPE_MAX_VALUE (type), c) - && ! INT_CST_LT_UNSIGNED (c, TYPE_MIN_VALUE (type)) - /* Negative ints never fit unsigned types. */ - && ! (TREE_INT_CST_HIGH (c) < 0 - && ! TREE_UNSIGNED (TREE_TYPE (c)))); + if (TYPE_UNSIGNED (type)) + mpz_set_ui (min, 0); else - return (! INT_CST_LT (TYPE_MAX_VALUE (type), c) - && ! INT_CST_LT (c, TYPE_MIN_VALUE (type)) - /* Unsigned ints with top bit set never fit signed types. */ - && ! 
(TREE_INT_CST_HIGH (c) < 0 - && TREE_UNSIGNED (TREE_TYPE (c)))); - } - else if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != 0) - return int_fits_type_p (c, TREE_TYPE (type)); + { + double_int mn; + mn = double_int_mask (TYPE_PRECISION (type) - 1); + mn = double_int_sext (double_int_add (mn, double_int_one), + TYPE_PRECISION (type)); + mpz_set_double_int (min, mn, false); + } + } + + if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type) + && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST) + mpz_set_double_int (max, tree_to_double_int (TYPE_MAX_VALUE (type)), + TYPE_UNSIGNED (type)); else { - c = copy_node (c); - TREE_TYPE (c) = type; - return !force_fit_type (c, 0); + if (TYPE_UNSIGNED (type)) + mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type)), + true); + else + mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type) - 1), + true); } } +/* Return true if VAR is an automatic variable defined in function FN. */ + +bool +auto_var_in_fn_p (const_tree var, const_tree fn) +{ + return (DECL_P (var) && DECL_CONTEXT (var) == fn + && (((TREE_CODE (var) == VAR_DECL || TREE_CODE (var) == PARM_DECL) + && ! TREE_STATIC (var)) + || TREE_CODE (var) == LABEL_DECL + || TREE_CODE (var) == RESULT_DECL)); +} + +/* Subprogram of following function. Called by walk_tree. + + Return *TP if it is an automatic variable or parameter of the + function passed in as DATA. */ + +static tree +find_var_from_fn (tree *tp, int *walk_subtrees, void *data) +{ + tree fn = (tree) data; + + if (TYPE_P (*tp)) + *walk_subtrees = 0; + + else if (DECL_P (*tp) + && auto_var_in_fn_p (*tp, fn)) + return *tp; + + return NULL_TREE; +} + /* Returns true if T is, contains, or refers to a type with variable - size. This concept is more general than that of C99 'variably - modified types': in C99, a struct type is never variably modified - because a VLA may not appear as a structure member. However, in - GNU C code like: - + size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the + arguments, but not the return type. If FN is nonzero, only return + true if a modifier of the type or position of FN is a variable or + parameter inside FN. + + This concept is more general than that of C99 'variably modified types': + in C99, a struct type is never variably modified because a VLA may not + appear as a structure member. However, in GNU C code like: + struct S { int i[f()]; }; is valid, and other languages may define similar constructs. */ bool -variably_modified_type_p (type) - tree type; +variably_modified_type_p (tree type, tree fn) { - /* If TYPE itself has variable size, it is variably modified. + tree t; - We do not yet have a representation of the C99 '[*]' syntax. - When a representation is chosen, this function should be modified - to test for that case as well. */ - if (TYPE_SIZE (type) - && TYPE_SIZE (type) != error_mark_node - && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) - return true; +/* Test if T is either variable (if FN is zero) or an expression containing + a variable in FN. */ +#define RETURN_TRUE_IF_VAR(T) \ + do { tree _t = (T); \ + if (_t && _t != error_mark_node && TREE_CODE (_t) != INTEGER_CST \ + && (!fn || walk_tree (&_t, find_var_from_fn, fn, NULL))) \ + return true; } while (0) - /* If TYPE is a pointer or reference, it is variably modified if - the type pointed to is variably modified. 
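/* Editor's note (toy analogue of the walk_tree callback protocol used
   by find_var_from_fn above; all names invented): the callback either
   prunes a subtree by clearing *walk_subtrees, or stops the entire walk
   by returning a non-NULL node.  */

#include <stddef.h>

struct toy_node
{
  int is_type;                /* prune below these            */
  int is_local_of_target_fn;  /* what the walk is looking for */
  struct toy_node *kid[2];
};

static struct toy_node *
toy_walk (struct toy_node *t)
{
  int walk_subtrees = 1;
  struct toy_node *found;

  if (!t)
    return NULL;

  /* Callback body, mirroring find_var_from_fn.  */
  if (t->is_type)
    walk_subtrees = 0;
  else if (t->is_local_of_target_fn)
    return t;

  if (!walk_subtrees)
    return NULL;
  if ((found = toy_walk (t->kid[0])) != NULL)
    return found;
  return toy_walk (t->kid[1]);
}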
*/ - if ((TREE_CODE (type) == POINTER_TYPE - || TREE_CODE (type) == REFERENCE_TYPE) - && variably_modified_type_p (TREE_TYPE (type))) - return true; - - /* If TYPE is an array, it is variably modified if the array - elements are. (Note that the VLA case has already been checked - above.) */ - if (TREE_CODE (type) == ARRAY_TYPE - && variably_modified_type_p (TREE_TYPE (type))) - return true; + if (type == error_mark_node) + return false; - /* If TYPE is a function type, it is variably modified if any of the - parameters or the return type are variably modified. */ - if (TREE_CODE (type) == FUNCTION_TYPE - || TREE_CODE (type) == METHOD_TYPE) - { - tree parm; + /* If TYPE itself has variable size, it is variably modified. */ + RETURN_TRUE_IF_VAR (TYPE_SIZE (type)); + RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type)); - if (variably_modified_type_p (TREE_TYPE (type))) + switch (TREE_CODE (type)) + { + case POINTER_TYPE: + case REFERENCE_TYPE: + case VECTOR_TYPE: + if (variably_modified_type_p (TREE_TYPE (type), fn)) return true; - for (parm = TYPE_ARG_TYPES (type); - parm && parm != void_list_node; - parm = TREE_CHAIN (parm)) - if (variably_modified_type_p (TREE_VALUE (parm))) + break; + + case FUNCTION_TYPE: + case METHOD_TYPE: + /* If TYPE is a function type, it is variably modified if the + return type is variably modified. */ + if (variably_modified_type_p (TREE_TYPE (type), fn)) return true; + break; + + case INTEGER_TYPE: + case REAL_TYPE: + case FIXED_POINT_TYPE: + case ENUMERAL_TYPE: + case BOOLEAN_TYPE: + /* Scalar types are variably modified if their end points + aren't constant. */ + RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type)); + RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type)); + break; + + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: + /* We can't see if any of the fields are variably-modified by the + definition we normally use, since that would produce infinite + recursion via pointers. */ + /* This is variably modified if some field's type is. */ + for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t)) + if (TREE_CODE (t) == FIELD_DECL) + { + RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t)); + RETURN_TRUE_IF_VAR (DECL_SIZE (t)); + RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t)); + + if (TREE_CODE (type) == QUAL_UNION_TYPE) + RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t)); + } + break; + + case ARRAY_TYPE: + /* Do not call ourselves to avoid infinite recursion. This is + variably modified if the element type is. */ + RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type))); + RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type))); + break; + + default: + break; } /* The current language may have other cases to check, but in general, all other types are not variably modified. */ - return (*lang_hooks.tree_inlining.var_mod_type_p) (type); + return lang_hooks.tree_inlining.var_mod_type_p (type, fn); + +#undef RETURN_TRUE_IF_VAR } /* Given a DECL or TYPE, return the scope in which it was declared, or NULL_TREE if there is no containing scope. */ tree -get_containing_scope (t) - tree t; +get_containing_scope (const_tree t) { return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t)); } @@ -4410,17 +6761,13 @@ get_containing_scope (t) a FUNCTION_DECL, or zero if none. */ tree -decl_function_context (decl) - tree decl; +decl_function_context (const_tree decl) { tree context; if (TREE_CODE (decl) == ERROR_MARK) return 0; - if (TREE_CODE (decl) == SAVE_EXPR) - context = SAVE_EXPR_CONTEXT (decl); - /* C++ virtual functions use DECL_CONTEXT for the class of the vtable where we look up the function at runtime. 
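/* Editor's note (simplified model, not part of the patch):
   decl_function_context and decl_type_context essentially climb the
   context chain -- DECL_CONTEXT or BLOCK_SUPERCONTEXT -- until the kind
   of scope they are after turns up, ignoring here the C++ vtable
   special case discussed in the comment above.  */
enum toy_ctx_kind { TOY_FUNCTION, TOY_BLOCK, TOY_TYPE, TOY_TU };

struct toy_ctx
{
  enum toy_ctx_kind kind;
  struct toy_ctx *outer;  /* plays DECL_CONTEXT / BLOCK_SUPERCONTEXT */
};

static struct toy_ctx *
toy_function_context (struct toy_ctx *c)
{
  while (c && c->kind != TOY_FUNCTION)
    c = c->outer;
  return c;  /* a null pointer when the decl is at file scope */
}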
Such functions always take a first argument of type 'pointer to real context'. @@ -4450,29 +6797,35 @@ decl_function_context (decl) TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */ tree -decl_type_context (decl) - tree decl; +decl_type_context (const_tree decl) { tree context = DECL_CONTEXT (decl); while (context) - { - if (TREE_CODE (context) == RECORD_TYPE - || TREE_CODE (context) == UNION_TYPE - || TREE_CODE (context) == QUAL_UNION_TYPE) + switch (TREE_CODE (context)) + { + case NAMESPACE_DECL: + case TRANSLATION_UNIT_DECL: + return NULL_TREE; + + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: return context; - if (TREE_CODE (context) == TYPE_DECL - || TREE_CODE (context) == FUNCTION_DECL) + case TYPE_DECL: + case FUNCTION_DECL: context = DECL_CONTEXT (context); + break; - else if (TREE_CODE (context) == BLOCK) + case BLOCK: context = BLOCK_SUPERCONTEXT (context); + break; + + default: + gcc_unreachable (); + } - else - /* Unhandled CONTEXT!? */ - abort (); - } return NULL_TREE; } @@ -4481,19 +6834,20 @@ decl_type_context (decl) determined. */ tree -get_callee_fndecl (call) - tree call; +get_callee_fndecl (const_tree call) { tree addr; + if (call == error_mark_node) + return error_mark_node; + /* It's invalid to call this function with anything but a CALL_EXPR. */ - if (TREE_CODE (call) != CALL_EXPR) - abort (); + gcc_assert (TREE_CODE (call) == CALL_EXPR); /* The first operand to the CALL is the address of the function called. */ - addr = TREE_OPERAND (call, 0); + addr = CALL_EXPR_FN (call); STRIP_NOPS (addr); @@ -4513,34 +6867,11 @@ get_callee_fndecl (call) return NULL_TREE; } -/* Print debugging information about the obstack O, named STR. */ - -void -print_obstack_statistics (str, o) - const char *str; - struct obstack *o; -{ - struct _obstack_chunk *chunk = o->chunk; - int n_chunks = 1; - int n_alloc = 0; - - n_alloc += o->next_free - chunk->contents; - chunk = chunk->prev; - while (chunk) - { - n_chunks += 1; - n_alloc += chunk->limit - &chunk->contents[0]; - chunk = chunk->prev; - } - fprintf (stderr, "obstack %s: %u bytes, %d chunks\n", - str, n_alloc, n_chunks); -} - /* Print debugging information about tree nodes generated during the compile, and any language-specific information. */ void -dump_tree_statistics () +dump_tree_statistics (void) { #ifdef GATHER_STATISTICS int i; @@ -4549,98 +6880,61 @@ dump_tree_statistics () fprintf (stderr, "\n??? 
tree nodes created\n\n"); #ifdef GATHER_STATISTICS - fprintf (stderr, "Kind Nodes Bytes\n"); - fprintf (stderr, "-------------------------------------\n"); + fprintf (stderr, "Kind Nodes Bytes\n"); + fprintf (stderr, "---------------------------------------\n"); total_nodes = total_bytes = 0; for (i = 0; i < (int) all_kinds; i++) { - fprintf (stderr, "%-20s %6d %9d\n", tree_node_kind_names[i], + fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i], tree_node_counts[i], tree_node_sizes[i]); total_nodes += tree_node_counts[i]; total_bytes += tree_node_sizes[i]; } - fprintf (stderr, "-------------------------------------\n"); - fprintf (stderr, "%-20s %6d %9d\n", "Total", total_nodes, total_bytes); - fprintf (stderr, "-------------------------------------\n"); + fprintf (stderr, "---------------------------------------\n"); + fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes); + fprintf (stderr, "---------------------------------------\n"); + ssanames_print_statistics (); + phinodes_print_statistics (); #else fprintf (stderr, "(No per-node statistics)\n"); #endif - print_obstack_statistics ("permanent_obstack", &permanent_obstack); print_type_hash_statistics (); - (*lang_hooks.print_statistics) (); + print_debug_expr_statistics (); + print_value_expr_statistics (); + print_restrict_base_statistics (); + lang_hooks.print_statistics (); } -#define FILE_FUNCTION_PREFIX_LEN 9 - #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s" -/* Appends 6 random characters to TEMPLATE to (hopefully) avoid name - clashes in cases where we can't reliably choose a unique name. - - Derived from mkstemp.c in libiberty. */ +/* Generate a crc32 of a string. */ -static void -append_random_chars (template) - char *template; +unsigned +crc32_string (unsigned chksum, const char *string) { - static const char letters[] - = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - static unsigned HOST_WIDE_INT value; - unsigned HOST_WIDE_INT v; - - if (! value) + do { - struct stat st; + unsigned value = *string << 24; + unsigned ix; - /* VALUE should be unique for each file and must not change between - compiles since this can cause bootstrap comparison errors. */ + for (ix = 8; ix--; value <<= 1) + { + unsigned feedback; - if (stat (main_input_filename, &st) < 0) - { - /* This can happen when preprocessed text is shipped between - machines, e.g. with bug reports. Assume that uniqueness - isn't actually an issue. */ - value = 1; - } - else - { - /* In VMS, ino is an array, so we have to use both values. We - conditionalize that. */ -#ifdef VMS -#define INO_TO_INT(INO) ((int) (INO)[1] << 16 ^ (int) (INO)[2]) -#else -#define INO_TO_INT(INO) INO -#endif - value = st.st_dev ^ INO_TO_INT (st.st_ino) ^ st.st_mtime; - } + feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0; + chksum <<= 1; + chksum ^= feedback; + } } - - template += strlen (template); - - v = value; - - /* Fill in the random bits. */ - template[0] = letters[v % 62]; - v /= 62; - template[1] = letters[v % 62]; - v /= 62; - template[2] = letters[v % 62]; - v /= 62; - template[3] = letters[v % 62]; - v /= 62; - template[4] = letters[v % 62]; - v /= 62; - template[5] = letters[v % 62]; - - template[6] = '\0'; + while (*string++); + return chksum; } /* P is a string that will be used in a symbol. Mask out any characters that are not valid in that context. */ void -clean_symbol_name (p) - char *p; +clean_symbol_name (char *p) { for (; *p; p++) if (! 
(ISALNUM (*p) @@ -4653,26 +6947,50 @@ clean_symbol_name (p) )) *p = '_'; } - -/* Generate a name for a function unique to this translation unit. + +/* Generate a name for a special-purpose function function. + The generated name may need to be unique across the whole link. TYPE is some string to identify the purpose of this function to the - linker or collect2. */ + linker or collect2; it must start with an uppercase letter, + one of: + I - for constructors + D - for destructors + N - for C++ anonymous namespaces + F - for DWARF unwind frame information. */ tree -get_file_function_name_long (type) - const char *type; +get_file_function_name (const char *type) { char *buf; const char *p; char *q; + /* If we already have a name we know to be unique, just use that. */ if (first_global_object_name) - p = first_global_object_name; + p = q = ASTRDUP (first_global_object_name); + /* If the target is handling the constructors/destructors, they + will be local to this file and the name is only necessary for + debugging purposes. */ + else if ((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors) + { + const char *file = main_input_filename; + if (! file) + file = input_filename; + /* Just use the file's basename, because the full pathname + might be quite long. */ + p = strrchr (file, '/'); + if (p) + p++; + else + p = file; + p = q = ASTRDUP (p); + } else { - /* We don't have anything that we know to be unique to this translation + /* Otherwise, the name must be unique across the entire link. + We don't have anything that we know to be unique to this translation unit, so use what we do have and throw in some randomness. */ - + unsigned len; const char *name = weak_global_object_name; const char *file = main_input_filename; @@ -4681,13 +6999,17 @@ get_file_function_name_long (type) if (! file) file = input_filename; - q = (char *) alloca (7 + strlen (name) + strlen (file)); + len = strlen (file); + q = (char *) alloca (9 * 2 + len + 1); + memcpy (q, file, len + 1); + + sprintf (q + len, "_%08X_%08X", crc32_string (0, name), + crc32_string (0, get_random_seed (false))); - sprintf (q, "%s%s", name, file); - append_random_chars (q); p = q; } + clean_symbol_name (q); buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p) + strlen (type)); @@ -4697,175 +7019,307 @@ get_file_function_name_long (type) constraints). */ sprintf (buf, FILE_FUNCTION_FORMAT, type, p); - /* Don't need to pull weird characters out of global names. */ - if (p != first_global_object_name) - clean_symbol_name (buf + 11); - return get_identifier (buf); } + +#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007) -/* If KIND=='I', return a suitable global initializer (constructor) name. - If KIND=='D', return a suitable global clean-up (destructor) name. */ - -tree -get_file_function_name (kind) - int kind; -{ - char p[2]; +/* Complain that the tree code of NODE does not match the expected 0 + terminated list of trailing codes. The trailing code list can be + empty, for a more vague error message. FILE, LINE, and FUNCTION + are of the caller. */ - p[0] = kind; - p[1] = 0; +void +tree_check_failed (const_tree node, const char *file, + int line, const char *function, ...) 
+{ + va_list args; + const char *buffer; + unsigned length = 0; + int code; + + va_start (args, function); + while ((code = va_arg (args, int))) + length += 4 + strlen (tree_code_name[code]); + va_end (args); + if (length) + { + char *tmp; + va_start (args, function); + length += strlen ("expected "); + buffer = tmp = (char *) alloca (length); + length = 0; + while ((code = va_arg (args, int))) + { + const char *prefix = length ? " or " : "expected "; + + strcpy (tmp + length, prefix); + length += strlen (prefix); + strcpy (tmp + length, tree_code_name[code]); + length += strlen (tree_code_name[code]); + } + va_end (args); + } + else + buffer = "unexpected node"; - return get_file_function_name_long (p); + internal_error ("tree check: %s, have %s in %s, at %s:%d", + buffer, tree_code_name[TREE_CODE (node)], + function, trim_filename (file), line); } - -/* Expand (the constant part of) a SET_TYPE CONSTRUCTOR node. - The result is placed in BUFFER (which has length BIT_SIZE), - with one bit in each char ('\000' or '\001'). - If the constructor is constant, NULL_TREE is returned. - Otherwise, a TREE_LIST of the non-constant elements is emitted. */ +/* Complain that the tree code of NODE does match the expected 0 + terminated list of trailing codes. FILE, LINE, and FUNCTION are of + the caller. */ -tree -get_set_constructor_bits (init, buffer, bit_size) - tree init; - char *buffer; - int bit_size; -{ - int i; - tree vals; - HOST_WIDE_INT domain_min - = tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (init))), 0); - tree non_const_bits = NULL_TREE; - - for (i = 0; i < bit_size; i++) - buffer[i] = 0; - - for (vals = TREE_OPERAND (init, 1); - vals != NULL_TREE; vals = TREE_CHAIN (vals)) - { - if (!host_integerp (TREE_VALUE (vals), 0) - || (TREE_PURPOSE (vals) != NULL_TREE - && !host_integerp (TREE_PURPOSE (vals), 0))) - non_const_bits - = tree_cons (TREE_PURPOSE (vals), TREE_VALUE (vals), non_const_bits); - else if (TREE_PURPOSE (vals) != NULL_TREE) - { - /* Set a range of bits to ones. */ - HOST_WIDE_INT lo_index - = tree_low_cst (TREE_PURPOSE (vals), 0) - domain_min; - HOST_WIDE_INT hi_index - = tree_low_cst (TREE_VALUE (vals), 0) - domain_min; - - if (lo_index < 0 || lo_index >= bit_size - || hi_index < 0 || hi_index >= bit_size) - abort (); - for (; lo_index <= hi_index; lo_index++) - buffer[lo_index] = 1; - } - else +void +tree_not_check_failed (const_tree node, const char *file, + int line, const char *function, ...) +{ + va_list args; + char *buffer; + unsigned length = 0; + int code; + + va_start (args, function); + while ((code = va_arg (args, int))) + length += 4 + strlen (tree_code_name[code]); + va_end (args); + va_start (args, function); + buffer = (char *) alloca (length); + length = 0; + while ((code = va_arg (args, int))) + { + if (length) { - /* Set a single bit to one. */ - HOST_WIDE_INT index - = tree_low_cst (TREE_VALUE (vals), 0) - domain_min; - if (index < 0 || index >= bit_size) - { - error ("invalid initializer for bit string"); - return NULL_TREE; - } - buffer[index] = 1; + strcpy (buffer + length, " or "); + length += 4; } + strcpy (buffer + length, tree_code_name[code]); + length += strlen (tree_code_name[code]); } - return non_const_bits; + va_end (args); + + internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d", + buffer, tree_code_name[TREE_CODE (node)], + function, trim_filename (file), line); } -/* Expand (the constant part of) a SET_TYPE CONSTRUCTOR node. - The result is placed in BUFFER (which is an array of bytes). 
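/* Editor's note (standalone sketch, not part of the patch): the
   checking-failure reporters above all use the same two-pass va_list
   pattern -- a first pass only measures the " or "-separated message,
   a second pass fills a buffer of exactly that size (alloca in the real
   code, a variable-length array here).  */

#include <stdarg.h>
#include <stdio.h>
#include <string.h>

/* Usage: report_expected ("integer_cst", "real_cst", (const char *) 0); */
static void
report_expected (const char *first, ...)
{
  va_list ap;
  size_t length = sizeof "expected ";
  const char *name;
  int need_or = 0;

  /* Pass 1: measure.  */
  va_start (ap, first);
  for (name = first; name; name = va_arg (ap, const char *))
    length += 4 + strlen (name);
  va_end (ap);

  {
    char buffer[length];        /* stands in for alloca (length) */

    strcpy (buffer, "expected ");

    /* Pass 2: fill in "expected A or B or C".  */
    va_start (ap, first);
    for (name = first; name; name = va_arg (ap, const char *))
      {
        if (need_or)
          strcat (buffer, " or ");
        strcat (buffer, name);
        need_or = 1;
      }
    va_end (ap);

    fprintf (stderr, "%s\n", buffer);
  }
}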
- If the constructor is constant, NULL_TREE is returned. - Otherwise, a TREE_LIST of the non-constant elements is emitted. */ +/* Similar to tree_check_failed, except that we check for a class of tree + code, given in CL. */ -tree -get_set_constructor_bytes (init, buffer, wd_size) - tree init; - unsigned char *buffer; - int wd_size; +void +tree_class_check_failed (const_tree node, const enum tree_code_class cl, + const char *file, int line, const char *function) { - int i; - int set_word_size = BITS_PER_UNIT; - int bit_size = wd_size * set_word_size; - int bit_pos = 0; - unsigned char *bytep = buffer; - char *bit_buffer = (char *) alloca (bit_size); - tree non_const_bits = get_set_constructor_bits (init, bit_buffer, bit_size); + internal_error + ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d", + TREE_CODE_CLASS_STRING (cl), + TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), + tree_code_name[TREE_CODE (node)], function, trim_filename (file), line); +} + +/* Similar to tree_check_failed, except that instead of specifying a + dozen codes, use the knowledge that they're all sequential. */ + +void +tree_range_check_failed (const_tree node, const char *file, int line, + const char *function, enum tree_code c1, + enum tree_code c2) +{ + char *buffer; + unsigned length = 0; + enum tree_code c; - for (i = 0; i < wd_size; i++) - buffer[i] = 0; + for (c = c1; c <= c2; ++c) + length += 4 + strlen (tree_code_name[c]); - for (i = 0; i < bit_size; i++) + length += strlen ("expected "); + buffer = (char *) alloca (length); + length = 0; + + for (c = c1; c <= c2; ++c) { - if (bit_buffer[i]) - { - if (BYTES_BIG_ENDIAN) - *bytep |= (1 << (set_word_size - 1 - bit_pos)); - else - *bytep |= 1 << bit_pos; - } - bit_pos++; - if (bit_pos >= set_word_size) - bit_pos = 0, bytep++; + const char *prefix = length ? " or " : "expected "; + + strcpy (buffer + length, prefix); + length += strlen (prefix); + strcpy (buffer + length, tree_code_name[c]); + length += strlen (tree_code_name[c]); } - return non_const_bits; + + internal_error ("tree check: %s, have %s in %s, at %s:%d", + buffer, tree_code_name[TREE_CODE (node)], + function, trim_filename (file), line); } - -#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007) -/* Complain that the tree code of NODE does not match the expected CODE. - FILE, LINE, and FUNCTION are of the caller. */ + + +/* Similar to tree_check_failed, except that we check that a tree does + not have the specified code, given in CL. */ + +void +tree_not_class_check_failed (const_tree node, const enum tree_code_class cl, + const char *file, int line, const char *function) +{ + internal_error + ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d", + TREE_CODE_CLASS_STRING (cl), + TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))), + tree_code_name[TREE_CODE (node)], function, trim_filename (file), line); +} + + +/* Similar to tree_check_failed but applied to OMP_CLAUSE codes. 
*/ void -tree_check_failed (node, code, file, line, function) - const tree node; - enum tree_code code; - const char *file; - int line; - const char *function; -{ - internal_error ("tree check: expected %s, have %s in %s, at %s:%d", - tree_code_name[code], tree_code_name[TREE_CODE (node)], +omp_clause_check_failed (const_tree node, const char *file, int line, + const char *function, enum omp_clause_code code) +{ + internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d", + omp_clause_code_name[code], tree_code_name[TREE_CODE (node)], function, trim_filename (file), line); } -/* Similar to above, except that we check for a class of tree - code, given in CL. */ + +/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */ + +void +omp_clause_range_check_failed (const_tree node, const char *file, int line, + const char *function, enum omp_clause_code c1, + enum omp_clause_code c2) +{ + char *buffer; + unsigned length = 0; + enum omp_clause_code c; + + for (c = c1; c <= c2; ++c) + length += 4 + strlen (omp_clause_code_name[c]); + + length += strlen ("expected "); + buffer = (char *) alloca (length); + length = 0; + + for (c = c1; c <= c2; ++c) + { + const char *prefix = length ? " or " : "expected "; + + strcpy (buffer + length, prefix); + length += strlen (prefix); + strcpy (buffer + length, omp_clause_code_name[c]); + length += strlen (omp_clause_code_name[c]); + } + + internal_error ("tree check: %s, have %s in %s, at %s:%d", + buffer, omp_clause_code_name[TREE_CODE (node)], + function, trim_filename (file), line); +} + + +#undef DEFTREESTRUCT +#define DEFTREESTRUCT(VAL, NAME) NAME, + +static const char *ts_enum_names[] = { +#include "treestruct.def" +}; +#undef DEFTREESTRUCT + +#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)]) + +/* Similar to tree_class_check_failed, except that we check for + whether CODE contains the tree structure identified by EN. */ void -tree_class_check_failed (node, cl, file, line, function) - const tree node; - int cl; - const char *file; - int line; - const char *function; +tree_contains_struct_check_failed (const_tree node, + const enum tree_node_structure_enum en, + const char *file, int line, + const char *function) { internal_error - ("tree check: expected class '%c', have '%c' (%s) in %s, at %s:%d", - cl, TREE_CODE_CLASS (TREE_CODE (node)), + ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d", + TS_ENUM_NAME(en), tree_code_name[TREE_CODE (node)], function, trim_filename (file), line); } + +/* Similar to above, except that the check is for the bounds of a TREE_VEC's + (dynamically sized) vector. */ + +void +tree_vec_elt_check_failed (int idx, int len, const char *file, int line, + const char *function) +{ + internal_error + ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d", + idx + 1, len, function, trim_filename (file), line); +} + +/* Similar to above, except that the check is for the bounds of the operand + vector of an expression node EXP. */ + +void +tree_operand_check_failed (int idx, const_tree exp, const char *file, + int line, const char *function) +{ + int code = TREE_CODE (exp); + internal_error + ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d", + idx + 1, tree_code_name[code], TREE_OPERAND_LENGTH (exp), + function, trim_filename (file), line); +} + +/* Similar to above, except that the check is for the number of + operands of an OMP_CLAUSE node. 
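The DEFTREESTRUCT block above is the classic X-macro idiom: the same treestruct.def list can be expanded with one definition of DEFTREESTRUCT to build an enum and, as here, with another definition to build the parallel ts_enum_names table, so the two can never drift apart. A self-contained sketch of the idiom follows, with an invented three-entry list macro standing in for the #include of treestruct.def.

#include <stdio.h>

#define TREE_STRUCTS(X)     \
  X (TS_BASE,    "base")    \
  X (TS_COMMON,  "common")  \
  X (TS_DECL,    "decl")    /* illustrative subset, not the real list */

#define DEFTREESTRUCT_ENUM(VAL, NAME) VAL,
enum tree_struct { TREE_STRUCTS (DEFTREESTRUCT_ENUM) TS_LAST };
#undef DEFTREESTRUCT_ENUM

#define DEFTREESTRUCT_NAME(VAL, NAME) NAME,
static const char *ts_names[] = { TREE_STRUCTS (DEFTREESTRUCT_NAME) };
#undef DEFTREESTRUCT_NAME

int main (void)
{
  for (int i = 0; i < TS_LAST; i++)      /* enum and table stay in sync */
    printf ("%d -> %s\n", i, ts_names[i]);
  return 0;
}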
*/ + +void +omp_clause_operand_check_failed (int idx, const_tree t, const char *file, + int line, const char *function) +{ + internal_error + ("tree check: accessed operand %d of omp_clause %s with %d operands " + "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)], + omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function, + trim_filename (file), line); +} #endif /* ENABLE_TREE_CHECKING */ -/* For a new vector type node T, build the information necessary for - debuggint output. */ +/* Create a new vector type node holding SUBPARTS units of type INNERTYPE, + and mapped to the machine mode MODE. Initialize its fields and build + the information necessary for debugging output. */ -static void -finish_vector_type (t) - tree t; +static tree +make_vector_type (tree innertype, int nunits, enum machine_mode mode) { + tree t; + hashval_t hashcode = 0; + + /* Build a main variant, based on the main variant of the inner type, then + use it to build the variant we return. */ + if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype)) + && TYPE_MAIN_VARIANT (innertype) != innertype) + return build_type_attribute_qual_variant ( + make_vector_type (TYPE_MAIN_VARIANT (innertype), nunits, mode), + TYPE_ATTRIBUTES (innertype), + TYPE_QUALS (innertype)); + + t = make_node (VECTOR_TYPE); + TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype); + SET_TYPE_VECTOR_SUBPARTS (t, nunits); + SET_TYPE_MODE (t, mode); + TYPE_READONLY (t) = TYPE_READONLY (innertype); + TYPE_VOLATILE (t) = TYPE_VOLATILE (innertype); + + if (TYPE_STRUCTURAL_EQUALITY_P (innertype)) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (innertype) != innertype + || mode != VOIDmode) + TYPE_CANONICAL (t) + = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode); + layout_type (t); { - tree index = build_int_2 (TYPE_VECTOR_SUBPARTS (t) - 1, 0); - tree array = build_array_type (TREE_TYPE (t), - build_index_type (index)); + tree index = build_int_cst (NULL_TREE, nunits - 1); + tree array = build_array_type (innertype, build_index_type (index)); tree rt = make_node (RECORD_TYPE); TYPE_FIELDS (rt) = build_decl (FIELD_DECL, get_identifier ("f"), array); @@ -4878,24 +7332,125 @@ finish_vector_type (t) numbers equal. */ TYPE_UID (rt) = TYPE_UID (t); } + + hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode); + hashcode = iterative_hash_host_wide_int (mode, hashcode); + hashcode = iterative_hash_object (TYPE_HASH (innertype), hashcode); + return type_hash_canon (hashcode, t); } -/* Create nodes for all integer types (and error_mark_node) using the sizes - of C datatypes. The caller should call set_sizetype soon after calling - this function to select one of the types as sizetype. */ +static tree +make_or_reuse_type (unsigned size, int unsignedp) +{ + if (size == INT_TYPE_SIZE) + return unsignedp ? unsigned_type_node : integer_type_node; + if (size == CHAR_TYPE_SIZE) + return unsignedp ? unsigned_char_type_node : signed_char_type_node; + if (size == SHORT_TYPE_SIZE) + return unsignedp ? short_unsigned_type_node : short_integer_type_node; + if (size == LONG_TYPE_SIZE) + return unsignedp ? long_unsigned_type_node : long_integer_type_node; + if (size == LONG_LONG_TYPE_SIZE) + return (unsignedp ? long_long_unsigned_type_node + : long_long_integer_type_node); + + if (unsignedp) + return make_unsigned_type (size); + else + return make_signed_type (size); +} -void -build_common_tree_nodes (signed_char) - int signed_char; -{ +/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. 
*/ + +static tree +make_or_reuse_fract_type (unsigned size, int unsignedp, int satp) +{ + if (satp) + { + if (size == SHORT_FRACT_TYPE_SIZE) + return unsignedp ? sat_unsigned_short_fract_type_node + : sat_short_fract_type_node; + if (size == FRACT_TYPE_SIZE) + return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node; + if (size == LONG_FRACT_TYPE_SIZE) + return unsignedp ? sat_unsigned_long_fract_type_node + : sat_long_fract_type_node; + if (size == LONG_LONG_FRACT_TYPE_SIZE) + return unsignedp ? sat_unsigned_long_long_fract_type_node + : sat_long_long_fract_type_node; + } + else + { + if (size == SHORT_FRACT_TYPE_SIZE) + return unsignedp ? unsigned_short_fract_type_node + : short_fract_type_node; + if (size == FRACT_TYPE_SIZE) + return unsignedp ? unsigned_fract_type_node : fract_type_node; + if (size == LONG_FRACT_TYPE_SIZE) + return unsignedp ? unsigned_long_fract_type_node + : long_fract_type_node; + if (size == LONG_LONG_FRACT_TYPE_SIZE) + return unsignedp ? unsigned_long_long_fract_type_node + : long_long_fract_type_node; + } + + return make_fract_type (size, unsignedp, satp); +} + +/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */ + +static tree +make_or_reuse_accum_type (unsigned size, int unsignedp, int satp) +{ + if (satp) + { + if (size == SHORT_ACCUM_TYPE_SIZE) + return unsignedp ? sat_unsigned_short_accum_type_node + : sat_short_accum_type_node; + if (size == ACCUM_TYPE_SIZE) + return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node; + if (size == LONG_ACCUM_TYPE_SIZE) + return unsignedp ? sat_unsigned_long_accum_type_node + : sat_long_accum_type_node; + if (size == LONG_LONG_ACCUM_TYPE_SIZE) + return unsignedp ? sat_unsigned_long_long_accum_type_node + : sat_long_long_accum_type_node; + } + else + { + if (size == SHORT_ACCUM_TYPE_SIZE) + return unsignedp ? unsigned_short_accum_type_node + : short_accum_type_node; + if (size == ACCUM_TYPE_SIZE) + return unsignedp ? unsigned_accum_type_node : accum_type_node; + if (size == LONG_ACCUM_TYPE_SIZE) + return unsignedp ? unsigned_long_accum_type_node + : long_accum_type_node; + if (size == LONG_LONG_ACCUM_TYPE_SIZE) + return unsignedp ? unsigned_long_long_accum_type_node + : long_long_accum_type_node; + } + + return make_accum_type (size, unsignedp, satp); +} + +/* Create nodes for all integer types (and error_mark_node) using the sizes + of C datatypes. The caller should call set_sizetype soon after calling + this function to select one of the types as sizetype. */ + +void +build_common_tree_nodes (bool signed_char, bool signed_sizetype) +{ error_mark_node = make_node (ERROR_MARK); TREE_TYPE (error_mark_node) = error_mark_node; - initialize_sizetypes (); + initialize_sizetypes (signed_sizetype); /* Define both `signed char' and `unsigned char'. */ signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE); + TYPE_STRING_FLAG (signed_char_type_node) = 1; unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE); + TYPE_STRING_FLAG (unsigned_char_type_node) = 1; /* Define `char', which is like either `signed char' or `unsigned char' but not the same as either. */ @@ -4903,6 +7458,7 @@ build_common_tree_nodes (signed_char) = (signed_char ? 
make_signed_type (CHAR_TYPE_SIZE) : make_unsigned_type (CHAR_TYPE_SIZE)); + TYPE_STRING_FLAG (char_type_node) = 1; short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE); short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE); @@ -4913,30 +7469,44 @@ build_common_tree_nodes (signed_char) long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE); long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE); - intQI_type_node = make_signed_type (GET_MODE_BITSIZE (QImode)); - intHI_type_node = make_signed_type (GET_MODE_BITSIZE (HImode)); - intSI_type_node = make_signed_type (GET_MODE_BITSIZE (SImode)); - intDI_type_node = make_signed_type (GET_MODE_BITSIZE (DImode)); - intTI_type_node = make_signed_type (GET_MODE_BITSIZE (TImode)); - - unsigned_intQI_type_node = make_unsigned_type (GET_MODE_BITSIZE (QImode)); - unsigned_intHI_type_node = make_unsigned_type (GET_MODE_BITSIZE (HImode)); - unsigned_intSI_type_node = make_unsigned_type (GET_MODE_BITSIZE (SImode)); - unsigned_intDI_type_node = make_unsigned_type (GET_MODE_BITSIZE (DImode)); - unsigned_intTI_type_node = make_unsigned_type (GET_MODE_BITSIZE (TImode)); + /* Define a boolean type. This type only represents boolean values but + may be larger than char depending on the value of BOOL_TYPE_SIZE. + Front ends which want to override this size (i.e. Java) can redefine + boolean_type_node before calling build_common_tree_nodes_2. */ + boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE); + TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE); + TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1); + TYPE_PRECISION (boolean_type_node) = 1; + + /* Fill in the rest of the sized types. Reuse existing type nodes + when possible. */ + intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0); + intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0); + intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0); + intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0); + intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0); + + unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1); + unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1); + unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1); + unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1); + unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1); + + access_public_node = get_identifier ("public"); + access_protected_node = get_identifier ("protected"); + access_private_node = get_identifier ("private"); } /* Call this function after calling build_common_tree_nodes and set_sizetype. It will create several other common tree nodes. */ void -build_common_tree_nodes_2 (short_double) - int short_double; +build_common_tree_nodes_2 (int short_double) { /* Define these next since types below may used them. 
*/ - integer_zero_node = build_int_2 (0, 0); - integer_one_node = build_int_2 (1, 0); - integer_minus_one_node = build_int_2 (-1, -1); + integer_zero_node = build_int_cst (NULL_TREE, 0); + integer_one_node = build_int_cst (NULL_TREE, 1); + integer_minus_one_node = build_int_cst (NULL_TREE, -1); size_zero_node = size_int (0); size_one_node = size_int (1); @@ -4944,6 +7514,9 @@ build_common_tree_nodes_2 (short_double) bitsize_one_node = bitsize_int (1); bitsize_unit_node = bitsize_int (BITS_PER_UNIT); + boolean_false_node = TYPE_MIN_VALUE (boolean_type_node); + boolean_true_node = TYPE_MAX_VALUE (boolean_type_node); + void_type_node = make_node (VOID_TYPE); layout_type (void_type_node); @@ -4952,13 +7525,13 @@ build_common_tree_nodes_2 (short_double) TYPE_ALIGN (void_type_node) = BITS_PER_UNIT; TYPE_USER_ALIGN (void_type_node) = 0; - null_pointer_node = build_int_2 (0, 0); - TREE_TYPE (null_pointer_node) = build_pointer_type (void_type_node); + null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0); layout_type (TREE_TYPE (null_pointer_node)); ptr_type_node = build_pointer_type (void_type_node); const_ptr_type_node = build_pointer_type (build_type_variant (void_type_node, 1, 0)); + fileptr_type_node = ptr_type_node; float_type_node = make_node (REAL_TYPE); TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE; @@ -4975,77 +7548,1724 @@ build_common_tree_nodes_2 (short_double) TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE; layout_type (long_double_type_node); - complex_integer_type_node = make_node (COMPLEX_TYPE); - TREE_TYPE (complex_integer_type_node) = integer_type_node; - layout_type (complex_integer_type_node); - - complex_float_type_node = make_node (COMPLEX_TYPE); - TREE_TYPE (complex_float_type_node) = float_type_node; - layout_type (complex_float_type_node); - - complex_double_type_node = make_node (COMPLEX_TYPE); - TREE_TYPE (complex_double_type_node) = double_type_node; - layout_type (complex_double_type_node); - - complex_long_double_type_node = make_node (COMPLEX_TYPE); - TREE_TYPE (complex_long_double_type_node) = long_double_type_node; - layout_type (complex_long_double_type_node); + float_ptr_type_node = build_pointer_type (float_type_node); + double_ptr_type_node = build_pointer_type (double_type_node); + long_double_ptr_type_node = build_pointer_type (long_double_type_node); + integer_ptr_type_node = build_pointer_type (integer_type_node); + + /* Fixed size integer types. */ + uint32_type_node = build_nonstandard_integer_type (32, true); + uint64_type_node = build_nonstandard_integer_type (64, true); + + /* Decimal float types. 
*/ + dfloat32_type_node = make_node (REAL_TYPE); + TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE; + layout_type (dfloat32_type_node); + SET_TYPE_MODE (dfloat32_type_node, SDmode); + dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node); + + dfloat64_type_node = make_node (REAL_TYPE); + TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE; + layout_type (dfloat64_type_node); + SET_TYPE_MODE (dfloat64_type_node, DDmode); + dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node); + + dfloat128_type_node = make_node (REAL_TYPE); + TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE; + layout_type (dfloat128_type_node); + SET_TYPE_MODE (dfloat128_type_node, TDmode); + dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node); + + complex_integer_type_node = build_complex_type (integer_type_node); + complex_float_type_node = build_complex_type (float_type_node); + complex_double_type_node = build_complex_type (double_type_node); + complex_long_double_type_node = build_complex_type (long_double_type_node); + +/* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */ +#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \ + sat_ ## KIND ## _type_node = \ + make_sat_signed_ ## KIND ## _type (SIZE); \ + sat_unsigned_ ## KIND ## _type_node = \ + make_sat_unsigned_ ## KIND ## _type (SIZE); \ + KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ + unsigned_ ## KIND ## _type_node = \ + make_unsigned_ ## KIND ## _type (SIZE); + +#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \ + sat_ ## WIDTH ## KIND ## _type_node = \ + make_sat_signed_ ## KIND ## _type (SIZE); \ + sat_unsigned_ ## WIDTH ## KIND ## _type_node = \ + make_sat_unsigned_ ## KIND ## _type (SIZE); \ + WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \ + unsigned_ ## WIDTH ## KIND ## _type_node = \ + make_unsigned_ ## KIND ## _type (SIZE); + +/* Make fixed-point type nodes based on four different widths. */ +#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \ + MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \ + MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \ + MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \ + MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE) + +/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */ +#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \ + NAME ## _type_node = \ + make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \ + u ## NAME ## _type_node = \ + make_or_reuse_unsigned_ ## KIND ## _type \ + (GET_MODE_BITSIZE (U ## MODE ## mode)); \ + sat_ ## NAME ## _type_node = \ + make_or_reuse_sat_signed_ ## KIND ## _type \ + (GET_MODE_BITSIZE (MODE ## mode)); \ + sat_u ## NAME ## _type_node = \ + make_or_reuse_sat_unsigned_ ## KIND ## _type \ + (GET_MODE_BITSIZE (U ## MODE ## mode)); + + /* Fixed-point type and mode nodes. */ + MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT) + MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM) + MAKE_FIXED_MODE_NODE (fract, qq, QQ) + MAKE_FIXED_MODE_NODE (fract, hq, HQ) + MAKE_FIXED_MODE_NODE (fract, sq, SQ) + MAKE_FIXED_MODE_NODE (fract, dq, DQ) + MAKE_FIXED_MODE_NODE (fract, tq, TQ) + MAKE_FIXED_MODE_NODE (accum, ha, HA) + MAKE_FIXED_MODE_NODE (accum, sa, SA) + MAKE_FIXED_MODE_NODE (accum, da, DA) + MAKE_FIXED_MODE_NODE (accum, ta, TA) { - tree t; - BUILD_VA_LIST_TYPE (t); + tree t = targetm.build_builtin_va_list (); - /* Many back-ends define record types without seting TYPE_NAME. 
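The MAKE_FIXED_TYPE_NODE family of macros above relies on token pasting: one invocation mints a whole signed/unsigned, saturating/non-saturating group of similarly named global nodes from a single KIND argument. Here is a standalone sketch of that pasting style with invented toy objects (struct fake_type) rather than GCC type nodes.

#include <stdio.h>

struct fake_type { const char *name; int size; };

#define MAKE_FAMILY(KIND, SIZE)                                              \
  struct fake_type KIND ## _type              = { #KIND, SIZE };             \
  struct fake_type unsigned_ ## KIND ## _type = { "unsigned " #KIND, SIZE }; \
  struct fake_type sat_ ## KIND ## _type      = { "sat " #KIND, SIZE };

MAKE_FAMILY (fract, 16)   /* defines fract_type, unsigned_fract_type, ... */
MAKE_FAMILY (accum, 32)

int main (void)
{
  printf ("%s:%d %s:%d %s:%d\n",
          fract_type.name, fract_type.size,
          unsigned_fract_type.name, unsigned_fract_type.size,
          sat_accum_type.name, sat_accum_type.size);
  return 0;
}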
+ /* Many back-ends define record types without setting TYPE_NAME. If we copied the record type here, we'd keep the original record type without a name. This breaks name mangling. So, don't copy record types and let c_common_nodes_and_builtins() declare the type to be __builtin_va_list. */ if (TREE_CODE (t) != RECORD_TYPE) - t = build_type_copy (t); - + t = build_variant_type_copy (t); + va_list_type_node = t; } +} - unsigned_V4SI_type_node - = make_vector (V4SImode, unsigned_intSI_type_node, 1); - unsigned_V2SI_type_node - = make_vector (V2SImode, unsigned_intSI_type_node, 1); - unsigned_V4HI_type_node - = make_vector (V4HImode, unsigned_intHI_type_node, 1); - unsigned_V8QI_type_node - = make_vector (V8QImode, unsigned_intQI_type_node, 1); - unsigned_V8HI_type_node - = make_vector (V8HImode, unsigned_intHI_type_node, 1); - unsigned_V16QI_type_node - = make_vector (V16QImode, unsigned_intQI_type_node, 1); - - V16SF_type_node = make_vector (V16SFmode, float_type_node, 0); - V4SF_type_node = make_vector (V4SFmode, float_type_node, 0); - V4SI_type_node = make_vector (V4SImode, intSI_type_node, 0); - V2SI_type_node = make_vector (V2SImode, intSI_type_node, 0); - V4HI_type_node = make_vector (V4HImode, intHI_type_node, 0); - V8QI_type_node = make_vector (V8QImode, intQI_type_node, 0); - V8HI_type_node = make_vector (V8HImode, intHI_type_node, 0); - V2SF_type_node = make_vector (V2SFmode, float_type_node, 0); - V16QI_type_node = make_vector (V16QImode, intQI_type_node, 0); -} - -/* Returns a vector tree node given a vector mode, the inner type, and - the signness. */ +/* A subroutine of build_common_builtin_nodes. Define a builtin function. */ -static tree -make_vector (mode, innertype, unsignedp) - enum machine_mode mode; - tree innertype; - int unsignedp; +static void +local_define_builtin (const char *name, tree type, enum built_in_function code, + const char *library_name, int ecf_flags) +{ + tree decl; + + decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL, + library_name, NULL_TREE); + if (ecf_flags & ECF_CONST) + TREE_READONLY (decl) = 1; + if (ecf_flags & ECF_PURE) + DECL_PURE_P (decl) = 1; + if (ecf_flags & ECF_LOOPING_CONST_OR_PURE) + DECL_LOOPING_CONST_OR_PURE_P (decl) = 1; + if (ecf_flags & ECF_NORETURN) + TREE_THIS_VOLATILE (decl) = 1; + if (ecf_flags & ECF_NOTHROW) + TREE_NOTHROW (decl) = 1; + if (ecf_flags & ECF_MALLOC) + DECL_IS_MALLOC (decl) = 1; + + built_in_decls[code] = decl; + implicit_built_in_decls[code] = decl; +} + +/* Call this function after instantiating all builtins that the language + front end cares about. This will build the rest of the builtins that + are relied upon by the tree optimizers and the middle-end. 
*/ + +void +build_common_builtin_nodes (void) +{ + tree tmp, ftype; + + if (built_in_decls[BUILT_IN_MEMCPY] == NULL + || built_in_decls[BUILT_IN_MEMMOVE] == NULL) + { + tmp = tree_cons (NULL_TREE, size_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + ftype = build_function_type (ptr_type_node, tmp); + + if (built_in_decls[BUILT_IN_MEMCPY] == NULL) + local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY, + "memcpy", ECF_NOTHROW); + if (built_in_decls[BUILT_IN_MEMMOVE] == NULL) + local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE, + "memmove", ECF_NOTHROW); + } + + if (built_in_decls[BUILT_IN_MEMCMP] == NULL) + { + tmp = tree_cons (NULL_TREE, size_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp); + tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp); + ftype = build_function_type (integer_type_node, tmp); + local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP, + "memcmp", ECF_PURE | ECF_NOTHROW); + } + + if (built_in_decls[BUILT_IN_MEMSET] == NULL) + { + tmp = tree_cons (NULL_TREE, size_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, integer_type_node, tmp); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + ftype = build_function_type (ptr_type_node, tmp); + local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET, + "memset", ECF_NOTHROW); + } + + if (built_in_decls[BUILT_IN_ALLOCA] == NULL) + { + tmp = tree_cons (NULL_TREE, size_type_node, void_list_node); + ftype = build_function_type (ptr_type_node, tmp); + local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA, + "alloca", ECF_NOTHROW | ECF_MALLOC); + } + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + ftype = build_function_type (void_type_node, tmp); + local_define_builtin ("__builtin_init_trampoline", ftype, + BUILT_IN_INIT_TRAMPOLINE, + "__builtin_init_trampoline", ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + ftype = build_function_type (ptr_type_node, tmp); + local_define_builtin ("__builtin_adjust_trampoline", ftype, + BUILT_IN_ADJUST_TRAMPOLINE, + "__builtin_adjust_trampoline", + ECF_CONST | ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + ftype = build_function_type (void_type_node, tmp); + local_define_builtin ("__builtin_nonlocal_goto", ftype, + BUILT_IN_NONLOCAL_GOTO, + "__builtin_nonlocal_goto", + ECF_NORETURN | ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + tmp = tree_cons (NULL_TREE, ptr_type_node, tmp); + ftype = build_function_type (void_type_node, tmp); + local_define_builtin ("__builtin_setjmp_setup", ftype, + BUILT_IN_SETJMP_SETUP, + "__builtin_setjmp_setup", ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + ftype = build_function_type (ptr_type_node, tmp); + local_define_builtin ("__builtin_setjmp_dispatcher", ftype, + BUILT_IN_SETJMP_DISPATCHER, + "__builtin_setjmp_dispatcher", + ECF_PURE | ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + ftype = build_function_type (void_type_node, tmp); + local_define_builtin ("__builtin_setjmp_receiver", ftype, + BUILT_IN_SETJMP_RECEIVER, + "__builtin_setjmp_receiver", ECF_NOTHROW); + + ftype = build_function_type (ptr_type_node, void_list_node); + 
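In build_common_builtin_nodes above, each builtin's argument list is a chain of tree_cons cells ending in void_list_node, so the calls push parameter types in reverse: for __builtin_memcpy, size_type_node is consed first and the destination pointer last, leaving the list in declaration order. A toy cons-list sketch of that construction, with plain strings and an invented cons struct instead of TREE_LIST nodes:

#include <stdio.h>
#include <stdlib.h>

struct cons { const char *value; struct cons *chain; };

static struct cons *cons (const char *value, struct cons *chain)
{
  struct cons *c = malloc (sizeof *c);
  c->value = value;
  c->chain = chain;
  return c;
}

int main (void)
{
  /* memcpy's parameters, built back to front like the tree_cons calls.  */
  struct cons *args = cons ("void", NULL);        /* void_list_node sentinel */
  args = cons ("size_t", args);
  args = cons ("const void *", args);
  args = cons ("void *", args);

  for (struct cons *c = args; c; c = c->chain)    /* prints in declared order */
    printf ("%s\n", c->value);
  return 0;
}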
local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE, + "__builtin_stack_save", ECF_NOTHROW); + + tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node); + ftype = build_function_type (void_type_node, tmp); + local_define_builtin ("__builtin_stack_restore", ftype, + BUILT_IN_STACK_RESTORE, + "__builtin_stack_restore", ECF_NOTHROW); + + ftype = build_function_type (void_type_node, void_list_node); + local_define_builtin ("__builtin_profile_func_enter", ftype, + BUILT_IN_PROFILE_FUNC_ENTER, "profile_func_enter", 0); + local_define_builtin ("__builtin_profile_func_exit", ftype, + BUILT_IN_PROFILE_FUNC_EXIT, "profile_func_exit", 0); + + /* Complex multiplication and division. These are handled as builtins + rather than optabs because emit_library_call_value doesn't support + complex. Further, we can do slightly better with folding these + beasties if the real and complex parts of the arguments are separate. */ + { + enum machine_mode mode; + + for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode) + { + char mode_name_buf[4], *q; + const char *p; + enum built_in_function mcode, dcode; + tree type, inner_type; + + type = lang_hooks.types.type_for_mode (mode, 0); + if (type == NULL) + continue; + inner_type = TREE_TYPE (type); + + tmp = tree_cons (NULL_TREE, inner_type, void_list_node); + tmp = tree_cons (NULL_TREE, inner_type, tmp); + tmp = tree_cons (NULL_TREE, inner_type, tmp); + tmp = tree_cons (NULL_TREE, inner_type, tmp); + ftype = build_function_type (type, tmp); + + mcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT; + dcode = BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT; + + for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++) + *q = TOLOWER (*p); + *q = '\0'; + + built_in_names[mcode] = concat ("__mul", mode_name_buf, "3", NULL); + local_define_builtin (built_in_names[mcode], ftype, mcode, + built_in_names[mcode], ECF_CONST | ECF_NOTHROW); + + built_in_names[dcode] = concat ("__div", mode_name_buf, "3", NULL); + local_define_builtin (built_in_names[dcode], ftype, dcode, + built_in_names[dcode], ECF_CONST | ECF_NOTHROW); + } + } +} + +/* HACK. GROSS. This is absolutely disgusting. I wish there was a + better way. + + If we requested a pointer to a vector, build up the pointers that + we stripped off while looking for the inner type. Similarly for + return values from functions. + + The argument TYPE is the top of the chain, and BOTTOM is the + new type which we will point to. 
*/ + +tree +reconstruct_complex_type (tree type, tree bottom) +{ + tree inner, outer; + + if (TREE_CODE (type) == POINTER_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + outer = build_pointer_type_for_mode (inner, TYPE_MODE (type), + TYPE_REF_CAN_ALIAS_ALL (type)); + } + else if (TREE_CODE (type) == REFERENCE_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + outer = build_reference_type_for_mode (inner, TYPE_MODE (type), + TYPE_REF_CAN_ALIAS_ALL (type)); + } + else if (TREE_CODE (type) == ARRAY_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + outer = build_array_type (inner, TYPE_DOMAIN (type)); + } + else if (TREE_CODE (type) == FUNCTION_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + outer = build_function_type (inner, TYPE_ARG_TYPES (type)); + } + else if (TREE_CODE (type) == METHOD_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + /* The build_method_type_directly() routine prepends 'this' to argument list, + so we must compensate by getting rid of it. */ + outer + = build_method_type_directly + (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))), + inner, + TREE_CHAIN (TYPE_ARG_TYPES (type))); + } + else if (TREE_CODE (type) == OFFSET_TYPE) + { + inner = reconstruct_complex_type (TREE_TYPE (type), bottom); + outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner); + } + else + return bottom; + + return build_qualified_type (outer, TYPE_QUALS (type)); +} + +/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and + the inner type. */ +tree +build_vector_type_for_mode (tree innertype, enum machine_mode mode) +{ + int nunits; + + switch (GET_MODE_CLASS (mode)) + { + case MODE_VECTOR_INT: + case MODE_VECTOR_FLOAT: + case MODE_VECTOR_FRACT: + case MODE_VECTOR_UFRACT: + case MODE_VECTOR_ACCUM: + case MODE_VECTOR_UACCUM: + nunits = GET_MODE_NUNITS (mode); + break; + + case MODE_INT: + /* Check that there are no leftover bits. */ + gcc_assert (GET_MODE_BITSIZE (mode) + % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0); + + nunits = GET_MODE_BITSIZE (mode) + / TREE_INT_CST_LOW (TYPE_SIZE (innertype)); + break; + + default: + gcc_unreachable (); + } + + return make_vector_type (innertype, nunits, mode); +} + +/* Similarly, but takes the inner type and number of units, which must be + a power of two. */ + +tree +build_vector_type (tree innertype, int nunits) +{ + return make_vector_type (innertype, nunits, VOIDmode); +} + + +/* Build RESX_EXPR with given REGION_NUMBER. */ +tree +build_resx (int region_number) { tree t; + t = build1 (RESX_EXPR, void_type_node, + build_int_cst (NULL_TREE, region_number)); + return t; +} - t = make_node (VECTOR_TYPE); - TREE_TYPE (t) = innertype; - TYPE_MODE (t) = mode; - TREE_UNSIGNED (TREE_TYPE (t)) = unsignedp; - finish_vector_type (t); +/* Given an initializer INIT, return TRUE if INIT is zero or some + aggregate of zeros. Otherwise return FALSE. */ +bool +initializer_zerop (const_tree init) +{ + tree elt; + + STRIP_NOPS (init); + + switch (TREE_CODE (init)) + { + case INTEGER_CST: + return integer_zerop (init); + + case REAL_CST: + /* ??? Note that this is not correct for C4X float formats. There, + a bit pattern of all zeros is 1.0; 0.0 is encoded with the most + negative exponent. */ + return real_zerop (init) + && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)); + + case FIXED_CST: + return fixed_zerop (init); + + case COMPLEX_CST: + return integer_zerop (init) + || (real_zerop (init) + && ! 
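reconstruct_complex_type above walks down through pointer, reference, array, function, method and offset wrappers, then rebuilds the same wrapper chain around the new BOTTOM type on the way back up. The toy sketch below shows only that recursion shape, using invented pointer/array wrappers rather than GCC's tree types, modes or qualifiers.

#include <stdio.h>
#include <stdlib.h>

enum kind { SCALAR, POINTER, ARRAY };

struct type
{
  enum kind kind;
  const char *name;        /* only meaningful for SCALAR */
  struct type *inner;      /* pointee or element type */
};

static struct type *make (enum kind k, const char *name, struct type *inner)
{
  struct type *t = malloc (sizeof *t);
  t->kind = k; t->name = name; t->inner = inner;
  return t;
}

static struct type *reconstruct (struct type *type, struct type *bottom)
{
  if (type->kind == POINTER)
    return make (POINTER, NULL, reconstruct (type->inner, bottom));
  if (type->kind == ARRAY)
    return make (ARRAY, NULL, reconstruct (type->inner, bottom));
  return bottom;                         /* reached the innermost type */
}

static void print (const struct type *t)
{
  if (t->kind == POINTER)    { printf ("pointer to "); print (t->inner); }
  else if (t->kind == ARRAY) { printf ("array of ");   print (t->inner); }
  else                       printf ("%s\n", t->name);
}

int main (void)
{
  struct type *f = make (SCALAR, "float", NULL);
  struct type *v4f = make (SCALAR, "v4float", NULL);
  struct type *p_arr_f = make (POINTER, NULL, make (ARRAY, NULL, f));

  print (p_arr_f);                       /* pointer to array of float */
  print (reconstruct (p_arr_f, v4f));    /* pointer to array of v4float */
  return 0;
}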
REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init))) + && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))); + + case VECTOR_CST: + for (elt = TREE_VECTOR_CST_ELTS (init); elt; elt = TREE_CHAIN (elt)) + if (!initializer_zerop (TREE_VALUE (elt))) + return false; + return true; + + case CONSTRUCTOR: + { + unsigned HOST_WIDE_INT idx; + + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt) + if (!initializer_zerop (elt)) + return false; + return true; + } + + default: + return false; + } +} + +/* Build an empty statement. */ + +tree +build_empty_stmt (void) +{ + return build1 (NOP_EXPR, void_type_node, size_zero_node); +} + + +/* Build an OpenMP clause with code CODE. */ + +tree +build_omp_clause (enum omp_clause_code code) +{ + tree t; + int size, length; + + length = omp_clause_num_ops[code]; + size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree)); + + t = GGC_NEWVAR (union tree_node, size); + memset (t, 0, size); + TREE_SET_CODE (t, OMP_CLAUSE); + OMP_CLAUSE_SET_CODE (t, code); + +#ifdef GATHER_STATISTICS + tree_node_counts[(int) omp_clause_kind]++; + tree_node_sizes[(int) omp_clause_kind] += size; +#endif + + return t; +} + +/* Set various status flags when building a CALL_EXPR object T. */ + +static void +process_call_operands (tree t) +{ + bool side_effects; + + side_effects = TREE_SIDE_EFFECTS (t); + if (!side_effects) + { + int i, n; + n = TREE_OPERAND_LENGTH (t); + for (i = 1; i < n; i++) + { + tree op = TREE_OPERAND (t, i); + if (op && TREE_SIDE_EFFECTS (op)) + { + side_effects = 1; + break; + } + } + } + if (!side_effects) + { + int i; + + /* Calls have side-effects, except those to const or + pure functions. */ + i = call_expr_flags (t); + if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE))) + side_effects = 1; + } + TREE_SIDE_EFFECTS (t) = side_effects; +} + +/* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN + includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1. + Except for the CODE and operand count field, other storage for the + object is initialized to zeros. */ + +tree +build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL) +{ + tree t; + int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp); + + gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp); + gcc_assert (len >= 1); + +#ifdef GATHER_STATISTICS + tree_node_counts[(int) e_kind]++; + tree_node_sizes[(int) e_kind] += length; +#endif + + t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone); + + memset (t, 0, length); + + TREE_SET_CODE (t, code); + + /* Can't use TREE_OPERAND to store the length because if checking is + enabled, it will try to check the length before we store it. :-P */ + t->exp.operands[0] = build_int_cst (sizetype, len); + + return t; +} + + +/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE + and FN and a null static chain slot. ARGLIST is a TREE_LIST of the + arguments. */ + +tree +build_call_list (tree return_type, tree fn, tree arglist) +{ + tree t; + int i; + + t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3); + TREE_TYPE (t) = return_type; + CALL_EXPR_FN (t) = fn; + CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE; + for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++) + CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist); + process_call_operands (t); + return t; +} + +/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and + FN and a null static chain slot. NARGS is the number of call arguments + which are specified as "..." 
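build_vl_exp_stat above allocates a variable-length expression node in one block sized for LEN operand slots and stores LEN itself in operand 0; build_call_list then reserves three bookkeeping slots (count, callee, static chain) before the arguments. A standalone sketch of that layout using a C99 flexible array member and plain integers, with an invented struct vl_exp instead of GCC's union tree_node:

#include <stdio.h>
#include <stdlib.h>

struct vl_exp
{
  int code;                /* stands in for the tree code */
  long operands[];         /* slot 0 = operand count, then the operands */
};

static struct vl_exp *build_vl_exp (int code, int len)
{
  size_t size = sizeof (struct vl_exp) + len * sizeof (long);
  struct vl_exp *t = calloc (1, size);   /* zero-initialized, like the GGC path */
  t->code = code;
  t->operands[0] = len;                  /* cf. build_int_cst (sizetype, len) */
  return t;
}

int main (void)
{
  /* A "call" with 3 bookkeeping slots plus 2 argument slots.  */
  struct vl_exp *call = build_vl_exp (1 /* CALL_EXPR stand-in */, 2 + 3);
  call->operands[1] = 0xf00;             /* callee slot */
  call->operands[3] = 42;                /* first argument */
  call->operands[4] = 7;                 /* second argument */
  printf ("operand slots: %ld\n", call->operands[0]);
  free (call);
  return 0;
}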
arguments. */ + +tree +build_call_nary (tree return_type, tree fn, int nargs, ...) +{ + tree ret; + va_list args; + va_start (args, nargs); + ret = build_call_valist (return_type, fn, nargs, args); + va_end (args); + return ret; +} +/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and + FN and a null static chain slot. NARGS is the number of call arguments + which are specified as a va_list ARGS. */ + +tree +build_call_valist (tree return_type, tree fn, int nargs, va_list args) +{ + tree t; + int i; + + t = build_vl_exp (CALL_EXPR, nargs + 3); + TREE_TYPE (t) = return_type; + CALL_EXPR_FN (t) = fn; + CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE; + for (i = 0; i < nargs; i++) + CALL_EXPR_ARG (t, i) = va_arg (args, tree); + process_call_operands (t); + return t; +} + +/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and + FN and a null static chain slot. NARGS is the number of call arguments + which are specified as a tree array ARGS. */ + +tree +build_call_array (tree return_type, tree fn, int nargs, tree *args) +{ + tree t; + int i; + + t = build_vl_exp (CALL_EXPR, nargs + 3); + TREE_TYPE (t) = return_type; + CALL_EXPR_FN (t) = fn; + CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE; + for (i = 0; i < nargs; i++) + CALL_EXPR_ARG (t, i) = args[i]; + process_call_operands (t); return t; } + + +/* Returns true if it is possible to prove that the index of + an array access REF (an ARRAY_REF expression) falls into the + array bounds. */ + +bool +in_array_bounds_p (tree ref) +{ + tree idx = TREE_OPERAND (ref, 1); + tree min, max; + + if (TREE_CODE (idx) != INTEGER_CST) + return false; + + min = array_ref_low_bound (ref); + max = array_ref_up_bound (ref); + if (!min + || !max + || TREE_CODE (min) != INTEGER_CST + || TREE_CODE (max) != INTEGER_CST) + return false; + + if (tree_int_cst_lt (idx, min) + || tree_int_cst_lt (max, idx)) + return false; + + return true; +} + +/* Returns true if it is possible to prove that the range of + an array access REF (an ARRAY_RANGE_REF expression) falls + into the array bounds. */ + +bool +range_in_array_bounds_p (tree ref) +{ + tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref)); + tree range_min, range_max, min, max; + + range_min = TYPE_MIN_VALUE (domain_type); + range_max = TYPE_MAX_VALUE (domain_type); + if (!range_min + || !range_max + || TREE_CODE (range_min) != INTEGER_CST + || TREE_CODE (range_max) != INTEGER_CST) + return false; + + min = array_ref_low_bound (ref); + max = array_ref_up_bound (ref); + if (!min + || !max + || TREE_CODE (min) != INTEGER_CST + || TREE_CODE (max) != INTEGER_CST) + return false; + + if (tree_int_cst_lt (range_min, min) + || tree_int_cst_lt (max, range_max)) + return false; + + return true; +} + +/* Return true if T (assumed to be a DECL) must be assigned a memory + location. */ + +bool +needs_to_live_in_memory (const_tree t) +{ + if (TREE_CODE (t) == SSA_NAME) + t = SSA_NAME_VAR (t); + + return (TREE_ADDRESSABLE (t) + || is_global_var (t) + || (TREE_CODE (t) == RESULT_DECL + && aggregate_value_p (t, current_function_decl))); +} + +/* There are situations in which a language considers record types + compatible which have different field lists. Decide if two fields + are compatible. It is assumed that the parent records are compatible. 
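build_call_nary above is a thin variadic wrapper: it captures its "..." arguments with va_start and hands the va_list to build_call_valist, which does the real construction. The same forwarding pattern in miniature, with invented sum_nary/sum_valist helpers:

#include <stdarg.h>
#include <stdio.h>

static int sum_valist (int nargs, va_list args)
{
  int total = 0;
  for (int i = 0; i < nargs; i++)
    total += va_arg (args, int);
  return total;
}

static int sum_nary (int nargs, ...)
{
  va_list args;
  va_start (args, nargs);
  int total = sum_valist (nargs, args);  /* forward; only va_end afterwards */
  va_end (args);
  return total;
}

int main (void)
{
  printf ("%d\n", sum_nary (3, 1, 2, 3));   /* prints 6 */
  return 0;
}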
*/ + +bool +fields_compatible_p (const_tree f1, const_tree f2) +{ + if (!operand_equal_p (DECL_FIELD_BIT_OFFSET (f1), + DECL_FIELD_BIT_OFFSET (f2), OEP_ONLY_CONST)) + return false; + + if (!operand_equal_p (DECL_FIELD_OFFSET (f1), + DECL_FIELD_OFFSET (f2), OEP_ONLY_CONST)) + return false; + + if (!types_compatible_p (TREE_TYPE (f1), TREE_TYPE (f2))) + return false; + + return true; +} + +/* Locate within RECORD a field that is compatible with ORIG_FIELD. */ + +tree +find_compatible_field (tree record, tree orig_field) +{ + tree f; + + for (f = TYPE_FIELDS (record); f ; f = TREE_CHAIN (f)) + if (TREE_CODE (f) == FIELD_DECL + && fields_compatible_p (f, orig_field)) + return f; + + /* ??? Why isn't this on the main fields list? */ + f = TYPE_VFIELD (record); + if (f && TREE_CODE (f) == FIELD_DECL + && fields_compatible_p (f, orig_field)) + return f; + + /* ??? We should abort here, but Java appears to do Bad Things + with inherited fields. */ + return orig_field; +} + +/* Return value of a constant X and sign-extend it. */ + +HOST_WIDE_INT +int_cst_value (const_tree x) +{ + unsigned bits = TYPE_PRECISION (TREE_TYPE (x)); + unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x); + + /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */ + gcc_assert (TREE_INT_CST_HIGH (x) == 0 + || TREE_INT_CST_HIGH (x) == -1); + + if (bits < HOST_BITS_PER_WIDE_INT) + { + bool negative = ((val >> (bits - 1)) & 1) != 0; + if (negative) + val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1; + else + val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1); + } + + return val; +} + +/* If TYPE is an integral type, return an equivalent type which is + unsigned iff UNSIGNEDP is true. If TYPE is not an integral type, + return TYPE itself. */ + +tree +signed_or_unsigned_type_for (int unsignedp, tree type) +{ + tree t = type; + if (POINTER_TYPE_P (type)) + t = size_type_node; + + if (!INTEGRAL_TYPE_P (t) || TYPE_UNSIGNED (t) == unsignedp) + return t; + + return lang_hooks.types.type_for_size (TYPE_PRECISION (t), unsignedp); +} + +/* Returns unsigned variant of TYPE. */ + +tree +unsigned_type_for (tree type) +{ + return signed_or_unsigned_type_for (1, type); +} + +/* Returns signed variant of TYPE. */ + +tree +signed_type_for (tree type) +{ + return signed_or_unsigned_type_for (0, type); +} + +/* Returns the largest value obtainable by casting something in INNER type to + OUTER type. */ + +tree +upper_bound_in_type (tree outer, tree inner) +{ + unsigned HOST_WIDE_INT lo, hi; + unsigned int det = 0; + unsigned oprec = TYPE_PRECISION (outer); + unsigned iprec = TYPE_PRECISION (inner); + unsigned prec; + + /* Compute a unique number for every combination. */ + det |= (oprec > iprec) ? 4 : 0; + det |= TYPE_UNSIGNED (outer) ? 2 : 0; + det |= TYPE_UNSIGNED (inner) ? 1 : 0; + + /* Determine the exponent to use. */ + switch (det) + { + case 0: + case 1: + /* oprec <= iprec, outer: signed, inner: don't care. */ + prec = oprec - 1; + break; + case 2: + case 3: + /* oprec <= iprec, outer: unsigned, inner: don't care. */ + prec = oprec; + break; + case 4: + /* oprec > iprec, outer: signed, inner: signed. */ + prec = iprec - 1; + break; + case 5: + /* oprec > iprec, outer: signed, inner: unsigned. */ + prec = iprec; + break; + case 6: + /* oprec > iprec, outer: unsigned, inner: signed. */ + prec = oprec; + break; + case 7: + /* oprec > iprec, outer: unsigned, inner: unsigned. */ + prec = iprec; + break; + default: + gcc_unreachable (); + } + + /* Compute 2^^prec - 1. 
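int_cst_value above sign-extends the low TYPE_PRECISION bits of a value held in an unsigned host word: it looks at the top significant bit and then fills or clears every higher bit. A standalone sketch of that step with a fixed 64-bit word and an invented sign_extend helper:

#include <stdio.h>

static long long sign_extend (unsigned long long val, unsigned bits)
{
  if (bits < 64)
    {
      int negative = (val >> (bits - 1)) & 1;
      if (negative)
        val |= ~0ULL << (bits - 1) << 1;     /* fill the high bits with ones */
      else
        val &= ~(~0ULL << (bits - 1) << 1);  /* clear any stray high bits */
    }
  return (long long) val;
}

int main (void)
{
  printf ("%lld\n", sign_extend (0xFFull, 8));   /* -1   */
  printf ("%lld\n", sign_extend (0x7Full, 8));   /* 127  */
  printf ("%lld\n", sign_extend (0x80ull, 8));   /* -128 */
  return 0;
}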
*/ + if (prec <= HOST_BITS_PER_WIDE_INT) + { + hi = 0; + lo = ((~(unsigned HOST_WIDE_INT) 0) + >> (HOST_BITS_PER_WIDE_INT - prec)); + } + else + { + hi = ((~(unsigned HOST_WIDE_INT) 0) + >> (2 * HOST_BITS_PER_WIDE_INT - prec)); + lo = ~(unsigned HOST_WIDE_INT) 0; + } + + return build_int_cst_wide (outer, lo, hi); +} + +/* Returns the smallest value obtainable by casting something in INNER type to + OUTER type. */ + +tree +lower_bound_in_type (tree outer, tree inner) +{ + unsigned HOST_WIDE_INT lo, hi; + unsigned oprec = TYPE_PRECISION (outer); + unsigned iprec = TYPE_PRECISION (inner); + + /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type + and obtain 0. */ + if (TYPE_UNSIGNED (outer) + /* If we are widening something of an unsigned type, OUTER type + contains all values of INNER type. In particular, both INNER + and OUTER types have zero in common. */ + || (oprec > iprec && TYPE_UNSIGNED (inner))) + lo = hi = 0; + else + { + /* If we are widening a signed type to another signed type, we + want to obtain -2^^(iprec-1). If we are keeping the + precision or narrowing to a signed type, we want to obtain + -2^(oprec-1). */ + unsigned prec = oprec > iprec ? iprec : oprec; + + if (prec <= HOST_BITS_PER_WIDE_INT) + { + hi = ~(unsigned HOST_WIDE_INT) 0; + lo = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1); + } + else + { + hi = ((~(unsigned HOST_WIDE_INT) 0) + << (prec - HOST_BITS_PER_WIDE_INT - 1)); + lo = 0; + } + } + + return build_int_cst_wide (outer, lo, hi); +} + +/* Return nonzero if two operands that are suitable for PHI nodes are + necessarily equal. Specifically, both ARG0 and ARG1 must be either + SSA_NAME or invariant. Note that this is strictly an optimization. + That is, callers of this function can directly call operand_equal_p + and get the same result, only slower. */ + +int +operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1) +{ + if (arg0 == arg1) + return 1; + if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME) + return 0; + return operand_equal_p (arg0, arg1, 0); +} + +/* Returns number of zeros at the end of binary representation of X. + + ??? Use ffs if available? */ + +tree +num_ending_zeros (const_tree x) +{ + unsigned HOST_WIDE_INT fr, nfr; + unsigned num, abits; + tree type = TREE_TYPE (x); + + if (TREE_INT_CST_LOW (x) == 0) + { + num = HOST_BITS_PER_WIDE_INT; + fr = TREE_INT_CST_HIGH (x); + } + else + { + num = 0; + fr = TREE_INT_CST_LOW (x); + } + + for (abits = HOST_BITS_PER_WIDE_INT / 2; abits; abits /= 2) + { + nfr = fr >> abits; + if (nfr << abits == fr) + { + num += abits; + fr = nfr; + } + } + + if (num > TYPE_PRECISION (type)) + num = TYPE_PRECISION (type); + + return build_int_cst_type (type, num); +} + + +#define WALK_SUBTREE(NODE) \ + do \ + { \ + result = walk_tree_1 (&(NODE), func, data, pset, lh); \ + if (result) \ + return result; \ + } \ + while (0) + +/* This is a subroutine of walk_tree that walks field of TYPE that are to + be walked whenever a type is seen in the tree. Rest of operands and return + value are as for walk_tree. */ + +static tree +walk_type_fields (tree type, walk_tree_fn func, void *data, + struct pointer_set_t *pset, walk_tree_lh lh) +{ + tree result = NULL_TREE; + + switch (TREE_CODE (type)) + { + case POINTER_TYPE: + case REFERENCE_TYPE: + /* We have to worry about mutually recursive pointers. These can't + be written in C. They can in Ada. It's pathological, but + there's an ACATS test (c38102a) that checks it. 
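num_ending_zeros above counts trailing zero bits without relying on a hardware ffs: it probes with shift widths that halve each round and keeps any shift that can be undone without losing set bits. The same loop on a plain 64-bit integer, with an invented trailing_zeros wrapper:

#include <stdio.h>

static unsigned trailing_zeros (unsigned long long fr)
{
  if (fr == 0)
    return 64;

  unsigned num = 0;
  for (unsigned abits = 64 / 2; abits; abits /= 2)
    {
      unsigned long long nfr = fr >> abits;
      if ((nfr << abits) == fr)        /* the low ABITS bits were all zero */
        {
          num += abits;
          fr = nfr;
        }
    }
  return num;
}

int main (void)
{
  printf ("%u %u %u\n",
          trailing_zeros (1),            /* 0  */
          trailing_zeros (0x50),         /* 4  */
          trailing_zeros (1ULL << 63));  /* 63 */
  return 0;
}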
Deal with this + by checking if we're pointing to another pointer, that one + points to another pointer, that one does too, and we have no htab. + If so, get a hash table. We check three levels deep to avoid + the cost of the hash table if we don't need one. */ + if (POINTER_TYPE_P (TREE_TYPE (type)) + && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type))) + && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type)))) + && !pset) + { + result = walk_tree_without_duplicates (&TREE_TYPE (type), + func, data); + if (result) + return result; + + break; + } + + /* ... fall through ... */ + + case COMPLEX_TYPE: + WALK_SUBTREE (TREE_TYPE (type)); + break; + + case METHOD_TYPE: + WALK_SUBTREE (TYPE_METHOD_BASETYPE (type)); + + /* Fall through. */ + + case FUNCTION_TYPE: + WALK_SUBTREE (TREE_TYPE (type)); + { + tree arg; + + /* We never want to walk into default arguments. */ + for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg)) + WALK_SUBTREE (TREE_VALUE (arg)); + } + break; + + case ARRAY_TYPE: + /* Don't follow this nodes's type if a pointer for fear that + we'll have infinite recursion. If we have a PSET, then we + need not fear. */ + if (pset + || (!POINTER_TYPE_P (TREE_TYPE (type)) + && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)) + WALK_SUBTREE (TREE_TYPE (type)); + WALK_SUBTREE (TYPE_DOMAIN (type)); + break; + + case OFFSET_TYPE: + WALK_SUBTREE (TREE_TYPE (type)); + WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type)); + break; + + default: + break; + } + + return NULL_TREE; +} + +/* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is + called with the DATA and the address of each sub-tree. If FUNC returns a + non-NULL value, the traversal is stopped, and the value returned by FUNC + is returned. If PSET is non-NULL it is used to record the nodes visited, + and to avoid visiting a node more than once. */ + +tree +walk_tree_1 (tree *tp, walk_tree_fn func, void *data, + struct pointer_set_t *pset, walk_tree_lh lh) +{ + enum tree_code code; + int walk_subtrees; + tree result; + +#define WALK_SUBTREE_TAIL(NODE) \ + do \ + { \ + tp = & (NODE); \ + goto tail_recurse; \ + } \ + while (0) + + tail_recurse: + /* Skip empty subtrees. */ + if (!*tp) + return NULL_TREE; + + /* Don't walk the same tree twice, if the user has requested + that we avoid doing so. */ + if (pset && pointer_set_insert (pset, *tp)) + return NULL_TREE; + + /* Call the function. */ + walk_subtrees = 1; + result = (*func) (tp, &walk_subtrees, data); + + /* If we found something, return it. */ + if (result) + return result; + + code = TREE_CODE (*tp); + + /* Even if we didn't, FUNC may have decided that there was nothing + interesting below this point in the tree. */ + if (!walk_subtrees) + { + /* But we still need to check our siblings. */ + if (code == TREE_LIST) + WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); + else if (code == OMP_CLAUSE) + WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); + else + return NULL_TREE; + } + + if (lh) + { + result = (*lh) (tp, &walk_subtrees, func, data, pset); + if (result || !walk_subtrees) + return result; + } + + switch (code) + { + case ERROR_MARK: + case IDENTIFIER_NODE: + case INTEGER_CST: + case REAL_CST: + case FIXED_CST: + case VECTOR_CST: + case STRING_CST: + case BLOCK: + case PLACEHOLDER_EXPR: + case SSA_NAME: + case FIELD_DECL: + case RESULT_DECL: + /* None of these have subtrees other than those already walked + above. 
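walk_tree_1 above turns its last recursive step into iteration: WALK_SUBTREE_TAIL resets the tp cursor and jumps back to the tail_recurse label, so walking a long chain of siblings or operands does not deepen the C stack. A toy walker showing the same goto shape, with an invented node struct in place of trees:

#include <stdio.h>

struct node { int value; struct node *child; struct node *sibling; };

static void visit (struct node *n)
{
 tail_recurse:
  if (!n)
    return;

  printf ("%d\n", n->value);

  visit (n->child);        /* ordinary recursion for the non-tail subtree */

  n = n->sibling;          /* tail position: iterate instead of recursing */
  goto tail_recurse;
}

int main (void)
{
  struct node c = { 3, NULL, NULL };
  struct node b = { 2, &c, NULL };
  struct node a = { 1, NULL, &b };
  visit (&a);              /* prints 1 2 3 */
  return 0;
}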
*/ + break; + + case TREE_LIST: + WALK_SUBTREE (TREE_VALUE (*tp)); + WALK_SUBTREE_TAIL (TREE_CHAIN (*tp)); + break; + + case TREE_VEC: + { + int len = TREE_VEC_LENGTH (*tp); + + if (len == 0) + break; + + /* Walk all elements but the first. */ + while (--len) + WALK_SUBTREE (TREE_VEC_ELT (*tp, len)); + + /* Now walk the first one as a tail call. */ + WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0)); + } + + case COMPLEX_CST: + WALK_SUBTREE (TREE_REALPART (*tp)); + WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp)); + + case CONSTRUCTOR: + { + unsigned HOST_WIDE_INT idx; + constructor_elt *ce; + + for (idx = 0; + VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (*tp), idx, ce); + idx++) + WALK_SUBTREE (ce->value); + } + break; + + case SAVE_EXPR: + WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0)); + + case BIND_EXPR: + { + tree decl; + for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl)) + { + /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk + into declarations that are just mentioned, rather than + declared; they don't really belong to this part of the tree. + And, we can see cycles: the initializer for a declaration + can refer to the declaration itself. */ + WALK_SUBTREE (DECL_INITIAL (decl)); + WALK_SUBTREE (DECL_SIZE (decl)); + WALK_SUBTREE (DECL_SIZE_UNIT (decl)); + } + WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp)); + } + + case STATEMENT_LIST: + { + tree_stmt_iterator i; + for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i)) + WALK_SUBTREE (*tsi_stmt_ptr (i)); + } + break; + + case OMP_CLAUSE: + switch (OMP_CLAUSE_CODE (*tp)) + { + case OMP_CLAUSE_PRIVATE: + case OMP_CLAUSE_SHARED: + case OMP_CLAUSE_FIRSTPRIVATE: + case OMP_CLAUSE_COPYIN: + case OMP_CLAUSE_COPYPRIVATE: + case OMP_CLAUSE_IF: + case OMP_CLAUSE_NUM_THREADS: + case OMP_CLAUSE_SCHEDULE: + WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0)); + /* FALLTHRU */ + + case OMP_CLAUSE_NOWAIT: + case OMP_CLAUSE_ORDERED: + case OMP_CLAUSE_DEFAULT: + case OMP_CLAUSE_UNTIED: + WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); + + case OMP_CLAUSE_LASTPRIVATE: + WALK_SUBTREE (OMP_CLAUSE_DECL (*tp)); + WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp)); + WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); + + case OMP_CLAUSE_COLLAPSE: + { + int i; + for (i = 0; i < 3; i++) + WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i)); + WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); + } + + case OMP_CLAUSE_REDUCTION: + { + int i; + for (i = 0; i < 4; i++) + WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i)); + WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp)); + } + + default: + gcc_unreachable (); + } + break; + + case TARGET_EXPR: + { + int i, len; + + /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same. + But, we only want to walk once. */ + len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3; + for (i = 0; i < len; ++i) + WALK_SUBTREE (TREE_OPERAND (*tp, i)); + WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len)); + } + + case CHANGE_DYNAMIC_TYPE_EXPR: + WALK_SUBTREE (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*tp)); + WALK_SUBTREE_TAIL (CHANGE_DYNAMIC_TYPE_LOCATION (*tp)); + + case DECL_EXPR: + /* If this is a TYPE_DECL, walk into the fields of the type that it's + defining. We only want to walk into these fields of a type in this + case and not in the general case of a mere reference to the type. + + The criterion is as follows: if the field can be an expression, it + must be walked only here. 
This should be in keeping with the fields + that are directly gimplified in gimplify_type_sizes in order for the + mark/copy-if-shared/unmark machinery of the gimplifier to work with + variable-sized types. + + Note that DECLs get walked as part of processing the BIND_EXPR. */ + if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL) + { + tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp)); + if (TREE_CODE (*type_p) == ERROR_MARK) + return NULL_TREE; + + /* Call the function for the type. See if it returns anything or + doesn't want us to continue. If we are to continue, walk both + the normal fields and those for the declaration case. */ + result = (*func) (type_p, &walk_subtrees, data); + if (result || !walk_subtrees) + return result; + + result = walk_type_fields (*type_p, func, data, pset, lh); + if (result) + return result; + + /* If this is a record type, also walk the fields. */ + if (TREE_CODE (*type_p) == RECORD_TYPE + || TREE_CODE (*type_p) == UNION_TYPE + || TREE_CODE (*type_p) == QUAL_UNION_TYPE) + { + tree field; + + for (field = TYPE_FIELDS (*type_p); field; + field = TREE_CHAIN (field)) + { + /* We'd like to look at the type of the field, but we can + easily get infinite recursion. So assume it's pointed + to elsewhere in the tree. Also, ignore things that + aren't fields. */ + if (TREE_CODE (field) != FIELD_DECL) + continue; + + WALK_SUBTREE (DECL_FIELD_OFFSET (field)); + WALK_SUBTREE (DECL_SIZE (field)); + WALK_SUBTREE (DECL_SIZE_UNIT (field)); + if (TREE_CODE (*type_p) == QUAL_UNION_TYPE) + WALK_SUBTREE (DECL_QUALIFIER (field)); + } + } + + /* Same for scalar types. */ + else if (TREE_CODE (*type_p) == BOOLEAN_TYPE + || TREE_CODE (*type_p) == ENUMERAL_TYPE + || TREE_CODE (*type_p) == INTEGER_TYPE + || TREE_CODE (*type_p) == FIXED_POINT_TYPE + || TREE_CODE (*type_p) == REAL_TYPE) + { + WALK_SUBTREE (TYPE_MIN_VALUE (*type_p)); + WALK_SUBTREE (TYPE_MAX_VALUE (*type_p)); + } + + WALK_SUBTREE (TYPE_SIZE (*type_p)); + WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p)); + } + /* FALLTHRU */ + + default: + if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) + { + int i, len; + + /* Walk over all the sub-trees of this operand. */ + len = TREE_OPERAND_LENGTH (*tp); + + /* Go through the subtrees. We need to do this in forward order so + that the scope of a FOR_EXPR is handled properly. */ + if (len) + { + for (i = 0; i < len - 1; ++i) + WALK_SUBTREE (TREE_OPERAND (*tp, i)); + WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1)); + } + } + /* If this is a type, walk the needed fields in the type. */ + else if (TYPE_P (*tp)) + return walk_type_fields (*tp, func, data, pset, lh); + break; + } + + /* We didn't find what we were looking for. */ + return NULL_TREE; + +#undef WALK_SUBTREE_TAIL +} +#undef WALK_SUBTREE + +/* Like walk_tree, but does not walk duplicate nodes more than once. */ + +tree +walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data, + walk_tree_lh lh) +{ + tree result; + struct pointer_set_t *pset; + + pset = pointer_set_create (); + result = walk_tree_1 (tp, func, data, pset, lh); + pointer_set_destroy (pset); + return result; +} + + +tree * +tree_block (tree t) +{ + char const c = TREE_CODE_CLASS (TREE_CODE (t)); + + if (IS_EXPR_CODE_CLASS (c)) + return &t->exp.block; + gcc_unreachable (); + return NULL; +} + +/* Build and return a TREE_LIST of arguments in the CALL_EXPR exp. + FIXME: don't use this function. It exists for compatibility with + the old representation of CALL_EXPRs where a list was used to hold the + arguments. 
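walk_tree_without_duplicates_1 above wraps the walk in a pointer set: each node's address is inserted before visiting, and a node already present is skipped, which is what keeps shared subtrees from being visited more than once. A standalone sketch follows, with a small linear array standing in for GCC's pointer_set_t:

#include <stdio.h>

struct node { int value; struct node *left, *right; };

#define MAX_SEEN 64
static const void *seen[MAX_SEEN];
static int n_seen;

static int seen_insert (const void *p)   /* returns 1 if already present */
{
  for (int i = 0; i < n_seen; i++)
    if (seen[i] == p)
      return 1;
  seen[n_seen++] = p;
  return 0;
}

static void walk (struct node *n)
{
  if (!n || seen_insert (n))
    return;
  printf ("%d\n", n->value);
  walk (n->left);
  walk (n->right);
}

int main (void)
{
  struct node shared = { 7, NULL, NULL };
  struct node root = { 1, &shared, &shared };   /* shared node reached twice */
  walk (&root);                                 /* 7 is printed only once */
  return 0;
}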
Places that currently extract the arglist from a CALL_EXPR
+   ought to be rewritten to use the CALL_EXPR itself.  */
+tree
+call_expr_arglist (tree exp)
+{
+  tree arglist = NULL_TREE;
+  int i;
+  for (i = call_expr_nargs (exp) - 1; i >= 0; i--)
+    arglist = tree_cons (NULL_TREE, CALL_EXPR_ARG (exp, i), arglist);
+  return arglist;
+}
+
+
+/* Create a nameless artificial label and put it in the current function
+   context.  Returns the newly created label.  */
+
+tree
+create_artificial_label (void)
+{
+  tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
+
+  DECL_ARTIFICIAL (lab) = 1;
+  DECL_IGNORED_P (lab) = 1;
+  DECL_CONTEXT (lab) = current_function_decl;
+  return lab;
+}
+
+/* Given a tree, try to return a useful variable name that we can use
+   to prefix a temporary that is being assigned the value of the tree.
+   I.e. given <temp> = &A, return A.  */
+
+const char *
+get_name (tree t)
+{
+  tree stripped_decl;
+
+  stripped_decl = t;
+  STRIP_NOPS (stripped_decl);
+  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
+    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
+  else
+    {
+      switch (TREE_CODE (stripped_decl))
+        {
+        case ADDR_EXPR:
+          return get_name (TREE_OPERAND (stripped_decl, 0));
+        default:
+          return NULL;
+        }
+    }
+}
+
+/* Return true if the function type FNTYPE has a variable argument list.  */
+
+bool
+stdarg_p (tree fntype)
+{
+  function_args_iterator args_iter;
+  tree n = NULL_TREE, t;
+
+  if (!fntype)
+    return false;
+
+  FOREACH_FUNCTION_ARGS(fntype, t, args_iter)
+    {
+      n = t;
+    }
+
+  return n != NULL_TREE && n != void_type_node;
+}
+
+/* Return true if the function type FNTYPE has a prototype.  */
+
+bool
+prototype_p (tree fntype)
+{
+  tree t;
+
+  gcc_assert (fntype != NULL_TREE);
+
+  t = TYPE_ARG_TYPES (fntype);
+  return (t != NULL_TREE);
+}
+
+/* Return the number of arguments that the function type FNTYPE has.  */
+
+int
+function_args_count (tree fntype)
+{
+  function_args_iterator args_iter;
+  tree t;
+  int num = 0;
+
+  if (fntype)
+    {
+      FOREACH_FUNCTION_ARGS(fntype, t, args_iter)
+        {
+          num++;
+        }
+    }
+
+  return num;
+}
+
+/* If BLOCK is inlined from an __attribute__((__artificial__))
+   routine, return a pointer to the location from which it was
+   called.  */
+location_t *
+block_nonartificial_location (tree block)
+{
+  location_t *ret = NULL;
+
+  while (block && TREE_CODE (block) == BLOCK
+         && BLOCK_ABSTRACT_ORIGIN (block))
+    {
+      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
+
+      while (TREE_CODE (ao) == BLOCK
+             && BLOCK_ABSTRACT_ORIGIN (ao)
+             && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
+        ao = BLOCK_ABSTRACT_ORIGIN (ao);
+
+      if (TREE_CODE (ao) == FUNCTION_DECL)
+        {
+          /* If AO is an artificial inline, point RET to the
+             call site locus at which it has been inlined and continue
+             the loop, in case AO's caller is also an artificial
+             inline.  */
+          if (DECL_DECLARED_INLINE_P (ao)
+              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
+            ret = &BLOCK_SOURCE_LOCATION (block);
+          else
+            break;
+        }
+      else if (TREE_CODE (ao) != BLOCK)
+        break;
+
+      block = BLOCK_SUPERCONTEXT (block);
+    }
+  return ret;
+}
+
+
+/* If EXP is inlined from an __attribute__((__artificial__))
+   function, return the location of the original call expression.  */
+
+location_t
+tree_nonartificial_location (tree exp)
+{
+  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
+
+  if (loc)
+    return *loc;
+  else
+    return EXPR_LOCATION (exp);
+}
+
+
+/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
+   and TARGET_OPTION_NODE nodes.  */
+
+/* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
+
+static hashval_t
+cl_option_hash_hash (const void *x)
+{
+  const_tree const t = (const_tree) x;
+  const char *p;
+  size_t i;
+  size_t len = 0;
+  hashval_t hash = 0;
+
+  if (TREE_CODE (t) == OPTIMIZATION_NODE)
+    {
+      p = (const char *)TREE_OPTIMIZATION (t);
+      len = sizeof (struct cl_optimization);
+    }
+
+  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
+    {
+      p = (const char *)TREE_TARGET_OPTION (t);
+      len = sizeof (struct cl_target_option);
+    }
+
+  else
+    gcc_unreachable ();
+
+  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
+     something else.  */
+  for (i = 0; i < len; i++)
+    if (p[i])
+      hash = (hash << 4) ^ ((i << 2) | p[i]);
+
+  return hash;
+}
+
+/* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
+   TARGET_OPTION_NODE) is the same as that given by *Y, which is a tree
+   node of the same kind.  */
+
+static int
+cl_option_hash_eq (const void *x, const void *y)
+{
+  const_tree const xt = (const_tree) x;
+  const_tree const yt = (const_tree) y;
+  const char *xp;
+  const char *yp;
+  size_t len;
+
+  if (TREE_CODE (xt) != TREE_CODE (yt))
+    return 0;
+
+  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
+    {
+      xp = (const char *)TREE_OPTIMIZATION (xt);
+      yp = (const char *)TREE_OPTIMIZATION (yt);
+      len = sizeof (struct cl_optimization);
+    }
+
+  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
+    {
+      xp = (const char *)TREE_TARGET_OPTION (xt);
+      yp = (const char *)TREE_TARGET_OPTION (yt);
+      len = sizeof (struct cl_target_option);
+    }
+
+  else
+    gcc_unreachable ();
+
+  return (memcmp (xp, yp, len) == 0);
+}
+
+/* Build an OPTIMIZATION_NODE based on the current options.  */
+
+tree
+build_optimization_node (void)
+{
+  tree t;
+  void **slot;
+
+  /* Use the cache of optimization nodes.  */
+
+  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node));
+
+  slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
+  t = (tree) *slot;
+  if (!t)
+    {
+      /* Insert this one into the hash table.  */
+      t = cl_optimization_node;
+      *slot = t;
+
+      /* Make a new node for next time round.  */
+      cl_optimization_node = make_node (OPTIMIZATION_NODE);
+    }
+
+  return t;
+}
+
+/* Build a TARGET_OPTION_NODE based on the current options.  */
+
+tree
+build_target_option_node (void)
+{
+  tree t;
+  void **slot;
+
+  /* Use the cache of optimization nodes.  */
+
+  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node));
+
+  slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
+  t = (tree) *slot;
+  if (!t)
+    {
+      /* Insert this one into the hash table.  */
+      t = cl_target_option_node;
+      *slot = t;
+
+      /* Make a new node for next time round.  */
+      cl_target_option_node = make_node (TARGET_OPTION_NODE);
+    }
+
+  return t;
+}
+
+/* Determine the "ultimate origin" of a block.  The block may be an inlined
+   instance of an inlined instance of a block which is local to an inline
+   function, so we have to trace all of the way back through the origin chain
+   to find out what sort of node actually served as the original seed for the
+   given block.  */
+
+tree
+block_ultimate_origin (const_tree block)
+{
+  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
+
+  /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
+     nodes in the function to point to themselves; ignore that if
+     we're trying to output the abstract instance of this function.
*/ + if (BLOCK_ABSTRACT (block) && immediate_origin == block) + return NULL_TREE; + + if (immediate_origin == NULL_TREE) + return NULL_TREE; + else + { + tree ret_val; + tree lookahead = immediate_origin; + + do + { + ret_val = lookahead; + lookahead = (TREE_CODE (ret_val) == BLOCK + ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL); + } + while (lookahead != NULL && lookahead != ret_val); + + /* The block's abstract origin chain may not be the *ultimate* origin of + the block. It could lead to a DECL that has an abstract origin set. + If so, we want that DECL's abstract origin (which is what DECL_ORIGIN + will give us if it has one). Note that DECL's abstract origins are + supposed to be the most distant ancestor (or so decl_ultimate_origin + claims), so we don't need to loop following the DECL origins. */ + if (DECL_P (ret_val)) + return DECL_ORIGIN (ret_val); + + return ret_val; + } +} + +#include "gt-tree.h"
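
As a usage sketch of the tree walker added above (illustrative only, not part of this patch): it assumes the walk_tree_without_duplicates convenience macro from tree.h and the usual walk_tree_fn contract (return NULL_TREE to keep walking, set *walk_subtrees to 0 to prune a subtree); the helper names count_calls_r and count_calls are hypothetical.

/* Illustrative sketch: count the CALL_EXPRs reachable from EXPR,
   visiting each shared subtree only once.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"

/* walk_tree callback: bump the counter passed in DATA for every
   CALL_EXPR seen.  Returning NULL_TREE continues the walk.  */

static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  return NULL_TREE;
}

/* Hypothetical helper: return the number of CALL_EXPRs in EXPR.  */

static int
count_calls (tree expr)
{
  int n = 0;
  walk_tree_without_duplicates (&expr, count_calls_r, &n);
  return n;
}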