+ if (constructor_fields == 0)
+ {
+ pedwarn_init (input_location, 0,
+ "excess elements in union initializer");
+ break;
+ }
+
+ fieldtype = TREE_TYPE (constructor_fields);
+ if (fieldtype != error_mark_node)
+ fieldtype = TYPE_MAIN_VARIANT (fieldtype);
+ fieldcode = TREE_CODE (fieldtype);
+
+ /* Warn that traditional C rejects initialization of unions.
+ We skip the warning if the value is zero. This is done
+ under the assumption that the zero initializer in user
+ code appears conditioned on e.g. __STDC__ to avoid
+ "missing initializer" warnings and relies on default
+ initialization to zero in the traditional C case.
+ We also skip the warning if the initializer is designated,
+ again on the assumption that this must be conditional on
+ __STDC__ anyway (and we've already complained about the
+ member-designator already). */
+ if (!in_system_header && !constructor_designated
+ && !(value.value && (integer_zerop (value.value)
+ || real_zerop (value.value))))
+ warning (OPT_Wtraditional, "traditional C rejects initialization "
+ "of unions");
+
+ /* Accept a string constant to initialize a subarray. */
+ if (value.value != 0
+ && fieldcode == ARRAY_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (fieldtype))
+ && string_flag)
+ value.value = orig_value;
+ /* Otherwise, if we have come to a subaggregate,
+ and we don't have an element of its type, push into it. */
+ else if (value.value != 0
+ && value.value != error_mark_node
+ && TYPE_MAIN_VARIANT (TREE_TYPE (value.value)) != fieldtype
+ && (fieldcode == RECORD_TYPE || fieldcode == ARRAY_TYPE
+ || fieldcode == UNION_TYPE))
+ {
+ push_init_level (1);
+ continue;
+ }
+
+ if (value.value)
+ {
+ push_member_name (constructor_fields);
+ output_init_element (value.value, strict_string,
+ fieldtype, constructor_fields, 1, implicit);
+ RESTORE_SPELLING_DEPTH (constructor_depth);
+ }
+ else
+ /* Do the bookkeeping for an element that was
+ directly output as a constructor. */
+ {
+ constructor_bit_index = DECL_SIZE (constructor_fields);
+ constructor_unfilled_fields = TREE_CHAIN (constructor_fields);
+ }
+
+ constructor_fields = 0;
+ }
+ else if (TREE_CODE (constructor_type) == ARRAY_TYPE)
+ {
+ tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type));
+ enum tree_code eltcode = TREE_CODE (elttype);
+
+ /* Accept a string constant to initialize a subarray. */
+ if (value.value != 0
+ && eltcode == ARRAY_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (elttype))
+ && string_flag)
+ value.value = orig_value;
+ /* Otherwise, if we have come to a subaggregate,
+ and we don't have an element of its type, push into it. */
+ else if (value.value != 0
+ && value.value != error_mark_node
+ && TYPE_MAIN_VARIANT (TREE_TYPE (value.value)) != elttype
+ && (eltcode == RECORD_TYPE || eltcode == ARRAY_TYPE
+ || eltcode == UNION_TYPE))
+ {
+ push_init_level (1);
+ continue;
+ }
+
+ if (constructor_max_index != 0
+ && (tree_int_cst_lt (constructor_max_index, constructor_index)
+ || integer_all_onesp (constructor_max_index)))
+ {
+ pedwarn_init (input_location, 0,
+ "excess elements in array initializer");
+ break;
+ }
+
+ /* Now output the actual element. */
+ if (value.value)
+ {
+ push_array_bounds (tree_low_cst (constructor_index, 1));
+ output_init_element (value.value, strict_string,
+ elttype, constructor_index, 1, implicit);
+ RESTORE_SPELLING_DEPTH (constructor_depth);
+ }
+
+ constructor_index
+ = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+
+ if (!value.value)
+ /* If we are doing the bookkeeping for an element that was
+ directly output as a constructor, we must update
+ constructor_unfilled_index. */
+ constructor_unfilled_index = constructor_index;
+ }
+ else if (TREE_CODE (constructor_type) == VECTOR_TYPE)
+ {
+ tree elttype = TYPE_MAIN_VARIANT (TREE_TYPE (constructor_type));
+
+ /* Do a basic check of initializer size. Note that vectors
+ always have a fixed size derived from their type. */
+ if (tree_int_cst_lt (constructor_max_index, constructor_index))
+ {
+ pedwarn_init (input_location, 0,
+ "excess elements in vector initializer");
+ break;
+ }
+
+ /* Now output the actual element. */
+ if (value.value)
+ output_init_element (value.value, strict_string,
+ elttype, constructor_index, 1, implicit);
+
+ constructor_index
+ = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+
+ if (!value.value)
+ /* If we are doing the bookkeeping for an element that was
+ directly output as a constructor, we must update
+ constructor_unfilled_index. */
+ constructor_unfilled_index = constructor_index;
+ }
+
+ /* Handle the sole element allowed in a braced initializer
+ for a scalar variable. */
+ else if (constructor_type != error_mark_node
+ && constructor_fields == 0)
+ {
+ pedwarn_init (input_location, 0,
+ "excess elements in scalar initializer");
+ break;
+ }
+ else
+ {
+ if (value.value)
+ output_init_element (value.value, strict_string,
+ constructor_type, NULL_TREE, 1, implicit);
+ constructor_fields = 0;
+ }
+
+ /* Handle range initializers either at this level or anywhere higher
+ in the designator stack. */
+ if (constructor_range_stack)
+ {
+ struct constructor_range_stack *p, *range_stack;
+ int finish = 0;
+
+ range_stack = constructor_range_stack;
+ constructor_range_stack = 0;
+ while (constructor_stack != range_stack->stack)
+ {
+ gcc_assert (constructor_stack->implicit);
+ process_init_element (pop_init_level (1), true);
+ }
+ for (p = range_stack;
+ !p->range_end || tree_int_cst_equal (p->index, p->range_end);
+ p = p->prev)
+ {
+ gcc_assert (constructor_stack->implicit);
+ process_init_element (pop_init_level (1), true);
+ }
+
+ p->index = size_binop (PLUS_EXPR, p->index, bitsize_one_node);
+ if (tree_int_cst_equal (p->index, p->range_end) && !p->prev)
+ finish = 1;
+
+ while (1)
+ {
+ constructor_index = p->index;
+ constructor_fields = p->fields;
+ if (finish && p->range_end && p->index == p->range_start)
+ {
+ finish = 0;
+ p->prev = 0;
+ }
+ p = p->next;
+ if (!p)
+ break;
+ push_init_level (2);
+ p->stack = constructor_stack;
+ if (p->range_end && tree_int_cst_equal (p->index, p->range_end))
+ p->index = p->range_start;
+ }
+
+ if (!finish)
+ constructor_range_stack = range_stack;
+ continue;
+ }
+
+ break;
+ }
+
+ constructor_range_stack = 0;
+}
+\f
+ /* Build a complete asm-statement from its components: CV_QUALIFIER
+    (guaranteed to be 'volatile' or null) and ARGS (an ASM_EXPR node).
+    The resulting statement is appended to the current statement list.  */
+ tree
+ build_asm_stmt (tree cv_qualifier, tree args)
+ {
+   /* An explicit 'volatile' qualifier forces the asm to be volatile.  */
+   if (cv_qualifier && !ASM_VOLATILE_P (args))
+     ASM_VOLATILE_P (args) = 1;
+   return add_stmt (args);
+ }
+
+/* Build an asm-expr, whose components are a STRING, some OUTPUTS,
+ some INPUTS, and some CLOBBERS. The latter three may be NULL.
+ SIMPLE indicates whether there was anything at all after the
+ string in the asm expression -- asm("blah") and asm("blah" : )
+ are subtly different. We use a ASM_EXPR node to represent this. */
+ tree
+ build_asm_expr (tree string, tree outputs, tree inputs, tree clobbers,
+                 bool simple)
+ {
+   tree tail;
+   tree args;
+   int i;
+   const char *constraint;
+   const char **oconstraints;
+   bool allows_mem, allows_reg, is_inout;
+   int ninputs, noutputs;
+
+   ninputs = list_length (inputs);
+   noutputs = list_length (outputs);
+   /* Output constraints are collected here so that the input-constraint
+      parser below can match them (e.g. for "0"-style matching).  */
+   oconstraints = (const char **) alloca (noutputs * sizeof (const char *));
+
+   /* Replace symbolic operand names ("%[foo]") with numeric references.  */
+   string = resolve_asm_operand_names (string, outputs, inputs);
+
+   /* Remove output conversions that change the type but not the mode.  */
+   for (i = 0, tail = outputs; tail; ++i, tail = TREE_CHAIN (tail))
+     {
+       tree output = TREE_VALUE (tail);
+
+       /* ??? Really, this should not be here.  Users should be using a
+          proper lvalue, dammit.  But there's a long history of using casts
+          in the output operands.  In cases like longlong.h, this becomes a
+          primitive form of typechecking -- if the cast can be removed, then
+          the output operand had a type of the proper width; otherwise we'll
+          get an error.  Gross, but ...  */
+       STRIP_NOPS (output);
+
+       if (!lvalue_or_else (output, lv_asm))
+         output = error_mark_node;
+
+       /* Diagnose writing through an asm output to read-only storage.  */
+       if (output != error_mark_node
+           && (TREE_READONLY (output)
+               || TYPE_READONLY (TREE_TYPE (output))
+               || ((TREE_CODE (TREE_TYPE (output)) == RECORD_TYPE
+                    || TREE_CODE (TREE_TYPE (output)) == UNION_TYPE)
+                   && C_TYPE_FIELDS_READONLY (TREE_TYPE (output)))))
+         readonly_error (output, lv_asm);
+
+       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
+       oconstraints[i] = constraint;
+
+       if (parse_output_constraint (&constraint, i, ninputs, noutputs,
+                                    &allows_mem, &allows_reg, &is_inout))
+         {
+           /* If the operand is going to end up in memory,
+              mark it addressable.  */
+           if (!allows_reg && !c_mark_addressable (output))
+             output = error_mark_node;
+         }
+       else
+         output = error_mark_node;
+
+       TREE_VALUE (tail) = output;
+     }
+
+   /* Second pass: validate the input operands.  */
+   for (i = 0, tail = inputs; tail; ++i, tail = TREE_CHAIN (tail))
+     {
+       tree input;
+
+       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tail)));
+       input = TREE_VALUE (tail);
+
+       if (parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
+                                   oconstraints, &allows_mem, &allows_reg))
+         {
+           /* If the operand is going to end up in memory,
+              mark it addressable.  */
+           if (!allows_reg && allows_mem)
+             {
+               /* Strip the nops as we allow this case.  FIXME, this really
+                  should be rejected or made deprecated.  */
+               STRIP_NOPS (input);
+               if (!c_mark_addressable (input))
+                 input = error_mark_node;
+             }
+         }
+       else
+         input = error_mark_node;
+
+       TREE_VALUE (tail) = input;
+     }
+
+   args = build_stmt (ASM_EXPR, string, outputs, inputs, clobbers);
+
+   /* asm statements without outputs, including simple ones, are treated
+      as volatile.  */
+   ASM_INPUT_P (args) = simple;
+   ASM_VOLATILE_P (args) = (noutputs == 0);
+
+   return args;
+ }
+\f
+/* Generate a goto statement to LABEL. */
+
+ tree
+ c_finish_goto_label (tree label)
+ {
+   tree decl = lookup_label (label);
+   if (!decl)
+     return NULL_TREE;
+
+   /* Reject jumps into contexts that cannot legally be entered
+      sideways: statement expressions and VM-type scopes.  */
+   if (C_DECL_UNJUMPABLE_STMT_EXPR (decl))
+     {
+       error ("jump into statement expression");
+       return NULL_TREE;
+     }
+
+   if (C_DECL_UNJUMPABLE_VM (decl))
+     {
+       error ("jump into scope of identifier with variably modified type");
+       return NULL_TREE;
+     }
+
+   if (!C_DECL_UNDEFINABLE_STMT_EXPR (decl))
+     {
+       /* No jump from outside this statement expression context, so
+          record that there is a jump from within this context.  */
+       struct c_label_list *nlist;
+       nlist = XOBNEW (&parser_obstack, struct c_label_list);
+       nlist->next = label_context_stack_se->labels_used;
+       nlist->label = decl;
+       label_context_stack_se->labels_used = nlist;
+     }
+
+   if (!C_DECL_UNDEFINABLE_VM (decl))
+     {
+       /* No jump from outside this context of identifiers with
+          variably modified type, so record that there is a jump from
+          within this context.  */
+       struct c_label_list *nlist;
+       nlist = XOBNEW (&parser_obstack, struct c_label_list);
+       nlist->next = label_context_stack_vm->labels_used;
+       nlist->label = decl;
+       label_context_stack_vm->labels_used = nlist;
+     }
+
+   TREE_USED (decl) = 1;
+   return add_stmt (build1 (GOTO_EXPR, void_type_node, decl));
+ }
+
+/* Generate a computed goto statement to EXPR. */
+
+ tree
+ c_finish_goto_ptr (tree expr)
+ {
+   tree target;
+
+   /* Computed goto is a GNU extension; diagnose under -pedantic.  */
+   pedwarn (input_location, OPT_pedantic, "ISO C forbids %<goto *expr;%>");
+   target = convert (ptr_type_node, expr);
+   return add_stmt (build1 (GOTO_EXPR, void_type_node, target));
+ }
+
+/* Generate a C `return' statement. RETVAL is the expression for what
+ to return, or a null pointer for `return;' with no value. */
+
+ tree
+ c_finish_return (tree retval)
+ {
+   tree valtype = TREE_TYPE (TREE_TYPE (current_function_decl)), ret_stmt;
+   bool no_warning = false;
+
+   if (TREE_THIS_VOLATILE (current_function_decl))
+     warning (0, "function declared %<noreturn%> has a %<return%> statement");
+
+   /* Case 1: 'return;' with no value.  */
+   if (!retval)
+     {
+       current_function_returns_null = 1;
+       if ((warn_return_type || flag_isoc99)
+           && valtype != 0 && TREE_CODE (valtype) != VOID_TYPE)
+         {
+           pedwarn_c99 (input_location, flag_isoc99 ? 0 : OPT_Wreturn_type,
+                        "%<return%> with no value, in "
+                        "function returning non-void");
+           /* Suppress the duplicate -Wreturn-type warning later on.  */
+           no_warning = true;
+         }
+     }
+   /* Case 2: returning a value from a void function.  */
+   else if (valtype == 0 || TREE_CODE (valtype) == VOID_TYPE)
+     {
+       current_function_returns_null = 1;
+       if (TREE_CODE (TREE_TYPE (retval)) != VOID_TYPE)
+         pedwarn (input_location, 0,
+                  "%<return%> with a value, in function returning void");
+       else
+         pedwarn (input_location, OPT_pedantic, "ISO C forbids "
+                  "%<return%> with expression, in function returning void");
+     }
+   /* Case 3: a genuine value return; convert and check it.  */
+   else
+     {
+       tree t = convert_for_assignment (valtype, retval, ic_return,
+                                        NULL_TREE, NULL_TREE, 0);
+       tree res = DECL_RESULT (current_function_decl);
+       tree inner;
+
+       current_function_returns_value = 1;
+       if (t == error_mark_node)
+         return NULL_TREE;
+
+       inner = t = convert (TREE_TYPE (res), t);
+
+       /* Strip any conversions, additions, and subtractions, and see if
+          we are returning the address of a local variable.  Warn if so.  */
+       while (1)
+         {
+           switch (TREE_CODE (inner))
+             {
+             CASE_CONVERT:
+             case NON_LVALUE_EXPR:
+             case PLUS_EXPR:
+             case POINTER_PLUS_EXPR:
+               inner = TREE_OPERAND (inner, 0);
+               continue;
+
+             case MINUS_EXPR:
+               /* If the second operand of the MINUS_EXPR has a pointer
+                  type (or is converted from it), this may be valid, so
+                  don't give a warning.  */
+               {
+                 tree op1 = TREE_OPERAND (inner, 1);
+
+                 while (!POINTER_TYPE_P (TREE_TYPE (op1))
+                        && (CONVERT_EXPR_P (op1)
+                            || TREE_CODE (op1) == NON_LVALUE_EXPR))
+                   op1 = TREE_OPERAND (op1, 0);
+
+                 if (POINTER_TYPE_P (TREE_TYPE (op1)))
+                   break;
+
+                 inner = TREE_OPERAND (inner, 0);
+                 continue;
+               }
+
+             case ADDR_EXPR:
+               inner = TREE_OPERAND (inner, 0);
+
+               /* Walk down to the object whose address is taken.  */
+               while (REFERENCE_CLASS_P (inner)
+                      && TREE_CODE (inner) != INDIRECT_REF)
+                 inner = TREE_OPERAND (inner, 0);
+
+               if (DECL_P (inner)
+                   && !DECL_EXTERNAL (inner)
+                   && !TREE_STATIC (inner)
+                   && DECL_CONTEXT (inner) == current_function_decl)
+                 warning (0, "function returns address of local variable");
+               break;
+
+             default:
+               break;
+             }
+
+           break;
+         }
+
+       retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, t);
+
+       if (warn_sequence_point)
+         verify_sequence_points (retval);
+     }
+
+   ret_stmt = build_stmt (RETURN_EXPR, retval);
+   TREE_NO_WARNING (ret_stmt) |= no_warning;
+   return add_stmt (ret_stmt);
+ }
+\f
+ /* Per-switch bookkeeping, kept on a stack while the body of a switch
+    statement is being parsed.  */
+ struct c_switch {
+   /* The SWITCH_EXPR being built.  */
+   tree switch_expr;
+
+   /* The original type of the testing expression, i.e. before the
+      default conversion is applied.  */
+   tree orig_type;
+
+   /* A splay-tree mapping the low element of a case range to the high
+      element, or NULL_TREE if there is no high element.  Used to
+      determine whether or not a new case label duplicates an old case
+      label.  We need a tree, rather than simply a hash table, because
+      of the GNU case range extension.  Owned by this node; freed in
+      c_finish_case.  */
+   splay_tree cases;
+
+   /* Number of nested statement expressions within this switch
+      statement; if nonzero, case and default labels may not
+      appear.  */
+   unsigned int blocked_stmt_expr;
+
+   /* Scope of outermost declarations of identifiers with variably
+      modified type within this switch statement; if nonzero, case and
+      default labels may not appear.  */
+   unsigned int blocked_vm;
+
+   /* The next node on the stack.  */
+   struct c_switch *next;
+ };
+
+ /* A stack of the currently active switch statements.  The innermost
+    switch statement is on the top of the stack.  There is no need to
+    mark the stack for garbage collection because it is only active
+    during the processing of the body of a function, and we never
+    collect at that point.  */
+
+ struct c_switch *c_switch_stack;
+
+/* Start a C switch statement, testing expression EXP. Return the new
+ SWITCH_EXPR. */
+
+ tree
+ c_start_case (tree exp)
+ {
+   tree orig_type = error_mark_node;
+   struct c_switch *cs;
+
+   if (exp != error_mark_node)
+     {
+       orig_type = TREE_TYPE (exp);
+
+       if (!INTEGRAL_TYPE_P (orig_type))
+         {
+           if (orig_type != error_mark_node)
+             {
+               error ("switch quantity not an integer");
+               orig_type = error_mark_node;
+             }
+           /* Substitute a dummy condition so parsing can continue.  */
+           exp = integer_zero_node;
+         }
+       else
+         {
+           tree type = TYPE_MAIN_VARIANT (orig_type);
+
+           if (!in_system_header
+               && (type == long_integer_type_node
+                   || type == long_unsigned_type_node))
+             warning (OPT_Wtraditional, "%<long%> switch expression not "
+                      "converted to %<int%> in ISO C");
+
+           exp = default_conversion (exp);
+
+           if (warn_sequence_point)
+             verify_sequence_points (exp);
+         }
+     }
+
+   /* Add this new SWITCH_EXPR to the stack.  The body and labels are
+      filled in later, by c_finish_case and do_case.  */
+   cs = XNEW (struct c_switch);
+   cs->switch_expr = build3 (SWITCH_EXPR, orig_type, exp, NULL_TREE, NULL_TREE);
+   cs->orig_type = orig_type;
+   cs->cases = splay_tree_new (case_compare, NULL, NULL);
+   cs->blocked_stmt_expr = 0;
+   cs->blocked_vm = 0;
+   cs->next = c_switch_stack;
+   c_switch_stack = cs;
+
+   return add_stmt (cs->switch_expr);
+ }
+
+ /* Process a case or default label.  LOW_VALUE is null for a default
+    label; HIGH_VALUE, if non-null, is the upper bound of a GNU case
+    range.  Returns the new label, or NULL_TREE on error.  */
+
+ tree
+ do_case (tree low_value, tree high_value)
+ {
+   tree label;
+
+   /* Outside any switch statement, either kind of label is an error.  */
+   if (c_switch_stack == NULL)
+     {
+       if (low_value)
+         error ("case label not within a switch statement");
+       else
+         error ("%<default%> label not within a switch statement");
+       return NULL_TREE;
+     }
+
+   /* Labels may not appear inside a statement expression nested in the
+      switch; this takes precedence over the VM-scope check below.  */
+   if (c_switch_stack->blocked_stmt_expr)
+     {
+       if (low_value)
+         error ("case label in statement expression not containing "
+                "enclosing switch statement");
+       else
+         error ("%<default%> label in statement expression not containing "
+                "enclosing switch statement");
+       return NULL_TREE;
+     }
+
+   /* Likewise for the scope of an identifier with variably modified
+      type nested inside the switch.  */
+   if (c_switch_stack->blocked_vm)
+     {
+       if (low_value)
+         error ("case label in scope of identifier with variably modified "
+                "type not containing enclosing switch statement");
+       else
+         error ("%<default%> label in scope of identifier with variably "
+                "modified type not containing enclosing switch statement");
+       return NULL_TREE;
+     }
+
+   label = c_add_case_label (c_switch_stack->cases,
+                             SWITCH_COND (c_switch_stack->switch_expr),
+                             c_switch_stack->orig_type,
+                             low_value, high_value);
+   return label == error_mark_node ? NULL_TREE : label;
+ }
+
+ /* Finish the innermost switch statement: attach BODY to the
+    SWITCH_EXPR, emit switch-related warnings, and pop the stack.  */
+
+ void
+ c_finish_case (tree body)
+ {
+   struct c_switch *cs = c_switch_stack;
+   location_t loc;
+
+   SWITCH_BODY (cs->switch_expr) = body;
+
+   /* We must not be within a statement expression nested in the switch
+      at this point; we might, however, be within the scope of an
+      identifier with variably modified type nested in the switch.  */
+   gcc_assert (!cs->blocked_stmt_expr);
+
+   /* Emit warnings as needed, at the switch's own location if known.  */
+   loc = (EXPR_HAS_LOCATION (cs->switch_expr)
+          ? EXPR_LOCATION (cs->switch_expr) : input_location);
+   c_do_switch_warnings (cs->cases, loc,
+                         TREE_TYPE (cs->switch_expr),
+                         SWITCH_COND (cs->switch_expr));
+
+   /* Pop the stack and release the case table.  */
+   c_switch_stack = cs->next;
+   splay_tree_delete (cs->cases);
+   XDELETE (cs);
+ }
+\f
+/* Emit an if statement. IF_LOCUS is the location of the 'if'. COND,
+ THEN_BLOCK and ELSE_BLOCK are expressions to be used; ELSE_BLOCK
+ may be null. NESTED_IF is true if THEN_BLOCK contains another IF
+ statement, and was not surrounded with parenthesis. */
+
+ void
+ c_finish_if_stmt (location_t if_locus, tree cond, tree then_block,
+                   tree else_block, bool nested_if)
+ {
+   tree stmt;
+
+   /* Diagnose an ambiguous else if if-then-else is nested inside if-then.  */
+   if (warn_parentheses && nested_if && else_block == NULL)
+     {
+       tree inner_if = then_block;
+
+       /* We know from the grammar productions that there is an IF nested
+          within THEN_BLOCK.  Due to labels and c99 conditional declarations,
+          it might not be exactly THEN_BLOCK, but should be the last
+          non-container statement within.  */
+       while (1)
+         switch (TREE_CODE (inner_if))
+           {
+           case COND_EXPR:
+             goto found;
+           case BIND_EXPR:
+             inner_if = BIND_EXPR_BODY (inner_if);
+             break;
+           case STATEMENT_LIST:
+             /* NOTE(review): restarts from THEN_BLOCK rather than from
+                INNER_IF here -- looks intentional for this grammar, but
+                confirm for nested statement lists.  */
+             inner_if = expr_last (then_block);
+             break;
+           case TRY_FINALLY_EXPR:
+           case TRY_CATCH_EXPR:
+             inner_if = TREE_OPERAND (inner_if, 0);
+             break;
+           default:
+             gcc_unreachable ();
+           }
+     found:
+
+       /* Only the inner if lacking an else makes the outer one ambiguous.  */
+       if (COND_EXPR_ELSE (inner_if))
+         warning (OPT_Wparentheses,
+                  "%Hsuggest explicit braces to avoid ambiguous %<else%>",
+                  &if_locus);
+     }
+
+   stmt = build3 (COND_EXPR, void_type_node, cond, then_block, else_block);
+   SET_EXPR_LOCATION (stmt, if_locus);
+   add_stmt (stmt);
+ }
+
+/* Emit a general-purpose loop construct. START_LOCUS is the location of
+ the beginning of the loop. COND is the loop condition. COND_IS_FIRST
+ is false for DO loops. INCR is the FOR increment expression. BODY is
+ the statement controlled by the loop. BLAB is the break label. CLAB is
+ the continue label. Everything is allowed to be NULL. */
+
+ void
+ c_finish_loop (location_t start_locus, tree cond, tree incr, tree body,
+                tree blab, tree clab, bool cond_is_first)
+ {
+   tree entry = NULL, exit = NULL, t;
+
+   /* If the condition is zero don't generate a loop construct.  */
+   if (cond && integer_zerop (cond))
+     {
+       if (cond_is_first)
+         {
+           /* while(0)/for(;0;): jump straight past the body.  */
+           t = build_and_jump (&blab);
+           SET_EXPR_LOCATION (t, start_locus);
+           add_stmt (t);
+         }
+     }
+   else
+     {
+       tree top = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
+
+       /* If we have an exit condition, then we build an IF with gotos either
+          out of the loop, or to the top of it.  If there's no exit condition,
+          then we just build a jump back to the top.  */
+       exit = build_and_jump (&LABEL_EXPR_LABEL (top));
+
+       if (cond && !integer_nonzerop (cond))
+         {
+           /* Canonicalize the loop condition to the end.  This means
+              generating a branch to the loop condition.  Reuse the
+              continue label, if possible.  */
+           if (cond_is_first)
+             {
+               if (incr || !clab)
+                 {
+                   entry = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
+                   t = build_and_jump (&LABEL_EXPR_LABEL (entry));
+                 }
+               else
+                 t = build1 (GOTO_EXPR, void_type_node, clab);
+               SET_EXPR_LOCATION (t, start_locus);
+               add_stmt (t);
+             }
+
+           /* Turn the unconditional back-edge into a conditional one.  */
+           t = build_and_jump (&blab);
+           exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
+           if (cond_is_first)
+             SET_EXPR_LOCATION (exit, start_locus);
+           else
+             SET_EXPR_LOCATION (exit, input_location);
+         }
+
+       add_stmt (top);
+     }
+
+   /* Emit the pieces in canonical order: body, continue label, increment,
+      condition entry label, exit test, break label.  */
+   if (body)
+     add_stmt (body);
+   if (clab)
+     add_stmt (build1 (LABEL_EXPR, void_type_node, clab));
+   if (incr)
+     add_stmt (incr);
+   if (entry)
+     add_stmt (entry);
+   if (exit)
+     add_stmt (exit);
+   if (blab)
+     add_stmt (build1 (LABEL_EXPR, void_type_node, blab));
+ }
+
+ /* Finish a break or continue statement.  *LABEL_P is the destination
+    label: NULL (not yet created), a LABEL_DECL, or an INTEGER_CST
+    encoding an invalid context (see the switch below).  IS_BREAK is
+    true for break, false for continue.  */
+ tree
+ c_finish_bc_stmt (tree *label_p, bool is_break)
+ {
+   bool skip;
+   tree label = *label_p;
+
+   /* In switch statements break is sometimes stylistically used after
+      a return statement.  This can lead to spurious warnings about
+      control reaching the end of a non-void function when it is
+      inlined.  Note that we are calling block_may_fallthru with
+      language specific tree nodes; this works because
+      block_may_fallthru returns true when given something it does not
+      understand.  */
+   skip = !block_may_fallthru (cur_stmt_list);
+
+   if (!label)
+     {
+       /* Create the label lazily, and only if it is reachable.  */
+       if (!skip)
+         *label_p = label = create_artificial_label ();
+     }
+   else if (TREE_CODE (label) == LABEL_DECL)
+     ;
+   else switch (TREE_INT_CST_LOW (label))
+     {
+     case 0:
+       if (is_break)
+         error ("break statement not within loop or switch");
+       else
+         error ("continue statement not within a loop");
+       return NULL_TREE;
+
+     case 1:
+       gcc_assert (is_break);
+       error ("break statement used with OpenMP for loop");
+       return NULL_TREE;
+
+     default:
+       gcc_unreachable ();
+     }
+
+   if (skip)
+     return NULL_TREE;
+
+   if (!is_break)
+     add_stmt (build_predict_expr (PRED_CONTINUE, NOT_TAKEN));
+
+   return add_stmt (build1 (GOTO_EXPR, void_type_node, label));
+ }
+
+ /* A helper routine for c_process_expr_stmt and c_finish_stmt_expr.
+    Emit -Wunused-value diagnostics for the statement EXPR.  */
+
+ static void
+ emit_side_effect_warnings (tree expr)
+ {
+   if (expr == error_mark_node)
+     return;
+
+   if (TREE_SIDE_EFFECTS (expr))
+     {
+       warn_if_unused_value (expr, input_location);
+       return;
+     }
+
+   /* No side effects at all: a non-void value simply being dropped.  */
+   if (!VOID_TYPE_P (TREE_TYPE (expr)) && !TREE_NO_WARNING (expr))
+     warning (OPT_Wunused_value, "%Hstatement with no effect",
+              EXPR_HAS_LOCATION (expr) ? EXPR_LOCUS (expr) : &input_location);
+ }
+
+/* Process an expression as if it were a complete statement. Emit
+ diagnostics, but do not call ADD_STMT. */
+
+ tree
+ c_process_expr_stmt (tree expr)
+ {
+   if (!expr)
+     return NULL_TREE;
+
+   if (warn_sequence_point)
+     verify_sequence_points (expr);
+
+   if (TREE_TYPE (expr) != error_mark_node
+       && !COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (expr))
+       && TREE_CODE (TREE_TYPE (expr)) != ARRAY_TYPE)
+     error ("expression statement has incomplete type");
+
+   /* If we're not processing a statement expression, warn about unused values.
+      Warnings for statement expressions will be emitted later, once we figure
+      out which is the result.  */
+   if (!STATEMENT_LIST_STMT_EXPR (cur_stmt_list)
+       && warn_unused_value)
+     emit_side_effect_warnings (expr);
+
+   /* DECLs and constants cannot carry a location directly; wrap them
+      in a no-op NOP_EXPR so that a line number can be attached.  */
+   if (DECL_P (expr) || CONSTANT_CLASS_P (expr))
+     expr = build1 (NOP_EXPR, TREE_TYPE (expr), expr);
+
+   if (CAN_HAVE_LOCATION_P (expr))
+     SET_EXPR_LOCATION (expr, input_location);
+
+   return expr;
+ }
+
+ /* Emit an expression as a statement; returns the added statement,
+    or NULL when EXPR is null.  */
+
+ tree
+ c_finish_expr_stmt (tree expr)
+ {
+   if (!expr)
+     return NULL;
+   return add_stmt (c_process_expr_stmt (expr));
+ }
+
+/* Do the opposite and emit a statement as an expression. To begin,
+ create a new binding level and return it. */
+
+ tree
+ c_begin_stmt_expr (void)
+ {
+   tree ret;
+   struct c_label_context_se *nstack;
+   struct c_label_list *glist;
+
+   /* We must force a BLOCK for this level so that, if it is not expanded
+      later, there is a way to turn off the entire subtree of blocks that
+      are contained in it.  */
+   keep_next_level ();
+   ret = c_begin_compound_stmt (true);
+   if (c_switch_stack)
+     {
+       /* Block case/default labels while inside this statement
+          expression; the counter guards against overflow.  */
+       c_switch_stack->blocked_stmt_expr++;
+       gcc_assert (c_switch_stack->blocked_stmt_expr != 0);
+     }
+   /* Labels already targeted by gotos outside may no longer be defined
+      inside this statement expression.  */
+   for (glist = label_context_stack_se->labels_used;
+        glist != NULL;
+        glist = glist->next)
+     {
+       C_DECL_UNDEFINABLE_STMT_EXPR (glist->label) = 1;
+     }
+   /* Push a fresh label context for this statement expression.  */
+   nstack = XOBNEW (&parser_obstack, struct c_label_context_se);
+   nstack->labels_def = NULL;
+   nstack->labels_used = NULL;
+   nstack->next = label_context_stack_se;
+   label_context_stack_se = nstack;
+
+   /* Mark the current statement list as belonging to a statement list.  */
+   STATEMENT_LIST_STMT_EXPR (ret) = 1;
+
+   return ret;
+ }
+
+ /* Finish a statement expression whose body is BODY; return the tree
+    whose value is the value of the last contained expression.  */
+ tree
+ c_finish_stmt_expr (tree body)
+ {
+   tree last, type, tmp, val;
+   tree *last_p;
+   struct c_label_list *dlist, *glist, *glist_prev = NULL;
+
+   body = c_end_compound_stmt (body, true);
+   if (c_switch_stack)
+     {
+       /* Undo the blocking done in c_begin_stmt_expr.  */
+       gcc_assert (c_switch_stack->blocked_stmt_expr != 0);
+       c_switch_stack->blocked_stmt_expr--;
+     }
+   /* It is no longer possible to jump to labels defined within this
+      statement expression.  */
+   for (dlist = label_context_stack_se->labels_def;
+        dlist != NULL;
+        dlist = dlist->next)
+     {
+       C_DECL_UNJUMPABLE_STMT_EXPR (dlist->label) = 1;
+     }
+   /* It is again possible to define labels with a goto just outside
+      this statement expression.  */
+   for (glist = label_context_stack_se->next->labels_used;
+        glist != NULL;
+        glist = glist->next)
+     {
+       C_DECL_UNDEFINABLE_STMT_EXPR (glist->label) = 0;
+       glist_prev = glist;
+     }
+   /* Splice this context's labels_used onto the outer context's list,
+      then pop the context.  */
+   if (glist_prev != NULL)
+     glist_prev->next = label_context_stack_se->labels_used;
+   else
+     label_context_stack_se->next->labels_used
+       = label_context_stack_se->labels_used;
+   label_context_stack_se = label_context_stack_se->next;
+
+   /* Locate the last statement in BODY.  See c_end_compound_stmt
+      about always returning a BIND_EXPR.  */
+   last_p = &BIND_EXPR_BODY (body);
+   last = BIND_EXPR_BODY (body);
+
+  continue_searching:
+   if (TREE_CODE (last) == STATEMENT_LIST)
+     {
+       tree_stmt_iterator i;
+
+       /* This can happen with degenerate cases like ({ }).  No value.  */
+       if (!TREE_SIDE_EFFECTS (last))
+         return body;
+
+       /* If we're supposed to generate side effects warnings, process
+          all of the statements except the last.  */
+       if (warn_unused_value)
+         {
+           for (i = tsi_start (last); !tsi_one_before_end_p (i); tsi_next (&i))
+             emit_side_effect_warnings (tsi_stmt (i));
+         }
+       else
+         i = tsi_last (last);
+       last_p = tsi_stmt_ptr (i);
+       last = *last_p;
+     }
+
+   /* If the end of the list is exception related, then the list was split
+      by a call to push_cleanup.  Continue searching.  */
+   if (TREE_CODE (last) == TRY_FINALLY_EXPR
+       || TREE_CODE (last) == TRY_CATCH_EXPR)
+     {
+       last_p = &TREE_OPERAND (last, 0);
+       last = *last_p;
+       goto continue_searching;
+     }
+
+   /* In the case that the BIND_EXPR is not necessary, return the
+      expression out from inside it.  */
+   if (last == error_mark_node
+       || (last == BIND_EXPR_BODY (body)
+           && BIND_EXPR_VARS (body) == NULL))
+     {
+       /* Do not warn if the return value of a statement expression is
+          unused.  */
+       if (CAN_HAVE_LOCATION_P (last))
+         TREE_NO_WARNING (last) = 1;
+       return last;
+     }
+
+   /* Extract the type of said expression.  */
+   type = TREE_TYPE (last);
+
+   /* If we're not returning a value at all, then the BIND_EXPR that
+      we already have is a fine expression to return.  */
+   if (!type || VOID_TYPE_P (type))
+     return body;
+
+   /* Now that we've located the expression containing the value, it seems
+      silly to make voidify_wrapper_expr repeat the process.  Create a
+      temporary of the appropriate type and stick it in a TARGET_EXPR.  */
+   tmp = create_tmp_var_raw (type, NULL);
+
+   /* Unwrap a no-op NOP_EXPR as added by c_finish_expr_stmt.  This avoids
+      tree_expr_nonnegative_p giving up immediately.  */
+   val = last;
+   if (TREE_CODE (val) == NOP_EXPR
+       && TREE_TYPE (val) == TREE_TYPE (TREE_OPERAND (val, 0)))
+     val = TREE_OPERAND (val, 0);
+
+   /* Replace the last statement with an assignment into the temporary.  */
+   *last_p = build2 (MODIFY_EXPR, void_type_node, tmp, val);
+   SET_EXPR_LOCUS (*last_p, EXPR_LOCUS (last));
+
+   return build4 (TARGET_EXPR, type, tmp, body, NULL_TREE, NULL_TREE);
+ }
+
+/* Begin the scope of an identifier of variably modified type, scope
+ number SCOPE. Jumping from outside this scope to inside it is not
+ permitted. */
+
+ void
+ c_begin_vm_scope (unsigned int scope)
+ {
+   struct c_label_context_vm *nstack;
+   struct c_label_list *glist;
+
+   gcc_assert (scope > 0);
+
+   /* At file_scope, we don't have to do any processing.  */
+   if (label_context_stack_vm == NULL)
+     return;
+
+   /* Only record the outermost blocking scope within the switch.  */
+   if (c_switch_stack && !c_switch_stack->blocked_vm)
+     c_switch_stack->blocked_vm = scope;
+   /* Labels already targeted by gotos outside may no longer be
+      defined inside this scope.  */
+   for (glist = label_context_stack_vm->labels_used;
+        glist != NULL;
+        glist = glist->next)
+     {
+       C_DECL_UNDEFINABLE_VM (glist->label) = 1;
+     }
+   /* Push a fresh label context for this VM-type scope.  */
+   nstack = XOBNEW (&parser_obstack, struct c_label_context_vm);
+   nstack->labels_def = NULL;
+   nstack->labels_used = NULL;
+   nstack->scope = scope;
+   nstack->next = label_context_stack_vm;
+   label_context_stack_vm = nstack;
+ }
+
+/* End a scope which may contain identifiers of variably modified
+ type, scope number SCOPE. */
+
+ void
+ c_end_vm_scope (unsigned int scope)
+ {
+   if (label_context_stack_vm == NULL)
+     return;
+   if (c_switch_stack && c_switch_stack->blocked_vm == scope)
+     c_switch_stack->blocked_vm = 0;
+   /* We may have a number of nested scopes of identifiers with
+      variably modified type, all at this depth.  Pop each in turn.  */
+   while (label_context_stack_vm->scope == scope)
+     {
+       struct c_label_list *dlist, *glist, *glist_prev = NULL;
+
+       /* It is no longer possible to jump to labels defined within this
+          scope.  */
+       for (dlist = label_context_stack_vm->labels_def;
+            dlist != NULL;
+            dlist = dlist->next)
+         {
+           C_DECL_UNJUMPABLE_VM (dlist->label) = 1;
+         }
+       /* It is again possible to define labels with a goto just outside
+          this scope.  */
+       for (glist = label_context_stack_vm->next->labels_used;
+            glist != NULL;
+            glist = glist->next)
+         {
+           C_DECL_UNDEFINABLE_VM (glist->label) = 0;
+           glist_prev = glist;
+         }
+       /* Splice this context's labels_used onto the outer context's
+          list, then pop the context.  */
+       if (glist_prev != NULL)
+         glist_prev->next = label_context_stack_vm->labels_used;
+       else
+         label_context_stack_vm->next->labels_used
+           = label_context_stack_vm->labels_used;
+       label_context_stack_vm = label_context_stack_vm->next;
+     }
+ }
+\f
+/* Begin and end compound statements. This is as simple as pushing
+ and popping new statement lists from the tree. */
+
+ tree
+ c_begin_compound_stmt (bool do_scope)
+ {
+   /* Open a fresh statement list, and optionally (DO_SCOPE) a new
+      binding scope to go with it.  */
+   tree list = push_stmt_list ();
+
+   if (do_scope)
+     push_scope ();
+   return list;
+ }
+
+ tree
+ c_end_compound_stmt (tree stmt, bool do_scope)
+ {
+   tree block = NULL;
+
+   if (do_scope)
+     {
+       if (c_dialect_objc ())
+         objc_clear_super_receiver ();
+       block = pop_scope ();
+     }
+
+   /* Pop the statement list and wrap it (with its scope's BLOCK, if
+      any) in a BIND_EXPR as needed.  */
+   stmt = pop_stmt_list (stmt);
+   stmt = c_build_bind_expr (block, stmt);
+
+   /* If this compound statement is nested immediately inside a statement
+      expression, then force a BIND_EXPR to be created.  Otherwise we'll
+      do the wrong thing for ({ { 1; } }) or ({ 1; { } }).  In particular,
+      STATEMENT_LISTs merge, and thus we can lose track of what statement
+      was really last.  */
+   if (cur_stmt_list
+       && STATEMENT_LIST_STMT_EXPR (cur_stmt_list)
+       && TREE_CODE (stmt) != BIND_EXPR)
+     {
+       stmt = build3 (BIND_EXPR, void_type_node, NULL, stmt, NULL);
+       TREE_SIDE_EFFECTS (stmt) = 1;
+     }
+
+   return stmt;
+ }
+
+/* Queue a cleanup. CLEANUP is an expression/statement to be executed
+ when the current scope is exited. EH_ONLY is true when this is not
+ meant to apply to normal control flow transfer. */
+
+ /* Queue a cleanup.  CLEANUP is an expression/statement to be executed
+    when the current scope is exited.  EH_ONLY is true when this is not
+    meant to apply to normal control flow transfer.  DECL is unused.  */
+ 
+ void
+ push_cleanup (tree ARG_UNUSED (decl), tree cleanup, bool eh_only)
+ {
+   enum tree_code wrapper_code;
+   tree try_stmt, body;
+   bool in_stmt_expr;
+ 
+   /* Exception-only cleanups run via TRY_CATCH_EXPR; ordinary ones via
+      TRY_FINALLY_EXPR.  */
+   if (eh_only)
+     wrapper_code = TRY_CATCH_EXPR;
+   else
+     wrapper_code = TRY_FINALLY_EXPR;
+ 
+   /* Emit the wrapper now with an empty body; subsequent statements in
+      this scope are collected into a new list that becomes operand 0.  */
+   try_stmt = build_stmt (wrapper_code, NULL, cleanup);
+   add_stmt (try_stmt);
+ 
+   /* The new list must inherit the statement-expression flag of the
+      list it is nested in.  */
+   in_stmt_expr = STATEMENT_LIST_STMT_EXPR (cur_stmt_list);
+   body = push_stmt_list ();
+   TREE_OPERAND (try_stmt, 0) = body;
+   STATEMENT_LIST_STMT_EXPR (body) = in_stmt_expr;
+ }
+\f
+/* Build a binary-operation expression without default conversions.
+ CODE is the kind of expression to build.
+ LOCATION is the operator's location.
+ This function differs from `build' in several ways:
+ the data type of the result is computed and recorded in it,
+ warnings are generated if arg data types are invalid,
+ special handling for addition and subtraction of pointers is known,
+ and some optimization is done (operations on narrow ints
+ are done in the narrower type when that gives the same result).
+ Constant folding is also done before the result is returned.
+
+ Note that the operands will never have enumeral types, or function
+ or array types, because either they will have the default conversions
+ performed or they have both just been converted to some other type in which
+ the arithmetic is to be done. */
+
+tree
+build_binary_op (location_t location, enum tree_code code,
+ tree orig_op0, tree orig_op1, int convert_p)
+{
+ tree type0, type1;
+ enum tree_code code0, code1;
+ tree op0, op1;
+ tree ret = error_mark_node;
+ const char *invalid_op_diag;
+
+ /* Expression code to give to the expression when it is built.
+ Normally this is CODE, which is what the caller asked for,
+ but in some special cases we change it. */
+ enum tree_code resultcode = code;
+
+ /* Data type in which the computation is to be performed.
+ In the simplest cases this is the common type of the arguments. */
+ tree result_type = NULL;
+
+ /* Nonzero means operands have already been type-converted
+ in whatever way is necessary.
+ Zero means they need to be converted to RESULT_TYPE. */
+ int converted = 0;
+
+ /* Nonzero means create the expression with this type, rather than
+ RESULT_TYPE. */
+ tree build_type = 0;
+
+ /* Nonzero means after finally constructing the expression
+ convert it to this type. */
+ tree final_type = 0;
+
+ /* Nonzero if this is an operation like MIN or MAX which can
+ safely be computed in short if both args are promoted shorts.
+ Also implies COMMON.
+ -1 indicates a bitwise operation; this makes a difference
+ in the exact conditions for when it is safe to do the operation
+ in a narrower mode. */
+ int shorten = 0;
+
+ /* Nonzero if this is a comparison operation;
+ if both args are promoted shorts, compare the original shorts.
+ Also implies COMMON. */
+ int short_compare = 0;
+
+ /* Nonzero if this is a right-shift operation, which can be computed on the
+ original short and then promoted if the operand is a promoted short. */
+ int short_shift = 0;
+
+ /* Nonzero means set RESULT_TYPE to the common type of the args. */
+ int common = 0;
+
+ /* True means types are compatible as far as ObjC is concerned. */
+ bool objc_ok;
+
+ if (location == UNKNOWN_LOCATION)
+ location = input_location;
+
+ if (convert_p)
+ {
+ op0 = default_conversion (orig_op0);
+ op1 = default_conversion (orig_op1);
+ }
+ else
+ {
+ op0 = orig_op0;
+ op1 = orig_op1;
+ }
+
+ type0 = TREE_TYPE (op0);
+ type1 = TREE_TYPE (op1);
+
+ /* The expression codes of the data types of the arguments tell us
+ whether the arguments are integers, floating, pointers, etc. */
+ code0 = TREE_CODE (type0);
+ code1 = TREE_CODE (type1);
+
+ /* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */
+ STRIP_TYPE_NOPS (op0);
+ STRIP_TYPE_NOPS (op1);
+
+ /* If an error was already reported for one of the arguments,
+ avoid reporting another error. */
+
+ if (code0 == ERROR_MARK || code1 == ERROR_MARK)
+ return error_mark_node;
+
+ if ((invalid_op_diag
+ = targetm.invalid_binary_op (code, type0, type1)))
+ {
+ error_at (location, invalid_op_diag);
+ return error_mark_node;
+ }
+
+ objc_ok = objc_compare_types (type0, type1, -3, NULL_TREE);
+
+ switch (code)
+ {
+ case PLUS_EXPR:
+ /* Handle the pointer + int case. */
+ if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ {
+ ret = pointer_int_sum (PLUS_EXPR, op0, op1);
+ goto return_build_binary_op;
+ }
+ else if (code1 == POINTER_TYPE && code0 == INTEGER_TYPE)
+ {
+ ret = pointer_int_sum (PLUS_EXPR, op1, op0);
+ goto return_build_binary_op;
+ }
+ else
+ common = 1;
+ break;
+
+ case MINUS_EXPR:
+ /* Subtraction of two similar pointers.
+ We must subtract them as integers, then divide by object size. */
+ if (code0 == POINTER_TYPE && code1 == POINTER_TYPE
+ && comp_target_types (type0, type1))
+ {
+ ret = pointer_diff (op0, op1);
+ goto return_build_binary_op;
+ }
+ /* Handle pointer minus int. Just like pointer plus int. */
+ else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
+ {
+ ret = pointer_int_sum (MINUS_EXPR, op0, op1);
+ goto return_build_binary_op;
+ }
+ else
+ common = 1;
+ break;
+
+ case MULT_EXPR:
+ common = 1;
+ break;
+
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ warn_for_div_by_zero (location, op1);
+
+ if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE
+ || code0 == FIXED_POINT_TYPE
+ || code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
+ || code1 == FIXED_POINT_TYPE
+ || code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE))
+ {
+ enum tree_code tcode0 = code0, tcode1 = code1;
+
+ if (code0 == COMPLEX_TYPE || code0 == VECTOR_TYPE)
+ tcode0 = TREE_CODE (TREE_TYPE (TREE_TYPE (op0)));
+ if (code1 == COMPLEX_TYPE || code1 == VECTOR_TYPE)
+ tcode1 = TREE_CODE (TREE_TYPE (TREE_TYPE (op1)));
+
+ if (!((tcode0 == INTEGER_TYPE && tcode1 == INTEGER_TYPE)
+ || (tcode0 == FIXED_POINT_TYPE && tcode1 == FIXED_POINT_TYPE)))
+ resultcode = RDIV_EXPR;
+ else
+ /* Although it would be tempting to shorten always here, that
+ loses on some targets, since the modulo instruction is
+ undefined if the quotient can't be represented in the
+ computation mode. We shorten only if unsigned or if
+ dividing by something we know != -1. */
+ shorten = (TYPE_UNSIGNED (TREE_TYPE (orig_op0))
+ || (TREE_CODE (op1) == INTEGER_CST
+ && !integer_all_onesp (op1)));
+ common = 1;
+ }
+ break;
+
+ case BIT_AND_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ shorten = -1;
+ /* Allow vector types which are not floating point types. */
+ else if (code0 == VECTOR_TYPE
+ && code1 == VECTOR_TYPE
+ && !VECTOR_FLOAT_TYPE_P (type0)
+ && !VECTOR_FLOAT_TYPE_P (type1))
+ common = 1;
+ break;
+
+ case TRUNC_MOD_EXPR:
+ case FLOOR_MOD_EXPR:
+ warn_for_div_by_zero (location, op1);
+
+ if (code0 == INTEGER_TYPE && code1 == INTEGER_TYPE)
+ {
+ /* Although it would be tempting to shorten always here, that loses
+ on some targets, since the modulo instruction is undefined if the
+ quotient can't be represented in the computation mode. We shorten
+ only if unsigned or if dividing by something we know != -1. */
+ shorten = (TYPE_UNSIGNED (TREE_TYPE (orig_op0))
+ || (TREE_CODE (op1) == INTEGER_CST
+ && !integer_all_onesp (op1)));
+ common = 1;
+ }
+ break;
+
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ if ((code0 == INTEGER_TYPE || code0 == POINTER_TYPE
+ || code0 == REAL_TYPE || code0 == COMPLEX_TYPE
+ || code0 == FIXED_POINT_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == POINTER_TYPE
+ || code1 == REAL_TYPE || code1 == COMPLEX_TYPE
+ || code1 == FIXED_POINT_TYPE))
+ {
+ /* Result of these operations is always an int,
+ but that does not mean the operands should be
+ converted to ints! */
+ result_type = integer_type_node;
+ op0 = c_common_truthvalue_conversion (location, op0);
+ op1 = c_common_truthvalue_conversion (location, op1);
+ converted = 1;
+ }
+ break;
+
+ /* Shift operations: result has same type as first operand;
+ always convert second operand to int.
+ Also set SHORT_SHIFT if shifting rightward. */
+
+ case RSHIFT_EXPR:
+ if ((code0 == INTEGER_TYPE || code0 == FIXED_POINT_TYPE)
+ && code1 == INTEGER_TYPE)
+ {
+ if (TREE_CODE (op1) == INTEGER_CST && skip_evaluation == 0)
+ {
+ if (tree_int_cst_sgn (op1) < 0)
+ warning (0, "right shift count is negative");
+ else
+ {
+ if (!integer_zerop (op1))
+ short_shift = 1;
+
+ if (compare_tree_int (op1, TYPE_PRECISION (type0)) >= 0)
+ warning (0, "right shift count >= width of type");
+ }