+ alignofs = alignofs & -alignofs;
+ align = MIN (align, alignofs * BITS_PER_UNIT);
+ }
+ }
+
+ /* Handle a block of contiguous long-words. */
+
+ if (align >= 64 && bytes >= 8)
+ {
+ words = bytes / 8;
+
+ for (i = 0; i < words; ++i)
+ emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
+ const0_rtx);
+
+ bytes -= words * 8;
+ ofs += words * 8;
+ }
+
+ /* If the block is large and appropriately aligned, emit a single
+ store followed by a sequence of stq_u insns. */
+
+ if (align >= 32 && bytes > 16)
+ {
+ rtx orig_dsta;
+
+ emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
+ bytes -= 4;
+ ofs += 4;
+
+ orig_dsta = XEXP (orig_dst, 0);
+ if (GET_CODE (orig_dsta) == LO_SUM)
+ orig_dsta = force_reg (Pmode, orig_dsta);
+
+ words = bytes / 8;
+ for (i = 0; i < words; ++i)
+ {
+ rtx mem
+ = change_address (orig_dst, DImode,
+ gen_rtx_AND (DImode,
+ plus_constant (orig_dsta, ofs + i*8),
+ GEN_INT (-8)));
+ set_mem_alias_set (mem, 0);
+ emit_move_insn (mem, const0_rtx);
+ }
+
+ /* Depending on the alignment, the first stq_u may have overlapped
+ with the initial stl, which means that the last stq_u didn't
+ write as much as it would appear. Leave those questionable bytes
+ unaccounted for. */
+ bytes -= words * 8 - 4;
+ ofs += words * 8 - 4;
+ }
+
+ /* Handle a smaller block of aligned words. */
+
+ if ((align >= 64 && bytes == 4)
+ || (align == 32 && bytes >= 4))
+ {
+ words = bytes / 4;
+
+ for (i = 0; i < words; ++i)
+ emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
+ const0_rtx);
+
+ bytes -= words * 4;
+ ofs += words * 4;
+ }
+
+ /* An unaligned block uses stq_u stores for as many as possible. */
+
+ if (bytes >= 8)
+ {
+ words = bytes / 8;
+
+ alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
+
+ bytes -= words * 8;
+ ofs += words * 8;
+ }
+
+ /* Next clean up any trailing pieces. */
+
+#if HOST_BITS_PER_WIDE_INT >= 64
+ /* Count the number of bits in BYTES for which aligned stores could
+ be emitted. */
+ words = 0;
+ for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
+ if (bytes & i)
+ words += 1;
+
+ /* If we have appropriate alignment (and it wouldn't take too many
+ instructions otherwise), mask out the bytes we need. */
+ if (TARGET_BWX ? words > 2 : bytes > 0)
+ {
+ if (align >= 64)
+ {
+ rtx mem, tmp;
+ HOST_WIDE_INT mask;
+
+ mem = adjust_address (orig_dst, DImode, ofs);
+ set_mem_alias_set (mem, 0);
+
+ mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
+
+ tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
+ NULL_RTX, 1, OPTAB_WIDEN);
+
+ emit_move_insn (mem, tmp);
+ return 1;
+ }
+ else if (align >= 32 && bytes < 4)
+ {
+ rtx mem, tmp;
+ HOST_WIDE_INT mask;
+
+ mem = adjust_address (orig_dst, SImode, ofs);
+ set_mem_alias_set (mem, 0);
+
+ mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
+
+ tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
+ NULL_RTX, 1, OPTAB_WIDEN);
+
+ emit_move_insn (mem, tmp);
+ return 1;
+ }
+ }
+#endif
+
+ if (!TARGET_BWX && bytes >= 4)
+ {
+ alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
+ bytes -= 4;
+ ofs += 4;
+ }
+
+ if (bytes >= 2)
+ {
+ if (align >= 16)
+ {
+ do {
+ emit_move_insn (adjust_address (orig_dst, HImode, ofs),
+ const0_rtx);
+ bytes -= 2;
+ ofs += 2;
+ } while (bytes >= 2);
+ }
+ else if (! TARGET_BWX)
+ {
+ alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
+ bytes -= 2;
+ ofs += 2;
+ }
+ }
+
+ while (bytes > 0)
+ {
+ emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
+ bytes -= 1;
+ ofs += 1;
+ }
+
+ return 1;
+}
+
+/* Returns a mask so that zap(x, value) == x & mask.  Bit I of VALUE
+   selects byte I for clearing, so byte I of the returned mask is 0x00
+   when bit I is set and 0xff when it is clear.  */
+
+rtx
+alpha_expand_zap_mask (HOST_WIDE_INT value)
+{
+  rtx result;
+  int i;
+
+  if (HOST_BITS_PER_WIDE_INT >= 64)
+    {
+      HOST_WIDE_INT mask = 0;
+
+      /* Assemble the mask a byte at a time, most significant byte
+	 first, shifting previous bytes up as we go.  */
+      for (i = 7; i >= 0; --i)
+	{
+	  mask <<= 8;
+	  if (!((value >> i) & 1))
+	    mask |= 0xff;
+	}
+
+      result = gen_int_mode (mask, DImode);
+    }
+  else
+    {
+      /* 32-bit host: build the two halves separately and combine them
+	 into a DImode CONST_DOUBLE.  */
+      HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
+
+      gcc_assert (HOST_BITS_PER_WIDE_INT == 32);
+
+      /* Bits 4..7 of VALUE select the bytes of the high word.  */
+      for (i = 7; i >= 4; --i)
+	{
+	  mask_hi <<= 8;
+	  if (!((value >> i) & 1))
+	    mask_hi |= 0xff;
+	}
+
+      /* Bits 0..3 select the bytes of the low word.  */
+      for (i = 3; i >= 0; --i)
+	{
+	  mask_lo <<= 8;
+	  if (!((value >> i) & 1))
+	    mask_lo |= 0xff;
+	}
+
+      result = immed_double_const (mask_lo, mask_hi, DImode);
+    }
+
+  return result;
+}
+
+/* Emit the vector binary operation generated by GEN in MODE, lowering
+   the three operands to MODE first.  A const0_rtx operand is replaced
+   by the zero constant of MODE rather than lowparted.  */
+
+void
+alpha_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
+				   enum machine_mode mode,
+				   rtx op0, rtx op1, rtx op2)
+{
+  op0 = gen_lowpart (mode, op0);
+
+  if (op1 == const0_rtx)
+    op1 = CONST0_RTX (mode);
+  else
+    op1 = gen_lowpart (mode, op1);
+
+  if (op2 == const0_rtx)
+    op2 = CONST0_RTX (mode);
+  else
+    op2 = gen_lowpart (mode, op2);
+
+  emit_insn ((*gen) (op0, op1, op2));
+}
+
+/* A subroutine of the atomic operation splitters.  Jump to LABEL if
+   COND is true.  Mark the jump as unlikely to be taken.  */
+
+static void
+emit_unlikely_jump (rtx cond, rtx label)
+{
+  /* A probability just under 1% of REG_BR_PROB_BASE.  */
+  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
+  rtx x;
+
+  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
+  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
+  /* Attach the probability as a REG_BR_PROB note on the jump.  */
+  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
+}
+
+/* A subroutine of the atomic operation splitters.  Emit a load-locked
+   instruction in MODE.  Only SImode and DImode are handled; callers
+   must not pass any other mode (FN would remain null).  */
+
+static void
+emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
+{
+  rtx (*fn) (rtx, rtx) = NULL;
+  if (mode == SImode)
+    fn = gen_load_locked_si;
+  else if (mode == DImode)
+    fn = gen_load_locked_di;
+  emit_insn (fn (reg, mem));
+}
+
+/* A subroutine of the atomic operation splitters.  Emit a store-conditional
+   instruction in MODE.  RES receives the success flag.  Only SImode and
+   DImode are handled; callers must not pass any other mode.  */
+
+static void
+emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
+{
+  rtx (*fn) (rtx, rtx, rtx) = NULL;
+  if (mode == SImode)
+    fn = gen_store_conditional_si;
+  else if (mode == DImode)
+    fn = gen_store_conditional_di;
+  emit_insn (fn (res, mem, val));
+}
+
+/* A subroutine of the atomic operation splitters.  Emit an insxl
+   instruction in MODE: insert OP1 (a QImode or HImode value) into the
+   byte lane of a DImode word selected by address OP2.  Returns a fresh
+   DImode register holding the shifted value.  */
+
+static rtx
+emit_insxl (enum machine_mode mode, rtx op1, rtx op2)
+{
+  rtx ret = gen_reg_rtx (DImode);
+  rtx (*fn) (rtx, rtx, rtx);
+
+  /* Pick the byte (insbl) or word (inswl) insert pattern for the
+     target endianness.  */
+  if (WORDS_BIG_ENDIAN)
+    {
+      if (mode == QImode)
+	fn = gen_insbl_be;
+      else
+	fn = gen_inswl_be;
+    }
+  else
+    {
+      if (mode == QImode)
+	fn = gen_insbl_le;
+      else
+	fn = gen_inswl_le;
+    }
+  /* The insbl and inswl patterns require a register operand.  */
+  op1 = force_reg (mode, op1);
+  emit_insn (fn (ret, op1, op2));
+
+  return ret;
+}
+
+/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
+   to perform.  MEM is the memory on which to operate.  VAL is the second
+   operand of the binary operator.  BEFORE and AFTER are optional locations to
+   return the value of MEM either before or after the operation.  SCRATCH is
+   a scratch register.  */
+
+void
+alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
+		       rtx before, rtx after, rtx scratch)
+{
+  enum machine_mode mode = GET_MODE (mem);
+  rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
+
+  /* Full barrier before entering the ll/sc retry loop.  */
+  emit_insn (gen_memory_barrier ());
+
+  label = gen_label_rtx ();
+  emit_label (label);
+  label = gen_rtx_LABEL_REF (DImode, label);
+
+  /* If the caller did not ask for the pre-op value, load it into the
+     scratch register instead.  */
+  if (before == NULL)
+    before = scratch;
+  emit_load_locked (mode, before, mem);
+
+  if (code == NOT)
+    {
+      /* CODE == NOT encodes a NAND: compute BEFORE & VAL into VAL,
+	 then invert it.  */
+      x = gen_rtx_AND (mode, before, val);
+      emit_insn (gen_rtx_SET (VOIDmode, val, x));
+
+      x = gen_rtx_NOT (mode, val);
+    }
+  else
+    x = gen_rtx_fmt_ee (code, mode, before, val);
+  if (after)
+    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
+  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
+
+  emit_store_conditional (mode, cond, mem, scratch);
+
+  /* COND is zero when the store-conditional lost the reservation;
+     loop back and retry.  */
+  x = gen_rtx_EQ (DImode, cond, const0_rtx);
+  emit_unlikely_jump (x, label);
+
+  emit_insn (gen_memory_barrier ());
+}
+
+/* Expand a compare and swap operation: atomically replace *MEM with
+   NEWVAL if it currently equals OLDVAL.  RETVAL receives the prior
+   contents of MEM; SCRATCH is a scratch register.  */
+
+void
+alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
+			      rtx scratch)
+{
+  enum machine_mode mode = GET_MODE (mem);
+  rtx label1, label2, x, cond = gen_lowpart (DImode, scratch);
+
+  emit_insn (gen_memory_barrier ());
+
+  /* LABEL1 heads the ll/sc retry loop; LABEL2 is the failure exit.  */
+  label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  emit_label (XEXP (label1, 0));
+
+  emit_load_locked (mode, retval, mem);
+
+  /* Compare the loaded value against OLDVAL; branch out on mismatch.
+     A zero OLDVAL allows a direct branch on nonzero.  */
+  x = gen_lowpart (DImode, retval);
+  if (oldval == const0_rtx)
+    x = gen_rtx_NE (DImode, x, const0_rtx);
+  else
+    {
+      x = gen_rtx_EQ (DImode, x, oldval);
+      emit_insn (gen_rtx_SET (VOIDmode, cond, x));
+      x = gen_rtx_EQ (DImode, cond, const0_rtx);
+    }
+  emit_unlikely_jump (x, label2);
+
+  emit_move_insn (scratch, newval);
+  emit_store_conditional (mode, cond, mem, scratch);
+
+  /* Retry if the store-conditional failed.  */
+  x = gen_rtx_EQ (DImode, cond, const0_rtx);
+  emit_unlikely_jump (x, label1);
+
+  emit_insn (gen_memory_barrier ());
+  emit_label (XEXP (label2, 0));
+}
+
+/* Expand a compare-and-swap on a one- or two-byte quantity (QImode or
+   HImode MEM) in terms of the aligned DImode ll/sc patterns.  DST
+   receives the old value.  */
+
+void
+alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval)
+{
+  enum machine_mode mode = GET_MODE (mem);
+  rtx addr, align, wdst;
+  rtx (*fn5) (rtx, rtx, rtx, rtx, rtx);
+
+  /* Compute the containing aligned quadword address.  */
+  addr = force_reg (DImode, XEXP (mem, 0));
+  align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
+			       NULL_RTX, 1, OPTAB_DIRECT);
+
+  /* Zero-extend OLDVAL and shift NEWVAL into its byte lane.  */
+  oldval = convert_modes (DImode, mode, oldval, 1);
+  newval = emit_insxl (mode, newval, addr);
+
+  wdst = gen_reg_rtx (DImode);
+  if (mode == QImode)
+    fn5 = gen_sync_compare_and_swapqi_1;
+  else
+    fn5 = gen_sync_compare_and_swaphi_1;
+  emit_insn (fn5 (wdst, addr, oldval, newval, align));
+
+  emit_move_insn (dst, gen_lowpart (mode, wdst));
+}
+
+/* Split a sub-word compare-and-swap emitted by
+   alpha_expand_compare_and_swap_12 into an ll/sc loop that operates
+   on the aligned quadword at ALIGN, extracting and re-inserting the
+   MODE-sized field addressed by ADDR.  */
+
+void
+alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
+				 rtx oldval, rtx newval, rtx align,
+				 rtx scratch, rtx cond)
+{
+  rtx label1, label2, mem, width, mask, x;
+
+  mem = gen_rtx_MEM (DImode, align);
+  MEM_VOLATILE_P (mem) = 1;
+
+  emit_insn (gen_memory_barrier ());
+  /* LABEL1 heads the retry loop; LABEL2 is the failure exit.  */
+  label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  emit_label (XEXP (label1, 0));
+
+  emit_load_locked (DImode, scratch, mem);
+
+  /* Extract the old field value into DEST.  */
+  width = GEN_INT (GET_MODE_BITSIZE (mode));
+  mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
+  if (WORDS_BIG_ENDIAN)
+    emit_insn (gen_extxl_be (dest, scratch, width, addr));
+  else
+    emit_insn (gen_extxl_le (dest, scratch, width, addr));
+
+  if (oldval == const0_rtx)
+    x = gen_rtx_NE (DImode, dest, const0_rtx);
+  else
+    {
+      x = gen_rtx_EQ (DImode, dest, oldval);
+      emit_insn (gen_rtx_SET (VOIDmode, cond, x));
+      x = gen_rtx_EQ (DImode, cond, const0_rtx);
+    }
+  emit_unlikely_jump (x, label2);
+
+  /* Clear the field in SCRATCH and merge in NEWVAL (already shifted
+     into position by the expander).  */
+  if (WORDS_BIG_ENDIAN)
+    emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
+  else
+    emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
+  emit_insn (gen_iordi3 (scratch, scratch, newval));
+
+  emit_store_conditional (DImode, scratch, mem, scratch);
+
+  x = gen_rtx_EQ (DImode, scratch, const0_rtx);
+  emit_unlikely_jump (x, label1);
+
+  emit_insn (gen_memory_barrier ());
+  emit_label (XEXP (label2, 0));
+}
+
+/* Expand an atomic exchange operation: atomically store VAL into MEM,
+   returning the prior contents in RETVAL.  SCRATCH is a scratch
+   register.  */
+
+void
+alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
+{
+  enum machine_mode mode = GET_MODE (mem);
+  rtx label, x, cond = gen_lowpart (DImode, scratch);
+
+  label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  emit_label (XEXP (label, 0));
+
+  emit_load_locked (mode, retval, mem);
+  emit_move_insn (scratch, val);
+  emit_store_conditional (mode, cond, mem, scratch);
+
+  /* Retry if the store-conditional failed.  */
+  x = gen_rtx_EQ (DImode, cond, const0_rtx);
+  emit_unlikely_jump (x, label);
+
+  emit_insn (gen_memory_barrier ());
+}
+
+/* Expand an atomic exchange on a one- or two-byte quantity (QImode or
+   HImode MEM) in terms of the aligned DImode ll/sc patterns.  DST
+   receives the old value.  */
+
+void
+alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val)
+{
+  enum machine_mode mode = GET_MODE (mem);
+  rtx addr, align, wdst;
+  rtx (*fn4) (rtx, rtx, rtx, rtx);
+
+  /* Force the address into a register.  */
+  addr = force_reg (DImode, XEXP (mem, 0));
+
+  /* Align it to a multiple of 8.  */
+  align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
+			       NULL_RTX, 1, OPTAB_DIRECT);
+
+  /* Insert val into the correct byte location within the word.  */
+  val = emit_insxl (mode, val, addr);
+
+  wdst = gen_reg_rtx (DImode);
+  if (mode == QImode)
+    fn4 = gen_sync_lock_test_and_setqi_1;
+  else
+    fn4 = gen_sync_lock_test_and_sethi_1;
+  emit_insn (fn4 (wdst, addr, val, align));
+
+  emit_move_insn (dst, gen_lowpart (mode, wdst));
+}
+
+/* Split a sub-word atomic exchange emitted by
+   alpha_expand_lock_test_and_set_12 into an ll/sc loop on the aligned
+   quadword at ALIGN, extracting the old MODE-sized field into DEST and
+   merging in VAL (already shifted into position).  */
+
+void
+alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
+				  rtx val, rtx align, rtx scratch)
+{
+  rtx label, mem, width, mask, x;
+
+  mem = gen_rtx_MEM (DImode, align);
+  MEM_VOLATILE_P (mem) = 1;
+
+  label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+  emit_label (XEXP (label, 0));
+
+  emit_load_locked (DImode, scratch, mem);
+
+  /* Extract the old field into DEST, then clear it in SCRATCH.  */
+  width = GEN_INT (GET_MODE_BITSIZE (mode));
+  mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
+  if (WORDS_BIG_ENDIAN)
+    {
+      emit_insn (gen_extxl_be (dest, scratch, width, addr));
+      emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
+    }
+  else
+    {
+      emit_insn (gen_extxl_le (dest, scratch, width, addr));
+      emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
+    }
+  emit_insn (gen_iordi3 (scratch, scratch, val));
+
+  emit_store_conditional (DImode, scratch, mem, scratch);
+
+  /* Retry if the store-conditional failed.  */
+  x = gen_rtx_EQ (DImode, scratch, const0_rtx);
+  emit_unlikely_jump (x, label);
+
+  emit_insn (gen_memory_barrier ());
+}
+\f
+/* Adjust the cost of a scheduling dependency.  Return the new cost of
+   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
+
+static int
+alpha_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
+{
+  enum attr_type insn_type, dep_insn_type;
+
+  /* If the dependence is an anti-dependence, there is no cost.  For an
+     output dependence, there is sometimes a cost, but it doesn't seem
+     worth handling those few cases.  */
+  if (REG_NOTE_KIND (link) != 0)
+    return cost;
+
+  /* If we can't recognize the insns, we can't really do anything.  */
+  if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
+    return cost;
+
+  insn_type = get_attr_type (insn);
+  dep_insn_type = get_attr_type (dep_insn);
+
+  /* Bring in the user-defined memory latency (-mmemory-latency);
+     loads already cost one cycle in the DFA description.  */
+  if (dep_insn_type == TYPE_ILD
+      || dep_insn_type == TYPE_FLD
+      || dep_insn_type == TYPE_LDSYM)
+    cost += alpha_memory_latency-1;
+
+  /* Everything else handled in DFA bypasses now.  */
+
+  return cost;
+}
+
+/* The number of instructions that can be issued per cycle.  */
+
+static int
+alpha_issue_rate (void)
+{
+  /* EV4 is a dual-issue machine; every later tuning target here
+     issues four per cycle.  */
+  if (alpha_tune == PROCESSOR_EV4)
+    return 2;
+  return 4;
+}
+
+/* How many alternative schedules to try.  This should be as wide as the
+   scheduling freedom in the DFA, but no wider.  Making this value too
+   large results extra work for the scheduler.
+
+   For EV4, loads can be issued to either IB0 or IB1, thus we have 2
+   alternative schedules.  For EV5, we can choose between E0/E1 and
+   FA/FM.  For EV6, an arithmetic insn can be issued to U0/U1/L0/L1.  */
+
+static int
+alpha_multipass_dfa_lookahead (void)
+{
+  /* Four choices on EV6, two everywhere else.  */
+  if (alpha_tune == PROCESSOR_EV6)
+    return 4;
+  return 2;
+}
+\f
+/* Machine-specific function data, attached to cfun->machine and
+   garbage-collected via GTY.  */
+
+struct machine_function GTY(())
+{
+  /* For unicosmk.  */
+  /* List of call information words for calls from this function.  */
+  struct rtx_def *first_ciw;
+  struct rtx_def *last_ciw;
+  int ciw_count;
+
+  /* List of deferred case vectors.  */
+  struct rtx_def *addr_list;
+
+  /* For OSF.  Cached local-dynamic TLS symbol name, filled in lazily
+     by get_some_local_dynamic_name.  */
+  const char *some_ld_name;
+
+  /* For TARGET_LD_BUGGY_LDGP.  Stack slot caching the GP value; see
+     alpha_gp_save_rtx.  */
+  struct rtx_def *gp_save_rtx;
+};
+
+/* How to allocate a 'struct machine_function'.  Returns a zeroed,
+   GC-allocated instance.  */
+
+static struct machine_function *
+alpha_init_machine_status (void)
+{
+  return ((struct machine_function *)
+	  ggc_alloc_cleared (sizeof (struct machine_function)));
+}
+
+/* Functions to save and restore alpha_return_addr_rtx.  */
+
+/* Start the ball rolling with RETURN_ADDR_RTX.  */
+
+rtx
+alpha_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
+{
+  /* Only the current frame's return address is available; any other
+     COUNT yields zero.  */
+  if (count == 0)
+    return get_hard_reg_initial_val (Pmode, REG_RA);
+
+  return const0_rtx;
+}
+
+/* Return or create a memory slot containing the gp value for the current
+   function.  Needed only if TARGET_LD_BUGGY_LDGP.  The slot is created
+   and initialized at most once per function and cached in
+   cfun->machine->gp_save_rtx.  */
+
+rtx
+alpha_gp_save_rtx (void)
+{
+  rtx seq, m = cfun->machine->gp_save_rtx;
+
+  if (m == NULL)
+    {
+      /* Build the initializing store in its own sequence so it can be
+	 placed on the entry edge below.  */
+      start_sequence ();
+
+      m = assign_stack_local (DImode, UNITS_PER_WORD, BITS_PER_WORD);
+      m = validize_mem (m);
+      emit_move_insn (m, pic_offset_table_rtx);
+
+      seq = get_insns ();
+      end_sequence ();
+
+      /* We used to simply emit the sequence after entry_of_function.
+	 However this breaks the CFG if the first instruction in the
+	 first block is not the NOTE_INSN_BASIC_BLOCK, for example a
+	 label.  Emit the sequence properly on the edge.  We are only
+	 invoked from dw2_build_landing_pads and finish_eh_generation
+	 will call commit_edge_insertions thanks to a kludge.  */
+      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+
+      cfun->machine->gp_save_rtx = m;
+    }
+
+  return m;
+}
+
+/* Return nonzero if the return-address register ($26) is ever set in
+   the current function's insn stream, i.e. may be clobbered.  When no
+   pseudo was created for the incoming RA, fall back to the dataflow
+   liveness information.  */
+
+static int
+alpha_ra_ever_killed (void)
+{
+  rtx top;
+
+  if (!has_hard_reg_initial_val (Pmode, REG_RA))
+    return (int)df_regs_ever_live_p (REG_RA);
+
+  /* Scan the whole function body, including any pending sequences.  */
+  push_topmost_sequence ();
+  top = get_insns ();
+  pop_topmost_sequence ();
+
+  return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
+}
+
+\f
+/* Return the trap mode suffix applicable to the current
+   instruction, or NULL.  The choice depends on both the insn's
+   trap_suffix attribute and the -mfp-trap-mode setting (alpha_fptm).  */
+
+static const char *
+get_trap_mode_suffix (void)
+{
+  enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
+
+  switch (s)
+    {
+    case TRAP_SUFFIX_NONE:
+      return NULL;
+
+    case TRAP_SUFFIX_SU:
+      if (alpha_fptm >= ALPHA_FPTM_SU)
+	return "su";
+      return NULL;
+
+    case TRAP_SUFFIX_SUI:
+      if (alpha_fptm >= ALPHA_FPTM_SUI)
+	return "sui";
+      return NULL;
+
+    case TRAP_SUFFIX_V_SV:
+      switch (alpha_fptm)
+	{
+	case ALPHA_FPTM_N:
+	  return NULL;
+	case ALPHA_FPTM_U:
+	  return "v";
+	case ALPHA_FPTM_SU:
+	case ALPHA_FPTM_SUI:
+	  return "sv";
+	default:
+	  gcc_unreachable ();
+	}
+
+    case TRAP_SUFFIX_V_SV_SVI:
+      switch (alpha_fptm)
+	{
+	case ALPHA_FPTM_N:
+	  return NULL;
+	case ALPHA_FPTM_U:
+	  return "v";
+	case ALPHA_FPTM_SU:
+	  return "sv";
+	case ALPHA_FPTM_SUI:
+	  return "svi";
+	default:
+	  gcc_unreachable ();
+	}
+      break;
+
+    case TRAP_SUFFIX_U_SU_SUI:
+      switch (alpha_fptm)
+	{
+	case ALPHA_FPTM_N:
+	  return NULL;
+	case ALPHA_FPTM_U:
+	  return "u";
+	case ALPHA_FPTM_SU:
+	  return "su";
+	case ALPHA_FPTM_SUI:
+	  return "sui";
+	default:
+	  gcc_unreachable ();
+	}
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+  gcc_unreachable ();
+}
+
+/* Return the rounding mode suffix applicable to the current
+   instruction, or NULL.  The choice depends on both the insn's
+   round_suffix attribute and the -mfp-rounding-mode setting
+   (alpha_fprm).  */
+
+static const char *
+get_round_mode_suffix (void)
+{
+  enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
+
+  switch (s)
+    {
+    case ROUND_SUFFIX_NONE:
+      return NULL;
+    case ROUND_SUFFIX_NORMAL:
+      switch (alpha_fprm)
+	{
+	case ALPHA_FPRM_NORM:
+	  return NULL;
+	case ALPHA_FPRM_MINF:
+	  return "m";
+	case ALPHA_FPRM_CHOP:
+	  return "c";
+	case ALPHA_FPRM_DYN:
+	  return "d";
+	default:
+	  gcc_unreachable ();
+	}
+      break;
+
+    case ROUND_SUFFIX_C:
+      return "c";
+
+    default:
+      gcc_unreachable ();
+    }
+  gcc_unreachable ();
+}
+
+/* Locate some local-dynamic symbol still in use by this function
+   so that we can print its name in some movdi_er_tlsldm pattern.  */
+
+/* for_each_rtx callback: if *PX is a local-dynamic TLS SYMBOL_REF,
+   cache its name in cfun->machine->some_ld_name and stop the walk by
+   returning 1.  */
+
+static int
+get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
+{
+  rtx x = *px;
+
+  if (GET_CODE (x) == SYMBOL_REF
+      && SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
+    {
+      cfun->machine->some_ld_name = XSTR (x, 0);
+      return 1;
+    }
+
+  return 0;
+}
+
+/* Return the name of some local-dynamic TLS symbol used in this
+   function, scanning the insn stream on first use and caching the
+   result.  Aborts if no such symbol exists (callers only ask when the
+   %& operand code is used, which implies one does).  */
+
+static const char *
+get_some_local_dynamic_name (void)
+{
+  rtx insn;
+
+  if (cfun->machine->some_ld_name)
+    return cfun->machine->some_ld_name;
+
+  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
+    if (INSN_P (insn)
+	&& for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
+      return cfun->machine->some_ld_name;
+
+  gcc_unreachable ();
+}
+
+/* Print an operand.  Recognize special options, documented below.
+
+   Fix: the validity check in case 'S' used '&&' where the parallel
+   case 's' uses '||', and compared (INTVAL (x) & 7) against 8, which a
+   3-bit residue can never equal; the check could therefore never
+   reject a bad operand and evaluated INTVAL on a non-CONST_INT.  It
+   now mirrors case 's', and the lossage message names %S.  */
+
+void
+print_operand (FILE *file, rtx x, int code)
+{
+  int i;
+
+  switch (code)
+    {
+    case '~':
+      /* Print the assembler name of the current function.  */
+      assemble_name (file, alpha_fnname);
+      break;
+
+    case '&':
+      /* Print the name of a local-dynamic TLS symbol used in this
+	 function.  */
+      assemble_name (file, get_some_local_dynamic_name ());
+      break;
+
+    case '/':
+      /* Print the trap and/or rounding mode suffix, if any.  */
+      {
+	const char *trap = get_trap_mode_suffix ();
+	const char *round = get_round_mode_suffix ();
+
+	if (trap || round)
+	  fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
+		   (trap ? trap : ""), (round ? round : ""));
+	break;
+      }
+
+    case ',':
+      /* Generates single precision instruction suffix.  */
+      fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
+      break;
+
+    case '-':
+      /* Generates double precision instruction suffix.  */
+      fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
+      break;
+
+    case '#':
+      /* Print the current literal sequence number, allocating one on
+	 first use within the insn.  */
+      if (alpha_this_literal_sequence_number == 0)
+	alpha_this_literal_sequence_number = alpha_next_sequence_number++;
+      fprintf (file, "%d", alpha_this_literal_sequence_number);
+      break;
+
+    case '*':
+      /* Print the current gpdisp sequence number, allocating one on
+	 first use within the insn.  */
+      if (alpha_this_gpdisp_sequence_number == 0)
+	alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
+      fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
+      break;
+
+    case 'H':
+      /* Print the operand of a HIGH.  */
+      if (GET_CODE (x) == HIGH)
+	output_addr_const (file, XEXP (x, 0));
+      else
+	output_operand_lossage ("invalid %%H value");
+      break;
+
+    case 'J':
+      /* Print a lituse relocation annotation for a TLS or jsr call.  */
+      {
+	const char *lituse;
+
+	if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD_CALL)
+	  {
+	    x = XVECEXP (x, 0, 0);
+	    lituse = "lituse_tlsgd";
+	  }
+	else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM_CALL)
+	  {
+	    x = XVECEXP (x, 0, 0);
+	    lituse = "lituse_tlsldm";
+	  }
+	else if (GET_CODE (x) == CONST_INT)
+	  lituse = "lituse_jsr";
+	else
+	  {
+	    output_operand_lossage ("invalid %%J value");
+	    break;
+	  }
+
+	if (x != const0_rtx)
+	  fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
+      }
+      break;
+
+    case 'j':
+      /* Print a direct-call lituse annotation.  */
+      {
+	const char *lituse;
+
+#ifdef HAVE_AS_JSRDIRECT_RELOCS
+	lituse = "lituse_jsrdirect";
+#else
+	lituse = "lituse_jsr";
+#endif
+
+	gcc_assert (INTVAL (x) != 0);
+	fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
+      }
+      break;
+    case 'r':
+      /* If this operand is the constant zero, write it as "$31".  */
+      if (GET_CODE (x) == REG)
+	fprintf (file, "%s", reg_names[REGNO (x)]);
+      else if (x == CONST0_RTX (GET_MODE (x)))
+	fprintf (file, "$31");
+      else
+	output_operand_lossage ("invalid %%r value");
+      break;
+
+    case 'R':
+      /* Similar, but for floating-point.  */
+      if (GET_CODE (x) == REG)
+	fprintf (file, "%s", reg_names[REGNO (x)]);
+      else if (x == CONST0_RTX (GET_MODE (x)))
+	fprintf (file, "$f31");
+      else
+	output_operand_lossage ("invalid %%R value");
+      break;
+
+    case 'N':
+      /* Write the 1's complement of a constant.  */
+      if (GET_CODE (x) != CONST_INT)
+	output_operand_lossage ("invalid %%N value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
+      break;
+
+    case 'P':
+      /* Write 1 << C, for a constant C.  */
+      if (GET_CODE (x) != CONST_INT)
+	output_operand_lossage ("invalid %%P value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
+      break;
+
+    case 'h':
+      /* Write the high-order 16 bits of a constant, sign-extended.  */
+      if (GET_CODE (x) != CONST_INT)
+	output_operand_lossage ("invalid %%h value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
+      break;
+
+    case 'L':
+      /* Write the low-order 16 bits of a constant, sign-extended.  */
+      if (GET_CODE (x) != CONST_INT)
+	output_operand_lossage ("invalid %%L value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
+	       (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
+      break;
+
+    case 'm':
+      /* Write mask for ZAP insn: one mask bit per nonzero byte of the
+	 constant.  */
+      if (GET_CODE (x) == CONST_DOUBLE)
+	{
+	  HOST_WIDE_INT mask = 0;
+	  HOST_WIDE_INT value;
+
+	  value = CONST_DOUBLE_LOW (x);
+	  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
+	       i++, value >>= 8)
+	    if (value & 0xff)
+	      mask |= (1 << i);
+
+	  value = CONST_DOUBLE_HIGH (x);
+	  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
+	       i++, value >>= 8)
+	    if (value & 0xff)
+	      mask |= (1 << (i + sizeof (int)));
+
+	  /* Only the low eight mask bits are meaningful for zap.  */
+	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
+	}
+
+      else if (GET_CODE (x) == CONST_INT)
+	{
+	  HOST_WIDE_INT mask = 0, value = INTVAL (x);
+
+	  for (i = 0; i < 8; i++, value >>= 8)
+	    if (value & 0xff)
+	      mask |= (1 << i);
+
+	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
+	}
+      else
+	output_operand_lossage ("invalid %%m value");
+      break;
+
+    case 'M':
+      /* 'b', 'w', 'l', or 'q' as the value of the constant.  */
+      if (GET_CODE (x) != CONST_INT
+	  || (INTVAL (x) != 8 && INTVAL (x) != 16
+	      && INTVAL (x) != 32 && INTVAL (x) != 64))
+	output_operand_lossage ("invalid %%M value");
+
+      fprintf (file, "%s",
+	       (INTVAL (x) == 8 ? "b"
+		: INTVAL (x) == 16 ? "w"
+		: INTVAL (x) == 32 ? "l"
+		: "q"));
+      break;
+
+    case 'U':
+      /* Similar, except do it from the mask.  */
+      if (GET_CODE (x) == CONST_INT)
+	{
+	  HOST_WIDE_INT value = INTVAL (x);
+
+	  if (value == 0xff)
+	    {
+	      fputc ('b', file);
+	      break;
+	    }
+	  if (value == 0xffff)
+	    {
+	      fputc ('w', file);
+	      break;
+	    }
+	  if (value == 0xffffffff)
+	    {
+	      fputc ('l', file);
+	      break;
+	    }
+	  if (value == -1)
+	    {
+	      fputc ('q', file);
+	      break;
+	    }
+	}
+      else if (HOST_BITS_PER_WIDE_INT == 32
+	       && GET_CODE (x) == CONST_DOUBLE
+	       && CONST_DOUBLE_LOW (x) == 0xffffffff
+	       && CONST_DOUBLE_HIGH (x) == 0)
+	{
+	  fputc ('l', file);
+	  break;
+	}
+      output_operand_lossage ("invalid %%U value");
+      break;
+
+    case 's':
+      /* Write the constant value divided by 8 for little-endian mode or
+	 (56 - value) / 8 for big-endian mode.  */
+
+      if (GET_CODE (x) != CONST_INT
+	  || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
+						     ? 56
+						     : 64)
+	  || (INTVAL (x) & 7) != 0)
+	output_operand_lossage ("invalid %%s value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
+	       WORDS_BIG_ENDIAN
+	       ? (56 - INTVAL (x)) / 8
+	       : INTVAL (x) / 8);
+      break;
+
+    case 'S':
+      /* Same, except compute (64 - c) / 8 */
+
+      /* Validity check mirrors case 's': a CONST_INT multiple of 8
+	 below 64.  The old '&&' chain could never reject anything.  */
+      if (GET_CODE (x) != CONST_INT
+	  || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
+	  || (INTVAL (x) & 7) != 0)
+	output_operand_lossage ("invalid %%S value");
+
+      fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
+      break;
+
+    case 't':
+      {
+	/* On Unicos/Mk systems: use a DEX expression if the symbol
+	   clashes with a register name.  */
+	int dex = unicosmk_need_dex (x);
+	if (dex)
+	  fprintf (file, "DEX(%d)", dex);
+	else
+	  output_addr_const (file, x);
+      }
+      break;
+
+    case 'C': case 'D': case 'c': case 'd':
+      /* Write out comparison name.  'D' reverses, 'c' swaps, 'd' does
+	 both.  */
+      {
+	enum rtx_code c = GET_CODE (x);
+
+	if (!COMPARISON_P (x))
+	  output_operand_lossage ("invalid %%C value");
+
+	else if (code == 'D')
+	  c = reverse_condition (c);
+	else if (code == 'c')
+	  c = swap_condition (c);
+	else if (code == 'd')
+	  c = swap_condition (reverse_condition (c));
+
+	if (c == LEU)
+	  fprintf (file, "ule");
+	else if (c == LTU)
+	  fprintf (file, "ult");
+	else if (c == UNORDERED)
+	  fprintf (file, "un");
+	else
+	  fprintf (file, "%s", GET_RTX_NAME (c));
+      }
+      break;
+
+    case 'E':
+      /* Write the divide or modulus operator.  */
+      switch (GET_CODE (x))
+	{
+	case DIV:
+	  fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
+	  break;
+	case UDIV:
+	  fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
+	  break;
+	case MOD:
+	  fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
+	  break;
+	case UMOD:
+	  fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
+	  break;
+	default:
+	  output_operand_lossage ("invalid %%E value");
+	  break;
+	}
+      break;
+
+    case 'A':
+      /* Write "_u" for unaligned access.  */
+      if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
+	fprintf (file, "_u");
+      break;
+
+    case 0:
+      /* No code: print registers, memory addresses, TLS offsets, or
+	 plain constants.  */
+      if (GET_CODE (x) == REG)
+	fprintf (file, "%s", reg_names[REGNO (x)]);
+      else if (GET_CODE (x) == MEM)
+	output_address (XEXP (x, 0));
+      else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
+	{
+	  switch (XINT (XEXP (x, 0), 1))
+	    {
+	    case UNSPEC_DTPREL:
+	    case UNSPEC_TPREL:
+	      output_addr_const (file, XVECEXP (XEXP (x, 0), 0, 0));
+	      break;
+	    default:
+	      output_operand_lossage ("unknown relocation unspec");
+	      break;
+	    }
+	}
+      else
+	output_addr_const (file, x);
+      break;
+
+    default:
+      output_operand_lossage ("invalid %%xn code");
+    }
+}
+
+void
+print_operand_address (FILE *file, rtx addr)
+{
+ int basereg = 31;
+ HOST_WIDE_INT offset = 0;
+
+ if (GET_CODE (addr) == AND)
+ addr = XEXP (addr, 0);
+
+ if (GET_CODE (addr) == PLUS
+ && GET_CODE (XEXP (addr, 1)) == CONST_INT)
+ {
+ offset = INTVAL (XEXP (addr, 1));
+ addr = XEXP (addr, 0);
+ }
+
+ if (GET_CODE (addr) == LO_SUM)
+ {
+ const char *reloc16, *reloclo;
+ rtx op1 = XEXP (addr, 1);
+
+ if (GET_CODE (op1) == CONST && GET_CODE (XEXP (op1, 0)) == UNSPEC)
+ {
+ op1 = XEXP (op1, 0);
+ switch (XINT (op1, 1))
+ {
+ case UNSPEC_DTPREL:
+ reloc16 = NULL;
+ reloclo = (alpha_tls_size == 16 ? "dtprel" : "dtprello");
+ break;
+ case UNSPEC_TPREL:
+ reloc16 = NULL;
+ reloclo = (alpha_tls_size == 16 ? "tprel" : "tprello");
+ break;
+ default:
+ output_operand_lossage ("unknown relocation unspec");
+ return;
+ }
+
+ output_addr_const (file, XVECEXP (op1, 0, 0));
+ }
+ else
+ {
+ reloc16 = "gprel";
+ reloclo = "gprellow";
+ output_addr_const (file, op1);