+/* Look for combinations of insns that can be converted to BN or BP
+   opcodes.  This is, unfortunately, too complex to do with MD
+   patterns.  */
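+
+/* Schematically, the kind of sequence being looked for is roughly:
+
+     (set (reg:QI rN) (mem:QI <below-100 address>))
+     (set (reg:HI rN) (and:HI (reg:HI rN) (const_int <single-bit mask>)))
+     (set (pc) (if_then_else (ne (reg:HI rN) (const_int 0)) ...))
+
+   The RTL shapes above are illustrative only; the code below matches
+   on the actual insn codes.  Once combined, the branch tests the
+   memory bit directly and can be emitted as a single BN or BP
+   instruction.  */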
+
+static void
+combine_bnp (rtx insn)
+{
+  int insn_code, regno, need_extend;
+  unsigned int mask;
+  rtx cond, reg, and, load, qireg, mem;
+  enum machine_mode load_mode = QImode;
+  enum machine_mode and_mode = QImode;
+  rtx shift = NULL_RTX;
+
+  insn_code = recog_memoized (insn);
+  if (insn_code != CODE_FOR_cbranchhi
+      && insn_code != CODE_FOR_cbranchhi_neg)
+    return;
+
+  cond = XVECEXP (PATTERN (insn), 0, 0); /* The SET inside the PARALLEL.  */
+  cond = XEXP (cond, 1);                 /* The IF_THEN_ELSE.  */
+  cond = XEXP (cond, 0);                 /* The condition itself.  */
+  switch (GET_CODE (cond))
+    {
+    case NE:
+    case EQ:
+      need_extend = 0;
+      break;
+    case LT:
+    case GE:
+      need_extend = 1;
+      break;
+    default:
+      return;
+    }
+
+  reg = XEXP (cond, 0);
+  if (GET_CODE (reg) != REG)
+    return;
+  regno = REGNO (reg);
+  if (XEXP (cond, 1) != const0_rtx)
+    return;
+  /* The tested register must die in the branch, since the insns that
+     set it are about to be deleted.  */
+  if (! find_regno_note (insn, REG_DEAD, regno))
+    return;
+  qireg = gen_rtx_REG (QImode, regno);
+
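+  /* Search backwards from the branch for the insn that computed the
+     tested value: a sign-extend for LT/GE, an AND for EQ/NE.  */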
+  if (need_extend)
+    {
+      /* LT and GE conditionals should have a sign extend before
+         them.  */
+      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
+        {
+          int and_code = recog_memoized (and);
+
+          if (and_code == CODE_FOR_extendqihi2
+              && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
+            break;
+
+          if (and_code == CODE_FOR_movhi_internal
+              && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
+            {
+              /* This is for testing bit 15: a full HImode move
+                 already has the sign bit in place, so there is no
+                 separate extend insn.  Point AND at the branch itself
+                 so that nothing extra is deleted below.  */
+              and = insn;
+              break;
+            }
+
+          if (reg_mentioned_p (reg, and))
+            return;
+
+          if (GET_CODE (and) != NOTE
+              && GET_CODE (and) != INSN)
+            return;
+        }
+    }
+  else
+    {
+      /* EQ and NE conditionals have an AND before them.  */
+      for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
+        {
+          if (recog_memoized (and) == CODE_FOR_andhi3
+              && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
+            break;
+
+          if (reg_mentioned_p (reg, and))
+            return;
+
+          if (GET_CODE (and) != NOTE
+              && GET_CODE (and) != INSN)
+            return;
+        }
+
+      if (and)
+        {
+          /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
+             followed by an AND like this:
+
+               (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
+                          (clobber (reg:BI carry))])
+
+               (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
+
+             Attempt to detect this here.  */
+          for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
+            {
+              if (recog_memoized (shift) == CODE_FOR_lshrhi3
+                  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
+                  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
+                break;
+
+              if (reg_mentioned_p (reg, shift)
+                  || (GET_CODE (shift) != NOTE
+                      && GET_CODE (shift) != INSN))
+                {
+                  shift = NULL_RTX;
+                  break;
+                }
+            }
+        }
+    }
+  if (!and)
+    return;
+
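+  /* Finally, search backwards for the insn that loaded the value
+     under test.  It must read a below-100 memory operand, since only
+     those addresses can be encoded in the bit-test instructions.  */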
+  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
+       load;
+       load = prev_real_insn (load))
+    {
+      int load_code = recog_memoized (load);
+
+      if (load_code == CODE_FOR_movhi_internal
+          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
+          && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
+        {
+          load_mode = HImode;
+          break;
+        }
+
+      if (load_code == CODE_FOR_movqi_internal
+          && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
+          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
+        {
+          load_mode = QImode;
+          break;
+        }
+
+      if (load_code == CODE_FOR_zero_extendqihi2
+          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+          && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
+        {
+          load_mode = QImode;
+          and_mode = HImode;
+          break;
+        }
+
+      if (reg_mentioned_p (reg, load))
+        return;
+
+      if (GET_CODE (load) != NOTE
+          && GET_CODE (load) != INSN)
+        return;
+    }
+  if (!load)
+    return;
+
+  mem = SET_SRC (PATTERN (load));
+
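+  /* Work out which bit the branch tests: the sign bit of the loaded
+     value for LT/GE, or the single bit selected by the AND mask for
+     EQ/NE, shifted back to its original position if a right-shift
+     preceded the AND.  */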
+  if (need_extend)
+    {
+      mask = (load_mode == HImode) ? 0x8000 : 0x80;
+
+      /* If the load zero-extended the memory operand and we are about
+         to wrap it in a sign-extend instead, strip the zero-extend
+         and apply the sign-extend to the memory operand directly.  */
+      if (GET_CODE (mem) == ZERO_EXTEND)
+        mem = XEXP (mem, 0);
+    }
+  else
+    {
+      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
+        return;
+
+      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
+
+      if (shift)
+        mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
+    }
+
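+  /* BN and BP test a bit within a byte, so an HImode reference must
+     be narrowed to the byte holding the bit.  For example, a mask of
+     0x0100 selects bit 0 of the high byte, which on this
+     little-endian port lives at the original address plus one; the
+     mask is then reduced to 0x01.  */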
+  if (load_mode == HImode)
+    {
+      rtx addr = XEXP (mem, 0);
+
+      if (! (mask & 0xff))
+        {
+          addr = plus_constant (addr, 1);
+          mask >>= 8;
+        }
+      mem = gen_rtx_MEM (QImode, addr);
+    }
+
+  if (need_extend)
+    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
+  else
+    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
+
+  /* The condition now references memory directly; force the branch
+     to be re-recognized, and delete the insns it replaces.  */
+  INSN_CODE (insn) = -1;
+  delete_insn (load);
+
+  if (and != insn)
+    delete_insn (and);
+
+  if (shift != NULL_RTX)
+    delete_insn (shift);
+}
+
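+/* Worker function for TARGET_MACHINE_DEPENDENT_REORG.  */
+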
+static void
+xstormy16_reorg (void)
+{
+  rtx insn;
+
+  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+    {
+      if (! JUMP_P (insn))
+        continue;
+      combine_bnp (insn);
+    }
+}
+\f
+/* Worker function for TARGET_RETURN_IN_MEMORY.  */
+
+static bool
+xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
+{
+  const HOST_WIDE_INT size = int_size_in_bytes (type);
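+
+  /* int_size_in_bytes returns -1 when the size is not a compile-time
+     constant; such values must be returned in memory, as must
+     anything larger than the block of argument registers
+     (UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS bytes, i.e. 12 bytes
+     here assuming 2-byte words and six argument registers).  */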
+  return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
+}
+\f
+#undef TARGET_ASM_ALIGNED_HI_OP