diff options
author | bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4> | 2016-04-16 17:02:56 +0000 |
---|---|---|
committer | bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4> | 2016-04-16 17:02:56 +0000 |
commit | c8aed844acdc89884d630c7e3266ecd8d4101847 (patch) | |
tree | 0d046a9255339220c1bbd6ba14e84e5304acbe10 /gcc/config/aarch64/aarch64.c | |
parent | 74f8420a5b204c5e021ce05b3d0d79ba9718360a (diff) | |
download | gcc-c8aed844acdc89884d630c7e3266ecd8d4101847.tar.gz |
2016-04-16 Basile Starynkevitch <basile@starynkevitch.net>
{{merging with even more of GCC 6, using subversion 1.9
svn merge -r231651:232605 ^/trunk
}}
[gcc/]
2016-04-16 Basile Starynkevitch <basile@starynkevitch.net>
* melt/libmelt-ana-gimple.melt:
(melt_build_transaction_with_label_norm): New inlined function,
for gimple_transaction operator implementation...
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@235064 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/config/aarch64/aarch64.c')
-rw-r--r-- | gcc/config/aarch64/aarch64.c | 469 |
1 file changed, 224 insertions, 245 deletions
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c index 1e1b864d865..03bc1b97c2b 100644 --- a/gcc/config/aarch64/aarch64.c +++ b/gcc/config/aarch64/aarch64.c @@ -1,5 +1,5 @@ /* Machine description for AArch64 architecture. - Copyright (C) 2009-2015 Free Software Foundation, Inc. + Copyright (C) 2009-2016 Free Software Foundation, Inc. Contributed by ARM Ltd. This file is part of GCC. @@ -4142,11 +4142,20 @@ aarch64_select_cc_mode (RTX_CODE code, rtx x, rtx y) } } + /* Equality comparisons of short modes against zero can be performed + using the TST instruction with the appropriate bitmask. */ + if (y == const0_rtx && REG_P (x) + && (code == EQ || code == NE) + && (GET_MODE (x) == HImode || GET_MODE (x) == QImode)) + return CC_NZmode; + if ((GET_MODE (x) == SImode || GET_MODE (x) == DImode) && y == const0_rtx && (code == EQ || code == NE || code == LT || code == GE) && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS || GET_CODE (x) == AND - || GET_CODE (x) == NEG)) + || GET_CODE (x) == NEG + || (GET_CODE (x) == ZERO_EXTRACT && CONST_INT_P (XEXP (x, 1)) + && CONST_INT_P (XEXP (x, 2))))) return CC_NZmode; /* A compare with a shifted operand. 
Because of canonicalization, @@ -4196,7 +4205,6 @@ aarch64_get_condition_code (rtx x) static int aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code) { - int ne = -1, eq = -1; switch (mode) { case CCFPmode: @@ -4219,56 +4227,6 @@ aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code) } break; - case CC_DNEmode: - ne = AARCH64_NE; - eq = AARCH64_EQ; - break; - - case CC_DEQmode: - ne = AARCH64_EQ; - eq = AARCH64_NE; - break; - - case CC_DGEmode: - ne = AARCH64_GE; - eq = AARCH64_LT; - break; - - case CC_DLTmode: - ne = AARCH64_LT; - eq = AARCH64_GE; - break; - - case CC_DGTmode: - ne = AARCH64_GT; - eq = AARCH64_LE; - break; - - case CC_DLEmode: - ne = AARCH64_LE; - eq = AARCH64_GT; - break; - - case CC_DGEUmode: - ne = AARCH64_CS; - eq = AARCH64_CC; - break; - - case CC_DLTUmode: - ne = AARCH64_CC; - eq = AARCH64_CS; - break; - - case CC_DGTUmode: - ne = AARCH64_HI; - eq = AARCH64_LS; - break; - - case CC_DLEUmode: - ne = AARCH64_LS; - eq = AARCH64_HI; - break; - case CCmode: switch (comp_code) { @@ -4330,12 +4288,6 @@ aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code) break; } - if (comp_code == NE) - return ne; - - if (comp_code == EQ) - return eq; - return -1; } @@ -4376,69 +4328,27 @@ aarch64_const_vec_all_same_int_p (rtx x, HOST_WIDE_INT val) #define AARCH64_CC_Z (1 << 2) #define AARCH64_CC_N (1 << 3) -/* N Z C V flags for ccmp. The first code is for AND op and the other - is for IOR op. Indexed by AARCH64_COND_CODE. */ -static const int aarch64_nzcv_codes[][2] = -{ - {AARCH64_CC_Z, 0}, /* EQ, Z == 1. */ - {0, AARCH64_CC_Z}, /* NE, Z == 0. */ - {AARCH64_CC_C, 0}, /* CS, C == 1. */ - {0, AARCH64_CC_C}, /* CC, C == 0. */ - {AARCH64_CC_N, 0}, /* MI, N == 1. */ - {0, AARCH64_CC_N}, /* PL, N == 0. */ - {AARCH64_CC_V, 0}, /* VS, V == 1. */ - {0, AARCH64_CC_V}, /* VC, V == 0. */ - {AARCH64_CC_C, 0}, /* HI, C ==1 && Z == 0. */ - {0, AARCH64_CC_C}, /* LS, !(C == 1 && Z == 0). 
*/ - {0, AARCH64_CC_V}, /* GE, N == V. */ - {AARCH64_CC_V, 0}, /* LT, N != V. */ - {0, AARCH64_CC_Z}, /* GT, Z == 0 && N == V. */ - {AARCH64_CC_Z, 0}, /* LE, !(Z == 0 && N == V). */ - {0, 0}, /* AL, Any. */ - {0, 0}, /* NV, Any. */ +/* N Z C V flags for ccmp. Indexed by AARCH64_COND_CODE. */ +static const int aarch64_nzcv_codes[] = +{ + 0, /* EQ, Z == 1. */ + AARCH64_CC_Z, /* NE, Z == 0. */ + 0, /* CS, C == 1. */ + AARCH64_CC_C, /* CC, C == 0. */ + 0, /* MI, N == 1. */ + AARCH64_CC_N, /* PL, N == 0. */ + 0, /* VS, V == 1. */ + AARCH64_CC_V, /* VC, V == 0. */ + 0, /* HI, C ==1 && Z == 0. */ + AARCH64_CC_C, /* LS, !(C == 1 && Z == 0). */ + AARCH64_CC_V, /* GE, N == V. */ + 0, /* LT, N != V. */ + AARCH64_CC_Z, /* GT, Z == 0 && N == V. */ + 0, /* LE, !(Z == 0 && N == V). */ + 0, /* AL, Any. */ + 0 /* NV, Any. */ }; -int -aarch64_ccmp_mode_to_code (enum machine_mode mode) -{ - switch (mode) - { - case CC_DNEmode: - return NE; - - case CC_DEQmode: - return EQ; - - case CC_DLEmode: - return LE; - - case CC_DGTmode: - return GT; - - case CC_DLTmode: - return LT; - - case CC_DGEmode: - return GE; - - case CC_DLEUmode: - return LEU; - - case CC_DGTUmode: - return GTU; - - case CC_DLTUmode: - return LTU; - - case CC_DGEUmode: - return GEU; - - default: - gcc_unreachable (); - } -} - - static void aarch64_print_operand (FILE *f, rtx x, int code) { @@ -4537,36 +4447,17 @@ aarch64_print_operand (FILE *f, rtx x, int code) asm_fprintf (f, "%s", reg_names [REGNO (x) + 1]); break; - case 'm': - { - int cond_code; - /* Print a condition (eq, ne, etc). */ - - /* CONST_TRUE_RTX means always -- that's the default. 
*/ - if (x == const_true_rtx) - return; - - if (!COMPARISON_P (x)) - { - output_operand_lossage ("invalid operand for '%%%c'", code); - return; - } - - cond_code = aarch64_get_condition_code (x); - gcc_assert (cond_code >= 0); - fputs (aarch64_condition_codes[cond_code], f); - } - break; - case 'M': + case 'm': { int cond_code; - /* Print the inverse of a condition (eq <-> ne, etc). */ + /* Print a condition (eq, ne, etc) or its inverse. */ - /* CONST_TRUE_RTX means never -- that's the default. */ - if (x == const_true_rtx) + /* CONST_TRUE_RTX means al/nv (al is the default, don't print it). */ + if (x == const_true_rtx) { - fputs ("nv", f); + if (code == 'M') + fputs ("nv", f); return; } @@ -4575,10 +4466,12 @@ aarch64_print_operand (FILE *f, rtx x, int code) output_operand_lossage ("invalid operand for '%%%c'", code); return; } + cond_code = aarch64_get_condition_code (x); gcc_assert (cond_code >= 0); - fputs (aarch64_condition_codes[AARCH64_INVERSE_CONDITION_CODE - (cond_code)], f); + if (code == 'M') + cond_code = AARCH64_INVERSE_CONDITION_CODE (cond_code); + fputs (aarch64_condition_codes[cond_code], f); } break; @@ -4819,37 +4712,20 @@ aarch64_print_operand (FILE *f, rtx x, int code) output_addr_const (asm_out_file, x); break; - case 'K': - { - int cond_code; - /* Print nzcv. */ - - if (!COMPARISON_P (x)) - { - output_operand_lossage ("invalid operand for '%%%c'", code); - return; - } - - cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x)); - gcc_assert (cond_code >= 0); - asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][0]); - } - break; - case 'k': { - int cond_code; + HOST_WIDE_INT cond_code; /* Print nzcv. 
*/ - if (!COMPARISON_P (x)) + if (!CONST_INT_P (x)) { output_operand_lossage ("invalid operand for '%%%c'", code); return; } - cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x)); - gcc_assert (cond_code >= 0); - asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][1]); + cond_code = INTVAL (x); + gcc_assert (cond_code >= 0 && cond_code <= AARCH64_NV); + asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code]); } break; @@ -6128,6 +6004,26 @@ aarch64_if_then_else_costs (rtx op0, rtx op1, rtx op2, int *cost, bool speed) } else if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_CC) { + /* CCMP. */ + if ((GET_CODE (op1) == COMPARE) && CONST_INT_P (op2)) + { + /* Increase cost of CCMP reg, 0, imm, CC to prefer CMP reg, 0. */ + if (XEXP (op1, 1) == const0_rtx) + *cost += 1; + if (speed) + { + machine_mode mode = GET_MODE (XEXP (op1, 0)); + const struct cpu_cost_table *extra_cost + = aarch64_tune_params.insn_extra_cost; + + if (GET_MODE_CLASS (mode) == MODE_INT) + *cost += extra_cost->alu.arith; + else + *cost += extra_cost->fp[mode == DFmode].compare; + } + return true; + } + /* It's a conditional operation based on the status flags, so it must be some flavor of CSEL. */ @@ -6136,6 +6032,12 @@ aarch64_if_then_else_costs (rtx op0, rtx op1, rtx op2, int *cost, bool speed) || GET_CODE (op1) == NOT || (GET_CODE (op1) == PLUS && XEXP (op1, 1) == const1_rtx)) op1 = XEXP (op1, 0); + else if (GET_CODE (op1) == ZERO_EXTEND && GET_CODE (op2) == ZERO_EXTEND) + { + /* CSEL with zero-extension (*cmovdi_insn_uxtw). */ + op1 = XEXP (op1, 0); + op2 = XEXP (op2, 0); + } *cost += rtx_cost (op1, VOIDmode, IF_THEN_ELSE, 1, speed); *cost += rtx_cost (op2, VOIDmode, IF_THEN_ELSE, 2, speed); @@ -6146,6 +6048,50 @@ aarch64_if_then_else_costs (rtx op0, rtx op1, rtx op2, int *cost, bool speed) return false; } +/* Check whether X is a bitfield operation of the form shift + extend that + maps down to a UBFIZ/SBFIZ/UBFX/SBFX instruction. 
If so, return the + operand to which the bitfield operation is applied. Otherwise return + NULL_RTX. */ + +static rtx +aarch64_extend_bitfield_pattern_p (rtx x) +{ + rtx_code outer_code = GET_CODE (x); + machine_mode outer_mode = GET_MODE (x); + + if (outer_code != ZERO_EXTEND && outer_code != SIGN_EXTEND + && outer_mode != SImode && outer_mode != DImode) + return NULL_RTX; + + rtx inner = XEXP (x, 0); + rtx_code inner_code = GET_CODE (inner); + machine_mode inner_mode = GET_MODE (inner); + rtx op = NULL_RTX; + + switch (inner_code) + { + case ASHIFT: + if (CONST_INT_P (XEXP (inner, 1)) + && (inner_mode == QImode || inner_mode == HImode)) + op = XEXP (inner, 0); + break; + case LSHIFTRT: + if (outer_code == ZERO_EXTEND && CONST_INT_P (XEXP (inner, 1)) + && (inner_mode == QImode || inner_mode == HImode)) + op = XEXP (inner, 0); + break; + case ASHIFTRT: + if (outer_code == SIGN_EXTEND && CONST_INT_P (XEXP (inner, 1)) + && (inner_mode == QImode || inner_mode == HImode)) + op = XEXP (inner, 0); + break; + default: + break; + } + + return op; +} + /* Calculate the cost of calculating X, storing it in *COST. Result is true if the total cost of the operation has now been calculated. */ static bool @@ -6437,6 +6383,23 @@ aarch64_rtx_costs (rtx x, machine_mode mode, int outer ATTRIBUTE_UNUSED, goto cost_minus; } + if (GET_CODE (op0) == ZERO_EXTRACT && op1 == const0_rtx + && GET_MODE (x) == CC_NZmode && CONST_INT_P (XEXP (op0, 1)) + && CONST_INT_P (XEXP (op0, 2))) + { + /* COMPARE of ZERO_EXTRACT form of TST-immediate. + Handle it here directly rather than going to cost_logic + since we know the immediate generated for the TST is valid + so we can avoid creating an intermediate rtx for it only + for costing purposes. */ + if (speed) + *cost += extra_cost->alu.logical; + + *cost += rtx_cost (XEXP (op0, 0), GET_MODE (op0), + ZERO_EXTRACT, 0, speed); + return true; + } + if (GET_CODE (op1) == NEG) { /* CMN. 
*/ @@ -6837,6 +6800,15 @@ cost_plus: return true; } + op0 = aarch64_extend_bitfield_pattern_p (x); + if (op0) + { + *cost += rtx_cost (op0, mode, ZERO_EXTEND, 0, speed); + if (speed) + *cost += extra_cost->alu.bfx; + return true; + } + if (speed) { if (VECTOR_MODE_P (mode)) @@ -6868,6 +6840,15 @@ cost_plus: return true; } + op0 = aarch64_extend_bitfield_pattern_p (x); + if (op0) + { + *cost += rtx_cost (op0, mode, SIGN_EXTEND, 0, speed); + if (speed) + *cost += extra_cost->alu.bfx; + return true; + } + if (speed) { if (VECTOR_MODE_P (mode)) @@ -8827,6 +8808,7 @@ aarch64_process_one_target_attr (char *arg_str, const char* pragma_or_attr) arg++; } const struct aarch64_attribute_info *p_attr; + bool found = false; for (p_attr = aarch64_attributes; p_attr->name; p_attr++) { /* If the names don't match up, or the user has given an argument @@ -8835,6 +8817,7 @@ aarch64_process_one_target_attr (char *arg_str, const char* pragma_or_attr) if (strcmp (str_to_check, p_attr->name) != 0) continue; + found = true; bool attr_need_arg_p = p_attr->attr_type == aarch64_attr_custom || p_attr->attr_type == aarch64_attr_enum; @@ -8914,7 +8897,10 @@ aarch64_process_one_target_attr (char *arg_str, const char* pragma_or_attr) } } - return true; + /* If we reached here we either have found an attribute and validated + it or didn't match any. If we matched an attribute but its arguments + were malformed we will have returned false already. */ + return found; } /* Count how many times the character C appears in @@ -10666,6 +10652,21 @@ aarch64_simd_imm_zero_p (rtx x, machine_mode mode) return x == CONST0_RTX (mode); } + +/* Return the bitmask CONST_INT to select the bits required by a zero extract + operation of width WIDTH at bit position POS. 
*/ + +rtx +aarch64_mask_from_zextract_ops (rtx width, rtx pos) +{ + gcc_assert (CONST_INT_P (width)); + gcc_assert (CONST_INT_P (pos)); + + unsigned HOST_WIDE_INT mask + = ((unsigned HOST_WIDE_INT) 1 << UINTVAL (width)) - 1; + return GEN_INT (mask << UINTVAL (pos)); +} + bool aarch64_simd_imm_scalar_p (rtx x, machine_mode mode ATTRIBUTE_UNUSED) { @@ -12952,60 +12953,16 @@ aarch64_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size, return default_use_by_pieces_infrastructure_p (size, align, op, speed_p); } -static enum machine_mode -aarch64_code_to_ccmode (enum rtx_code code) -{ - switch (code) - { - case NE: - return CC_DNEmode; - - case EQ: - return CC_DEQmode; - - case LE: - return CC_DLEmode; - - case LT: - return CC_DLTmode; - - case GE: - return CC_DGEmode; - - case GT: - return CC_DGTmode; - - case LEU: - return CC_DLEUmode; - - case LTU: - return CC_DLTUmode; - - case GEU: - return CC_DGEUmode; - - case GTU: - return CC_DGTUmode; - - default: - return CCmode; - } -} - static rtx aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq, int code, tree treeop0, tree treeop1) { - enum machine_mode op_mode, cmp_mode, cc_mode; - rtx op0, op1, cmp, target; + machine_mode op_mode, cmp_mode, cc_mode = CCmode; + rtx op0, op1; int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0)); - enum insn_code icode; + insn_code icode; struct expand_operand ops[4]; - cc_mode = aarch64_code_to_ccmode ((enum rtx_code) code); - if (cc_mode == CCmode) - return NULL_RTX; - start_sequence (); expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); @@ -13027,13 +12984,25 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq, icode = CODE_FOR_cmpdi; break; + case SFmode: + cmp_mode = SFmode; + cc_mode = aarch64_select_cc_mode ((rtx_code) code, op0, op1); + icode = cc_mode == CCFPEmode ? CODE_FOR_fcmpesf : CODE_FOR_fcmpsf; + break; + + case DFmode: + cmp_mode = DFmode; + cc_mode = aarch64_select_cc_mode ((rtx_code) code, op0, op1); + icode = cc_mode == CCFPEmode ? 
CODE_FOR_fcmpedf : CODE_FOR_fcmpdf; + break; + default: end_sequence (); return NULL_RTX; } - op0 = prepare_operand (icode, op0, 2, op_mode, cmp_mode, unsignedp); - op1 = prepare_operand (icode, op1, 3, op_mode, cmp_mode, unsignedp); + op0 = prepare_operand (icode, op0, 0, op_mode, cmp_mode, unsignedp); + op1 = prepare_operand (icode, op1, 1, op_mode, cmp_mode, unsignedp); if (!op0 || !op1) { end_sequence (); @@ -13042,16 +13011,11 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq, *prep_seq = get_insns (); end_sequence (); - cmp = gen_rtx_fmt_ee ((enum rtx_code) code, cmp_mode, op0, op1); - target = gen_rtx_REG (CCmode, CC_REGNUM); - - create_output_operand (&ops[0], target, CCmode); - create_fixed_operand (&ops[1], cmp); - create_fixed_operand (&ops[2], op0); - create_fixed_operand (&ops[3], op1); + create_fixed_operand (&ops[0], op0); + create_fixed_operand (&ops[1], op1); start_sequence (); - if (!maybe_expand_insn (icode, 4, ops)) + if (!maybe_expand_insn (icode, 2, ops)) { end_sequence (); return NULL_RTX; @@ -13059,22 +13023,20 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq, *gen_seq = get_insns (); end_sequence (); - return gen_rtx_REG (cc_mode, CC_REGNUM); + return gen_rtx_fmt_ee ((rtx_code) code, cc_mode, + gen_rtx_REG (cc_mode, CC_REGNUM), const0_rtx); } static rtx aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, tree treeop0, tree treeop1, int bit_code) { - rtx op0, op1, cmp0, cmp1, target; - enum machine_mode op_mode, cmp_mode, cc_mode; + rtx op0, op1, target; + machine_mode op_mode, cmp_mode, cc_mode = CCmode; int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0)); - enum insn_code icode = CODE_FOR_ccmp_andsi; + insn_code icode; struct expand_operand ops[6]; - - cc_mode = aarch64_code_to_ccmode ((enum rtx_code) cmp_code); - if (cc_mode == CCmode) - return NULL_RTX; + int aarch64_cond; push_to_sequence ((rtx_insn*) *prep_seq); expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); @@ -13089,14 
+13051,24 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, case HImode: case SImode: cmp_mode = SImode; - icode = (enum rtx_code) bit_code == AND ? CODE_FOR_ccmp_andsi - : CODE_FOR_ccmp_iorsi; + icode = CODE_FOR_ccmpsi; break; case DImode: cmp_mode = DImode; - icode = (enum rtx_code) bit_code == AND ? CODE_FOR_ccmp_anddi - : CODE_FOR_ccmp_iordi; + icode = CODE_FOR_ccmpdi; + break; + + case SFmode: + cmp_mode = SFmode; + cc_mode = aarch64_select_cc_mode ((rtx_code) cmp_code, op0, op1); + icode = cc_mode == CCFPEmode ? CODE_FOR_fccmpesf : CODE_FOR_fccmpsf; + break; + + case DFmode: + cmp_mode = DFmode; + cc_mode = aarch64_select_cc_mode ((rtx_code) cmp_code, op0, op1); + icode = cc_mode == CCFPEmode ? CODE_FOR_fccmpedf : CODE_FOR_fccmpdf; break; default: @@ -13115,15 +13087,22 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, end_sequence (); target = gen_rtx_REG (cc_mode, CC_REGNUM); - cmp1 = gen_rtx_fmt_ee ((enum rtx_code) cmp_code, cmp_mode, op0, op1); - cmp0 = gen_rtx_fmt_ee (NE, cmp_mode, prev, const0_rtx); + aarch64_cond = aarch64_get_condition_code_1 (cc_mode, (rtx_code) cmp_code); - create_fixed_operand (&ops[0], prev); + if (bit_code != AND) + { + prev = gen_rtx_fmt_ee (REVERSE_CONDITION (GET_CODE (prev), + GET_MODE (XEXP (prev, 0))), + VOIDmode, XEXP (prev, 0), const0_rtx); + aarch64_cond = AARCH64_INVERSE_CONDITION_CODE (aarch64_cond); + } + + create_fixed_operand (&ops[0], XEXP (prev, 0)); create_fixed_operand (&ops[1], target); create_fixed_operand (&ops[2], op0); create_fixed_operand (&ops[3], op1); - create_fixed_operand (&ops[4], cmp0); - create_fixed_operand (&ops[5], cmp1); + create_fixed_operand (&ops[4], prev); + create_fixed_operand (&ops[5], GEN_INT (aarch64_cond)); push_to_sequence ((rtx_insn*) *gen_seq); if (!maybe_expand_insn (icode, 6, ops)) @@ -13135,7 +13114,7 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, *gen_seq = get_insns (); end_sequence (); - 
return target; + return gen_rtx_fmt_ee ((rtx_code) cmp_code, VOIDmode, target, const0_rtx); } #undef TARGET_GEN_CCMP_FIRST |