pdp11.md (movmemhi, movmemhi1): Correct constraints.

* config/pdp11/pdp11.md (movmemhi, movmemhi1): Correct
constraints. 
* config/pdp11/pdp11.c (output_block_move): Rewrite.

From-SVN: r168338
Author: Paul Koning <ni1d@arrl.net>
Date:   2010-12-30 06:50:40 -05:00
Committed by: Paul Koning
parent fd3ce292ae
commit dea44e569d
3 changed files with 128 additions and 201 deletions

diff --git a/gcc/ChangeLog b/gcc/ChangeLog

@@ -1,3 +1,9 @@
+2010-12-30  Paul Koning  <ni1d@arrl.net>
+
+	* config/pdp11/pdp11.md (movmemhi, movmemhi1): Correct
+	constraints.
+	* config/pdp11/pdp11.c (output_block_move): Rewrite.
+
 2010-12-30  Jakub Jelinek  <jakub@redhat.com>
 
 	PR tree-optimization/47060

diff --git a/gcc/config/pdp11/pdp11.c b/gcc/config/pdp11/pdp11.c

@@ -1167,236 +1167,157 @@ output_block_move(rtx *operands)
 {
     static int count = 0;
     char buf[200];
+    int unroll;
+    int lastbyte = 0;
-    if (GET_CODE(operands[2]) == CONST_INT
-        && ! optimize_size)
+    /* Move of zero bytes is a NOP.  */
+    if (operands[2] == const0_rtx)
+      return "";
+    /* Look for moves by small constant byte counts, those we'll
+       expand to straight line code.  */
+    if (CONSTANT_P (operands[2]))
     {
-        if (INTVAL(operands[2]) < 16
-            && INTVAL(operands[3]) == 1)
+        if (INTVAL (operands[2]) < 16
+            && (!optimize_size || INTVAL (operands[2]) < 5)
+            && INTVAL (operands[3]) == 1)
         {
             register int i;
-            for (i = 1; i <= INTVAL(operands[2]); i++)
+            for (i = 1; i <= INTVAL (operands[2]); i++)
                 output_asm_insn("movb (%1)+, (%0)+", operands);
             return "";
         }
-        else if (INTVAL(operands[2]) < 32)
+        else if (INTVAL(operands[2]) < 32
+                 && (!optimize_size || INTVAL (operands[2]) < 9)
+                 && INTVAL (operands[3]) >= 2)
         {
             register int i;
-            for (i = 1; i <= INTVAL(operands[2])/2; i++)
-                output_asm_insn("mov (%1)+, (%0)+", operands);
-            /* may I assume that moved quantity is
-               multiple of alignment ???
-               I HOPE SO !
-            */
+            for (i = 1; i <= INTVAL (operands[2]) / 2; i++)
+                output_asm_insn ("mov (%1)+, (%0)+", operands);
+            if (INTVAL (operands[2]) & 1)
+              output_asm_insn ("movb (%1), (%0)", operands);
             return "";
         }
-        /* can do other clever things, maybe... */
     }
-    if (CONSTANT_P(operands[2]) )
+    /* Ideally we'd look for moves that are multiples of 4 or 8
+       bytes and handle those by unrolling the move loop.  That
+       makes for a lot of code if done at run time, but it's ok
+       for constant counts.  Also, for variable counts we have
+       to worry about odd byte count with even aligned pointers.
+       On 11/40 and up we handle that case; on older machines
+       we don't and just use byte-wise moves all the time.  */
+    if (CONSTANT_P (operands[2]) )
     {
-        /* just move count to scratch */
-        output_asm_insn("mov %2, %4", operands);
+        if (INTVAL (operands[3]) < 2)
+            unroll = 0;
+        else
+        {
+            lastbyte = INTVAL (operands[2]) & 1;
+            if (optimize_size || INTVAL (operands[2]) & 2)
+                unroll = 1;
+            else if (INTVAL (operands[2]) & 4)
+                unroll = 2;
+            else
+                unroll = 3;
+        }
+        /* Loop count is byte count scaled by unroll.  */
+        operands[2] = GEN_INT (INTVAL (operands[2]) >> unroll);
+        output_asm_insn ("mov %2, %4", operands);
     }
     else
     {
-        /* just clobber the register */
+        /* Variable byte count; use the input register
+           as the scratch.  */
        operands[4] = operands[2];
-    }
-    /* switch over alignment */
-    switch (INTVAL(operands[3]))
-    {
-      case 1:
-        /*
-           x:
-           movb (%1)+, (%0)+
-           if (TARGET_45)
-             sob %4,x
-           else
-             dec %4
-             bgt x
-        */
-        sprintf(buf, "\nmovestrhi%d:", count);
-        output_asm_insn(buf, NULL);
-        output_asm_insn("movb (%1)+, (%0)+", operands);
-        if (TARGET_45)
-        {
-            sprintf(buf, "sob %%4, movestrhi%d", count);
-            output_asm_insn(buf, operands);
-        }
+        /* Decide whether to move by words, and check
+           the byte count for zero.  */
+        if (TARGET_40_PLUS && INTVAL (operands[3]) > 1)
+        {
+            unroll = 1;
+            output_asm_insn ("asr %4", operands);
+        }
        else
-        {
-            output_asm_insn("dec %4", operands);
-            sprintf(buf, "bgt movestrhi%d", count);
-            output_asm_insn(buf, NULL);
-        }
-        count ++;
+        {
+            unroll = 0;
+            output_asm_insn ("tst %4", operands);
+        }
+        sprintf (buf, "beq movestrhi%d", count + 1);
+        output_asm_insn (buf, NULL);
+    }
+    /* Output the loop label.  */
+    sprintf (buf, "\nmovestrhi%d:", count);
+    output_asm_insn (buf, NULL);
+    /* Output the appropriate move instructions.  */
+    switch (unroll)
+    {
+      case 0:
+        output_asm_insn ("movb (%1)+, (%0)+", operands);
+        break;
+      case 1:
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
        break;
      case 2:
-        /*
-           asr %4
-           x:
-           mov (%1)+, (%0)+
-           if (TARGET_45)
-             sob %4, x
-           else
-             dec %4
-             bgt x
-        */
-      generate_compact_code:
-        output_asm_insn("asr %4", operands);
-        sprintf(buf, "\nmovestrhi%d:", count);
-        output_asm_insn(buf, NULL);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        if (TARGET_45)
-        {
-            sprintf(buf, "sob %%4, movestrhi%d", count);
-            output_asm_insn(buf, operands);
-        }
-        else
-        {
-            output_asm_insn("dec %4", operands);
-            sprintf(buf, "bgt movestrhi%d", count);
-            output_asm_insn(buf, NULL);
-        }
-        count ++;
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
        break;
-      case 4:
-        /*
-           asr %4
-           asr %4
-           x:
-           mov (%1)+, (%0)+
-           mov (%1)+, (%0)+
-           if (TARGET_45)
-             sob %4, x
-           else
-             dec %4
-             bgt x
-        */
-        if (optimize_size)
-            goto generate_compact_code;
-        output_asm_insn("asr %4", operands);
-        output_asm_insn("asr %4", operands);
-        sprintf(buf, "\nmovestrhi%d:", count);
-        output_asm_insn(buf, NULL);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        if (TARGET_45)
-        {
-            sprintf(buf, "sob %%4, movestrhi%d", count);
-            output_asm_insn(buf, operands);
-        }
-        else
-        {
-            output_asm_insn("dec %4", operands);
-            sprintf(buf, "bgt movestrhi%d", count);
-            output_asm_insn(buf, NULL);
-        }
-        count ++;
-        break;
      default:
-        /*
-           asr %4
-           asr %4
-           asr %4
-           x:
-           mov (%1)+, (%0)+
-           mov (%1)+, (%0)+
-           mov (%1)+, (%0)+
-           mov (%1)+, (%0)+
-           if (TARGET_45)
-             sob %4, x
-           else
-             dec %4
-             bgt x
-        */
-        if (optimize_size)
-            goto generate_compact_code;
-        output_asm_insn("asr %4", operands);
-        output_asm_insn("asr %4", operands);
-        output_asm_insn("asr %4", operands);
-        sprintf(buf, "\nmovestrhi%d:", count);
-        output_asm_insn(buf, NULL);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        output_asm_insn("mov (%1)+, (%0)+", operands);
-        if (TARGET_45)
-        {
-            sprintf(buf, "sob %%4, movestrhi%d", count);
-            output_asm_insn(buf, operands);
-        }
-        else
-        {
-            output_asm_insn("dec %4", operands);
-            sprintf(buf, "bgt movestrhi%d", count);
-            output_asm_insn(buf, NULL);
-        }
-        count ++;
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
+        output_asm_insn ("mov (%1)+, (%0)+", operands);
        break;
-        ;
     }
+    /* Output the decrement and test.  */
+    if (TARGET_40_PLUS)
+    {
+        sprintf (buf, "sob %%4, movestrhi%d", count);
+        output_asm_insn (buf, operands);
+    }
+    else
+    {
+        output_asm_insn ("dec %4", operands);
+        sprintf (buf, "bgt movestrhi%d", count);
+        output_asm_insn (buf, NULL);
+    }
+    count ++;
+    /* If constant odd byte count, move the last byte.  */
+    if (lastbyte)
+      output_asm_insn ("movb (%1), (%0)", operands);
+    else if (!CONSTANT_P (operands[2]))
+    {
+        /* Output the destination label for the zero byte count check.  */
+        sprintf (buf, "\nmovestrhi%d:", count);
+        output_asm_insn (buf, NULL);
+        count++;
+        /* If we did word moves, check for trailing last byte. */
+        if (unroll)
+        {
+            sprintf (buf, "bcc movestrhi%d", count);
+            output_asm_insn (buf, NULL);
+            output_asm_insn ("movb (%1), (%0)", operands);
+            sprintf (buf, "\nmovestrhi%d:", count);
+            output_asm_insn (buf, NULL);
+            count++;
+        }
+    }
     return "";
 }
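
The rewrite is easiest to follow from the code it emits. Here is a sketch
for the hardest case, a variable byte count with word-aligned pointers on
an 11/40 or later (unroll = 1). The register assignments and label numbers
are illustrative assumptions, not commit output: destination pointer in r0,
source in r1, and the count, doubling as the %4 scratch, in r2.

	asr	r2		; halve the byte count; the odd bit drops into C
	beq	movestrhi1	; no whole words to move: skip the loop
movestrhi0:
	mov	(r1)+, (r0)+	; move one word
	sob	r2, movestrhi0	; decrement and loop while nonzero
movestrhi1:
	bcc	movestrhi2	; C clear: the byte count was even, done
	movb	(r1), (r0)	; odd byte count: move the trailing byte
movestrhi2:

This works because neither mov nor sob disturbs the carry bit, so the
odd-count flag that asr shifted into C survives the loop for the bcc test.
For constant counts no zero check is emitted and the unroll factor comes
from the low bits of the count: a 40-byte word-aligned move (40 & 1,
40 & 2 and 40 & 4 all zero) gets unroll = 3, so the loop runs
40 >> 3 = 5 times, moving four words per pass.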

diff --git a/gcc/config/pdp11/pdp11.md b/gcc/config/pdp11/pdp11.md

@@ -321,7 +321,7 @@
 (define_expand "movmemhi"
   [(parallel [(set (match_operand:BLK 0 "general_operand" "=g,g")
                    (match_operand:BLK 1 "general_operand" "g,g"))
-              (use (match_operand:HI 2 "general_operand" "n,&mr"))
+              (use (match_operand:HI 2 "general_operand" "n,mr"))
               (use (match_operand:HI 3 "immediate_operand" "i,i"))
               (clobber (match_scratch:HI 4 "=&r,X"))
               (clobber (match_dup 5))
@@ -342,10 +342,10 @@
 }")
 
-(define_insn "" ; "movmemhi"
-  [(set (mem:BLK (match_operand 0 "pmode_register_operand" "+r,r"))
-        (mem:BLK (match_operand 1 "pmode_register_operand" "+r,r")))
-   (use (match_operand:HI 2 "general_operand" "+n,&r"))
+(define_insn "movmemhi1"
+  [(set (mem:BLK (match_operand:HI 0 "register_operand" "r,r"))
+        (mem:BLK (match_operand:HI 1 "register_operand" "r,r")))
+   (use (match_operand:HI 2 "general_operand" "n,r"))
    (use (match_operand:HI 3 "immediate_operand" "i,i"))
    (clobber (match_scratch:HI 4 "=&r,X"))
    (clobber (match_dup 0))
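
To spell out what "correct constraints" fixes: in GCC's constraint
language "=", "+" and "&" are modifiers for output operands, so they do
not belong on operands wrapped in (use ...), which are pure inputs; that
is what the old "+n,&r" and "n,&mr" strings got wrong. Here is an
annotated copy of the corrected movmemhi1 pattern (the comments are mine,
not part of the commit):

(define_insn "movmemhi1"
  [(set (mem:BLK (match_operand:HI 0 "register_operand" "r,r"))   ;; destination pointer
        (mem:BLK (match_operand:HI 1 "register_operand" "r,r")))  ;; source pointer
   (use (match_operand:HI 2 "general_operand" "n,r"))   ;; byte count: constant or register
   (use (match_operand:HI 3 "immediate_operand" "i,i")) ;; alignment, always constant
   (clobber (match_scratch:HI 4 "=&r,X"))       ;; "=&r": earlyclobber scratch for the
                                                ;; constant-count alternative; "X" when
                                                ;; the count register is the scratch
   (clobber (match_dup 0))                      ;; the pointer registers are consumed
   ...]                                         ;; by the autoincrement moves; the
                                                ;; remainder is as in the commit

The same reasoning drops "&" from the expander's count operand ("n,&mr"
becomes "n,mr"), and the explicit HI mode with plain register_operand
replaces the modeless pmode_register_operand matches; giving the insn the
name movmemhi1 instead of an empty string presumably also lets the
expander generate it by name.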