/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/linkage.h>

#ifdef __LITTLE_ENDIAN__
#define WORD2 r2
#define SHIFT r3
#else /* BIG ENDIAN */
#define WORD2 r3
#define SHIFT r2
#endif

ENTRY(memcmp)
	or	r12,r0,r1
	asl_s	r12,r12,30
	sub	r3,r2,1
	brls	r2,r12,.Lbytewise
	ld	r4,[r0,0]
	ld	r5,[r1,0]
	lsr.f	lp_count,r3,3
#ifdef CONFIG_ISA_ARCV2
	/* In ARCv2 a branch can't be the last instruction in a zero overhead
	 * loop.
	 * So we move the branch to the start of the loop, duplicate it
	 * after the end, and set up r12 so that the branch isn't taken
	 * initially.
	 */
	mov_s	r12,WORD2
	lpne	.Loop_end
	brne	WORD2,r12,.Lodd
	ld	WORD2,[r0,4]
#else
	lpne	.Loop_end
	ld_s	WORD2,[r0,4]
#endif
	ld_s	r12,[r1,4]
	brne	r4,r5,.Leven
	ld.a	r4,[r0,8]
	ld.a	r5,[r1,8]
#ifdef CONFIG_ISA_ARCV2
.Loop_end:
	brne	WORD2,r12,.Lodd
#else
	brne	WORD2,r12,.Lodd
.Loop_end:
#endif
	asl_s	SHIFT,SHIFT,3
	bhs_s	.Last_cmp
	brne	r4,r5,.Leven
	ld	r4,[r0,4]
	ld	r5,[r1,4]
#ifdef __LITTLE_ENDIAN__
	nop_s
	; one more load latency cycle
.Last_cmp:
	xor	r0,r4,r5
	bset	r0,r0,SHIFT
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	b.d	.Leven_cmp
	and	r1,r1,24
.Leven:
	xor	r0,r4,r5
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	; slow track insn
	and	r1,r1,24
.Leven_cmp:
	asl	r2,r4,r1
	asl	r12,r5,r1
	lsr_s	r2,r2,1
	lsr_s	r12,r12,1
	j_s.d	[blink]
	sub	r0,r2,r12
	.balign	4
.Lodd:
	xor	r0,WORD2,r12
	sub_s	r1,r0,1
	bic_s	r1,r1,r0
	norm	r1,r1
	; slow track insn
	and	r1,r1,24
	asl_s	r2,r2,r1
	asl_s	r12,r12,r1
	lsr_s	r2,r2,1
	lsr_s	r12,r12,1
	j_s.d	[blink]
	sub	r0,r2,r12
#else /* BIG ENDIAN */
.Last_cmp:
	neg_s	SHIFT,SHIFT
	lsr	r4,r4,SHIFT
	lsr	r5,r5,SHIFT
	; slow track insn
.Leven:
	sub.f	r0,r4,r5
	mov.ne	r0,1
	j_s.d	[blink]
	bset.cs	r0,r0,31
.Lodd:
	cmp_s	WORD2,r12
	mov_s	r0,1
	j_s.d	[blink]
	bset.cs	r0,r0,31
#endif /* ENDIAN */
	.balign	4
.Lbytewise:
	breq	r2,0,.Lnil
	ldb	r4,[r0,0]
	ldb	r5,[r1,0]
	lsr.f	lp_count,r3
#ifdef CONFIG_ISA_ARCV2
	mov	r12,r3
	lpne	.Lbyte_end
	brne	r3,r12,.Lbyte_odd
#else
	lpne	.Lbyte_end
#endif
	ldb_s	r3,[r0,1]
	ldb	r12,[r1,1]
	brne	r4,r5,.Lbyte_even
	ldb.a	r4,[r0,2]
	ldb.a	r5,[r1,2]
#ifdef CONFIG_ISA_ARCV2
.Lbyte_end:
	brne	r3,r12,.Lbyte_odd
#else
	brne	r3,r12,.Lbyte_odd
.Lbyte_end:
#endif
	bcc	.Lbyte_even
	brne	r4,r5,.Lbyte_even
	ldb_s	r3,[r0,1]
	ldb_s	r12,[r1,1]
.Lbyte_odd:
	j_s.d	[blink]
	sub	r0,r3,r12
.Lbyte_even:
	j_s.d	[blink]
	sub	r0,r4,r5
.Lnil:
	j_s.d	[blink]
	mov	r0,0
END(memcmp)