/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif
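/*
 * swl/swr (or sdl/sdr for 64-bit longs) are the unaligned partial-store
 * instructions: a single "left" or "right" store fills the bytes between
 * an unaligned address and a long boundary.
 */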

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
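/*
 * On microMIPS, LONG_S becomes LONG_SP, a paired store ('swp', see below)
 * that writes two registers per instruction; STORSIZE therefore doubles,
 * and the fill value has to be duplicated into t8/t9 up front.
 */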

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)           \
    .if \mode == LEGACY_MODE;           \
9:      insn    reg, addr;          \
    .else;                      \
9:      ___BUILD_EVA_INSN(insn, reg, addr); \
    .endif;                     \
    .section __ex_table,"a";            \
    PTR_WD  9b, handler;                \
    .previous
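/*
 * EX() emits one store plus an exception-table entry: local label 9
 * marks the potentially faulting instruction, and the PTR_WD record in
 * __ex_table sends a fault on it to the given fixup handler.  In
 * EVA_MODE the EVA form of the instruction is generated instead.
 */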

    .macro  f_fill64 dst, offset, val, fixup, mode
    EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
    EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
    EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
    EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
    .endm
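/*
 * In every configuration f_fill64 expands to 64/STORSIZE stores, i.e.
 * exactly one 64-byte block: 16 words, 8 dwords, or half as many paired
 * stores on microMIPS.
 */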

    .align  5

    /*
     * Macro to generate the __bzero{,_user} symbol
     * Arguments:
     * mode: LEGACY_MODE or EVA_MODE
     *
     * Instantiated once below for LEGACY_MODE (kernel addresses) and,
     * under CONFIG_EVA, once more for EVA_MODE (user addresses via the
     * EVA store variants).
     */
    .macro __BUILD_BZERO mode
    /* Initialize __memset if this is the first time we call this macro */
    .ifnotdef __memset
    .set __memset, 1
    .hidden __memset /* Make sure it does not leak */
    .endif

    sltiu       t0, a2, STORSIZE    /* very small region? */
    .set        noreorder
    bnez        t0, .Lsmall_memset\@
     andi       t0, a0, STORMASK    /* aligned? */
    .set        reorder

#ifdef CONFIG_CPU_MICROMIPS
    move        t8, a1          /* used by 'swp' instruction */
    move        t9, a1
#endif
    .set        noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
    beqz        t0, 1f
     PTR_SUBU   t0, STORSIZE        /* alignment in bytes */
#else
    .set        noat
    li      AT, STORSIZE
    beqz        t0, 1f
     PTR_SUBU   t0, AT          /* alignment in bytes */
    .set        at
#endif
    .set        reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
    R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
    EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#else
    EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
#endif
    PTR_SUBU    a0, t0          /* long align ptr */
    PTR_ADDU    a2, t0          /* correct size */
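    /*
     * Example (big-endian, STORSIZE == 4): if a0 % 4 == 1 then
     * t0 == 1 - 4 == -3; the swl above wrote the 3 head bytes up to the
     * boundary, a0 -= t0 advances a0 by 3 (now aligned) and a2 += t0
     * drops those 3 bytes from the remaining count.
     */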

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)               \
    EX(sb, a1, N(a0), .Lbyte_fixup\@);  \
    .set        noreorder;      \
    beqz        t0, 0f;         \
     PTR_ADDU   t0, 1;          \
    .set        reorder;

    PTR_ADDU    a2, t0          /* correct size */
    PTR_ADDU    t0, 1
    STORE_BYTE(0)
    STORE_BYTE(1)
#if LONGSIZE == 4
    EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
    STORE_BYTE(2)
    STORE_BYTE(3)
    STORE_BYTE(4)
    STORE_BYTE(5)
    EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
    ori     a0, STORMASK
    xori        a0, STORMASK
    PTR_ADDIU   a0, STORSIZE
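    /*
     * ori/xori round a0 down to a STORSIZE boundary; the ADDIU then
     * steps to the next boundary, past the head bytes stored above.
     */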
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:  ori     t1, a2, 0x3f        /* bytes in full 64-byte blocks */
    xori        t1, 0x3f
    andi        t0, a2, 0x40-STORSIZE
    beqz        t1, .Lmemset_partial\@  /* no block to fill */

    PTR_ADDU    t1, a0          /* end address */
1:  PTR_ADDIU   a0, 64
    R10KCBARRIER(0(ra))
    f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
    bne     t1, a0, 1b

.Lmemset_partial\@:
    R10KCBARRIER(0(ra))
    PTR_LA      t1, 2f          /* where to start */
#ifdef CONFIG_CPU_MICROMIPS
    LONG_SRL    t7, t0, 1
#endif
#if LONGSIZE == 4
    PTR_SUBU    t1, FILLPTRG
#else
    .set        noat
    LONG_SRL    AT, FILLPTRG, 1
    PTR_SUBU    t1, AT
    .set        at
#endif
    PTR_ADDU    a0, t0          /* dest ptr */
    jr      t1

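    /*
     * Computed jump into the f_fill64 expansion below (Duff's-device
     * style): t1 was moved back one 4-byte store instruction per
     * STORSIZE bytes still to fill, so exactly t0 bytes get stored
     * before control falls through to label 2.  The shifts above scale
     * the byte count to a code offset where a single instruction
     * stores 8 or 16 bytes.
     */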
    /* ... but first do longs ... */
    f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:  andi        a2, STORMASK        /* At most one long to go */

    .set        noreorder
    beqz        a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
     PTR_ADDU   a0, a2          /* What's left */
    .set        reorder
    R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
    EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
    EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
     PTR_SUBU   t0, $0, a2
    .set        reorder
    move        a2, zero        /* No remaining longs */
    PTR_ADDIU   t0, 1
    STORE_BYTE(0)
    STORE_BYTE(1)
#if LONGSIZE == 4
    EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
    STORE_BYTE(2)
    STORE_BYTE(3)
    STORE_BYTE(4)
    STORE_BYTE(5)
    EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:  move        a2, zero
    jr      ra

.Lsmall_memset\@:
    PTR_ADDU    t1, a0, a2
    beqz        a2, 2f

1:  PTR_ADDIU   a0, 1           /* fill bytewise */
    R10KCBARRIER(0(ra))
    .set        noreorder
    bne     t1, a0, 1b
     EX(sb, a1, -1(a0), .Lsmall_fixup\@)
    .set        reorder

2:  move        a2, zero
    jr      ra          /* done */
    .if __memset == 1
    END(memset)
    .set __memset, 0
    .hidden __memset
    .endif

#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
    /*
     * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
     *      a2     =             a2                -              t0                   + 1
     */
    PTR_SUBU    a2, t0
    PTR_ADDIU   a2, 1
    jr      ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
    /* unset_bytes already in a2 */
    jr  ra

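/*
 * In the fixups below, $28 is the thread-info pointer: TI_TASK($28)
 * yields the task_struct, and its THREAD_BUADDR field holds the address
 * at which the faulting store hit, as recorded by the fault handler.
 */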
.Lfwd_fixup\@:
    /*
     * unset_bytes = partial_start_addr +   #bytes    -     fault_addr
     *      a2     =         t1         + (a2 & 0x3f) - $28->task->BUADDR
     */
    PTR_L       t0, TI_TASK($28)
    andi        a2, 0x3f
    LONG_L      t0, THREAD_BUADDR(t0)
    LONG_ADDU   a2, t1
    LONG_SUBU   a2, t0
    jr      ra

.Lpartial_fixup\@:
    /*
     * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
     *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
     */
    PTR_L       t0, TI_TASK($28)
    andi        a2, STORMASK
    LONG_L      t0, THREAD_BUADDR(t0)
    LONG_ADDU   a2, a0
    LONG_SUBU   a2, t0
    jr      ra

.Llast_fixup\@:
    /* unset_bytes already in a2 */
    jr      ra

.Lsmall_fixup\@:
    /*
     * unset_bytes = end_addr - current_addr + 1
     *      a2     =    t1    -      a0      + 1
     */
    PTR_SUBU    a2, t1, a0
    PTR_ADDIU   a2, 1
    jr      ra

    .endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to fill
 * a1: char to fill with
 * a2: size of area to fill
 * v0: returns a0, as the C prototype requires
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
    move        v0, a0          /* result */
    beqz        a1, 1f

    andi        a1, 0xff        /* spread fillword */
    LONG_SLL        t1, a1, 8
    or      a1, t1
    LONG_SLL        t1, a1, 16
#if LONGSIZE == 8
    or      a1, t1
    LONG_SLL        t1, a1, 32
#endif
    or      a1, t1
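    /*
     * Example: c == 0x5a yields a1 == 0x5a5a, then 0x5a5a5a5a (and
     * 0x5a5a5a5a5a5a5a5a when LONGSIZE == 8), so every long-sized
     * store writes the fill byte into each byte lane.
     */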
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
    __BUILD_BZERO LEGACY_MODE

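/*
 * With EVA, stores that touch user space must use the EVA instruction
 * variants (see EX() above), so __bzero gets its own EVA_MODE
 * instantiation instead of aliasing into the LEGACY_MODE body.
 */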
#ifdef CONFIG_EVA
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
    __BUILD_BZERO EVA_MODE
END(__bzero)
#endif