File: /Users/paulross/dev/linux/linux-3.13/arch/x86/include/asm/string_64.h

Green shading in the line number column means that the source line is part of the translation unit; red means it is conditionally excluded. Highlighted line numbers link to the translation unit page, and highlighted macros link to the macro page.

       1: #ifndef _ASM_X86_STRING_64_H
       2: #define _ASM_X86_STRING_64_H
       3: 
       4: #ifdef __KERNEL__
       5: 
       6: /* Written 2002 by Andi Kleen */
       7: 
       8: /* Only used for special circumstances. Stolen from i386/string.h */
       9: static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
      10: {
      11:     unsigned long d0, d1, d2;
      12:     asm volatile("rep ; movsl\n\t"
      13:              "testb $2,%b4\n\t"
      14:              "je 1f\n\t"
      15:              "movsw\n"
      16:              "1:\ttestb $1,%b4\n\t"
      17:              "je 2f\n\t"
      18:              "movsb\n"
      19:              "2:"
      20:              : "=&c" (d0), "=&D" (d1), "=&S" (d2)
      21:              : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
      22:              : "memory");
      23:     return to;
      24: }
      25: 
      26: /* Even with __builtin_ the compiler may decide to use the out of line
      27:    function. */
      28: 
      29: #define __HAVE_ARCH_MEMCPY 1
      30: #ifndef CONFIG_KMEMCHECK
      31: #if (__GNUC__ == 4 && __GNUC_MINOR__ >= 3) || __GNUC__ > 4
      32: extern void *memcpy(void *to, const void *from, size_t len);
      33: #else
      34: extern void *__memcpy(void *to, const void *from, size_t len);
      35: #define memcpy(dst, src, len)                    \
      36: ({                                \
      37:     size_t __len = (len);                    \
      38:     void *__ret;                        \
      39:     if (__builtin_constant_p(len) && __len >= 64)        \
      40:         __ret = __memcpy((dst), (src), __len);        \
      41:     else                            \
      42:         __ret = __builtin_memcpy((dst), (src), __len);    \
      43:     __ret;                            \
      44: })
      45: #endif
      46: #else
      47: /*
      48:  * kmemcheck becomes very happy if we use the REP instructions unconditionally,
      49:  * because it means that we know both memory operands in advance.
      50:  */
      51: #define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
      52: #endif
      53: 
      54: #define __HAVE_ARCH_MEMSET
      55: void *memset(void *s, int c, size_t n);
      56: 
      57: #define __HAVE_ARCH_MEMMOVE
      58: void *memmove(void *dest, const void *src, size_t count);
      59: 
      60: int memcmp(const void *cs, const void *ct, size_t count);
      61: size_t strlen(const char *s);
      62: char *strcpy(char *dest, const char *src);
      63: char *strcat(char *dest, const char *src);
      64: int strcmp(const char *cs, const char *ct);
      65: 
      66: #endif /* __KERNEL__ */
      67: 
      68: #endif /* _ASM_X86_STRING_64_H */
      69:
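For reference, the rep ; movsl / movsw / movsb sequence in __inline_memcpy above copies n / 4 four-byte words and then uses bit 1 and bit 0 of n to move the remaining two-byte and one-byte tail. Below is a minimal user-space C sketch of the same word-then-tail strategy; the name copy_words_then_tail is invented for illustration, and the kernel version does this with the x86 string instructions rather than C loops.

#include <stddef.h>
#include <stdio.h>

/*
 * Illustrative analog of __inline_memcpy: copy four-byte groups first
 * (the kernel uses "rep ; movsl" for this), then consult bit 1 and bit 0
 * of n for the two-byte and one-byte tail, mirroring the
 * "testb $2,%b4" and "testb $1,%b4" branches in the inline assembly.
 */
void *copy_words_then_tail(void *to, const void *from, size_t n)
{
        unsigned char *d = to;
        const unsigned char *s = from;
        size_t words = n / 4;

        while (words--) {                       /* rep ; movsl */
                d[0] = s[0]; d[1] = s[1]; d[2] = s[2]; d[3] = s[3];
                d += 4; s += 4;
        }
        if (n & 2) {                            /* testb $2 ; movsw */
                d[0] = s[0]; d[1] = s[1];
                d += 2; s += 2;
        }
        if (n & 1)                              /* testb $1 ; movsb */
                d[0] = s[0];
        return to;
}

int main(void)
{
        const char src[] = "an odd-length payload";
        char dst[sizeof src];

        copy_words_then_tail(dst, src, sizeof src);
        printf("%s\n", dst);
        return 0;
}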
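The memcpy macro in the pre-GCC-4.3 branch dispatches on whether the length is a compile-time constant of at least 64 bytes: large constant copies go to the out-of-line __memcpy, everything else is left to __builtin_memcpy, which the compiler may inline. Below is a minimal user-space sketch of the same dispatch pattern; it assumes GNU C (__builtin_constant_p and statement expressions), and the names my_copy and big_copy are invented, with big_copy wrapping the C library memcpy as a stand-in for the kernel's out-of-line routine.

#include <stddef.h>
#include <string.h>
#include <stdio.h>

/* Stand-in for the kernel's out-of-line __memcpy. */
static void *big_copy(void *dst, const void *src, size_t len)
{
        return memcpy(dst, src, len);
}

/*
 * Same shape as the kernel macro: evaluate len once into __len, test
 * __builtin_constant_p on the unexpanded argument (so the compiler can
 * fold the branch away), and send large constant copies out of line.
 */
#define my_copy(dst, src, len)                                  \
({                                                              \
        size_t __len = (len);                                   \
        void *__ret;                                            \
        if (__builtin_constant_p(len) && __len >= 64)           \
                __ret = big_copy((dst), (src), __len);          \
        else                                                    \
                __ret = __builtin_memcpy((dst), (src), __len);  \
        __ret;                                                  \
})

int main(void)
{
        char a[128] = "source buffer", b[128];
        size_t n = strlen(a) + 1;               /* not a compile-time constant */

        my_copy(b, a, sizeof a);                /* constant 128 >= 64: big_copy     */
        my_copy(b, a, 16);                      /* constant but small: builtin path */
        my_copy(b, a, n);                       /* non-constant: builtin path       */
        printf("%s\n", b);
        return 0;
}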