File: /Users/paulross/dev/linux/linux-3.13/include/asm-generic/percpu.h

#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

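/*
 * Conceptually, the instance of a per-cpu variable "var" that belongs
 * to CPU "cpu" lives at the variable's link-time address plus that
 * CPU's offset. A minimal sketch, with "var" standing in for any
 * per-cpu variable:
 *
 *	unsigned long addr = (unsigned long)&var + per_cpu_offset(cpu);
 *
 * SHIFT_PERCPU_PTR() below wraps exactly this arithmetic.
 */
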
/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per-cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

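/*
 * The two flavors differ only under preemption debugging: with
 * CONFIG_DEBUG_PREEMPT, my_cpu_offset goes through smp_processor_id(),
 * which warns when used from preemptible context. A sketch of the bug
 * class this catches:
 *
 *	preempt_disable();
 *	off = my_cpu_offset;	(safe: the CPU cannot change under us)
 *	preempt_enable();
 *	off = my_cpu_offset;	(DEBUG_PREEMPT warns: caller is preemptible)
 */
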
/*
 * Add an offset to a pointer but keep the pointer as is.
 *
 * Only S390 provides its own means of moving the pointer.
 */
#ifndef SHIFT_PERCPU_PTR
/* Weird cast keeps both GCC and sparse happy. */
#define SHIFT_PERCPU_PTR(__p, __offset)	({				\
	__verify_pcpu_ptr((__p));					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
})
#endif

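/*
 * Semantically SHIFT_PERCPU_PTR() is plain pointer arithmetic;
 * RELOC_HIDE() merely hides the addition from the compiler, because the
 * unshifted base address need not point into any real object. Roughly:
 *
 *	p = SHIFT_PERCPU_PTR(&var, off);
 *
 * behaves like (typeof(&var))((unsigned long)&var + off), minus the
 * compiler being allowed to reason about the result.
 */
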
/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))

#ifndef __this_cpu_ptr
#define __this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
#else
#define this_cpu_ptr(ptr) __this_cpu_ptr(ptr)
#endif

#define __get_cpu_var(var) (*this_cpu_ptr(&(var)))
#define __raw_get_cpu_var(var) (*__this_cpu_ptr(&(var)))

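/*
 * Typical use, with a variable defined via DEFINE_PER_CPU() from
 * linux/percpu-defs.h ("hits" is a made-up name):
 *
 *	DEFINE_PER_CPU(int, hits);
 *
 *	per_cpu(hits, cpu)++;		(CPU "cpu"'s instance)
 *	__get_cpu_var(hits)++;		(this CPU's instance)
 *	int *p = this_cpu_ptr(&hits);	(pointer to this CPU's instance)
 *
 * Callers of the this-CPU forms must keep preemption disabled, or the
 * CPU may change between deriving the pointer and using it.
 */
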
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

#define VERIFY_PERCPU_PTR(__p) ({			\
	__verify_pcpu_ptr((__p));			\
	(typeof(*(__p)) __kernel __force *)(__p);	\
})

#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define __this_cpu_ptr(ptr)	this_cpu_ptr(ptr)

#endif	/* SMP */

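/*
 * On UP the offset machinery collapses: there is exactly one instance,
 * so per_cpu() evaluates the cpu argument (preserving any side effects)
 * via the comma operator and then discards it. With a hypothetical
 * pick_cpu() that has side effects:
 *
 *	per_cpu(hits, pick_cpu());	(calls pick_cpu(), then accesses
 *					 the single instance of "hits")
 */
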
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

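/*
 * linux/percpu-defs.h places per-cpu variables by pasting these section
 * names into a section attribute. Simplified, on SMP the made-up
 * DEFINE_PER_CPU(int, hits) comes out roughly as:
 *
 *	__attribute__((section(".data..percpu"))) int hits;
 *
 * On UP the variables are ordinary .data objects.
 */
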
#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif

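/*
 * The "..shared_aligned" names let cacheline-aligned per-cpu data be
 * grouped and padded to avoid false sharing, as the scheduler does for
 * its runqueues:
 *
 *	DEFINE_PER_CPU_SHARED_ALIGNED(struct rq, runqueues);
 *
 * Inside modules the names are empty, since module loading only
 * supports the base per-cpu section.
 */
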
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

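/*
 * These two are pure override hooks: an arch can pre-define them to
 * decorate every per-cpu declaration or definition. For example, in the
 * style of ia64's small-model override (shown here as an illustration):
 *
 *	#define PER_CPU_ATTRIBUTES	__attribute__((__model__(__small__)))
 */
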
#endif /* _ASM_GENERIC_PERCPU_H_ */