LCOV - code coverage report
Current view: top level - lkbce/arch/x86/include/asm - msr.h (source / functions)
Test: coverage.info | Date: 2017-01-25
Coverage (hit / total): Lines: 1 / 1 (100.0 %); Functions: 0 / 0 (-)

          Line data    Source code
       1             : #ifndef _ASM_X86_MSR_H
       2             : #define _ASM_X86_MSR_H
       3             : 
       4             : #include <asm/msr-index.h>
       5             : 
       6             : #ifndef __ASSEMBLY__
       7             : 
       8             : #include <linux/types.h>
       9             : #include <linux/ioctl.h>
      10             : 
      11             : #define X86_IOC_RDMSR_REGS      _IOWR('c', 0xA0, __u32[8])
      12             : #define X86_IOC_WRMSR_REGS      _IOWR('c', 0xA1, __u32[8])
      13             : 
      14             : #ifdef __KERNEL__
      15             : 
      16             : #include <asm/asm.h>
      17             : #include <asm/errno.h>
      18             : #include <asm/cpumask.h>
      19             : 
/*
 * 64-bit MSR value, accessible either as the whole quad (q) or as the
 * low/high 32-bit halves (l/h) via the anonymous union/struct.
 */
struct msr {
        union {
                struct {
                        u32 l;  /* low 32 bits (eax half) */
                        u32 h;  /* high 32 bits (edx half) */
                };
                u64 q;          /* full 64-bit value */
        };
};
      29             : 
/*
 * Argument block for cross-CPU MSR accessors: which MSR, the value read
 * or to be written, an optional per-CPU results array, and the error code.
 */
struct msr_info {
        u32 msr_no;             /* MSR index to access */
        struct msr reg;         /* single value in/out */
        struct msr *msrs;       /* per-CPU results array (when used) */
        int err;                /* 0 on success, negative errno on fault */
};
      36             : 
/*
 * Argument block for the regs-array MSR interface: an 8-entry GPR image
 * (see native_{rd,wr}msr_safe_regs) plus the resulting error code.
 */
struct msr_regs_info {
        u32 *regs;              /* 8-element GPR array */
        int err;                /* 0 on success, negative errno on fault */
};
      41             : 
/*
 * Execute rdtscp, returning the 64-bit TSC and storing the %ecx output
 * (the TSC_AUX value) into *aux. The instruction is emitted as raw
 * opcode bytes 0x0f,0x01,0xf9 — presumably to support assemblers that
 * don't know the rdtscp mnemonic (NOTE(review): confirm).
 */
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
        unsigned long low, high;
        asm volatile(".byte 0x0f,0x01,0xf9"
                     : "=a" (low), "=d" (high), "=c" (*aux));
        return low | ((u64)high << 32);
}
      49             : 
/*
 * Both i386 and x86_64 return a 64-bit value in edx:eax, but gcc's "A"
 * constraint has different meanings. For i386, "A" means exactly
 * edx:eax, while for x86_64 it doesn't mean rdx:rax or edx:eax. Instead,
 * it means rax *or* rdx.
 *
 * These helpers abstract that: DECLARE_ARGS declares the locals,
 * EAX_EDX_RET/ARGS supply the asm output/input constraints, and
 * EAX_EDX_VAL reassembles the 64-bit result.
 */
#ifdef CONFIG_X86_64
/* 64-bit: keep the halves in two separate 32-bit locals. */
#define DECLARE_ARGS(val, low, high)    unsigned low, high
#define EAX_EDX_VAL(val, low, high)     ((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)    "a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)     "=a" (low), "=d" (high)
#else
/* 32-bit: "A" is exactly the edx:eax register pair. */
#define DECLARE_ARGS(val, low, high)    unsigned long long val
#define EAX_EDX_VAL(val, low, high)     (val)
#define EAX_EDX_ARGS(val, low, high)    "A" (val)
#define EAX_EDX_RET(val, low, high)     "=A" (val)
#endif
      67             : 
/*
 * Read MSR 'msr' with rdmsr (MSR index in %ecx, result in edx:eax).
 * No fault handling: a non-existent MSR will #GP.
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
        return EAX_EDX_VAL(val, low, high);
}
      75             : 
/*
 * Read MSR 'msr' with exception handling: on success the xor clears
 * *err; if rdmsr faults (e.g. the MSR does not exist), the exception
 * table redirects to the .fixup stub at label 3, which stores -EIO
 * into *err and jumps back past the instruction. The returned value is
 * whatever the registers held on the fault path, so callers must check
 * *err before trusting it.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
                                                      int *err)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("2: rdmsr ; xor %[err],%[err]\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
                     ".previous\n\t"
                     _ASM_EXTABLE(2b, 3b)
                     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
                     : "c" (msr), [fault] "i" (-EIO));
        return EAX_EDX_VAL(val, low, high);
}
      91             : 
/*
 * Write MSR 'msr' with wrmsr: index in %ecx, value in edx:eax.
 * The "memory" clobber orders the write against surrounding accesses.
 * No fault handling: a non-existent MSR will #GP.
 */
static inline void native_write_msr(unsigned int msr,
                                    unsigned low, unsigned high)
{
        asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}
      97             : 
/* Can be uninlined because referenced by paravirt */
/*
 * Write MSR 'msr' with exception handling. 'err' shares %eax with the
 * 'low' input (the "0" matching constraint): on success the xor clears
 * it, on a fault the .fixup stub stores -EIO via the exception table.
 * Returns 0 on success, -EIO if the wrmsr faulted.
 */
notrace static inline int native_write_msr_safe(unsigned int msr,
                                        unsigned low, unsigned high)
{
        int err;
        asm volatile("2: wrmsr ; xor %[err],%[err]\n"
                     "1:\n\t"
                     ".section .fixup,\"ax\"\n\t"
                     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
                     ".previous\n\t"
                     _ASM_EXTABLE(2b, 3b)
                     : [err] "=a" (err)
                     : "c" (msr), "0" (low), "d" (high),
                       [fault] "i" (-EIO)
                     : "memory");
        return err;
}
     115             : 
/* Out-of-line TSC read; defined elsewhere in arch code. */
extern unsigned long long native_read_tsc(void);

/*
 * Safe MSR access through an 8-element GPR image (regs[1] = %ecx MSR
 * index; defined elsewhere). Return 0 on success, negative on fault.
 */
extern int native_rdmsr_safe_regs(u32 regs[8]);
extern int native_wrmsr_safe_regs(u32 regs[8]);
     120             : 
/*
 * Read the time-stamp counter with rdtsc (result in edx:eax).
 * __always_inline so the read happens at the call site.
 */
static __always_inline unsigned long long __native_read_tsc(void)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));

        return EAX_EDX_VAL(val, low, high);
}
     129             : 
/*
 * Read performance-monitoring counter 'counter' with rdpmc
 * (counter index in %ecx, result in edx:eax).
 */
static inline unsigned long long native_read_pmc(int counter)
{
        DECLARE_ARGS(val, low, high);

        asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
        return EAX_EDX_VAL(val, low, high);
}
     137             : 
     138             : #ifdef CONFIG_PARAVIRT
     139             : #include <asm/paravirt.h>
     140             : #else
     141             : #include <linux/errno.h>
     142             : /*
     143             :  * Access to machine-specific registers (available on 586 and better only)
     144             :  * Note: the rd* operations modify the parameters directly (without using
     145             :  * pointer indirection), this allows gcc to optimize better
     146             :  */
     147             : 
/*
 * rdmsr(): read MSR 'msr', splitting the 64-bit result into the two
 * 32-bit lvalues val1 (low half) and val2 (high half). The arguments
 * are assigned directly, without pointer indirection (see comment above).
 */
#define rdmsr(msr, val1, val2)                                  \
do {                                                            \
        u64 __val = native_read_msr((msr));                     \
        (val1) = (u32)__val;                                    \
        (val2) = (u32)(__val >> 32);                            \
} while (0)
     154             : 
/* Non-paravirt wrmsr: plain wrapper around the native write. */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
        native_write_msr(msr, low, high);
}
     159             : 
/* rdmsrl(): read MSR into a single 64-bit lvalue. */
#define rdmsrl(msr, val)                        \
        ((val) = native_read_msr((msr)))

/* wrmsrl(): write one 64-bit value, split into low/high 32-bit halves. */
#define wrmsrl(msr, val)                                                \
        native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
     165             : 
/* wrmsr with exception handling */
/* Returns 0 on success, -EIO if the write faulted. */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
        return native_write_msr_safe(msr, low, high);
}
     171             : 
     172             : /* rdmsr with exception handling */
     173             : #define rdmsr_safe(msr, p1, p2)                                 \
     174             : ({                                                              \
     175             :         int __err;                                              \
     176             :         u64 __val = native_read_msr_safe((msr), &__err);    \
     177             :         (*p1) = (u32)__val;                                     \
     178             :         (*p2) = (u32)(__val >> 32);                               \
     179             :         __err;                                                  \
     180             : })
     181             : 
     182             : static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
     183             : {
     184             :         int err;
     185             : 
     186             :         *p = native_read_msr_safe(msr, &err);
     187             :         return err;
     188             : }
     189             : 
     190             : static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
     191             : {
     192             :         u32 gprs[8] = { 0 };
     193             :         int err;
     194             : 
     195             :         gprs[1] = msr;
     196             :         gprs[7] = 0x9c5a203a;
     197             : 
     198             :         err = native_rdmsr_safe_regs(gprs);
     199             : 
     200             :         *p = gprs[0] | ((u64)gprs[2] << 32);
     201             : 
     202             :         return err;
     203             : }
     204             : 
     205             : static inline int wrmsrl_amd_safe(unsigned msr, unsigned long long val)
     206             : {
     207             :         u32 gprs[8] = { 0 };
     208             : 
     209             :         gprs[0] = (u32)val;
     210             :         gprs[1] = msr;
     211             :         gprs[2] = val >> 32;
     212             :         gprs[7] = 0x9c5a203a;
     213             : 
     214             :         return native_wrmsr_safe_regs(gprs);
     215             : }
     216             : 
/* Non-paravirt wrapper: safe MSR read through a full GPR image. */
static inline int rdmsr_safe_regs(u32 regs[8])
{
        return native_rdmsr_safe_regs(regs);
}
     221             : 
/* Non-paravirt wrapper: safe MSR write through a full GPR image. */
static inline int wrmsr_safe_regs(u32 regs[8])
{
        return native_wrmsr_safe_regs(regs);
}
     226             : 
/* rdtscl(): low 32 bits of the TSC into 'low'. */
#define rdtscl(low)                                             \
        ((low) = (u32)__native_read_tsc())

/* rdtscll(): full 64-bit TSC into 'val'. */
#define rdtscll(val)                                            \
        ((val) = __native_read_tsc())
     232             : 
/* rdpmc(): read perf counter 'counter' into 32-bit low/high lvalues. */
#define rdpmc(counter, low, high)                       \
do {                                                    \
        u64 _l = native_read_pmc((counter));            \
        (low)  = (u32)_l;                               \
        (high) = (u32)(_l >> 32);                       \
} while (0)

/* rdtscp(): TSC into low/high halves, TSC_AUX into 'aux'. */
#define rdtscp(low, high, aux)                                  \
do {                                                            \
        unsigned long long _val = native_read_tscp(&(aux));     \
        (low) = (u32)_val;                                      \
        (high) = (u32)(_val >> 32);                             \
} while (0)
     246             : 
     247             : #define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
     248             : 
     249             : #endif  /* !CONFIG_PARAVIRT */
     250             : 
     251             : 
/* Fault-tolerant wrmsrl: split a 64-bit value and use wrmsr_safe(). */
#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val),         \
                                             (u32)((val) >> 32))

/* Write the TSC MSR from two 32-bit halves. */
#define write_tsc(val1, val2) wrmsr(MSR_IA32_TSC, (val1), (val2))

/* Set TSC_AUX — the value rdtscp returns in %ecx. */
#define write_rdtscp_aux(val) wrmsr(MSR_TSC_AUX, (val), 0)
     258             : 
/* Allocate/free a per-CPU array of struct msr; defined elsewhere. */
struct msr *msrs_alloc(void);
void msrs_free(struct msr *msrs);
     261             : 
     262             : #ifdef CONFIG_SMP
     263             : int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
     264             : int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
     265             : void rdmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
     266             : void wrmsr_on_cpus(const struct cpumask *mask, u32 msr_no, struct msr *msrs);
     267             : int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
     268             : int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
     269             : int rdmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8]);
     270             : int wrmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8]);
     271           4 : #else  /*  CONFIG_SMP  */
/* UP fallback: 'cpu' is ignored; read on the current (only) CPU. */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
        rdmsr(msr_no, *l, *h);
        return 0;
}
/* UP fallback: 'cpu' is ignored; write on the current (only) CPU. */
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        wrmsr(msr_no, l, h);
        return 0;
}
/* UP fallback: the mask 'm' is ignored; only msrs[0] is filled in. */
static inline void rdmsr_on_cpus(const struct cpumask *m, u32 msr_no,
                                struct msr *msrs)
{
       rdmsr_on_cpu(0, msr_no, &(msrs[0].l), &(msrs[0].h));
}
/* UP fallback: the mask 'm' is ignored; only msrs[0] is written out. */
static inline void wrmsr_on_cpus(const struct cpumask *m, u32 msr_no,
                                struct msr *msrs)
{
       wrmsr_on_cpu(0, msr_no, msrs[0].l, msrs[0].h);
}
/* UP fallback: 'cpu' is ignored; safe read on the current CPU. */
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
                                    u32 *l, u32 *h)
{
        return rdmsr_safe(msr_no, l, h);
}
/* UP fallback: 'cpu' is ignored; safe write on the current CPU. */
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
        return wrmsr_safe(msr_no, l, h);
}
/* UP fallback: 'cpu' is ignored; regs-array safe read on this CPU. */
static inline int rdmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8])
{
        return rdmsr_safe_regs(regs);
}
/* UP fallback: 'cpu' is ignored; regs-array safe write on this CPU. */
static inline int wrmsr_safe_regs_on_cpu(unsigned int cpu, u32 regs[8])
{
        return wrmsr_safe_regs(regs);
}
     309             : #endif  /* CONFIG_SMP */
     310             : #endif /* __KERNEL__ */
     311             : #endif /* __ASSEMBLY__ */
     312             : #endif /* _ASM_X86_MSR_H */

Generated by: LCOV version 1.10