/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Common timebase prototypes and such for all ppc machines.
 */

#ifndef _ASM_POWERPC_VDSO_TIMEBASE_H
#define _ASM_POWERPC_VDSO_TIMEBASE_H

#include <asm/reg.h>

/*
 * We use __powerpc64__ here because we want the compat VDSO to use the 32-bit
 * version below in the else case of the ifdef.
 */
#if defined(__powerpc64__) && (defined(CONFIG_PPC_CELL) || defined(CONFIG_PPC_E500))
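/*
 * When CPU_FTR_CELL_TB_BUG is set, a timebase read can spuriously return
 * zero, so the ASM_FTR_IFSET() sequence patches in a retry loop that
 * re-reads the SPR until a non-zero value is seen.
 */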
#define mftb()		({unsigned long rval;				\
			asm volatile(					\
				"90:	mfspr %0, %2;\n"		\
				ASM_FTR_IFSET(				\
					"97:	cmpwi %0,0;\n"		\
					"	beq- 90b;\n", "", %1)	\
			: "=r" (rval)					\
			: "i" (CPU_FTR_CELL_TB_BUG), "i" (SPRN_TBRL) : "cr0"); \
			rval;})
#elif defined(CONFIG_PPC_8xx)
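/*
 * The 8xx reads the timebase through the dedicated mftbl instruction
 * rather than an mfspr of SPRN_TBRL.
 */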
#define mftb()		({unsigned long rval;	\
			asm volatile("mftbl %0" : "=r" (rval)); rval;})
#else
#define mftb()		({unsigned long rval;	\
			asm volatile("mfspr %0, %1" :			\
				     "=r" (rval) : "i" (SPRN_TBRL)); rval;})
#endif /* !CONFIG_PPC_CELL */

#if defined(CONFIG_PPC_8xx)
#define mftbu()		({unsigned long rval;	\
			asm volatile("mftbu %0" : "=r" (rval)); rval;})
#else
#define mftbu()		({unsigned long rval;	\
			asm volatile("mfspr %0, %1" : "=r" (rval) :	\
				"i" (SPRN_TBRU)); rval;})
#endif
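
/* Write the lower/upper 32 bits of the timebase (mttbl/mttbu extended mnemonics). */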
#define mttbl(v) asm volatile("mttbl %0":: "r"(v))
#define mttbu(v) asm volatile("mttbu %0":: "r"(v))
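
/*
 * Read the full 64-bit timebase value.
 */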
static __always_inline u64 get_tb(void)
{
	unsigned int tbhi, tblo, tbhi2;

	/*
	 * We use __powerpc64__ here not CONFIG_PPC64 because we want the compat
	 * VDSO to use the 32-bit compatible version in the while loop below.
	 */
	if (__is_defined(__powerpc64__))
		return mftb();
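
	/*
	 * 32-bit: the timebase is read as two halves, so re-read the upper
	 * half and retry if it changed, i.e. if the lower half wrapped
	 * between the reads.
	 */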
	do {
		tbhi = mftbu();
		tblo = mftb();
		tbhi2 = mftbu();
	} while (tbhi != tbhi2);

	return ((u64)tbhi << 32) | tblo;
}
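
/*
 * Zeroing the lower half first prevents a carry out of the old lower half
 * from incrementing the upper half while the new value is being written.
 */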
static inline void set_tb(unsigned int upper, unsigned int lower)
{
	mtspr(SPRN_TBWL, 0);
	mtspr(SPRN_TBWU, upper);
	mtspr(SPRN_TBWL, lower);
}
#endif /* _ASM_POWERPC_VDSO_TIMEBASE_H */