Commit 5305ca10 authored by Cyrill Gorcunov, committed by Ingo Molnar

x86/mm: Unify pte_to_pgoff() and pgoff_to_pte() helpers

Use a unified pte_bitop() helper to manipulate bits in the pte/pgoff
bitfields and convert pte_to_pgoff()/pgoff_to_pte() to inline functions.
Signed-off-by: Cyrill Gorcunov <gorcunov@openvz.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Cc: Pavel Emelyanov <xemul@parallels.com>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Peter Zijlstra <a.p.zijlstra@chello.nl>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 801a7605
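
The helper simply extracts a bitfield from one position and deposits it at another, so converting between a pte and a file offset becomes a sum of pte_bitop() calls, one per field. Below is a minimal user-space sketch (not kernel code) of that round trip; the field positions and widths and the helper names pgoff_to_pte_low()/pte_low_to_pgoff() are made up for illustration, since the real PTE_FILE_SHIFT*/PTE_FILE_BITS* values depend on the architecture and on CONFIG_MEM_SOFT_DIRTY:

	/*
	 * Standalone sketch: shows how chained pte_bitop() calls scatter a
	 * file offset into non-contiguous pte bits and gather it back.
	 * The layout below is hypothetical, purely for illustration.
	 */
	#include <assert.h>
	#include <stdio.h>

	static inline unsigned long pte_bitop(unsigned long value, unsigned int rightshift,
					      unsigned long mask, unsigned int leftshift)
	{
		return ((value >> rightshift) & mask) << leftshift;
	}

	/* Hypothetical layout: offset bits live at pte bits [3..5] and [7..10]. */
	#define SHIFT1	3
	#define BITS1	3
	#define MASK1	((1UL << BITS1) - 1)
	#define SHIFT2	7
	#define BITS2	4
	#define MASK2	((1UL << BITS2) - 1)

	static unsigned long pgoff_to_pte_low(unsigned long off)
	{
		return pte_bitop(off,     0, MASK1, SHIFT1) +	/* offset bits 0..2 -> pte bits 3..5  */
		       pte_bitop(off, BITS1, MASK2, SHIFT2);	/* offset bits 3..6 -> pte bits 7..10 */
	}

	static unsigned long pte_low_to_pgoff(unsigned long pte_low)
	{
		return pte_bitop(pte_low, SHIFT1, MASK1, 0) +	/* pte bits 3..5  -> offset bits 0..2 */
		       pte_bitop(pte_low, SHIFT2, MASK2, BITS1);/* pte bits 7..10 -> offset bits 3..6 */
	}

	int main(void)
	{
		unsigned long off = 0x5a;	/* any 7-bit offset fits this fake layout */

		assert(pte_low_to_pgoff(pgoff_to_pte_low(off)) == off);
		printf("off=%#lx pte_low=%#lx round-trip ok\n", off, pgoff_to_pte_low(off));
		return 0;
	}

As in the patch below, the topmost field needs no explicit mask once the value has been shifted right, which is why the kernel version passes -1UL for that field.
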
@@ -55,6 +55,13 @@ static inline pmd_t native_pmdp_get_and_clear(pmd_t *xp)
 #define native_pmdp_get_and_clear(xp) native_local_pmdp_get_and_clear(xp)
 #endif
 
+/* Bit manipulation helper on pte/pgoff entry */
+static inline unsigned long pte_bitop(unsigned long value, unsigned int rightshift,
+				      unsigned long mask, unsigned int leftshift)
+{
+	return ((value >> rightshift) & mask) << leftshift;
+}
+
 #ifdef CONFIG_MEM_SOFT_DIRTY
 /*
@@ -71,31 +78,34 @@ static inline pmd_t native_pmdp_get_and_clear(pmd_t *xp)
 #define PTE_FILE_BITS2		(PTE_FILE_SHIFT3 - PTE_FILE_SHIFT2 - 1)
 #define PTE_FILE_BITS3		(PTE_FILE_SHIFT4 - PTE_FILE_SHIFT3 - 1)
 
-#define pte_to_pgoff(pte)						\
-	((((pte).pte_low >> (PTE_FILE_SHIFT1))				\
-	  & ((1U << PTE_FILE_BITS1) - 1)))				\
-	+ ((((pte).pte_low >> (PTE_FILE_SHIFT2))			\
-	    & ((1U << PTE_FILE_BITS2) - 1))				\
-	   << (PTE_FILE_BITS1))						\
-	+ ((((pte).pte_low >> (PTE_FILE_SHIFT3))			\
-	    & ((1U << PTE_FILE_BITS3) - 1))				\
-	   << (PTE_FILE_BITS1 + PTE_FILE_BITS2))			\
-	+ ((((pte).pte_low >> (PTE_FILE_SHIFT4)))			\
-	    << (PTE_FILE_BITS1 + PTE_FILE_BITS2 + PTE_FILE_BITS3))
-
-#define pgoff_to_pte(off)						\
-	((pte_t) { .pte_low =						\
-	 ((((off)) & ((1U << PTE_FILE_BITS1) - 1)) << PTE_FILE_SHIFT1)	\
-	 + ((((off) >> PTE_FILE_BITS1)					\
-	     & ((1U << PTE_FILE_BITS2) - 1))				\
-	    << PTE_FILE_SHIFT2)						\
-	 + ((((off) >> (PTE_FILE_BITS1 + PTE_FILE_BITS2))		\
-	     & ((1U << PTE_FILE_BITS3) - 1))				\
-	    << PTE_FILE_SHIFT3)						\
-	 + ((((off) >>							\
-	      (PTE_FILE_BITS1 + PTE_FILE_BITS2 + PTE_FILE_BITS3)))	\
-	    << PTE_FILE_SHIFT4)						\
-	 + _PAGE_FILE })
+#define PTE_FILE_MASK1		((1U << PTE_FILE_BITS1) - 1)
+#define PTE_FILE_MASK2		((1U << PTE_FILE_BITS2) - 1)
+#define PTE_FILE_MASK3		((1U << PTE_FILE_BITS3) - 1)
+
+#define PTE_FILE_LSHIFT2	(PTE_FILE_BITS1)
+#define PTE_FILE_LSHIFT3	(PTE_FILE_BITS1 + PTE_FILE_BITS2)
+#define PTE_FILE_LSHIFT4	(PTE_FILE_BITS1 + PTE_FILE_BITS2 + PTE_FILE_BITS3)
+
+static __always_inline pgoff_t pte_to_pgoff(pte_t pte)
+{
+	return (pgoff_t)
+		(pte_bitop(pte.pte_low, PTE_FILE_SHIFT1, PTE_FILE_MASK1,  0)		    +
+		 pte_bitop(pte.pte_low, PTE_FILE_SHIFT2, PTE_FILE_MASK2,  PTE_FILE_LSHIFT2) +
+		 pte_bitop(pte.pte_low, PTE_FILE_SHIFT3, PTE_FILE_MASK3,  PTE_FILE_LSHIFT3) +
+		 pte_bitop(pte.pte_low, PTE_FILE_SHIFT4,           -1UL,  PTE_FILE_LSHIFT4));
+}
+
+static __always_inline pte_t pgoff_to_pte(pgoff_t off)
+{
+	return (pte_t){
+		.pte_low =
+			pte_bitop(off,                0, PTE_FILE_MASK1,  PTE_FILE_SHIFT1) +
+			pte_bitop(off, PTE_FILE_LSHIFT2, PTE_FILE_MASK2,  PTE_FILE_SHIFT2) +
+			pte_bitop(off, PTE_FILE_LSHIFT3, PTE_FILE_MASK3,  PTE_FILE_SHIFT3) +
+			pte_bitop(off, PTE_FILE_LSHIFT4,           -1UL,  PTE_FILE_SHIFT4) +
+			_PAGE_FILE,
+	};
+}
 
 #else /* CONFIG_MEM_SOFT_DIRTY */
@@ -115,22 +125,30 @@ static inline pmd_t native_pmdp_get_and_clear(pmd_t *xp)
 #define PTE_FILE_BITS1		(PTE_FILE_SHIFT2 - PTE_FILE_SHIFT1 - 1)
 #define PTE_FILE_BITS2		(PTE_FILE_SHIFT3 - PTE_FILE_SHIFT2 - 1)
 
-#define pte_to_pgoff(pte)						\
-	((((pte).pte_low >> PTE_FILE_SHIFT1)				\
-	  & ((1U << PTE_FILE_BITS1) - 1))				\
-	 + ((((pte).pte_low >> PTE_FILE_SHIFT2)			\
-	     & ((1U << PTE_FILE_BITS2) - 1)) << PTE_FILE_BITS1)	\
-	 + (((pte).pte_low >> PTE_FILE_SHIFT3)				\
-	    << (PTE_FILE_BITS1 + PTE_FILE_BITS2)))
-
-#define pgoff_to_pte(off)						\
-	((pte_t) { .pte_low =						\
-	 (((off) & ((1U << PTE_FILE_BITS1) - 1)) << PTE_FILE_SHIFT1)	\
-	 + ((((off) >> PTE_FILE_BITS1) & ((1U << PTE_FILE_BITS2) - 1))	\
-	    << PTE_FILE_SHIFT2)						\
-	 + (((off) >> (PTE_FILE_BITS1 + PTE_FILE_BITS2))		\
-	    << PTE_FILE_SHIFT3)						\
-	 + _PAGE_FILE })
+#define PTE_FILE_MASK1		((1U << PTE_FILE_BITS1) - 1)
+#define PTE_FILE_MASK2		((1U << PTE_FILE_BITS2) - 1)
+
+#define PTE_FILE_LSHIFT2	(PTE_FILE_BITS1)
+#define PTE_FILE_LSHIFT3	(PTE_FILE_BITS1 + PTE_FILE_BITS2)
+
+static __always_inline pgoff_t pte_to_pgoff(pte_t pte)
+{
+	return (pgoff_t)
+		(pte_bitop(pte.pte_low, PTE_FILE_SHIFT1, PTE_FILE_MASK1,  0)		    +
+		 pte_bitop(pte.pte_low, PTE_FILE_SHIFT2, PTE_FILE_MASK2,  PTE_FILE_LSHIFT2) +
+		 pte_bitop(pte.pte_low, PTE_FILE_SHIFT3,           -1UL,  PTE_FILE_LSHIFT3));
+}
+
+static __always_inline pte_t pgoff_to_pte(pgoff_t off)
+{
+	return (pte_t){
+		.pte_low =
+			pte_bitop(off,                0, PTE_FILE_MASK1,  PTE_FILE_SHIFT1) +
+			pte_bitop(off, PTE_FILE_LSHIFT2, PTE_FILE_MASK2,  PTE_FILE_SHIFT2) +
+			pte_bitop(off, PTE_FILE_LSHIFT3,           -1UL,  PTE_FILE_SHIFT3) +
+			_PAGE_FILE,
+	};
+}
 
 #endif /* CONFIG_MEM_SOFT_DIRTY */