x86: Simplify flush_write_buffers()

Always make it an inline instead of using a macro for the no-op case.

Signed-off-by: Brian Gerst <brgerst@gmail.com>
LKML-Reference: <1265380629-3212-7-git-send-email-brgerst@gmail.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Brian Gerst 2010-02-05 09:37:08 -05:00 committed by H. Peter Anvin
Parent 6175ddf06b
Commit 910bf6ad0b
2 changed files with 9 additions and 9 deletions

@@ -84,18 +84,12 @@ memcpy_toio(volatile void __iomem *dst, const void *src, size_t count)
  * 2. Accidentally out of order processors (PPro errata #51)
  */
 
-#if defined(CONFIG_X86_OOSTORE) || defined(CONFIG_X86_PPRO_FENCE)
-
 static inline void flush_write_buffers(void)
 {
+#if defined(CONFIG_X86_OOSTORE) || defined(CONFIG_X86_PPRO_FENCE)
 	asm volatile("lock; addl $0,0(%%esp)": : :"memory");
-}
-
-#else
-
-#define flush_write_buffers() do { } while (0)
-
 #endif
+}
 
 #endif /* __KERNEL__ */

@@ -83,7 +83,13 @@ memcpy_toio(volatile void __iomem *dst, const void *src, size_t count)
  * 1. Out of order aware processors
  * 2. Accidentally out of order processors (PPro errata #51)
  */
-#define flush_write_buffers() do { } while (0)
+
+static inline void flush_write_buffers(void)
+{
+#if defined(CONFIG_X86_OOSTORE) || defined(CONFIG_X86_PPRO_FENCE)
+	asm volatile("lock; addl $0,0(%%esp)": : :"memory");
+#endif
+}
 
 #endif /* __KERNEL__ */
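
As a side note (not part of the commit): the sketch below contrasts the two patterns the commit message refers to, as a standalone C file. The file name, the _old/_new suffixes, and the demo main() are invented for illustration, and CONFIG_X86_OOSTORE / CONFIG_X86_PPRO_FENCE are supplied as -D flags here instead of coming from Kconfig.

/* flush_write_buffers_sketch.c -- illustrative only, not kernel code.
 *
 * Old pattern: a real function when a store fence is needed, a
 * do { } while (0) macro otherwise.  New pattern: always a static
 * inline, with the #if guarding only the fence instruction.
 *
 * Example builds (assumed, not from the commit):
 *   gcc -m32 -DCONFIG_X86_PPRO_FENCE flush_write_buffers_sketch.c
 *   gcc flush_write_buffers_sketch.c
 */
#include <stdio.h>

/* Old style: the definition changes shape depending on configuration. */
#if defined(CONFIG_X86_OOSTORE) || defined(CONFIG_X86_PPRO_FENCE)
static inline void flush_write_buffers_old(void)
{
	asm volatile("lock; addl $0,0(%%esp)": : :"memory");
}
#else
#define flush_write_buffers_old() do { } while (0)
#endif

/* New style: always an inline; the body compiles to nothing when no
 * fence is configured, so the no-op case needs no separate macro. */
static inline void flush_write_buffers_new(void)
{
#if defined(CONFIG_X86_OOSTORE) || defined(CONFIG_X86_PPRO_FENCE)
	asm volatile("lock; addl $0,0(%%esp)": : :"memory");
#endif
}

int main(void)
{
	flush_write_buffers_old();
	flush_write_buffers_new();
	printf("both variants invoked\n");
	return 0;
}

Either way callers write the same call; the inline form simply keeps one definition for every configuration, which is the shape the diff above gives the kernel header.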