diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index fbb5e558..aacbeb67 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -88,7 +88,7 @@ inline static int32_t atomic_int32_test_and_set(atomic_int32_t* d, int32_t ret)
 inline static int32_t atomic_int32_add(atomic_int32_t *d, int32_t i)
 {
 	int32_t res = i;
-	asm volatile(LOCK "xaddl %0, %1" : "=r"(i) : "m"(d->counter), "0"(i) : "memory");
+	asm volatile(LOCK "xaddl %0, %1" : "=r"(i) : "m"(d->counter), "0"(i) : "memory", "cc");
 	return res+i;
 }
 
diff --git a/arch/x86/include/asm/processor.h b/arch/x86/include/asm/processor.h
index 57e1b228..ad962757 100644
--- a/arch/x86/include/asm/processor.h
+++ b/arch/x86/include/asm/processor.h
@@ -98,7 +98,7 @@ inline static uint64_t rdtsc(void)
  * is used here
  */
 inline static void flush_cache(void) {
-	asm volatile ("wbinvd" : : : "memory");
+	asm volatile ("wbinvd" ::: "memory");
 }
 
 /** @brief Invalidate cache
@@ -107,7 +107,7 @@ inline static void flush_cache(void) {
  * is used here
  */
 inline static void invalidate_cache(void) {
-	asm volatile ("invd");
+	asm volatile ("invd" ::: "memory");
 }
 
 /** @brief Get return value from EAX
@@ -125,9 +125,9 @@ inline static int get_return_value(void) {
 
 /* Force strict CPU ordering */
 #ifdef CONFIG_ROCKCREEK
-inline static void mb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory"); }
-inline static void rmb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory"); }
-inline static void wmb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory"); }
+inline static void mb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory", "cc"); }
+inline static void rmb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory", "cc"); }
+inline static void wmb(void) { asm volatile ("lock; addl $0,0(%%esp)" ::: "memory", "cc"); }
 #else
 inline static void mb(void) { asm volatile("mfence" ::: "memory"); }
 inline static void rmb(void) { asm volatile("lfence" ::: "memory"); }
@@ -268,7 +268,7 @@ static inline void tlb_flush(void)
 static inline uint32_t read_eflags(void)
 {
 	uint32_t result;
-	asm volatile ("pushf; pop %%eax" : "=a"(result));
+	asm volatile ("pushf; pop %0" : "=r"(result));
 	return result;
 }
 
@@ -283,7 +283,7 @@ static inline uint32_t last_set(uint32_t i)
 	if (!i)
 		return 0;
 
-	asm volatile ("bsr %1, %0" : "=r"(ret) : "r"(i) : "flags");
+	asm volatile ("bsr %1, %0" : "=r"(ret) : "r"(i) : "cc");
 
 	return ret;
 }
diff --git a/arch/x86/include/asm/string.h b/arch/x86/include/asm/string.h
index fd6480e0..e1c092b7 100644
--- a/arch/x86/include/asm/string.h
+++ b/arch/x86/include/asm/string.h
@@ -83,7 +83,7 @@ inline static void *memcpy(void *dest, const void *src, size_t count)
 		"movl (%%edi), %%edx\n\t"
 		"andl $31, %%ecx\n\t"
 		"rep ; movsb\n\t":"=&a" (h), "=&D"(i), "=&S"(j), "=&b"(k), "=&c"(l), "=&d"(m)
-		: "0"(count / 32), "1"(dest), "2"(src), "3"(count) : "memory");
+		: "0"(count / 32), "1"(dest), "2"(src), "3"(count) : "memory","cc");
 
 	return dest;
 }
@@ -107,7 +107,7 @@ inline static void *memcpy(void* dest, const void *src, size_t count)
 		"andl $3, %%ecx\n\t"
 		"rep movsb\n\t"
 		: "=&c"(i), "=&D"(j), "=&S"(k)
-		: "0"(count/4), "g"(count), "1"(dest), "2"(src) : "memory");
+		: "0"(count/4), "g"(count), "1"(dest), "2"(src) : "memory","cc");
 
 	return dest;
 }
@@ -134,7 +134,7 @@ inline static void *memset(void* dest, int val, size_t count)
 	asm volatile ("cld; rep stosb"
 		: "=&c"(i), "=&D"(j)
-		: "a"(val), "1"(dest), "0"(count) : "memory");
+		: "a"(val), "1"(dest), "0"(count) : "memory","cc");
 
 	return dest;
 }
 
@@ -162,7 +162,7 @@ inline static size_t strlen(const char* str)
 	asm volatile("not %%ecx; cld; repne scasb; not %%ecx; dec %%ecx"
 		: "=&c"(len), "=&D"(i), "=&a"(j)
 		: "2"(0), "1"(str), "0"(len)
-		: "memory");
+		: "memory","cc");
 
 	return len;
 }
diff --git a/arch/x86/include/asm/tasks.h b/arch/x86/include/asm/tasks.h
index d2cf6731..206f5ad2 100644
--- a/arch/x86/include/asm/tasks.h
+++ b/arch/x86/include/asm/tasks.h
@@ -86,7 +86,7 @@ static inline int jump_to_user_code(uint32_t ep, uint32_t stack)
 {
 	asm volatile ("mov %0, %%ds; mov %0, %%fs; mov %0, %%gs; mov %0, %%es" :: "r"(0x23));
 	asm volatile ("push $0x23; push %0; push $0x1B; push %1" :: "r"(stack), "r"(ep));
-	asm volatile ("lret");
+	asm volatile ("lret" ::: "cc");
 
 	return 0;
 }
diff --git a/arch/x86/scc/scc_memcpy.h b/arch/x86/scc/scc_memcpy.h
index b4d9a0ba..59a64272 100644
--- a/arch/x86/scc/scc_memcpy.h
+++ b/arch/x86/scc/scc_memcpy.h
@@ -61,7 +61,7 @@ inline static void *memcpy_get(void *dest, const void *src, size_t count)
 		"rep ; movsb\n\t":"=&a" (h), "=&D"(i), "=&S"(j), "=&b"(k), "=&c"(l), "=&d"(m)
 		:"0"(count / 32), "1"(dest), "2"(src),
-		"3"(count):"memory");
+		"3"(count):"memory","cc");
 
 	return dest;
 }
 
@@ -84,7 +84,7 @@ inline static void *memcpy_put(void* dest, const void *src, size_t count)
 		"andl $3, %%ecx\n\t"
 		"rep movsb\n\t"
 		: "=&c"(i), "=&D"(j), "=&S"(k)
-		: "0"(count/4), "g"(count), "1"(dest), "2"(src) : "memory");
+		: "0"(count/4), "g"(count), "1"(dest), "2"(src) : "memory","cc");
 
 	return dest;
 }
@@ -160,7 +160,7 @@ inline static void *memcpy_put(void *dest, const void *src, size_t count)
 		"andl $3,%%ecx\n\t"
 		"rep ; movsb\n\t":"=&c" (i), "=&D"(j), "=&S"(k), "=&a"(l)
 		:"0"(count), "1"(dest), "2"(src)
-		:"memory");
+		:"memory","cc");
 
 	return dest;
 }
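
A note on the two kinds of change above (commentary, not part of the patch): GCC on x86 has historically treated the condition codes as clobbered by every asm statement, so spelling out "cc" mostly documents intent, but it is the documented, architecture-neutral way to say that xadd, bsr, and "lock; addl" rewrite EFLAGS; the old "flags" clobber in last_set relied at best on a GCC-internal register name. The read_eflags fix is about operand syntax: extended-asm operands are referenced as %0, %1, ..., while $0 is the immediate constant zero, which pop cannot take. The sketch below is a minimal userspace illustration of both patterns under assumptions of my own: the _demo names are hypothetical stand-ins for the kernel's functions, and the "+m" constraint is a simplification of the patch's "m" input plus "memory" clobber. It should build on 32-bit x86 with something like: gcc -m32 -O2 demo.c

#include <stdio.h>
#include <stdint.h>

/* Hypothetical stand-in for the kernel's atomic type. */
typedef struct { volatile int32_t counter; } atomic_int32_t;

/* lock; xadd writes both operands and rewrites EFLAGS, hence "cc".
 * Returns the new counter value, mirroring atomic_int32_add(). */
static inline int32_t atomic_int32_add_demo(atomic_int32_t *d, int32_t i)
{
	int32_t res = i;
	asm volatile ("lock; xaddl %0, %1"
	              : "+r"(i), "+m"(d->counter)
	              :
	              : "memory", "cc");
	return res + i;	/* old value (now in i) plus the addend */
}

/* bsr sets ZF, so the flags must be listed as clobbered; "cc" is the
 * portable spelling of that clobber. */
static inline uint32_t last_set_demo(uint32_t i)
{
	uint32_t ret;

	if (!i)
		return 0;
	asm volatile ("bsr %1, %0" : "=r"(ret) : "r"(i) : "cc");
	return ret;
}

/* Corrected operand syntax: %0 references the output operand;
 * $0 would be the immediate zero, which pop cannot encode. */
static inline uint32_t read_eflags_demo(void)
{
	uint32_t result;
	asm volatile ("pushf; pop %0" : "=r"(result));
	return result;
}

int main(void)
{
	atomic_int32_t a = { .counter = 40 };

	printf("atomic add -> %d\n", atomic_int32_add_demo(&a, 2));	/* 42 */
	printf("last_set   -> %u\n", last_set_demo(0x90));		/* 7 */
	printf("eflags     -> 0x%08x\n", (unsigned)read_eflags_demo());
	return 0;
}

One practical payoff of writing "cc" explicitly: the same asm compiles unchanged under compilers that do not assume an implicit flags clobber, and a reader no longer has to know that x86 GCC silently adds one.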