LKML Archive on lore.kernel.org
help / color / mirror / Atom feed
* [PATCH] WorkStruct: Implement generic UP cmpxchg() where an arch doesn't support it [try #2]
@ 2006-12-06 17:56 David Howells
  2006-12-06 19:02 ` Christoph Lameter
  0 siblings, 1 reply; 3+ messages in thread
From: David Howells @ 2006-12-06 17:56 UTC (permalink / raw)
  To: torvalds, akpm, linux-arm-kernel; +Cc: linux-kernel, linux-arch, dhowells

Implement generic UP cmpxchg() where an arch doesn't otherwise support it.
This assumes that the arch does not support SMP without also providing its own
cmpxchg() implementation.

This is required because cmpxchg() is used by the reduced work queue patches to
adjust the management data in a work_struct.

Also provide ARMv6 with a cmpxchg() implementation using LDREX/STREXEQ.  Pre-v6
ARM doesn't support SMP according to ARM's atomic.h, so the generic
IRQ-disablement based cmpxchg() is entirely adequate there (if it isn't, then
atomic_cmpxchg() is also broken on ARM).

Signed-Off-By: David Howells <dhowells@redhat.com>
---

 include/asm-arm/system.h      |   40 ++++++++++++++++++++++++++++++++++++
 include/asm-generic/cmpxchg.h |   46 +++++++++++++++++++++++++++++++++++++++++
 2 files changed, 86 insertions(+), 0 deletions(-)

diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index f05fbe3..f16e42d 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -325,6 +325,46 @@ #endif
 extern void disable_hlt(void);
 extern void enable_hlt(void);
 
+/*
+ * We only implement cmpxchg in ASM on ARMv6 where we have LDREX/STREX
+ * available, and we only implement it for word-sized exchanges
+ */
+#if __LINUX_ARM_ARCH__ >= 6
+extern void __bad_cmpxchg(volatile void *, int);
+
+#define cmpxchg(ptr, old, new)						\
+({									\
+	__typeof__ (ptr) ____p = (ptr);					\
+	__typeof__(*ptr) ____old = (old);				\
+	__typeof__(*ptr) ____new = (new);				\
+	__typeof__(*ptr) ____oldval;					\
+	__typeof__(*ptr) ____res;					\
+									\
+	switch (sizeof(____res)) {					\
+	case 4:								\
+		do {							\
+			__asm__ __volatile__("@ cmpxchg\n"		\
+			"ldrex	%1, [%2]\n"				\
+			"mov	%0, #0\n"				\
+			"teq	%1, %3\n"				\
+			"strexeq %0, %4, [%2]\n"			\
+			: "=&r" (____res), "=&r" (____oldval)		\
+			: "r" (____p), "Ir" (____old), "r" (____new)	\
+			: "cc");					\
+		} while(____res);					\
+		break;							\
+	default:							\
+		__bad_cmpxchg(____p, sizeof(____res));			\
+		____oldval = 0;						\
+		break;							\
+	}								\
+	____oldval;							\
+})
+
+#else
+#include <asm-generic/cmpxchg.h>
+#endif
+
 #endif /* __ASSEMBLY__ */
 
 #define arch_align_stack(x) (x)
diff --git a/include/asm-generic/cmpxchg.h b/include/asm-generic/cmpxchg.h
new file mode 100644
index 0000000..be0da99
--- /dev/null
+++ b/include/asm-generic/cmpxchg.h
@@ -0,0 +1,46 @@
+/* Generic cmpxchg for those arches that don't implement it themselves
+ *
+ * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version
+ * 2 of the License, or (at your option) any later version.
+ */
+
+#ifndef _ASM_GENERIC_CMPXCHG_H
+#define _ASM_GENERIC_CMPXCHG_H
+
+#if !defined(cmpxchg) && !defined(CONFIG_SMP)
+
+/**
+ * cmpxchg - Atomically conditionally exchange one value for another.
+ * @ptr - Pointer to the value to be altered.
+ * @old - The value to change from.
+ * @new - The value to change to.
+ *
+ * This function atomically compares the current value at the word pointed to
+ * by @ptr, and if it's the same as @old, changes it to @new.  If it's not the
+ * same then it's left unchanged.
+ *
+ * The value that was in the word pointed to by @ptr is returned, whether or
+ * not it was changed to @new.
+ */
+#define cmpxchg(ptr, old, new)			\
+({						\
+	unsigned long ____flags;		\
+	__typeof__ (ptr) ____p = (ptr);		\
+	__typeof__(*ptr) ____old = (old);	\
+	__typeof__(*ptr) ____new = (new);	\
+	__typeof__(*ptr) ____res;		\
+	raw_local_irq_save(____flags);		\
+	____res = *____p;			\
+	if (likely(____res == (____old)))	\
+		*____p = (____new);		\
+	raw_local_irq_restore(____flags);	\
+	____res;				\
+})
+
+#endif /* !cmpxchg && !SMP */
+#endif /* _ASM_GENERIC_CMPXCHG_H */

^ permalink raw reply	[flat|nested] 3+ messages in thread

* Re: [PATCH] WorkStruct: Implement generic UP cmpxchg() where an arch doesn't support it [try #2]
  2006-12-06 17:56 [PATCH] WorkStruct: Implement generic UP cmpxchg() where an arch doesn't support it [try #2] David Howells
@ 2006-12-06 19:02 ` Christoph Lameter
  2006-12-06 19:24   ` Christoph Lameter
  0 siblings, 1 reply; 3+ messages in thread
From: Christoph Lameter @ 2006-12-06 19:02 UTC (permalink / raw)
  To: David Howells; +Cc: torvalds, akpm, linux-arm-kernel, linux-kernel, linux-arch

On Wed, 6 Dec 2006, David Howells wrote:

> Implement generic UP cmpxchg() where an arch doesn't otherwise support it.
> This assumes that the arch does not support SMP without also providing its own
> cmpxchg() implementation.
> 
> Signed-Off-By: David Howells <dhowells@redhat.com>

I cannot evaluate the ARM implementation, but otherwise it looks fine.

Acked-by: Christoph Lameter <clameter@sgi.com>

^ permalink raw reply	[flat|nested] 3+ messages in thread

* Re: [PATCH] WorkStruct: Implement generic UP cmpxchg() where an arch doesn't support it [try #2]
  2006-12-06 19:02 ` Christoph Lameter
@ 2006-12-06 19:24   ` Christoph Lameter
  0 siblings, 0 replies; 3+ messages in thread
From: Christoph Lameter @ 2006-12-06 19:24 UTC (permalink / raw)
  To: David Howells; +Cc: torvalds, akpm, linux-arm-kernel, linux-kernel, linux-arch

Have a look at arch/i386/kernel/cpu/intel.c. You can probably replace my 
code that simulates cmpxchg for 386s

arch/i386/kernel/cpu/intel.c:

#ifndef CONFIG_X86_CMPXCHG
/*
 * Emulated byte-sized cmpxchg for the 386, which lacks the CMPXCHG
 * instruction.  Atomicity is achieved by disabling local interrupts,
 * so this is only valid on UP (unsuitable for SMP).
 *
 * Returns the value previously stored at @ptr, whether or not the
 * exchange took place.
 */
unsigned long cmpxchg_386_u8(volatile void *ptr, u8 old, u8 new)
{
        volatile u8 *p = ptr;
        unsigned long flags;
        u8 prev;

        local_irq_save(flags);
        prev = *p;
        if (prev == old)
                *p = new;
        local_irq_restore(flags);
        return prev;
}
EXPORT_SYMBOL(cmpxchg_386_u8);

/*
 * Emulated 16-bit cmpxchg for the 386, which lacks the CMPXCHG
 * instruction.  Atomicity is achieved by disabling local interrupts,
 * so this is only valid on UP (unsuitable for SMP).
 *
 * Returns the value previously stored at @ptr, whether or not the
 * exchange took place.
 */
unsigned long cmpxchg_386_u16(volatile void *ptr, u16 old, u16 new)
{
        volatile u16 *p = ptr;
        unsigned long flags;
        u16 prev;

        local_irq_save(flags);
        prev = *p;
        if (prev == old)
                *p = new;
        local_irq_restore(flags);
        return prev;
}
EXPORT_SYMBOL(cmpxchg_386_u16);

/*
 * Emulated 32-bit cmpxchg for the 386, which lacks the CMPXCHG
 * instruction.  Atomicity is achieved by disabling local interrupts,
 * so this is only valid on UP (unsuitable for SMP).
 *
 * Returns the value previously stored at @ptr, whether or not the
 * exchange took place.
 */
unsigned long cmpxchg_386_u32(volatile void *ptr, u32 old, u32 new)
{
        volatile u32 *p = ptr;
        unsigned long flags;
        u32 prev;

        local_irq_save(flags);
        prev = *p;
        if (prev == old)
                *p = new;
        local_irq_restore(flags);
        return prev;
}
EXPORT_SYMBOL(cmpxchg_386_u32);
#endif



^ permalink raw reply	[flat|nested] 3+ messages in thread

end of thread, other threads:[~2006-12-06 19:25 UTC | newest]

Thread overview: 3+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2006-12-06 17:56 [PATCH] WorkStruct: Implement generic UP cmpxchg() where an arch doesn't support it [try #2] David Howells
2006-12-06 19:02 ` Christoph Lameter
2006-12-06 19:24   ` Christoph Lameter

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for NNTP newsgroup(s).