$NetBSD: patch-aj,v 1.7 2000/07/14 04:44:54 jlam Exp $

--- include/storage/s_lock.h.orig	Wed Apr 12 13:16:51 2000
+++ include/storage/s_lock.h	Wed Jul 12 02:40:53 2000
@@ -262,7 +262,17 @@
 #define TAS(lock) tas(lock)
 #define S_UNLOCK(lock) do { __asm__("mb"); *(lock) = 0; } while (0)
 
+#if defined(__GNUC__)
+/*
+ * GCC on the Alpha doesn't appear to handle inlining of assembly with
+ * %0 or %1 properly. This removes the inlining of the tas (test-and-set)
+ * function, which probably slows things down considerably, but correctness
+ * first!
+ */
+static int
+#else
 static __inline__ int
+#endif
 tas(volatile slock_t *lock)
 {
 	register slock_t _res;
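
For reference, a minimal sketch (not part of the patch) of how the conditional above
resolves once applied: under GCC the tas() function is emitted out of line as plain
"static int", while other compilers keep the __inline__ declaration. The typedef and
the function body below are placeholders for this sketch only; the real s_lock.h
implements an atomic test-and-set in Alpha assembly.

    typedef long slock_t;           /* assumption for this sketch */

    #if defined(__GNUC__)
    static int                      /* out of line: sidesteps the %0/%1 inlining bug */
    #else
    static __inline__ int
    #endif
    tas(volatile slock_t *lock)
    {
    	register slock_t _res;

    	_res = *lock;               /* placeholder: the real code does this atomically */
    	*lock = 1;
    	return (int) _res;          /* nonzero means the lock was already held */
    }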