Annotation of sys/arch/amd64/include/lock.h, Revision 1.1
1.1 ! nbrk 1: /* $OpenBSD: lock.h,v 1.2 2007/05/25 20:48:33 art Exp $ */
! 2: /* $NetBSD: lock.h,v 1.1.2.2 2000/05/03 14:40:55 sommerfeld Exp $ */
! 3:
! 4: /*-
! 5: * Copyright (c) 2000 The NetBSD Foundation, Inc.
! 6: * All rights reserved.
! 7: *
! 8: * This code is derived from software contributed to The NetBSD Foundation
! 9: * by Jason R. Thorpe.
! 10: *
! 11: * Redistribution and use in source and binary forms, with or without
! 12: * modification, are permitted provided that the following conditions
! 13: * are met:
! 14: * 1. Redistributions of source code must retain the above copyright
! 15: * notice, this list of conditions and the following disclaimer.
! 16: * 2. Redistributions in binary form must reproduce the above copyright
! 17: * notice, this list of conditions and the following disclaimer in the
! 18: * documentation and/or other materials provided with the distribution.
! 19: * 3. All advertising materials mentioning features or use of this software
! 20: * must display the following acknowledgement:
! 21: * This product includes software developed by the NetBSD
! 22: * Foundation, Inc. and its contributors.
! 23: * 4. Neither the name of The NetBSD Foundation nor the names of its
! 24: * contributors may be used to endorse or promote products derived
! 25: * from this software without specific prior written permission.
! 26: *
! 27: * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
! 28: * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
! 29: * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
! 30: * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
! 31: * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
! 32: * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
! 33: * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
! 34: * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
! 35: * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
! 36: * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
! 37: * POSSIBILITY OF SUCH DAMAGE.
! 38: */
! 39:
! 40: /*
! 41: * Machine-dependent spin lock operations.
! 42: */
! 43:
! 44: #ifndef _AMD64_LOCK_H_
! 45: #define _AMD64_LOCK_H_
! 46:
/* Spin lock storage: one volatile machine word holding LOCKED/UNLOCKED. */
typedef __volatile int __cpu_simple_lock_t;

#define __SIMPLELOCK_LOCKED	1
#define __SIMPLELOCK_UNLOCKED	0
! 51:
/*
 * Compiler barrier: an empty asm with a "memory" clobber.  It emits no
 * instructions but acts as a sequence point for code generation, so the
 * compiler cannot cache memory values or reorder loads/stores across it.
 * XXX something similar will move to <sys/cdefs.h> or thereabouts.
 */
#define __lockbarrier() __asm __volatile("": : :"memory")
! 61:
! 62: #ifdef LOCKDEBUG
! 63:
! 64: extern void __cpu_simple_lock_init(__cpu_simple_lock_t *);
! 65: extern void __cpu_simple_lock(__cpu_simple_lock_t *);
! 66: extern int __cpu_simple_lock_try(__cpu_simple_lock_t *);
! 67: extern void __cpu_simple_unlock(__cpu_simple_lock_t *);
! 68:
! 69: #else
! 70:
! 71: #include <machine/atomic.h>
! 72:
! 73: static __inline void __cpu_simple_lock_init(__cpu_simple_lock_t *)
! 74: __attribute__((__unused__));
! 75: static __inline void __cpu_simple_lock(__cpu_simple_lock_t *)
! 76: __attribute__((__unused__));
! 77: static __inline int __cpu_simple_lock_try(__cpu_simple_lock_t *)
! 78: __attribute__((__unused__));
! 79: static __inline void __cpu_simple_unlock(__cpu_simple_lock_t *)
! 80: __attribute__((__unused__));
! 81:
! 82: static __inline void
! 83: __cpu_simple_lock_init(__cpu_simple_lock_t *lockp)
! 84: {
! 85: *lockp = __SIMPLELOCK_UNLOCKED;
! 86: __lockbarrier();
! 87: }
! 88:
! 89: static __inline void
! 90: __cpu_simple_lock(__cpu_simple_lock_t *lockp)
! 91: {
! 92: while (x86_atomic_testset_i(lockp, __SIMPLELOCK_LOCKED)
! 93: == __SIMPLELOCK_LOCKED) {
! 94: continue; /* spin */
! 95: }
! 96: __lockbarrier();
! 97: }
! 98:
! 99: static __inline int
! 100: __cpu_simple_lock_try(__cpu_simple_lock_t *lockp)
! 101: {
! 102: int r = (x86_atomic_testset_i(lockp, __SIMPLELOCK_LOCKED)
! 103: == __SIMPLELOCK_UNLOCKED);
! 104:
! 105: __lockbarrier();
! 106:
! 107: return (r);
! 108: }
! 109:
! 110: static __inline void
! 111: __cpu_simple_unlock(__cpu_simple_lock_t *lockp)
! 112: {
! 113: __lockbarrier();
! 114: *lockp = __SIMPLELOCK_UNLOCKED;
! 115: }
! 116:
/*
 * Compare-and-swap helper for rwlocks: evaluates to 0 on a successful
 * swap, nonzero on failure (assumes x86_atomic_cas_ul returns the
 * previous value of *p — TODO confirm against <machine/atomic.h>).
 */
#define rw_cas(p, o, n) (x86_atomic_cas_ul(p, o, n) != o)
! 118:
! 119: #endif /* !LOCKDEBUG */
! 120:
! 121: #endif /* _AMD64_LOCK_H_ */
CVSweb