|
| 1 | +//===----------------------------------------------------------------------===// |
| 2 | +// |
| 3 | +// This source file is part of the Swift.org open source project |
| 4 | +// |
| 5 | +// Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors |
| 6 | +// Licensed under Apache License v2.0 with Runtime Library Exception |
| 7 | +// |
| 8 | +// See https://swift.org/LICENSE.txt for license information |
| 9 | +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors |
| 10 | +// |
| 11 | +//===----------------------------------------------------------------------===// |
| 12 | + |
/// A legacy reference-typed wrapper around an `Int` whose operations are
/// sequentially consistent atomics.
///
/// Deprecated in Swift 4.2 and obsoleted in 5.0; kept only for source
/// compatibility with older clients.
@available(swift, deprecated: 4.2, obsoleted: 5.0)
public final class _stdlib_AtomicInt {
  // Backing storage; only ever accessed through `_valuePtr` so that all
  // reads/writes go through the atomic entry points below.
  internal var _value: Int

  // Pointer to the stored `_value`.  Obtains the raw address of this
  // object's stored properties and rebinds it to `Int`; this relies on
  // `_value` being the first (and only) stored property — do not add
  // stored properties above it.
  internal var _valuePtr: UnsafeMutablePointer<Int> {
    return _getUnsafePointerToStoredProperties(self).assumingMemoryBound(
      to: Int.self)
  }

  /// Creates an atomic integer with the given initial value (default 0).
  /// The initial store is a plain (non-atomic) write.
  public init(_ value: Int = 0) {
    _value = value
  }

  /// Atomically stores `desired` with sequentially consistent ordering.
  public func store(_ desired: Int) {
    return _swift_stdlib_atomicStoreInt(object: _valuePtr, desired: desired)
  }

  /// Atomically loads the current value with sequentially consistent
  /// ordering.
  public func load() -> Int {
    return _swift_stdlib_atomicLoadInt(object: _valuePtr)
  }

% for operation_name, operation in [ ('Add', '+'), ('And', '&'), ('Or', '|'), ('Xor', '^') ]:
  /// Atomically applies `${operation}` with `operand` and returns the value
  /// the variable held *before* the operation.  No overflow checking.
  @discardableResult
  public func fetchAnd${operation_name}(_ operand: Int) -> Int {
    return _swift_stdlib_atomicFetch${operation_name}Int(
      object: _valuePtr,
      operand: operand)
  }

  /// Atomically applies `${operation}` with `operand` and returns the value
  /// the variable holds *after* the operation.  Computed by re-applying the
  /// operation to the fetched old value; no overflow checking.
  public func ${operation_name.lower()}AndFetch(_ operand: Int) -> Int {
    return fetchAnd${operation_name}(operand) ${operation} operand
  }
% end

  /// Atomic strong compare-and-exchange (seq_cst).
  ///
  /// If the current value equals `expected`, replaces it with `desired` and
  /// returns `true`; otherwise leaves it unchanged and returns `false`.
  /// In both cases `expected` is updated to the value that was observed.
  public func compareExchange(expected: inout Int, desired: Int) -> Bool {
    // Copy through a local so the inout argument is not exposed as a
    // pointer across the atomic call.
    var expectedVar = expected
    let result = _swift_stdlib_atomicCompareExchangeStrongInt(
      object: _valuePtr,
      expected: &expectedVar,
      desired: desired)
    expected = expectedVar
    return result
  }
}
| 57 | + |
/// Strong compare-and-exchange on `target` with sequentially consistent
/// ordering for both success and failure.
///
/// - Parameters:
///   - target: Pointer to the atomic variable.
///   - expected: On entry, the value to compare against; on return, set to
///     the value actually observed (unconditionally overwritten).
///   - desired: The value stored when the comparison succeeds.
/// - Returns: `true` if the exchange was performed.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
internal func _swift_stdlib_atomicCompareExchangeStrongInt(
  object target: UnsafeMutablePointer<Int>,
  expected: UnsafeMutablePointer<Int>,
  desired: Int) -> Bool {
// Select the builtin width matching the platform's `Int`:
// 32-bit words on i386/arm/arm64_32, 64-bit otherwise.
#if arch(i386) || arch(arm) || arch(arm64_32)
  let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int32(
    target._rawValue, expected.pointee._value, desired._value)
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int64(
    target._rawValue, expected.pointee._value, desired._value)
#endif
  // Report the observed value back through `expected`, matching the C11
  // strong compare-exchange convention.
  expected.pointee._value = oldValue
  return Bool(won)
}
| 73 | + |
| 74 | + |
/// Atomically loads the value at `target` with sequentially consistent
/// ordering, using the builtin width that matches the platform's `Int`.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
internal func _swift_stdlib_atomicLoadInt(
  object target: UnsafeMutablePointer<Int>) -> Int {
#if arch(i386) || arch(arm) || arch(arm64_32)
  // 32-bit `Int` platforms.
  let value = Builtin.atomicload_seqcst_Int32(target._rawValue)
  return Int(value)
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  // 64-bit `Int` platforms.
  let value = Builtin.atomicload_seqcst_Int64(target._rawValue)
  return Int(value)
#endif
}
| 86 | + |
/// Atomically stores `desired` at `target` with sequentially consistent
/// ordering, using the builtin width that matches the platform's `Int`.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
internal func _swift_stdlib_atomicStoreInt(
  object target: UnsafeMutablePointer<Int>,
  desired: Int) {
#if arch(i386) || arch(arm) || arch(arm64_32)
  // 32-bit `Int` platforms.
  Builtin.atomicstore_seqcst_Int32(target._rawValue, desired._value)
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  // 64-bit `Int` platforms.
  Builtin.atomicstore_seqcst_Int64(target._rawValue, desired._value)
#endif
}
| 97 | + |
% for operation in ['Add', 'And', 'Or', 'Xor']:
/// Atomically applies `${operation}` to the value at `target` and returns
/// the value held *before* the operation (seq_cst ordering).
// Warning: no overflow checking.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
internal func _swift_stdlib_atomicFetch${operation}Int(
  object target: UnsafeMutablePointer<Int>,
  operand: Int) -> Int {
  let rawTarget = UnsafeMutableRawPointer(target)
// Dispatch to the fixed-width variant matching the platform's `Int`.
// NOTE: arm64_32 added to the 32-bit branch for consistency with the
// other atomic entry points in this file; previously neither branch was
// selected on that target, leaving `value` undefined.
#if arch(i386) || arch(arm) || arch(arm64_32)
  let value = _swift_stdlib_atomicFetch${operation}Int32(
    object: rawTarget.assumingMemoryBound(to: Int32.self),
    operand: Int32(operand))
#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
  let value = _swift_stdlib_atomicFetch${operation}Int64(
    object: rawTarget.assumingMemoryBound(to: Int64.self),
    operand: Int64(operand))
#endif
  return Int(value)
}

% for bits in [ 32, 64 ]:

/// Atomically applies `${operation}` to the `Int${bits}` at `target` and
/// returns the prior value (seq_cst read-modify-write builtin).
// Warning: no overflow checking.
@usableFromInline // used by SwiftPrivate._stdlib_AtomicInt
internal func _swift_stdlib_atomicFetch${operation}Int${bits}(
  object target: UnsafeMutablePointer<Int${bits}>,
  operand: Int${bits}) -> Int${bits} {

  let value = Builtin.atomicrmw_${operation.lower()}_seqcst_Int${bits}(
    target._rawValue, operand._value)

  return Int${bits}(value)
}

% end

% end
| 134 | + |
0 commit comments