// the arguments the object is called with.  If D is a pointer type
// P*, then let addend (of type I) be add_value * sizeof(P);
// otherwise, addend is add_value.
//
// FetchAndAdd requires the derived class to provide
//   fetch_and_add(dest, addend)
// atomically adding addend to the value of dest, and returning the
// old value.
//
// AddAndFetch requires the derived class to provide
//   add_and_fetch(dest, addend)
// atomically adding addend to the value of dest, and returning the
// new value.
//
// When D is a pointer type P*, both fetch_and_add and add_and_fetch
// treat it as if it were a uintptr_t; they do not perform any
// invoked on the translated arguments, and the result translated
// back.  Type is the parameter / return type of the helper
// function.  No scaling of add_value is performed when D is a pointer
// type, so this function can be used to implement the support function
// required by AddAndFetch.
// Helper that adapts a translated platform function fn (which takes its
// addend first and its destination second) to the dest-first interface.
// Type is the parameter / return type of fn.
template<typename Type, typename Fn, typename D, typename I>
static D add_using_helper(Fn fn, D volatile* dest, I add_value);
291 |
291 |
// Dispatch handler for cmpxchg.  Provides type-based validity
// checking and limited conversions around calls to the
// platform-specific implementation layer provided by
// PlatformCmpxchg.
// platform file, which may use these as base classes, requiring they
// be complete.
517 |
517 |
518 template<typename Derived> |
518 template<typename Derived> |
519 struct Atomic::FetchAndAdd { |
519 struct Atomic::FetchAndAdd { |
520 template<typename I, typename D> |
520 template<typename D, typename I> |
521 D operator()(I add_value, D volatile* dest, atomic_memory_order order) const; |
521 D operator()(D volatile* dest, I add_value, atomic_memory_order order) const; |
522 }; |
522 }; |
523 |
523 |
524 template<typename Derived> |
524 template<typename Derived> |
525 struct Atomic::AddAndFetch { |
525 struct Atomic::AddAndFetch { |
526 template<typename I, typename D> |
526 template<typename D, typename I> |
527 D operator()(I add_value, D volatile* dest, atomic_memory_order order) const; |
527 D operator()(D volatile* dest, I add_value, atomic_memory_order order) const; |
528 }; |
528 }; |
529 |
529 |
530 template<typename D> |
530 template<typename D> |
531 inline void Atomic::inc(D volatile* dest, atomic_memory_order order) { |
531 inline void Atomic::inc(D volatile* dest, atomic_memory_order order) { |
532 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
532 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
533 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I; |
533 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I; |
534 Atomic::add(I(1), dest, order); |
534 Atomic::add(dest, I(1), order); |
535 } |
535 } |
536 |
536 |
537 template<typename D> |
537 template<typename D> |
538 inline void Atomic::dec(D volatile* dest, atomic_memory_order order) { |
538 inline void Atomic::dec(D volatile* dest, atomic_memory_order order) { |
539 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
539 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
540 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I; |
540 typedef typename Conditional<IsPointer<D>::value, ptrdiff_t, D>::type I; |
541 // Assumes two's complement integer representation. |
541 // Assumes two's complement integer representation. |
542 #pragma warning(suppress: 4146) |
542 #pragma warning(suppress: 4146) |
543 Atomic::add(I(-1), dest, order); |
543 Atomic::add(dest, I(-1), order); |
544 } |
544 } |
545 |
545 |
546 template<typename I, typename D> |
546 template<typename I, typename D> |
547 inline D Atomic::sub(I sub_value, D volatile* dest, atomic_memory_order order) { |
547 inline D Atomic::sub(I sub_value, D volatile* dest, atomic_memory_order order) { |
548 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
548 STATIC_ASSERT(IsPointer<D>::value || IsIntegral<D>::value); |
676 template <typename D, typename T> |
676 template <typename D, typename T> |
677 inline void Atomic::release_store_fence(volatile D* p, T v) { |
677 inline void Atomic::release_store_fence(volatile D* p, T v) { |
678 StoreImpl<D, T, PlatformOrderedStore<sizeof(D), RELEASE_X_FENCE> >()(p, v); |
678 StoreImpl<D, T, PlatformOrderedStore<sizeof(D), RELEASE_X_FENCE> >()(p, v); |
679 } |
679 } |
680 |
680 |
681 template<typename I, typename D> |
681 template<typename D, typename I> |
682 inline D Atomic::add(I add_value, D volatile* dest, |
682 inline D Atomic::add(D volatile* dest, I add_value, |
683 atomic_memory_order order) { |
683 atomic_memory_order order) { |
684 return AddImpl<I, D>()(add_value, dest, order); |
684 return AddImpl<D, I>()(dest, add_value, order); |
685 } |
685 } |
686 |
686 |
687 template<typename I, typename D> |
687 template<typename D, typename I> |
688 struct Atomic::AddImpl< |
688 struct Atomic::AddImpl< |
689 I, D, |
689 D, I, |
690 typename EnableIf<IsIntegral<I>::value && |
690 typename EnableIf<IsIntegral<I>::value && |
691 IsIntegral<D>::value && |
691 IsIntegral<D>::value && |
692 (sizeof(I) <= sizeof(D)) && |
692 (sizeof(I) <= sizeof(D)) && |
693 (IsSigned<I>::value == IsSigned<D>::value)>::type> |
693 (IsSigned<I>::value == IsSigned<D>::value)>::type> |
694 { |
694 { |
695 D operator()(I add_value, D volatile* dest, atomic_memory_order order) const { |
695 D operator()(D volatile* dest, I add_value, atomic_memory_order order) const { |
696 D addend = add_value; |
696 D addend = add_value; |
697 return PlatformAdd<sizeof(D)>()(addend, dest, order); |
697 return PlatformAdd<sizeof(D)>()(dest, addend, order); |
698 } |
698 } |
699 }; |
699 }; |
700 |
700 |
701 template<typename I, typename P> |
701 template<typename P, typename I> |
702 struct Atomic::AddImpl< |
702 struct Atomic::AddImpl< |
703 I, P*, |
703 P*, I, |
704 typename EnableIf<IsIntegral<I>::value && (sizeof(I) <= sizeof(P*))>::type> |
704 typename EnableIf<IsIntegral<I>::value && (sizeof(I) <= sizeof(P*))>::type> |
705 { |
705 { |
706 P* operator()(I add_value, P* volatile* dest, atomic_memory_order order) const { |
706 P* operator()(P* volatile* dest, I add_value, atomic_memory_order order) const { |
707 STATIC_ASSERT(sizeof(intptr_t) == sizeof(P*)); |
707 STATIC_ASSERT(sizeof(intptr_t) == sizeof(P*)); |
708 STATIC_ASSERT(sizeof(uintptr_t) == sizeof(P*)); |
708 STATIC_ASSERT(sizeof(uintptr_t) == sizeof(P*)); |
709 typedef typename Conditional<IsSigned<I>::value, |
709 typedef typename Conditional<IsSigned<I>::value, |
710 intptr_t, |
710 intptr_t, |
711 uintptr_t>::type CI; |
711 uintptr_t>::type CI; |
712 CI addend = add_value; |
712 CI addend = add_value; |
713 return PlatformAdd<sizeof(P*)>()(addend, dest, order); |
713 return PlatformAdd<sizeof(P*)>()(dest, addend, order); |
714 } |
714 } |
715 }; |
715 }; |
716 |
716 |
717 template<typename Derived> |
717 template<typename Derived> |
718 template<typename I, typename D> |
718 template<typename D, typename I> |
719 inline D Atomic::FetchAndAdd<Derived>::operator()(I add_value, D volatile* dest, |
719 inline D Atomic::FetchAndAdd<Derived>::operator()(D volatile* dest, I add_value, |
720 atomic_memory_order order) const { |
720 atomic_memory_order order) const { |
721 I addend = add_value; |
721 I addend = add_value; |
722 // If D is a pointer type P*, scale by sizeof(P). |
722 // If D is a pointer type P*, scale by sizeof(P). |
723 if (IsPointer<D>::value) { |
723 if (IsPointer<D>::value) { |
724 addend *= sizeof(typename RemovePointer<D>::type); |
724 addend *= sizeof(typename RemovePointer<D>::type); |
725 } |
725 } |
726 D old = static_cast<const Derived*>(this)->fetch_and_add(addend, dest, order); |
726 D old = static_cast<const Derived*>(this)->fetch_and_add(dest, addend, order); |
727 return old + add_value; |
727 return old + add_value; |
728 } |
728 } |
729 |
729 |
730 template<typename Derived> |
730 template<typename Derived> |
731 template<typename I, typename D> |
731 template<typename D, typename I> |
732 inline D Atomic::AddAndFetch<Derived>::operator()(I add_value, D volatile* dest, |
732 inline D Atomic::AddAndFetch<Derived>::operator()(D volatile* dest, I add_value, |
733 atomic_memory_order order) const { |
733 atomic_memory_order order) const { |
734 // If D is a pointer type P*, scale by sizeof(P). |
734 // If D is a pointer type P*, scale by sizeof(P). |
735 if (IsPointer<D>::value) { |
735 if (IsPointer<D>::value) { |
736 add_value *= sizeof(typename RemovePointer<D>::type); |
736 add_value *= sizeof(typename RemovePointer<D>::type); |
737 } |
737 } |
738 return static_cast<const Derived*>(this)->add_and_fetch(add_value, dest, order); |
738 return static_cast<const Derived*>(this)->add_and_fetch(dest, add_value, order); |
739 } |
739 } |
740 |
740 |
741 template<typename Type, typename Fn, typename I, typename D> |
741 template<typename Type, typename Fn, typename D, typename I> |
742 inline D Atomic::add_using_helper(Fn fn, I add_value, D volatile* dest) { |
742 inline D Atomic::add_using_helper(Fn fn, D volatile* dest, I add_value) { |
743 return PrimitiveConversions::cast<D>( |
743 return PrimitiveConversions::cast<D>( |
744 fn(PrimitiveConversions::cast<Type>(add_value), |
744 fn(PrimitiveConversions::cast<Type>(add_value), |
745 reinterpret_cast<Type volatile*>(dest))); |
745 reinterpret_cast<Type volatile*>(dest))); |
746 } |
746 } |
747 |
747 |