Diffstat (limited to 'include/internal/refcount.h')
-rw-r--r--  include/internal/refcount.h  207
1 file changed, 166 insertions, 41 deletions
diff --git a/include/internal/refcount.h b/include/internal/refcount.h
index 64fb77fba58f..8de230f343ac 100644
--- a/include/internal/refcount.h
+++ b/include/internal/refcount.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
@@ -12,6 +12,7 @@
# include <openssl/e_os2.h>
# include <openssl/trace.h>
+# include <openssl/err.h>
# if defined(OPENSSL_THREADS) && !defined(OPENSSL_DEV_NO_ATOMICS)
# if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \
@@ -25,12 +26,19 @@
# define HAVE_ATOMICS 1
-typedef _Atomic int CRYPTO_REF_COUNT;
+# if defined(__has_feature)
+# if __has_feature(thread_sanitizer)
+# define OSSL_TSAN_BUILD
+# endif
+# endif
+
+typedef struct {
+ _Atomic int val;
+} CRYPTO_REF_COUNT;
-static inline int CRYPTO_UP_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_add_explicit(val, 1, memory_order_relaxed) + 1;
+ *ret = atomic_fetch_add_explicit(&refcnt->val, 1, memory_order_relaxed) + 1;
return 1;
}
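
For call sites, the visible change in this hunk is the signature: the explicit lock argument is gone and the counter is now an opaque CRYPTO_REF_COUNT rather than a bare int. A minimal, hypothetical call-site sketch (the FOO type and its "references" field are illustrative, not part of this patch) could look like:

    #include <openssl/crypto.h>
    #include "internal/refcount.h"

    typedef struct foo_st {
        CRYPTO_REF_COUNT references;  /* previously a bare int plus a separate lock */
        /* ... other fields ... */
    } FOO;

    int FOO_up_ref(FOO *foo)
    {
        int i;

        /* old API: CRYPTO_UP_REF(&foo->references, &i, foo->lock); */
        if (!CRYPTO_UP_REF(&foo->references, &i))
            return 0;
        REF_ASSERT_ISNT(i < 2);
        return i > 1;
    }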
@@ -44,12 +52,25 @@ static inline int CRYPTO_UP_REF(_Atomic int *val, int *ret,
* to mutable members doesn't have to be serialized anymore, which would
* otherwise imply an acquire fence. Hence conditional acquire fence...
*/
-static inline int CRYPTO_DOWN_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_sub_explicit(val, 1, memory_order_relaxed) - 1;
+# ifdef OSSL_TSAN_BUILD
+ /*
+ * TSAN requires acq_rel here: with plain release ordering it reports
+ * a false positive race when the object that contains the refcount
+ * is freed.
+ */
+ *ret = atomic_fetch_sub_explicit(&refcnt->val, 1, memory_order_acq_rel) - 1;
+# else
+ *ret = atomic_fetch_sub_explicit(&refcnt->val, 1, memory_order_release) - 1;
if (*ret == 0)
atomic_thread_fence(memory_order_acquire);
+# endif
+ return 1;
+}
+
+static inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
+{
+ *ret = atomic_load_explicit(&refcnt->val, memory_order_acquire);
return 1;
}
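
The relaxed/release split above is only safe if the caller follows the usual last-reference pattern: every decrement is a release, and only the thread that observes zero needs acquire ordering before tearing the object down (on TSAN builds the acq_rel decrement provides that directly). A sketch of that pattern, reusing the hypothetical FOO type from the previous example:

    void FOO_free(FOO *foo)
    {
        int i;

        if (foo == NULL)
            return;
        if (!CRYPTO_DOWN_REF(&foo->references, &i))
            return;
        if (i > 0)                      /* another reference is still live */
            return;
        REF_ASSERT_ISNT(i < 0);
        /*
         * The conditional acquire fence in CRYPTO_DOWN_REF() has ordered
         * all prior writes before this point, so teardown is safe.
         */
        CRYPTO_FREE_REF(&foo->references);
        OPENSSL_free(foo);
    }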
@@ -57,37 +78,52 @@ static inline int CRYPTO_DOWN_REF(_Atomic int *val, int *ret,
# define HAVE_ATOMICS 1
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+} CRYPTO_REF_COUNT;
-static __inline__ int CRYPTO_UP_REF(int *val, int *ret, ossl_unused void *lock)
+static __inline__ int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_add(val, 1, __ATOMIC_RELAXED) + 1;
+ *ret = __atomic_fetch_add(&refcnt->val, 1, __ATOMIC_RELAXED) + 1;
return 1;
}
-static __inline__ int CRYPTO_DOWN_REF(int *val, int *ret,
- ossl_unused void *lock)
+static __inline__ int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_sub(val, 1, __ATOMIC_RELAXED) - 1;
+ *ret = __atomic_fetch_sub(&refcnt->val, 1, __ATOMIC_RELEASE) - 1;
if (*ret == 0)
__atomic_thread_fence(__ATOMIC_ACQUIRE);
return 1;
}
+
+static __inline__ int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
+{
+ *ret = __atomic_load_n(&refcnt->val, __ATOMIC_RELAXED);
+ return 1;
+}
+
# elif defined(__ICL) && defined(_WIN32)
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
+
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
+{
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, 1) + 1;
+ return 1;
+}
+
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, 1) + 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, -1) - 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, -1) - 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, 0);
return 1;
}
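
The ICL branch has no dedicated atomic-load intrinsic, so CRYPTO_GET_REF() reads the counter by atomically adding zero: _InterlockedExchangeAdd() returns the previous value, which is an atomic snapshot of the counter and leaves it unchanged. Outside this header the same trick looks like the following (illustration only, not part of the patch):

    static __inline int atomic_read_int(long volatile *p)
    {
        /* Adding 0 returns the value *p held at the time of the operation. */
        return (int)_InterlockedExchangeAdd(p, 0);
    }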
@@ -95,7 +131,9 @@ static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
# if (defined(_M_ARM) && _M_ARM>=7 && !defined(_WIN32_WCE)) || defined(_M_ARM64)
# include <intrin.h>
@@ -103,21 +141,24 @@ typedef volatile int CRYPTO_REF_COUNT;
# define _ARM_BARRIER_ISH _ARM64_BARRIER_ISH
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, 1) + 1;
+ *ret = _InterlockedExchangeAdd_nf(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, -1) - 1;
- if (*ret == 0)
- __dmb(_ARM_BARRIER_ISH);
+ *ret = _InterlockedExchangeAdd(&refcnt->val, -1) - 1;
return 1;
}
+
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
+{
+ *ret = _InterlockedExchangeAdd_acq((void *)&refcnt->val, 0);
+ return 1;
+}
+
# else
# if !defined(_WIN32_WCE)
# pragma intrinsic(_InterlockedExchangeAdd)
@@ -131,19 +172,24 @@ static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
# endif
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((long volatile *)val, 1) + 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((long volatile *)val, -1) - 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, -1) - 1;
return 1;
}
+
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
+{
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 0);
+ return 1;
+}
+
# endif
# endif
@@ -156,13 +202,92 @@ static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
*/
# ifndef HAVE_ATOMICS
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+# ifdef OPENSSL_THREADS
+ CRYPTO_RWLOCK *lock;
+# endif
+} CRYPTO_REF_COUNT;
+
+# ifdef OPENSSL_THREADS
-# define CRYPTO_UP_REF(val, ret, lock) CRYPTO_atomic_add(val, 1, ret, lock)
-# define CRYPTO_DOWN_REF(val, ret, lock) CRYPTO_atomic_add(val, -1, ret, lock)
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, 1, ret, refcnt->lock);
+}
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, -1, ret, refcnt->lock);
+}
+
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_load_int(&refcnt->val, ret, refcnt->lock);
+}
+
+# define CRYPTO_NEW_FREE_DEFINED 1
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ refcnt->lock = CRYPTO_THREAD_lock_new();
+ if (refcnt->lock == NULL) {
+ ERR_raise(ERR_LIB_CRYPTO, ERR_R_CRYPTO_LIB);
+ return 0;
+ }
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+ if (refcnt != NULL)
+ CRYPTO_THREAD_lock_free(refcnt->lock);
+}
+
+# else /* OPENSSL_THREADS */
+
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val++;
+ *ret = refcnt->val;
+ return 1;
+}
+
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val--;
+ *ret = refcnt->val;
+ return 1;
+}
+
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ *ret = refcnt->val;
+ return 1;
+}
+
+# endif /* OPENSSL_THREADS */
# endif
+# ifndef CRYPTO_NEW_FREE_DEFINED
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+}
+# endif /* CRYPTO_NEW_FREE_DEFINED */
+#undef CRYPTO_NEW_FREE_DEFINED
+
# if !defined(NDEBUG) && !defined(OPENSSL_NO_STDIO)
# define REF_ASSERT_ISNT(test) \
(void)((test) ? (OPENSSL_die("refcount error", __FILE__, __LINE__), 1) : 0)
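
With the lock folded into CRYPTO_REF_COUNT, initialising a refcount can now fail (lock allocation in the threaded no-atomics build), so CRYPTO_NEW_REF() returns an error and must be paired with CRYPTO_FREE_REF(). A constructor sketch, again using the hypothetical FOO type; the matching CRYPTO_FREE_REF() call sits in the FOO_free() sketch above:

    FOO *FOO_new(void)
    {
        FOO *foo = OPENSSL_zalloc(sizeof(*foo));

        if (foo == NULL)
            return NULL;
        if (!CRYPTO_NEW_REF(&foo->references, 1)) {  /* may allocate a lock */
            OPENSSL_free(foo);
            return NULL;
        }
        return foo;
    }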
@@ -172,7 +297,7 @@ typedef int CRYPTO_REF_COUNT;
# define REF_PRINT_EX(text, count, object) \
OSSL_TRACE3(REF_COUNT, "%p:%4d:%s\n", (object), (count), (text));
-# define REF_PRINT_COUNT(text, object) \
- REF_PRINT_EX(text, object->references, (void *)object)
+# define REF_PRINT_COUNT(text, val, object) \
+ REF_PRINT_EX(text, val, (void *)object)
#endif
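
REF_PRINT_COUNT() can no longer peek at object->references itself (the field is a struct now), so call sites pass the count explicitly: typically the value just returned by CRYPTO_UP_REF()/CRYPTO_DOWN_REF(), or one fetched with CRYPTO_GET_REF() as in this illustrative debug helper (FOO remains the hypothetical type from the earlier sketches):

    static void FOO_print_refcount(const char *label, FOO *foo)
    {
        int i;

        if (CRYPTO_GET_REF(&foo->references, &i))
            REF_PRINT_COUNT(label, i, foo);  /* old form: REF_PRINT_COUNT(label, foo) */
    }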