author     Pauli <pauli@openssl.org>    2023-06-22 09:48:56 +1000
committer  Pauli <pauli@openssl.org>    2023-07-01 21:18:08 +1000
commit     99fd5b2b103f701151f4eb3fe0500ae0388e5136 (patch)
tree       48fdd8b36fca9e3d0670791466c5fa2b4d6c22df
parent     500e479db1beae5fa5691d40b866329d2fdc62e7 (diff)
refcnt: convert reference counts to a structure
The structure includes the count and optionally a lock. The count has
the appropriate qualifiers.

Reviewed-by: Tomas Mraz <tomas@openssl.org>
Reviewed-by: Matt Caswell <matt@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/21260)
-rw-r--r--  include/internal/refcount.h | 177
1 file changed, 125 insertions(+), 52 deletions(-)
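
Before the diff itself, a minimal sketch of the calling convention this change introduces: the count (and, when native atomics are unavailable, the lock) now travel together inside CRYPTO_REF_COUNT, so call sites no longer pass a separate lock argument. The MY_OBJECT type and its functions below are hypothetical and added only for illustration; only the CRYPTO_NEW_REF, CRYPTO_UP_REF, CRYPTO_DOWN_REF and CRYPTO_FREE_REF calls come from this patch.

/*
 * Illustrative use of the new CRYPTO_REF_COUNT interface.  MY_OBJECT is a
 * hypothetical internal object type, not part of the patch.
 */
#include <openssl/crypto.h>
#include "internal/refcount.h"

typedef struct my_object_st {
    CRYPTO_REF_COUNT references;   /* count plus, if needed, a fallback lock */
    /* ... other fields ... */
} MY_OBJECT;

static MY_OBJECT *MY_OBJECT_new(void)
{
    MY_OBJECT *obj = OPENSSL_zalloc(sizeof(*obj));

    if (obj == NULL)
        return NULL;
    /* Initialise the count to 1; allocates the lock in the fallback case. */
    if (!CRYPTO_NEW_REF(&obj->references, 1)) {
        OPENSSL_free(obj);
        return NULL;
    }
    return obj;
}

static int MY_OBJECT_up_ref(MY_OBJECT *obj)
{
    int i;

    /* No separate lock argument any more: it lives inside the structure. */
    return CRYPTO_UP_REF(&obj->references, &i);
}

static void MY_OBJECT_free(MY_OBJECT *obj)
{
    int i;

    if (obj == NULL)
        return;
    if (!CRYPTO_DOWN_REF(&obj->references, &i) || i > 0)
        return;
    CRYPTO_FREE_REF(&obj->references);   /* frees the lock, if one was made */
    OPENSSL_free(obj);
}

Keeping the lock inside the structure lets the non-atomic fallback path continue to use the CRYPTO_atomic_add()-based implementation without every caller having to create and pass a lock of its own.
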
diff --git a/include/internal/refcount.h b/include/internal/refcount.h
index b74c283ae5..9740fb10ce 100644
--- a/include/internal/refcount.h
+++ b/include/internal/refcount.h
@@ -25,12 +25,13 @@
# define HAVE_ATOMICS 1
-typedef _Atomic int CRYPTO_REF_COUNT;
+typedef struct {
+ _Atomic int val;
+} CRYPTO_REF_COUNT;
-static inline int CRYPTO_UP_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_add_explicit(val, 1, memory_order_relaxed) + 1;
+ *ret = atomic_fetch_add_explicit(&refcnt->val, 1, memory_order_relaxed) + 1;
return 1;
}
@@ -44,19 +45,17 @@ static inline int CRYPTO_UP_REF(_Atomic int *val, int *ret,
* to mutable members doesn't have to be serialized anymore, which would
* otherwise imply an acquire fence. Hence conditional acquire fence...
*/
-static inline int CRYPTO_DOWN_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_sub_explicit(val, 1, memory_order_relaxed) - 1;
+ *ret = atomic_fetch_sub_explicit(&refcnt->val, 1, memory_order_relaxed) - 1;
if (*ret == 0)
atomic_thread_fence(memory_order_acquire);
return 1;
}
-static inline int CRYPTO_GET_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_load_explicit(val, memory_order_relaxed);
+ *ret = atomic_load_explicit(&refcnt->val, memory_order_relaxed);
return 1;
}
@@ -64,51 +63,52 @@ static inline int CRYPTO_GET_REF(_Atomic int *val, int *ret,
# define HAVE_ATOMICS 1
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+} CRYPTO_REF_COUNT;
-static __inline__ int CRYPTO_UP_REF(int *val, int *ret, ossl_unused void *lock)
+static __inline__ int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_add(val, 1, __ATOMIC_RELAXED) + 1;
+ *ret = __atomic_fetch_add(&refcnt->val, 1, __ATOMIC_RELAXED) + 1;
return 1;
}
-static __inline__ int CRYPTO_DOWN_REF(int *val, int *ret,
- ossl_unused void *lock)
+static __inline__ int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_sub(val, 1, __ATOMIC_RELAXED) - 1;
+ *ret = __atomic_fetch_sub(&refcnt->val, 1, __ATOMIC_RELAXED) - 1;
if (*ret == 0)
__atomic_thread_fence(__ATOMIC_ACQUIRE);
return 1;
}
-static __inline__ int CRYPTO_GET_REF(int *val, int *ret, ossl_unused void *lock)
+static __inline__ int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_load_n(val, __ATOMIC_RELAXED);
+ *ret = __atomic_load_n(&refcnt->val, __ATOMIC_RELAXED);
return 1;
}
# elif defined(__ICL) && defined(_WIN32)
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
+
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, 1) + 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, -1) - 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, -1) - 1;
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedOr((void *)val, 0);
+ *ret = _InterlockedOr((void *)&refcnt->val, 0);
return 1;
}
@@ -116,7 +116,9 @@ static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
# if (defined(_M_ARM) && _M_ARM>=7 && !defined(_WIN32_WCE)) || defined(_M_ARM64)
# include <intrin.h>
@@ -124,26 +126,23 @@ typedef volatile int CRYPTO_REF_COUNT;
# define _ARM_BARRIER_ISH _ARM64_BARRIER_ISH
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, 1) + 1;
+ *ret = _InterlockedExchangeAdd_nf(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, -1) - 1;
+ *ret = _InterlockedExchangeAdd_nf(&refcnt->val, -1) - 1;
if (*ret == 0)
__dmb(_ARM_BARRIER_ISH);
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedOr_nf((void *)val, 0);
+ *ret = _InterlockedOr_nf((void *)&refcnt->val, 0);
return 1;
}
@@ -160,24 +159,21 @@ static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
# endif
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, 1) + 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, -1) - 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, -1) - 1;
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, 0);
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 0);
return 1;
}
@@ -193,14 +189,91 @@ static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
*/
# ifndef HAVE_ATOMICS
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+# ifdef OPENSSL_THREADS
+ CRYPTO_RWLOCK *lock;
+# endif
+} CRYPTO_REF_COUNT;
+
+# ifdef OPENSSL_THREADS
+
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, 1, ret, refcnt->lock);
+}
+
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, -1, ret, refcnt->lock);
+}
+
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_load_int(&refcnt->val, ret, refcnt->lock);
+}
+
+# define CRYPTO_NEW_FREE_DEFINED 1
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ refcnt->lock = CRYPTO_THREAD_lock_new();
+ if (refcnt->lock == NULL) {
+ ERR_raise(ERR_LIB_CRYPTO, ERR_R_CRYPTO_LIB);
+ return 0;
+ }
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+ CRYPTO_THREAD_lock_free(refcnt->lock);
+}
+
+# else /* OPENSSL_THREADS */
+
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val++;
+ *ret = refcnt->val;
+ return 1;
+}
-# define CRYPTO_UP_REF(val, ret, lock) CRYPTO_atomic_add(val, 1, ret, lock)
-# define CRYPTO_DOWN_REF(val, ret, lock) CRYPTO_atomic_add(val, -1, ret, lock)
-# define CRYPTO_GET_REF(val, ret, lock) CRYPTO_atomic_load_int(val, ret, lock)
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val--;
+ *ret = refcnt->val;
+ return 1;
+}
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ *ret = refcnt->val;
+ return 1;
+}
+
+# endif /* OPENSSL_THREADS */
# endif
+# ifndef CRYPTO_NEW_FREE_DEFINED
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+}
+# endif /* CRYPTO_NEW_FREE_DEFINED */
+#undef CRYPTO_NEW_FREE_DEFINED
+
# if !defined(NDEBUG) && !defined(OPENSSL_NO_STDIO)
# define REF_ASSERT_ISNT(test) \
(void)((test) ? (OPENSSL_die("refcount error", __FILE__, __LINE__), 1) : 0)
@@ -211,6 +284,6 @@ typedef int CRYPTO_REF_COUNT;
# define REF_PRINT_EX(text, count, object) \
OSSL_TRACE3(REF_COUNT, "%p:%4d:%s\n", (object), (count), (text));
# define REF_PRINT_COUNT(text, object) \
- REF_PRINT_EX(text, object->references, (void *)object)
+ REF_PRINT_EX(text, object->references.val, (void *)object)
#endif
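
Finally, a hedged sketch of how the debug helpers touched by the last hunk combine with the new structure member. It reuses the hypothetical MY_OBJECT type from the sketch above; only REF_PRINT_COUNT, REF_ASSERT_ISNT and the CRYPTO_*_REF calls are from this header.

/*
 * REF_PRINT_COUNT now reads the embedded structure's ->references.val member
 * instead of a bare int field.  MY_OBJECT is hypothetical, as above.
 */
static void MY_OBJECT_free_traced(MY_OBJECT *obj)
{
    int i;

    if (obj == NULL)
        return;
    CRYPTO_DOWN_REF(&obj->references, &i);
    REF_PRINT_COUNT("MY_OBJECT", obj);  /* traces obj->references.val */
    if (i > 0)
        return;
    REF_ASSERT_ISNT(i < 0);             /* dies on underflow in debug builds */
    CRYPTO_FREE_REF(&obj->references);
    OPENSSL_free(obj);
}
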