| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * This file is part of FFmpeg. | ||
| 3 | * | ||
| 4 | * FFmpeg is free software; you can redistribute it and/or | ||
| 5 | * modify it under the terms of the GNU Lesser General Public | ||
| 6 | * License as published by the Free Software Foundation; either | ||
| 7 | * version 2.1 of the License, or (at your option) any later version. | ||
| 8 | * | ||
| 9 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 12 | * Lesser General Public License for more details. | ||
| 13 | * | ||
| 14 | * You should have received a copy of the GNU Lesser General Public | ||
| 15 | * License along with FFmpeg; if not, write to the Free Software | ||
| 16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 17 | */ | ||
| 18 | |||
| 19 | #include <stdatomic.h> | ||
| 20 | #include <stdint.h> | ||
| 21 | #include <string.h> | ||
| 22 | |||
| 23 | #include "refstruct.h" | ||
| 24 | |||
| 25 | #include "avassert.h" | ||
| 26 | #include "error.h" | ||
| 27 | #include "macros.h" | ||
| 28 | #include "mem.h" | ||
| 29 | #include "mem_internal.h" | ||
| 30 | #include "thread.h" | ||
| 31 | |||
| 32 | #ifndef REFSTRUCT_CHECKED | ||
| 33 | #ifndef ASSERT_LEVEL | ||
| 34 | #define ASSERT_LEVEL 0 | ||
| 35 | #endif | ||
| 36 | #define REFSTRUCT_CHECKED (ASSERT_LEVEL >= 1) | ||
| 37 | #endif | ||
| 38 | |||
| 39 | #if REFSTRUCT_CHECKED | ||
| 40 | #define ff_assert(cond) av_assert0(cond) | ||
| 41 | #else | ||
| 42 | #define ff_assert(cond) ((void)0) | ||
| 43 | #endif | ||
| 44 | |||
| 45 | #define REFSTRUCT_COOKIE AV_NE((uint64_t)MKBETAG('R', 'e', 'f', 'S') << 32 | MKBETAG('t', 'r', 'u', 'c'), \ | ||
| 46 | MKTAG('R', 'e', 'f', 'S') | (uint64_t)MKTAG('t', 'r', 'u', 'c') << 32) | ||
| 47 | |||
| 48 | #ifndef _MSC_VER | ||
| 49 | #define REFCOUNT_OFFSET FFALIGN(sizeof(RefCount), FFMAX(ALIGN_64, _Alignof(max_align_t))) | ||
| 50 | #else | ||
| 51 | #define REFCOUNT_OFFSET FFALIGN(sizeof(RefCount), ALIGN_64) | ||
| 52 | #endif | ||
| 53 | |||
| 54 | typedef struct RefCount { | ||
| 55 | /** | ||
| 56 | * An uintptr_t is big enough to hold the address of every reference, | ||
| 57 | * so no overflow can happen when incrementing the refcount as long as | ||
| 58 | * the user does not throw away references. | ||
| 59 | */ | ||
| 60 | atomic_uintptr_t refcount; | ||
| 61 | AVRefStructOpaque opaque; | ||
| 62 | void (*free_cb)(AVRefStructOpaque opaque, void *obj); | ||
| 63 | void (*free)(void *ref); | ||
| 64 | |||
| 65 | #if REFSTRUCT_CHECKED | ||
| 66 | uint64_t cookie; | ||
| 67 | #endif | ||
| 68 | } RefCount; | ||
| 69 | |||
| 70 | 7568417 | static RefCount *get_refcount(void *obj) | |
| 71 | { | ||
| 72 | 7568417 | RefCount *ref = (RefCount*)((char*)obj - REFCOUNT_OFFSET); | |
| 73 | ff_assert(ref->cookie == REFSTRUCT_COOKIE); | ||
| 74 | 7568417 | return ref; | |
| 75 | } | ||
| 76 | |||
| 77 | 306 | static const RefCount *cget_refcount(const void *obj) | |
| 78 | { | ||
| 79 | 306 | const RefCount *ref = (const RefCount*)((const char*)obj - REFCOUNT_OFFSET); | |
| 80 | ff_assert(ref->cookie == REFSTRUCT_COOKIE); | ||
| 81 | 306 | return ref; | |
| 82 | } | ||
| 83 | |||
| 84 | 5839564 | static void *get_userdata(void *buf) | |
| 85 | { | ||
| 86 | 5839564 | return (char*)buf + REFCOUNT_OFFSET; | |
| 87 | } | ||
| 88 | |||
| 89 | 1227683 | static void refcount_init(RefCount *ref, AVRefStructOpaque opaque, | |
| 90 | void (*free_cb)(AVRefStructOpaque opaque, void *obj)) | ||
| 91 | { | ||
| 92 | 1227683 | atomic_init(&ref->refcount, 1); | |
| 93 | 1227683 | ref->opaque = opaque; | |
| 94 | 1227683 | ref->free_cb = free_cb; | |
| 95 | 1227683 | ref->free = av_free; | |
| 96 | |||
| 97 | #if REFSTRUCT_CHECKED | ||
| 98 | ref->cookie = REFSTRUCT_COOKIE; | ||
| 99 | #endif | ||
| 100 | 1227683 | } | |
| 101 | |||
| 102 | 1227683 | void *av_refstruct_alloc_ext_c(size_t size, unsigned flags, AVRefStructOpaque opaque, | |
| 103 | void (*free_cb)(AVRefStructOpaque opaque, void *obj)) | ||
| 104 | { | ||
| 105 | void *buf, *obj; | ||
| 106 | |||
| 107 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 1227683 times. | 1227683 | if (size > SIZE_MAX - REFCOUNT_OFFSET) |
| 108 | ✗ | return NULL; | |
| 109 | 1227683 | buf = av_malloc(size + REFCOUNT_OFFSET); | |
| 110 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 1227683 times. | 1227683 | if (!buf) |
| 111 | ✗ | return NULL; | |
| 112 | 1227683 | refcount_init(buf, opaque, free_cb); | |
| 113 | 1227683 | obj = get_userdata(buf); | |
| 114 |
2/2✓ Branch 0 taken 1223751 times.
✓ Branch 1 taken 3932 times.
|
1227683 | if (!(flags & AV_REFSTRUCT_FLAG_NO_ZEROING)) |
| 115 | 1223751 | memset(obj, 0, size); | |
| 116 | |||
| 117 | 1227683 | return obj; | |
| 118 | } | ||
| 119 | |||
| 120 | 17899617 | void av_refstruct_unref(void *objp) | |
| 121 | { | ||
| 122 | void *obj; | ||
| 123 | RefCount *ref; | ||
| 124 | |||
| 125 | 17899617 | memcpy(&obj, objp, sizeof(obj)); | |
| 126 | 2/2 ✓ Branch 0 taken 11659936 times. ✓ Branch 1 taken 6239681 times. | 17899617 | if (!obj) |
| 127 | 11659936 | return; | |
| 128 | 6239681 | memcpy(objp, &(void *){ NULL }, sizeof(obj)); | |
| 129 | |||
| 130 | 6239681 | ref = get_refcount(obj); | |
| 131 | 2/2 ✓ Branch 0 taken 5693102 times. ✓ Branch 1 taken 546579 times. | 6239681 | if (atomic_fetch_sub_explicit(&ref->refcount, 1, memory_order_acq_rel) == 1) { |
| 132 | 2/2 ✓ Branch 0 taken 2348883 times. ✓ Branch 1 taken 3344219 times. | 5693102 | if (ref->free_cb) |
| 133 | 2348883 | ref->free_cb(ref->opaque, obj); | |
| 134 | 5693102 | ref->free(ref); | |
| 135 | } | ||
| 136 | |||
| 137 | 6239681 | return; | |
| 138 | } | ||
| 139 | |||
| 140 | 81047 | void *av_refstruct_ref(void *obj) | |
| 141 | { | ||
| 142 | 81047 | RefCount *ref = get_refcount(obj); | |
| 143 | |||
| 144 | 81047 | atomic_fetch_add_explicit(&ref->refcount, 1, memory_order_relaxed); | |
| 145 | |||
| 146 | 81047 | return obj; | |
| 147 | } | ||
| 148 | |||
| 149 | 465532 | const void *av_refstruct_ref_c(const void *obj) | |
| 150 | { | ||
| 151 | /* Casting const away here is fine, as it is only supposed | ||
| 152 | * to apply to the user's data and not our bookkeeping data. */ | ||
| 153 | 465532 | RefCount *ref = get_refcount((void*)obj); | |
| 154 | |||
| 155 | 465532 | atomic_fetch_add_explicit(&ref->refcount, 1, memory_order_relaxed); | |
| 156 | |||
| 157 | 465532 | return obj; | |
| 158 | } | ||
| 159 | |||
| 160 | 1758701 | void av_refstruct_replace(void *dstp, const void *src) | |
| 161 | { | ||
| 162 | const void *dst; | ||
| 163 | 1758701 | memcpy(&dst, dstp, sizeof(dst)); | |
| 164 | |||
| 165 | 2/2 ✓ Branch 0 taken 1351004 times. ✓ Branch 1 taken 407697 times. | 1758701 | if (src == dst) |
| 166 | 1351004 | return; | |
| 167 | 407697 | av_refstruct_unref(dstp); | |
| 168 | 2/2 ✓ Branch 0 taken 407692 times. ✓ Branch 1 taken 5 times. | 407697 | if (src) { |
| 169 | 407692 | dst = av_refstruct_ref_c(src); | |
| 170 | 407692 | memcpy(dstp, &dst, sizeof(dst)); | |
| 171 | } | ||
| 172 | } | ||
| 173 | |||
| 174 | 306 | int av_refstruct_exclusive(const void *obj) | |
| 175 | { | ||
| 176 | 306 | const RefCount *ref = cget_refcount(obj); | |
| 177 | /* Casting const away here is safe, because it is a load. | ||
| 178 | * It is necessary because atomic_load_explicit() does not | ||
| 179 | * accept const atomics in C11 (see also N1807). */ | ||
| 180 | 306 | return atomic_load_explicit((atomic_uintptr_t*)&ref->refcount, memory_order_acquire) == 1; | |
| 181 | } | ||
| 182 | |||
| 183 | struct AVRefStructPool { | ||
| 184 | size_t size; | ||
| 185 | AVRefStructOpaque opaque; | ||
| 186 | int (*init_cb)(AVRefStructOpaque opaque, void *obj); | ||
| 187 | void (*reset_cb)(AVRefStructOpaque opaque, void *obj); | ||
| 188 | void (*free_entry_cb)(AVRefStructOpaque opaque, void *obj); | ||
| 189 | void (*free_cb)(AVRefStructOpaque opaque); | ||
| 190 | |||
| 191 | int uninited; | ||
| 192 | unsigned entry_flags; | ||
| 193 | unsigned pool_flags; | ||
| 194 | |||
| 195 | /** The number of outstanding entries not in available_entries. */ | ||
| 196 | atomic_uintptr_t refcount; | ||
| 197 | /** | ||
| 198 | * This is a linked list of available entries; | ||
| 199 | * the RefCount's opaque pointer is used as next pointer | ||
| 200 | * for available entries. | ||
| 201 | * While the entries are in use, the opaque is a pointer | ||
| 202 | * to the corresponding AVRefStructPool. | ||
| 203 | */ | ||
| 204 | RefCount *available_entries; | ||
| 205 | AVMutex mutex; | ||
| 206 | }; | ||
| 207 | |||
| 208 | 46769 | static void pool_free(AVRefStructPool *pool) | |
| 209 | { | ||
| 210 | 46769 | ff_mutex_destroy(&pool->mutex); | |
| 211 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 46769 times. | 46769 | if (pool->free_cb) |
| 212 | ✗ | pool->free_cb(pool->opaque); | |
| 213 | 46769 | av_free(get_refcount(pool)); | |
| 214 | 46769 | } | |
| 215 | |||
| 216 | 688619 | static void pool_free_entry(AVRefStructPool *pool, RefCount *ref) | |
| 217 | { | ||
| 218 | 2/2 ✓ Branch 0 taken 99693 times. ✓ Branch 1 taken 588926 times. | 688619 | if (pool->free_entry_cb) |
| 219 | 99693 | pool->free_entry_cb(pool->opaque, get_userdata(ref)); | |
| 220 | 688619 | av_free(ref); | |
| 221 | 688619 | } | |
| 222 | |||
| 223 | 5154038 | static void pool_return_entry(void *ref_) | |
| 224 | { | ||
| 225 | 5154038 | RefCount *ref = ref_; | |
| 226 | 5154038 | AVRefStructPool *pool = ref->opaque.nc; | |
| 227 | |||
| 228 | 5154038 | ff_mutex_lock(&pool->mutex); | |
| 229 | 2/2 ✓ Branch 0 taken 5133628 times. ✓ Branch 1 taken 20410 times. | 5154038 | if (!pool->uninited) { |
| 230 | 5133628 | ref->opaque.nc = pool->available_entries; | |
| 231 | 5133628 | pool->available_entries = ref; | |
| 232 | 5133628 | ref = NULL; | |
| 233 | } | ||
| 234 | 5154038 | ff_mutex_unlock(&pool->mutex); | |
| 235 | |||
| 236 | 2/2 ✓ Branch 0 taken 20410 times. ✓ Branch 1 taken 5133628 times. | 5154038 | if (ref) |
| 237 | 20410 | pool_free_entry(pool, ref); | |
| 238 | |||
| 239 | 2/2 ✓ Branch 0 taken 5545 times. ✓ Branch 1 taken 5148493 times. | 5154038 | if (atomic_fetch_sub_explicit(&pool->refcount, 1, memory_order_acq_rel) == 1) |
| 240 | 5545 | pool_free(pool); | |
| 241 | 5154038 | } | |
| 242 | |||
| 243 | 1840047 | static void pool_reset_entry(AVRefStructOpaque opaque, void *entry) | |
| 244 | { | ||
| 245 | 1840047 | AVRefStructPool *pool = opaque.nc; | |
| 246 | |||
| 247 | 1840047 | pool->reset_cb(pool->opaque, entry); | |
| 248 | 1840047 | } | |
| 249 | |||
| 250 | 5154038 | static int refstruct_pool_get_ext(void *datap, AVRefStructPool *pool) | |
| 251 | { | ||
| 252 | 5154038 | void *ret = NULL; | |
| 253 | |||
| 254 | 5154038 | memcpy(datap, &(void *){ NULL }, sizeof(void*)); | |
| 255 | |||
| 256 | 5154038 | ff_mutex_lock(&pool->mutex); | |
| 257 | ff_assert(!pool->uninited); | ||
| 258 | 2/2 ✓ Branch 0 taken 4465419 times. ✓ Branch 1 taken 688619 times. | 5154038 | if (pool->available_entries) { |
| 259 | 4465419 | RefCount *ref = pool->available_entries; | |
| 260 | 4465419 | ret = get_userdata(ref); | |
| 261 | 4465419 | pool->available_entries = ref->opaque.nc; | |
| 262 | 4465419 | ref->opaque.nc = pool; | |
| 263 | 4465419 | atomic_init(&ref->refcount, 1); | |
| 264 | } | ||
| 265 | 5154038 | ff_mutex_unlock(&pool->mutex); | |
| 266 | |||
| 267 | 2/2 ✓ Branch 0 taken 688619 times. ✓ Branch 1 taken 4465419 times. | 5154038 | if (!ret) { |
| 268 | RefCount *ref; | ||
| 269 | 688619 | ret = av_refstruct_alloc_ext(pool->size, pool->entry_flags, pool, | |
| 270 | 2/2 ✓ Branch 0 taken 99693 times. ✓ Branch 1 taken 588926 times. | 688619 | pool->reset_cb ? pool_reset_entry : NULL); |
| 271 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 688619 times. | 688619 | if (!ret) |
| 272 | ✗ | return AVERROR(ENOMEM); | |
| 273 | 688619 | ref = get_refcount(ret); | |
| 274 | 688619 | ref->free = pool_return_entry; | |
| 275 | 2/2 ✓ Branch 0 taken 99693 times. ✓ Branch 1 taken 588926 times. | 688619 | if (pool->init_cb) { |
| 276 | 99693 | int err = pool->init_cb(pool->opaque, ret); | |
| 277 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 99693 times. | 99693 | if (err < 0) { |
| 278 | ✗ | if (pool->pool_flags & AV_REFSTRUCT_POOL_FLAG_RESET_ON_INIT_ERROR) | |
| 279 | ✗ | pool->reset_cb(pool->opaque, ret); | |
| 280 | ✗ | if (pool->pool_flags & AV_REFSTRUCT_POOL_FLAG_FREE_ON_INIT_ERROR) | |
| 281 | ✗ | pool->free_entry_cb(pool->opaque, ret); | |
| 282 | ✗ | av_free(ref); | |
| 283 | ✗ | return err; | |
| 284 | } | ||
| 285 | } | ||
| 286 | } | ||
| 287 | 5154038 | atomic_fetch_add_explicit(&pool->refcount, 1, memory_order_relaxed); | |
| 288 | |||
| 289 | 2/2 ✓ Branch 0 taken 43728 times. ✓ Branch 1 taken 5110310 times. | 5154038 | if (pool->pool_flags & AV_REFSTRUCT_POOL_FLAG_ZERO_EVERY_TIME) |
| 290 | 43728 | memset(ret, 0, pool->size); | |
| 291 | |||
| 292 | 5154038 | memcpy(datap, &ret, sizeof(ret)); | |
| 293 | |||
| 294 | 5154038 | return 0; | |
| 295 | } | ||
| 296 | |||
| 297 | 5154038 | void *av_refstruct_pool_get(AVRefStructPool *pool) | |
| 298 | { | ||
| 299 | void *ret; | ||
| 300 | 5154038 | refstruct_pool_get_ext(&ret, pool); | |
| 301 | 5154038 | return ret; | |
| 302 | } | ||
| 303 | |||
| 304 | /** | ||
| 305 | * Hint: The content of pool_unref() and refstruct_pool_uninit() | ||
| 306 | * could currently be merged; they are only separate functions | ||
| 307 | * in case we would ever introduce weak references. | ||
| 308 | */ | ||
| 309 | 46769 | static void pool_unref(void *ref) | |
| 310 | { | ||
| 311 | 46769 | AVRefStructPool *pool = get_userdata(ref); | |
| 312 | 2/2 ✓ Branch 0 taken 41224 times. ✓ Branch 1 taken 5545 times. | 46769 | if (atomic_fetch_sub_explicit(&pool->refcount, 1, memory_order_acq_rel) == 1) |
| 313 | 41224 | pool_free(pool); | |
| 314 | 46769 | } | |
| 315 | |||
| 316 | 46769 | static void refstruct_pool_uninit(AVRefStructOpaque unused, void *obj) | |
| 317 | { | ||
| 318 | 46769 | AVRefStructPool *pool = obj; | |
| 319 | RefCount *entry; | ||
| 320 | |||
| 321 | 46769 | ff_mutex_lock(&pool->mutex); | |
| 322 | ff_assert(!pool->uninited); | ||
| 323 | 46769 | pool->uninited = 1; | |
| 324 | 46769 | entry = pool->available_entries; | |
| 325 | 46769 | pool->available_entries = NULL; | |
| 326 | 46769 | ff_mutex_unlock(&pool->mutex); | |
| 327 | |||
| 328 | 2/2 ✓ Branch 0 taken 668209 times. ✓ Branch 1 taken 46769 times. | 714978 | while (entry) { |
| 329 | 668209 | void *next = entry->opaque.nc; | |
| 330 | 668209 | pool_free_entry(pool, entry); | |
| 331 | 668209 | entry = next; | |
| 332 | } | ||
| 333 | 46769 | } | |
| 334 | |||
| 335 | 9070 | AVRefStructPool *av_refstruct_pool_alloc(size_t size, unsigned flags) | |
| 336 | { | ||
| 337 | 9070 | return av_refstruct_pool_alloc_ext(size, flags, NULL, NULL, NULL, NULL, NULL); | |
| 338 | } | ||
| 339 | |||
| 340 | 46769 | AVRefStructPool *av_refstruct_pool_alloc_ext_c(size_t size, unsigned flags, | |
| 341 | AVRefStructOpaque opaque, | ||
| 342 | int (*init_cb)(AVRefStructOpaque opaque, void *obj), | ||
| 343 | void (*reset_cb)(AVRefStructOpaque opaque, void *obj), | ||
| 344 | void (*free_entry_cb)(AVRefStructOpaque opaque, void *obj), | ||
| 345 | void (*free_cb)(AVRefStructOpaque opaque)) | ||
| 346 | { | ||
| 347 | 46769 | AVRefStructPool *pool = av_refstruct_alloc_ext(sizeof(*pool), 0, NULL, | |
| 348 | refstruct_pool_uninit); | ||
| 349 | int err; | ||
| 350 | |||
| 351 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 46769 times. | 46769 | if (!pool) |
| 352 | ✗ | return NULL; | |
| 353 | 46769 | get_refcount(pool)->free = pool_unref; | |
| 354 | |||
| 355 | 46769 | pool->size = size; | |
| 356 | 46769 | pool->opaque = opaque; | |
| 357 | 46769 | pool->init_cb = init_cb; | |
| 358 | 46769 | pool->reset_cb = reset_cb; | |
| 359 | 46769 | pool->free_entry_cb = free_entry_cb; | |
| 360 | 46769 | pool->free_cb = free_cb; | |
| 361 | #define COMMON_FLAGS AV_REFSTRUCT_POOL_FLAG_NO_ZEROING | ||
| 362 | 46769 | pool->entry_flags = flags & COMMON_FLAGS; | |
| 363 | // Filter out nonsense combinations to avoid checks later. | ||
| 364 | 2/2 ✓ Branch 0 taken 9070 times. ✓ Branch 1 taken 37699 times. | 46769 | if (!pool->reset_cb) |
| 365 | 9070 | flags &= ~AV_REFSTRUCT_POOL_FLAG_RESET_ON_INIT_ERROR; | |
| 366 | 2/2 ✓ Branch 0 taken 9070 times. ✓ Branch 1 taken 37699 times. | 46769 | if (!pool->free_entry_cb) |
| 367 | 9070 | flags &= ~AV_REFSTRUCT_POOL_FLAG_FREE_ON_INIT_ERROR; | |
| 368 | 46769 | pool->pool_flags = flags; | |
| 369 | |||
| 370 | 2/2 ✓ Branch 0 taken 1565 times. ✓ Branch 1 taken 45204 times. | 46769 | if (flags & AV_REFSTRUCT_POOL_FLAG_ZERO_EVERY_TIME) { |
| 371 | // We will zero the buffer before every use, so zeroing | ||
| 372 | // upon allocating the buffer is unnecessary. | ||
| 373 | 1565 | pool->entry_flags |= AV_REFSTRUCT_FLAG_NO_ZEROING; | |
| 374 | } | ||
| 375 | |||
| 376 | 46769 | atomic_init(&pool->refcount, 1); | |
| 377 | |||
| 378 | 46769 | err = ff_mutex_init(&pool->mutex, NULL); | |
| 379 | 1/2 ✗ Branch 0 not taken. ✓ Branch 1 taken 46769 times. | 46769 | if (err) { |
| 380 | // Don't call av_refstruct_uninit() on pool, as it hasn't been properly | ||
| 381 | // set up and is just a POD right now. | ||
| 382 | ✗ | av_free(get_refcount(pool)); | |
| 383 | ✗ | return NULL; | |
| 384 | } | ||
| 385 | 46769 | return pool; | |
| 386 | } | ||
| 387 |||
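
The sketch below exercises the API whose implementation is covered above. It is an illustration, not part of the covered file: the `MyObj` type is hypothetical, the `libavutil/refstruct.h` include path is assumed, and error handling is minimal. It relies only on behaviour visible in the listing: allocation yields a zeroed object with a reference count of one, `av_refstruct_unref()` nulls the caller's pointer and runs the `free_cb` once the last reference is dropped, and a pool is itself a refstruct object, so it can be released the same way.

```c
#include <stdio.h>

#include "libavutil/refstruct.h"        /* assumed install path of refstruct.h */

typedef struct MyObj {
    int payload;                        /* hypothetical user data */
} MyObj;

/* Runs exactly once, just before the final reference frees the object. */
static void myobj_free_cb(AVRefStructOpaque opaque, void *obj)
{
    printf("releasing payload %d\n", ((MyObj *)obj)->payload);
}

int main(void)
{
    /* Plain refcounted allocation: zeroed memory, reference count of 1. */
    MyObj *a = av_refstruct_alloc_ext(sizeof(*a), 0, NULL, myobj_free_cb);
    if (!a)
        return 1;
    a->payload = 42;

    MyObj *b = av_refstruct_ref(a);     /* second reference: relaxed atomic increment */

    av_refstruct_unref(&a);             /* a becomes NULL; object stays alive via b */
    printf("exclusive: %d\n", av_refstruct_exclusive(b));   /* prints 1 */
    av_refstruct_unref(&b);             /* last reference: free_cb runs, memory is freed */

    /* Pooled allocation: returned entries go back on the pool's free list. */
    AVRefStructPool *pool = av_refstruct_pool_alloc(sizeof(MyObj), 0);
    if (!pool)
        return 1;
    MyObj *entry = av_refstruct_pool_get(pool);
    if (entry)
        av_refstruct_unref(&entry);     /* recycled, not freed */
    av_refstruct_unref(&pool);          /* the pool itself is a refstruct object */

    return 0;
}
```

The `av_refstruct_alloc_ext()` call mirrors the pattern the listing itself uses when allocating the pool (a NULL opaque plus a free callback); the `_c` variants shown in the listing take an `AVRefStructOpaque` argument directly.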