2020-03-05 19:44:09 +00:00
|
|
|
#pragma once
|
|
|
|
|
2021-04-09 15:24:47 +00:00
|
|
|
#include <assert.h>
|
|
|
|
#include <stdlib.h>
|
2021-09-12 14:04:52 +00:00
|
|
|
#include <string.h>
|
2021-09-13 09:29:04 +00:00
|
|
|
#include "../GL/platform.h"
|
2018-05-05 19:38:55 +00:00
|
|
|
|
2018-05-28 07:52:44 +00:00
|
|
|
#ifdef __cplusplus
|
|
|
|
extern "C" {
|
|
|
|
#endif
|
|
|
|
|
2021-09-12 14:04:52 +00:00
|
|
|
#if defined(__APPLE__) || defined(__WIN32__)
/* Linux + Kos define this, OSX does not, so just use malloc there */
/* NOTE(review): this fallback ignores `alignment` entirely — allocations
 * get only malloc's default alignment, not the 0x20 requested by
 * aligned_vector_reserve(). Confirm that 32-byte alignment is a
 * performance hint rather than a correctness requirement on these
 * platforms before relying on it. */
static inline void* memalign(size_t alignment, size_t size) {
    return malloc(size);
}
#else
/* Everywhere else (Linux, KallistiOS) memalign is provided by malloc.h */
#include <malloc.h>
#endif
|
|
|
|
|
2018-05-05 19:38:55 +00:00
|
|
|
/* A growable array of fixed-size elements backed by a single contiguous
 * buffer allocated with memalign(0x20, ...). Capacity grows in
 * ALIGNED_VECTOR_CHUNK_SIZE-element steps (see aligned_vector_reserve). */
typedef struct {
    unsigned int size;          /* Number of elements currently stored */
    unsigned int capacity;      /* Number of elements the buffer can hold */
    unsigned char* data;        /* Element storage (memalign'd, 0x20 requested) */
    unsigned int element_size;  /* Size of one element, in bytes */
} AlignedVector;
|
|
|
|
|
2019-03-13 07:28:23 +00:00
|
|
|
/* Capacity is always rounded up to a multiple of this many elements so
 * repeated push-backs do not reallocate on every call. */
#define ALIGNED_VECTOR_CHUNK_SIZE 256u

#ifdef __cplusplus
#define AV_FORCE_INLINE static inline
#else
/* C builds: exclude these helpers from -finstrument-functions profiling
 * and force inlining regardless of optimisation level. */
#define AV_NO_INSTRUMENT inline __attribute__((no_instrument_function))
#define AV_INLINE_DEBUG AV_NO_INSTRUMENT __attribute__((always_inline))
#define AV_FORCE_INLINE static AV_INLINE_DEBUG
#endif

/* Round v up to the next multiple of ALIGNED_VECTOR_CHUNK_SIZE */
#define ROUND_TO_CHUNK_SIZE(v) \
    ((((v) + ALIGNED_VECTOR_CHUNK_SIZE - 1) / ALIGNED_VECTOR_CHUNK_SIZE) * ALIGNED_VECTOR_CHUNK_SIZE)
|
|
|
|
|
|
|
|
|
2018-05-26 18:17:42 +00:00
|
|
|
/* Initialise `vector` to hold elements of `element_size` bytes each
 * (implementation lives in the companion .c file). */
void aligned_vector_init(AlignedVector* vector, unsigned int element_size);
|
2021-09-12 14:04:52 +00:00
|
|
|
|
|
|
|
/* Ensure capacity for at least `element_count` elements.
 * Returns NULL when the vector already had enough capacity; otherwise
 * reallocates (rounded up to a whole chunk), moves the existing elements
 * over, and returns a pointer to the first byte past them in the new
 * buffer. Note: `size` is not changed, only `capacity`. */
AV_FORCE_INLINE void* aligned_vector_reserve(AlignedVector* vector, unsigned int element_count) {
    if(element_count <= vector->capacity) {
        return NULL;
    }

    const unsigned int used_bytes = vector->size * vector->element_size;
    unsigned char* const old_data = vector->data;

    /* Overallocate in whole chunks so push backs rarely reallocate */
    const unsigned int new_capacity = ROUND_TO_CHUNK_SIZE(element_count);

    unsigned char* new_data = (unsigned char*) memalign(0x20, new_capacity * vector->element_size);
    assert(new_data);

    if(old_data) {
        FASTCPY(new_data, old_data, used_bytes);
        free(old_data);
    }

    vector->data = new_data;
    vector->capacity = new_capacity;

    return new_data + used_bytes;
}
|
2021-04-20 15:49:00 +00:00
|
|
|
|
|
|
|
AV_FORCE_INLINE void* aligned_vector_at(const AlignedVector* vector, const unsigned int index) {
|
2020-03-05 19:44:09 +00:00
|
|
|
assert(index < vector->size);
|
|
|
|
return &vector->data[index * vector->element_size];
|
|
|
|
}
|
2021-09-12 14:04:52 +00:00
|
|
|
|
|
|
|
/* Set the element count to `element_count`, growing storage if required.
 * Returns a pointer to the first newly-exposed element when the vector
 * grew, NULL when it stayed the same size or shrank.
 *
 * Fix: the original called aligned_vector_at(vector, previousCount)
 * BEFORE updating vector->size, so the bounds assert inside
 * aligned_vector_at() evaluated `previousCount < previousCount` and
 * fired on every in-capacity grow in debug builds. We now update `size`
 * first in that branch. (It must still be updated AFTER
 * aligned_vector_reserve(), which copies `size * element_size` bytes
 * of the old buffer.) Release behaviour is unchanged. */
AV_FORCE_INLINE void* aligned_vector_resize(AlignedVector* vector, const unsigned int element_count) {
    void* ret = NULL;

    unsigned int previousCount = vector->size;

    if(vector->capacity < element_count) {
        /* If we didn't have capacity, increase capacity (slow) */
        ret = aligned_vector_reserve(vector, element_count);
        vector->size = element_count;
    } else if(previousCount < element_count) {
        /* So we grew, but had the capacity; commit the new size, then
         * take a pointer to where the old data ended. */
        vector->size = element_count;
        ret = aligned_vector_at(vector, previousCount);
    } else {
        /* Same size or shrinking: nothing newly exposed. */
        vector->size = element_count;
    }

    return ret;
}
|
|
|
|
|
|
|
|
/* Append `count` elements copied from `objs`; returns a pointer to the
 * first appended element inside the vector. */
AV_FORCE_INLINE void* aligned_vector_push_back(AlignedVector* vector, const void* objs, unsigned int count) {
    assert(count);
    assert(vector->element_size);

    const unsigned int old_size = vector->size;

    /* Make room for the incoming elements */
    aligned_vector_resize(vector, old_size + count);
    assert(vector->size == old_size + count);

    /* Copy them in just past the old end */
    unsigned char* dest = vector->data + (vector->element_size * old_size);
    FASTCPY(dest, objs, vector->element_size * count);

    return dest;
}
|
|
|
|
|
|
|
|
|
|
|
|
/* Grow the vector by `additional_count` elements; returns a pointer to
 * the first new element (NULL if additional_count is 0). */
AV_FORCE_INLINE void* aligned_vector_extend(AlignedVector* vector, const unsigned int additional_count) {
    const unsigned int target = vector->size + additional_count;
    return aligned_vector_resize(vector, target);
}
|
2021-04-20 15:49:00 +00:00
|
|
|
|
|
|
|
/* Logically empty the vector. Capacity and the allocation are kept so
 * the buffer can be refilled without reallocating. */
AV_FORCE_INLINE void aligned_vector_clear(AlignedVector* vector){
    vector->size = 0;
}
|
2018-05-26 18:17:42 +00:00
|
|
|
/* Reduce the allocation to match the current size — presumably mirrors
 * C++ shrink_to_fit; see the companion .c file for exact semantics. */
void aligned_vector_shrink_to_fit(AlignedVector* vector);

/* Release the vector's backing storage (defined in the companion .c). */
void aligned_vector_cleanup(AlignedVector* vector);
|
2020-03-05 19:44:09 +00:00
|
|
|
static inline void* aligned_vector_back(AlignedVector* vector){
|
|
|
|
return aligned_vector_at(vector, vector->size - 1);
|
|
|
|
}
|
2018-05-05 19:38:55 +00:00
|
|
|
|
2018-05-28 07:52:44 +00:00
|
|
|
#ifdef __cplusplus
|
|
|
|
}
|
|
|
|
#endif
|