#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <assert.h>
#include <stdio.h>

#if defined(__APPLE__) || defined(__WIN32__)
/* Linux + Kos define this, OSX does not, so just use malloc there */
static inline void* memalign(size_t alignment, size_t size) {
    return malloc(size);
}
#else
#include <malloc.h>
#endif

#include "aligned_vector.h"

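/* Initialise the vector for elements of `element_size` bytes and reserve an
 * initial chunk of capacity so early push backs don't allocate repeatedly. */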
void aligned_vector_init(AlignedVector* vector, unsigned int element_size) {
    vector->size = vector->capacity = 0;
    vector->element_size = element_size;
    vector->data = NULL;

    /* Reserve some initial capacity */
    aligned_vector_reserve(vector, ALIGNED_VECTOR_CHUNK_SIZE);
}

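/* Round `val` up to the next multiple of ALIGNED_VECTOR_CHUNK_SIZE */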
static inline unsigned int round_to_chunk_size(unsigned int val) {
    const unsigned int n = val;
    const unsigned int m = ALIGNED_VECTOR_CHUNK_SIZE;

    return ((n + m - 1) / m) * m;
}

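/* Grow the underlying allocation to hold at least `element_count` elements.
 * The requested count is rounded up to a chunk boundary, the existing data is
 * copied into a fresh 32-byte aligned block, and the old block is freed.
 * Never shrinks, and never changes vector->size. */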
void aligned_vector_reserve(AlignedVector* vector, unsigned int element_count) {
    if(element_count == 0) {
        return;
    }

    if(element_count <= vector->capacity) {
        return;
    }

    unsigned int original_byte_size = vector->size * vector->element_size;

    /* We overallocate so that we don't make small allocations during push backs */
    element_count = round_to_chunk_size(element_count);

    unsigned int new_byte_size = element_count * vector->element_size;
    unsigned char* original_data = vector->data;

    vector->data = (unsigned char*) memalign(0x20, new_byte_size);
    assert(vector->data);

    if(original_data) {
        memcpy(vector->data, original_data, original_byte_size);
        free(original_data);
    }

    vector->capacity = element_count;
}

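/* Append `count` elements copied from `objs` and return a pointer to the
 * first of the newly added elements inside the vector. */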
void* aligned_vector_push_back(AlignedVector* vector, const void* objs, unsigned int count) {
    /* Resize to make enough room */
    assert(count);
    assert(vector->element_size);

    unsigned int initial_size = vector->size;
    aligned_vector_resize(vector, vector->size + count);

    assert(vector->size == initial_size + count);

    unsigned char* dest = vector->data + (vector->element_size * initial_size);

    /* Copy the objects in */
    memcpy(dest, objs, vector->element_size * count);

    return dest;
}

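/* Change the element count. Shrinking only adjusts the size; growing reserves
 * capacity if required and returns a pointer to the first newly added element
 * (the new elements are left uninitialised). Returns NULL when not growing. */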
void* aligned_vector_resize(AlignedVector* vector, const unsigned int element_count) {
    unsigned int previousCount = vector->size;

    /* Don't change memory when resizing downwards, just change the size */
    if(element_count <= vector->size) {
        vector->size = element_count;
        return NULL;
    }

    if(vector->capacity < element_count) {
        aligned_vector_reserve(vector, element_count);
    }

    vector->size = element_count;

    if(previousCount < vector->size) {
        return aligned_vector_at(vector, previousCount);
    } else {
        return NULL;
    }
}

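/* Return a pointer to the element at `index` (asserts that it is in range) */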
void* aligned_vector_at(const AlignedVector* vector, const unsigned int index) {
    assert(index < vector->size);
    return &vector->data[index * vector->element_size];
}

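/* Return a pointer to the last element (the vector must not be empty) */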
void* aligned_vector_back(AlignedVector* vector) {
    return aligned_vector_at(vector, vector->size - 1);
}

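/* Grow the vector by `additional_count` uninitialised elements and return a
 * pointer to the first of them. */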
void* aligned_vector_extend(AlignedVector* vector, const unsigned int additional_count) {
    const unsigned int current = vector->size;
    aligned_vector_resize(vector, vector->size + additional_count);
    return aligned_vector_at(vector, current);
}

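/* Drop all elements without releasing the underlying allocation */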
void aligned_vector_clear(AlignedVector* vector) {
    vector->size = 0;
}

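/* Release unused capacity: free everything if the vector is empty, otherwise
 * reallocate to exactly `size` elements (again 32-byte aligned). */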
void aligned_vector_shrink_to_fit(AlignedVector* vector) {
    if(vector->size == 0) {
        free(vector->data);
        vector->data = NULL;
        vector->capacity = 0;
    } else {
        unsigned int new_byte_size = vector->size * vector->element_size;
        unsigned char* original_data = vector->data;
        vector->data = (unsigned char*) memalign(0x20, new_byte_size);

        if(original_data) {
            memcpy(vector->data, original_data, new_byte_size);
            free(original_data);
        }

        vector->capacity = vector->size;
    }
}

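/* Clear the vector and free its storage, leaving it empty with no allocation */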
void aligned_vector_cleanup(AlignedVector* vector) {
    aligned_vector_clear(vector);
    aligned_vector_shrink_to_fit(vector);
}
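
/* Illustrative usage sketch, kept as a comment so it is not compiled. The
 * `Vertex` type here is a hypothetical caller-side struct, not part of this
 * file:
 *
 *     AlignedVector vertices;
 *     aligned_vector_init(&vertices, sizeof(Vertex));
 *
 *     Vertex v = { 0 };
 *     aligned_vector_push_back(&vertices, &v, 1);
 *
 *     Vertex* first = (Vertex*) aligned_vector_at(&vertices, 0);
 *     (void) first;
 *
 *     aligned_vector_cleanup(&vertices);
 */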