added texture/animation streaming (to-do: fix textures for OpenGL), serendipitously fixed animations when loading from a graph

This commit is contained in:
ecker 2025-08-14 19:09:58 -05:00
parent eadf732ee5
commit 9ec35a26e6
13 changed files with 238 additions and 32 deletions

View File

@ -79,7 +79,7 @@
"tag": "worldspawn",
"player": "info_player_spawn",
"enabled": "auto",
"radius": 16,
"radius": 32,
"every": 4
}
}

View File

@ -7,6 +7,7 @@
#include <uf/utils/renderer/renderer.h>
#include <uf/utils/memory/unordered_map.h>
#include <uf/utils/memory/key_map.h>
#include <uf/utils/memory/queue.h>
#include <queue>
@ -46,7 +47,11 @@ namespace pod {
uf::stl::vector<uf::stl::string> skins;
uf::stl::vector<uf::stl::string> animations;
// Animation queue
std::queue<uf::stl::string> sequence;
uf::stl::queue<uf::stl::string> sequence;
// Streaming stuff
uf::stl::unordered_map<uf::stl::string, uf::stl::string> buffer_paths; // probably will go unused since cramming it all in here is pain
struct {
struct {
bool loop = true;
@ -59,11 +64,16 @@ namespace pod {
uf::stl::string target = "";
} animations;
struct {
bool enabled = false;
float radius = 64.0f;
float every = 4.0f;
bool textures = true;
bool animations = true;
uf::stl::string tag = "worldspawn";
uf::stl::string player = "info_player_spawn";
@ -157,7 +167,14 @@ namespace uf {
void UF_API destroy( uf::Object&, bool soft = false );
void UF_API destroy( pod::Graph::Storage&, bool soft = false );
pod::Graph UF_API load( const uf::stl::string&, const uf::Serializer& = ext::json::null() );
void UF_API load( pod::Graph&, const uf::stl::string&, const uf::Serializer& = ext::json::null() );
// Backward-compatible copy-returning overload of load().
// Deprecated: prefer the out-parameter overload above — this one constructs a
// temporary pod::Graph and hands it back by value, costing an extra
// copy/move + dealloc of the whole graph (the TODO asked for this warning).
// @param filename path of the graph file to load
// @param metadata optional serializer metadata forwarded to the real loader
// @return the loaded graph by value
[[deprecated("use uf::graph::load(pod::Graph&, const uf::stl::string&, const uf::Serializer&) to avoid a graph copy + dealloc")]]
inline pod::Graph load( const uf::stl::string& filename, const uf::Serializer& metadata = ext::json::null() ) {
    pod::Graph graph;
    load( graph, filename, metadata );
    return graph;
}
pod::Graph& UF_API convert( uf::Object&, bool = false );
uf::stl::string UF_API save( const pod::Graph&, const uf::stl::string& );

View File

@ -72,6 +72,7 @@ namespace pod {
};
uf::stl::string name = "";
uf::stl::string path = "";
uf::stl::vector<Sampler> samplers;
uf::stl::vector<Channel> channels;

View File

@ -9,7 +9,13 @@
namespace ext {
namespace gltf {
pod::Graph UF_API load( const uf::stl::string&, const uf::Serializer& = {} );
void UF_API load( pod::Graph&, const uf::stl::string&, const uf::Serializer& = {} );
// Backward-compatible copy-returning overload of ext::gltf::load().
// Deprecated: prefer the out-parameter overload above — this one constructs a
// temporary pod::Graph and returns it by value, costing an extra
// copy/move + dealloc of the whole graph (the TODO asked for this warning).
// @param filename path of the .glb/.gltf file to load
// @param metadata optional serializer metadata forwarded to the real loader
// @return the loaded graph by value
[[deprecated("use ext::gltf::load(pod::Graph&, const uf::stl::string&, const uf::Serializer&) to avoid a graph copy + dealloc")]]
inline pod::Graph load( const uf::stl::string& filename, const uf::Serializer& metadata = ext::json::null() ) {
    pod::Graph graph;
    load( graph, filename, metadata );
    return graph;
}
void UF_API save( const uf::stl::string&, const pod::Graph& );
}
}

View File

@ -53,6 +53,7 @@ namespace ext {
enums::Format::type_t DefaultFormat = enums::Format::R8G8B8A8_UNORM;
Device* device = nullptr;
bool aliased = false;
GLuint image = GL_NULL_HANDLE;
enums::Image::type_t type = enums::Image::TYPE_2D;
@ -81,7 +82,7 @@ namespace ext {
void initialize( Device& device, enums::Image::viewType_t, size_t width, size_t height, size_t depth = 1, size_t layers = 1 );
#endif
void updateDescriptors();
void destroy();
void destroy( bool = false );
bool generated() const;
void loadFromFile(
const uf::stl::string& filename,

View File

@ -33,6 +33,7 @@ namespace uf {
void loadFromBuffer( const Image::container_t& container, const pod::Vector2ui& size, std::size_t bpp, std::size_t channels, bool flip = false );
uf::stl::string getFilename() const;
void setFilename( const uf::stl::string& );
Image::container_t& getPixels();
const Image::container_t& getPixels() const;

View File

@ -158,7 +158,7 @@ namespace uf {
} bounds;
*/
uf::stl::vector<buffer_t> buffers;
uf::stl::vector<uf::stl::string> buffer_paths; // crunge
uf::stl::vector<uf::stl::string> buffer_paths; // crunge, but it's better this way
protected:
void _destroy( uf::Mesh::Input& input );
void _bind( bool interleaved = uf::Mesh::defaultInterleaved );

View File

@ -260,7 +260,8 @@ uf::stl::string uf::asset::load( uf::asset::Payload& payload ) {
case uf::asset::Type::GRAPH: {
UF_ASSET_REGISTER(pod::Graph)
asset = uf::graph::load( filename, payload.metadata );
// asset = uf::graph::load( filename, payload.metadata );
uf::graph::load( asset, filename, payload.metadata );
uf::graph::process( asset );
#if !UF_ENV_DREAMCAST

View File

@ -36,7 +36,11 @@ namespace {
const uf::stl::string directory = uf::io::directory( graph.name );
const uf::stl::string filename = uf::io::filename( json["filename"].as<uf::stl::string>() );
const uf::stl::string name = directory + "/" + filename;
image.open(name, false);
if ( graph.settings.stream.textures ) {
image.setFilename(name);
} else {
image.open(name, false);
}
} else {
auto size = uf::vector::decode( json["size"], pod::Vector2ui{} );
size_t bpp = json["bpp"].as<size_t>();
@ -245,7 +249,7 @@ namespace {
#else
if ( graph.metadata["stream"]["enabled"].as<bool>() ) {
mesh.buffers.emplace_back();
mesh.buffer_paths.emplace_back(directory + "/" + filename );
mesh.buffer_paths.emplace_back(directory + "/" + filename);
} else {
// to-do: make it work for interleaved meshes
mesh.buffers.emplace_back(uf::io::readAsBuffer( directory + "/" + filename ));
@ -335,13 +339,14 @@ namespace {
}
}
pod::Graph uf::graph::load( const uf::stl::string& filename, const uf::Serializer& metadata ) {
void uf::graph::load( pod::Graph& graph, const uf::stl::string& filename, const uf::Serializer& metadata ) {
const uf::stl::string extension = uf::io::extension( filename );
#if UF_USE_GLTF
if ( extension == "glb" || extension == "gltf" ) return ext::gltf::load( filename, metadata );
if ( extension == "glb" || extension == "gltf" ) {
return ext::gltf::load( graph, filename, metadata );
}
#endif
const uf::stl::string directory = uf::io::directory( filename ) + "/";
pod::Graph graph;
uf::Serializer serializer;
UF_DEBUG_TIMER_MULTITRACE_START("Reading {}", filename);
serializer.readFromFile( filename );
@ -395,6 +400,10 @@ pod::Graph uf::graph::load( const uf::stl::string& filename, const uf::Serialize
// copy important settings
{
graph.settings.stream.enabled = graph.metadata["stream"]["enabled"].as(graph.settings.stream.enabled);
graph.settings.stream.textures = graph.settings.stream.enabled && graph.metadata["stream"]["textures"].as(graph.settings.stream.textures);
graph.settings.stream.animations = graph.settings.stream.enabled && graph.metadata["stream"]["animations"].as(graph.settings.stream.animations);
graph.settings.stream.radius = graph.metadata["stream"]["radius"].as(graph.settings.stream.radius);
graph.settings.stream.every = graph.metadata["stream"]["every"].as(graph.settings.stream.every);
@ -565,18 +574,29 @@ pod::Graph uf::graph::load( const uf::stl::string& filename, const uf::Serialize
/*graph.storage*/storage.animations.map.reserve( serializer["animations"].size() );
ext::json::forEach( serializer["animations"], [&]( ext::json::Value& value ){
if ( value.is<uf::stl::string>() ) {
auto path = directory + "/" + value.as<uf::stl::string>();
uf::Serializer json;
json.readFromFile( directory + "/" + value.as<uf::stl::string>() );
json.readFromFile( path );
auto name = key + json["name"].as<uf::stl::string>();
/*graph.storage*/storage.animations[name] = decodeAnimation( json, graph );
if ( graph.settings.stream.animations ) {
/*graph.storage*/storage.animations[name].path = path;
} else {
/*graph.storage*/storage.animations[name] = decodeAnimation( json, graph );
}
graph.animations.emplace_back(name);
} else {
// UF_MSG_DEBUG("{}", name);
if ( value["filename"].is<uf::stl::string>() ) {
auto path = directory + "/" + value["filename"].as<uf::stl::string>();
uf::Serializer json;
json.readFromFile( directory + "/" + value["filename"].as<uf::stl::string>() );
json.readFromFile( path );
auto name = key + json["name"].as<uf::stl::string>();
/*graph.storage*/storage.animations[name] = decodeAnimation( json, graph );
if ( graph.settings.stream.animations ) {
/*graph.storage*/storage.animations[name].path = path;
} else {
/*graph.storage*/storage.animations[name] = decodeAnimation( json, graph );
}
graph.animations.emplace_back(name);
} else {
auto name = key + value["name"].as<uf::stl::string>();
@ -631,6 +651,4 @@ pod::Graph uf::graph::load( const uf::stl::string& filename, const uf::Serialize
#if UF_ENV_DREAMCAST
DC_STATS();
#endif
return graph;
}

View File

@ -43,6 +43,59 @@ namespace {
}
return hash;
}
// lazy load animations if requested
// Lazily loads a streamed animation's keyframe data (samplers + channels) from
// the JSON file whose path was recorded in the animation entry during graph
// load. Invoked on demand when an animation with empty samplers/channels is
// about to be played (animation streaming).
// NOTE(review): looks up via operator[], so it assumes the entry already
// exists in the storage map — a missing entry would be default-constructed
// with an empty path and trip the assert below; confirm callers guarantee this.
void loadAnimation( const uf::stl::string& name ) {
    auto& scene = uf::scene::getCurrentScene();
    // resolve either the global graph storage or the current scene's own storage component
    auto& storage = uf::graph::globalStorage ? uf::graph::storage : scene.getComponent<pod::Graph::Storage>();
    auto& animation = storage.animations.map[name];
    // streaming must have stored a source path for this animation at graph-load time
    UF_ASSERT( animation.path != "" );
    uf::Serializer json;
    json.readFromFile( animation.path );
    // refresh header fields; `.as(current)` keeps the existing value if the key is absent
    animation.name = json["name"].as(animation.name);
    animation.start = json["start"].as(animation.start);
    animation.end = json["end"].as(animation.end);
    // decode samplers only if not already resident (guards against double-loading)
    if ( animation.samplers.empty() ) ext::json::forEach( json["samplers"], [&]( ext::json::Value& value ){
        auto& sampler = animation.samplers.emplace_back();
        sampler.interpolator = value["interpolator"].as(sampler.interpolator);
        sampler.inputs.reserve( value["inputs"].size() );
        ext::json::forEach( value["inputs"], [&]( ext::json::Value& input ){
            sampler.inputs.emplace_back( input.as<float>() );
        });
        sampler.outputs.reserve( value["outputs"].size() );
        ext::json::forEach( value["outputs"], [&]( ext::json::Value& output ){
            sampler.outputs.emplace_back( uf::vector::decode( output, pod::Vector4f{} ) );
        });
    });
    // decode channels only if not already resident
    if ( animation.channels.empty() ) ext::json::forEach( json["channels"], [&]( ext::json::Value& value ){
        auto& channel = animation.channels.emplace_back();
        channel.path = value["path"].as(channel.path);
        channel.node = value["node"].as(channel.node);
        channel.sampler = value["sampler"].as(channel.sampler);
    });
}
// Releases a streamed animation's keyframe payload (samplers + channels) while
// keeping the entry itself (name/path/start/end) so loadAnimation() can bring
// it back on demand.
void unloadAnimation( const uf::stl::string& name ) {
    auto& scene = uf::scene::getCurrentScene();
    auto& storage = uf::graph::globalStorage ? uf::graph::storage : scene.getComponent<pod::Graph::Storage>();
    auto& anim = storage.animations.map[name];
    anim.channels.clear();
    anim.samplers.clear();
#if UF_ENV_DREAMCAST
    // memory-constrained target: actually return the capacity, not just the size
    anim.channels.shrink_to_fit();
    anim.samplers.shrink_to_fit();
#endif
}
}
#if UF_ENV_DREAMCAST
@ -808,7 +861,6 @@ void uf::graph::process( pod::Graph& graph ) {
uf::stl::unordered_map<uf::stl::string, bool> isSrgb;
// process lightmap
UF_DEBUG_TIMER_MULTITRACE("Parsing lightmaps");
if ( true ) {
constexpr const char* UF_GRAPH_DEFAULT_LIGHTMAP = "./lightmap.%i.png";
@ -880,7 +932,9 @@ void uf::graph::process( pod::Graph& graph ) {
auto& texture = /*graph.storage*/storage.textures[graph.textures.emplace_back(f)];
auto& image = /*graph.storage*/storage.images[graph.images.emplace_back(f)];
image.open( f, false );
if ( !graph.settings.stream.textures ) {
image.open( f, false );
}
texture.index = imageID;
@ -934,6 +988,12 @@ void uf::graph::process( pod::Graph& graph ) {
auto& image = storage.images[key];
auto& texture = storage.texture2Ds[key];
if ( !texture.generated() ) {
// set as null
if ( graph.settings.stream.textures ) {
texture.aliasTexture(uf::renderer::Texture2D::empty);
continue;
}
auto filter = uf::renderer::enums::Filter::LINEAR;
auto tag = ext::json::find( key, graph.metadata["tags"] );
if ( !ext::json::isObject( tag ) ) {
@ -948,7 +1008,7 @@ void uf::graph::process( pod::Graph& graph ) {
texture.sampler.descriptor.filter.min = filter;
texture.sampler.descriptor.filter.mag = filter;
texture.srgb = isSrgb.count(key) == 0 ? false : isSrgb[key];
texture.srgb = isSrgb[key];
texture.loadFromImage( image );
#if UF_ENV_DREAMCAST
@ -1570,6 +1630,9 @@ void uf::graph::override( pod::Graph& graph ) {
if ( !toNeutralPose ) {
uf::stl::string name = graph.sequence.front();
pod::Animation& animation = storage.animations.map[name]; // graph.animations[name];
// load animation data
if ( animation.channels.empty() || animation.samplers.empty() ) ::loadAnimation( name );
for ( auto& channel : animation.channels ) {
auto& override = graph.settings.animations.override.map[channel.node];
auto& sampler = animation.samplers[channel.sampler];
@ -1615,7 +1678,11 @@ void uf::graph::animate( pod::Graph& graph, const uf::stl::string& _name, float
// if already playing, ignore it
if ( !graph.sequence.empty() && graph.sequence.front() == name ) return;
if ( immediate ) {
while ( !graph.sequence.empty() ) graph.sequence.pop();
while ( !graph.sequence.empty() ) {
// unload
if ( graph.settings.stream.animations ) ::unloadAnimation( graph.sequence.front() );
graph.sequence.pop();
}
}
bool empty = graph.sequence.empty();
graph.sequence.emplace(name);
@ -1678,7 +1745,10 @@ void uf::graph::update( pod::Graph& graph, float delta ) {
animation->cur = graph.settings.animations.loop ? animation->cur - animation->end : 0;
// go-to next animation
if ( !graph.settings.animations.loop ) {
// unload
if ( graph.settings.stream.animations ) ::unloadAnimation( graph.sequence.front() );
graph.sequence.pop();
// out of animations, set to neutral pose
if ( graph.sequence.empty() ) {
uf::graph::override( graph );
@ -1688,6 +1758,10 @@ void uf::graph::update( pod::Graph& graph, float delta ) {
animation = &storage.animations.map[name]; // &graph.animations[name];
}
}
// load animation data
if ( animation->channels.empty() || animation->samplers.empty() ) ::loadAnimation( name );
for ( auto& channel : animation->channels ) {
auto& sampler = animation->samplers[channel.sampler];
if ( sampler.interpolator != "LINEAR" ) continue;
@ -2131,7 +2205,87 @@ void uf::graph::reload( pod::Graph& graph, pod::Node& node ) {
mesh.buffers[attribute.buffer] = uf::io::readAsBuffer( mesh.buffer_paths[attribute.buffer] );
}
#endif
} else {
if ( graph.settings.stream.textures ) {
// cringe macro that ensures a texture ID is mapped properly, regardless if its visible or not
// lightmaps are not sRGB, while textures (usually) are
#define INCREMENT_TEXTURE_REFCOUNT( ID, isSRGB ) if ( 0 <= ID && ID < graph.textures.size() ) {\
auto& key = graph.textures[ID];\
textureReferences[key] += visible ? 1 : 0;\
isSrgb[key] = isSRGB;\
}
uf::stl::unordered_map<uf::stl::string, bool> isSrgb; // cringe
uf::stl::unordered_map<uf::stl::string, size_t> textureReferences;
// determine which textures are in use or not
for ( size_t drawID = 0; drawID < primitives.size(); ++drawID ) {
auto& primitive = primitives[drawID];
auto& instance = primitive.instance;
auto& drawCommand = drawCommands[drawID];
bool visible = drawCommand.instances > 0;
INCREMENT_TEXTURE_REFCOUNT(instance.lightmapID, false);
// no material information bound
if ( !(0 <= instance.materialID && instance.materialID < graph.materials.size()) ) {
continue;
}
auto& material = storage.materials[graph.materials[instance.materialID]];
INCREMENT_TEXTURE_REFCOUNT(material.indexAlbedo, true);
INCREMENT_TEXTURE_REFCOUNT(material.indexNormal, true);
INCREMENT_TEXTURE_REFCOUNT(material.indexEmissive, true);
INCREMENT_TEXTURE_REFCOUNT(material.indexOcclusion, true);
INCREMENT_TEXTURE_REFCOUNT(material.indexMetallicRoughness, true);
}
// iterate through our ref counts
// to-do: figure out why this doesn't work for OpenGL (texture ID handles might be wrong, might be better to store the old texture ID handle to use it and then update to that handle)
for ( auto& [ key, count ] : textureReferences ) {
auto& texture = storage.texture2Ds[key];
auto& image = storage.images[key];
bool visible = count > 0;
// load texture
if ( visible && (!texture.generated() || texture.aliased) ) {
// load image
if ( image.getPixels().empty() ) image.open(image.getFilename(), false);
auto filter = uf::renderer::enums::Filter::LINEAR;
auto tag = ext::json::find( key, graph.metadata["tags"] );
if ( !ext::json::isObject( tag ) ) {
tag["renderer"] = graph.metadata["renderer"];
}
if ( tag["renderer"]["filter"].is<uf::stl::string>() ) {
const auto mode = uf::string::lowercase( tag["renderer"]["filter"].as<uf::stl::string>("linear") );
if ( mode == "linear" ) filter = uf::renderer::enums::Filter::LINEAR;
else if ( mode == "nearest" ) filter = uf::renderer::enums::Filter::NEAREST;
else UF_MSG_WARNING("Invalid Filter enum string specified: {}", mode);
}
// avoids manipulating the aliased texture
if ( texture.aliased ) {
texture = {};
}
texture.sampler.descriptor.filter.min = filter;
texture.sampler.descriptor.filter.mag = filter;
texture.srgb = isSrgb[key];
texture.loadFromImage( image );
#if UF_ENV_DREAMCAST
image.clear();
#endif
} else if ( !visible && texture.generated() ) {
// unload image
image.clear();
// defer destruction of texture
texture.destroy( true );
// alias to null texture
texture.aliasTexture(uf::renderer::Texture2D::empty);
}
}
}
} else { // this shouldn't be reached
// load mesh data
for ( auto& attribute : mesh.index.attributes ) {
if ( !mesh.buffers[attribute.buffer].empty() || mesh.buffer_paths.empty() ) continue;
@ -2144,8 +2298,8 @@ void uf::graph::reload( pod::Graph& graph, pod::Node& node ) {
}
mesh.updateDescriptor();
// process textures
// may or may not be necessary (OpenGL might need a re-record, Vulkan seems fine for the main deferred pass but VXGI doesn't ever get to update since null textures get used sometimes)
uf::renderer::states::rebuild = true;
// update graphic
if ( /*(graph.metadata["renderer"]["separate"].as<bool>()) &&*/ graph.metadata["renderer"]["render"].as<bool>() ) {

View File

@ -117,10 +117,10 @@ namespace {
}
}
pod::Graph ext::gltf::load( const uf::stl::string& filename, const uf::Serializer& metadata ) {
void ext::gltf::load( pod::Graph& graph, const uf::stl::string& filename, const uf::Serializer& metadata ) {
uf::stl::string extension = uf::io::extension( filename );
if ( extension != "glb" && extension != "gltf" ) {
return uf::graph::load( filename, metadata );
return uf::graph::load( graph, filename, metadata );
}
tinygltf::Model model;
@ -129,14 +129,13 @@ pod::Graph ext::gltf::load( const uf::stl::string& filename, const uf::Serialize
uf::stl::string warn, err;
bool ret = extension == "glb" ? loader.LoadBinaryFromFile(&model, &err, &warn, filename) : loader.LoadASCIIFromFile(&model, &err, &warn, filename);
pod::Graph graph;
graph.name = filename;
graph.metadata = metadata;
if ( !warn.empty() ) UF_MSG_WARNING("glTF warning: {}", warn);
if ( !err.empty() ) UF_MSG_ERROR("glTF error: {}", err);
if ( !ret ) { UF_MSG_ERROR("glTF error: failed to parse file: {}", filename);
return graph;
return;
}
#if 0
@ -570,10 +569,12 @@ pod::Graph ext::gltf::load( const uf::stl::string& filename, const uf::Serialize
// disable streaming
{
graph.settings.stream.enabled = false;
graph.settings.stream.textures = false;
graph.settings.stream.animations = false;
graph.settings.stream.radius = 0;
graph.settings.stream.every = 0;
}
return graph;
}
#endif

View File

@ -70,8 +70,10 @@ bool ext::opengl::Texture::generated() const {
// return glIsTexture(image);
#endif
}
void ext::opengl::Texture::destroy() {
void ext::opengl::Texture::destroy( bool defer ) {
// if ( !device ) return;
if ( aliased ) return;
if ( generated() ) {
GL_MUTEX_LOCK();
GL_ERROR_CHECK(glDeleteTextures(1, &image));
@ -264,6 +266,7 @@ ext::opengl::Texture ext::opengl::Texture::alias() const {
return texture;
}
void ext::opengl::Texture::aliasTexture( const Texture& texture ) {
aliased = true;
image = texture.image;
type = texture.type;
viewType = texture.viewType;

View File

@ -76,6 +76,9 @@ uf::Image::Image( const Image::container_t& copy, const Image::vec2_t& size ) :
// Returns the path this image was opened from (or assigned via setFilename).
uf::stl::string uf::Image::getFilename() const {
    return m_filename;
}
// Records the source path without opening the file — used by texture
// streaming to defer the actual pixel load until the image becomes visible.
void uf::Image::setFilename( const uf::stl::string& filename ) {
    m_filename = filename;
}
#define _PACK4(v) ((v * 0xF) / 0xFF)
#define PACK_ARGB4444(a,r,g,b) (_PACK4(a) << 12) | (_PACK4(r) << 8) | (_PACK4(g) << 4) | (_PACK4(b))