gl: Assume the decode buffer will be used as an SSBO, since this appears to hint the driver about where to place the buffer

Part of OpenGL's Achilles' heel: the API does not distinguish between VRAM and system memory at all, leaving developers to wrestle with the driver's heuristic placement algorithm instead.
kd-11 2022-06-02 19:38:12 +03:00 committed by kd-11
parent 234db2be3f
commit ff9c939720


@@ -32,7 +32,7 @@ namespace gl
     if (g_compute_decode_buffer.size() < static_cast<GLsizeiptr>(staging_data_length) * 3)
     {
         g_compute_decode_buffer.remove();
-        g_compute_decode_buffer.create(gl::buffer::target::pixel_pack, std::max<GLsizeiptr>(512, staging_data_length * 3), nullptr, buffer::memory_type::local, GL_STATIC_COPY);
+        g_compute_decode_buffer.create(gl::buffer::target::ssbo, std::max<GLsizeiptr>(512, staging_data_length * 3), nullptr, buffer::memory_type::local, GL_STATIC_COPY);
     }
     return { &g_upload_transfer_buffer, &g_compute_decode_buffer };
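
For context, a minimal raw-GL sketch of the heuristic this change targets (this is not RPCS3's gl::buffer wrapper; the helper name create_decode_buffer and the glad loader are assumptions for illustration). Core GL exposes no explicit VRAM/system-memory choice, so the binding target used at allocation time is one of the few placement hints a driver receives.

#include <glad/glad.h> // assumes an active GL 4.3+ context with functions already loaded

// Hypothetical helper mirroring the commit's intent: allocate the compute decode
// buffer through the SSBO target so the driver sees a shader-storage use case.
GLuint create_decode_buffer(GLsizeiptr size)
{
    GLuint buf = 0;
    glGenBuffers(1, &buf);

    // Binding to GL_SHADER_STORAGE_BUFFER before allocating suggests the storage
    // will be written by compute shaders, which may nudge the driver toward
    // device-local (VRAM) placement. Allocating the same storage through
    // GL_PIXEL_PACK_BUFFER can instead land it in host-visible memory on some
    // drivers, hurting compute throughput.
    glBindBuffer(GL_SHADER_STORAGE_BUFFER, buf);
    glBufferData(GL_SHADER_STORAGE_BUFFER, size, nullptr, GL_STATIC_COPY);
    return buf;
}

Whether the driver honors the hint is implementation-defined; the target is a heuristic input, not a guarantee, which is exactly the wrestling match the commit message describes.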