Patching opti's problem

TheRedShip
2025-01-19 15:48:25 +01:00
parent d0d67b8bd7
commit 71df788350
6 changed files with 2636 additions and 2870 deletions


@@ -20,8 +20,8 @@ int main(int argc, char **argv)
 		return (1);
 	Window window(&scene, WIDTH, HEIGHT, "RT_GPU", 0);
-	Shader shader("shaders/vertex.vert", "shaders/frag.frag", "shaders/compute.glsl");
-	// Shader shader("shaders/vertex.vert", "shaders/frag.frag", "shaders/debug.glsl");
+	// Shader shader("shaders/vertex.vert", "shaders/frag.frag", "shaders/compute.glsl");
+	Shader shader("shaders/vertex.vert", "shaders/frag.frag", "shaders/debug.glsl");
 	GLint max_gpu_size;
 	glGetIntegerv(GL_MAX_SHADER_STORAGE_BLOCK_SIZE, &max_gpu_size);
@@ -122,7 +122,7 @@ int main(int argc, char **argv)
 	Camera *camera = scene.getCamera();
 	// performance profiling
-	if (false)
+	if (true)
 	{
 		float time = (float)(glfwGetTime()) ;
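
The two hunks above flip two hardcoded switches: the active compute shader becomes shaders/debug.glsl and the profiling block guarded by if (false) becomes if (true). As a minimal sketch of one way to drive this from the command line instead of hand-swapping lines; pick_compute_shader and the --debug flag are hypothetical names, not code from this repo:

    #include <cstring>

    // Hypothetical helper, not from the repo: choose the compute shader from
    // a command-line flag so the two hardcoded lines never need hand-swapping.
    static const char *pick_compute_shader(int argc, char **argv)
    {
    	for (int i = 1; i < argc; i++)
    		if (!std::strcmp(argv[i], "--debug"))
    			return ("shaders/debug.glsl");
    	return ("shaders/compute.glsl");
    }

The constructor call would then read Shader shader("shaders/vertex.vert", "shaders/frag.frag", pick_compute_shader(argc, argv)); and the same flag could gate the profiling block.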


@@ -46,7 +46,7 @@ void BVH::updateBounds(std::vector<GPUTriangle> &primitives)
 void BVH::subdivide(std::vector<GPUTriangle> &primitives)
 {
-	if (_primitive_count <= 4)
+	if (_primitive_count <= 100)
 		return ;
 	glm::vec3 extent = _aabb.max - _aabb.min;
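
Raising the leaf cutoff from 4 to 100 primitives stops subdivision much earlier, so the BVH gets far shallower: rays pay for fewer traversal steps but up to ~100 triangle intersection tests in each leaf they hit. A toy cost model illustrates the trade-off; per_ray_cost and its constants are assumptions for illustration, not the repo's heuristic:

    #include <cmath>
    #include <cstdio>

    // Toy per-ray cost for a BVH over N triangles with leaf size L:
    // roughly log2(N / L) traversal steps plus L triangle tests in the leaf.
    // trav and isect are the relative costs of one step and one test.
    static double per_ray_cost(double N, double L, double trav, double isect)
    {
    	return (trav * std::log2(N / L) + isect * L);
    }

    int main(void)
    {
    	std::printf("leaf 4:   %.1f\n", per_ray_cost(1e6, 4.0, 1.0, 1.5));   // deep tree, few tests
    	std::printf("leaf 100: %.1f\n", per_ray_cost(1e6, 100.0, 1.0, 1.5)); // shallow tree, many tests
    	return (0);
    }

Under this model the larger leaves cost much more per ray (about 24 vs 163 units at one million triangles), which fits the debug-oriented rest of the commit: the 100 looks like a temporary value for hunting the optimization bug rather than a tuned one.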