1045 lines
38 KiB
Zig
1045 lines
38 KiB
Zig
const std = @import("std");
|
|
const gl = @import("gl.zig");
|
|
const c = @import("sdl.zig");
|
|
const AssetManager = @import("AssetManager.zig");
|
|
const a = @import("asset_manifest");
|
|
const globals = @import("globals.zig");
|
|
pub const Material = @import("formats.zig").Material;
|
|
const math = @import("math.zig");
|
|
|
|
const za = @import("zalgebra");
|
|
const Vec2 = za.Vec2;
|
|
const Vec3 = za.Vec3;
|
|
const Vec4 = za.Vec4;
|
|
const Mat4 = za.Mat4;
|
|
const Quat = za.Quat;
|
|
const Vec2_i32 = za.Vec2_i32;
|
|
|
|
// Number of frames the CPU may record ahead of the GPU (triple buffering).
pub const MAX_FRAMES_QUEUED = 3;
// Maximum lights uploaded to the shader per frame (size of LightArray.lights).
pub const MAX_LIGHTS = 8;
// Capacity of the per-frame draw command buffer.
pub const MAX_DRAW_COMMANDS = 4096;
// Capacity of the per-frame light command buffer (CPU side, pre-sort/cull).
pub const MAX_LIGHT_COMMANDS = 2048;
// Number of cascaded shadow map splits (layers in the 2D shadow array).
pub const CSM_SPLITS = 4;

// This file is the Render struct itself.
pub const Render = @This();

// Fallback camera used until a real one is assigned to `camera`.
var default_camera: Camera = .{};

// Long-lived allocator for renderer-owned resources.
allocator: std.mem.Allocator,
// Per-frame scratch allocator, reset by the caller each frame.
frame_arena: std.mem.Allocator,
// Resolves mesh/texture/shader handles into GPU resources.
assetman: *AssetManager,
camera: *Camera = &default_camera,
mesh_vao: gl.GLuint = 0,
// Index of the in-flight frame slot; advanced (mod MAX_FRAMES_QUEUED) in begin().
tripple_buffer_index: usize = MAX_FRAMES_QUEUED - 1,
// One fence per queued frame; waited on before reusing that frame's UBO slot.
gl_fences: [MAX_FRAMES_QUEUED]?gl.GLsync = [_]?gl.GLsync{null} ** MAX_FRAMES_QUEUED,
camera_ubo: gl.GLuint = 0,
// Persistently-mapped bytes of camera_ubo (MAX_FRAMES_QUEUED aligned slots).
camera_matrices: []u8 = &.{},
point_lights_ubo: gl.GLuint = 0,
// Persistently-mapped bytes of point_lights_ubo (MAX_FRAMES_QUEUED aligned slots).
point_lights: []u8 = &.{}, // TODO: remove
// CPU-side light commands recorded via drawLight(); reset in begin().
lights: [MAX_LIGHT_COMMANDS]LightCommand = undefined,
light_count: usize = 0,
// CPU-side draw commands recorded via draw(); reset in begin().
command_buffer: [MAX_DRAW_COMMANDS]DrawCommand = undefined,
command_count: usize = 0,
// GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT, queried in init().
ubo_align: usize = 0,
shadow_vao: gl.GLuint = 0,
// 2D array texture holding directional (CSM) shadow depth layers.
shadow_texture_array: gl.GLuint = 0,
// Bindless handle for shadow_texture_array (resident for the renderer's lifetime).
shadow_texture_handle: gl.GLuint64 = 0,
shadow_framebuffer: gl.GLuint = 0,
// Small non-mapped UBO re-uploaded per shadow pass via namedBufferSubData.
shadow_matrices_buffer: gl.GLuint = 0,
shadow_matrices: CameraMatrices = .{},
// Cube-map array for point light shadows (MAX_LIGHTS * 6 faces).
cube_shadow_texture_array: gl.GLuint = 0,
cube_shadow_texture_handle: gl.GLuint64 = 0,
cube_shadow_framebuffer: gl.GLuint = 0,

// Destination for all 3d rendering
screen_color_texture: gl.GLuint = 0,
screen_depth_texture: gl.GLuint = 0,
screen_fbo: gl.GLuint = 0,
// Current drawable size backing the screen textures; checked every frame.
screen_tex_size: Vec2_i32 = Vec2_i32.zero(),
// Mip chain length of screen_color_texture (used by the bloom passes).
screen_mip_count: usize = 1,

// VAO for post processing shaders
post_process_vao: gl.GLuint = 0,

// Bloom
screen_bloom_sampler: gl.GLuint = 0,

// When false, the culling frustum is frozen so it can be visualized in-world.
update_view_frustum: bool = true,
camera_view_proj: Mat4 = Mat4.identity(),
world_camera_frustum: math.Frustum = .{},
world_view_frustum_corners: [8]Vec3 = [_]Vec3{Vec3.new(0, 0, 0)} ** 8,
|
|
/// Creates the renderer and all long-lived GPU resources: mesh/shadow/post-process
/// VAOs, persistently-mapped camera and light UBOs (one aligned slot per queued
/// frame), the 2D and cube shadow map arrays with bindless handles, the shadow
/// FBO and matrices buffer, and the HDR screen framebuffer + bloom sampler.
/// Panics if a required GL query or mapping fails. Requires a current GL context.
pub fn init(allocator: std.mem.Allocator, frame_arena: std.mem.Allocator, assetman: *AssetManager) Render {
    var render = Render{
        .allocator = allocator,
        .frame_arena = frame_arena,
        .assetman = assetman,
    };

    gl.clipControl(gl.LOWER_LEFT, gl.ZERO_TO_ONE); // use [0, 1] depth in NDC

    // Query the alignment required for UBO bind offsets; per-frame UBO slots
    // are laid out at this alignment inside one buffer (see uboAlignedSizeOf).
    var buffer_align_int: gl.GLint = 0;
    gl.getIntegerv(gl.UNIFORM_BUFFER_OFFSET_ALIGNMENT, &buffer_align_int);

    if (buffer_align_int == 0) @panic("Failed to query GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT");

    render.ubo_align = @intCast(buffer_align_int);

    {
        // MESH VAO: attribute formats only; vertex buffers are bound per-mesh at draw time.
        var vao: gl.GLuint = 0;
        gl.createVertexArrays(1, &vao);
        std.debug.assert(vao != 0);
        render.mesh_vao = vao;

        // positions
        // gl.vertexArrayVertexBuffer(vao, 0, vertices, 0, @sizeOf(formats.Vector3));
        gl.enableVertexArrayAttrib(vao, Attrib.Position.value());
        gl.vertexArrayAttribBinding(vao, Attrib.Position.value(), 0);
        gl.vertexArrayAttribFormat(vao, Attrib.Position.value(), 3, gl.FLOAT, gl.FALSE, 0);

        // normals
        gl.enableVertexArrayAttrib(vao, Attrib.Normal.value());
        gl.vertexArrayAttribBinding(vao, Attrib.Normal.value(), 1);
        gl.vertexArrayAttribFormat(vao, Attrib.Normal.value(), 3, gl.FLOAT, gl.FALSE, 0);

        // tangents (binding 3; binding 2 is used by UVs below)
        gl.enableVertexArrayAttrib(vao, Attrib.Tangent.value());
        gl.vertexArrayAttribBinding(vao, Attrib.Tangent.value(), 3);
        gl.vertexArrayAttribFormat(vao, Attrib.Tangent.value(), 3, gl.FLOAT, gl.FALSE, 0);

        // uvs
        gl.enableVertexArrayAttrib(vao, Attrib.UV.value());
        gl.vertexArrayAttribBinding(vao, Attrib.UV.value(), 2);
        gl.vertexArrayAttribFormat(vao, Attrib.UV.value(), 2, gl.FLOAT, gl.FALSE, 0);
    }

    // COHERENT mapping: CPU writes become visible to the GPU without explicit flushes;
    // fences in begin() prevent overwriting a slot the GPU is still reading.
    const PERSISTENT_BUFFER_FLAGS: gl.GLbitfield = gl.MAP_PERSISTENT_BIT | gl.MAP_WRITE_BIT | gl.MAP_COHERENT_BIT;

    // Camera matrices ubo
    {
        gl.createBuffers(1, &render.camera_ubo);
        std.debug.assert(render.camera_ubo != 0);

        const buf_size = render.uboAlignedSizeOf(CameraMatrices) * MAX_FRAMES_QUEUED;
        gl.namedBufferStorage(
            render.camera_ubo,
            @intCast(buf_size),
            null,
            PERSISTENT_BUFFER_FLAGS,
        );
        const camera_matrices_c: [*]u8 = @ptrCast(gl.mapNamedBufferRange(
            render.camera_ubo,
            0,
            @intCast(buf_size),
            PERSISTENT_BUFFER_FLAGS,
        ) orelse {
            checkGLError();
            @panic("bind camera_ubo");
        });
        render.camera_matrices = camera_matrices_c[0..buf_size];
    }

    // Point lights ubo
    {
        gl.createBuffers(1, &render.point_lights_ubo);
        // BUG FIX: previously asserted `render.camera_ubo != 0` (copy-paste from the
        // camera UBO block); validate the buffer that was just created instead.
        std.debug.assert(render.point_lights_ubo != 0);

        const buf_size = render.uboAlignedSizeOf(LightArray) * MAX_FRAMES_QUEUED;
        gl.namedBufferStorage(
            render.point_lights_ubo,
            @intCast(buf_size),
            null,
            PERSISTENT_BUFFER_FLAGS,
        );
        const point_lights_c: [*]u8 = @ptrCast(gl.mapNamedBufferRange(
            render.point_lights_ubo,
            0,
            @intCast(buf_size),
            PERSISTENT_BUFFER_FLAGS,
        ) orelse {
            checkGLError();
            @panic("bind point_lights_ubo");
        });
        render.point_lights = point_lights_c[0..buf_size];
    }

    {
        // 2D Shadow texture array (one 2048x2048 depth layer per CSM split)
        {
            gl.createTextures(gl.TEXTURE_2D_ARRAY, 1, &render.shadow_texture_array);
            checkGLError();
            std.debug.assert(render.shadow_texture_array != 0);

            gl.textureStorage3D(render.shadow_texture_array, 1, gl.DEPTH_COMPONENT16, 2048, 2048, CSM_SPLITS);
            checkGLError();

            // Hardware depth comparison so the shader can use shadow samplers (PCF).
            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_COMPARE_MODE, gl.COMPARE_REF_TO_TEXTURE);
            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_COMPARE_FUNC, gl.LESS);

            // Border of 1.0 (max depth) => samples outside the map are fully lit.
            var border = [_]f32{1} ** 4;
            gl.textureParameterfv(render.shadow_texture_array, gl.TEXTURE_BORDER_COLOR, &border);
            checkGLError();

            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_BORDER);
            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_BORDER);
            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
            gl.textureParameteri(render.shadow_texture_array, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        }

        // First shadow texture handle (bindless; stays resident for renderer lifetime)
        {
            render.shadow_texture_handle = gl.GL_ARB_bindless_texture.getTextureHandleARB(render.shadow_texture_array);
            checkGLError();
            std.debug.assert(render.shadow_texture_handle != 0);
            gl.GL_ARB_bindless_texture.makeTextureHandleResidentARB(render.shadow_texture_handle);
            checkGLError();
        }

        // Cube Shadow texture array (6 faces per light, 512x512 depth)
        {
            gl.createTextures(gl.TEXTURE_CUBE_MAP_ARRAY, 1, &render.cube_shadow_texture_array);
            checkGLError();
            std.debug.assert(render.cube_shadow_texture_array != 0);

            gl.textureStorage3D(render.cube_shadow_texture_array, 1, gl.DEPTH_COMPONENT16, 512, 512, MAX_LIGHTS * 6);
            checkGLError();

            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_COMPARE_MODE, gl.COMPARE_REF_TO_TEXTURE);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_COMPARE_FUNC, gl.LESS);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
            gl.textureParameteri(render.cube_shadow_texture_array, gl.TEXTURE_WRAP_R, gl.CLAMP_TO_EDGE);
        }

        // Cube Shadow array handle
        {
            render.cube_shadow_texture_handle = gl.GL_ARB_bindless_texture.getTextureHandleARB(render.cube_shadow_texture_array);
            checkGLError();
            std.debug.assert(render.cube_shadow_texture_handle != 0);
            gl.GL_ARB_bindless_texture.makeTextureHandleResidentARB(render.cube_shadow_texture_handle);
            checkGLError();
        }

        // Shadow FBO (depth-only: no color draw/read buffers)
        {
            gl.createFramebuffers(1, &render.shadow_framebuffer);
            checkGLError();
            std.debug.assert(render.shadow_framebuffer != 0);
            gl.namedFramebufferDrawBuffer(render.shadow_framebuffer, gl.NONE);
            gl.namedFramebufferReadBuffer(render.shadow_framebuffer, gl.NONE);
        }

        gl.namedFramebufferTextureLayer(render.shadow_framebuffer, gl.DEPTH_ATTACHMENT, render.shadow_texture_array, 0, 0);
        const check_fbo_status = gl.checkNamedFramebufferStatus(render.shadow_framebuffer, gl.DRAW_FRAMEBUFFER);
        if (check_fbo_status != gl.FRAMEBUFFER_COMPLETE) {
            std.log.debug("Shadow Framebuffer Incomplete: {}\n", .{check_fbo_status});
        }

        // Small dynamic UBO re-uploaded once per shadow pass (not persistently mapped).
        gl.createBuffers(1, &render.shadow_matrices_buffer);

        gl.namedBufferStorage(
            render.shadow_matrices_buffer,
            @sizeOf(CameraMatrices),
            null,
            gl.DYNAMIC_STORAGE_BIT,
        );

        // SHADOW VAO (positions only; shadow passes need no normals/uvs)
        var vao: gl.GLuint = 0;
        gl.createVertexArrays(1, &vao);
        std.debug.assert(vao != 0);
        render.shadow_vao = vao;

        // positions
        // gl.vertexArrayVertexBuffer(vao, 0, vertices, 0, @sizeOf(formats.Vector3));
        gl.enableVertexArrayAttrib(vao, Attrib.Position.value());
        gl.vertexArrayAttribBinding(vao, Attrib.Position.value(), 0);
        gl.vertexArrayAttribFormat(vao, Attrib.Position.value(), 3, gl.FLOAT, gl.FALSE, 0);
    }

    // Screen HDR FBO
    {
        gl.createFramebuffers(1, &render.screen_fbo);
        std.debug.assert(render.screen_fbo != 0);

        var width: c_int = 0;
        var height: c_int = 0;
        c.SDL_GL_GetDrawableSize(globals.g_init.window, &width, &height);

        var textures = [2]gl.GLuint{ 0, 0 };
        gl.createTextures(gl.TEXTURE_2D, textures.len, &textures);
        render.screen_color_texture = textures[0];
        render.screen_depth_texture = textures[1];

        std.debug.assert(render.screen_color_texture != 0);
        std.debug.assert(render.screen_depth_texture != 0);

        gl.textureParameteri(render.screen_color_texture, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.textureParameteri(render.screen_color_texture, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.textureParameteri(render.screen_color_texture, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.textureParameteri(render.screen_color_texture, gl.TEXTURE_MAG_FILTER, gl.NEAREST);

        gl.textureParameteri(render.screen_depth_texture, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.textureParameteri(render.screen_depth_texture, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.textureParameteri(render.screen_depth_texture, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.textureParameteri(render.screen_depth_texture, gl.TEXTURE_MAG_FILTER, gl.NEAREST);

        // Allocates the mip chain and records screen_tex_size / screen_mip_count.
        render.updateScreenBufferSize(width, height);
    }

    // Bloom screen sampler (mip-aware filtering for the down/upsample passes)
    {
        var sampler: gl.GLuint = 0;
        gl.createSamplers(1, &sampler);
        std.debug.assert(sampler != 0);
        render.screen_bloom_sampler = sampler;

        gl.samplerParameteri(sampler, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
        gl.samplerParameteri(sampler, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        gl.samplerParameteri(sampler, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.samplerParameteri(sampler, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    }

    // Post process VAO (fullscreen quad positions only)
    {
        gl.createVertexArrays(1, &render.post_process_vao);
        std.debug.assert(render.post_process_vao != 0);
        const vao = render.post_process_vao;

        // positions
        gl.enableVertexArrayAttrib(vao, Attrib.Position.value());
        gl.vertexArrayAttribBinding(vao, Attrib.Position.value(), 0);
        gl.vertexArrayAttribFormat(vao, Attrib.Position.value(), 3, gl.FLOAT, gl.FALSE, 0);
    }

    return render;
}
|
|
|
|
/// Returns the dimensions of mip level `mip_level` for a `width` x `height`
/// base image, clamped so neither side goes below 1 texel.
fn getMipSize(width: i32, height: i32, mip_level: usize) Vec2_i32 {
    // Level 0 is the base image itself.
    if (mip_level == 0) return Vec2_i32.new(width, height);

    // Each mip halves both dimensions: divide by 2^mip_level (truncating).
    const denom = std.math.pow(f32, 2, @floatFromInt(mip_level));
    const scaled_w: c_int = @intFromFloat(@as(f32, @floatFromInt(width)) / denom);
    const scaled_h: c_int = @intFromFloat(@as(f32, @floatFromInt(height)) / denom);

    return Vec2_i32.new(@max(scaled_w, 1), @max(scaled_h, 1));
}
|
|
|
|
/// (Re)allocates the HDR screen color texture's full mip chain and the screen
/// depth texture at the given drawable size, then records the new size and mip
/// count on `self`. Called from init() and whenever the drawable size changes.
/// NOTE: uses legacy bind + texImage2D (not textureStorage2D) so the textures
/// can be reallocated at a new size; leaves the textures bound afterwards.
fn updateScreenBufferSize(self: *Render, width: c_int, height: c_int) void {
    // Full mip chain length: 1 + floor(log2(max dimension)).
    const mip_count = 1 + @as(
        u32,
        @intFromFloat(@log2(@as(f32, @floatFromInt(@max(width, height))))),
    );

    // Allocate every mip level of the RGB16F color target (bloom samples/writes mips).
    gl.bindTexture(gl.TEXTURE_2D, self.screen_color_texture);
    for (0..mip_count) |mip_level| {
        const size = getMipSize(width, height, mip_level);
        std.log.debug("screen_color mip {} size {}x{}\n", .{ mip_level, size.x(), size.y() });

        gl.texImage2D(gl.TEXTURE_2D, @intCast(mip_level), gl.RGB16F, size.x(), size.y(), 0, gl.RGB, gl.HALF_FLOAT, null);
        checkGLError();
    }

    // Depth doesn't need any mips cause it's not filterable anyway
    gl.bindTexture(gl.TEXTURE_2D, self.screen_depth_texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT32F, width, height, 0, gl.DEPTH_COMPONENT, gl.FLOAT, null);
    checkGLError();

    self.screen_tex_size = Vec2_i32.new(width, height);
    self.screen_mip_count = mip_count;
}
|
|
|
|
/// Starts a new frame: clears the recorded draw/light commands, advances the
/// triple-buffer slot, enables default raster state, and blocks until the GPU
/// has finished the frame that previously used this slot (so its persistently
/// mapped UBO region can be safely overwritten).
pub fn begin(self: *Render) void {
    self.command_count = 0;
    self.light_count = 0;
    self.tripple_buffer_index = (self.tripple_buffer_index + 1) % MAX_FRAMES_QUEUED;

    gl.enable(gl.CULL_FACE);
    gl.enable(gl.DEPTH_TEST);

    // No fence yet for this slot => nothing in flight, safe to proceed.
    const fence = self.gl_fences[self.tripple_buffer_index] orelse return;

    const wait_result = gl.clientWaitSync(fence, gl.SYNC_FLUSH_COMMANDS_BIT, 9999999999);
    switch (wait_result) {
        // Either outcome means the GPU is done with this slot.
        gl.ALREADY_SIGNALED, gl.CONDITION_SATISFIED => {},
        gl.TIMEOUT_EXPIRED => {
            // oh no, driver will crash soon :(
            std.log.err("OpenGL clientWaitSync timeout expired D:\n", .{});
            checkGLError();
        },
        gl.WAIT_FAILED => checkGLError(),
        else => unreachable,
    }

    gl.deleteSync(fence);
    self.gl_fences[self.tripple_buffer_index] = null;
}
|
|
|
|
/// Returns a pointer to this frame's LightArray slot inside the persistently
/// mapped point_lights buffer.
fn getLightBuffer(self: *Render) *LightArray {
    const slot_offset = self.tripple_buffer_index * self.uboAlignedSizeOf(LightArray);
    const slot_bytes = self.point_lights[slot_offset..];
    return @alignCast(@ptrCast(slot_bytes.ptr));
}
|
|
|
|
// TODO: get rid of this
/// Binds this frame's slot of the point lights UBO to the PointLights binding
/// point. The buffer is mapped coherently, so no explicit flush is needed.
pub fn flushUBOs(self: *Render) void {
    const slot_size = self.uboAlignedSizeOf(LightArray);
    const slot_offset = self.tripple_buffer_index * slot_size;

    gl.bindBufferRange(
        gl.UNIFORM_BUFFER,
        UBO.PointLights.value(),
        self.point_lights_ubo,
        slot_offset,
        @intCast(slot_size),
    );
    checkGLError();
}
|
|
|
|
/// Tag for the kinds of lights the renderer supports.
pub const LightKind = enum {
    directional,
    point,
    // Spot, // TODO
};

/// A point light in world space. `radius` scales the falloff range
/// (see pointLightRange for the effective cutoff distance).
pub const PointLight = struct {
    color: Vec3,
    pos: Vec3,
    radius: f32,
};

/// A per-frame light submission, recorded via drawLight().
pub const LightCommand = union(LightKind) {
    directional: struct {
        color: Vec3,
        // Direction the light travels (not the direction toward the light).
        dir: Vec3,
    },
    point: PointLight,
};
|
|
|
|
/// Records a light for the current frame. Lights beyond MAX_LIGHT_COMMANDS are
/// dropped with a warning (previously an unchecked write: index-out-of-bounds
/// panic in safe builds, UB in release-fast).
pub fn drawLight(self: *Render, cmd: LightCommand) void {
    if (self.light_count >= self.lights.len) {
        std.log.warn("drawLight: light command buffer full ({} max), dropping light\n", .{self.lights.len});
        return;
    }
    self.lights[self.light_count] = cmd;
    self.light_count += 1;
}
|
|
|
|
/// Records a mesh draw command for the current frame. Commands beyond
/// MAX_DRAW_COMMANDS are dropped with a warning (previously an unchecked
/// write: index-out-of-bounds panic in safe builds, UB in release-fast).
pub fn draw(self: *Render, cmd: DrawCommand) void {
    if (self.command_count >= self.command_buffer.len) {
        std.log.warn("draw: draw command buffer full ({} max), dropping command\n", .{self.command_buffer.len});
        return;
    }
    self.command_buffer[self.command_count] = cmd;
    self.command_count += 1;
}
|
|
|
|
/// Renders the recorded frame: shadow passes for every light, the main mesh
/// pass into the HDR screen FBO with frustum culling, optional frustum debug
/// visualization, the bloom down/upsample chain, the final tonemap/post pass
/// to the default framebuffer, then inserts a fence for this triple-buffer
/// slot and swaps the window.
pub fn finish(self: *Render) void {
    const ginit = globals.g_init;

    const camera_projection = self.camera.projection();
    const view_proj = camera_projection.mul(self.camera.view_mat);
    // When update_view_frustum is false the culling frustum stays frozen so it
    // can be inspected from outside (see the debug section below).
    if (self.update_view_frustum) {
        self.camera_view_proj = view_proj;
        self.world_camera_frustum = math.Frustum.new(view_proj);
    }

    const inv_view_proj = view_proj.inv();

    // Unproject the NDC cube corners to world space (perspective divide by w).
    if (self.update_view_frustum) {
        for (math.ndc_box_corners, 0..) |corner, i| {
            const pos4 = inv_view_proj.mulByVec4(corner.toVec4(1));
            self.world_view_frustum_corners[i] = pos4.toVec3().scale(1.0 / pos4.w());
        }
    }

    const lights = self.lights[0..self.light_count];

    // Sort lights: directional first
    // (so the shadow pass can switch from the 2D shadow program to the cube
    // shadow program exactly once, at the first point light)
    {
        std.mem.sortUnstable(LightCommand, lights, {}, struct {
            pub fn lessThan(_: void, lhs: LightCommand, rhs: LightCommand) bool {
                _ = rhs; // autofix
                return switch (lhs) {
                    .directional => true,
                    .point => false,
                };
            }
        }.lessThan);
    }

    const lights_buf = self.getLightBuffer();
    lights_buf.count = 0;

    // NOTE(review): these accumulate across ALL directional lights in the loop
    // below and are seeded at the origin, so the AABB always contains (0,0,0)
    // and grows monotonically with multiple directional lights — confirm intended.
    var dir_aabb_min = Vec3.zero();
    var dir_aabb_max = Vec3.zero();
    var dir_view_proj_mat = Mat4.identity();

    // Light shadow maps
    {
        gl.bindVertexArray(self.shadow_vao);
        gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, self.shadow_framebuffer);

        var finished_dir_lights = false;
        gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.shadow).program);

        for (lights) |light_cmd| {
            const i = lights_buf.count;
            // GPU-side light array holds at most MAX_LIGHTS; extra lights are ignored.
            if (i == lights_buf.lights.len) break;

            const light = &lights_buf.lights[i];
            lights_buf.count += 1;

            switch (light_cmd) {
                .directional => |dir_light| {
                    // w = 0 marks a directional light for the shader.
                    light.pos = dir_light.dir.toVec4(0);
                    light.color_radius = dir_light.color.toVec4(0);
                    gl.namedFramebufferTextureLayer(self.shadow_framebuffer, gl.DEPTH_ATTACHMENT, self.shadow_texture_array, 0, 0);
                    const check_fbo_status = gl.checkNamedFramebufferStatus(self.shadow_framebuffer, gl.DRAW_FRAMEBUFFER);
                    if (check_fbo_status != gl.FRAMEBUFFER_COMPLETE) {
                        std.log.debug("Shadow Framebuffer Incomplete: {}\n", .{check_fbo_status});
                    }

                    gl.viewport(0, 0, 2048, 2048);

                    var projection: Mat4 = undefined;
                    // Look from the reverse light direction toward the origin.
                    const view = Mat4.lookAt(
                        dir_light.dir.scale(-1),
                        Vec3.zero(),
                        Vec3.up(),
                    );

                    {
                        // Fit an ortho projection around the camera frustum corners
                        // as seen in light view space.
                        for (self.world_view_frustum_corners) |corner| {
                            const pos4 = view.mulByVec4(corner.toVec4(1));
                            const pos = pos4.toVec3();
                            dir_aabb_min = pos.min(dir_aabb_min);
                            dir_aabb_max = pos.max(dir_aabb_max);
                        }
                        projection = math.orthographic(dir_aabb_min.x(), dir_aabb_max.x(), dir_aabb_min.y(), dir_aabb_max.y(), -dir_aabb_max.z(), -dir_aabb_min.z());
                        //projection = math.orthographic(-1, 1, -5, 5, 0, 0.5);
                    }

                    const camera_matrix = &self.shadow_matrices;
                    camera_matrix.* = .{
                        .view = view,
                        .projection = projection,
                    };

                    const shadow_view_proj = projection.mul(view);
                    dir_view_proj_mat = shadow_view_proj;
                    const light_frustum = math.Frustum.new(shadow_view_proj);
                    light.shadow_vp = shadow_view_proj;

                    gl.namedBufferSubData(self.shadow_matrices_buffer, 0, @sizeOf(CameraMatrices), std.mem.asBytes(&self.shadow_matrices));
                    checkGLError();

                    gl.clear(gl.DEPTH_BUFFER_BIT);
                    gl.bindBufferBase(gl.UNIFORM_BUFFER, UBO.CameraMatrices.value(), self.shadow_matrices_buffer);

                    self.renderShadow(&light_frustum);
                },
                .point => |point_light| {
                    // First point light: switch to the cube shadow program
                    // (lights are sorted so all directional lights came first).
                    if (!finished_dir_lights) {
                        finished_dir_lights = true;
                        gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.cube_shadow).program);
                    }

                    const pos = point_light.pos;
                    // w = 1 marks a positional light for the shader.
                    light.pos = pos.toVec4(1);
                    light.color_radius = point_light.color.toVec4(point_light.radius);

                    light.shadow_vp = Mat4.fromTranslate(pos.negate());
                    // For each cube face
                    for (cube_camera_dirs, 0..) |cam_dir, face| {
                        gl.namedFramebufferTextureLayer(self.shadow_framebuffer, gl.DEPTH_ATTACHMENT, self.cube_shadow_texture_array, 0, @intCast(i * 6 + face));
                        const check_fbo_status = gl.checkNamedFramebufferStatus(self.shadow_framebuffer, gl.DRAW_FRAMEBUFFER);
                        if (check_fbo_status != gl.FRAMEBUFFER_COMPLETE) {
                            std.log.debug("Shadow Framebuffer Incomplete: {}\n", .{check_fbo_status});
                        }

                        gl.viewport(0, 0, 512, 512);

                        const range = pointLightRange(&point_light);

                        // 90 degree FOV, square aspect: one face of the cube map.
                        const near_far = Vec2.new(0.1, range);
                        const camera_matrix = &self.shadow_matrices;
                        camera_matrix.* = .{
                            .projection = math.perspective(90, 1, near_far.x(), near_far.y()),
                            .view = Mat4.lookAt(
                                pos,
                                pos.add(cam_dir.target),
                                cam_dir.up,
                            ),
                        };

                        const shadow_view_proj = camera_matrix.projection.mul(camera_matrix.view);
                        const light_frustum = math.Frustum.new(shadow_view_proj);
                        // NOTE(review): shadow_vp/near_far are overwritten each face;
                        // the shader presumably only needs the last/face-independent
                        // values — confirm against the cube_shadow shader.
                        light.shadow_vp = shadow_view_proj;
                        light.near_far = near_far;
                        gl.uniform2f(Uniform.NearFarPlanes.value(), near_far.x(), near_far.y());

                        gl.namedBufferSubData(self.shadow_matrices_buffer, 0, @sizeOf(CameraMatrices), std.mem.asBytes(&self.shadow_matrices));
                        checkGLError();

                        gl.clear(gl.DEPTH_BUFFER_BIT);
                        gl.bindBufferBase(gl.UNIFORM_BUFFER, UBO.CameraMatrices.value(), self.shadow_matrices_buffer);

                        self.renderShadow(&light_frustum);
                    }
                },
            }
        }
    }

    // Light world space to view space
    for (lights_buf.lights[0..lights_buf.count]) |*light| {
        light.pos = self.camera.view_mat.mulByVec4(light.pos);
    }

    var width: c_int = 0;
    var height: c_int = 0;
    c.SDL_GL_GetDrawableSize(globals.g_init.window, &width, &height);

    // Reallocate screen buffers on window resize.
    if (width != self.screen_tex_size.x() or height != self.screen_tex_size.y()) {
        self.updateScreenBufferSize(width, height);
    }

    // Re-attach mip 0 (the bloom pass below attaches other mips).
    gl.namedFramebufferTexture(self.screen_fbo, gl.COLOR_ATTACHMENT0, self.screen_color_texture, 0);
    gl.namedFramebufferTexture(self.screen_fbo, gl.DEPTH_ATTACHMENT, self.screen_depth_texture, 0);

    if (gl.checkNamedFramebufferStatus(self.screen_fbo, gl.DRAW_FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
        checkGLError();
        @panic("Framebuffer incomplete");
    }

    gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, self.screen_fbo);

    gl.viewport(0, 0, width, height);
    gl.clearColor(0.0, 0.0, 0.0, 1.0);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

    // Write this frame's camera matrices into its persistently-mapped UBO slot
    // and bind that slot (fence in begin() guarantees the GPU is done with it).
    {
        const camera_matrix: *CameraMatrices = @alignCast(@ptrCast(self.camera_matrices[self.tripple_buffer_index * self.uboAlignedSizeOf(CameraMatrices) ..].ptr));
        camera_matrix.* = .{
            .projection = camera_projection,
            .view = self.camera.view_mat,
        };

        //gl.flushMappedNamedBufferRange(self.camera_ubo, idx * @sizeOf(CameraMatrices), @sizeOf(CameraMatrices));
        gl.bindBufferRange(
            gl.UNIFORM_BUFFER,
            UBO.CameraMatrices.value(),
            self.camera_ubo,
            self.tripple_buffer_index * self.uboAlignedSizeOf(CameraMatrices),
            @intCast(self.uboAlignedSizeOf(CameraMatrices)),
        );
        checkGLError();
    }

    // Main mesh pass.
    gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.mesh).program);
    gl.bindVertexArray(self.mesh_vao);

    var rendered_count: usize = 0;
    for (self.command_buffer[0..self.command_count]) |*cmd| {
        const mesh = self.assetman.resolveMesh(cmd.mesh);
        const aabb = math.AABB.fromMinMax(mesh.aabb.min, mesh.aabb.max);

        // Frustum-cull against the (possibly frozen) camera frustum.
        if (!self.world_camera_frustum.intersectAABB(aabb.transform(cmd.transform))) {
            continue;
        }
        rendered_count += 1;

        const material: Material = if (cmd.material_override) |mat| mat else mesh.material;

        gl.uniformMatrix4fv(Uniform.ModelMatrix.value(), 1, gl.FALSE, @ptrCast(&cmd.transform.data));
        // Albedo: constant color + bindless texture + UV scale.
        {
            gl.uniform3fv(Uniform.Color.value(), 1, @ptrCast(&material.albedo.data));

            const albedo_map = self.assetman.resolveTexture(material.albedo_map);
            gl.GL_ARB_bindless_texture.uniformHandleui64ARB(
                Uniform.AlbedoMap.value(),
                albedo_map.handle,
            );
            gl.uniform2fv(Uniform.AlbedoMapUVScale.value(), 1, @ptrCast(&albedo_map.uv_scale.data));
        }
        // Normal map.
        {
            const normal_map = self.assetman.resolveTexture(material.normal_map);
            gl.GL_ARB_bindless_texture.uniformHandleui64ARB(
                Uniform.NormalMap.value(),
                normal_map.handle,
            );
            gl.uniform2fv(Uniform.NormalMapUVScale.value(), 1, @ptrCast(&normal_map.uv_scale.data));
        }
        // Metallic: scalar + map.
        {
            gl.uniform1fv(Uniform.Metallic.value(), 1, &material.metallic);

            const metallic_map = self.assetman.resolveTexture(material.metallic_map);
            gl.GL_ARB_bindless_texture.uniformHandleui64ARB(
                Uniform.MetallicMap.value(),
                metallic_map.handle,
            );
            gl.uniform2fv(Uniform.MetallicMapUVScale.value(), 1, @ptrCast(&metallic_map.uv_scale.data));
        }
        // Roughness: scalar + map.
        {
            gl.uniform1fv(Uniform.Roughness.value(), 1, &material.roughness);

            const roughness_map = self.assetman.resolveTexture(material.roughness_map);
            gl.GL_ARB_bindless_texture.uniformHandleui64ARB(
                Uniform.RoughnessMap.value(),
                roughness_map.handle,
            );
            gl.uniform2fv(Uniform.RoughnessMapUVScale.value(), 1, @ptrCast(&roughness_map.uv_scale.data));
        }
        // Emission: color + map.
        {
            gl.uniform3fv(Uniform.Emission.value(), 1, @ptrCast(&material.emission.data));

            const emission_map = self.assetman.resolveTexture(material.emission_map);
            gl.GL_ARB_bindless_texture.uniformHandleui64ARB(
                Uniform.EmissionMap.value(),
                emission_map.handle,
            );
            gl.uniform2fv(Uniform.EmissionMapUVScale.value(), 1, @ptrCast(&emission_map.uv_scale.data));
        }
        gl.GL_ARB_bindless_texture.uniformHandleui64ARB(Uniform.ShadowMap2D.value(), self.shadow_texture_handle);
        gl.GL_ARB_bindless_texture.uniformHandleui64ARB(Uniform.ShadowMapCube.value(), self.cube_shadow_texture_handle);

        mesh.positions.bind(Render.Attrib.Position.value());
        mesh.normals.bind(Render.Attrib.Normal.value());
        mesh.tangents.bind(Render.Attrib.Tangent.value());
        mesh.uvs.bind(Render.Attrib.UV.value());
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, mesh.indices.buffer);
        gl.drawElements(
            gl.TRIANGLES,
            mesh.indices.count,
            mesh.indices.type,
            @ptrFromInt(mesh.indices.offset),
        );
    }

    // Debug stuff
    {
        gl.polygonMode(gl.FRONT_AND_BACK, gl.LINE);
        defer gl.polygonMode(gl.FRONT_AND_BACK, gl.FILL);
        gl.lineWidth(4);

        // Frustum debug stuff, drawn only when view frustum is fixed
        if (!self.update_view_frustum) {
            gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.unlit).program);

            // Draw wire frustum cubes
            {
                const mesh = self.assetman.resolveMesh(a.Meshes.cube.Cube);
                mesh.positions.bind(Render.Attrib.Position.value());
                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, mesh.indices.buffer);
                gl.uniform3fv(Uniform.Color.value(), 1, @ptrCast(&Vec3.one().data));

                // Maps the unit cube onto the [0,1]-depth NDC box.
                const model = Mat4.fromTranslate(Vec3.new(0, 0, 0.5)).mul(Mat4.fromScale(Vec3.new(1, 1, 0.5)));

                const view_proj_matrices = [_]Mat4{ self.camera_view_proj, dir_view_proj_mat };

                for (view_proj_matrices) |frustum_view_proj| {
                    // Inverse view-proj takes the NDC box back to that frustum's world shape.
                    const frustum_model_mat = frustum_view_proj.inv().mul(model);
                    gl.uniformMatrix4fv(Uniform.ModelMatrix.value(), 1, gl.FALSE, @ptrCast(&frustum_model_mat.data));
                    gl.drawElements(
                        gl.TRIANGLES,
                        mesh.indices.count,
                        mesh.indices.type,
                        @ptrFromInt(mesh.indices.offset),
                    );
                }
            }
            // Draw corner positions of view frustum
            {
                const mesh = self.assetman.resolveMesh(a.Meshes.sphere.Icosphere);
                mesh.positions.bind(Attrib.Position.value());
                mesh.indices.bind();

                gl.uniform3fv(Uniform.Color.value(), 1, @ptrCast(&Vec3.new(1, 0, 0).data));

                for (self.world_view_frustum_corners) |corner| {
                    const model = Mat4.fromTranslate(corner);
                    gl.uniformMatrix4fv(Uniform.ModelMatrix.value(), 1, gl.FALSE, @ptrCast(&model.data));
                    gl.drawElements(gl.TRIANGLES, mesh.indices.count, mesh.indices.type, @ptrFromInt(mesh.indices.offset));
                }
            }
        }
    }

    //std.log.debug("Total draws {}, frustum culled draws {}\n", .{ self.command_count, rendered_count });

    gl.disable(gl.DEPTH_TEST);
    gl.bindVertexArray(self.post_process_vao); // shared for all post process shaders

    const quad = self.assetman.resolveMesh(a.Meshes.quad.Plane);
    // Bind quad
    {
        quad.positions.bind(Render.Attrib.Position.value());
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, quad.indices.buffer);
    }

    // Bloom pass
    {
        gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, self.screen_fbo);

        gl.bindTextureUnit(0, self.screen_color_texture);
        gl.bindSampler(0, self.screen_bloom_sampler);
        defer gl.bindSampler(0, 0);

        // Downsample and filter
        // (each mip is rendered from the previous one; texture is simultaneously
        // sampled at src mip and written at dst mip — distinct levels, so legal)
        {
            gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.bloom_downsample).program);

            for (1..self.screen_mip_count) |dst_mip_level| {
                const src_mip_level = dst_mip_level - 1;
                gl.namedFramebufferTexture(self.screen_fbo, gl.COLOR_ATTACHMENT0, self.screen_color_texture, @intCast(dst_mip_level));
                const size = getMipSize(self.screen_tex_size.x(), self.screen_tex_size.y(), dst_mip_level);
                gl.viewport(0, 0, size.x(), size.y());
                gl.uniform1i(Uniform.SRCMipLevel.value(), @intCast(src_mip_level));

                gl.drawElements(
                    gl.TRIANGLES,
                    quad.indices.count,
                    quad.indices.type,
                    @ptrFromInt(quad.indices.offset),
                );
            }
        }

        // Upsample
        // (additive blend back up the chain; the final blend into mip 0 is
        // scaled down to keep bloom subtle)
        {
            gl.enable(gl.BLEND);
            defer gl.disable(gl.BLEND);
            gl.blendFunc(gl.ONE, gl.ONE);

            gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.bloom_upsample).program);

            var src_mip_level = self.screen_mip_count - 1;
            while (src_mip_level > 0) : (src_mip_level -= 1) {
                const dst_mip_level = src_mip_level - 1;
                gl.namedFramebufferTexture(self.screen_fbo, gl.COLOR_ATTACHMENT0, self.screen_color_texture, @intCast(dst_mip_level));
                const size = getMipSize(self.screen_tex_size.x(), self.screen_tex_size.y(), dst_mip_level);
                gl.viewport(0, 0, size.x(), size.y());
                gl.uniform1i(Uniform.SRCMipLevel.value(), @intCast(src_mip_level));
                gl.uniform1f(Uniform.BloomStrength.value(), if (dst_mip_level == 0) 0.04 else 1);

                gl.drawElements(
                    gl.TRIANGLES,
                    quad.indices.count,
                    quad.indices.type,
                    @ptrFromInt(quad.indices.offset),
                );
            }
        }
    }

    // Final post processing pass
    {
        gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, 0);
        //gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT);
        gl.viewport(0, 0, width, height);

        gl.useProgram(self.assetman.resolveShaderProgram(a.ShaderPrograms.shaders.post_process).program);

        gl.bindTextureUnit(0, self.screen_color_texture);
        defer gl.bindTextureUnit(0, 0);

        gl.drawElements(
            gl.TRIANGLES,
            quad.indices.count,
            quad.indices.type,
            @ptrFromInt(quad.indices.offset),
        );
    }

    // Fence this slot so begin() can wait before the CPU reuses its UBO memory.
    self.gl_fences[self.tripple_buffer_index] = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
    c.SDL_GL_SwapWindow(ginit.window);
    //c.SDL_Delay(1);
}
|
|
|
|
/// Effective range of a point light: the distance at which its brightest
/// channel falls below the `cutoff` threshold, scaled by the light's radius.
pub fn pointLightRange(self: *const PointLight) f32 {
    // Use the strongest color channel as the light's intensity.
    const intensity = @max(self.color.x(), self.color.y(), self.color.z());
    const cutoff = 0.005;

    return self.radius * (@sqrt(intensity / cutoff) - 1);
}
|
|
|
|
/// Camera orientation for rendering one cube map face.
const CubeCameraDir = struct {
    // GL cube map face enum this entry corresponds to.
    face: gl.GLenum,
    // Direction the face camera looks (added to the light position).
    target: Vec3,
    // Up vector for the face camera's lookAt.
    up: Vec3,
};

// One entry per cube map face, in GL face order (+X, -X, +Y, -Y, +Z, -Z),
// used by the point-light shadow pass in finish().
const cube_camera_dirs = [6]CubeCameraDir{
    .{
        .face = gl.TEXTURE_CUBE_MAP_POSITIVE_X,
        .target = Vec3.right(),
        .up = Vec3.down(),
    },
    .{
        .face = gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
        .target = Vec3.left(),
        .up = Vec3.down(),
    },
    .{
        .face = gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
        .target = Vec3.up(),
        .up = Vec3.forward(),
    },
    .{
        .face = gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
        .target = Vec3.down(),
        .up = Vec3.back(),
    },
    .{
        .face = gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
        .target = Vec3.forward(),
        .up = Vec3.down(),
    },
    .{
        .face = gl.TEXTURE_CUBE_MAP_NEGATIVE_Z,
        .target = Vec3.back(),
        .up = Vec3.down(),
    },
};
|
|
|
|
/// Draws every recorded command that intersects `frustum` into the currently
/// bound shadow framebuffer. Expects the shadow VAO, shadow program, and
/// shadow camera UBO to already be bound by the caller.
fn renderShadow(self: *Render, frustum: *const math.Frustum) void {
    const commands = self.command_buffer[0..self.command_count];

    for (commands) |*cmd| {
        const mesh = self.assetman.resolveMesh(cmd.mesh);
        // Cull meshes whose world-space AABB lies outside the light frustum.
        const world_aabb = math.AABB.fromMinMax(mesh.aabb.min, mesh.aabb.max).transform(cmd.transform);
        if (!frustum.intersectAABB(world_aabb)) continue;

        gl.uniformMatrix4fv(Uniform.ModelMatrix.value(), 1, gl.FALSE, @ptrCast(&cmd.transform.data));
        // Depth-only pass: positions are the only attribute the shader reads.
        mesh.positions.bind(Render.Attrib.Position.value());
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, mesh.indices.buffer);

        gl.drawElements(gl.TRIANGLES, mesh.indices.count, mesh.indices.type, @ptrFromInt(mesh.indices.offset));
    }
}
|
|
|
|
/// Drains the GL error queue, logging a human-readable name for every pending
/// error. Returns immediately when the queue is empty.
pub fn checkGLError() void {
    while (true) {
        const err = gl.getError();
        if (err == gl.NO_ERROR) return;

        const name = switch (err) {
            gl.INVALID_ENUM => "invalid enum",
            gl.INVALID_VALUE => "invalid value",
            gl.INVALID_OPERATION => "invalid operation",
            gl.STACK_OVERFLOW => "stack overflow",
            gl.STACK_UNDERFLOW => "stack underflow",
            gl.OUT_OF_MEMORY => "out of memory",
            gl.INVALID_FRAMEBUFFER_OPERATION => "invalid framebuffer operation",
            else => "unknown error",
        };

        std.log.scoped(.OpenGL).err("OpenGL Failure: {s}\n", .{name});
    }
}
|
|
|
|
/// A single mesh submission recorded via draw() and consumed by finish().
pub const DrawCommand = struct {
    mesh: AssetManager.Handle.Mesh,
    // When set, replaces the mesh's own material for this draw.
    material_override: ?Material,
    // Model-to-world transform.
    transform: Mat4,
};

/// Vertex attribute locations shared by all shader programs.
/// Note: attribute location and vertex buffer binding index differ for
/// UV/Tangent (see the VAO setup in init()).
pub const Attrib = enum(gl.GLuint) {
    Position = 0,
    Normal = 1,
    UV = 2,
    Tangent = 3,

    // Numeric location for use in gl* calls.
    pub inline fn value(self: Attrib) gl.GLuint {
        return @intFromEnum(self);
    }
};

/// Uniform buffer binding points shared by all shader programs.
pub const UBO = enum(gl.GLuint) {
    CameraMatrices = 0,
    PointLights = 1,

    // Numeric binding index for use in gl* calls.
    pub inline fn value(self: UBO) gl.GLuint {
        return @intFromEnum(self);
    }
};
|
|
|
|
/// Explicit uniform locations shared across all shader programs
/// (must match the layout(location=...) declarations in the shaders).
pub const Uniform = enum(gl.GLint) {
    ModelMatrix = 1,
    Color = 2,
    AlbedoMap = 3,
    AlbedoMapUVScale = 4,
    NormalMap = 5,
    NormalMapUVScale = 6,
    Metallic = 7,
    MetallicMap = 8,
    MetallicMapUVScale = 9,
    Roughness = 10,
    RoughnessMap = 11,
    RoughnessMapUVScale = 12,
    Emission = 13,
    EmissionMap = 14,
    EmissionMapUVScale = 15,

    // Bindless shadow map handles.
    ShadowMap2D = 16,
    ShadowMapCube = 17,

    NearFarPlanes = 18, // vec2 stores near and far planes for perspective projection

    // Bloom
    SRCMipLevel = 19,
    BloomStrength = 20,

    // Numeric location for use in gl* calls.
    pub inline fn value(self: Uniform) gl.GLint {
        return @intFromEnum(self);
    }
};
|
|
|
|
// TODO: support ortho
/// Perspective camera parameters plus a world-to-view matrix maintained by the caller.
pub const Camera = struct {
    // Vertical field of view in degrees.
    fovy: f32 = 60,
    aspect: f32 = 1,
    near: f32 = 0.1,
    far: f32 = 10,

    view_mat: Mat4 = Mat4.identity(),

    // Builds the perspective projection matrix from the current parameters.
    pub fn projection(self: *const Camera) Mat4 {
        return math.perspective(self.fovy, self.aspect, self.near, self.far);
    }
};

// Should be std140
const CameraMatrices = extern struct {
    projection: Mat4 = Mat4.identity(),
    view: Mat4 = Mat4.identity(),
};

/// GPU-side light record (std140-compatible layout, mirrored in the shaders).
pub const Light = extern struct {
    pos: Vec4, // x, y, z, w - vPos
    color_radius: Vec4, // x, y, z - color, w - radius
    // World-to-light-clip matrix used for shadow lookups.
    shadow_vp: Mat4 = Mat4.identity(),
    // Near/far planes of the light's projection (point lights only).
    near_far: Vec2 = Vec2.zero(),
};

// TODO: rename
/// GPU-side array of lights uploaded to the PointLights UBO each frame.
pub const LightArray = extern struct {
    lights: [MAX_LIGHTS]Light,
    count: c_uint,
};

// Size of T rounded up to the UBO offset alignment, so per-frame copies of T
// can be bound at offsets inside a single buffer.
fn uboAlignedSizeOf(self: *const Render, comptime T: type) usize {
    return std.mem.alignForward(usize, @sizeOf(T), self.ubo_align);
}
|