mod middleware;
#[cfg(test)]
mod tests;
use std::sync::Arc;
use assets_manager::AssetCache;
use color_eyre::eyre::eyre;
use egui::{ClippedPrimitive, TexturesDelta};
use encase::ShaderType;
use winit::{dpi::PhysicalSize, window::Window};
use crate::{WINDOW_HEIGHT, WINDOW_WIDTH};
/// Top-level GPU presentation state for a single window: owns the wgpu
/// instance plus lazily-created adapter and surface state.
pub struct Screen {
// Entry point to wgpu; used to (re)create adapters and surfaces.
instance: wgpu::Instance,
// `None` until an adapter has been requested (in `new` or when a surface
// forces recreation in `acknoledge_surface`).
adapter_state: Option<AdapterState>,
// `None` until `create_surface` succeeds.
surface_state: Option<SurfaceState>,
// Current window size in physical pixels (or the assumed default size).
size: PhysicalSize<u32>,
// A resize that arrived before the surface existed; applied once
// `create_surface` completes.
queued_size: Option<PhysicalSize<u32>>,
window: Arc<Window>,
}
/// The wgpu adapter together with the device/queue pair created from it.
struct AdapterState {
adapter: wgpu::Adapter,
device: wgpu::Device,
queue: wgpu::Queue,
}
/// Everything tied to the lifetime of a configured window surface.
struct SurfaceState {
surface: wgpu::Surface<'static>,
config: wgpu::SurfaceConfiguration,
// egui UI renderer targeting the surface format.
egui: egui_wgpu::Renderer,
// Glyph atlas for text drawn into the offscreen world texture.
world_atlas: glyphon::TextAtlas,
// Glyph atlas for text drawn directly to the surface (camera space).
camera_atlas: glyphon::TextAtlas,
world_renderer: middleware::WorldRenderer,
}
/// Half-extents of the world region; the offscreen world texture resolution
/// is twice this per axis (see the world `glyphon::Resolution` in `render`).
pub const WORLD_HALF_EXTENTS: glam::UVec2 = glam::UVec2::new(1024, 1024);
/// Camera over the world texture: an affine transform plus a target view
/// width/height used for aspect correction and the view-projection matrix.
#[derive(Debug)]
pub struct Camera {
// Camera placement (rotation + translation), consumed by
// `build_view_projection_matrix`.
pub transform: glam::Affine3A,
// Target view width (defaults to 800.0).
pub width: f32,
// Target view height (defaults to 600.0).
pub height: f32,
// Last window size seen by `update_aspect`; used to skip recomputation.
cached_window_size: Option<winit::dpi::PhysicalSize<u32>>,
}
impl Default for Camera {
fn default() -> Self {
Self {
transform: glam::Affine3A::default(),
width: 800.0,
height: 600.0,
cached_window_size: None,
}
}
}
impl Camera {
    /// Maximum tolerated difference between the window aspect ratio and the
    /// target aspect ratio before letterbox/pillarbox correction kicks in.
    const ASPECT_DEVIATION: f32 = 0.1;

    /// Recomputes the per-axis aspect-correction scale when `window_size`
    /// changed since the last call.
    ///
    /// Returns `None` when the cached size matches (nothing to do), otherwise
    /// `Some((sx, sy))` where the over-long axis is shrunk by the reciprocal
    /// of the aspect-ratio mismatch.
    fn update_aspect(&mut self, window_size: PhysicalSize<u32>) -> Option<(f32, f32)> {
        if self
            .cached_window_size
            .is_some_and(|cws| cws == window_size)
        {
            return None;
        } else {
            self.cached_window_size = Some(window_size);
        }
        let window_aspect_ratio = window_size.width as f32 / window_size.height as f32;
        let target_aspect_ratio = self.width / self.height;
        if (window_aspect_ratio - target_aspect_ratio).abs() <= Self::ASPECT_DEVIATION {
            // Close enough: no correction needed.
            Some((1.0, 1.0))
        } else if window_aspect_ratio < target_aspect_ratio {
            // Window is narrower than the target: shrink the vertical axis.
            let correction = target_aspect_ratio / window_aspect_ratio;
            // Fixed: this branch previously logged `>` even though the
            // condition is `window < target`.
            log::warn!(
                "Aspect difference: {window_aspect_ratio} < {target_aspect_ratio} c {correction}"
            );
            Some((1.0, correction.recip()))
        } else {
            // Window is wider than the target: shrink the horizontal axis.
            let correction = window_aspect_ratio / target_aspect_ratio;
            // Fixed: this branch previously logged `<` even though the
            // condition is `window > target`.
            log::warn!(
                "Aspect difference: {window_aspect_ratio} > {target_aspect_ratio} c {correction}"
            );
            Some((correction.recip(), 1.0))
        }
    }

    /// Builds the combined view-projection matrix: a left-handed look-to view,
    /// a rotation pivoted around a view-size-derived offset, and a left-handed
    /// orthographic projection spanning the world half-extents.
    ///
    /// NOTE(review): the exact matrix composition (including the trailing
    /// width/height scale) is preserved verbatim from the original — it is
    /// order-sensitive, so only documentation was added here.
    pub(crate) fn build_view_projection_matrix(&self) -> glam::Mat4 {
        let view = glam::Affine3A::look_to_lh(
            glam::Vec3::new(-1.0, -1.0, 0.0),
            glam::Vec3::NEG_Z,
            glam::Vec3::NEG_Y,
        ) * glam::Affine3A::from_translation(self.transform.translation.into());
        let view_vector = glam::Vec2::new(self.width, self.height)
            * WORLD_HALF_EXTENTS.as_vec2().recip() * 0.25;
        // Rotate about the pivot `view_vector`: translate to pivot, rotate,
        // translate back.
        let rotation = glam::Affine3A::from_mat3_translation(
            self.transform.matrix3.into(),
            view_vector.extend(0.0),
        ) * glam::Affine3A::from_translation(-view_vector.extend(0.0));
        let proj = glam::Mat4::orthographic_lh(
            -1.0 * WORLD_HALF_EXTENTS.x as f32,
            1.0 * WORLD_HALF_EXTENTS.x as f32,
            -1.0 * WORLD_HALF_EXTENTS.y as f32,
            1.0 * WORLD_HALF_EXTENTS.y as f32,
            0.0,
            100.0,
        );
        // Flip Y and halve to map into the world texture's coordinate frame.
        let world_to_camera = glam::Affine3A::from_scale(glam::Vec3::new(0.5, -0.5, 1.0));
        world_to_camera
            * view
            * rotation
            * proj
            * glam::Affine3A::from_scale(glam::Vec3::new(self.width, self.height, 2.0) * 0.5)
    }
}
/// GPU-visible render parameters, laid out as a shader uniform via `encase`.
#[derive(ShaderType)]
pub struct RenderOptions {
// Gamma value (default 1.0); presumably applied by the world shader —
// TODO confirm against the WGSL source.
// `#[size(16)]` pads the field to 16 bytes for uniform-buffer alignment.
#[size(16)]
pub gamma: f32,
// Explicit padding keeping the CPU-side struct layout in sync with the GPU.
_padding: glam::Vec3,
}
impl Default for RenderOptions {
fn default() -> Self {
Self {
gamma: 1.0,
_padding: Default::default(),
}
}
}
/// Per-frame inputs consumed by `Screen::render`.
pub struct RenderInput<'a> {
// egui texture changes to upload (set) and release (free) this frame.
pub textures_delta: &'a TexturesDelta,
// Tessellated egui primitives to draw.
pub paint_jobs: &'a [ClippedPrimitive],
pub font_system: &'a mut glyphon::FontSystem,
pub swash_cache: &'a mut glyphon::SwashCache,
// Text areas rendered in camera (screen) space, directly to the surface.
pub camera_text_data: Vec<glyphon::TextArea<'a>>,
// Text areas rendered into the offscreen world texture.
pub world_text_data: Vec<glyphon::TextArea<'a>>,
pub camera: &'a mut Camera,
pub render_options: &'a RenderOptions,
}
/// Failures that `Screen::render` can return.
#[derive(thiserror::Error, Debug)]
pub enum RenderError {
/// Acquiring the swapchain texture failed.
#[error(transparent)]
Wgpu(#[from] wgpu::SurfaceError),
/// Text rendering failed.
// NOTE(review): variant name is missing a "y" ("Glyphon"); renaming would
// break any exhaustive matches in callers, so it is left as-is.
#[error(transparent)]
Glphon(#[from] glyphon::RenderError),
/// `render` was called before the surface/adapter state existed.
#[error("missing render state")]
MissingRenderState,
}
impl Screen {
/// Ensures `adapter_state` holds an adapter (plus device/queue) capable of
/// presenting to `surface`, creating one when missing or incompatible.
///
/// NOTE(review): the method name is missing a "w" ("acknowledge"); renaming
/// is left for a dedicated change since it is called elsewhere in this file.
///
/// # Errors
/// Fails when no compatible adapter can be found or device creation fails.
async fn acknoledge_surface(
    &mut self,
    surface: &wgpu::Surface<'static>,
) -> color_eyre::Result<()> {
    // Fast path: the existing adapter already supports this surface.
    if self
        .adapter_state
        .as_ref()
        .is_some_and(|ads| ads.adapter.is_surface_supported(surface))
    {
        return Ok(());
    }
    log::warn!("(Re)creating adapter to support {surface:?}");
    let adapter = self
        .instance
        .request_adapter(&wgpu::RequestAdapterOptions {
            power_preference: wgpu::PowerPreference::default(),
            compatible_surface: Some(surface),
            force_fallback_adapter: false,
        })
        .await
        .ok_or_else(|| eyre!("failed to find an adapter for surface"))?;
    // Backend-appropriate limits. Fixed: the feature checks previously
    // tested `webgl` before `webgpu` here, the opposite of `Screen::new`,
    // so the two paths could pick different limits when both features are
    // enabled; `webgpu` now takes precedence in both.
    let required_limits = if cfg!(target_arch = "wasm32") {
        if cfg!(feature = "webgpu") {
            wgpu::Limits::downlevel_defaults()
        } else if cfg!(feature = "webgl") {
            wgpu::Limits::downlevel_webgl2_defaults()
        } else {
            unreachable!("for `web` feature, you must also enable a backend feature: `webgpu`, `webgl`")
        }
    } else {
        wgpu::Limits::default()
    };
    let (device, queue) = adapter
        .request_device(
            &wgpu::DeviceDescriptor {
                required_features: wgpu::Features::empty(),
                required_limits,
                label: None,
            },
            None,
        )
        .await?;
    self.adapter_state = Some(AdapterState {
        adapter,
        device,
        queue,
    });
    Ok(())
}
/// Creates and configures a surface for the window, building all
/// surface-dependent renderers (egui, glyphon atlases, world renderer) and
/// applying any resize that was queued before the surface existed.
///
/// # Errors
/// Fails on surface creation, adapter/device acquisition, missing surface
/// capabilities, or world-renderer asset loading.
pub async fn create_surface<S: assets_manager::source::Source>(
&mut self,
assets: &AssetCache<S>,
) -> color_eyre::Result<()> {
let surface = self.instance.create_surface(self.window.clone())?;
// Guarantees `adapter_state` is populated below.
self.acknoledge_surface(&surface).await?;
let Some(adapter_state) = self.adapter_state.as_ref() else {
// NOTE(review): message spells "acknowledge" while the method is named
// `acknoledge_surface` — the method name has the typo.
unreachable!("adapter_state should have been created by `Screen::acknowledge_surface`");
};
let surface_caps = surface.get_capabilities(&adapter_state.adapter);
// Prefer a non-sRGB format, falling back to the first supported one;
// presumably gamma is handled manually (see `RenderOptions::gamma`) —
// TODO confirm.
let surface_format =
surface_caps
.formats
.iter()
.copied()
.find(|f| !f.is_srgb())
.unwrap_or(surface_caps.formats.first().copied().ok_or_else(|| {
eyre!("No surface formats found for given surface and adapter")
})?);
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: surface_format,
width: self.size.width,
height: self.size.height,
// Vsync off by default; toggled later via `Screen::vsync`.
present_mode: wgpu::PresentMode::AutoNoVsync,
desired_maximum_frame_latency: 2,
alpha_mode: surface_caps
.alpha_modes
.first()
.copied()
.ok_or_else(|| eyre!("no alpha modes found for given surface and adapter"))?,
view_formats: vec![],
};
surface.configure(&adapter_state.device, &config);
// HDR-capable format for the offscreen world texture and its text atlas.
let world_texture_format = wgpu::TextureFormat::Rgba16Float;
let egui = egui_wgpu::Renderer::new(&adapter_state.device, surface_format, None, 1);
let world_atlas = glyphon::TextAtlas::new(
&adapter_state.device,
&adapter_state.queue,
world_texture_format,
);
let camera_atlas =
glyphon::TextAtlas::new(&adapter_state.device, &adapter_state.queue, surface_format);
let world_renderer = middleware::WorldRenderer::new(
&adapter_state.device,
config.format,
world_texture_format,
assets,
)?;
self.surface_state = Some(SurfaceState {
surface,
config,
egui,
world_atlas,
camera_atlas,
world_renderer,
});
// Apply a resize that arrived before the surface existed.
if let Some(queued_size) = self.queued_size.take() {
self.resize(queued_size);
}
Ok(())
}
/// Creates a `Screen` for `window`, eagerly requesting an adapter and device
/// when possible. The surface itself is created later via `create_surface`.
///
/// # Errors
/// Fails only if device creation fails on an otherwise usable adapter;
/// a missing adapter is tolerated (retried in `acknoledge_surface`).
pub async fn new(window: Window) -> color_eyre::Result<Self> {
// Fall back to the compile-time default size when the window reports a
// zero dimension (e.g. before it has been laid out).
let size = {
let actual_size = window.inner_size();
if actual_size.width == 0 || actual_size.height == 0 {
let assumed_size = PhysicalSize::new(WINDOW_WIDTH, WINDOW_HEIGHT);
window.set_min_inner_size(Some(assumed_size));
assumed_size
} else {
actual_size
}
};
let window = Arc::new(window);
let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
backends: wgpu::Backends::all(),
..Default::default()
});
// Best-effort request with no compatible surface yet; `None` is fine —
// `acknoledge_surface` will retry with a concrete surface later.
let adapter = instance
.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::default(),
compatible_surface: None,
force_fallback_adapter: false,
})
.await;
let adapter_state = if let Some(adapter) = adapter {
// On wasm, pick limits matching whichever backend feature is enabled.
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
required_features: wgpu::Features::empty(), required_limits: if cfg!(target_arch = "wasm32") {
if cfg!(feature = "webgpu") {
wgpu::Limits::downlevel_defaults()
} else if cfg!(feature = "webgl") {
wgpu::Limits::downlevel_webgl2_defaults()
} else {
unreachable!("for `web` feature, you must also enable a backend feature: `webgpu`, `webgl`")
}
} else {
wgpu::Limits::default()
},
label: None,
},
None,
)
.await?;
Some(AdapterState {
adapter,
device,
queue,
})
} else {
None
};
Ok(Self {
instance,
size,
queued_size: None,
window,
adapter_state,
surface_state: None,
})
}
/// Borrows the underlying winit window.
pub fn window(&self) -> &Window {
    self.window.as_ref()
}
/// Applies a new physical size. If the surface exists it is reconfigured
/// immediately; otherwise the size is queued and applied by
/// `create_surface`. Zero-sized updates are ignored.
pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
    if new_size.width == 0 || new_size.height == 0 {
        return;
    }
    match self.surface_state.as_mut().zip(self.adapter_state.as_ref()) {
        Some((surface_state, adapter_state)) => {
            self.size = new_size;
            surface_state.config.width = new_size.width;
            surface_state.config.height = new_size.height;
            surface_state
                .surface
                .configure(&adapter_state.device, &surface_state.config);
        }
        None => self.queued_size = Some(new_size),
    }
}
/// Re-applies the current surface configuration (useful after the surface
/// is lost/outdated); a no-op when surface or adapter is missing.
pub fn reconfigure(&self) {
    let Some((surface_state, adapter_state)) =
        self.surface_state.as_ref().zip(self.adapter_state.as_ref())
    else {
        return;
    };
    surface_state
        .surface
        .configure(&adapter_state.device, &surface_state.config);
}
/// Whether the surface is currently configured with vsync; `false` when no
/// surface exists yet.
pub fn is_vsync(&self) -> bool {
    self.surface_state
        .as_ref()
        .is_some_and(|state| match state.config.present_mode {
            wgpu::PresentMode::AutoVsync => true,
            wgpu::PresentMode::AutoNoVsync => false,
            // Only the two Auto modes are ever written (see `vsync` and
            // `create_surface`).
            _ => unreachable!(),
        })
}
/// Switches the surface between `AutoVsync` and `AutoNoVsync` presentation
/// and reconfigures it; a no-op when surface or adapter is missing.
pub fn vsync(&mut self, vsync: bool) {
    let Some((surface_state, adapter_state)) =
        self.surface_state.as_mut().zip(self.adapter_state.as_ref())
    else {
        return;
    };
    surface_state.config.present_mode = if vsync {
        wgpu::PresentMode::AutoVsync
    } else {
        wgpu::PresentMode::AutoNoVsync
    };
    surface_state
        .surface
        .configure(&adapter_state.device, &surface_state.config);
}
pub fn render(&mut self, input: RenderInput) -> Result<(), RenderError> {
puffin::profile_function!();
let Some((surface_state, adapter_state)) =
self.surface_state.as_mut().zip(self.adapter_state.as_ref())
else {
return Err(RenderError::MissingRenderState);
};
let output = surface_state.surface.get_current_texture()?;
let view = output
.texture
.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder =
adapter_state
.device
.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("Render Encoder"),
});
let screen_descriptor = egui_wgpu::ScreenDescriptor {
size_in_pixels: self.size.into(),
pixels_per_point: self.window.scale_factor() as f32,
};
for (tex_id, delta) in &input.textures_delta.set {
surface_state.egui.update_texture(
&adapter_state.device,
&adapter_state.queue,
*tex_id,
delta,
);
}
let egui_commands = surface_state.egui.update_buffers(
&adapter_state.device,
&adapter_state.queue,
&mut encoder,
input.paint_jobs,
&screen_descriptor,
);
let world_resolution = glyphon::Resolution {
width: WORLD_HALF_EXTENTS.x * 2,
height: WORLD_HALF_EXTENTS.y * 2,
};
let prepared_world_text_data: Vec<_> = input.world_text_data;
let mut world_glyphon = glyphon::TextRenderer::new(
&mut surface_state.world_atlas,
&adapter_state.device,
wgpu::MultisampleState::default(),
None,
);
match world_glyphon.prepare(
&adapter_state.device,
&adapter_state.queue,
input.font_system,
&mut surface_state.world_atlas,
world_resolution,
prepared_world_text_data.clone(),
input.swash_cache,
) {
Ok(()) => {}
Err(glyphon::PrepareError::AtlasFull) => {
log::error!("failed to render world level text, giving up...");
}
}
{
let mut world_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("World Render Pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: surface_state.world_renderer.world_texture_view(),
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.0,
g: 0.0,
b: 0.0,
a: 1.0,
}),
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
world_glyphon.render(&surface_state.world_atlas, &mut world_render_pass)?;
}
let scale_factor = self.window.scale_factor();
let screen_resolution = glyphon::Resolution {
width: self.size.width / scale_factor as u32,
height: self.size.height / scale_factor as u32,
};
let mut camera_glyphon = glyphon::TextRenderer::new(
&mut surface_state.camera_atlas,
&adapter_state.device,
wgpu::MultisampleState::default(),
None,
);
match camera_glyphon.prepare(
&adapter_state.device,
&adapter_state.queue,
input.font_system,
&mut surface_state.camera_atlas,
screen_resolution,
input.camera_text_data.clone(),
input.swash_cache,
) {
Ok(()) => {}
Err(glyphon::PrepareError::AtlasFull) => {
log::error!("failed to render camera level text, giving up...");
}
}
surface_state.world_renderer.prepare(
&adapter_state.device,
&adapter_state.queue,
&self.window,
input.camera,
input.render_options,
);
{
let mut final_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("Final Render Pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &view,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.15,
g: 0.1,
b: 0.1,
a: 1.0,
}),
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
surface_state.world_renderer.render(&mut final_render_pass);
camera_glyphon.render(&surface_state.camera_atlas, &mut final_render_pass)?;
surface_state
.egui
.render(&mut final_render_pass, input.paint_jobs, &screen_descriptor);
}
for tex_id in &input.textures_delta.free {
surface_state.egui.free_texture(tex_id);
}
adapter_state.queue.submit(
egui_commands
.into_iter()
.chain(std::iter::once(encoder.finish())),
);
output.present();
Ok(())
}
}