mod middleware;
#[cfg(test)]
mod tests;

use std::sync::Arc;

use color_eyre::eyre::eyre;
use egui::{ClippedPrimitive, TexturesDelta};
use encase::ShaderType;
use winit::{dpi::PhysicalSize, window::Window};

use crate::{document::Document, WINDOW_HEIGHT, WINDOW_WIDTH};

/// Owns everything needed to draw to the application window.
///
/// Both the adapter and surface are created asynchronously after
/// construction, so each is `None` until the corresponding setup step
/// (`Screen::new` / `Screen::create_surface`) has completed.
pub struct Screen {
    instance: wgpu::Instance,
    /// Adapter + device + queue; `None` until an adapter request succeeds
    adapter_state: Option<AdapterState>,
    /// Surface and per-surface renderers; `None` until `create_surface` runs
    surface_state: Option<SurfaceState>,
    /// Current framebuffer size, in physical pixels
    size: PhysicalSize<u32>,
    /// Used for valid resizes, that simply were dropped before we made a screen
    queued_size: Option<PhysicalSize<u32>>,
    window: Arc<Window>,
}

/// GPU handles that are independent of any particular surface.
///
/// Recreated by `Screen::acknoledge_surface` if the current adapter cannot
/// present to a newly created surface.
struct AdapterState {
    adapter: wgpu::Adapter,
    device: wgpu::Device,
    queue: wgpu::Queue,
}

/// State tied to one particular window surface (recreated with the surface).
struct SurfaceState {
    surface: wgpu::Surface<'static>,
    /// Current configuration (format, size, present mode) applied to `surface`
    config: wgpu::SurfaceConfiguration,

    // Middleware
    /// egui renderer targeting the surface format
    egui: egui_wgpu::Renderer,
    /// Glyph atlas for text rendered into the offscreen world texture
    world_atlas: glyphon::TextAtlas,
    /// Glyph atlas for text rendered directly to the surface
    camera_atlas: glyphon::TextAtlas,
    /// Draws the world texture (with camera adjustment) in the final pass
    world_renderer: middleware::DocumentRenderer,
}

/// Controls how the game world is displayed in the window
#[derive(Debug)]
pub struct Camera {
    /// Placement of the camera in the world (translation + rotation applied
    /// by `build_view_projection_matrix`)
    pub transform: glam::Affine3A,
    /// camera width (in game pixels)
    pub width: f32,
    /// camera height (in game pixels)
    pub height: f32,
    // World extents, used for text resolution and the orthographic projection
    world_width: u32,
    world_height: u32,

    // These are the width/height used to determine aspect; should be adjusted
    // for screen. `None` until `update_aspect` has observed a window size.
    cached_window_size: Option<winit::dpi::PhysicalSize<u32>>,
}

impl Default for Camera {
    fn default() -> Self {
        Self {
            transform: glam::Affine3A::default(),
            width: WINDOW_WIDTH as f32,
            height: WINDOW_HEIGHT as f32,
            world_width: 1024,
            world_height: 1024,
            cached_window_size: None,
        }
    }
}

impl Camera {
    /// The magnitude of deviation from our internal aspect ratio that we allow
    /// the window to have (before we start boxing the screen)
    // TODO add sidebars (pillar/letterboxes) and stuff?
    const ASPECT_DEVIATION: f32 = 0.1;

    /// Updates the camera for displaying to the given window
    ///
    /// Returns the amount of scaling the camera did in the (x, y) directions.
    /// `None` is returned if the window size (and therefore the scaling)
    /// did not change since the last call.
    // We create letter/postboxes by resizing the display canvas!!!!
    // damn that's cool
    fn update_aspect(&mut self, window_size: PhysicalSize<u32>) -> Option<(f32, f32)> {
        if self
            .cached_window_size
            .is_some_and(|cws| cws == window_size)
        {
            // If the window's size hasn't changed, then we don't need to update
            return None;
        }
        // Calculate for the new window size
        self.cached_window_size = Some(window_size);

        let window_aspect_ratio = window_size.width as f32 / window_size.height as f32;
        let target_aspect_ratio = self.width / self.height;
        if (window_aspect_ratio - target_aspect_ratio).abs() <= Self::ASPECT_DEVIATION {
            // Close enough to the target ratio: no boxing required
            Some((1.0, 1.0))
        } else if window_aspect_ratio < target_aspect_ratio {
            // window is not wide enough, scale to match width
            let correction = target_aspect_ratio / window_aspect_ratio;
            // BUGFIX: this branch means window < target; the log previously
            // printed `>` (the two messages were swapped)
            log::warn!(
                "Aspect difference: {window_aspect_ratio} < {target_aspect_ratio} c {correction}"
            );
            Some((1.0, correction.recip()))
        } else {
            // window is not tall enough, scale to match height
            let correction = window_aspect_ratio / target_aspect_ratio;
            log::warn!(
                "Aspect difference: {window_aspect_ratio} > {target_aspect_ratio} c {correction}"
            );
            Some((correction.recip(), 1.0))
        }
    }

    /// Builds the combined view-projection matrix mapping world coordinates
    /// to clip space, honoring the camera's translation and rotation.
    ///
    /// The math here is intricate and order-dependent; it is deliberately
    /// left exactly as written.
    // You don't know how much blood, sweat, and tears (BST)
    // I put into this fucking function when someone deeeefinitely
    // has figured this out
    //
    // Google is dead, long live Google
    pub(crate) fn build_view_projection_matrix(&self) -> glam::Mat4 {
        // Left-handed look transform, offset by the camera's translation
        let view = glam::Affine3A::look_to_lh(
            glam::Vec3::new(-1.0, -1.0, 0.0),
            glam::Vec3::NEG_Z,
            glam::Vec3::NEG_Y,
        ) * glam::Affine3A::from_translation(self.transform.translation.into());

        let world_extents = glam::Vec2::new(self.world_width as f32, self.world_height as f32);

        let view_vector = glam::Vec2::new(self.width, self.height)
            * world_extents.recip() // 1 / (GW / 2) = 2 / GW
            * 0.5;
        // VW * 2 / (GW * 4) = (VW / GW) * 1/2

        // rotate about the camera center
        let rotation = glam::Affine3A::from_mat3_translation(
            self.transform.matrix3.into(),
            view_vector.extend(0.0),
        ) * glam::Affine3A::from_translation(-view_vector.extend(0.0));

        let proj = glam::Mat4::orthographic_lh(
            -0.5 * self.world_width as f32,
            0.5 * self.world_width as f32,
            -0.5 * self.world_height as f32,
            0.5 * self.world_height as f32,
            // This part hasn't been tested, so take it with a grain of salt
            0.0,
            100.0,
        );

        let world_to_camera = glam::Affine3A::from_scale(glam::Vec3::new(0.5, -0.5, 1.0));

        world_to_camera
            * view
            * rotation
            * proj
            * glam::Affine3A::from_scale(glam::Vec3::new(self.width, self.height, 2.0) * 0.5)
    }
}

/// Information sent to the shader to configure rendering
#[derive(ShaderType)]
pub struct RenderOptions {
    /// Gamma correction factor applied by the shader
    // `#[size(16)]` is an encase attribute padding this field's size for
    // uniform-buffer layout.
    // NOTE(review): with the field already sized to 16 bytes, the explicit
    // `_padding` below may be redundant — confirm against the WGSL struct.
    #[size(16)]
    pub gamma: f32,
    _padding: glam::Vec3,
}

impl Default for RenderOptions {
    fn default() -> Self {
        Self {
            gamma: 0.4,
            _padding: Default::default(),
        }
    }
}

/// Everything `Screen::render` needs to draw one frame.
pub struct RenderInput<'a> {
    /// egui texture changes: `set` applied before drawing, `free` after
    pub textures_delta: &'a TexturesDelta,
    /// Tessellated egui primitives to draw in the final pass
    pub paint_jobs: &'a [ClippedPrimitive],
    pub font_system: &'a mut glyphon::FontSystem,
    pub swash_cache: &'a mut glyphon::SwashCache,
    /// Text drawn in screen space, composited in the final pass
    pub camera_text_data: Vec<glyphon::TextArea<'a>>,
    /// Text drawn into the offscreen world texture
    pub world_text_data: Vec<glyphon::TextArea<'a>>,
    pub camera: &'a mut Camera,
    pub render_options: &'a RenderOptions,
    pub document: &'a Document,
}

/// Failures that can occur in `Screen::render`.
#[derive(thiserror::Error, Debug)]
pub enum RenderError {
    /// The surface failed to provide the next frame's texture
    #[error(transparent)]
    Wgpu(#[from] wgpu::SurfaceError),
    // NOTE(review): "Glphon" looks like a typo for "Glyphon"; left as-is
    // because renaming a public variant would break callers matching on it
    #[error(transparent)]
    Glphon(#[from] glyphon::RenderError),
    /// Rendering was attempted before the surface/adapter were initialized
    #[error("missing render state")]
    MissingRenderState,
}

impl Screen {
    async fn acknoledge_surface(
        &mut self,
        surface: &wgpu::Surface<'static>,
    ) -> color_eyre::Result<()> {
        if self
            .adapter_state
            .as_ref()
            .is_some_and(|ads| ads.adapter.is_surface_supported(surface))
        {
            // Conserve work: we've already made a perfectly good adapter that supports the surface we want to draw to
            return Ok(());
        }

        log::warn!("(Re)creating adapter to support {surface:?}");

        let adapter = self
            .instance
            .request_adapter(&wgpu::RequestAdapterOptions {
                power_preference: wgpu::PowerPreference::default(),
                compatible_surface: Some(surface),
                force_fallback_adapter: false,
            })
            .await
            .ok_or_else(|| eyre!("failed to find an adapter for surface"))?;
        let (device, queue) = adapter
            .request_device(
                &wgpu::DeviceDescriptor {
                    required_features: wgpu::Features::empty(),//wgpu::Features::all_webgpu_mask(),
                    required_limits: {let default_limits = if cfg!(target_arch = "wasm32") {
                        if cfg!(feature = "webgl") {
                            wgpu::Limits::downlevel_webgl2_defaults()
                        } else if cfg!(feature = "webgpu") {
                            wgpu::Limits::downlevel_defaults()
                        } else {
                            unreachable!("for `web` feature, you must also enable a backend feature: `webgpu`, `webgl`")
                        }
                    } else {
                        wgpu::Limits::default()
                    };
                    #[allow(clippy::let_and_return)]
                    // This would be to allow adjusting limits downwards for max compat, but
                    // Firefox has 0s everywhere meh
                    default_limits
                },
                    label: None,
                },
                None,
            )
            .await?;
        self.adapter_state = Some(AdapterState {
            adapter,
            device,
            queue,
        });
        Ok(())
    }

    pub async fn create_surface(&mut self) -> color_eyre::Result<()> {
        let surface = self.instance.create_surface(self.window.clone())?;

        self.acknoledge_surface(&surface).await?;

        let Some(adapter_state) = self.adapter_state.as_ref() else {
            unreachable!("adapter_state should have been created by `Screen::acknowledge_surface`");
        };

        let surface_caps = surface.get_capabilities(&adapter_state.adapter);
        // Search for the first sRGB surface texture (or the first if unable to find one)
        let surface_format =
            surface_caps
                .formats
                .iter()
                .copied()
                .find(|f| !f.is_srgb())
                .unwrap_or(surface_caps.formats.first().copied().ok_or_else(|| {
                    eyre!("No surface formats found for given surface and adapter")
                })?);

        let config = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: surface_format,
            width: self.size.width,
            height: self.size.height,
            present_mode: wgpu::PresentMode::AutoNoVsync,
            desired_maximum_frame_latency: 2,
            alpha_mode: surface_caps
                .alpha_modes
                .first()
                .copied()
                .ok_or_else(|| eyre!("no alpha modes found for given surface and adapter"))?,
            view_formats: vec![],
        };
        surface.configure(&adapter_state.device, &config);

        // World texture is HDR, so the format must be a HDR format
        let world_texture_format = wgpu::TextureFormat::Bgra8Unorm;

        let egui = egui_wgpu::Renderer::new(&adapter_state.device, surface_format, None, 1);

        let world_atlas = glyphon::TextAtlas::new(
            &adapter_state.device,
            &adapter_state.queue,
            world_texture_format,
        );
        let camera_atlas =
            glyphon::TextAtlas::new(&adapter_state.device, &adapter_state.queue, surface_format);
        let world_renderer = middleware::DocumentRenderer::new(
            &adapter_state.device,
            config.format,
            world_texture_format,
        )?;

        self.surface_state = Some(SurfaceState {
            surface,
            config,
            egui,
            world_atlas,
            camera_atlas,
            world_renderer,
        });

        // resize to our queued size, now that we definitely have a surface and adapter state
        if let Some(queued_size) = self.queued_size.take() {
            self.resize(queued_size);
        }

        Ok(())
    }

    pub async fn new(window: Window) -> color_eyre::Result<Self> {
        let size = {
            let actual_size = window.inner_size();
            if actual_size.width == 0 || actual_size.height == 0 {
                // Assume we are actually in-built height and notify
                let assumed_size = PhysicalSize::new(WINDOW_WIDTH, WINDOW_HEIGHT);
                window.set_min_inner_size(Some(assumed_size));
                assumed_size
            } else {
                // It's fine
                actual_size
            }
        };

        let window = Arc::new(window);
        let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
            backends: wgpu::Backends::all(),
            ..Default::default()
        });

        let adapter = instance
            .request_adapter(&wgpu::RequestAdapterOptions {
                power_preference: wgpu::PowerPreference::default(),
                compatible_surface: None,
                force_fallback_adapter: false,
            })
            .await;
        let adapter_state = if let Some(adapter) = adapter {
            let (device, queue) = adapter
            .request_device(
                &wgpu::DeviceDescriptor {
                    required_features: wgpu::Features::empty(), //wgpu::Features::all_webgpu_mask(),
                    required_limits: if cfg!(target_arch = "wasm32") {
                        if cfg!(feature = "webgpu") {
                            wgpu::Limits::downlevel_defaults()
                        } else if cfg!(feature = "webgl") {
                            wgpu::Limits::downlevel_webgl2_defaults()
                        } else {
                            unreachable!("for `web` feature, you must also enable a backend feature: `webgpu`, `webgl`")
                        }
                    } else {
                        wgpu::Limits::default()
                    },
                    label: None,
                },
                None,
            )
            .await?;
            Some(AdapterState {
                adapter,
                device,
                queue,
            })
        } else {
            None
        };

        Ok(Self {
            instance,
            size,
            queued_size: None,
            window,
            adapter_state,
            surface_state: None,
        })
    }

    pub fn window(&self) -> &Window {
        &self.window
    }

    pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
        if new_size.width > 0 && new_size.height > 0 {
            if let Some((surface_state, adapter_state)) =
                self.surface_state.as_mut().zip(self.adapter_state.as_ref())
            {
                self.size = new_size;
                surface_state.config.width = new_size.width;
                surface_state.config.height = new_size.height;
                surface_state
                    .surface
                    .configure(&adapter_state.device, &surface_state.config);
            } else {
                self.queued_size = Some(new_size);
            }
        }
    }

    pub fn reconfigure(&self) {
        if let Some((surface_state, adapter_state)) =
            self.surface_state.as_ref().zip(self.adapter_state.as_ref())
        {
            surface_state
                .surface
                .configure(&adapter_state.device, &surface_state.config);
        }
    }

    pub fn is_vsync(&self) -> bool {
        if let Some(state) = self.surface_state.as_ref() {
            match state.config.present_mode {
                wgpu::PresentMode::AutoVsync => true,
                wgpu::PresentMode::AutoNoVsync => false,
                _ => unreachable!(),
            }
        } else {
            false
        }
    }

    pub fn vsync(&mut self, vsync: bool) {
        if let Some((surface_state, adapter_state)) =
            self.surface_state.as_mut().zip(self.adapter_state.as_ref())
        {
            let pm = if vsync {
                wgpu::PresentMode::AutoVsync
            } else {
                wgpu::PresentMode::AutoNoVsync
            };
            surface_state.config.present_mode = pm;
            surface_state
                .surface
                .configure(&adapter_state.device, &surface_state.config);
        }
    }

    pub fn render(&mut self, input: RenderInput) -> Result<(), RenderError> {
        puffin::profile_function!();

        let Some((surface_state, adapter_state)) =
            self.surface_state.as_mut().zip(self.adapter_state.as_ref())
        else {
            // Return early, as surface has not been initialized
            return Err(RenderError::MissingRenderState);
        };

        let output = surface_state.surface.get_current_texture()?;

        let view = output
            .texture
            .create_view(&wgpu::TextureViewDescriptor::default());

        let mut encoder =
            adapter_state
                .device
                .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                    label: Some("Render Encoder"),
                });

        let screen_descriptor = egui_wgpu::ScreenDescriptor {
            size_in_pixels: self.size.into(),
            pixels_per_point: self.window.scale_factor() as f32,
        };

        for (tex_id, delta) in &input.textures_delta.set {
            surface_state.egui.update_texture(
                &adapter_state.device,
                &adapter_state.queue,
                *tex_id,
                delta,
            );
        }

        let egui_commands = surface_state.egui.update_buffers(
            &adapter_state.device,
            &adapter_state.queue,
            &mut encoder,
            input.paint_jobs,
            &screen_descriptor,
        );

        let world_resolution = glyphon::Resolution {
            width: input.camera.world_width * 2,
            height: input.camera.world_height * 2,
        };

        // A remenant of the war
        let prepared_world_text_data: Vec<_> = input.world_text_data;
        let mut world_glyphon = glyphon::TextRenderer::new(
            &mut surface_state.world_atlas,
            &adapter_state.device,
            wgpu::MultisampleState::default(),
            None,
        );

        // Prepare world layer text
        match world_glyphon.prepare(
            &adapter_state.device,
            &adapter_state.queue,
            input.font_system,
            &mut surface_state.world_atlas,
            world_resolution,
            prepared_world_text_data.clone(),
            input.swash_cache,
        ) {
            Ok(()) => {}
            Err(glyphon::PrepareError::AtlasFull) => {
                // TODO Retry after executing atlas.trim, which removes all glyph claims (maybe dont quote me)
                log::error!("failed to render world level text, giving up...");
            }
        }

        // The render pass manages world rendering (w/o camera adjustment)
        {
            let mut world_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("World Render Pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: surface_state.world_renderer.world_texture_view(),
                    resolve_target: None,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Clear(wgpu::Color {
                            r: 0.0,
                            g: 0.0,
                            b: 0.0,
                            a: 1.0,
                        }),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: None,
                occlusion_query_set: None,
                timestamp_writes: None,
            });

            world_glyphon.render(&surface_state.world_atlas, &mut world_render_pass)?;
        }

        let scale_factor = self.window.scale_factor();
        let screen_resolution = glyphon::Resolution {
            width: self.size.width / scale_factor as u32,
            height: self.size.height / scale_factor as u32,
        };

        let mut camera_glyphon = glyphon::TextRenderer::new(
            &mut surface_state.camera_atlas,
            &adapter_state.device,
            wgpu::MultisampleState::default(),
            None,
        );

        // Prepare camera layer text
        match camera_glyphon.prepare(
            &adapter_state.device,
            &adapter_state.queue,
            input.font_system,
            &mut surface_state.camera_atlas,
            screen_resolution,
            input.camera_text_data.clone(),
            input.swash_cache,
        ) {
            Ok(()) => {}
            Err(glyphon::PrepareError::AtlasFull) => {
                // TODO retry after executing atlas.trim
                log::error!("failed to render camera level text, giving up...");
            }
        }

        surface_state.world_renderer.prepare(
            &adapter_state.device,
            &adapter_state.queue,
            &self.window,
            input.camera,
            input.render_options,
            input.document,
        );

        // This is the final composite pass that draws the game, then all overlays on the game
        {
            let mut final_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                label: Some("Final Render Pass"),
                color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                    view: &view,
                    resolve_target: None,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Clear(wgpu::Color {
                            r: 0.15,
                            g: 0.1,
                            b: 0.1,
                            a: 1.0,
                        }),
                        store: wgpu::StoreOp::Store,
                    },
                })],
                depth_stencil_attachment: None,
                occlusion_query_set: None,
                timestamp_writes: None,
            });

            surface_state.world_renderer.render(&mut final_render_pass);

            camera_glyphon.render(&surface_state.camera_atlas, &mut final_render_pass)?;

            surface_state
                .egui
                .render(&mut final_render_pass, input.paint_jobs, &screen_descriptor);
        }

        for tex_id in &input.textures_delta.free {
            surface_state.egui.free_texture(tex_id);
        }

        // submit accepts any T s.t. T implements IntoIter
        adapter_state.queue.submit(
            egui_commands
                .into_iter()
                .chain(std::iter::once(encoder.finish())),
        );
        output.present();

        Ok(())
    }
}