// zoomie/src/state.rs

use std::sync::Arc;
use pollster::FutureExt;
use wgpu::util::DeviceExt;
use winit::{
event::{ElementState, WindowEvent},
keyboard::Key,
window::Window,
};
use crate::{
camera::{Camera, CameraInfo},
controller::CameraController,
spotlight::{Spotlight, SpotlightInfo},
texture::Texture,
vertex::Vertex,
};
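// Sensitivity constants handed to the camera controller (zoom step and pan speed).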
const ZOOM_SPEED: f32 = 0.1;
const MOVE_SPEED: f32 = 0.0005;
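// Full-screen quad in clip space; the top of the quad samples v = 0, so the image appears upright.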
const VERTICES: &[Vertex] = &[
Vertex {
position: [-1.0, 1.0],
tex_coords: [0.0, 0.0],
},
Vertex {
position: [-1.0, -1.0],
tex_coords: [0.0, 1.0],
},
Vertex {
position: [1.0, -1.0],
tex_coords: [1.0, 1.0],
},
Vertex {
position: [1.0, 1.0],
tex_coords: [1.0, 0.0],
},
];
#[rustfmt::skip]
const INDICES: &[u16] = &[
0, 1, 2,
2, 3, 0
];
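/// Owns the GPU resources needed to draw the image: surface, device/queue,
/// render pipeline, quad buffers, and the camera/spotlight uniforms that are
/// rewritten every frame.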
pub struct State<'a> {
surface: wgpu::Surface<'a>,
device: wgpu::Device,
queue: wgpu::Queue,
config: wgpu::SurfaceConfiguration,
size: winit::dpi::PhysicalSize<u32>,
wsize: (f64, f64),
pub window: Arc<Window>,
render_pipeline: wgpu::RenderPipeline,
vertex_buffer: wgpu::Buffer,
index_buffer: wgpu::Buffer,
texture_bind_group: wgpu::BindGroup,
pub camera: Camera,
pub camera_info: CameraInfo,
pub spotlight: Spotlight,
pub spotlight_info: SpotlightInfo,
pub zoom: f32,
pub camera_controller: CameraController,
hide_cursor: bool,
}
impl<'a> State<'a> {
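/// Sets up the surface, device, queue, render pipeline, and quad buffers for
/// `window`, uploading `image` as the sampled texture. Adapter and device
/// requests are blocked on with pollster.
///
/// A minimal usage sketch (assumes the window comes from a winit event loop and
/// the image path is illustrative):
///
/// ```ignore
/// let image = image::open("photo.png").unwrap().to_rgba8();
/// let state = State::new(window.clone(), image);
/// ```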
pub fn new(window: Arc<Window>, image: image::RgbaImage) -> Self {
let window_size = window.inner_size();
let wsize = (window_size.width as f64, window_size.height as f64);
let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
backends: wgpu::Backends::PRIMARY,
..Default::default()
});
let surface = instance.create_surface(window.clone()).unwrap();
let adapter = instance
.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::default(),
compatible_surface: Some(&surface),
force_fallback_adapter: false,
})
.block_on()
.unwrap();
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
required_features: wgpu::Features::empty(),
required_limits: wgpu::Limits::default(),
label: None,
memory_hints: Default::default(),
},
None,
)
.block_on()
.unwrap();
let surface_caps = surface.get_capabilities(&adapter);
let surface_format = surface_caps
.formats
.iter()
.find(|f| f.is_srgb())
.copied()
.unwrap_or(surface_caps.formats[0]);
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: surface_format,
width: window_size.width,
height: window_size.height,
present_mode: surface_caps.present_modes[0],
alpha_mode: surface_caps.alpha_modes[0],
view_formats: vec![],
desired_maximum_frame_latency: 2,
};
let (camera, camera_info) = Camera::new(&config, &device);
let camera_controller = CameraController::new(ZOOM_SPEED, MOVE_SPEED);
let (spotlight, spotlight_info) = Spotlight::new(&device);
let shader = device.create_shader_module(wgpu::include_wgsl!("shader.wgsl"));
let texture = Texture::new(image, &device, &queue);
let render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Render Pipeline Layout"),
bind_group_layouts: &[&texture.layout, &camera_info.layout, &spotlight_info.layout],
push_constant_ranges: &[],
});
let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Render Pipeline"),
layout: Some(&render_pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: Some("vs_main"),
buffers: &[Vertex::layout()],
compilation_options: wgpu::PipelineCompilationOptions::default(),
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: Some("fs_main"),
targets: &[Some(wgpu::ColorTargetState {
format: config.format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
compilation_options: wgpu::PipelineCompilationOptions::default(),
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: Some(wgpu::Face::Back),
// Anything other than Fill requires Features::POLYGON_MODE_LINE or Features::POLYGON_MODE_POINT
polygon_mode: wgpu::PolygonMode::Fill,
// Requires Features::DEPTH_CLIP_CONTROL
unclipped_depth: false,
// Requires Features::CONSERVATIVE_RASTERIZATION
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
multiview: None,
cache: None,
});
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Vertex Buffer"),
contents: bytemuck::cast_slice(VERTICES),
usage: wgpu::BufferUsages::VERTEX,
});
let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Index Buffer"),
contents: bytemuck::cast_slice(INDICES),
usage: wgpu::BufferUsages::INDEX,
});
Self {
surface,
device,
queue,
config,
size: window_size,
wsize,
window,
render_pipeline,
vertex_buffer,
index_buffer,
texture_bind_group: texture.bind_group,
camera,
camera_info,
spotlight,
spotlight_info,
camera_controller,
zoom: 1.0,
hide_cursor: false,
}
}
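/// Reconfigures the surface for a new window size; zero-sized updates
/// (e.g. while the window is minimized) are ignored.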
pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {
if new_size.width > 0 && new_size.height > 0 {
self.size = new_size;
self.wsize = (new_size.width as f64, new_size.height as f64);
self.config.width = new_size.width;
self.config.height = new_size.height;
self.surface.configure(&self.device, &self.config);
}
}
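/// Forwards window events to the camera controller and spotlight, then handles
/// the keyboard shortcuts: `r` resets the camera, `h` toggles cursor visibility.
/// Returns `true` when a key press was consumed.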
pub fn input(&mut self, event: &winit::event::WindowEvent) -> bool {
self.camera_controller.process_events(self.wsize, event);
self.spotlight.process_events(event);
match event {
WindowEvent::KeyboardInput {
event: key_event, ..
} => {
if key_event.state == ElementState::Pressed {
let key = &key_event.logical_key;
if let Key::Character(char) = key {
if char == "r" {
self.camera_controller.reset_camera(&mut self.camera);
} else if char == "h" {
self.hide_cursor = !self.hide_cursor;
}
}
return true;
}
false
}
_ => false,
}
}
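/// Advances the camera and spotlight, then writes their uniforms to the GPU
/// ahead of the next frame.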
pub fn update(&mut self) {
self.camera_controller.update(&mut self.camera);
self.camera_info.uniform.update_view_proj(&self.camera);
self.queue.write_buffer(
&self.camera_info.buffer,
0,
bytemuck::cast_slice(&[self.camera_info.uniform]),
);
self.spotlight.update();
self.spotlight_info
.uniform
.update(&self.spotlight, &self.camera_controller);
self.queue.write_buffer(
&self.spotlight_info.buffer,
0,
bytemuck::cast_slice(&[self.spotlight_info.uniform]),
);
}
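/// Draws the textured quad to the current surface texture and presents it.
///
/// A sketch of how the result is typically handled in the event loop (the loop
/// itself lives elsewhere):
///
/// ```ignore
/// let size = state.window.inner_size();
/// match state.render() {
///     Ok(()) => {}
///     Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => state.resize(size),
///     Err(e) => eprintln!("render failed: {e:?}"),
/// }
/// ```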
pub fn render(&mut self) -> Result<(), wgpu::SurfaceError> {
self.window.set_cursor_visible(!self.hide_cursor);
let output = self.surface.get_current_texture()?;
let view = output
.texture
.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder = self
.device
.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("Render Encoder"),
});
{
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("Render Pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &view,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color {
r: 0.0,
g: 0.0,
b: 0.0,
a: 1.0,
}),
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
render_pass.set_pipeline(&self.render_pipeline);
render_pass.set_bind_group(0, &self.texture_bind_group, &[]);
render_pass.set_bind_group(1, &self.camera_info.bind_group, &[]);
render_pass.set_bind_group(2, &self.spotlight_info.bind_group, &[]);
render_pass.set_vertex_buffer(0, self.vertex_buffer.slice(..));
render_pass.set_index_buffer(self.index_buffer.slice(..), wgpu::IndexFormat::Uint16);
let num_indices = INDICES.len() as u32;
render_pass.draw_indexed(0..num_indices, 0, 0..1);
}
// submit() accepts anything that implements IntoIterator
self.queue.submit(std::iter::once(encoder.finish()));
output.present();
Ok(())
}
}