GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_render/src/camera.rs

use crate::{
    batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport},
    extract_component::{ExtractComponent, ExtractComponentPlugin},
    extract_resource::{ExtractResource, ExtractResourcePlugin},
    render_asset::RenderAssets,
    render_graph::{CameraDriverNode, InternedRenderSubGraph, RenderGraph, RenderSubGraph},
    render_resource::TextureView,
    sync_world::{RenderEntity, SyncToRenderWorld},
    texture::{GpuImage, ManualTextureViews},
    view::{
        ColorGrading, ExtractedView, ExtractedWindows, Hdr, Msaa, NoIndirectDrawing,
        RenderVisibleEntities, RetainedViewEntity, ViewUniformOffset,
    },
    Extract, ExtractSchedule, Render, RenderApp, RenderSystems,
};

use bevy_app::{App, Plugin, PostStartup, PostUpdate};
use bevy_asset::{AssetEvent, AssetEventSystems, AssetId, Assets};
use bevy_camera::{
    primitives::Frustum,
    visibility::{self, RenderLayers, VisibleEntities},
    Camera, Camera2d, Camera3d, CameraMainTextureUsages, CameraOutputMode, CameraUpdateSystems,
    ClearColor, ClearColorConfig, Exposure, ManualTextureViewHandle, NormalizedRenderTarget,
    Projection, RenderTargetInfo, Viewport,
};
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{
    change_detection::DetectChanges,
    component::Component,
    entity::{ContainsEntity, Entity},
    error::BevyError,
    lifecycle::HookContext,
    message::MessageReader,
    prelude::With,
    query::{Has, QueryItem},
    reflect::ReflectComponent,
    resource::Resource,
    schedule::IntoScheduleConfigs,
    system::{Commands, Query, Res, ResMut},
    world::DeferredWorld,
};
use bevy_image::Image;
use bevy_math::{uvec2, vec2, Mat4, URect, UVec2, UVec4, Vec2};
use bevy_platform::collections::{HashMap, HashSet};
use bevy_reflect::prelude::*;
use bevy_transform::components::GlobalTransform;
use bevy_window::{PrimaryWindow, Window, WindowCreated, WindowResized, WindowScaleFactorChanged};
use tracing::warn;
use wgpu::TextureFormat;

/// Adds core camera functionality: required components for [`Camera`] entities,
/// the [`camera_system`] update schedules, and render-world camera extraction.
#[derive(Default)]
pub struct CameraPlugin;

impl Plugin for CameraPlugin {
    fn build(&self, app: &mut App) {
        app.register_required_components::<Camera, Msaa>()
            .register_required_components::<Camera, SyncToRenderWorld>()
            .register_required_components::<Camera3d, ColorGrading>()
            .register_required_components::<Camera3d, Exposure>()
            .add_plugins((
                ExtractResourcePlugin::<ClearColor>::default(),
                ExtractComponentPlugin::<CameraMainTextureUsages>::default(),
            ))
            .add_systems(PostStartup, camera_system.in_set(CameraUpdateSystems))
            .add_systems(
                PostUpdate,
                camera_system
                    .in_set(CameraUpdateSystems)
                    .before(AssetEventSystems)
                    .before(visibility::update_frusta),
            );
        app.world_mut()
            .register_component_hooks::<Camera>()
            .on_add(warn_on_no_render_graph);

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<SortedCameras>()
                .add_systems(ExtractSchedule, extract_cameras)
                .add_systems(Render, sort_cameras.in_set(RenderSystems::ManageViews));
            let camera_driver_node = CameraDriverNode::new(render_app.world_mut());
            let mut render_graph = render_app.world_mut().resource_mut::<RenderGraph>();
            render_graph.add_node(crate::graph::CameraDriverLabel, camera_driver_node);
        }
    }
}

fn warn_on_no_render_graph(world: DeferredWorld, HookContext { entity, caller, .. }: HookContext) {
    if !world.entity(entity).contains::<CameraRenderGraph>() {
        warn!("{}Entity {entity} has a `Camera` component, but it doesn't have a render graph configured. Usually, adding a `Camera2d` or `Camera3d` component will work.
However, you may instead need to enable `bevy_core_pipeline`, or may want to manually add a `CameraRenderGraph` component to create a custom render graph.", caller.map(|location| format!("{location}: ")).unwrap_or_default());
    }
}

impl ExtractResource for ClearColor {
    type Source = Self;

    fn extract_resource(source: &Self::Source) -> Self {
        source.clone()
    }
}

impl ExtractComponent for CameraMainTextureUsages {
    type QueryData = &'static Self;
    type QueryFilter = ();
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(*item)
    }
}

impl ExtractComponent for Camera2d {
    type QueryData = &'static Self;
    type QueryFilter = With<Camera>;
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(item.clone())
    }
}

impl ExtractComponent for Camera3d {
    type QueryData = &'static Self;
    type QueryFilter = With<Camera>;
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(item.clone())
    }
}

/// Configures the [`RenderGraph`] sub-graph that is run for a given [`Camera`] entity.
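///
/// A minimal sketch (not part of the original source) of running a custom sub-graph for one
/// camera; `MyRenderGraph` is a hypothetical label deriving [`RenderSubGraph`]:
///
/// ```ignore
/// #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderSubGraph)]
/// struct MyRenderGraph;
///
/// // Replaces the graph that a `Camera2d` or `Camera3d` would normally configure.
/// commands.spawn((Camera::default(), CameraRenderGraph::new(MyRenderGraph)));
/// ```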
#[derive(Component, Debug, Deref, DerefMut, Reflect, Clone)]
#[reflect(opaque)]
#[reflect(Component, Debug, Clone)]
pub struct CameraRenderGraph(InternedRenderSubGraph);

impl CameraRenderGraph {
    /// Creates a new [`CameraRenderGraph`] from any [`RenderSubGraph`] label.
    #[inline]
    pub fn new<T: RenderSubGraph>(name: T) -> Self {
        Self(name.intern())
    }

    /// Sets the graph name.
    #[inline]
    pub fn set<T: RenderSubGraph>(&mut self, name: T) {
        self.0 = name.intern();
    }
}

/// Render-world helpers for [`NormalizedRenderTarget`], resolving a target to its
/// texture view, texture format, and size information.
pub trait NormalizedRenderTargetExt {
    /// Retrieves the [`TextureView`] of this render target, if it exists.
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView>;

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat>;

    /// Retrieves the size and scale factor of this render target, if it exists.
    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError>;

    /// Checks if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool;
}

impl NormalizedRenderTargetExt for NormalizedRenderTarget {
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_view.as_ref()),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| &image.texture_view),
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| &tex.texture_view)
            }
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_format),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| image.texture_format),
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| tex.format)
            }
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => resolutions
                .into_iter()
                .find(|(entity, _)| *entity == window_ref.entity())
                .map(|(_, window)| RenderTargetInfo {
                    physical_size: window.physical_size(),
                    scale_factor: window.resolution.scale_factor(),
                })
                .ok_or(MissingRenderTargetInfoError::Window {
                    window: window_ref.entity(),
                }),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| RenderTargetInfo {
                    physical_size: image.size(),
                    scale_factor: image_target.scale_factor,
                })
                .ok_or(MissingRenderTargetInfoError::Image {
                    image: image_target.handle.id(),
                }),
            NormalizedRenderTarget::TextureView(id) => manual_texture_views
                .get(id)
                .map(|tex| RenderTargetInfo {
                    physical_size: tex.size,
                    scale_factor: 1.0,
                })
                .ok_or(MissingRenderTargetInfoError::TextureView { texture_view: *id }),
            NormalizedRenderTarget::None { width, height } => Ok(RenderTargetInfo {
                physical_size: uvec2(*width, *height),
                scale_factor: 1.0,
            }),
        }
    }

    // Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool {
        match self {
            NormalizedRenderTarget::Window(window_ref) => {
                changed_window_ids.contains(&window_ref.entity())
            }
            NormalizedRenderTarget::Image(image_target) => {
                changed_image_handles.contains(&image_target.handle.id())
            }
            NormalizedRenderTarget::TextureView(_) => true,
            NormalizedRenderTarget::None { .. } => false,
        }
    }
}

#[derive(Debug, thiserror::Error)]
pub enum MissingRenderTargetInfoError {
    #[error("RenderTarget::Window missing ({window:?}): Make sure the provided entity has a Window component.")]
    Window { window: Entity },
    #[error("RenderTarget::Image missing ({image:?}): Make sure the Image's usages include RenderAssetUsages::MAIN_WORLD.")]
    Image { image: AssetId<Image> },
    #[error("RenderTarget::TextureView missing ({texture_view:?}): Make sure the texture view handle was not removed.")]
    TextureView {
        texture_view: ManualTextureViewHandle,
    },
}

/// System in charge of updating a [`Camera`] when its window or projection changes.
///
/// The system detects window creation, resize, and scale factor change events to update the camera
/// [`Projection`] if needed.
///
/// ## World Resources
///
/// [`Res<Assets<Image>>`](Assets<Image>) -- For cameras that render to an image, this resource is used to
/// inspect information about the render target. This system will not access any other image assets.
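///
/// [`CameraPlugin`] registers this system in both [`PostStartup`] and [`PostUpdate`]. A minimal
/// sketch (only needed in a hand-rolled app that does not use [`CameraPlugin`]):
///
/// ```ignore
/// app.add_systems(
///     PostUpdate,
///     camera_system
///         .in_set(CameraUpdateSystems)
///         .before(AssetEventSystems)
///         .before(visibility::update_frusta),
/// );
/// ```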
pub fn camera_system(
    mut window_resized_reader: MessageReader<WindowResized>,
    mut window_created_reader: MessageReader<WindowCreated>,
    mut window_scale_factor_changed_reader: MessageReader<WindowScaleFactorChanged>,
    mut image_asset_event_reader: MessageReader<AssetEvent<Image>>,
    primary_window: Query<Entity, With<PrimaryWindow>>,
    windows: Query<(Entity, &Window)>,
    images: Res<Assets<Image>>,
    manual_texture_views: Res<ManualTextureViews>,
    mut cameras: Query<(&mut Camera, &mut Projection)>,
) -> Result<(), BevyError> {
    let primary_window = primary_window.iter().next();

    let mut changed_window_ids = <HashSet<_>>::default();
    changed_window_ids.extend(window_created_reader.read().map(|event| event.window));
    changed_window_ids.extend(window_resized_reader.read().map(|event| event.window));
    let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_reader
        .read()
        .map(|event| event.window)
        .collect();
    changed_window_ids.extend(scale_factor_changed_window_ids.clone());

    let changed_image_handles: HashSet<&AssetId<Image>> = image_asset_event_reader
        .read()
        .filter_map(|event| match event {
            AssetEvent::Modified { id } | AssetEvent::Added { id } => Some(id),
            _ => None,
        })
        .collect();

    for (mut camera, mut camera_projection) in &mut cameras {
        let mut viewport_size = camera
            .viewport
            .as_ref()
            .map(|viewport| viewport.physical_size);

        if let Some(normalized_target) = &camera.target.normalize(primary_window)
            && (normalized_target.is_changed(&changed_window_ids, &changed_image_handles)
                || camera.is_added()
                || camera_projection.is_changed()
                || camera.computed.old_viewport_size != viewport_size
                || camera.computed.old_sub_camera_view != camera.sub_camera_view)
        {
            let new_computed_target_info = normalized_target.get_render_target_info(
                windows,
                &images,
                &manual_texture_views,
            )?;
            // Check for the scale factor changing, and resize the viewport if needed.
            // This can happen when the window is moved between monitors with different DPIs.
            // Without this, the viewport would take up a smaller portion of the window after
            // it moves to a higher-DPI monitor.
            if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::default())
                && let Some(old_scale_factor) = camera
                    .computed
                    .target_info
                    .as_ref()
                    .map(|info| info.scale_factor)
            {
                let resize_factor = new_computed_target_info.scale_factor / old_scale_factor;
                if let Some(ref mut viewport) = camera.viewport {
                    let resize = |vec: UVec2| (vec.as_vec2() * resize_factor).as_uvec2();
                    viewport.physical_position = resize(viewport.physical_position);
                    viewport.physical_size = resize(viewport.physical_size);
                    viewport_size = Some(viewport.physical_size);
                }
            }
            // This check is needed because when changing WindowMode to Fullscreen, the viewport
            // may have invalid arguments due to a sudden change in window size to a lower value.
            // If the size of the window is lower, the viewport is clamped to that lower value.
            if let Some(viewport) = &mut camera.viewport {
                viewport.clamp_to_size(new_computed_target_info.physical_size);
            }
            camera.computed.target_info = Some(new_computed_target_info);
            if let Some(size) = camera.logical_viewport_size()
                && size.x != 0.0
                && size.y != 0.0
            {
                camera_projection.update(size.x, size.y);
                camera.computed.clip_from_view = match &camera.sub_camera_view {
                    Some(sub_view) => camera_projection.get_clip_from_view_for_sub(sub_view),
                    None => camera_projection.get_clip_from_view(),
                }
            }
        }

        if camera.computed.old_viewport_size != viewport_size {
            camera.computed.old_viewport_size = viewport_size;
        }

        if camera.computed.old_sub_camera_view != camera.sub_camera_view {
            camera.computed.old_sub_camera_view = camera.sub_camera_view;
        }
    }
    Ok(())
}

/// The render-world mirror of an active [`Camera`], inserted by [`extract_cameras`].
#[derive(Component, Debug)]
pub struct ExtractedCamera {
    pub target: Option<NormalizedRenderTarget>,
    pub physical_viewport_size: Option<UVec2>,
    pub physical_target_size: Option<UVec2>,
    pub viewport: Option<Viewport>,
    pub render_graph: InternedRenderSubGraph,
    pub order: isize,
    pub output_mode: CameraOutputMode,
    pub msaa_writeback: bool,
    pub clear_color: ClearColorConfig,
    pub sorted_camera_index_for_target: usize,
    pub exposure: f32,
    pub hdr: bool,
}

pub fn extract_cameras(
    mut commands: Commands,
    query: Extract<
        Query<(
            Entity,
            RenderEntity,
            &Camera,
            &CameraRenderGraph,
            &GlobalTransform,
            &VisibleEntities,
            &Frustum,
            Has<Hdr>,
            Option<&ColorGrading>,
            Option<&Exposure>,
            Option<&TemporalJitter>,
            Option<&MipBias>,
            Option<&RenderLayers>,
            Option<&Projection>,
            Has<NoIndirectDrawing>,
        )>,
    >,
    primary_window: Extract<Query<Entity, With<PrimaryWindow>>>,
    gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
    mapper: Extract<Query<&RenderEntity>>,
) {
    let primary_window = primary_window.iter().next();
    type ExtractedCameraComponents = (
        ExtractedCamera,
        ExtractedView,
        RenderVisibleEntities,
        TemporalJitter,
        MipBias,
        RenderLayers,
        Projection,
        NoIndirectDrawing,
        ViewUniformOffset,
    );
    for (
        main_entity,
        render_entity,
        camera,
        camera_render_graph,
        transform,
        visible_entities,
        frustum,
        hdr,
        color_grading,
        exposure,
        temporal_jitter,
        mip_bias,
        render_layers,
        projection,
        no_indirect_drawing,
    ) in query.iter()
    {
        if !camera.is_active {
            commands
                .entity(render_entity)
                .remove::<ExtractedCameraComponents>();
            continue;
        }

        let color_grading = color_grading.unwrap_or(&ColorGrading::default()).clone();

        if let (
            Some(URect {
                min: viewport_origin,
                ..
            }),
            Some(viewport_size),
            Some(target_size),
        ) = (
            camera.physical_viewport_rect(),
            camera.physical_viewport_size(),
            camera.physical_target_size(),
        ) {
            if target_size.x == 0 || target_size.y == 0 {
                commands
                    .entity(render_entity)
                    .remove::<ExtractedCameraComponents>();
                continue;
            }

            let render_visible_entities = RenderVisibleEntities {
                entities: visible_entities
                    .entities
                    .iter()
                    .map(|(type_id, entities)| {
                        let entities = entities
                            .iter()
                            .map(|entity| {
                                let render_entity = mapper
                                    .get(*entity)
                                    .cloned()
                                    .map(|entity| entity.id())
                                    .unwrap_or(Entity::PLACEHOLDER);
                                (render_entity, (*entity).into())
                            })
                            .collect();
                        (*type_id, entities)
                    })
                    .collect(),
            };

            let mut commands = commands.entity(render_entity);
            commands.insert((
                ExtractedCamera {
                    target: camera.target.normalize(primary_window),
                    viewport: camera.viewport.clone(),
                    physical_viewport_size: Some(viewport_size),
                    physical_target_size: Some(target_size),
                    render_graph: camera_render_graph.0,
                    order: camera.order,
                    output_mode: camera.output_mode,
                    msaa_writeback: camera.msaa_writeback,
                    clear_color: camera.clear_color,
                    // this will be set in sort_cameras
                    sorted_camera_index_for_target: 0,
                    exposure: exposure
                        .map(Exposure::exposure)
                        .unwrap_or_else(|| Exposure::default().exposure()),
                    hdr,
                },
                ExtractedView {
                    retained_view_entity: RetainedViewEntity::new(main_entity.into(), None, 0),
                    clip_from_view: camera.clip_from_view(),
                    world_from_view: *transform,
                    clip_from_world: None,
                    hdr,
                    viewport: UVec4::new(
                        viewport_origin.x,
                        viewport_origin.y,
                        viewport_size.x,
                        viewport_size.y,
                    ),
                    color_grading,
                },
                render_visible_entities,
                *frustum,
            ));

            if let Some(temporal_jitter) = temporal_jitter {
                commands.insert(temporal_jitter.clone());
            } else {
                commands.remove::<TemporalJitter>();
            }

            if let Some(mip_bias) = mip_bias {
                commands.insert(mip_bias.clone());
            } else {
                commands.remove::<MipBias>();
            }

            if let Some(render_layers) = render_layers {
                commands.insert(render_layers.clone());
            } else {
                commands.remove::<RenderLayers>();
            }

            if let Some(projection) = projection {
                commands.insert(projection.clone());
            } else {
                commands.remove::<Projection>();
            }

            if no_indirect_drawing
                || !matches!(
                    gpu_preprocessing_support.max_supported_mode,
                    GpuPreprocessingMode::Culling
                )
            {
                commands.insert(NoIndirectDrawing);
            } else {
                commands.remove::<NoIndirectDrawing>();
            }
        }
    }
}

/// Cameras sorted by their order field. This is updated in the [`sort_cameras`] system.
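///
/// A minimal sketch (a hypothetical render-world system, not part of this file) of reading the
/// sorted list:
///
/// ```ignore
/// fn log_camera_order(sorted: Res<SortedCameras>) {
///     for camera in &sorted.0 {
///         println!("camera {:?} has order {}", camera.entity, camera.order);
///     }
/// }
/// ```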
#[derive(Resource, Default)]
pub struct SortedCameras(pub Vec<SortedCamera>);

pub struct SortedCamera {
    pub entity: Entity,
    pub order: isize,
    pub target: Option<NormalizedRenderTarget>,
    pub hdr: bool,
}

pub fn sort_cameras(
    mut sorted_cameras: ResMut<SortedCameras>,
    mut cameras: Query<(Entity, &mut ExtractedCamera)>,
) {
    sorted_cameras.0.clear();
    for (entity, camera) in cameras.iter() {
        sorted_cameras.0.push(SortedCamera {
            entity,
            order: camera.order,
            target: camera.target.clone(),
            hdr: camera.hdr,
        });
    }
    // Sort by order, and ensure that within an order, RenderTargets of the same type are packed together.
    sorted_cameras
        .0
        .sort_by(|c1, c2| (c1.order, &c1.target).cmp(&(c2.order, &c2.target)));
    let mut previous_order_target = None;
    let mut ambiguities = <HashSet<_>>::default();
    let mut target_counts = <HashMap<_, _>>::default();
    for sorted_camera in &mut sorted_cameras.0 {
        let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
        if let Some(previous_order_target) = previous_order_target
            && previous_order_target == new_order_target
        {
            ambiguities.insert(new_order_target.clone());
        }
        if let Some(target) = &sorted_camera.target {
            let count = target_counts
                .entry((target.clone(), sorted_camera.hdr))
                .or_insert(0usize);
            let (_, mut camera) = cameras.get_mut(sorted_camera.entity).unwrap();
            camera.sorted_camera_index_for_target = *count;
            *count += 1;
        }
        previous_order_target = Some(new_order_target);
    }

    if !ambiguities.is_empty() {
        warn!(
            "Camera order ambiguities detected for active cameras with the following priorities: {:?}. \
            To fix this, ensure there is exactly one Camera entity spawned with a given order for a given RenderTarget. \
            Ambiguities should be resolved because either (1) multiple active cameras were spawned accidentally, which will \
            result in rendering multiple instances of the scene, or (2) for cases where multiple active cameras are intentional, \
            ambiguities could result in unpredictable render results.",
            ambiguities
        );
    }
}

/// A subpixel offset to jitter a perspective camera's frustum by.
///
/// Useful for temporal rendering techniques.
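///
/// A minimal sketch (hypothetical offsets, not part of this file) of driving the jitter from a
/// system each frame:
///
/// ```ignore
/// fn update_jitter(mut cameras: Query<&mut TemporalJitter>) {
///     for mut jitter in &mut cameras {
///         // Real implementations use a low-discrepancy sequence (e.g. Halton(2, 3))
///         // remapped into the documented [-0.5, 0.5] range.
///         jitter.offset = Vec2::new(0.25, -0.17);
///     }
/// }
/// ```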
#[derive(Component, Clone, Default, Reflect)]
#[reflect(Default, Component, Clone)]
pub struct TemporalJitter {
    /// Offset is in range [-0.5, 0.5].
    pub offset: Vec2,
}

impl TemporalJitter {
    pub fn jitter_projection(&self, clip_from_view: &mut Mat4, view_size: Vec2) {
        // https://github.com/GPUOpen-LibrariesAndSDKs/FidelityFX-SDK/blob/d7531ae47d8b36a5d4025663e731a47a38be882f/docs/techniques/media/super-resolution-temporal/jitter-space.svg
        let mut jitter = (self.offset * vec2(2.0, -2.0)) / view_size;

        // Orthographic projections have `w_axis.w == 1.0`; scale the jitter by the
        // projection's x/y scale so the offset stays subpixel in clip space.
        if clip_from_view.w_axis.w == 1.0 {
            jitter *= vec2(clip_from_view.x_axis.x, clip_from_view.y_axis.y) * 0.5;
        }

        clip_from_view.z_axis.x += jitter.x;
        clip_from_view.z_axis.y += jitter.y;
    }
}

/// Camera component specifying a mip bias to apply when sampling from material textures.
///
/// Often used in conjunction with antialiasing post-process effects to reduce texture blurriness.
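///
/// A minimal sketch (hypothetical values, not part of this file) of sharpening a camera that
/// uses a temporal antialiasing effect:
///
/// ```ignore
/// // More negative values sample higher-resolution mips (sharper, but noisier).
/// commands.spawn((Camera3d::default(), MipBias(-0.5)));
/// ```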
#[derive(Component, Reflect, Clone)]
#[reflect(Default, Component)]
pub struct MipBias(pub f32);

impl Default for MipBias {
    fn default() -> Self {
        Self(-1.0)
    }
}