spec_v2: Blit #20117

Open · wants to merge 19 commits into main

81 changes: 46 additions & 35 deletions crates/bevy_core_pipeline/src/blit/mod.rs
@@ -1,6 +1,8 @@
use core::{ops::Deref, result::Result};

use crate::FullscreenShader;
use bevy_app::{App, Plugin};
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer, Handle};
use bevy_asset::{embedded_asset, load_embedded_asset, AssetServer};
use bevy_ecs::prelude::*;
use bevy_render::{
render_resource::{
@@ -24,8 +26,7 @@ impl Plugin for BlitPlugin {
};

render_app
.allow_ambiguous_resource::<SpecializedRenderPipelines<BlitPipeline>>()
.init_resource::<SpecializedRenderPipelines<BlitPipeline>>()
.allow_ambiguous_resource::<BlitPipeline>()
.add_systems(RenderStartup, init_blit_pipeline);
}
}
@@ -34,8 +35,7 @@ impl Plugin for BlitPlugin {
pub struct BlitPipeline {
pub layout: BindGroupLayout,
pub sampler: Sampler,
pub fullscreen_shader: FullscreenShader,
pub fragment_shader: Handle<Shader>,
pub specialized_cache: SpecializedCache<RenderPipeline, BlitSpecializer>,
}

pub fn init_blit_pipeline(
@@ -57,11 +57,23 @@ pub fn init_blit_pipeline(

let sampler = render_device.create_sampler(&SamplerDescriptor::default());

let base_descriptor = RenderPipelineDescriptor {
label: Some("blit pipeline".into()),
layout: vec![layout.clone()],
vertex: fullscreen_shader.to_vertex_state(),
fragment: Some(FragmentState {
shader: load_embedded_asset!(asset_server.deref(), "blit.wgsl"),
..default()
}),
..default()
};

let specialized_cache = SpecializedCache::new(BlitSpecializer, base_descriptor);

commands.insert_resource(BlitPipeline {
layout,
sampler,
fullscreen_shader: fullscreen_shader.clone(),
fragment_shader: load_embedded_asset!(asset_server.as_ref(), "blit.wgsl"),
specialized_cache,
});
}

@@ -79,35 +91,34 @@ impl BlitPipeline {
}
}

#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct BlitPipelineKey {
pub texture_format: TextureFormat,
pub blend_state: Option<BlendState>,
pub samples: u32,
}
pub struct BlitSpecializer;

impl Specializer<RenderPipeline> for BlitSpecializer {
type Key = BlitKey;

impl SpecializedRenderPipeline for BlitPipeline {
type Key = BlitPipelineKey;

fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
RenderPipelineDescriptor {
label: Some("blit pipeline".into()),
layout: vec![self.layout.clone()],
vertex: self.fullscreen_shader.to_vertex_state(),
fragment: Some(FragmentState {
shader: self.fragment_shader.clone(),
targets: vec![Some(ColorTargetState {
format: key.texture_format,
blend: key.blend_state,
write_mask: ColorWrites::ALL,
})],
..default()
}),
multisample: MultisampleState {
count: key.samples,
..default()
fn specialize(
&self,
key: Self::Key,
descriptor: &mut <RenderPipeline as Specializable>::Descriptor,
) -> Result<Canonical<Self::Key>, BevyError> {
descriptor.multisample.count = key.samples;

descriptor.fragment_mut()?.set_target(
0,
ColorTargetState {
format: key.texture_format,
blend: key.blend_state,
write_mask: ColorWrites::ALL,
},
..default()
}
);

Ok(key)
}
}

#[derive(PartialEq, Eq, Hash, Clone, Copy, SpecializerKey)]
pub struct BlitKey {
pub texture_format: TextureFormat,
pub blend_state: Option<BlendState>,
pub samples: u32,
}
14 changes: 8 additions & 6 deletions crates/bevy_core_pipeline/src/msaa_writeback.rs
@@ -1,5 +1,5 @@
use crate::{
blit::{BlitPipeline, BlitPipelineKey},
blit::{BlitKey, BlitPipeline},
core_2d::graph::{Core2d, Node2d},
core_3d::graph::{Core3d, Node3d},
};
@@ -119,22 +119,23 @@ pub struct MsaaWritebackBlitPipeline(CachedRenderPipelineId);
fn prepare_msaa_writeback_pipelines(
mut commands: Commands,
pipeline_cache: Res<PipelineCache>,
mut pipelines: ResMut<SpecializedRenderPipelines<BlitPipeline>>,
blit_pipeline: Res<BlitPipeline>,
mut blit_pipeline: ResMut<BlitPipeline>,
view_targets: Query<(Entity, &ViewTarget, &ExtractedCamera, &Msaa)>,
) {
) -> Result<(), BevyError> {
for (entity, view_target, camera, msaa) in view_targets.iter() {
// only do writeback if writeback is enabled for the camera and this isn't the first camera in the target,
// as there is nothing to write back for the first camera.
if msaa.samples() > 1 && camera.msaa_writeback && camera.sorted_camera_index_for_target > 0
{
let key = BlitPipelineKey {
let key = BlitKey {
texture_format: view_target.main_texture_format(),
samples: msaa.samples(),
blend_state: None,
};

let pipeline = pipelines.specialize(&pipeline_cache, &blit_pipeline, key);
let pipeline = blit_pipeline
.specialized_cache
.specialize(&pipeline_cache, key)?;
commands
.entity(entity)
.insert(MsaaWritebackBlitPipeline(pipeline));
@@ -146,4 +147,5 @@ fn prepare_msaa_writeback_pipelines(
.remove::<MsaaWritebackBlitPipeline>();
}
}
Ok(())
}
15 changes: 9 additions & 6 deletions crates/bevy_core_pipeline/src/upscaling/mod.rs
@@ -1,4 +1,4 @@
use crate::blit::{BlitPipeline, BlitPipelineKey};
use crate::blit::{BlitKey, BlitPipeline};
use bevy_app::prelude::*;
use bevy_ecs::prelude::*;
use bevy_platform::collections::HashSet;
@@ -39,10 +39,9 @@ pub struct ViewUpscalingPipeline(CachedRenderPipelineId);
fn prepare_view_upscaling_pipelines(
mut commands: Commands,
mut pipeline_cache: ResMut<PipelineCache>,
mut pipelines: ResMut<SpecializedRenderPipelines<BlitPipeline>>,
blit_pipeline: Res<BlitPipeline>,
mut blit_pipeline: ResMut<BlitPipeline>,
view_targets: Query<(Entity, &ViewTarget, Option<&ExtractedCamera>)>,
) {
) -> Result<(), BevyError> {
let mut output_textures = <HashSet<_>>::default();
for (entity, view_target, camera) in view_targets.iter() {
let out_texture_id = view_target.out_texture().id();
@@ -73,12 +72,14 @@ fn prepare_view_upscaling_pipelines(
None
};

let key = BlitPipelineKey {
let key = BlitKey {
texture_format: view_target.out_texture_format(),
blend_state,
samples: 1,
};
let pipeline = pipelines.specialize(&pipeline_cache, &blit_pipeline, key);
let pipeline = blit_pipeline
.specialized_cache
.specialize(&pipeline_cache, key)?;

// Ensure the pipeline is loaded before continuing the frame to prevent frames without any GPU work submitted
pipeline_cache.block_on_render_pipeline(pipeline);
@@ -87,4 +88,6 @@ fn prepare_view_upscaling_pipelines(
.entity(entity)
.insert(ViewUpscalingPipeline(pipeline));
}

Ok(())
}
122 changes: 13 additions & 109 deletions crates/bevy_render/macros/src/specializer.rs
@@ -20,8 +20,6 @@ const SPECIALIZE_ALL_IDENT: &str = "all";
const KEY_ATTR_IDENT: &str = "key";
const KEY_DEFAULT_IDENT: &str = "default";

const BASE_DESCRIPTOR_ATTR_IDENT: &str = "base_descriptor";

enum SpecializeImplTargets {
All,
Specific(Vec<Path>),
@@ -87,7 +85,6 @@ struct FieldInfo {
ty: Type,
member: Member,
key: Key,
use_base_descriptor: bool,
}

impl FieldInfo {
@@ -117,15 +114,6 @@ impl FieldInfo {
parse_quote!(#ty: #specialize_path::Specializer<#target_path>)
}
}

fn get_base_descriptor_predicate(
&self,
specialize_path: &Path,
target_path: &Path,
) -> WherePredicate {
let ty = &self.ty;
parse_quote!(#ty: #specialize_path::GetBaseDescriptor<#target_path>)
}
}

fn get_field_info(
@@ -151,12 +139,8 @@ fn get_field_info(

let mut use_key_field = true;
let mut key = Key::Index(key_index);
let mut use_base_descriptor = false;
for attr in &field.attrs {
match &attr.meta {
Meta::Path(path) if path.is_ident(&BASE_DESCRIPTOR_ATTR_IDENT) => {
use_base_descriptor = true;
}
Meta::List(MetaList { path, tokens, .. }) if path.is_ident(&KEY_ATTR_IDENT) => {
let owned_tokens = tokens.clone().into();
let Ok(parsed_key) = syn::parse::<Key>(owned_tokens) else {
@@ -190,7 +174,6 @@ fn get_field_info(
ty: field_ty,
member: field_member,
key,
use_base_descriptor,
});
}

@@ -261,41 +244,18 @@ pub fn impl_specializer(input: TokenStream) -> TokenStream {
})
.collect();

let base_descriptor_fields = field_info
.iter()
.filter(|field| field.use_base_descriptor)
.collect::<Vec<_>>();

if base_descriptor_fields.len() > 1 {
return syn::Error::new(
Span::call_site(),
"Too many #[base_descriptor] attributes found. It must be present on exactly one field",
)
.into_compile_error()
.into();
}

let base_descriptor_field = base_descriptor_fields.first().copied();

match targets {
SpecializeImplTargets::All => {
let specialize_impl = impl_specialize_all(
&specialize_path,
&ecs_path,
&ast,
&field_info,
&key_patterns,
&key_tuple_idents,
);
let get_base_descriptor_impl = base_descriptor_field
.map(|field_info| impl_get_base_descriptor_all(&specialize_path, &ast, field_info))
.unwrap_or_default();
[specialize_impl, get_base_descriptor_impl]
.into_iter()
.collect()
}
SpecializeImplTargets::Specific(targets) => {
let specialize_impls = targets.iter().map(|target| {
SpecializeImplTargets::All => impl_specialize_all(
&specialize_path,
&ecs_path,
&ast,
&field_info,
&key_patterns,
&key_tuple_idents,
),
SpecializeImplTargets::Specific(targets) => targets
.iter()
.map(|target| {
impl_specialize_specific(
&specialize_path,
&ecs_path,
@@ -305,14 +265,8 @@
&key_patterns,
&key_tuple_idents,
)
});
let get_base_descriptor_impls = targets.iter().filter_map(|target| {
base_descriptor_field.map(|field_info| {
impl_get_base_descriptor_specific(&specialize_path, &ast, field_info, target)
})
});
specialize_impls.chain(get_base_descriptor_impls).collect()
}
})
.collect(),
}
}

@@ -406,56 +360,6 @@ fn impl_specialize_specific(
})
}

fn impl_get_base_descriptor_specific(
specialize_path: &Path,
ast: &DeriveInput,
base_descriptor_field_info: &FieldInfo,
target_path: &Path,
) -> TokenStream {
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
let field_ty = &base_descriptor_field_info.ty;
let field_member = &base_descriptor_field_info.member;
TokenStream::from(quote!(
impl #impl_generics #specialize_path::GetBaseDescriptor<#target_path> for #struct_name #type_generics #where_clause {
fn get_base_descriptor(&self) -> <#target_path as #specialize_path::Specializable>::Descriptor {
<#field_ty as #specialize_path::GetBaseDescriptor<#target_path>>::get_base_descriptor(&self.#field_member)
}
}
))
}

fn impl_get_base_descriptor_all(
specialize_path: &Path,
ast: &DeriveInput,
base_descriptor_field_info: &FieldInfo,
) -> TokenStream {
let target_path = Path::from(format_ident!("T"));
let struct_name = &ast.ident;
let mut generics = ast.generics.clone();
generics.params.insert(
0,
parse_quote!(#target_path: #specialize_path::Specializable),
);

let where_clause = generics.make_where_clause();
where_clause.predicates.push(
base_descriptor_field_info.get_base_descriptor_predicate(specialize_path, &target_path),
);

let (_, type_generics, _) = ast.generics.split_for_impl();
let (impl_generics, _, where_clause) = &generics.split_for_impl();
let field_ty = &base_descriptor_field_info.ty;
let field_member = &base_descriptor_field_info.member;
TokenStream::from(quote! {
impl #impl_generics #specialize_path::GetBaseDescriptor<#target_path> for #struct_name #type_generics #where_clause {
fn get_base_descriptor(&self) -> <#target_path as #specialize_path::Specializable>::Descriptor {
<#field_ty as #specialize_path::GetBaseDescriptor<#target_path>>::get_base_descriptor(&self.#field_member)
}
}
})
}

pub fn impl_specializer_key(input: TokenStream) -> TokenStream {
let bevy_render_path: Path = crate::bevy_render_path();
let specialize_path = {
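
The diff above migrates the blit pipeline from the old SpecializedRenderPipeline / SpecializedRenderPipelines<BlitPipeline> pair to the spec_v2 Specializer / SpecializedCache flow. The sketch below only mirrors what the diff itself shows: MyKey and MySpecializer are hypothetical names, and the exact module paths and trait signatures are assumptions inferred from this PR, not a confirmed public API.

// Minimal sketch of the spec_v2 flow used above. `MyKey` and `MySpecializer`
// are hypothetical; import paths and signatures are assumptions based on this diff.
use bevy_ecs::prelude::*;
use bevy_render::render_resource::*;

#[derive(PartialEq, Eq, Hash, Clone, Copy, SpecializerKey)]
pub struct MyKey {
    pub samples: u32,
}

pub struct MySpecializer;

impl Specializer<RenderPipeline> for MySpecializer {
    type Key = MyKey;

    fn specialize(
        &self,
        key: Self::Key,
        descriptor: &mut <RenderPipeline as Specializable>::Descriptor,
    ) -> Result<Canonical<Self::Key>, BevyError> {
        // Mutate the shared base descriptor in place instead of building a
        // fresh RenderPipelineDescriptor per key, as the old trait required.
        descriptor.multisample.count = key.samples;
        Ok(key)
    }
}

// Startup: build the base descriptor once and wrap it with the specializer:
//     let cache = SpecializedCache::new(MySpecializer, base_descriptor);
// Per frame: resolve a key into a cached pipeline id, with errors bubbling up as BevyError:
//     let pipeline_id = cache.specialize(&pipeline_cache, MyKey { samples: 4 })?;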