1#[cfg(feature = "backend_drm")]
2use crate::wayland::drm_syncobj::{DrmSyncPoint, DrmSyncobjCachedState};
3use crate::{
4 backend::renderer::{
5 buffer_dimensions, buffer_has_alpha, element::RenderElement, ContextId, ErasedContextId, ImportAll,
6 Renderer, Texture,
7 },
8 utils::{Buffer as BufferCoord, Coordinate, Logical, Physical, Point, Rectangle, Scale, Size, Transform},
9 wayland::{
10 compositor::{
11 self, add_destruction_hook, is_sync_subsurface, with_surface_tree_downward,
12 with_surface_tree_upward, BufferAssignment, Damage, RectangleKind, SubsurfaceCachedState,
13 SurfaceAttributes, SurfaceData, TraversalAction,
14 },
15 viewporter,
16 },
17};
18
19use std::{
20 any::Any,
21 collections::{hash_map::Entry, HashMap},
22 sync::{Arc, Mutex},
23};
24
25use super::{CommitCounter, DamageBag, DamageSet, DamageSnapshot, SurfaceView};
26use tracing::{error, instrument, warn};
27use wayland_server::protocol::{wl_buffer::WlBuffer, wl_surface::WlSurface};
28
/// Type stored in a surface's user data map to hold its renderer-related state.
/// Access goes through the `Mutex`; see `with_renderer_surface_state`.
pub type RendererSurfaceStateUserData = Mutex<RendererSurfaceState>;
37
#[derive(Default, Debug)]
pub struct RendererSurfaceState {
    // Size of the currently attached buffer in buffer coordinates; `None`
    // when no valid buffer is attached.
    pub(crate) buffer_dimensions: Option<Size<i32, BufferCoord>>,
    // Integer scale taken from `SurfaceAttributes::buffer_scale` on commit.
    pub(crate) buffer_scale: i32,
    // Transform taken from `SurfaceAttributes::buffer_transform` on commit.
    pub(crate) buffer_transform: Transform,
    // Whether the buffer format has an alpha channel, as reported by
    // `buffer_has_alpha`; `None` when unknown or no buffer.
    pub(crate) buffer_has_alpha: Option<bool>,
    // Shared handle to the attached client buffer (released on drop).
    pub(crate) buffer: Option<Buffer>,
    // Accumulated buffer-coordinate damage across commits.
    pub(crate) damage: DamageBag<i32, BufferCoord>,
    // Per renderer context: the commit counter last imported by that context
    // (see `import_surface`).
    pub(crate) renderer_seen: HashMap<ErasedContextId, CommitCounter>,
    // Per renderer context: the imported texture, type-erased and downcast
    // again in `texture()`.
    pub(crate) textures: HashMap<ErasedContextId, Box<dyn Any>>,
    // View (src/dst/offset) computed from viewport and subsurface state.
    pub(crate) surface_view: Option<SurfaceView>,
    // Opaque regions in surface-local logical coordinates, rebuilt on commit.
    pub(crate) opaque_regions: Vec<Rectangle<i32, Logical>>,
}
52
// SAFETY: NOTE(review): `RendererSurfaceState` holds a `WlBuffer` and
// `Box<dyn Any>` texture entries, neither of which is automatically
// `Send`/`Sync`. These impls assert that every value ever stored in
// `textures` is itself safe to move/share across threads and that the
// `WlBuffer` handle may be used from other threads — neither invariant is
// provable from this file; confirm against the renderer/texture
// implementations before relying on it.
unsafe impl Send for RendererSurfaceState {}
unsafe impl Sync for RendererSurfaceState {}
58
// Owned interior of `Buffer`: the wl_buffer plus (with explicit sync) the
// syncobj acquire/release points committed alongside it. Dropping this
// releases the buffer back to the client (see the `Drop` impl below).
#[derive(Debug)]
struct InnerBuffer {
    buffer: WlBuffer,
    // Point the consumer is expected to wait on before reading the buffer
    // contents (linux-drm-syncobj style; see `DrmSyncPoint`).
    #[cfg(feature = "backend_drm")]
    acquire_point: Option<DrmSyncPoint>,
    // Point signalled when the compositor is done with the buffer.
    #[cfg(feature = "backend_drm")]
    release_point: Option<DrmSyncPoint>,
}
67
68impl Drop for InnerBuffer {
69 #[inline]
70 fn drop(&mut self) {
71 self.buffer.release();
72 #[cfg(feature = "backend_drm")]
73 if let Some(release_point) = &self.release_point {
74 if let Err(err) = release_point.signal() {
75 tracing::error!("Failed to signal syncobj release point: {}", err);
76 }
77 }
78 }
79}
80
/// A shared, cloneable handle to a client buffer.
///
/// The underlying `wl_buffer` is released (and the syncobj release point
/// signalled, if any) when the last clone is dropped — see `InnerBuffer`'s
/// `Drop` impl.
#[derive(Debug, Clone)]
pub struct Buffer {
    inner: Arc<InnerBuffer>,
}
86
impl Buffer {
    /// Wraps a buffer that uses implicit synchronization (no syncobj
    /// acquire/release points are attached).
    pub fn with_implicit(buffer: WlBuffer) -> Self {
        Self {
            inner: Arc::new(InnerBuffer {
                buffer,
                #[cfg(feature = "backend_drm")]
                acquire_point: None,
                #[cfg(feature = "backend_drm")]
                release_point: None,
            }),
        }
    }

    /// Wraps a buffer that uses explicit synchronization: `acquire_point`
    /// must be waited on before reading the buffer, and `release_point` is
    /// signalled when the handle is dropped.
    #[cfg(feature = "backend_drm")]
    pub fn with_explicit(buffer: WlBuffer, acquire_point: DrmSyncPoint, release_point: DrmSyncPoint) -> Self {
        Self {
            inner: Arc::new(InnerBuffer {
                buffer,
                acquire_point: Some(acquire_point),
                release_point: Some(release_point),
            }),
        }
    }

    /// The explicit-sync acquire point, if this buffer was created with
    /// [`Buffer::with_explicit`].
    #[cfg(feature = "backend_drm")]
    #[allow(dead_code)]
    pub(crate) fn acquire_point(&self) -> Option<&DrmSyncPoint> {
        self.inner.acquire_point.as_ref()
    }
}
119
// Deref to the underlying `WlBuffer` so a `Buffer` can be passed wherever a
// `&WlBuffer` is expected (e.g. `buffer_type`, `import_buffer`).
impl std::ops::Deref for Buffer {
    type Target = WlBuffer;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.inner.buffer
    }
}
128
// Allow comparing a `Buffer` directly against a raw `WlBuffer` (used in
// `update_buffer` to detect re-commits of the same buffer).
impl PartialEq<WlBuffer> for Buffer {
    #[inline]
    fn eq(&self, other: &WlBuffer) -> bool {
        self.inner.buffer == *other
    }
}
135
// Same comparison for `&Buffer`, so closures receiving a reference (e.g.
// `Option::is_some_and`) can compare without an extra deref.
impl PartialEq<WlBuffer> for &Buffer {
    #[inline]
    fn eq(&self, other: &WlBuffer) -> bool {
        self.inner.buffer == *other
    }
}
142
impl RendererSurfaceState {
    /// Applies the surface's newly committed state to this renderer state:
    /// adopts the attached buffer (or resets on removal), records buffer
    /// scale/transform, accumulates damage in buffer coordinates and rebuilds
    /// the opaque regions. Called from `on_commit_buffer_handler`.
    #[profiling::function]
    pub(crate) fn update_buffer(&mut self, states: &SurfaceData) {
        // Explicit-sync points committed alongside the buffer; `take()`n below
        // when a new buffer is adopted.
        #[cfg(feature = "backend_drm")]
        let mut guard = states.cached_state.get::<DrmSyncobjCachedState>();
        #[cfg(feature = "backend_drm")]
        let syncobj_state = guard.current();

        let mut guard = states.cached_state.get::<SurfaceAttributes>();
        let attrs = guard.current();

        // Remember whether this commit attached a new buffer before the
        // assignment is consumed below; damage is only tracked for new buffers.
        let new_buffer = matches!(attrs.buffer, Some(BufferAssignment::NewBuffer(_)));
        match attrs.buffer.take() {
            Some(BufferAssignment::NewBuffer(buffer)) => {
                self.buffer_dimensions = buffer_dimensions(&buffer);
                if self.buffer_dimensions.is_none() {
                    // The buffer's size could not be determined — treat it as
                    // invalid and drop all state derived from a buffer.
                    self.reset();
                    return;
                }
                self.buffer_has_alpha = buffer_has_alpha(&buffer);
                self.buffer_scale = attrs.buffer_scale;
                self.buffer_transform = attrs.buffer_transform.into();

                // Only replace the handle if the client attached a different
                // wl_buffer; re-committing the same buffer keeps the existing
                // handle alive.
                // NOTE(review): in the same-buffer case any newly committed
                // acquire/release points are NOT taken here — confirm that is
                // intended.
                if !self.buffer.as_ref().is_some_and(|b| b == buffer) {
                    self.buffer = Some(Buffer {
                        inner: Arc::new(InnerBuffer {
                            buffer,
                            #[cfg(feature = "backend_drm")]
                            acquire_point: syncobj_state.acquire_point.take(),
                            #[cfg(feature = "backend_drm")]
                            release_point: syncobj_state.release_point.take(),
                        }),
                    });
                }

                // New contents invalidate every previously imported texture.
                self.textures.clear();
            }
            Some(BufferAssignment::Removed) => {
                // Client detached the buffer entirely.
                self.reset();
                return;
            }
            None => {}
        };

        // Nothing below makes sense without an attached buffer.
        let Some(buffer_dimensions) = self.buffer_dimensions else {
            return;
        };

        let surface_size = buffer_dimensions.to_logical(self.buffer_scale, self.buffer_transform);
        let surface_view = SurfaceView::from_states(states, surface_size, attrs.client_scale);
        let surface_view_changed = self.surface_view.replace(surface_view) != Some(surface_view);

        if new_buffer {
            // Normalize all committed damage to buffer coordinates, clipped to
            // the buffer, and fold it into the damage bag. Surface-coordinate
            // damage is mapped through the view (viewport crop/scale) first.
            let buffer_damage = attrs.damage.drain(..).flat_map(|dmg| {
                match dmg {
                    Damage::Buffer(rect) => rect,
                    Damage::Surface(rect) => surface_view.rect_to_local(rect).to_i32_up().to_buffer(
                        self.buffer_scale,
                        self.buffer_transform,
                        &surface_size,
                    ),
                }
                .intersection(Rectangle::from_size(buffer_dimensions))
            });
            self.damage.add(buffer_damage);
        }

        // Opaque regions depend on both the buffer contents (alpha) and the
        // view size, so rebuild them when either changed.
        if new_buffer || surface_view_changed {
            self.opaque_regions.clear();
            if !self.buffer_has_alpha.unwrap_or(true) {
                // Fully opaque buffer: the whole destination is opaque.
                self.opaque_regions.push(Rectangle::from_size(surface_view.dst))
            } else if let Some(region_attributes) = &attrs.opaque_region {
                // Replay the client's Add/Subtract rectangle list, with each
                // rect clamped to the destination, keeping the accumulated
                // set disjoint (Add only contributes the uncovered parts).
                let opaque_regions = region_attributes
                    .rects
                    .iter()
                    .map(|(kind, rect)| {
                        let dest_size = surface_view.dst;

                        // Clamp the rect into the destination bounds.
                        let rect_constrained_loc = rect.loc.constrain(Rectangle::from_size(dest_size));
                        let rect_clamped_size = rect
                            .size
                            .clamp((0, 0), (dest_size.to_point() - rect_constrained_loc).to_size());

                        let rect = Rectangle::new(rect_constrained_loc, rect_clamped_size);

                        (kind, rect)
                    })
                    .fold(
                        // `take` reuses the (cleared) vec as the accumulator.
                        std::mem::take(&mut self.opaque_regions),
                        |mut new_regions, (kind, rect)| {
                            match kind {
                                RectangleKind::Add => {
                                    // Only add the parts not already covered,
                                    // keeping the set non-overlapping.
                                    let added_regions = rect.subtract_rects(
                                        new_regions
                                            .iter()
                                            .filter(|region| region.overlaps_or_touches(rect))
                                            .copied(),
                                    );
                                    new_regions.extend(added_regions);
                                }
                                RectangleKind::Subtract => {
                                    new_regions =
                                        Rectangle::subtract_rects_many_in_place(new_regions, [rect]);
                                }
                            }

                            new_regions
                        },
                    );

                self.opaque_regions = opaque_regions;
            }
        }
    }

    /// The commit counter of the most recently tracked damage state.
    pub fn current_commit(&self) -> CommitCounter {
        self.damage.current_commit()
    }

    /// Damage accumulated since `commit`, in buffer coordinates.
    ///
    /// Falls back to full-buffer damage when the bag cannot answer for the
    /// given commit (e.g. it is too old or unknown).
    pub fn damage_since(&self, commit: Option<CommitCounter>) -> DamageSet<i32, BufferCoord> {
        self.damage.damage_since(commit).unwrap_or_else(|| {
            self.buffer_dimensions
                .as_ref()
                .map(|size| DamageSet::from_slice(&[Rectangle::from_size(*size)]))
                .unwrap_or_default()
        })
    }

    /// Snapshot of the current damage state.
    pub fn damage(&self) -> DamageSnapshot<i32, BufferCoord> {
        self.damage.snapshot()
    }

    /// Size of the attached buffer in logical coordinates (after applying
    /// buffer scale and transform), if a buffer is attached.
    pub fn buffer_size(&self) -> Option<Size<i32, Logical>> {
        self.buffer_dimensions
            .as_ref()
            .map(|dim| dim.to_logical(self.buffer_scale, self.buffer_transform))
    }

    /// Scale of the attached buffer as committed by the client.
    pub fn buffer_scale(&self) -> i32 {
        self.buffer_scale
    }

    /// Transform of the attached buffer as committed by the client.
    pub fn buffer_transform(&self) -> Transform {
        self.buffer_transform
    }

    /// Destination size of the surface (viewport-aware), if known.
    pub fn surface_size(&self) -> Option<Size<i32, Logical>> {
        self.surface_view.map(|view| view.dst)
    }

    /// The currently attached buffer handle, if any.
    pub fn buffer(&self) -> Option<&Buffer> {
        self.buffer.as_ref()
    }

    /// The texture imported for the given renderer context, if one exists and
    /// downcasts to `T`.
    pub fn texture<T>(&self, id: ContextId<T>) -> Option<&T>
    where
        T: Texture + 'static,
    {
        self.textures.get(&id.erased()).and_then(|e| e.downcast_ref())
    }

    /// Opaque regions in surface-local logical coordinates.
    ///
    /// Returns `None` when the surface has no view yet or no opaque regions.
    pub fn opaque_regions(&self) -> Option<&[Rectangle<i32, Logical>]> {
        // No view means the regions (even if non-empty) are not meaningful.
        self.surface_size()?;

        if self.opaque_regions.is_empty() {
            return None;
        }

        Some(&self.opaque_regions[..])
    }

    /// The surface's current view (src rect, dst size, offset), if any.
    pub fn view(&self) -> Option<SurfaceView> {
        self.surface_view
    }

    // Clears all buffer-derived state (buffer removed/destroyed or invalid).
    // NOTE(review): `renderer_seen` is intentionally(?) not cleared — stale
    // commit counters make `damage_since` fall back to full damage; confirm.
    fn reset(&mut self) {
        self.buffer_dimensions = None;
        self.buffer = None;
        self.textures.clear();
        self.damage.reset();
        self.surface_view = None;
        self.buffer_has_alpha = None;
        self.opaque_regions.clear();
    }
}
358
/// Updates the [`RendererSurfaceState`] of a surface and all of its
/// subsurfaces after a commit, creating the state lazily for surfaces seen
/// for the first time and registering a destruction hook that resets it.
#[profiling::function]
pub fn on_commit_buffer_handler<D: 'static>(surface: &WlSurface) {
    // Synchronized subsurfaces apply their state together with their parent's
    // commit, so they are skipped here and handled from the parent.
    if !is_sync_subsurface(surface) {
        let mut new_surfaces = Vec::new();
        with_surface_tree_upward(
            surface,
            (),
            |_, _, _| TraversalAction::DoChildren(()),
            |surf, states, _| {
                // First commit of this surface? Create the state and remember
                // the surface so the destruction hook can be added below,
                // outside the traversal.
                if states
                    .data_map
                    .insert_if_missing_threadsafe(|| Mutex::new(RendererSurfaceState::default()))
                {
                    new_surfaces.push(surf.clone());
                }
                // Unwrap is fine: the entry was just ensured above.
                let mut data = states
                    .data_map
                    .get::<RendererSurfaceStateUserData>()
                    .unwrap()
                    .lock()
                    .unwrap();
                data.update_buffer(states);
            },
            |_, _, _| true,
        );
        for surf in &new_surfaces {
            // Reset the state when the surface is destroyed so the buffer
            // handle (and any syncobj release point) is released promptly.
            add_destruction_hook(surf, |_: &mut D, surface| {
                compositor::with_states(surface, |data| {
                    if let Some(mut state) = data
                        .data_map
                        .get::<RendererSurfaceStateUserData>()
                        .map(|s| s.lock().unwrap())
                    {
                        state.reset();
                    }
                });
            });
        }
    }
}
412
impl SurfaceView {
    /// Builds the view (source rectangle, destination size and
    /// parent-relative offset) for a surface from its committed viewport and
    /// subsurface state.
    fn from_states(states: &SurfaceData, surface_size: Size<i32, Logical>, client_scale: f64) -> SurfaceView {
        viewporter::ensure_viewport_valid(states, surface_size);
        let mut viewport_state = states.cached_state.get::<viewporter::ViewportCachedState>();
        let viewport = viewport_state.current();

        // No wp_viewport source set: the whole surface is the source.
        let src = viewport
            .src
            .unwrap_or_else(|| Rectangle::from_size(surface_size.to_f64()));
        // No wp_viewport destination set: use the surface size converted from
        // client to logical coordinates.
        let dst = viewport.size().unwrap_or(
            surface_size
                .to_f64()
                .to_client(1.)
                .to_logical(client_scale)
                .to_i32_round(),
        );
        // Only subsurfaces carry a location relative to their parent.
        let offset = if states.role == Some("subsurface") {
            states
                .cached_state
                .get::<SubsurfaceCachedState>()
                .current()
                .location
        } else {
            Default::default()
        };
        SurfaceView { src, dst, offset }
    }

    /// Maps a rectangle from view-local coordinates (inside `src`) to the
    /// surface's global/destination coordinate space.
    pub(crate) fn rect_to_global<N>(&self, rect: Rectangle<N, Logical>) -> Rectangle<f64, Logical>
    where
        N: Coordinate,
    {
        let scale = self.scale();
        let mut rect = rect.to_f64();
        rect.loc -= self.src.loc;
        rect.upscale(scale)
    }

    /// Inverse of [`SurfaceView::rect_to_global`]: maps a rectangle from the
    /// destination coordinate space back into view-local coordinates.
    pub(crate) fn rect_to_local<N>(&self, rect: Rectangle<N, Logical>) -> Rectangle<f64, Logical>
    where
        N: Coordinate,
    {
        let scale = self.scale();
        let mut rect = rect.to_f64().downscale(scale);
        rect.loc += self.src.loc;
        rect
    }

    // Per-axis dst/src scale factor.
    // NOTE(review): divides by the src size — assumes `ensure_viewport_valid`
    // guarantees a non-empty source; confirm, otherwise this yields inf/NaN.
    fn scale(&self) -> Scale<f64> {
        Scale::from((
            self.dst.w as f64 / self.src.size.w,
            self.dst.h as f64 / self.src.size.h,
        ))
    }
}
468
469pub fn with_renderer_surface_state<F, T>(surface: &WlSurface, cb: F) -> Option<T>
475where
476 F: FnOnce(&mut RendererSurfaceState) -> T,
477{
478 compositor::with_states(surface, |states| {
479 let data = states.data_map.get::<RendererSurfaceStateUserData>()?;
480 Some(cb(&mut data.lock().unwrap()))
481 })
482}
483
/// Imports the surface's current buffer into `renderer`, if it has not been
/// imported for this renderer context yet, uploading only the damage
/// accumulated since the context last saw the surface.
///
/// Returns `Err` only when the renderer itself fails to import; an unknown
/// buffer format is logged and tolerated.
#[instrument(level = "trace", skip_all)]
#[profiling::function]
pub fn import_surface<R>(renderer: &mut R, states: &SurfaceData) -> Result<(), R::Error>
where
    R: Renderer + ImportAll,
    R::TextureId: 'static,
{
    if let Some(data) = states.data_map.get::<RendererSurfaceStateUserData>() {
        let context_id = renderer.context_id().erased();
        let mut data_ref = data.lock().unwrap();
        let data = &mut *data_ref;

        // Compute the damage to upload *before* taking the `textures` entry:
        // `damage_since` borrows the whole state, while `entry` holds a
        // mutable borrow of the `textures` field.
        let last_commit = data.renderer_seen.get(&context_id);
        let buffer_damage = data.damage_since(last_commit.copied());
        if let Entry::Vacant(e) = data.textures.entry(context_id.clone()) {
            if let Some(buffer) = data.buffer.as_ref() {
                // Single-pixel buffers are never imported as textures.
                // NOTE(review): presumably rendered via a dedicated
                // solid-color path elsewhere — confirm.
                if matches!(
                    crate::backend::renderer::buffer_type(buffer),
                    Some(crate::backend::renderer::BufferType::SinglePixel)
                ) {
                    return Ok(());
                }

                match renderer.import_buffer(buffer, Some(states), &buffer_damage) {
                    Some(Ok(m)) => {
                        e.insert(Box::new(m));
                        // Record which commit this context has now seen so the
                        // next import only uploads newer damage.
                        data.renderer_seen.insert(context_id, data.current_commit());
                    }
                    Some(Err(err)) => {
                        warn!("Error loading buffer: {}", err);
                        return Err(err);
                    }
                    None => {
                        // Renderer did not recognize the buffer type; leave
                        // the texture slot empty without failing the call.
                        error!("Unknown buffer format for: {:?}", buffer);
                    }
                }
            }
        }
    }

    Ok(())
}
534
/// Imports `surface` and all of its subsurfaces into `renderer` via
/// [`import_surface`].
///
/// Traversal skips the children of any surface that could not be imported
/// (no state or no texture for this context). The first import error is
/// remembered and returned after the traversal completes.
#[instrument(level = "trace", skip_all)]
#[profiling::function]
pub fn import_surface_tree<R>(renderer: &mut R, surface: &WlSurface) -> Result<(), R::Error>
where
    R: Renderer + ImportAll,
    R::TextureId: 'static,
{
    // Location/scale are only threaded through to keep the traversal shape;
    // nothing is drawn here.
    let scale = 1.0;
    let location: Point<f64, Physical> = (0.0, 0.0).into();

    let mut result = Ok(());
    with_surface_tree_downward(
        surface,
        location,
        |_surface, states, location| {
            let mut location = *location;
            // Record the first failure but keep traversing so unrelated
            // subtrees still get imported.
            if let Err(err) = import_surface(renderer, states) {
                result = Err(err);
            }

            if let Some(data) = states.data_map.get::<RendererSurfaceStateUserData>() {
                let mut data_ref = data.lock().unwrap();
                let data = &mut *data_ref;
                if data.textures.contains_key(&renderer.context_id().erased()) {
                    // NOTE(review): unwrap assumes `surface_view` is always
                    // `Some` whenever a texture exists for this context (both
                    // are set on commit/import) — confirm this invariant.
                    let surface_view = data.surface_view.unwrap();
                    // Children are positioned relative to this surface.
                    location += surface_view.offset.to_f64().to_physical(scale);
                    TraversalAction::DoChildren(location)
                } else {
                    TraversalAction::SkipChildren
                }
            } else {
                TraversalAction::SkipChildren
            }
        },
        |_, _, _| {},
        |_, _, _| true,
    );
    result
}
586
587#[instrument(level = "trace", skip(frame, scale, elements))]
597#[profiling::function]
598pub fn draw_render_elements<R, S, E>(
599 frame: &mut R::Frame<'_, '_>,
600 scale: S,
601 elements: &[E],
602 damage: &[Rectangle<i32, Physical>],
603) -> Result<Option<Vec<Rectangle<i32, Physical>>>, R::Error>
604where
605 R: Renderer,
606 R::TextureId: 'static,
607 S: Into<Scale<f64>>,
608 E: RenderElement<R>,
609{
610 let scale = scale.into();
611
612 let mut render_elements: Vec<&E> = Vec::with_capacity(elements.len());
613 let mut opaque_regions: Vec<Rectangle<i32, Physical>> = Vec::new();
614 let mut render_damage: Vec<Rectangle<i32, Physical>> = Vec::with_capacity(damage.len());
615
616 for element in elements {
617 let element_geometry = element.geometry(scale);
618
619 let is_hidden = element_geometry
621 .subtract_rects(opaque_regions.iter().copied())
622 .is_empty();
623
624 if is_hidden {
625 continue;
627 }
628
629 render_damage.extend(Rectangle::subtract_rects_many(
630 damage.iter().copied(),
631 opaque_regions.iter().copied(),
632 ));
633
634 opaque_regions.extend(element.opaque_regions(scale).into_iter().map(|mut region| {
635 region.loc += element_geometry.loc;
636 region
637 }));
638 render_elements.insert(0, element);
639 }
640
641 render_damage.dedup();
643 render_damage.retain(|rect| !rect.is_empty());
644 render_damage = render_damage
646 .into_iter()
647 .fold(Vec::new(), |new_damage, mut rect| {
648 let (overlapping, mut new_damage): (Vec<_>, Vec<_>) = new_damage
650 .into_iter()
651 .partition(|other| other.overlaps_or_touches(rect));
652
653 for overlap in overlapping {
654 rect = rect.merge(overlap);
655 }
656 new_damage.push(rect);
657 new_damage
658 });
659
660 if render_damage.is_empty() {
661 return Ok(None);
662 }
663
664 for element in render_elements.iter() {
665 let element_geometry = element.geometry(scale);
666
667 let element_damage = damage
668 .iter()
669 .filter_map(|d| d.intersection(element_geometry))
670 .map(|mut d| {
671 d.loc -= element_geometry.loc;
672 d
673 })
674 .collect::<Vec<_>>();
675
676 if element_damage.is_empty() {
677 continue;
678 }
679
680 element.draw(frame, element.src(), element_geometry, &element_damage, &[])?;
681 }
682
683 Ok(Some(render_damage))
684}