#[cfg(feature = "backend_drm")]
use crate::wayland::drm_syncobj::{DrmSyncPoint, DrmSyncobjCachedState};
use crate::{
    backend::renderer::{
        buffer_dimensions, buffer_has_alpha, element::RenderElement, ContextId, ImportAll, Renderer,
    },
    utils::{Buffer as BufferCoord, Coordinate, Logical, Physical, Point, Rectangle, Scale, Size, Transform},
    wayland::{
        compositor::{
            self, add_destruction_hook, is_sync_subsurface, with_surface_tree_downward,
            with_surface_tree_upward, BufferAssignment, Damage, RectangleKind, SubsurfaceCachedState,
            SurfaceAttributes, SurfaceData, TraversalAction,
        },
        viewporter,
    },
};

use std::{
    any::Any,
    collections::{hash_map::Entry, HashMap},
    sync::{Arc, Mutex},
};

use super::{CommitCounter, DamageBag, DamageSet, DamageSnapshot, SurfaceView};
use tracing::{error, instrument, warn};
use wayland_server::protocol::{wl_buffer::WlBuffer, wl_surface::WlSurface};
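/// Type of the user data entry that stores the [`RendererSurfaceState`] of a surface.
///
/// The state is inserted into a surface's `data_map` by [`on_commit_buffer_handler`] and can be
/// accessed from inside [`compositor::with_states`]. A minimal sketch, assuming a `surface:
/// WlSurface` handle obtained elsewhere:
///
/// ```ignore
/// use smithay::backend::renderer::utils::RendererSurfaceStateUserData;
/// use smithay::wayland::compositor;
///
/// compositor::with_states(&surface, |states| {
///     if let Some(state) = states.data_map.get::<RendererSurfaceStateUserData>() {
///         let state = state.lock().unwrap();
///         println!("current buffer size: {:?}", state.buffer_size());
///     }
/// });
/// ```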

pub type RendererSurfaceStateUserData = Mutex<RendererSurfaceState>;
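/// Per-surface state tracked for rendering: the current buffer and its metadata, accumulated
/// buffer damage, the textures imported for each renderer context, the computed [`SurfaceView`]
/// and the opaque regions advertised by the client.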

#[derive(Default, Debug)]
pub struct RendererSurfaceState {
    pub(crate) buffer_dimensions: Option<Size<i32, BufferCoord>>,
    pub(crate) buffer_scale: i32,
    pub(crate) buffer_transform: Transform,
    pub(crate) buffer_has_alpha: Option<bool>,
    pub(crate) buffer: Option<Buffer>,
    pub(crate) damage: DamageBag<i32, BufferCoord>,
    pub(crate) renderer_seen: HashMap<ContextId, CommitCounter>,
    pub(crate) textures: HashMap<ContextId, Box<dyn Any>>,
    pub(crate) surface_view: Option<SurfaceView>,
    pub(crate) opaque_regions: Vec<Rectangle<i32, Logical>>,
}

// Manual impls: `textures` stores `Box<dyn Any>`, which is not automatically `Send`/`Sync`.
unsafe impl Send for RendererSurfaceState {}
unsafe impl Sync for RendererSurfaceState {}
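/// Shared inner representation behind [`Buffer`] handles: the committed [`WlBuffer`] plus the
/// optional explicit-sync acquire and release points. Dropping it releases the `wl_buffer` and
/// signals the release point, if one was set.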

#[derive(Debug)]
struct InnerBuffer {
    buffer: WlBuffer,
    #[cfg(feature = "backend_drm")]
    acquire_point: Option<DrmSyncPoint>,
    #[cfg(feature = "backend_drm")]
    release_point: Option<DrmSyncPoint>,
}

impl Drop for InnerBuffer {
    #[inline]
    fn drop(&mut self) {
        self.buffer.release();
        #[cfg(feature = "backend_drm")]
        if let Some(release_point) = &self.release_point {
            if let Err(err) = release_point.signal() {
                tracing::error!("Failed to signal syncobj release point: {}", err);
            }
        }
    }
}
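/// A reference-counted handle to a committed [`WlBuffer`].
///
/// Cloning is cheap; the underlying `wl_buffer` is released (and, with explicit sync, its release
/// point signalled) once the last handle is dropped. A minimal sketch, assuming a `wl_buffer`
/// received from a client commit:
///
/// ```ignore
/// let buffer = Buffer::with_implicit(wl_buffer);
/// let _handle = buffer.clone(); // both handles keep the same `wl_buffer` alive
/// ```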

#[derive(Debug, Clone)]
pub struct Buffer {
    inner: Arc<InnerBuffer>,
}

impl Buffer {
    /// Creates a buffer handle that relies on implicit synchronization.
    pub fn with_implicit(buffer: WlBuffer) -> Self {
        Self {
            inner: Arc::new(InnerBuffer {
                buffer,
                #[cfg(feature = "backend_drm")]
                acquire_point: None,
                #[cfg(feature = "backend_drm")]
                release_point: None,
            }),
        }
    }

    /// Creates a buffer handle with explicit acquire and release sync points.
    #[cfg(feature = "backend_drm")]
    pub fn with_explicit(buffer: WlBuffer, acquire_point: DrmSyncPoint, release_point: DrmSyncPoint) -> Self {
        Self {
            inner: Arc::new(InnerBuffer {
                buffer,
                acquire_point: Some(acquire_point),
                release_point: Some(release_point),
            }),
        }
    }

    #[cfg(feature = "backend_drm")]
    #[allow(dead_code)]
    pub(crate) fn acquire_point(&self) -> Option<&DrmSyncPoint> {
        self.inner.acquire_point.as_ref()
    }
}

impl std::ops::Deref for Buffer {
    type Target = WlBuffer;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.inner.buffer
    }
}

impl PartialEq<WlBuffer> for Buffer {
    #[inline]
    fn eq(&self, other: &WlBuffer) -> bool {
        self.inner.buffer == *other
    }
}

impl PartialEq<WlBuffer> for &Buffer {
    #[inline]
    fn eq(&self, other: &WlBuffer) -> bool {
        self.inner.buffer == *other
    }
}

impl RendererSurfaceState {
    /// Updates the state from the surface's cached state; called for every commit.
    #[profiling::function]
    pub(crate) fn update_buffer(&mut self, states: &SurfaceData) {
        #[cfg(feature = "backend_drm")]
        let mut guard = states.cached_state.get::<DrmSyncobjCachedState>();
        #[cfg(feature = "backend_drm")]
        let syncobj_state = guard.current();

        let mut guard = states.cached_state.get::<SurfaceAttributes>();
        let attrs = guard.current();

        let new_buffer = matches!(attrs.buffer, Some(BufferAssignment::NewBuffer(_)));
        match attrs.buffer.take() {
            Some(BufferAssignment::NewBuffer(buffer)) => {
                self.buffer_dimensions = buffer_dimensions(&buffer);
                if self.buffer_dimensions.is_none() {
                    self.reset();
                    return;
                }
                self.buffer_has_alpha = buffer_has_alpha(&buffer);
                self.buffer_scale = attrs.buffer_scale;
                self.buffer_transform = attrs.buffer_transform.into();

                if !self.buffer.as_ref().is_some_and(|b| b == buffer) {
                    self.buffer = Some(Buffer {
                        inner: Arc::new(InnerBuffer {
                            buffer,
                            #[cfg(feature = "backend_drm")]
                            acquire_point: syncobj_state.acquire_point.take(),
                            #[cfg(feature = "backend_drm")]
                            release_point: syncobj_state.release_point.take(),
                        }),
                    });
                }

                self.textures.clear();
            }
            Some(BufferAssignment::Removed) => {
                self.reset();
                return;
            }
            None => {}
        };

        let Some(buffer_dimensions) = self.buffer_dimensions else {
            return;
        };

        let surface_size = buffer_dimensions.to_logical(self.buffer_scale, self.buffer_transform);
        let surface_view = SurfaceView::from_states(states, surface_size, attrs.client_scale);
        let surface_view_changed = self.surface_view.replace(surface_view) != Some(surface_view);

        if new_buffer {
            let buffer_damage = attrs.damage.drain(..).flat_map(|dmg| {
                match dmg {
                    Damage::Buffer(rect) => rect,
                    Damage::Surface(rect) => surface_view.rect_to_local(rect).to_i32_up().to_buffer(
                        self.buffer_scale,
                        self.buffer_transform,
                        &surface_size,
                    ),
                }
                .intersection(Rectangle::from_size(buffer_dimensions))
            });
            self.damage.add(buffer_damage);
        }

        if new_buffer || surface_view_changed {
            self.opaque_regions.clear();
            if !self.buffer_has_alpha.unwrap_or(true) {
                self.opaque_regions.push(Rectangle::from_size(surface_view.dst))
            } else if let Some(region_attributes) = &attrs.opaque_region {
                let opaque_regions = region_attributes
                    .rects
                    .iter()
                    .map(|(kind, rect)| {
                        let dest_size = surface_view.dst;

                        let rect_constrained_loc = rect.loc.constrain(Rectangle::from_size(dest_size));
                        let rect_clamped_size = rect
                            .size
                            .clamp((0, 0), (dest_size.to_point() - rect_constrained_loc).to_size());

                        let rect = Rectangle::new(rect_constrained_loc, rect_clamped_size);

                        (kind, rect)
                    })
                    .fold(
                        std::mem::take(&mut self.opaque_regions),
                        |mut new_regions, (kind, rect)| {
                            match kind {
                                RectangleKind::Add => {
                                    let added_regions = rect.subtract_rects(
                                        new_regions
                                            .iter()
                                            .filter(|region| region.overlaps_or_touches(rect))
                                            .copied(),
                                    );
                                    new_regions.extend(added_regions);
                                }
                                RectangleKind::Subtract => {
                                    new_regions =
                                        Rectangle::subtract_rects_many_in_place(new_regions, [rect]);
                                }
                            }

                            new_regions
                        },
                    );

                self.opaque_regions = opaque_regions;
            }
        }
    }

    /// Returns the current commit counter of the damage tracker.
    pub fn current_commit(&self) -> CommitCounter {
        self.damage.current_commit()
    }

    /// Returns the damage accumulated since `commit`, falling back to the full buffer area if the
    /// commit is unknown or too old.
    pub fn damage_since(&self, commit: Option<CommitCounter>) -> DamageSet<i32, BufferCoord> {
        self.damage.damage_since(commit).unwrap_or_else(|| {
            self.buffer_dimensions
                .as_ref()
                .map(|size| DamageSet::from_slice(&[Rectangle::from_size(*size)]))
                .unwrap_or_default()
        })
    }

    /// Returns a snapshot of the current damage in buffer coordinates.
    pub fn damage(&self) -> DamageSnapshot<i32, BufferCoord> {
        self.damage.snapshot()
    }

    /// Returns the size of the current buffer in logical coordinates, if a buffer is attached.
    pub fn buffer_size(&self) -> Option<Size<i32, Logical>> {
        self.buffer_dimensions
            .as_ref()
            .map(|dim| dim.to_logical(self.buffer_scale, self.buffer_transform))
    }

    /// Returns the scale of the current buffer.
    pub fn buffer_scale(&self) -> i32 {
        self.buffer_scale
    }

    /// Returns the transform of the current buffer.
    pub fn buffer_transform(&self) -> Transform {
        self.buffer_transform
    }

    /// Returns the logical size of the surface (the viewport destination), if a buffer is attached.
    pub fn surface_size(&self) -> Option<Size<i32, Logical>> {
        self.surface_view.map(|view| view.dst)
    }

    /// Returns the current buffer, if any.
    pub fn buffer(&self) -> Option<&Buffer> {
        self.buffer.as_ref()
    }

    /// Returns the texture imported for the given renderer context, if any.
    pub fn texture<R>(&self, id: &ContextId) -> Option<&R::TextureId>
    where
        R: Renderer,
        R::TextureId: 'static,
    {
        self.textures.get(id).and_then(|e| e.downcast_ref())
    }

    /// Returns the known opaque regions in surface-local logical coordinates, or `None` if there
    /// are none or no buffer is attached.
    pub fn opaque_regions(&self) -> Option<&[Rectangle<i32, Logical>]> {
        self.surface_size()?;

        if self.opaque_regions.is_empty() {
            return None;
        }

        Some(&self.opaque_regions[..])
    }

    /// Returns the current [`SurfaceView`], if a buffer is attached.
    pub fn view(&self) -> Option<SurfaceView> {
        self.surface_view
    }

    fn reset(&mut self) {
        self.buffer_dimensions = None;
        self.buffer = None;
        self.textures.clear();
        self.damage.reset();
        self.surface_view = None;
        self.buffer_has_alpha = None;
        self.opaque_regions.clear();
    }
}
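/// Commit handler for buffer management: creates (if missing) and updates the
/// [`RendererSurfaceState`] of the committed surface and all of its subsurfaces.
///
/// Compositors are expected to call this from their commit handler. A minimal sketch, assuming a
/// `State` type that implements Smithay's `CompositorHandler` (other required methods elided):
///
/// ```ignore
/// use smithay::backend::renderer::utils::on_commit_buffer_handler;
///
/// impl CompositorHandler for State {
///     // ...
///     fn commit(&mut self, surface: &WlSurface) {
///         on_commit_buffer_handler::<Self>(surface);
///         // compositor-specific commit handling goes here
///     }
/// }
/// ```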

#[profiling::function]
pub fn on_commit_buffer_handler<D: 'static>(surface: &WlSurface) {
    if !is_sync_subsurface(surface) {
        let mut new_surfaces = Vec::new();
        with_surface_tree_upward(
            surface,
            (),
            |_, _, _| TraversalAction::DoChildren(()),
            |surf, states, _| {
                if states
                    .data_map
                    .insert_if_missing_threadsafe(|| Mutex::new(RendererSurfaceState::default()))
                {
                    new_surfaces.push(surf.clone());
                }
                let mut data = states
                    .data_map
                    .get::<RendererSurfaceStateUserData>()
                    .unwrap()
                    .lock()
                    .unwrap();
                data.update_buffer(states);
            },
            |_, _, _| true,
        );
        for surf in &new_surfaces {
            add_destruction_hook(surf, |_: &mut D, surface| {
                compositor::with_states(surface, |data| {
                    if let Some(mut state) = data
                        .data_map
                        .get::<RendererSurfaceStateUserData>()
                        .map(|s| s.lock().unwrap())
                    {
                        state.reset();
                    }
                });
            });
        }
    }
}

impl SurfaceView {
    fn from_states(states: &SurfaceData, surface_size: Size<i32, Logical>, client_scale: f64) -> SurfaceView {
        viewporter::ensure_viewport_valid(states, surface_size);
        let mut viewport_state = states.cached_state.get::<viewporter::ViewportCachedState>();
        let viewport = viewport_state.current();

        let src = viewport
            .src
            .unwrap_or_else(|| Rectangle::from_size(surface_size.to_f64()));
        let dst = viewport.size().unwrap_or(
            surface_size
                .to_f64()
                .to_client(1.)
                .to_logical(client_scale)
                .to_i32_round(),
        );
        let offset = if states.role == Some("subsurface") {
            states
                .cached_state
                .get::<SubsurfaceCachedState>()
                .current()
                .location
        } else {
            Default::default()
        };
        SurfaceView { src, dst, offset }
    }
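
    // The viewport maps between surface-local space (relative to `src`) and global logical space:
    // `rect_to_global` subtracts the `src` offset and scales by `dst / src`, while `rect_to_local`
    // applies the inverse. `scale()` below is that `dst / src` ratio.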

    pub(crate) fn rect_to_global<N>(&self, rect: Rectangle<N, Logical>) -> Rectangle<f64, Logical>
    where
        N: Coordinate,
    {
        let scale = self.scale();
        let mut rect = rect.to_f64();
        rect.loc -= self.src.loc;
        rect.upscale(scale)
    }

    pub(crate) fn rect_to_local<N>(&self, rect: Rectangle<N, Logical>) -> Rectangle<f64, Logical>
    where
        N: Coordinate,
    {
        let scale = self.scale();
        let mut rect = rect.to_f64().downscale(scale);
        rect.loc += self.src.loc;
        rect
    }

    fn scale(&self) -> Scale<f64> {
        Scale::from((
            self.dst.w as f64 / self.src.size.w,
            self.dst.h as f64 / self.src.size.h,
        ))
    }
}
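/// Runs a closure with access to the [`RendererSurfaceState`] of a surface.
///
/// Returns `None` if the surface has no renderer state yet, i.e. it was never committed through
/// [`on_commit_buffer_handler`]. A minimal sketch, assuming a `surface: WlSurface` handle:
///
/// ```ignore
/// let buffer_size = with_renderer_surface_state(&surface, |state| state.buffer_size()).flatten();
/// ```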

pub fn with_renderer_surface_state<F, T>(surface: &WlSurface, cb: F) -> Option<T>
where
    F: FnOnce(&mut RendererSurfaceState) -> T,
{
    compositor::with_states(surface, |states| {
        let data = states.data_map.get::<RendererSurfaceStateUserData>()?;
        Some(cb(&mut data.lock().unwrap()))
    })
}
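/// Imports the current buffer of a single surface into the given renderer, if it has not been
/// imported for this renderer context yet. Single-pixel buffers are skipped (they are not imported
/// as textures). Usually called indirectly through [`import_surface_tree`].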

#[instrument(level = "trace", skip_all)]
#[profiling::function]
pub fn import_surface<R>(renderer: &mut R, states: &SurfaceData) -> Result<(), R::Error>
where
    R: Renderer + ImportAll,
    R::TextureId: 'static,
{
    if let Some(data) = states.data_map.get::<RendererSurfaceStateUserData>() {
        let context_id = renderer.context_id();
        let mut data_ref = data.lock().unwrap();
        let data = &mut *data_ref;

        let last_commit = data.renderer_seen.get(&context_id);
        let buffer_damage = data.damage_since(last_commit.copied());
        if let Entry::Vacant(e) = data.textures.entry(context_id.clone()) {
            if let Some(buffer) = data.buffer.as_ref() {
                if matches!(
                    crate::backend::renderer::buffer_type(buffer),
                    Some(crate::backend::renderer::BufferType::SinglePixel)
                ) {
                    return Ok(());
                }

                match renderer.import_buffer(buffer, Some(states), &buffer_damage) {
                    Some(Ok(m)) => {
                        e.insert(Box::new(m));
                        data.renderer_seen.insert(context_id, data.current_commit());
                    }
                    Some(Err(err)) => {
                        warn!("Error loading buffer: {}", err);
                        return Err(err);
                    }
                    None => {
                        error!("Unknown buffer format for: {:?}", buffer);
                    }
                }
            }
        }
    }

    Ok(())
}
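/// Imports the buffers of a surface and all of its subsurfaces into the given renderer.
///
/// This should run before rendering the surface tree so that a texture exists for every mapped
/// subsurface. A minimal sketch, assuming `renderer` and `surface` are available in the
/// compositor's render path:
///
/// ```ignore
/// use smithay::backend::renderer::utils::import_surface_tree;
///
/// if let Err(err) = import_surface_tree(&mut renderer, &surface) {
///     tracing::warn!("Failed to import surface tree: {}", err);
/// }
/// ```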

#[instrument(level = "trace", skip_all)]
#[profiling::function]
pub fn import_surface_tree<R>(renderer: &mut R, surface: &WlSurface) -> Result<(), R::Error>
where
    R: Renderer + ImportAll,
    R::TextureId: 'static,
{
    let scale = 1.0;
    let location: Point<f64, Physical> = (0.0, 0.0).into();

    let mut result = Ok(());
    with_surface_tree_downward(
        surface,
        location,
        |_surface, states, location| {
            let mut location = *location;
            if let Err(err) = import_surface(renderer, states) {
                result = Err(err);
            }

            if let Some(data) = states.data_map.get::<RendererSurfaceStateUserData>() {
                let mut data_ref = data.lock().unwrap();
                let data = &mut *data_ref;
                if data.textures.contains_key(&renderer.context_id()) {
                    let surface_view = data.surface_view.unwrap();
                    location += surface_view.offset.to_f64().to_physical(scale);
                    TraversalAction::DoChildren(location)
                } else {
                    TraversalAction::SkipChildren
                }
            } else {
                TraversalAction::SkipChildren
            }
        },
        |_, _, _| {},
        |_, _, _| true,
    );
    result
}
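/// Draws the given render elements into `frame`, using the elements' opaque regions to skip fully
/// occluded elements and to reduce the damage that actually has to be drawn.
///
/// Returns the merged regions that were drawn, or `None` if nothing needed to be drawn.
///
/// A minimal sketch, assuming a bound `frame`, a slice of `elements` and full-frame damage over a
/// placeholder `output_size` (all compositor-specific values):
///
/// ```ignore
/// let damage = [Rectangle::from_size(output_size)];
/// if let Some(drawn) = draw_render_elements(&mut frame, 1.0, &elements, &damage)? {
///     tracing::trace!("drawn regions: {:?}", drawn);
/// }
/// ```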

#[instrument(level = "trace", skip(frame, scale, elements))]
#[profiling::function]
pub fn draw_render_elements<R, S, E>(
    frame: &mut R::Frame<'_, '_>,
    scale: S,
    elements: &[E],
    damage: &[Rectangle<i32, Physical>],
) -> Result<Option<Vec<Rectangle<i32, Physical>>>, R::Error>
where
    R: Renderer,
    R::TextureId: 'static,
    S: Into<Scale<f64>>,
    E: RenderElement<R>,
{
    let scale = scale.into();

    let mut render_elements: Vec<&E> = Vec::with_capacity(elements.len());
    let mut opaque_regions: Vec<Rectangle<i32, Physical>> = Vec::new();
    let mut render_damage: Vec<Rectangle<i32, Physical>> = Vec::with_capacity(damage.len());

    for element in elements {
        let element_geometry = element.geometry(scale);

        let is_hidden = element_geometry
            .subtract_rects(opaque_regions.iter().copied())
            .is_empty();

        if is_hidden {
            continue;
        }

        render_damage.extend(Rectangle::subtract_rects_many(
            damage.iter().copied(),
            opaque_regions.iter().copied(),
        ));

        opaque_regions.extend(element.opaque_regions(scale).into_iter().map(|mut region| {
            region.loc += element_geometry.loc;
            region
        }));
        render_elements.insert(0, element);
    }

    render_damage.dedup();
    render_damage.retain(|rect| !rect.is_empty());
    render_damage = render_damage
        .into_iter()
        .fold(Vec::new(), |new_damage, mut rect| {
            let (overlapping, mut new_damage): (Vec<_>, Vec<_>) = new_damage
                .into_iter()
                .partition(|other| other.overlaps_or_touches(rect));

            for overlap in overlapping {
                rect = rect.merge(overlap);
            }
            new_damage.push(rect);
            new_damage
        });

    if render_damage.is_empty() {
        return Ok(None);
    }

    for element in render_elements.iter() {
        let element_geometry = element.geometry(scale);

        let element_damage = damage
            .iter()
            .filter_map(|d| d.intersection(element_geometry))
            .map(|mut d| {
                d.loc -= element_geometry.loc;
                d
            })
            .collect::<Vec<_>>();

        if element_damage.is_empty() {
            continue;
        }

        element.draw(frame, element.src(), element_geometry, &element_damage, &[])?;
    }

    Ok(Some(render_damage))
}