// Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/layers/delegated_renderer_layer_impl.h"

#include <algorithm>
#include <utility>

#include "base/bind.h"
#include "base/containers/hash_tables.h"
#include "cc/base/math_util.h"
#include "cc/layers/append_quads_data.h"
#include "cc/layers/render_pass_sink.h"
#include "cc/output/delegated_frame_data.h"
#include "cc/quads/render_pass_draw_quad.h"
#include "cc/quads/solid_color_draw_quad.h"
#include "cc/trees/layer_tree_impl.h"
#include "cc/trees/occlusion.h"

namespace cc {

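// A DelegatedRendererLayerImpl displays a frame produced by another
// compositor (the "delegating renderer"). The frame arrives as a list of
// RenderPasses plus the resources they reference; this layer remaps those
// resources into the parent compositor's ResourceProvider and re-emits the
// frame's quads as part of its own tree's output.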
DelegatedRendererLayerImpl::DelegatedRendererLayerImpl(LayerTreeImpl* tree_impl,
                                                       int id)
    : LayerImpl(tree_impl, id),
      have_render_passes_to_push_(false),
      inverse_device_scale_factor_(1.0f),
      child_id_(0),
      own_child_id_(false) {
}

DelegatedRendererLayerImpl::~DelegatedRendererLayerImpl() {
  ClearRenderPasses();
  ClearChildId();
}

bool DelegatedRendererLayerImpl::HasDelegatedContent() const { return true; }

bool DelegatedRendererLayerImpl::HasContributingDelegatedRenderPasses() const {
  // The root RenderPass for the layer is merged with its target
  // RenderPass in each frame. So we only have extra RenderPasses
  // to merge when we have a non-root RenderPass present.
  return render_passes_in_draw_order_.size() > 1;
}

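// Maps a resource id from the child (delegating) compositor's namespace to
// the corresponding id in the parent's ResourceProvider, recording each id
// seen in the frame. If an id has no mapping, the frame is flagged invalid.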
static ResourceProvider::ResourceId ResourceRemapHelper(
    bool* invalid_frame,
    const ResourceProvider::ResourceIdMap& child_to_parent_map,
    ResourceProvider::ResourceIdArray* resources_in_frame,
    ResourceProvider::ResourceId id) {
  ResourceProvider::ResourceIdMap::const_iterator it =
      child_to_parent_map.find(id);
  if (it == child_to_parent_map.end()) {
    *invalid_frame = true;
    return 0;
  }

  DCHECK_EQ(it->first, id);
  ResourceProvider::ResourceId remapped_id = it->second;
  resources_in_frame->push_back(id);
  return remapped_id;
}

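// Pushes this (pending-tree) layer's state to its active-tree counterpart:
// the child id registered with the ResourceProvider, and ownership of the
// most recently committed render pass list.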
void DelegatedRendererLayerImpl::PushPropertiesTo(LayerImpl* layer) {
  LayerImpl::PushPropertiesTo(layer);

  DelegatedRendererLayerImpl* delegated_layer =
      static_cast<DelegatedRendererLayerImpl*>(layer);

  // If we have a new child_id to give to the active layer, it should
  // have already deleted its old child_id.
  DCHECK(delegated_layer->child_id_ == 0 ||
         delegated_layer->child_id_ == child_id_);
  delegated_layer->inverse_device_scale_factor_ = inverse_device_scale_factor_;
  delegated_layer->child_id_ = child_id_;
  delegated_layer->own_child_id_ = true;
  own_child_id_ = false;

  if (have_render_passes_to_push_) {
    // This passes ownership of the render passes to the active tree.
    delegated_layer->SetRenderPasses(&render_passes_in_draw_order_);
    DCHECK(render_passes_in_draw_order_.empty());
    have_render_passes_to_push_ = false;
  }

  // This is just a copy for testing, since resources are added to the
  // ResourceProvider in the pending tree.
  delegated_layer->resources_ = resources_;
}

void DelegatedRendererLayerImpl::CreateChildIdIfNeeded(
    const ReturnCallback& return_callback) {
  if (child_id_)
    return;

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  child_id_ = resource_provider->CreateChild(return_callback);
  own_child_id_ = true;
}

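// Takes a delegated frame: receives its resources into the ResourceProvider,
// remaps every quad's resource ids into the parent namespace, records which
// resources the new frame uses, converts the damage rect into layer space,
// and stores the copied render pass list until it is pushed to the active
// tree. Callers must have called CreateChildIdIfNeeded() first (see the
// DCHECK below).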
void DelegatedRendererLayerImpl::SetFrameData(
    const DelegatedFrameData* frame_data,
    const gfx::RectF& damage_in_frame) {
  DCHECK(child_id_) << "CreateChildIdIfNeeded must be called first.";
  DCHECK(frame_data);
  DCHECK(!frame_data->render_pass_list.empty());
  // A frame with an empty root render pass is invalid.
  DCHECK(!frame_data->render_pass_list.back()->output_rect.IsEmpty());

  ResourceProvider* resource_provider = layer_tree_impl()->resource_provider();
  const ResourceProvider::ResourceIdMap& resource_map =
      resource_provider->GetChildToParentMap(child_id_);

  resource_provider->ReceiveFromChild(child_id_, frame_data->resource_list);

  RenderPassList render_pass_list;
  RenderPass::CopyAll(frame_data->render_pass_list, &render_pass_list);

  bool invalid_frame = false;
  ResourceProvider::ResourceIdArray resources_in_frame;
  DrawQuad::ResourceIteratorCallback remap_resources_to_parent_callback =
      base::Bind(&ResourceRemapHelper,
                 &invalid_frame,
                 resource_map,
                 &resources_in_frame);
  for (size_t i = 0; i < render_pass_list.size(); ++i) {
    RenderPass* pass = render_pass_list[i];
    for (auto& quad : pass->quad_list)
      quad.IterateResources(remap_resources_to_parent_callback);
  }

  if (invalid_frame) {
    // Declare we are still using the last frame's resources.
    resource_provider->DeclareUsedResourcesFromChild(child_id_, resources_);
    return;
  }

  // Declare we are using the new frame's resources.
  resources_.swap(resources_in_frame);
  resource_provider->DeclareUsedResourcesFromChild(child_id_, resources_);

  inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor;
  // Display size is already set so we can compute what the damage rect
  // will be in layer space. The damage may exceed the visible portion of
  // the frame, so intersect the damage to the layer's bounds.
  RenderPass* new_root_pass = render_pass_list.back();
  gfx::Size frame_size = new_root_pass->output_rect.size();
  gfx::RectF damage_in_layer = damage_in_frame;
  damage_in_layer.Scale(inverse_device_scale_factor_);
  SetUpdateRect(gfx::IntersectRects(
      gfx::UnionRects(update_rect(), damage_in_layer), gfx::RectF(bounds())));

  SetRenderPasses(&render_pass_list);
  have_render_passes_to_push_ = true;
}

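// Takes ownership of the given render passes, recording each pass's index so
// delegated RenderPass ids can later be translated into ids owned by this
// layer.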
void DelegatedRendererLayerImpl::SetRenderPasses(
    RenderPassList* render_passes_in_draw_order) {
  ClearRenderPasses();

  for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) {
    RenderPassList::iterator to_take =
        render_passes_in_draw_order->begin() + i;
    render_passes_index_by_id_.insert(
        std::pair<RenderPassId, int>((*to_take)->id, i));
    scoped_ptr<RenderPass> taken_render_pass =
        render_passes_in_draw_order->take(to_take);
    render_passes_in_draw_order_.push_back(taken_render_pass.Pass());
  }

  // Give back an empty array instead of nulls.
  render_passes_in_draw_order->clear();
}

void DelegatedRendererLayerImpl::ClearRenderPasses() {
  render_passes_index_by_id_.clear();
  render_passes_in_draw_order_.clear();
}

scoped_ptr<LayerImpl> DelegatedRendererLayerImpl::CreateLayerImpl(
    LayerTreeImpl* tree_impl) {
  return DelegatedRendererLayerImpl::Create(tree_impl, id());
}

void DelegatedRendererLayerImpl::ReleaseResources() {
  ClearRenderPasses();
  ClearChildId();
}

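// Delegated render passes are identified by their index into
// render_passes_in_draw_order_, shifted by one: index 0 of this layer's
// RenderPassId namespace is left for the RenderPass the layer itself may
// generate when it owns a RenderSurface (see the DCHECKs in
// AppendContributingRenderPasses and AppendQuads). So the delegated pass at
// index 0 becomes RenderPassId(id(), 1), index 1 becomes RenderPassId(id(), 2),
// and so on.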
static inline int IndexToId(int index) { return index + 1; }
static inline int IdToIndex(int id) { return id - 1; }

RenderPassId DelegatedRendererLayerImpl::FirstContributingRenderPassId() const {
  return RenderPassId(id(), IndexToId(0));
}

RenderPassId DelegatedRendererLayerImpl::NextContributingRenderPassId(
    RenderPassId previous) const {
  return RenderPassId(previous.layer_id, previous.index + 1);
}

bool DelegatedRendererLayerImpl::ConvertDelegatedRenderPassId(
    RenderPassId delegated_render_pass_id,
    RenderPassId* output_render_pass_id) const {
  base::hash_map<RenderPassId, int>::const_iterator found =
      render_passes_index_by_id_.find(delegated_render_pass_id);
  if (found == render_passes_index_by_id_.end()) {
    // Be robust against a RenderPass id that isn't part of the frame.
    return false;
  }
  unsigned delegated_render_pass_index = found->second;
  *output_render_pass_id =
      RenderPassId(id(), IndexToId(delegated_render_pass_index));
  return true;
}

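// Copies every non-root delegated RenderPass into the sink under an id owned
// by this layer, concatenating the layer's screen-space transform (scaled
// back by the inverse device scale factor) onto each copy's
// transform_to_root_target so the pass maps from the delegated frame's space
// to the root target.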
void DelegatedRendererLayerImpl::AppendContributingRenderPasses(
    RenderPassSink* render_pass_sink) {
  DCHECK(HasContributingDelegatedRenderPasses());

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();
  gfx::Transform delegated_frame_to_root_transform = screen_space_transform();
  delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_,
                                          inverse_device_scale_factor_);

  for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) {
    RenderPassId output_render_pass_id(-1, -1);
    bool present =
        ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id,
                                     &output_render_pass_id);

    // Don't clash with the RenderPass we generate if we own a RenderSurface.
    DCHECK(present) << render_passes_in_draw_order_[i]->id.layer_id << ", "
                    << render_passes_in_draw_order_[i]->id.index;
    DCHECK_GT(output_render_pass_id.index, 0);

    scoped_ptr<RenderPass> copy_pass =
        render_passes_in_draw_order_[i]->Copy(output_render_pass_id);
    copy_pass->transform_to_root_target.ConcatTransform(
        delegated_frame_to_root_transform);
    render_pass_sink->AppendRenderPass(copy_pass.Pass());
  }
}

bool DelegatedRendererLayerImpl::WillDraw(DrawMode draw_mode,
                                          ResourceProvider* resource_provider) {
  if (draw_mode == DRAW_MODE_RESOURCELESS_SOFTWARE)
    return false;
  return LayerImpl::WillDraw(draw_mode, resource_provider);
}

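// Emits this layer's quads into |render_pass|. When the target pass was
// created by the layer's render target (index 0), the root delegated pass is
// merged directly into it; otherwise the target pass is one of the
// contributing passes appended above, and that delegated pass's quads are
// emitted instead.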
void DelegatedRendererLayerImpl::AppendQuads(
    RenderPass* render_pass,
    const Occlusion& occlusion_in_content_space,
    AppendQuadsData* append_quads_data) {
  AppendRainbowDebugBorder(render_pass, append_quads_data);

  // This list will be empty after a lost context until a new frame arrives.
  if (render_passes_in_draw_order_.empty())
    return;

  RenderPassId target_render_pass_id = append_quads_data->render_pass_id;

  const RenderPass* root_delegated_render_pass =
      render_passes_in_draw_order_.back();

  DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin());
  gfx::Size frame_size = root_delegated_render_pass->output_rect.size();

  // If the index of the RenderPassId is 0, then it is a RenderPass generated
  // for a layer in this compositor, not the delegating renderer. Then we want
  // to merge our root RenderPass with the target RenderPass. Otherwise, it is
  // some RenderPass which we added from the delegating renderer.
  bool should_merge_root_render_pass_with_target = !target_render_pass_id.index;
  if (should_merge_root_render_pass_with_target) {
    // Verify that the RenderPass we are appending to is created by our
    // render_target.
    DCHECK(target_render_pass_id.layer_id == render_target()->id());

    AppendRenderPassQuads(render_pass,
                          occlusion_in_content_space,
                          append_quads_data,
                          root_delegated_render_pass,
                          frame_size);
  } else {
    // Verify that the RenderPass we are appending to was created by us.
    DCHECK(target_render_pass_id.layer_id == id());

    int render_pass_index = IdToIndex(target_render_pass_id.index);
    const RenderPass* delegated_render_pass =
        render_passes_in_draw_order_[render_pass_index];
    AppendRenderPassQuads(render_pass,
                          occlusion_in_content_space,
                          append_quads_data,
                          delegated_render_pass,
                          frame_size);
  }
}

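// When debug borders are enabled, draws translucent colored stripes along the
// layer's top, bottom, left, and right edges so delegated content stands out.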
void DelegatedRendererLayerImpl::AppendRainbowDebugBorder(
    RenderPass* render_pass,
    AppendQuadsData* append_quads_data) {
  if (!ShowDebugBorders())
    return;

  SharedQuadState* shared_quad_state =
      render_pass->CreateAndAppendSharedQuadState();
  PopulateSharedQuadState(shared_quad_state);

  SkColor color;
  float border_width;
  GetDebugBorderProperties(&color, &border_width);

  SkColor colors[] = {
      0x80ff0000,  // Red.
      0x80ffa500,  // Orange.
      0x80ffff00,  // Yellow.
      0x80008000,  // Green.
      0x800000ff,  // Blue.
      0x80ee82ee,  // Violet.
  };
  const int kNumColors = arraysize(colors);

  const int kStripeWidth = 300;
  const int kStripeHeight = 300;

  for (size_t i = 0; ; ++i) {
    // For horizontal lines.
    int x = kStripeWidth * i;
    int width = std::min(kStripeWidth, content_bounds().width() - x - 1);

    // For vertical lines.
    int y = kStripeHeight * i;
    int height = std::min(kStripeHeight, content_bounds().height() - y - 1);

    gfx::Rect top(x, 0, width, border_width);
    gfx::Rect bottom(x,
                     content_bounds().height() - border_width,
                     width,
                     border_width);
    gfx::Rect left(0, y, border_width, height);
    gfx::Rect right(content_bounds().width() - border_width,
                    y,
                    border_width,
                    height);

    if (top.IsEmpty() && left.IsEmpty())
      break;

    if (!top.IsEmpty()) {
      SolidColorDrawQuad* top_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      top_quad->SetNew(
          shared_quad_state, top, top, colors[i % kNumColors], false);

      SolidColorDrawQuad* bottom_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      bottom_quad->SetNew(shared_quad_state,
                          bottom,
                          bottom,
                          colors[kNumColors - 1 - (i % kNumColors)],
                          false);
    }

    if (!left.IsEmpty()) {
      SolidColorDrawQuad* left_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      left_quad->SetNew(shared_quad_state,
                        left,
                        left,
                        colors[kNumColors - 1 - (i % kNumColors)],
                        false);

      SolidColorDrawQuad* right_quad =
          render_pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
      right_quad->SetNew(
          shared_quad_state, right, right, colors[i % kNumColors], false);
    }
  }
}

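// Copies the quads of one delegated RenderPass into |render_pass|. Shared
// quad states are duplicated on demand; for the root delegated pass they are
// additionally mapped from frame space into this layer's target space and
// combined with the layer's own clip and opacity. Quads fully hidden by
// |occlusion_in_content_space| are dropped, and RenderPassDrawQuads have
// their pass ids converted into ids owned by this layer.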
void DelegatedRendererLayerImpl::AppendRenderPassQuads(
    RenderPass* render_pass,
    const Occlusion& occlusion_in_content_space,
    AppendQuadsData* append_quads_data,
    const RenderPass* delegated_render_pass,
    const gfx::Size& frame_size) const {
  const SharedQuadState* delegated_shared_quad_state = NULL;
  SharedQuadState* output_shared_quad_state = NULL;

  for (const auto& delegated_quad : delegated_render_pass->quad_list) {
    bool is_root_delegated_render_pass =
        delegated_render_pass == render_passes_in_draw_order_.back();

    if (delegated_quad.shared_quad_state != delegated_shared_quad_state) {
      delegated_shared_quad_state = delegated_quad.shared_quad_state;
      output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState();
      output_shared_quad_state->CopyFrom(delegated_shared_quad_state);

      if (is_root_delegated_render_pass) {
        gfx::Transform delegated_frame_to_target_transform = draw_transform();
        delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_,
                                                  inverse_device_scale_factor_);

        output_shared_quad_state->content_to_target_transform.ConcatTransform(
            delegated_frame_to_target_transform);

        if (render_target() == this) {
          DCHECK(!is_clipped());
          DCHECK(render_surface());
          DCHECK_EQ(0, num_unclipped_descendants());
          output_shared_quad_state->clip_rect =
              MathUtil::MapEnclosingClippedRect(
                  delegated_frame_to_target_transform,
                  output_shared_quad_state->clip_rect);
        } else {
          gfx::Rect clip_rect = drawable_content_rect();
          if (output_shared_quad_state->is_clipped) {
            clip_rect.Intersect(MathUtil::MapEnclosingClippedRect(
                delegated_frame_to_target_transform,
                output_shared_quad_state->clip_rect));
          }
          output_shared_quad_state->clip_rect = clip_rect;
          output_shared_quad_state->is_clipped = true;
        }

        output_shared_quad_state->opacity *= draw_opacity();
      }
    }
    DCHECK(output_shared_quad_state);

    gfx::Transform quad_content_to_delegated_target_space =
        output_shared_quad_state->content_to_target_transform;
    if (!is_root_delegated_render_pass) {
      quad_content_to_delegated_target_space.ConcatTransform(
          delegated_render_pass->transform_to_root_target);
      quad_content_to_delegated_target_space.ConcatTransform(draw_transform());
    }

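    // The incoming occlusion is expressed in this layer's content space; hand
    // it the quad's full draw transform so the unoccluded-rect query below is
    // answered in the quad's own space.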
    Occlusion occlusion_in_quad_space =
        occlusion_in_content_space.GetOcclusionWithGivenDrawTransform(
            quad_content_to_delegated_target_space);

    gfx::Rect quad_visible_rect =
        occlusion_in_quad_space.GetUnoccludedContentRect(
            delegated_quad.visible_rect);

    if (quad_visible_rect.IsEmpty())
      continue;

    if (delegated_quad.material != DrawQuad::RENDER_PASS) {
      DrawQuad* output_quad = render_pass->CopyFromAndAppendDrawQuad(
          &delegated_quad, output_shared_quad_state);
      output_quad->visible_rect = quad_visible_rect;
    } else {
      RenderPassId delegated_contributing_render_pass_id =
          RenderPassDrawQuad::MaterialCast(&delegated_quad)->render_pass_id;
      RenderPassId output_contributing_render_pass_id(-1, -1);

      bool present =
          ConvertDelegatedRenderPassId(delegated_contributing_render_pass_id,
                                       &output_contributing_render_pass_id);

      // The frame may have a RenderPassDrawQuad that points to a RenderPass
      // not part of the frame. Just ignore these quads.
      if (present) {
        DCHECK(output_contributing_render_pass_id !=
               append_quads_data->render_pass_id);

        RenderPassDrawQuad* output_quad =
            render_pass->CopyFromAndAppendRenderPassDrawQuad(
                RenderPassDrawQuad::MaterialCast(&delegated_quad),
                output_shared_quad_state,
                output_contributing_render_pass_id);
        output_quad->visible_rect = quad_visible_rect;
      }
    }
  }
}

const char* DelegatedRendererLayerImpl::LayerTypeAsString() const {
  return "cc::DelegatedRendererLayerImpl";
}

void DelegatedRendererLayerImpl::ClearChildId() {
  if (!child_id_)
    return;

  if (own_child_id_) {
    ResourceProvider* provider = layer_tree_impl()->resource_provider();
    provider->DestroyChild(child_id_);
  }

  resources_.clear();
  child_id_ = 0;
}

}  // namespace cc