// Copyright 2017 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40285824): Remove this and convert code to safer constructs.
#pragma allow_unsafe_buffers
#endif

#include "gpu/command_buffer/service/raster_decoder.h"

#include <stdint.h>

#include <algorithm>
#include <array>
#include <cstdint>
#include <memory>
#include <optional>
#include <string>
#include <string_view>
#include <utility>
#include <vector>

#include "base/atomic_sequence_num.h"
#include "base/command_line.h"
#include "base/containers/flat_map.h"
#include "base/debug/crash_logging.h"
#include "base/feature_list.h"
#include "base/functional/bind.h"
#include "base/logging.h"
#include "base/memory/raw_ptr.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/metrics/histogram_functions.h"
#include "base/notimplemented.h"
#include "base/numerics/checked_math.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "cc/paint/paint_cache.h"
#include "cc/paint/paint_op.h"
#include "cc/paint/paint_op_buffer.h"
#include "cc/paint/transfer_cache_deserialize_helper.h"
#include "cc/paint/transfer_cache_entry.h"
#include "components/viz/common/resources/shared_image_format_utils.h"
#include "gpu/command_buffer/common/capabilities.h"
#include "gpu/command_buffer/common/command_buffer_id.h"
#include "gpu/command_buffer/common/constants.h"
#include "gpu/command_buffer/common/context_result.h"
#include "gpu/command_buffer/common/debug_marker_manager.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/common/raster_cmd_format.h"
#include "gpu/command_buffer/common/raster_cmd_ids.h"
#include "gpu/command_buffer/common/sync_token.h"
#include "gpu/command_buffer/service/command_buffer_service.h"
#include "gpu/command_buffer/service/context_state.h"
#include "gpu/command_buffer/service/copy_shared_image_helper.h"
#include "gpu/command_buffer/service/decoder_client.h"
#include "gpu/command_buffer/service/error_state.h"
#include "gpu/command_buffer/service/feature_info.h"
#include "gpu/command_buffer/service/gl_utils.h"
#include "gpu/command_buffer/service/gpu_tracer.h"
#include "gpu/command_buffer/service/graphite_shared_context.h"
#include "gpu/command_buffer/service/logger.h"
#include "gpu/command_buffer/service/query_manager.h"
#include "gpu/command_buffer/service/raster_cmd_validation.h"
#include "gpu/command_buffer/service/service_font_manager.h"
#include "gpu/command_buffer/service/service_transfer_cache.h"
#include "gpu/command_buffer/service/service_utils.h"
#include "gpu/command_buffer/service/shared_context_state.h"
#include "gpu/command_buffer/service/shared_image/shared_image_factory.h"
#include "gpu/command_buffer/service/shared_image/shared_image_format_service_utils.h"
#include "gpu/command_buffer/service/shared_image/shared_image_representation.h"
#include "gpu/command_buffer/service/shared_image/wrapped_sk_image_backing_factory.h"
#include "gpu/command_buffer/service/skia_utils.h"
#include "gpu/config/gpu_finch_features.h"
#include "gpu/vulkan/buildflags.h"
#include "skia/ext/legacy_display_globals.h"
#include "skia/ext/rgba_to_yuva.h"
#include "third_party/abseil-cpp/absl/cleanup/cleanup.h"
#include "third_party/libyuv/include/libyuv/planar_functions.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkColorSpace.h"
#include "third_party/skia/include/core/SkGraphics.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/core/SkSurfaceProps.h"
#include "third_party/skia/include/core/SkTypeface.h"
#include "third_party/skia/include/core/SkYUVAInfo.h"
#include "third_party/skia/include/core/SkYUVAPixmaps.h"
#include "third_party/skia/include/gpu/ganesh/GrBackendSemaphore.h"
#include "third_party/skia/include/gpu/ganesh/GrBackendSurface.h"
#include "third_party/skia/include/gpu/ganesh/GrDirectContext.h"
#include "third_party/skia/include/gpu/ganesh/GrTypes.h"
#include "third_party/skia/include/gpu/ganesh/GrYUVABackendTextures.h"
#include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h"
#include "third_party/skia/include/gpu/graphite/Context.h"
#include "third_party/skia/include/private/chromium/GrPromiseImageTexture.h"
#include "third_party/skia/include/utils/SkNoDrawCanvas.h"
#include "ui/base/ui_base_features.h"
#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/geometry/skia_conversions.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_gl_api_implementation.h"
#include "ui/gl/gl_surface.h"
#include "ui/gl/gl_version_info.h"

#if BUILDFLAG(ENABLE_VULKAN)
#include "components/viz/common/gpu/vulkan_context_provider.h"
#include "gpu/command_buffer/service/drm_modifiers_filter_vulkan.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_util.h"
#endif  // BUILDFLAG(ENABLE_VULKAN)

#if BUILDFLAG(IS_WIN)
#include "gpu/command_buffer/service/shared_image/d3d_image_backing_factory.h"
#endif  // BUILDFLAG(IS_WIN)

#if BUILDFLAG(SKIA_USE_DAWN)
#include <dawn/webgpu_cpp.h>
#include "gpu/command_buffer/service/dawn_context_provider.h"
#endif  // BUILDFLAG(SKIA_USE_DAWN)

#if BUILDFLAG(SKIA_USE_DAWN) && BUILDFLAG(IS_CHROMEOS)
#include "gpu/command_buffer/service/drm_modifiers_filter_dawn.h"
#endif  // BUILDFLAG(SKIA_USE_DAWN) && BUILDFLAG(IS_CHROMEOS)

// Local versions of the SET_GL_ERROR macros
#define LOCAL_SET_GL_ERROR(error, function_name, msg) \
  ERRORSTATE_SET_GL_ERROR(error_state_.get(), error, function_name, msg)
#define LOCAL_SET_GL_ERROR_INVALID_ENUM(function_name, value, label)    \
  ERRORSTATE_SET_GL_ERROR_INVALID_ENUM(error_state_.get(), function_name, \
                                       static_cast<uint32_t>(value), label)
#define LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER(function_name) \
  ERRORSTATE_COPY_REAL_GL_ERRORS_TO_WRAPPER(error_state_.get(), function_name)
#define LOCAL_PEEK_GL_ERROR(function_name) \
  ERRORSTATE_PEEK_GL_ERROR(error_state_.get(), function_name)
#define LOCAL_CLEAR_REAL_GL_ERRORS(function_name) \
  ERRORSTATE_CLEAR_REAL_GL_ERRORS(error_state_.get(), function_name)
#define LOCAL_PERFORMANCE_WARNING(msg) \
  PerformanceWarning(__FILE__, __LINE__, msg)
#define LOCAL_RENDER_WARNING(msg) RenderWarning(__FILE__, __LINE__, msg)

namespace gpu {
namespace raster {

namespace {

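// Generates a unique id for each RasterDecoderImpl instance.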
base::AtomicSequenceNumber g_raster_decoder_id;

// Controls whether we may yield during rasterization.
BASE_FEATURE(kGpuYieldRasterization, base::FEATURE_DISABLED_BY_DEFAULT);

// Controls how many ops are rastered before checking if we should yield.
const base::FeatureParam<int> kGpuYieldRasterizationOpCount(
    &kGpuYieldRasterization,
    "gpu_yield_rasterization_op_count",
    500);

// This class prevents any GL errors that occur when it is in scope from
// being reported to the client.
class ScopedGLErrorSuppressor {
 public:
  ScopedGLErrorSuppressor(const char* function_name,
                          gles2::ErrorState* error_state)
      : function_name_(function_name), error_state_(error_state) {
    ERRORSTATE_COPY_REAL_GL_ERRORS_TO_WRAPPER(error_state_, function_name_);
  }

  ScopedGLErrorSuppressor(const ScopedGLErrorSuppressor&) = delete;
  ScopedGLErrorSuppressor& operator=(const ScopedGLErrorSuppressor&) = delete;

  ~ScopedGLErrorSuppressor() {
    ERRORSTATE_CLEAR_REAL_GL_ERRORS(error_state_, function_name_);
  }

 private:
  const char* function_name_;
  raw_ptr<gles2::ErrorState> error_state_;
};

// Commands that are explicitly listed as OK to occur between
// BeginRasterCHROMIUM and EndRasterCHROMIUM. They do not invalidate
// GrDirectContext state tracking.
bool AllowedBetweenBeginEndRaster(CommandId command) {
  switch (command) {
    case kCreateTransferCacheEntryINTERNAL:
    case kDeleteTransferCacheEntryINTERNAL:
    case kEndRasterCHROMIUM:
    case kFinish:
    case kFlush:
    case kGetError:
    case kRasterCHROMIUM:
    case kUnlockTransferCacheEntryINTERNAL:
      return true;
    default:
      return false;
  }
}

// This class is sent to cc::PaintOpReader during paint op deserialization.
// When a cc::PaintOp refers to a mailbox-backed cc::PaintImage, this class
// opens the shared image for read access and returns an SkImage reference.
// SharedImageProviderImpl maintains read access until it is destroyed,
// which should occur after |end_semaphores| have been flushed to Skia.
class SharedImageProviderImpl final : public cc::SharedImageProvider {
 public:
  SharedImageProviderImpl(
      SharedImageRepresentationFactory* shared_image_factory,
      scoped_refptr<SharedContextState> shared_context_state,
      SkSurface* output_surface,
      std::vector<GrBackendSemaphore>* end_semaphores,
      gles2::ErrorState* error_state)
      : shared_image_factory_(shared_image_factory),
        shared_context_state_(std::move(shared_context_state)),
        output_surface_(output_surface),
        end_semaphores_(end_semaphores),
        error_state_(error_state) {
    DCHECK(shared_image_factory_);
    DCHECK(shared_context_state_);
    DCHECK(output_surface_);
    DCHECK(end_semaphores_);
    DCHECK(error_state_);
  }
  SharedImageProviderImpl(const SharedImageProviderImpl&) = delete;
  SharedImageProviderImpl& operator=(const SharedImageProviderImpl&) = delete;

  ~SharedImageProviderImpl() override { read_accessors_.clear(); }

  sk_sp<SkImage> OpenSharedImageForRead(const gpu::Mailbox& mailbox,
                                        Error& error) override {
    auto it = read_accessors_.find(mailbox);
    error = Error::kNoError;
    if (it != read_accessors_.end()) {
      return it->second.read_access_sk_image;
    }

    auto shared_image_skia =
        shared_image_factory_->ProduceSkia(mailbox, shared_context_state_);
    if (!shared_image_skia) {
      ERRORSTATE_SET_GL_ERROR(error_state_, GL_INVALID_OPERATION,
                              "SharedImageProviderImpl::OpenSharedImageForRead",
                              ("Attempting to operate on unknown mailbox:" +
                               mailbox.ToDebugString())
                                  .c_str());
      error = Error::kUnknownMailbox;
      return nullptr;
    }

    std::vector<GrBackendSemaphore> begin_semaphores;
    // |end_semaphores_| is owned by RasterDecoderImpl which will handle
    // sending them to SkCanvas.
    auto scoped_read_access = shared_image_skia->BeginScopedReadAccess(
        &begin_semaphores, end_semaphores_);
    if (!scoped_read_access) {
      ERRORSTATE_SET_GL_ERROR(error_state_, GL_INVALID_OPERATION,
                              "SharedImageProviderImpl::OpenSharedImageForRead",
                              ("Couldn't access shared image for mailbox:" +
                               mailbox.ToDebugString())
                                  .c_str());
      error = Error::kNoAccess;
      return nullptr;
    }

    if (!begin_semaphores.empty()) {
      bool result = output_surface_->wait(begin_semaphores.size(),
                                          begin_semaphores.data(),
                                          /*deleteSemaphoresAfterWait=*/false);
      DCHECK(result);
    }

    auto sk_image =
        scoped_read_access->CreateSkImage(shared_context_state_.get());
    if (!sk_image) {
      ERRORSTATE_SET_GL_ERROR(error_state_, GL_INVALID_OPERATION,
                              "SharedImageProviderImpl::OpenSharedImageForRead",
                              "Couldn't create output SkImage.");
      error = Error::kSkImageCreationFailed;
      return nullptr;
    }

    read_accessors_[mailbox] = {std::move(shared_image_skia),
                                std::move(scoped_read_access), sk_image};
    return sk_image;
  }

  void ApplyEndAccessState() {
    for (auto& [mailbox, access] : read_accessors_) {
      access.scoped_read_access->ApplyBackendSurfaceEndState();
    }
  }

  bool NeedGraphiteContextSubmit() {
    bool need_graphite_submit = false;
    for (auto& [mailbox, access] : read_accessors_) {
      need_graphite_submit |=
          access.scoped_read_access->NeedGraphiteContextSubmit();
    }
    return need_graphite_submit;
  }

 private:
  raw_ptr<SharedImageRepresentationFactory, DanglingUntriaged>
      shared_image_factory_;
  scoped_refptr<SharedContextState> shared_context_state_;
  raw_ptr<SkSurface, DanglingUntriaged> output_surface_;
  raw_ptr<std::vector<GrBackendSemaphore>> end_semaphores_;
  raw_ptr<gles2::ErrorState> error_state_;

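  // Bundles the Skia representation of a shared image, its scoped read
  // access, and the SkImage created from it; entries are keyed by mailbox in
  // |read_accessors_| below.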
  struct SharedImageReadAccess {
    std::unique_ptr<SkiaImageRepresentation> shared_image_skia;
    std::unique_ptr<SkiaImageRepresentation::ScopedReadAccess>
        scoped_read_access;
    sk_sp<SkImage> read_access_sk_image;
  };
  base::flat_map<gpu::Mailbox, SharedImageReadAccess> read_accessors_;
};

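// Implements GL_COMMANDS_COMPLETED_CHROMIUM queries on top of Skia: End()
// flushes the Ganesh or Graphite context with a finished-proc callback, and
// the query is reported complete once the GPU has finished that work.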
class RasterCommandsCompletedQuery : public QueryManager::Query {
 public:
  RasterCommandsCompletedQuery(
      scoped_refptr<SharedContextState> shared_context_state,
      QueryManager* manager,
      GLenum target,
      scoped_refptr<gpu::Buffer> buffer,
      QuerySync* sync)
      : Query(manager, target, std::move(buffer), sync),
        shared_context_state_(std::move(shared_context_state)) {}

  // Overridden from QueryManager::Query:
  void Begin() override {
    MarkAsActive();
    begin_time_.emplace(base::TimeTicks::Now());
  }

  void End(base::subtle::Atomic32 submit_count) override {
    DCHECK(begin_time_);

    AddToPendingQueue(submit_count);
    finished_ = false;

    if (auto* gr_context = shared_context_state_->gr_context()) {
      GrFlushInfo info;
      info.fFinishedProc = RasterCommandsCompletedQuery::FinishedProc;
      auto weak_ptr = weak_ptr_factory_.GetWeakPtr();
      info.fFinishedContext =
          new base::WeakPtr<RasterCommandsCompletedQuery>(weak_ptr);
      gr_context->flush(info);
      gr_context->submit();
    } else {
      CHECK(shared_context_state_->graphite_shared_context());
      auto recording =
          shared_context_state_->gpu_main_graphite_recorder()->snap();
      if (recording) {
        skgpu::graphite::InsertRecordingInfo info = {};
        info.fRecording = recording.get();
        info.fFinishedProc = [](void* context, skgpu::CallbackResult result) {
          RasterCommandsCompletedQuery::FinishedProc(context);
        };
        info.fFinishedContext = new base::WeakPtr<RasterCommandsCompletedQuery>(
            weak_ptr_factory_.GetWeakPtr());
        shared_context_state_->graphite_shared_context()->insertRecording(info);

        // Canvas typically uses the Commands Completed query to implement
        // backpressure. We need to flush any delayed commands to make sure
        // the query can complete in finite time.
        // Furthermore, some websites use setTimeout() to implement the
        // canvas rendering loop, so within a vsync interval a canvas can be
        // redrawn multiple times. Flushing here ensures that we send the
        // draw commands to the GPU earlier, reducing the chance that the
        // canvas rate limiter kicks in.
        shared_context_state_->graphite_shared_context()
            ->submitAndFlushBackend();
      } else {
        finished_ = true;
      }
    }
  }

  void QueryCounter(base::subtle::Atomic32 submit_count) override {
    NOTREACHED();
  }

  void Pause() override { MarkAsPaused(); }

  void Resume() override { MarkAsActive(); }

  void Process(bool did_finish) override {
    DCHECK(begin_time_);
    if (did_finish || finished_) {
      const base::TimeDelta elapsed = base::TimeTicks::Now() - *begin_time_;
      MarkAsCompleted(elapsed.InMicroseconds());
      begin_time_.reset();
    }
  }

  void Destroy(bool have_context) override {
    if (!IsDeleted())
      MarkAsDeleted();
  }

 protected:
  ~RasterCommandsCompletedQuery() override = default;

 private:
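  // Invoked by Skia once the flushed work has finished on the GPU. |context|
  // is the heap-allocated WeakPtr created in End(); it is deleted here after
  // marking the query as finished.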
  static void FinishedProc(void* context) {
    auto* weak_ptr =
        reinterpret_cast<base::WeakPtr<RasterCommandsCompletedQuery>*>(context);
    if (*weak_ptr)
      (*weak_ptr)->finished_ = true;
    delete weak_ptr;
  }

  const scoped_refptr<SharedContextState> shared_context_state_;
  std::optional<base::TimeTicks> begin_time_;
  bool finished_ = false;
  base::WeakPtrFactory<RasterCommandsCompletedQuery> weak_ptr_factory_{this};
};

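// QueryManager that creates RasterCommandsCompletedQuery for
// GL_COMMANDS_COMPLETED_CHROMIUM when a Ganesh or Graphite context is
// available, and defers to the base QueryManager for all other targets.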
class RasterQueryManager : public QueryManager {
 public:
  explicit RasterQueryManager(
      scoped_refptr<SharedContextState> shared_context_state)
      : shared_context_state_(std::move(shared_context_state)) {}
  ~RasterQueryManager() override = default;

  Query* CreateQuery(GLenum target,
                     GLuint client_id,
                     scoped_refptr<gpu::Buffer> buffer,
                     QuerySync* sync) override {
    if (target == GL_COMMANDS_COMPLETED_CHROMIUM &&
        (shared_context_state_->gr_context() ||
         shared_context_state_->graphite_shared_context())) {
      auto query = base::MakeRefCounted<RasterCommandsCompletedQuery>(
          shared_context_state_, this, target, std::move(buffer), sync);
      std::pair<QueryMap::iterator, bool> result =
          queries_.insert(std::make_pair(client_id, query));
      DCHECK(result.second);
      return query.get();
    }
    return QueryManager::CreateQuery(target, client_id, std::move(buffer),
                                     sync);
  }

 private:
  const scoped_refptr<SharedContextState> shared_context_state_;
};

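// Maps the channel format of a viz::SharedImageFormat to the corresponding
// Skia YUVA pixmap data type.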
SkYUVAPixmapInfo::DataType ToSkYUVADataType(viz::SharedImageFormat format) {
  switch (format.channel_format()) {
    case viz::SharedImageFormat::ChannelFormat::k8:
      return SkYUVAPixmapInfo::DataType::kUnorm8;
    case viz::SharedImageFormat::ChannelFormat::k10:
      return SkYUVAPixmapInfo::DataType::kUnorm10_Unorm2;
    case viz::SharedImageFormat::ChannelFormat::k16:
      return SkYUVAPixmapInfo::DataType::kUnorm16;
    case viz::SharedImageFormat::ChannelFormat::k16F:
      return SkYUVAPixmapInfo::DataType::kFloat16;
  }
  NOTREACHED();
}

}  // namespace

// RasterDecoderImpl uses two separate state trackers (gpu::gles2::ContextState
// and GrDirectContext) that cache the current GL driver state. Each class sees
// a fraction of the GL calls issued and can easily become inconsistent with GL
// state. We guard against that by resetting. But resetting is expensive, so we
// avoid it as much as possible.
class RasterDecoderImpl final : public RasterDecoder,
                                public gles2::ErrorStateClient,
                                public ServiceFontManager::Client,
                                public SharedContextState::ContextLostObserver {
 public:
  RasterDecoderImpl(DecoderClient* client,
                    CommandBufferServiceBase* command_buffer_service,
                    gles2::Outputter* outputter,
                    const GpuFeatureInfo& gpu_feature_info,
                    const GpuPreferences& gpu_preferences,
                    scoped_refptr<MemoryTracker> memory_tracker,
                    SharedImageManager* shared_image_manager,
                    scoped_refptr<SharedContextState> shared_context_state,
                    bool is_privileged);

  RasterDecoderImpl(const RasterDecoderImpl&) = delete;
  RasterDecoderImpl& operator=(const RasterDecoderImpl&) = delete;

  ~RasterDecoderImpl() override;

  // RasterDecoder implementation.
  ContextResult Initialize(bool enable_gpu_rasterization,
                           bool lose_context_when_out_of_memory) override;
  int GetRasterDecoderId() const override;
  int DecoderIdForTest() override;
  ServiceTransferCache* GetTransferCacheForTest() override;
  void SetUpForRasterCHROMIUMForTest() override;
  void SetOOMErrorForTest() override;
  void DisableFlushWorkaroundForTest() override;
  gles2::GLES2Util* GetGLES2Util() override { return &util_; }

  // DecoderContext implementation.
  base::WeakPtr<DecoderContext> AsWeakPtr() override;
  void Destroy(bool have_context) override;
  bool MakeCurrent() override;
  gl::GLContext* GetGLContext() override;
  gl::GLSurface* GetGLSurface() override;
  const gles2::FeatureInfo* GetFeatureInfo() const override {
    return feature_info();
  }
  Capabilities GetCapabilities() override;
  GLCapabilities GetGLCapabilities() override;
  const gles2::ContextState* GetContextState() override;

  // TODO(penghuang): Remove unused context state related methods.
  void RestoreGlobalState() const override;
  void ClearAllAttributes() const override;
  void RestoreAllAttributes() const override;
  void RestoreState(const gles2::ContextState* prev_state) override;
  void RestoreActiveTexture() const override;
  void RestoreAllTextureUnitAndSamplerBindings(
      const gles2::ContextState* prev_state) const override;
  void RestoreActiveTextureUnitBinding(unsigned int target) const override;
  void RestoreBufferBinding(unsigned int target) override;
  void RestoreBufferBindings() const override;
  void RestoreFramebufferBindings() const override;
  void RestoreRenderbufferBindings() override;
  void RestoreProgramBindings() const override;
  void RestoreTextureState(unsigned service_id) override;
  void RestoreTextureUnitBindings(unsigned unit) const override;
  void RestoreVertexAttribArray(unsigned index) override;
  void RestoreAllExternalTextureBindingsIfNeeded() override;
  QueryManager* GetQueryManager() override;

  void SetQueryCallback(unsigned int query_client_id,
                        base::OnceClosure callback) override;
  void CancelAllQueries() override;
  gles2::GpuFenceManager* GetGpuFenceManager() override;
  bool HasPendingQueries() const override;
  void ProcessPendingQueries(bool did_finish) override;
  bool HasMoreIdleWork() const override;
  void PerformIdleWork() override;
  bool HasPollingWork() const override;
  void PerformPollingWork() override;
  TextureBase* GetTextureBase(uint32_t client_id) override;
  void SetLevelInfo(uint32_t client_id,
                    int level,
                    unsigned internal_format,
                    unsigned width,
                    unsigned height,
                    unsigned depth,
                    unsigned format,
                    unsigned type,
                    const gfx::Rect& cleared_rect) override;
  bool WasContextLost() const override;
  bool WasContextLostByRobustnessExtension() const override;
  void MarkContextLost(error::ContextLostReason reason) override;
  bool CheckResetStatus() override;
  void BeginDecoding() override;
  void EndDecoding() override;
  const char* GetCommandName(unsigned int command_id) const;
  error::Error DoCommands(unsigned int num_commands,
                          const volatile void* buffer,
                          int num_entries,
                          int* entries_processed) override;
  std::string_view GetLogPrefix() override;

  gles2::ContextGroup* GetContextGroup() override;
  gles2::ErrorState* GetErrorState() override;

  bool IsCompressedTextureFormat(unsigned format) override;
  bool ClearLevel(gles2::Texture* texture,
                  unsigned target,
                  int level,
                  unsigned format,
                  unsigned type,
                  int xoffset,
                  int yoffset,
                  int width,
                  int height) override;
  bool ClearCompressedTextureLevel(gles2::Texture* texture,
                                   unsigned target,
                                   int level,
                                   unsigned format,
                                   int width,
                                   int height) override;
  bool ClearCompressedTextureLevel3D(gles2::Texture* texture,
                                     unsigned target,
                                     int level,
                                     unsigned format,
                                     int width,
                                     int height,
                                     int depth) override;
  bool ClearLevel3D(gles2::Texture* texture,
                    unsigned target,
                    int level,
                    unsigned format,
                    unsigned type,
                    int width,
                    int height,
                    int depth) override {
    NOTIMPLEMENTED();
    return false;
  }

  // gles2::ErrorStateClient implementation.
  void OnContextLostError() override;
  void OnOutOfMemoryError() override;

  gles2::Logger* GetLogger() override;

  void SetIgnoreCachedStateForTest(bool ignore) override;

  // ServiceFontManager::Client implementation.
  scoped_refptr<Buffer> GetShmBuffer(uint32_t shm_id) override;
  void ReportProgress() override;

  // SharedContextState::ContextLostObserver implementation.
  void OnContextLost() override;

 private:
  gles2::ContextState* state() const {
    if (use_passthrough_) {
      NOTREACHED();
    }
    return shared_context_state_->context_state();
  }
  GrDirectContext* gr_context() const {
    return shared_context_state_->gr_context();
  }
  GraphiteSharedContext* graphite_shared_context() const {
    return shared_context_state_->graphite_shared_context();
  }
  skgpu::graphite::Recorder* graphite_recorder() const {
    return shared_context_state_->gpu_main_graphite_recorder();
  }
  ServiceTransferCache* transfer_cache() {
    return shared_context_state_->transfer_cache();
  }

  const gles2::FeatureInfo* feature_info() const {
    return shared_context_state_->feature_info();
  }

  const gles2::FeatureInfo::FeatureFlags& features() const {
    return feature_info()->feature_flags();
  }

  const GpuDriverBugWorkarounds& workarounds() const {
    return feature_info()->workarounds();
  }

  void FlushToWorkAroundMacCrashes() {
    // The workaround is not needed for arm based macs (because they don't have
    // the bug).
#if BUILDFLAG(IS_MAC) && !defined(ARCH_CPU_ARM64)
    // The workaround is also not needed for Graphite, which always uses Metal
    // drivers (via Dawn).
    if (!shared_context_state_->GrContextIsGL()) {
      return;
    }
    // This function does aggressive flushes to work around crashes in the
    // macOS OpenGL driver.
    // https://crbug.com/906453
    if (!flush_workaround_disabled_for_test_) {
      TRACE_EVENT0("gpu", "RasterDecoderImpl::FlushToWorkAroundMacCrashes");
      if (gr_context())
        gr_context()->flushAndSubmit(GrSyncCpu::kNo);

      gl::GLApi* const api = gl::g_current_gl_context;
      api->glFlushFn();

      // Flushes can be expensive, yield to allow interruption after each
      // flush.
      ExitCommandProcessingEarly();
    }
#endif
  }

  const gl::GLVersionInfo& gl_version_info() {
    return feature_info()->gl_version_info();
  }

  // Set remaining commands to process to 0 to force DoCommands to return
  // and allow context preemption and GPU watchdog checks in
  // CommandExecutor().
  void ExitCommandProcessingEarly() override;

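  // DoCommandsImpl is templated on DebugImpl so the per-command tracing and
  // logging checks can be compiled out of the non-debug path.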
  template <bool DebugImpl>
  error::Error DoCommandsImpl(unsigned int num_commands,
                              const volatile void* buffer,
                              int num_entries,
                              int* entries_processed);

  bool GenQueriesEXTHelper(GLsizei n, const GLuint* client_ids);
  void DeleteQueriesEXTHelper(GLsizei n, const volatile GLuint* client_ids);
  void DoFinish();
  void DoFlush();
  void DoGetIntegerv(GLenum pname, GLint* params, GLsizei params_size);
  void DoTraceEndCHROMIUM();
  void DoCopySharedImageINTERNAL(GLint xoffset,
                                 GLint yoffset,
                                 GLint x,
                                 GLint y,
                                 GLsizei src_width,
                                 GLsizei src_height,
                                 GLsizei dst_width,
                                 GLsizei dst_height,
                                 const volatile GLbyte* mailboxes);
  void DoWritePixelsINTERNAL(GLint x_offset,
                             GLint y_offset,
                             GLuint src_width,
                             GLuint src_height,
                             GLuint row_bytes,
                             GLuint src_sk_color_type,
                             GLuint src_sk_alpha_type,
                             GLint shm_id,
                             GLuint shm_offset,
                             GLuint shm_size,
                             const volatile GLbyte* mailbox);
  void DoWritePixelsYUVINTERNAL(GLuint src_width,
                                GLuint src_height,
                                GLuint src_row_bytes_plane1,
                                GLuint src_row_bytes_plane2,
                                GLuint src_row_bytes_plane3,
                                GLuint src_row_bytes_plane4,
                                GLuint src_yuv_plane_config,
                                GLuint src_yuv_subsampling,
                                GLuint src_yuv_color_space,
                                GLint shm_id,
                                GLuint shm_offset,
                                GLuint plane2_offset,
                                GLuint plane3_offset,
                                GLuint plane4_offset,
                                const volatile GLbyte* mailbox);
  bool DoWritePixelsINTERNALDirectTextureUpload(
      SkiaImageRepresentation* dest_shared_image,
      const SkImageInfo& src_info,
      const void* pixel_data,
      size_t row_bytes);
  void DoReadbackARGBImagePixelsINTERNAL(GLint src_x,
                                         GLint src_y,
                                         GLint plane_index,
                                         GLuint dst_width,
                                         GLuint dst_height,
                                         GLuint row_bytes,
                                         GLuint dst_sk_color_type,
                                         GLuint dst_sk_alpha_type,
                                         GLint shm_id,
                                         GLuint shm_offset,
                                         GLuint color_space_offset,
                                         GLuint pixels_offset,
                                         const volatile GLbyte* mailbox);
  void DoReadbackYUVImagePixelsINTERNAL(GLuint dst_width,
                                        GLuint dst_height,
                                        GLint shm_id,
                                        GLuint shm_offset,
                                        GLuint y_offset,
                                        GLuint y_stride,
                                        GLuint u_offset,
                                        GLuint u_stride,
                                        GLuint v_offset,
                                        GLuint v_stride,
                                        const volatile GLbyte* mailbox);

  void DoLoseContextCHROMIUM(GLenum current, GLenum other);
  void DoBeginRasterCHROMIUM(GLfloat r,
                             GLfloat g,
                             GLfloat b,
                             GLfloat a,
                             GLboolean needs_clear,
                             GLuint msaa_sample_count,
                             MsaaMode msaa_mode,
                             GLboolean can_use_lcd_text,
                             GLboolean visible,
                             GLfloat hdr_headroom,
                             const volatile GLbyte* key);
  error::Error DoRasterCHROMIUM(GLuint raster_shm_id,
                                GLuint raster_shm_offset,
                                GLuint raster_shm_size,
                                GLuint font_shm_id,
                                GLuint font_shm_offset,
                                GLuint font_shm_size);
  void DoEndRasterCHROMIUM();
  void DoCreateTransferCacheEntryINTERNAL(GLuint entry_type,
                                          GLuint entry_id,
                                          GLuint handle_shm_id,
                                          GLuint handle_shm_offset,
                                          GLuint data_shm_id,
                                          GLuint data_shm_offset,
                                          GLuint data_size);
  void DoUnlockTransferCacheEntryINTERNAL(GLuint entry_type, GLuint entry_id);
  void DoDeleteTransferCacheEntryINTERNAL(GLuint entry_type, GLuint entry_id);
  void RestoreStateForAttrib(GLuint attrib, bool restore_array_binding);
  void DeletePaintCachePathsINTERNALHelper(
      GLsizei n,
      const volatile GLuint* paint_cache_ids);
  void DeletePaintCacheEffectsINTERNALHelper(
      GLsizei n,
      const volatile GLuint* paint_cache_ids);
  void DoClearPaintCacheINTERNAL();

#if defined(NDEBUG)
  void LogClientServiceMapping(const char* /* function_name */,
                               GLuint /* client_id */,
                               GLuint /* service_id */) {}
  template <typename T>
  void LogClientServiceForInfo(T* /* info */,
                               GLuint /* client_id */,
                               const char* /* function_name */) {}
#else
  void LogClientServiceMapping(const char* function_name,
                               GLuint client_id,
                               GLuint service_id) {
    if (gpu_preferences_.enable_gpu_service_logging_gpu) {
      VLOG(1) << "[" << logger_.GetLogPrefix() << "] " << function_name
              << ": client_id = " << client_id
              << ", service_id = " << service_id;
    }
  }
  template <typename T>
  void LogClientServiceForInfo(T* info,
                               GLuint client_id,
                               const char* function_name) {
    if (info) {
      LogClientServiceMapping(function_name, client_id, info->service_id());
    }
  }
#endif

// Generate a member function prototype for each command in an automated and
// typesafe way.
#define RASTER_CMD_OP(name) \
  Error Handle##name(uint32_t immediate_data_size, const volatile void* data);

  RASTER_COMMAND_LIST(RASTER_CMD_OP)
#undef RASTER_CMD_OP

  typedef error::Error (RasterDecoderImpl::*CmdHandler)(
      uint32_t immediate_data_size,
      const volatile void* data);

  // A struct to hold info about each command.
  struct CommandInfo {
    CmdHandler cmd_handler;
    uint8_t arg_flags;   // How to handle the arguments for this command
    uint8_t cmd_flags;   // How to handle this command
    uint16_t arg_count;  // How many arguments are expected for this command.
  };

  // A table of CommandInfo for all the commands.
  static const CommandInfo command_info[kNumCommands - kFirstRasterCommand];

  const int raster_decoder_id_;

  // Number of commands remaining to be processed in DoCommands().
  int commands_to_process_ = 0;

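  // use_gpu_raster_ is set in Initialize() when GPU rasterization is enabled;
  // use_passthrough_ mirrors GpuPreferences::use_passthrough_cmd_decoder.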
  bool use_gpu_raster_ = false;
  bool use_passthrough_ = false;

  // The current decoder error communicates the decoder error through command
  // processing functions that do not return the error value. Should be set
  // only if not returning an error.
  error::Error current_decoder_error_ = error::kNoError;

  GpuPreferences gpu_preferences_;

  gles2::DebugMarkerManager debug_marker_manager_;
  gles2::Logger logger_;
  std::unique_ptr<gles2::ErrorState> error_state_;
  bool context_lost_ = false;

  scoped_refptr<SharedContextState> shared_context_state_;
  std::unique_ptr<Validators> validators_;

  SharedImageRepresentationFactory shared_image_representation_factory_;
  std::unique_ptr<RasterQueryManager> query_manager_;

  gles2::GLES2Util util_;

  // An optional behaviour to lose the context when OOM.
  bool lose_context_when_out_of_memory_ = false;

  std::unique_ptr<gles2::GPUTracer> gpu_tracer_;
  raw_ptr<const unsigned char> gpu_decoder_category_;
  static constexpr int gpu_trace_level_ = 2;
  bool gpu_trace_commands_ = false;
  bool gpu_debug_commands_ = false;

  // Raster helpers.
  scoped_refptr<ServiceFontManager> font_manager_;
  std::unique_ptr<SkiaImageRepresentation> shared_image_;
  std::unique_ptr<SkiaImageRepresentation::ScopedWriteAccess>
      scoped_shared_image_write_;

  std::unique_ptr<RasterImageRepresentation> shared_image_raster_;
  std::unique_ptr<RasterImageRepresentation::ScopedWriteAccess>
      scoped_shared_image_raster_write_;

  raw_ptr<SkSurface> sk_surface_ = nullptr;
  float sk_surface_hdr_headroom_ = 1.f;
  std::unique_ptr<SharedImageProviderImpl> paint_op_shared_image_provider_;

  sk_sp<SkSurface> sk_surface_for_testing_;
  std::vector<GrBackendSemaphore> end_semaphores_;
  std::unique_ptr<cc::ServicePaintCache> paint_cache_;

  std::unique_ptr<SkNoDrawCanvas> no_draw_canvas_;
  raw_ptr<SkCanvas> raster_canvas_ = nullptr;
  std::vector<SkDiscardableHandleId> locked_handles_;

  // Cached value of `kGpuYieldRasterizationOpCount`. This is only set if
  // `kGpuYieldRasterization` is enabled.
  std::optional<int> check_for_yield_op_count_;

  // If set, indicates rasterization was deferred. The value gives the offset
  // into the paint buffer at which processing stopped.
  std::optional<size_t> deferred_raster_paint_buffer_offset_;

  // Tracing helpers.
  int raster_chromium_id_ = 0;

  // Workaround for https://crbug.com/906453
  bool flush_workaround_disabled_for_test_ = false;

  bool in_copy_sub_texture_ = false;
  bool reset_texture_state_ = false;

  bool is_privileged_ = false;

  const bool is_raw_draw_enabled_;

  base::WeakPtrFactory<DecoderContext> weak_ptr_factory_{this};
};

constexpr RasterDecoderImpl::CommandInfo RasterDecoderImpl::command_info[] = {
#define RASTER_CMD_OP(name)                                \
  {                                                        \
      &RasterDecoderImpl::Handle##name,                    \
      cmds::name::kArgFlags,                               \
      cmds::name::cmd_flags,                               \
      sizeof(cmds::name) / sizeof(CommandBufferEntry) - 1, \
  }, /* NOLINT */
    RASTER_COMMAND_LIST(RASTER_CMD_OP)
#undef RASTER_CMD_OP
};

// static
std::unique_ptr<RasterDecoder> RasterDecoder::Create(
    DecoderClient* client,
    CommandBufferServiceBase* command_buffer_service,
    gles2::Outputter* outputter,
    const GpuFeatureInfo& gpu_feature_info,
    const GpuPreferences& gpu_preferences,
    scoped_refptr<MemoryTracker> memory_tracker,
    SharedImageManager* shared_image_manager,
    scoped_refptr<SharedContextState> shared_context_state,
    bool is_privileged) {
  return std::make_unique<RasterDecoderImpl>(
      client, command_buffer_service, outputter, gpu_feature_info,
      gpu_preferences, std::move(memory_tracker), shared_image_manager,
      std::move(shared_context_state), is_privileged);
}

RasterDecoder::RasterDecoder(DecoderClient* client,
                             CommandBufferServiceBase* command_buffer_service,
                             gles2::Outputter* outputter)
    : CommonDecoder(client, command_buffer_service), outputter_(outputter) {}

RasterDecoder::~RasterDecoder() {}

bool RasterDecoder::initialized() const {
  return initialized_;
}

TextureBase* RasterDecoder::GetTextureBase(uint32_t client_id) {
  return nullptr;
}

void RasterDecoder::SetLevelInfo(uint32_t client_id,
                                 int level,
                                 unsigned internal_format,
                                 unsigned width,
                                 unsigned height,
                                 unsigned depth,
                                 unsigned format,
                                 unsigned type,
                                 const gfx::Rect& cleared_rect) {}

void RasterDecoder::BeginDecoding() {}

void RasterDecoder::EndDecoding() {}

void RasterDecoder::SetLogCommands(bool log_commands) {
  log_commands_ = log_commands;
}

gles2::Outputter* RasterDecoder::outputter() const {
  return outputter_;
}

std::string_view RasterDecoder::GetLogPrefix() {
  return GetLogger()->GetLogPrefix();
}

RasterDecoderImpl::RasterDecoderImpl(
    DecoderClient* client,
    CommandBufferServiceBase* command_buffer_service,
    gles2::Outputter* outputter,
    const GpuFeatureInfo& gpu_feature_info,
    const GpuPreferences& gpu_preferences,
    scoped_refptr<MemoryTracker> memory_tracker,
    SharedImageManager* shared_image_manager,
    scoped_refptr<SharedContextState> shared_context_state,
    bool is_privileged)
    : RasterDecoder(client, command_buffer_service, outputter),
      raster_decoder_id_(g_raster_decoder_id.GetNext() + 1),
      use_passthrough_(gpu_preferences.use_passthrough_cmd_decoder),
      gpu_preferences_(gpu_preferences),
      logger_(&debug_marker_manager_,
              base::BindRepeating(&DecoderClient::OnConsoleMessage,
                                  base::Unretained(client),
                                  0),
              gpu_preferences_.disable_gl_error_limit),
      error_state_(gles2::ErrorState::Create(this, &logger_)),
      shared_context_state_(std::move(shared_context_state)),
      validators_(new Validators),
      shared_image_representation_factory_(shared_image_manager,
                                           std::move(memory_tracker)),
      gpu_decoder_category_(TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(
          TRACE_DISABLED_BY_DEFAULT("gpu.decoder"))),
      font_manager_(base::MakeRefCounted<ServiceFontManager>(
          this,
          gpu_preferences_.disable_oopr_debug_crash_dump)),
      is_privileged_(is_privileged),
      is_raw_draw_enabled_(features::IsUsingRawDraw()) {
  DCHECK(shared_context_state_);
  shared_context_state_->AddContextLostObserver(this);
  const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
  if (cmdline->HasSwitch(switches::kDisableGLDrawingForTests)) {
    no_draw_canvas_ = std::make_unique<SkNoDrawCanvas>(0, 0);
  }
  if (base::FeatureList::IsEnabled(kGpuYieldRasterization)) {
    check_for_yield_op_count_ = kGpuYieldRasterizationOpCount.Get();
  }
}

RasterDecoderImpl::~RasterDecoderImpl() {
  shared_context_state_->RemoveContextLostObserver(this);
}

base::WeakPtr<DecoderContext> RasterDecoderImpl::AsWeakPtr() {
  return weak_ptr_factory_.GetWeakPtr();
}

ContextResult RasterDecoderImpl::Initialize(
    bool enable_gpu_rasterization,
    bool lose_context_when_out_of_memory) {
  TRACE_EVENT0("gpu", "RasterDecoderImpl::Initialize");
  DCHECK(shared_context_state_->IsCurrent(nullptr));

  set_initialized();

  if (gpu_preferences_.enable_gpu_debugging)
    set_debug(true);

  if (gpu_preferences_.enable_gpu_command_logging)
    SetLogCommands(true);

  // Create GPU Tracer for timing values.
  gpu_tracer_ = std::make_unique<gles2::GPUTracer>(
      this, shared_context_state_->GrContextIsGL());

  lose_context_when_out_of_memory_ = lose_context_when_out_of_memory;

  CHECK_GL_ERROR();

  query_manager_ = std::make_unique<RasterQueryManager>(shared_context_state_);

  if (enable_gpu_rasterization) {
    DCHECK(gr_context() || graphite_shared_context());
    use_gpu_raster_ = true;
    paint_cache_ = std::make_unique<cc::ServicePaintCache>();
  }

  return ContextResult::kSuccess;
}

void RasterDecoderImpl::Destroy(bool have_context) {
  if (!initialized())
    return;

  DCHECK(!have_context || shared_context_state_->IsCurrent(nullptr));

Vasiliy Telezhnikov91d18812023-02-02 21:21:261093 // The client can call BeginRasterCHROMIUM and then the channel can be
1094 // closed and the decoder destroyed. Finish the raster pass first.
1095 // Note: `have_context` is always false for Vulkan, so we don't gate this code
1096 // on it.
1097 if (sk_surface_ || scoped_shared_image_raster_write_) {
1098 DoEndRasterCHROMIUM();
1099 }
1100
Geoff Lang615cca22025-04-07 18:03:451101 if (have_context && use_gpu_raster_ && transfer_cache()) {
1102 transfer_cache()->DeleteAllEntriesForDecoder(raster_decoder_id_);
Jonathan Backer10821a82018-04-04 23:56:031103 }
1104
Peng Huang95a63e4b2020-07-03 21:28:111105 if (query_manager_) {
Jonathan Backer016bd97e2018-03-14 15:26:391106 query_manager_->Destroy(have_context);
1107 query_manager_.reset();
1108 }
1109
Khushal3d0b8902018-09-18 03:03:341110 font_manager_->Destroy();
1111 font_manager_.reset();
Jonathan Backera4568da12018-01-31 16:25:041112}
Jonathan Backer7f90dfb662017-12-18 16:52:041113
Jonathan Backer7f90dfb662017-12-18 16:52:041114// Make this decoder's GL context current.
Jonathan Backer7471b2782018-01-25 18:14:191115bool RasterDecoderImpl::MakeCurrent() {
Christopher Camerondd7812d2019-04-23 03:12:211116 if (!shared_context_state_->GrContextIsGL())
Jonathan Backer7621ec92018-12-11 00:02:441117 return true;
1118
Peng Huang66a7a3762018-12-07 20:05:381119 if (context_lost_) {
Jonathan Backer7471b2782018-01-25 18:14:191120 LOG(ERROR) << " RasterDecoderImpl: Trying to make lost context current.";
Jonathan Backer7f90dfb662017-12-18 16:52:041121 return false;
1122 }
1123
Peng Huang20361dad12019-01-23 14:48:101124 if (shared_context_state_->context_lost() ||
1125 !shared_context_state_->MakeCurrent(nullptr)) {
Jonathan Backer7471b2782018-01-25 18:14:191126 LOG(ERROR) << " RasterDecoderImpl: Context lost during MakeCurrent.";
Jonathan Backer7f90dfb662017-12-18 16:52:041127 return false;
1128 }
Peng Huang66a7a3762018-12-07 20:05:381129
Jonathan Backer34008d882018-03-27 18:57:551130 // Rebind textures if the service ids may have changed.
1131 RestoreAllExternalTextureBindingsIfNeeded();
1132
Vasiliy Telezhnikov751b5362021-09-14 13:35:491133 // We're going to use Skia, so make sure we reset the context afterwards.
1134 shared_context_state_->set_need_context_state_reset(true);
1135
Jonathan Backer7f90dfb662017-12-18 16:52:041136 return true;
1137}
1138
Jonathan Backer7471b2782018-01-25 18:14:191139gl::GLContext* RasterDecoderImpl::GetGLContext() {
Peng Huangad7442f2020-02-24 22:22:091140 return shared_context_state_->GrContextIsGL()
1141 ? shared_context_state_->context()
1142 : nullptr;
Jonathan Backer7f90dfb662017-12-18 16:52:041143}
1144
liberato@chromium.org00c8a6c2018-06-05 02:22:411145gl::GLSurface* RasterDecoderImpl::GetGLSurface() {
Peng Huangad7442f2020-02-24 22:22:091146 return shared_context_state_->GrContextIsGL()
1147 ? shared_context_state_->surface()
1148 : nullptr;
liberato@chromium.org00c8a6c2018-06-05 02:22:411149}
1150
Jonathan Backer7471b2782018-01-25 18:14:191151Capabilities RasterDecoderImpl::GetCapabilities() {
Adrienne Walker693f34d2019-07-29 22:07:041152 // TODO(enne): reconcile this with gles2_cmd_decoder's capability settings.
Jonathan Backer0cd1c4322018-04-17 16:57:101153 Capabilities caps;
kylecharee7338172022-02-02 16:59:101154 caps.gpu_rasterization = use_gpu_raster_;
Antoine Labour0f9bee72018-11-06 07:35:431155 caps.gpu_memory_buffer_formats =
Antoine Labour10dddca12019-02-19 19:09:261156 feature_info()->feature_flags().gpu_memory_buffer_formats;
Jonathan Backera07c4a9e2018-03-21 15:13:161157 caps.texture_format_bgra8888 =
Antoine Labour10dddca12019-02-19 19:09:261158 feature_info()->feature_flags().ext_texture_format_bgra8888;
kylechar81ac76d2023-02-14 21:43:491159 caps.texture_rg = feature_info()->feature_flags().ext_texture_rg;
Sunny Sachanandanie9ea4a92023-05-25 19:01:251160 caps.max_texture_size = shared_context_state_->GetMaxTextureSize();
Vikas Sonid2adcb242023-05-25 23:38:271161 caps.using_vulkan_context = shared_context_state_->GrContextIsVulkan();
1163
Saifuddin Hitawala9f187482023-10-11 22:43:091164 caps.max_copy_texture_chromium_size =
1165 feature_info()->workarounds().max_copy_texture_chromium_size;
1166 caps.texture_format_etc1_npot =
1167 feature_info()->feature_flags().oes_compressed_etc1_rgb8_texture &&
1168 !feature_info()->workarounds().etc1_power_of_two_only;
1169 caps.image_ycbcr_420v =
1170 feature_info()->feature_flags().chromium_image_ycbcr_420v;
Saifuddin Hitawala9f187482023-10-11 22:43:091171 caps.image_ar30 = feature_info()->feature_flags().chromium_image_ar30;
1172 caps.image_ab30 = feature_info()->feature_flags().chromium_image_ab30;
1173 caps.image_ycbcr_p010 =
1174 feature_info()->feature_flags().chromium_image_ycbcr_p010;
1175 caps.render_buffer_format_bgra8888 =
1176 feature_info()->feature_flags().ext_render_buffer_format_bgra8888;
Saifuddin Hitawala899d7f92025-03-20 22:43:181177
Saifuddin Hitawala9f187482023-10-11 22:43:091178 caps.angle_rgbx_internal_format =
1179 feature_info()->feature_flags().angle_rgbx_internal_format;
1180 caps.chromium_gpu_fence = feature_info()->feature_flags().chromium_gpu_fence;
1181 caps.mesa_framebuffer_flip_y =
1182 feature_info()->feature_flags().mesa_framebuffer_flip_y;
1183
Bo Liu1e4cf912022-06-27 16:38:541184 if (feature_info()->workarounds().webgl_or_caps_max_texture_size) {
Bo Liuf2f4c6d2022-04-28 20:34:451185 caps.max_texture_size =
1186 std::min(caps.max_texture_size,
Bo Liu1e4cf912022-06-27 16:38:541187 feature_info()->workarounds().webgl_or_caps_max_texture_size);
James Darpinian3c422032019-04-06 00:20:161188 }
Antoine Labour10dddca12019-02-19 19:09:261189 caps.sync_query = feature_info()->feature_flags().chromium_sync_query;
Justin Novosad5730d362023-07-19 22:05:471190 caps.msaa_is_slow = gles2::MSAAIsSlow(feature_info()->workarounds());
Adrienne Walker693f34d2019-07-29 22:07:041191 caps.avoid_stencil_buffers =
1192 feature_info()->workarounds().avoid_stencil_buffers;
Khushal315f2fd2018-05-29 03:20:391193
Khushalb2c140b2018-07-09 20:21:161194 if (gr_context()) {
Khushal315f2fd2018-05-29 03:20:391195 caps.context_supports_distance_field_text =
Khushalb2c140b2018-07-09 20:21:161196 gr_context()->supportsDistanceFieldText();
Dale Curtis816d5232020-07-29 19:31:351197 caps.texture_norm16 =
1198 gr_context()->colorTypeSupportedAsImage(kA16_unorm_SkColorType);
1199 caps.texture_half_float_linear =
1200 gr_context()->colorTypeSupportedAsImage(kA16_float_SkColorType);
Maggie Chen5d35a0142025-04-17 18:35:221201 } else if (graphite_shared_context()) {
Sunny Sachanandanif67f4952024-08-16 13:31:341202 caps.context_supports_distance_field_text = true;
Colin Blundelld7b7b472023-05-12 07:38:121203 caps.texture_half_float_linear = true;
Sunny Sachanandanif67f4952024-08-16 13:31:341204#if BUILDFLAG(SKIA_USE_DAWN)
1205 if (shared_context_state_->IsGraphiteDawn()) {
1206 caps.texture_norm16 =
1207 shared_context_state_->dawn_context_provider()->SupportsFeature(
1208 wgpu::FeatureName::Unorm16TextureFormats);
1209 }
1210#endif
1211#if BUILDFLAG(SKIA_USE_METAL)
1212 if (shared_context_state_->IsGraphiteMetal()) {
1213 caps.texture_norm16 = true;
1214 }
1215#endif
Dale Curtis816d5232020-07-29 19:31:351216 } else {
1217 caps.texture_norm16 = feature_info()->feature_flags().ext_texture_norm16;
1218 caps.texture_half_float_linear =
1219 feature_info()->feature_flags().enable_texture_half_float_linear;
Khushalf9750702018-06-09 00:42:131220 }
Saifuddin Hitawalab0c218e2023-10-27 17:25:291221
Maggie Chen5d35a0142025-04-17 18:35:221222 if (graphite_shared_context()) {
Saifuddin Hitawalab0c218e2023-10-27 17:25:291223 bool supports_multiplanar_rendering = false;
1224#if BUILDFLAG(SKIA_USE_DAWN)
Sunny Sachanandanif67f4952024-08-16 13:31:341225 if (shared_context_state_->IsGraphiteDawn()) {
1226 supports_multiplanar_rendering =
1227 shared_context_state_->dawn_context_provider()->SupportsFeature(
1228 wgpu::FeatureName::MultiPlanarRenderTargets);
Saifuddin Hitawalab0c218e2023-10-27 17:25:291229 }
1230#endif
Vasiliy Telezhnikovfb8ce252024-01-02 20:04:181231 caps.supports_rgb_to_yuv_conversion = supports_multiplanar_rendering;
Saifuddin Hitawalab0c218e2023-10-27 17:25:291232 caps.supports_yuv_readback = supports_multiplanar_rendering;
1233 } else {
Vasiliy Telezhnikovfb8ce252024-01-02 20:04:181234 caps.supports_rgb_to_yuv_conversion = true;
Saifuddin Hitawalab0c218e2023-10-27 17:25:291235 caps.supports_yuv_readback = true;
1236 }
1237
Georg Neis24b92cc42025-02-07 04:11:101238#if BUILDFLAG(IS_CHROMEOS)
Chia-I Wuae6d9262023-12-13 01:27:481239 if (shared_context_state_->GrContextIsGL()) {
1240 PopulateDRMCapabilities(&caps, feature_info());
1241 }
1242#if BUILDFLAG(ENABLE_VULKAN)
1243 else if (shared_context_state_->GrContextIsVulkan()) {
1244 auto* device_queue =
1245 shared_context_state_->vk_context_provider()->GetDeviceQueue();
1246 caps.drm_device_id = device_queue->drm_device_id();
1247 gpu::PopulateVkDrmFormatsAndModifiers(device_queue,
1248 caps.drm_formats_and_modifiers);
1249 }
1250#endif // BUILDFLAG(ENABLE_VULKAN)
Kevin Haslett0ae5ba12024-03-21 20:10:441251#if BUILDFLAG(SKIA_USE_DAWN)
1252 else if (shared_context_state_->IsGraphiteDawnVulkan()) {
1253 auto adapter = shared_context_state_->dawn_context_provider()
1254 ->GetDevice()
1255 .GetAdapter();
1256 gpu::PopulateDawnDrmFormatsAndModifiers(adapter,
1257 caps.drm_formats_and_modifiers);
1258 }
1259#endif // BUILDFLAG(SKIA_USE_DAWN)
Chia-I Wuae6d9262023-12-13 01:27:481260 else {
Peter BostrÃļma11556e2024-10-31 04:49:101261 NOTREACHED();
Chia-I Wuae6d9262023-12-13 01:27:481262 }
Georg Neis24b92cc42025-02-07 04:11:101263#endif // BUILDFLAG(IS_CHROMEOS)
Chia-I Wuae6d9262023-12-13 01:27:481264
Jonathan Backer7f90dfb662017-12-18 16:52:041265 return caps;
1266}
1267
Saifuddin Hitawalab2da4f02023-10-04 15:36:461268GLCapabilities RasterDecoderImpl::GetGLCapabilities() {
1269 return GLCapabilities();
1270}
1271
Peng Huangc6a76072018-11-27 23:17:311272const gles2::ContextState* RasterDecoderImpl::GetContextState() {
Peter BostrÃļma11556e2024-10-31 04:49:101273 NOTREACHED();
Peng Huangc6a76072018-11-27 23:17:311274}
1275
Jonathan Backer34008d882018-03-27 18:57:551276void RasterDecoderImpl::RestoreGlobalState() const {
Peng Huangd7aaf782019-01-09 22:03:081277 // We mark the context state as dirty instead of restoring the global
1278 // state; the global state will be restored by the next context.
Peng Huang20361dad12019-01-23 14:48:101279 shared_context_state_->set_need_context_state_reset(true);
1280 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer34008d882018-03-27 18:57:551281}
1282
Peng Huangc6a76072018-11-27 23:17:311283void RasterDecoderImpl::ClearAllAttributes() const {}
Jonathan Backer34008d882018-03-27 18:57:551284
1285void RasterDecoderImpl::RestoreAllAttributes() const {
Peng Huang20361dad12019-01-23 14:48:101286 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer34008d882018-03-27 18:57:551287}
1288
Daniel Bratellaabc8d68e2018-07-18 19:20:261289void RasterDecoderImpl::RestoreState(const gles2::ContextState* prev_state) {
Peng Huang20361dad12019-01-23 14:48:101290 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041291}
1292
Jonathan Backer7471b2782018-01-25 18:14:191293void RasterDecoderImpl::RestoreActiveTexture() const {
Peng Huang20361dad12019-01-23 14:48:101294 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041295}
1296
Jonathan Backer7471b2782018-01-25 18:14:191297void RasterDecoderImpl::RestoreAllTextureUnitAndSamplerBindings(
Daniel Bratellaabc8d68e2018-07-18 19:20:261298 const gles2::ContextState* prev_state) const {
Peng Huang20361dad12019-01-23 14:48:101299 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041300}
1301
Jonathan Backer7471b2782018-01-25 18:14:191302void RasterDecoderImpl::RestoreActiveTextureUnitBinding(
1303 unsigned int target) const {
Peng Huang20361dad12019-01-23 14:48:101304 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041305}
1306
Jonathan Backer7471b2782018-01-25 18:14:191307void RasterDecoderImpl::RestoreBufferBinding(unsigned int target) {
Peng Huang20361dad12019-01-23 14:48:101308 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041309}
1310
Jonathan Backer7471b2782018-01-25 18:14:191311void RasterDecoderImpl::RestoreBufferBindings() const {
Peng Huang20361dad12019-01-23 14:48:101312 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041313}
1314
Jonathan Backer7471b2782018-01-25 18:14:191315void RasterDecoderImpl::RestoreFramebufferBindings() const {
Peng Huang20361dad12019-01-23 14:48:101316 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041317}
1318
Jonathan Backer7471b2782018-01-25 18:14:191319void RasterDecoderImpl::RestoreRenderbufferBindings() {
Peng Huang20361dad12019-01-23 14:48:101320 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041321}
1322
Jonathan Backer7471b2782018-01-25 18:14:191323void RasterDecoderImpl::RestoreProgramBindings() const {
Peng Huang20361dad12019-01-23 14:48:101324 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041325}
1326
Antoine Labour9ddf6ac2019-01-17 01:59:391327void RasterDecoderImpl::RestoreTextureState(unsigned service_id) {
1328 DCHECK(in_copy_sub_texture_);
1329 reset_texture_state_ = true;
Jonathan Backer7f90dfb662017-12-18 16:52:041330}
1331
Jonathan Backer7471b2782018-01-25 18:14:191332void RasterDecoderImpl::RestoreTextureUnitBindings(unsigned unit) const {
Peng Huang20361dad12019-01-23 14:48:101333 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041334}
1335
Jonathan Backer7471b2782018-01-25 18:14:191336void RasterDecoderImpl::RestoreVertexAttribArray(unsigned index) {
Peng Huang20361dad12019-01-23 14:48:101337 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041338}
1339
Jonathan Backer7471b2782018-01-25 18:14:191340void RasterDecoderImpl::RestoreAllExternalTextureBindingsIfNeeded() {
Peng Huang20361dad12019-01-23 14:48:101341 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer7f90dfb662017-12-18 16:52:041342}
1343
Jonathan Backer7471b2782018-01-25 18:14:191344QueryManager* RasterDecoderImpl::GetQueryManager() {
Jonathan Backer016bd97e2018-03-14 15:26:391345 return query_manager_.get();
Jonathan Backer7f90dfb662017-12-18 16:52:041346}
1347
Geoff Lang85287e12018-05-22 17:26:401348void RasterDecoderImpl::SetQueryCallback(unsigned int query_client_id,
1349 base::OnceClosure callback) {
1350 QueryManager::Query* query = query_manager_->GetQuery(query_client_id);
1351 if (query) {
1352 query->AddCallback(std::move(callback));
1353 } else {
1354 VLOG(1) << "RasterDecoderImpl::SetQueryCallback: No query with ID "
1355 << query_client_id << ". Running the callback immediately.";
1356 std::move(callback).Run();
1357 }
1358}
1359
Jeffrey Cohen11a6cad2023-07-10 19:56:181360void RasterDecoderImpl::CancelAllQueries() {
1361 query_manager_->RemoveAllQueries();
1362}
1363
Daniel Bratellaabc8d68e2018-07-18 19:20:261364gles2::GpuFenceManager* RasterDecoderImpl::GetGpuFenceManager() {
Jonathan Backer7f90dfb662017-12-18 16:52:041365 NOTIMPLEMENTED();
1366 return nullptr;
1367}
1368
Jonathan Backer7471b2782018-01-25 18:14:191369bool RasterDecoderImpl::HasPendingQueries() const {
Peng Huang66a7a3762018-12-07 20:05:381370 return query_manager_ && query_manager_->HavePendingQueries();
Jonathan Backer7f90dfb662017-12-18 16:52:041371}
1372
Jonathan Backer7471b2782018-01-25 18:14:191373void RasterDecoderImpl::ProcessPendingQueries(bool did_finish) {
Vasiliy Telezhnikov517dd8c2022-05-17 15:39:471374 if (query_manager_) {
Colin Blundelld7b7b472023-05-12 07:38:121375 if (gr_context()) {
1376 gr_context()->checkAsyncWorkCompletion();
Maggie Chen5d35a0142025-04-17 18:35:221377 } else if (graphite_shared_context()) {
1378 graphite_shared_context()->checkAsyncWorkCompletion();
Colin Blundelld7b7b472023-05-12 07:38:121379 }
Peng Huang66a7a3762018-12-07 20:05:381380 query_manager_->ProcessPendingQueries(did_finish);
Vasiliy Telezhnikov517dd8c2022-05-17 15:39:471381 }
Jonathan Backer7f90dfb662017-12-18 16:52:041382}
1383
Jonathan Backer7471b2782018-01-25 18:14:191384bool RasterDecoderImpl::HasMoreIdleWork() const {
Jonathan Backere26739c2018-05-15 13:27:071385 return gpu_tracer_->HasTracesToProcess();
Jonathan Backer7f90dfb662017-12-18 16:52:041386}
1387
Jonathan Backer7471b2782018-01-25 18:14:191388void RasterDecoderImpl::PerformIdleWork() {
Jonathan Backere26739c2018-05-15 13:27:071389 gpu_tracer_->ProcessTraces();
Jonathan Backer7f90dfb662017-12-18 16:52:041390}
1391
Jonathan Backer7471b2782018-01-25 18:14:191392bool RasterDecoderImpl::HasPollingWork() const {
Jonathan Backer7f90dfb662017-12-18 16:52:041393 return false;
1394}
1395
Jonathan Backerc2b0efa2018-03-21 16:58:351396void RasterDecoderImpl::PerformPollingWork() {}
Jonathan Backer7f90dfb662017-12-18 16:52:041397
Jonathan Backer7471b2782018-01-25 18:14:191398TextureBase* RasterDecoderImpl::GetTextureBase(uint32_t client_id) {
Jonathan Backer7f90dfb662017-12-18 16:52:041399 NOTIMPLEMENTED();
1400 return nullptr;
1401}
1402
Julien Isorcefd6c15f2018-03-15 16:51:171403void RasterDecoderImpl::SetLevelInfo(uint32_t client_id,
1404 int level,
1405 unsigned internal_format,
1406 unsigned width,
1407 unsigned height,
1408 unsigned depth,
1409 unsigned format,
1410 unsigned type,
1411 const gfx::Rect& cleared_rect) {
1412 NOTIMPLEMENTED();
1413}
1414
Jonathan Backer7471b2782018-01-25 18:14:191415bool RasterDecoderImpl::WasContextLost() const {
Jonathan Backerbaf79d92020-06-01 21:30:301416 return shared_context_state_->context_lost();
Jonathan Backer7f90dfb662017-12-18 16:52:041417}
1418
Jonathan Backer7471b2782018-01-25 18:14:191419bool RasterDecoderImpl::WasContextLostByRobustnessExtension() const {
Jonathan Backerbaf79d92020-06-01 21:30:301420 return shared_context_state_->device_needs_reset();
Jonathan Backer7f90dfb662017-12-18 16:52:041421}
1422
Jonathan Backer7471b2782018-01-25 18:14:191423void RasterDecoderImpl::MarkContextLost(error::ContextLostReason reason) {
Jonathan Backerbaf79d92020-06-01 21:30:301424 shared_context_state_->MarkContextLost(reason);
1425}
Jonathan Backer4f9ee5fb2018-04-25 14:03:381426
Jonathan Backerbaf79d92020-06-01 21:30:301427void RasterDecoderImpl::OnContextLost() {
1428 DCHECK(shared_context_state_->context_lost());
1429 command_buffer_service()->SetContextLostReason(
1430 *shared_context_state_->context_lost_reason());
Jonathan Backer4f9ee5fb2018-04-25 14:03:381431 current_decoder_error_ = error::kLostContext;
Jonathan Backer7f90dfb662017-12-18 16:52:041432}
1433
Jonathan Backer7471b2782018-01-25 18:14:191434bool RasterDecoderImpl::CheckResetStatus() {
Jonathan Backer4f9ee5fb2018-04-25 14:03:381435 DCHECK(!WasContextLost());
Jonathan Backerbaf79d92020-06-01 21:30:301436 return shared_context_state_->CheckResetStatus(/*needs_gl=*/false);
Jonathan Backer7f90dfb662017-12-18 16:52:041437}
1438
Daniel Bratellaabc8d68e2018-07-18 19:20:261439gles2::Logger* RasterDecoderImpl::GetLogger() {
Jonathan Backera4568da12018-01-31 16:25:041440 return &logger_;
1441}
1442
1443void RasterDecoderImpl::SetIgnoreCachedStateForTest(bool ignore) {
Antoine Labour10dddca12019-02-19 19:09:261444 if (use_passthrough_)
Jonathan Backer628d73f82019-01-17 17:34:181445 return;
Peng Huang66a7a3762018-12-07 20:05:381446 state()->SetIgnoreCachedStateForTest(ignore);
Jonathan Backera4568da12018-01-31 16:25:041447}
1448
Jonathan Backer7471b2782018-01-25 18:14:191449void RasterDecoderImpl::BeginDecoding() {
Jonathan Backere26739c2018-05-15 13:27:071450 gpu_tracer_->BeginDecoding();
1451 gpu_trace_commands_ = gpu_tracer_->IsTracing() && *gpu_decoder_category_;
1452 gpu_debug_commands_ = log_commands() || debug() || gpu_trace_commands_;
Khushalcd8fbb772018-10-16 22:46:141453 query_manager_->BeginProcessingCommands();
Jonathan Backer7f90dfb662017-12-18 16:52:041454}
1455
Jonathan Backere26739c2018-05-15 13:27:071456void RasterDecoderImpl::EndDecoding() {
1457 gpu_tracer_->EndDecoding();
Khushalcd8fbb772018-10-16 22:46:141458 query_manager_->EndProcessingCommands();
Jonathan Backere26739c2018-05-15 13:27:071459}
Jonathan Backer7f90dfb662017-12-18 16:52:041460
Jonathan Backer7471b2782018-01-25 18:14:191461const char* RasterDecoderImpl::GetCommandName(unsigned int command_id) const {
1462 if (command_id >= kFirstRasterCommand && command_id < kNumCommands) {
Jonathan Backera4568da12018-01-31 16:25:041463 return raster::GetCommandName(static_cast<CommandId>(command_id));
Jonathan Backer7f90dfb662017-12-18 16:52:041464 }
1465 return GetCommonCommandName(static_cast<cmd::CommandId>(command_id));
1466}
1467
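// A rough sketch of the command-stream layout that DoCommandsImpl() below
// walks, one CommandBufferEntry at a time. Each command occupies `size`
// entries: the first entry is the header and the remaining `size - 1`
// entries are its arguments (plus any trailing immediate data for
// cmd::kAtLeastN commands):
//
//   entry 0:                    value_header {size, command}
//   entries 1..info.arg_count:  fixed arguments
//   entries beyond that:        immediate data (cmd::kAtLeastN only)
//
// Raster commands are dispatched through command_info; commands outside the
// raster range fall through to DoCommonCommand().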
Jonathan Backer1d807a42018-01-08 20:45:541468template <bool DebugImpl>
Jonathan Backer7471b2782018-01-25 18:14:191469error::Error RasterDecoderImpl::DoCommandsImpl(unsigned int num_commands,
1470 const volatile void* buffer,
1471 int num_entries,
1472 int* entries_processed) {
Jonathan Backer7f90dfb662017-12-18 16:52:041473 DCHECK(entries_processed);
1474 commands_to_process_ = num_commands;
1475 error::Error result = error::kNoError;
1476 const volatile CommandBufferEntry* cmd_data =
1477 static_cast<const volatile CommandBufferEntry*>(buffer);
1478 int process_pos = 0;
Jonathan Backer2231a002018-05-14 19:55:221479 CommandId command = static_cast<CommandId>(0);
Jonathan Backer7f90dfb662017-12-18 16:52:041480
1481 while (process_pos < num_entries && result == error::kNoError &&
1482 commands_to_process_--) {
1483 const unsigned int size = cmd_data->value_header.size;
Jonathan Backer2231a002018-05-14 19:55:221484 command = static_cast<CommandId>(cmd_data->value_header.command);
Jonathan Backer7f90dfb662017-12-18 16:52:041485
1486 if (size == 0) {
1487 result = error::kInvalidSize;
1488 break;
1489 }
1490
1491 if (static_cast<int>(size) + process_pos > num_entries) {
1492 result = error::kOutOfBounds;
1493 break;
1494 }
1495
Jonathan Backer1d807a42018-01-08 20:45:541496 if (DebugImpl && log_commands()) {
1497 LOG(ERROR) << "[" << logger_.GetLogPrefix() << "]"
1498 << "cmd: " << GetCommandName(command);
1499 }
1500
Jonathan Backer7f90dfb662017-12-18 16:52:041501 const unsigned int arg_count = size - 1;
Jonathan Backer7471b2782018-01-25 18:14:191502 unsigned int command_index = command - kFirstRasterCommand;
Daniel Chengadafb3a2022-02-28 07:38:221503 if (command_index < std::size(command_info)) {
Jonathan Backer7f90dfb662017-12-18 16:52:041504 const CommandInfo& info = command_info[command_index];
Jonathan Backer2231a002018-05-14 19:55:221505 if (sk_surface_) {
1506 if (!AllowedBetweenBeginEndRaster(command)) {
1507 LOCAL_SET_GL_ERROR(
1508 GL_INVALID_OPERATION, GetCommandName(command),
1509 "Unexpected command between BeginRasterCHROMIUM and "
1510 "EndRasterCHROMIUM");
1511 process_pos += size;
1512 cmd_data += size;
1513 continue;
1514 }
1515 }
Jonathan Backer7f90dfb662017-12-18 16:52:041516 unsigned int info_arg_count = static_cast<unsigned int>(info.arg_count);
1517 if ((info.arg_flags == cmd::kFixed && arg_count == info_arg_count) ||
1518 (info.arg_flags == cmd::kAtLeastN && arg_count >= info_arg_count)) {
Jonathan Backere26739c2018-05-15 13:27:071519 bool doing_gpu_trace = false;
1520 if (DebugImpl && gpu_trace_commands_) {
1521 if (CMD_FLAG_GET_TRACE_LEVEL(info.cmd_flags) <= gpu_trace_level_) {
1522 doing_gpu_trace = true;
Miguel Casas510fbfcb2018-11-13 23:40:571523 gpu_tracer_->Begin(TRACE_DISABLED_BY_DEFAULT("gpu.decoder"),
Daniel Bratellaabc8d68e2018-07-18 19:20:261524 GetCommandName(command), gles2::kTraceDecoder);
Jonathan Backere26739c2018-05-15 13:27:071525 }
1526 }
1527
Jonathan Backer7f90dfb662017-12-18 16:52:041528 uint32_t immediate_data_size = (arg_count - info_arg_count) *
1529 sizeof(CommandBufferEntry); // NOLINT
Jonathan Backer7471b2782018-01-25 18:14:191530 result = (this->*info.cmd_handler)(immediate_data_size, cmd_data);
Jonathan Backere26739c2018-05-15 13:27:071531
1532 if (DebugImpl && doing_gpu_trace)
Daniel Bratellaabc8d68e2018-07-18 19:20:261533 gpu_tracer_->End(gles2::kTraceDecoder);
Jonathan Backere26739c2018-05-15 13:27:071534
Kramer Gee6c74692020-02-07 21:07:311535 if (DebugImpl && shared_context_state_->GrContextIsGL() && debug() &&
1536 !WasContextLost()) {
Sunny Sachanandani46df8ac2023-09-06 23:18:431537 gl::GLApi* const api = gl::g_current_gl_context;
Jonathan Backer7471b2782018-01-25 18:14:191538 GLenum error;
Sunny Sachanandani46df8ac2023-09-06 23:18:431539 while ((error = api->glGetErrorFn()) != GL_NO_ERROR) {
Jonathan Backer7471b2782018-01-25 18:14:191540 LOG(ERROR) << "[" << logger_.GetLogPrefix() << "] "
Daniel Bratellaabc8d68e2018-07-18 19:20:261541 << "GL ERROR: " << gles2::GLES2Util::GetStringEnum(error)
Jonathan Backer7471b2782018-01-25 18:14:191542 << " : " << GetCommandName(command);
1543 LOCAL_SET_GL_ERROR(error, "DoCommand", "GL error from driver");
Jonathan Backer1d807a42018-01-08 20:45:541544 }
Jonathan Backer7f90dfb662017-12-18 16:52:041545 }
1546 } else {
1547 result = error::kInvalidArguments;
1548 }
1549 } else {
1550 result = DoCommonCommand(command, arg_count, cmd_data);
1551 }
1552
1553 if (result == error::kNoError &&
1554 current_decoder_error_ != error::kNoError) {
1555 result = current_decoder_error_;
1556 current_decoder_error_ = error::kNoError;
1557 }
1558
1559 if (result != error::kDeferCommandUntilLater) {
1560 process_pos += size;
1561 cmd_data += size;
1562 }
Christopher Cameron02e3149a2019-01-25 19:52:231563
Christopher Cameronbeb5f272019-02-04 22:49:131564 // Workaround for https://crbug.com/906453: Flush after every command that
1565 // is not between a BeginRaster and EndRaster.
1566 if (!sk_surface_)
1567 FlushToWorkAroundMacCrashes();
Jonathan Backer7f90dfb662017-12-18 16:52:041568 }
1569
1570 *entries_processed = process_pos;
1571
1572 if (error::IsError(result)) {
1573 LOG(ERROR) << "Error: " << result << " for Command "
1574 << GetCommandName(command);
1575 }
1576
kylecharee7338172022-02-02 16:59:101577 if (use_gpu_raster_)
Corentin Wallez0f412f02019-04-03 22:42:381578 client()->ScheduleGrContextCleanup();
Khushal7324ec42018-07-10 20:01:451579
Jonathan Backer7f90dfb662017-12-18 16:52:041580 return result;
1581}
1582
Jonathan Backer7471b2782018-01-25 18:14:191583error::Error RasterDecoderImpl::DoCommands(unsigned int num_commands,
1584 const volatile void* buffer,
1585 int num_entries,
1586 int* entries_processed) {
Jonathan Backer1d807a42018-01-08 20:45:541587 if (gpu_debug_commands_) {
1588 return DoCommandsImpl<true>(num_commands, buffer, num_entries,
1589 entries_processed);
1590 } else {
1591 return DoCommandsImpl<false>(num_commands, buffer, num_entries,
1592 entries_processed);
1593 }
Jonathan Backer7f90dfb662017-12-18 16:52:041594}
1595
Corentin Wallez0f412f02019-04-03 22:42:381596void RasterDecoderImpl::ExitCommandProcessingEarly() {
1597 commands_to_process_ = 0;
1598}
1599
Helmut Januschkab81e97dc2024-04-17 14:57:051600std::string_view RasterDecoderImpl::GetLogPrefix() {
Jonathan Backer1d807a42018-01-08 20:45:541601 return logger_.GetLogPrefix();
Jonathan Backer7f90dfb662017-12-18 16:52:041602}
1603
Jonathan Backer7471b2782018-01-25 18:14:191604gles2::ContextGroup* RasterDecoderImpl::GetContextGroup() {
Antoine Labour914f3af2019-02-14 20:24:131605 return nullptr;
Jonathan Backer1d807a42018-01-08 20:45:541606}
1607
Jonathan Backer7471b2782018-01-25 18:14:191608gles2::ErrorState* RasterDecoderImpl::GetErrorState() {
Peng Huangb4ed1852018-12-05 03:35:291609 return error_state_.get();
Jonathan Backer1d807a42018-01-08 20:45:541610}
1611
Jonathan Backerc26060e2018-03-29 15:06:291612bool RasterDecoderImpl::IsCompressedTextureFormat(unsigned format) {
Antoine Labour10dddca12019-02-19 19:09:261613 return feature_info()->validators()->compressed_texture_format.IsValid(
1614 format);
Jonathan Backerc26060e2018-03-29 15:06:291615}
1616
1617bool RasterDecoderImpl::ClearLevel(gles2::Texture* texture,
1618 unsigned target,
1619 int level,
1620 unsigned format,
1621 unsigned type,
1622 int xoffset,
1623 int yoffset,
1624 int width,
1625 int height) {
Peter BostrÃļma11556e2024-10-31 04:49:101626 NOTREACHED();
Jonathan Backerc26060e2018-03-29 15:06:291627}
1628
1629bool RasterDecoderImpl::ClearCompressedTextureLevel(gles2::Texture* texture,
1630 unsigned target,
1631 int level,
1632 unsigned format,
1633 int width,
1634 int height) {
Peter BostrÃļma11556e2024-10-31 04:49:101635 NOTREACHED();
Jonathan Backerc26060e2018-03-29 15:06:291636}
1637
shrekshaoad4525482020-03-12 00:30:551638bool RasterDecoderImpl::ClearCompressedTextureLevel3D(gles2::Texture* texture,
1639 unsigned target,
1640 int level,
1641 unsigned format,
1642 int width,
1643 int height,
1644 int depth) {
Peter BostrÃļma11556e2024-10-31 04:49:101645 NOTREACHED();
shrekshaoad4525482020-03-12 00:30:551646}
1647
Andres Calderon Jaramillob870ca62019-01-29 16:33:351648int RasterDecoderImpl::GetRasterDecoderId() const {
1649 return raster_decoder_id_;
1650}
1651
Khushal996e9912018-07-13 08:31:001652int RasterDecoderImpl::DecoderIdForTest() {
1653 return raster_decoder_id_;
1654}
1655
Jonathan Backer0cd1c4322018-04-17 16:57:101656ServiceTransferCache* RasterDecoderImpl::GetTransferCacheForTest() {
Peng Huang20361dad12019-01-23 14:48:101657 return shared_context_state_->transfer_cache();
Jonathan Backer0cd1c4322018-04-17 16:57:101658}
1659
Khushal15b6abb2018-06-28 00:16:251660void RasterDecoderImpl::SetUpForRasterCHROMIUMForTest() {
1661 // Some tests use mock GL, which doesn't work with Skia. Just use a
1662 // bitmap-backed surface for OOP raster commands.
Mike Reed421f75d2019-02-25 20:30:031663 auto info = SkImageInfo::MakeN32(10, 10, kPremul_SkAlphaType,
1664 SkColorSpace::MakeSRGB());
Ian Preste598eece2020-10-19 23:31:021665 SkSurfaceProps props = skia::LegacyDisplayGlobals::GetSkSurfaceProps();
Kevin Lubick671644d2023-05-12 16:32:001666 sk_surface_for_testing_ = SkSurfaces::Raster(info, &props);
Peng Huange9b41cd2019-08-12 19:39:471667 sk_surface_ = sk_surface_for_testing_.get();
Mike Reed421f75d2019-02-25 20:30:031668 raster_canvas_ = sk_surface_->getCanvas();
Khushal15b6abb2018-06-28 00:16:251669}
1670
Jonathan Backerc7b82972018-11-21 19:08:481671void RasterDecoderImpl::SetOOMErrorForTest() {
1672 LOCAL_SET_GL_ERROR(GL_OUT_OF_MEMORY, "SetOOMErrorForTest",
1673 "synthetic out of memory");
1674}
1675
Christopher Cameron386e18582019-01-11 20:17:341676void RasterDecoderImpl::DisableFlushWorkaroundForTest() {
1677 flush_workaround_disabled_for_test_ = true;
1678}
1679
Jonathan Backer7471b2782018-01-25 18:14:191680void RasterDecoderImpl::OnContextLostError() {
Jonathan Backer4f9ee5fb2018-04-25 14:03:381681 if (!WasContextLost()) {
1682 // Need to lose current context before broadcasting!
Jonathan Backerbaf79d92020-06-01 21:30:301683 shared_context_state_->CheckResetStatus(/*needs_gl=*/false);
Jonathan Backer4f9ee5fb2018-04-25 14:03:381684 }
Jonathan Backer7f90dfb662017-12-18 16:52:041685}
1686
Jonathan Backer7471b2782018-01-25 18:14:191687void RasterDecoderImpl::OnOutOfMemoryError() {
Jonathan Backer4f9ee5fb2018-04-25 14:03:381688 if (lose_context_when_out_of_memory_ && !WasContextLost()) {
Jonathan Backerbaf79d92020-06-01 21:30:301689 if (!shared_context_state_->CheckResetStatus(/*needs_gl=*/false)) {
Jonathan Backer4f9ee5fb2018-04-25 14:03:381690 MarkContextLost(error::kOutOfMemory);
1691 }
Jonathan Backer4f9ee5fb2018-04-25 14:03:381692 }
Jonathan Backer7f90dfb662017-12-18 16:52:041693}
1694
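// The query handlers below all resolve a client-provided QuerySync block in
// shared memory (sync_data_shm_id / sync_data_shm_offset in the command);
// completion is later reported back through that block by the query manager.
// Only a small set of targets is accepted: GL_COMMANDS_ISSUED_CHROMIUM and
// GL_COMMANDS_COMPLETED_CHROMIUM for Begin/EndQueryEXT, and
// GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM for QueryCounterEXT.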
Jonathan Backer7471b2782018-01-25 18:14:191695error::Error RasterDecoderImpl::HandleBeginQueryEXT(
1696 uint32_t immediate_data_size,
1697 const volatile void* cmd_data) {
Jonathan Backer016bd97e2018-03-14 15:26:391698 const volatile raster::cmds::BeginQueryEXT& c =
1699 *static_cast<const volatile raster::cmds::BeginQueryEXT*>(cmd_data);
1700 GLenum target = static_cast<GLenum>(c.target);
1701 GLuint client_id = static_cast<GLuint>(c.id);
1702 int32_t sync_shm_id = static_cast<int32_t>(c.sync_data_shm_id);
1703 uint32_t sync_shm_offset = static_cast<uint32_t>(c.sync_data_shm_offset);
1704
1705 switch (target) {
1706 case GL_COMMANDS_ISSUED_CHROMIUM:
Jonathan Backer80b270292018-12-13 19:13:551707 break;
Jonathan Backer016bd97e2018-03-14 15:26:391708 case GL_COMMANDS_COMPLETED_CHROMIUM:
Jonathan Backer80b270292018-12-13 19:13:551709 if (!features().chromium_sync_query) {
1710 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginQueryEXT",
1711 "not enabled for commands completed queries");
1712 return error::kNoError;
1713 }
Jonathan Backer016bd97e2018-03-14 15:26:391714 break;
1715 default:
1716 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "glBeginQueryEXT",
1717 "unknown query target");
1718 return error::kNoError;
1719 }
1720
1721 if (query_manager_->GetActiveQuery(target)) {
1722 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginQueryEXT",
1723 "query already in progress");
1724 return error::kNoError;
1725 }
1726
1727 if (client_id == 0) {
1728 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginQueryEXT", "id is 0");
1729 return error::kNoError;
1730 }
1731
Jonathan Backer0cd1c4322018-04-17 16:57:101732 scoped_refptr<Buffer> buffer = GetSharedMemoryBuffer(sync_shm_id);
Jonathan Backer016bd97e2018-03-14 15:26:391733 if (!buffer)
1734 return error::kInvalidArguments;
1735 QuerySync* sync = static_cast<QuerySync*>(
1736 buffer->GetDataAddress(sync_shm_offset, sizeof(QuerySync)));
1737 if (!sync)
1738 return error::kOutOfBounds;
1739
1740 QueryManager::Query* query = query_manager_->GetQuery(client_id);
1741 if (!query) {
1742 if (!query_manager_->IsValidQuery(client_id)) {
1743 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginQueryEXT",
1744 "id not made by glGenQueriesEXT");
1745 return error::kNoError;
1746 }
1747
1748 query =
1749 query_manager_->CreateQuery(target, client_id, std::move(buffer), sync);
1750 } else {
1751 if (query->target() != target) {
1752 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginQueryEXT",
1753 "target does not match");
1754 return error::kNoError;
1755 } else if (query->sync() != sync) {
1756 DLOG(ERROR) << "Shared memory used by query not the same as before";
1757 return error::kInvalidArguments;
1758 }
1759 }
1760
1761 query_manager_->BeginQuery(query);
Jonathan Backer7471b2782018-01-25 18:14:191762 return error::kNoError;
1763}
1764
1765error::Error RasterDecoderImpl::HandleEndQueryEXT(
1766 uint32_t immediate_data_size,
1767 const volatile void* cmd_data) {
Jonathan Backer016bd97e2018-03-14 15:26:391768 const volatile raster::cmds::EndQueryEXT& c =
1769 *static_cast<const volatile raster::cmds::EndQueryEXT*>(cmd_data);
1770 GLenum target = static_cast<GLenum>(c.target);
1771 uint32_t submit_count = static_cast<GLuint>(c.submit_count);
1772
1773 QueryManager::Query* query = query_manager_->GetActiveQuery(target);
1774 if (!query) {
1775 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glEndQueryEXT",
1776 "No active query");
1777 return error::kNoError;
1778 }
1779
1780 query_manager_->EndQuery(query, submit_count);
Jonathan Backer7471b2782018-01-25 18:14:191781 return error::kNoError;
1782}
1783
Andres Calderon Jaramillo21e98152019-08-22 01:38:041784error::Error RasterDecoderImpl::HandleQueryCounterEXT(
1785 uint32_t immediate_data_size,
1786 const volatile void* cmd_data) {
1787 const volatile raster::cmds::QueryCounterEXT& c =
1788 *static_cast<const volatile raster::cmds::QueryCounterEXT*>(cmd_data);
1789 GLenum target = static_cast<GLenum>(c.target);
1790 GLuint client_id = static_cast<GLuint>(c.id);
1791 int32_t sync_shm_id = static_cast<int32_t>(c.sync_data_shm_id);
1792 uint32_t sync_shm_offset = static_cast<uint32_t>(c.sync_data_shm_offset);
1793 uint32_t submit_count = static_cast<GLuint>(c.submit_count);
1794
1795 if (target != GL_COMMANDS_ISSUED_TIMESTAMP_CHROMIUM) {
1796 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "glQueryCounterEXT",
1797 "unknown query target");
1798 return error::kNoError;
1799 }
1800
1801 scoped_refptr<Buffer> buffer = GetSharedMemoryBuffer(sync_shm_id);
1802 if (!buffer)
1803 return error::kInvalidArguments;
1804 QuerySync* sync = static_cast<QuerySync*>(
1805 buffer->GetDataAddress(sync_shm_offset, sizeof(QuerySync)));
1806 if (!sync)
1807 return error::kOutOfBounds;
1808
1809 QueryManager::Query* query = query_manager_->GetQuery(client_id);
1810 if (!query) {
1811 if (!query_manager_->IsValidQuery(client_id)) {
1812 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glQueryCounterEXT",
1813 "id not made by glGenQueriesEXT");
1814 return error::kNoError;
1815 }
1816 query =
1817 query_manager_->CreateQuery(target, client_id, std::move(buffer), sync);
1818 } else {
1819 if (query->target() != target) {
1820 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glQueryCounterEXT",
1821 "target does not match");
1822 return error::kNoError;
1823 } else if (query->sync() != sync) {
1824 DLOG(ERROR) << "Shared memory used by query not the same as before";
1825 return error::kInvalidArguments;
1826 }
1827 }
1828 query_manager_->QueryCounter(query, submit_count);
1829
1830 return error::kNoError;
1831}
1832
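// DoFinish() and DoFlush() differ only in their arguments: Finish flushes and
// submits with sync_to_cpu=true and then processes pending queries as
// finished, while Flush submits without waiting (sync_to_cpu=false,
// did_finish=false).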
Jonathan Backera4568da12018-01-31 16:25:041833void RasterDecoderImpl::DoFinish() {
Sunny Sachanandania2094212024-03-20 21:50:551834 shared_context_state_->FlushAndSubmit(/*sync_to_cpu=*/true);
Peng Huang63ef8872021-06-17 22:31:201835 ProcessPendingQueries(/*did_finish=*/true);
Jonathan Backera4568da12018-01-31 16:25:041836}
1837
1838void RasterDecoderImpl::DoFlush() {
Sunny Sachanandania2094212024-03-20 21:50:551839 shared_context_state_->FlushAndSubmit(/*sync_to_cpu=*/false);
Peng Huang63ef8872021-06-17 22:31:201840 ProcessPendingQueries(/*did_finish=*/false);
Jonathan Backera4568da12018-01-31 16:25:041841}
1842
Jonathan Backer016bd97e2018-03-14 15:26:391843bool RasterDecoderImpl::GenQueriesEXTHelper(GLsizei n,
1844 const GLuint* client_ids) {
1845 for (GLsizei ii = 0; ii < n; ++ii) {
1846 if (query_manager_->IsValidQuery(client_ids[ii])) {
1847 return false;
1848 }
1849 }
1850 query_manager_->GenQueries(n, client_ids);
1851 return true;
1852}
1853
1854void RasterDecoderImpl::DeleteQueriesEXTHelper(
1855 GLsizei n,
1856 const volatile GLuint* client_ids) {
1857 for (GLsizei ii = 0; ii < n; ++ii) {
1858 GLuint client_id = client_ids[ii];
1859 query_manager_->RemoveQuery(client_id);
1860 }
1861}
1862
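// Trace categories and names arrive as buckets (client-filled blobs copied
// into service memory); both are capped at kMaxStrLen below to bound the
// strings handed to the GPU tracer and the debug marker manager.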
Jonathan Backere26739c2018-05-15 13:27:071863error::Error RasterDecoderImpl::HandleTraceBeginCHROMIUM(
1864 uint32_t immediate_data_size,
1865 const volatile void* cmd_data) {
1866 const volatile gles2::cmds::TraceBeginCHROMIUM& c =
1867 *static_cast<const volatile gles2::cmds::TraceBeginCHROMIUM*>(cmd_data);
1868 Bucket* category_bucket = GetBucket(c.category_bucket_id);
1869 Bucket* name_bucket = GetBucket(c.name_bucket_id);
Jonathan Backerff2ac0f2018-07-18 15:53:471870 static constexpr size_t kMaxStrLen = 256;
1871 if (!category_bucket || category_bucket->size() == 0 ||
1872 category_bucket->size() > kMaxStrLen || !name_bucket ||
1873 name_bucket->size() == 0 || name_bucket->size() > kMaxStrLen) {
Jonathan Backere26739c2018-05-15 13:27:071874 return error::kInvalidArguments;
1875 }
1876
1877 std::string category_name;
1878 std::string trace_name;
1879 if (!category_bucket->GetAsString(&category_name) ||
1880 !name_bucket->GetAsString(&trace_name)) {
1881 return error::kInvalidArguments;
1882 }
1883
1884 debug_marker_manager_.PushGroup(trace_name);
Daniel Bratellaabc8d68e2018-07-18 19:20:261885 if (!gpu_tracer_->Begin(category_name, trace_name, gles2::kTraceCHROMIUM)) {
Jonathan Backere26739c2018-05-15 13:27:071886 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glTraceBeginCHROMIUM",
1887 "unable to create begin trace");
1888 return error::kNoError;
1889 }
1890 return error::kNoError;
1891}
1892
1893void RasterDecoderImpl::DoTraceEndCHROMIUM() {
1894 debug_marker_manager_.PopGroup();
Daniel Bratellaabc8d68e2018-07-18 19:20:261895 if (!gpu_tracer_->End(gles2::kTraceCHROMIUM)) {
Jonathan Backere26739c2018-05-15 13:27:071896 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glTraceEndCHROMIUM",
1897 "no trace begin found");
1898 return;
1899 }
1900}
1901
Khushal49836ab2018-07-25 02:08:451902error::Error RasterDecoderImpl::HandleSetActiveURLCHROMIUM(
1903 uint32_t immediate_data_size,
1904 const volatile void* cmd_data) {
1905 const volatile cmds::SetActiveURLCHROMIUM& c =
1906 *static_cast<const volatile cmds::SetActiveURLCHROMIUM*>(cmd_data);
1907 Bucket* url_bucket = GetBucket(c.url_bucket_id);
1908 static constexpr size_t kMaxStrLen = 1024;
1909 if (!url_bucket || url_bucket->size() == 0 ||
Khushalda1fb8332018-10-26 19:26:221910 url_bucket->size() > kMaxStrLen) {
Khushal49836ab2018-07-25 02:08:451911 return error::kInvalidArguments;
1912 }
1913
Khushalda1fb8332018-10-26 19:26:221914 size_t size = url_bucket->size();
Khushal49836ab2018-07-25 02:08:451915 const char* url_str = url_bucket->GetDataAs<const char*>(0, size);
1916 if (!url_str)
1917 return error::kInvalidArguments;
1918
Helmut Januschkab81e97dc2024-04-17 14:57:051919 GURL url(std::string_view(url_str, size));
Corentin Wallez0f412f02019-04-03 22:42:381920 client()->SetActiveURL(std::move(url));
Khushal49836ab2018-07-25 02:08:451921 return error::kNoError;
1922}
1923
Saifuddin Hitawala20790cb2023-01-31 17:01:121924void RasterDecoderImpl::DoCopySharedImageINTERNAL(
Antoine Labour9ddf6ac2019-01-17 01:59:391925 GLint xoffset,
1926 GLint yoffset,
1927 GLint x,
1928 GLint y,
Ilya Nikolaevskiy18fd2972025-09-09 17:19:461929 GLsizei src_width,
1930 GLsizei src_height,
1931 GLsizei dst_width,
1932 GLsizei dst_height,
Antoine Labour9ddf6ac2019-01-17 01:59:391933 const volatile GLbyte* mailboxes) {
Saifuddin Hitawalac457bbd2023-02-07 21:19:241934 CopySharedImageHelper helper(&shared_image_representation_factory_,
1935 shared_context_state_.get());
Vasiliy Telezhnikov1dbbecd32024-11-06 16:36:171936 auto result =
Ilya Nikolaevskiy18fd2972025-09-09 17:19:461937 helper.CopySharedImage(xoffset, yoffset, x, y, src_width, src_height,
1938 dst_width, dst_height, mailboxes);
Saifuddin Hitawalac457bbd2023-02-07 21:19:241939 if (!result.has_value()) {
1940 LOCAL_SET_GL_ERROR(result.error().gl_error,
1941 result.error().function_name.c_str(),
1942 result.error().msg.c_str());
Antoine Labourb9f903b2019-02-04 18:06:571943 }
Nathan Zabriskie67a4fbc2021-01-07 19:39:101944}
1945
Nathan Zabriskie31e4dc92020-04-15 23:10:271946void RasterDecoderImpl::DoWritePixelsINTERNAL(GLint x_offset,
1947 GLint y_offset,
1948 GLuint src_width,
1949 GLuint src_height,
1950 GLuint row_bytes,
1951 GLuint src_sk_color_type,
1952 GLuint src_sk_alpha_type,
1953 GLint shm_id,
1954 GLuint shm_offset,
1955 GLuint pixels_offset,
1956 const volatile GLbyte* mailbox) {
Justin Novosad129aa3ea42021-12-10 20:06:311957 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoWritePixelsINTERNAL");
Saifuddin Hitawalaa5126a702023-04-04 20:16:391958 if (src_sk_color_type < 0 || src_sk_color_type > kLastEnum_SkColorType) {
Nathan Zabriskie31e4dc92020-04-15 23:10:271959 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "WritePixels",
1960 "src_sk_color_type must be a valid SkColorType");
1961 return;
1962 }
Saifuddin Hitawalaa5126a702023-04-04 20:16:391963 if (src_sk_alpha_type < 0 || src_sk_alpha_type > kLastEnum_SkAlphaType) {
Nathan Zabriskie31e4dc92020-04-15 23:10:271964 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "WritePixels",
1965 "src_sk_alpha_type must be a valid SkAlphaType");
1966 return;
1967 }
1968
1969 Mailbox dest_mailbox = Mailbox::FromVolatile(
1970 *reinterpret_cast<const volatile Mailbox*>(mailbox));
1971 DLOG_IF(ERROR, !dest_mailbox.Verify())
1972 << "WritePixels was passed an invalid mailbox";
1973 auto dest_shared_image = shared_image_representation_factory_.ProduceSkia(
1974 dest_mailbox, shared_context_state_);
1975 if (!dest_shared_image) {
1976 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
1977 "Attempting to write to unknown mailbox.");
1978 return;
1979 }
1980
Saifuddin Hitawalaa5126a702023-04-04 20:16:391981 viz::SharedImageFormat dest_format = dest_shared_image->format();
Vasiliy Telezhnikov1470c972025-01-13 15:49:441982 if (SkColorTypeBytesPerPixel(viz::ToClosestSkColorType(dest_format)) !=
Nathan Zabriskie108c18282020-05-28 16:45:071983 SkColorTypeBytesPerPixel(static_cast<SkColorType>(src_sk_color_type))) {
1984 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
1985 "Bytes per pixel for src SkColorType and dst "
1986 "SkColorType must be the same.");
1987 return;
1988 }
1989
Nathan Zabriskie31e4dc92020-04-15 23:10:271990 // If present, the color space is serialized into shared memory before the
1991 // pixel data.
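  // A rough sketch of the shared-memory layout this command assumes, with
  // offsets relative to shm_offset (any padding is chosen by the client):
  //
  //   [0, pixels_offset)    serialized SkColorSpace (if pixels_offset > 0)
  //   [pixels_offset, ...)  pixel data, src_height rows of row_bytes each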
1992 sk_sp<SkColorSpace> color_space;
1993 if (pixels_offset > 0) {
Saifuddin Hitawalaa5126a702023-04-04 20:16:391994 // For multiplanar formats the write is done per plane, and the source
1995 // color space must be nullptr so that Skia assumes the sRGB color space.
1996 if (dest_format.is_multi_plane()) {
1997 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
1998 "Unexpected color space for multiplanar shared image");
1999 return;
2000 }
Nathan Zabriskie31e4dc92020-04-15 23:10:272001 void* color_space_bytes =
2002 GetSharedMemoryAs<void*>(shm_id, shm_offset, pixels_offset);
Nathan Zabriskiea0f955c32020-04-16 21:29:102003 if (!color_space_bytes) {
2004 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
2005 "Failed to retrieve serialized SkColorSpace.");
2006 return;
2007 }
2008
Nathan Zabriskie31e4dc92020-04-15 23:10:272009 color_space = SkColorSpace::Deserialize(color_space_bytes, pixels_offset);
2010 if (!color_space) {
2011 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
2012 "Failed to deserialize expected SkColorSpace");
2013 return;
2014 }
2015 }
2016
2017 SkImageInfo src_info = SkImageInfo::Make(
2018 src_width, src_height, static_cast<SkColorType>(src_sk_color_type),
2019 static_cast<SkAlphaType>(src_sk_alpha_type), std::move(color_space));
2020
Nathan Zabriskieee59b5e2020-06-10 06:15:472021 if (row_bytes < src_info.minRowBytes()) {
2022 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glWritePixels",
2023 "row_bytes must be >= "
2024 "SkImageInfo::minRowBytes() for source image.");
2025 return;
2026 }
2027
Jonah Chin67897ae62020-12-08 21:13:202028 size_t byte_size = src_info.computeByteSize(row_bytes);
2029 if (byte_size > UINT32_MAX) {
2030 LOCAL_SET_GL_ERROR(
2031 GL_INVALID_VALUE, "glWritePixels",
2032 "Cannot request a memory chunk larger than UINT32_MAX bytes");
2033 return;
2034 }
2035
2036 // The pixels are stored after the serialized SkColorSpace plus any padding.
2037 void* pixel_data =
2038 GetSharedMemoryAs<void*>(shm_id, shm_offset + pixels_offset, byte_size);
2039 if (!pixel_data) {
2040 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
2041 "Couldn't retrieve pixel data.");
2042 return;
2043 }
2044
2045 // Try a direct texture upload without using SkSurface.
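  // The fast path only applies to a full-size write at (0, 0) whose alpha
  // type matches the destination (or is unknown) and whose color space
  // matches; anything else falls through to the SkSurface canvas write below.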
Sunny Sachanandanid6e0b5f2023-06-14 13:30:372046 if (gfx::Size(src_width, src_height) == dest_shared_image->size() &&
Jonah Chin67897ae62020-12-08 21:13:202047 x_offset == 0 && y_offset == 0 &&
Justin Novosad4fb0c0e42022-02-08 16:10:272048 (src_info.alphaType() == dest_shared_image->alpha_type() ||
2049 src_info.alphaType() == kUnknown_SkAlphaType) &&
2050 SkColorSpace::Equals(
2051 src_info.colorSpace(),
2052 dest_shared_image->color_space().ToSkColorSpace().get()) &&
Saifuddin Hitawala8c8af1e2024-04-09 19:45:322053 DoWritePixelsINTERNALDirectTextureUpload(
2054 dest_shared_image.get(), src_info, pixel_data, row_bytes)) {
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072055 if (!dest_shared_image->IsCleared()) {
2056 dest_shared_image->SetClearedRect(
2057 gfx::Rect(src_info.width(), src_info.height()));
2058 }
Jonah Chin67897ae62020-12-08 21:13:202059 return;
2060 }
2061
Nathan Zabriskie31e4dc92020-04-15 23:10:272062 std::vector<GrBackendSemaphore> begin_semaphores;
2063 std::vector<GrBackendSemaphore> end_semaphores;
2064
2065 // Allow uncleared access, as we manually handle clear tracking.
Saifuddin Hitawaladaed5972022-07-20 22:01:012066 std::unique_ptr<SkiaImageRepresentation::ScopedWriteAccess>
Nathan Zabriskie31e4dc92020-04-15 23:10:272067 dest_scoped_access = dest_shared_image->BeginScopedWriteAccess(
2068 &begin_semaphores, &end_semaphores,
2069 SharedImageRepresentation::AllowUnclearedAccess::kYes);
2070 if (!dest_scoped_access) {
2071 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glWritePixels",
2072 "Dest shared image is not writable");
2073 return;
2074 }
2075
Saifuddin Hitawala8c8af1e2024-04-09 19:45:322076 auto* surface = dest_scoped_access->surface();
Saifuddin Hitawalaa5126a702023-04-04 20:16:392077 DCHECK(surface);
2078
Nathan Zabriskie31e4dc92020-04-15 23:10:272079 if (!begin_semaphores.empty()) {
Saifuddin Hitawalaa5126a702023-04-04 20:16:392080 bool result =
2081 surface->wait(begin_semaphores.size(), begin_semaphores.data(),
2082 /*deleteSemaphoresAfterWait=*/false);
Nathan Zabriskie31e4dc92020-04-15 23:10:272083 if (!result) {
2084 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
2085 "Unable to obtain write access to dest shared image.");
2086 return;
2087 }
2088 }
2089
Saifuddin Hitawalaa5126a702023-04-04 20:16:392090 auto* canvas = surface->getCanvas();
Nathan Zabriskie31e4dc92020-04-15 23:10:272091 bool written =
2092 canvas->writePixels(src_info, pixel_data, row_bytes, x_offset, y_offset);
2093 if (!written) {
2094 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixels",
2095 "Failed to write pixels to SkCanvas");
2096 }
2097
Sunny Sachanandania2094212024-03-20 21:50:552098 shared_context_state_->FlushWriteAccess(dest_scoped_access.get());
Saifuddin Hitawala70652782024-08-21 14:13:322099 shared_context_state_->SubmitIfNecessary(
2100 std::move(end_semaphores),
2101 dest_scoped_access->NeedGraphiteContextSubmit());
Vasiliy Telezhnikovf1562c4f2022-08-06 14:02:052102
Nathan Zabriskie31e4dc92020-04-15 23:10:272103 if (!dest_shared_image->IsCleared()) {
2104 dest_shared_image->SetClearedRect(
2105 gfx::Rect(x_offset, y_offset, src_width, src_height));
2106 }
2107}
2108
Saifuddin Hitawala9d8a52dd2023-06-26 20:21:372109void RasterDecoderImpl::DoWritePixelsYUVINTERNAL(
2110 GLuint src_width,
2111 GLuint src_height,
2112 GLuint src_row_bytes_plane1,
2113 GLuint src_row_bytes_plane2,
2114 GLuint src_row_bytes_plane3,
2115 GLuint src_row_bytes_plane4,
2116 GLuint src_yuv_plane_config,
2117 GLuint src_yuv_subsampling,
2118 GLuint src_yuv_datatype,
2119 GLint shm_id,
2120 GLuint shm_offset,
2121 GLuint plane2_offset,
2122 GLuint plane3_offset,
2123 GLuint plane4_offset,
2124 const volatile GLbyte* mailbox) {
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072125 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoWritePixelsYUVINTERNAL");
2126 if (src_yuv_plane_config < 0 ||
2127 src_yuv_plane_config > static_cast<int>(SkYUVAInfo::PlaneConfig::kLast)) {
2128 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "WritePixelsYUV",
2129 "src_yuv_plane_config must be a valid PlaneConfig");
2130 return;
2131 }
2132 if (src_yuv_subsampling < 0 ||
2133 src_yuv_subsampling > static_cast<int>(SkYUVAInfo::Subsampling::kLast)) {
2134 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "WritePixelsYUV",
2135 "src_yuv_subsampling must be a valid Subsampling");
2136 return;
2137 }
2138 if (src_yuv_datatype < 0 ||
2139 src_yuv_datatype > static_cast<int>(SkYUVAPixmapInfo::DataType::kLast)) {
2140 LOCAL_SET_GL_ERROR(
2141 GL_INVALID_ENUM, "WritePixelsYUV",
2142 "src_yuv_datatype must be a valid SkYUVAPixmapInfo::DataType");
2143 return;
2144 }
2145
2146 Mailbox dest_mailbox = Mailbox::FromVolatile(
2147 *reinterpret_cast<const volatile Mailbox*>(mailbox));
2148 DLOG_IF(ERROR, !dest_mailbox.Verify())
2149 << "WritePixelsYUV was passed an invalid mailbox";
2150 auto dest_shared_image = shared_image_representation_factory_.ProduceSkia(
2151 dest_mailbox, shared_context_state_);
2152 if (!dest_shared_image) {
2153 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2154 "Attempting to write to unknown mailbox.");
2155 return;
2156 }
2157
2158 SkYUVAInfo::PlaneConfig src_plane_config =
2159 static_cast<SkYUVAInfo::PlaneConfig>(src_yuv_plane_config);
2160 SkYUVAInfo::Subsampling src_subsampling =
2161 static_cast<SkYUVAInfo::Subsampling>(src_yuv_subsampling);
2162 SkYUVAPixmapInfo::DataType src_datatype =
2163 static_cast<SkYUVAPixmapInfo::DataType>(src_yuv_datatype);
2164 viz::SharedImageFormat dest_format = dest_shared_image->format();
2165 if (!dest_format.is_multi_plane()) {
2166 LOCAL_SET_GL_ERROR(
2167 GL_INVALID_OPERATION, "glWritePixelsYUV",
2168 "dest_format must be a valid multiplanar SharedImageFormat.");
2169 return;
2170 }
2171 if (src_plane_config != ToSkYUVAPlaneConfig(dest_format)) {
2172 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2173 "PlaneConfig mismatch between source texture format and "
2174 "the destination shared image format");
2175 return;
2176 }
2177 if (src_subsampling != ToSkYUVASubsampling(dest_format)) {
2178 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2179 "Subsampling mismatch between source texture format and "
2180 "the destination shared image format");
2181 return;
2182 }
2183 if (src_datatype != ToSkYUVADataType(dest_format)) {
2184 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2185 "ChannelFormat mismatch between source texture format "
2186 "and the destination shared image format");
2187 return;
2188 }
2189
2190 SkYUVAInfo yuv_info(SkISize::Make(src_width, src_height), src_plane_config,
2191 src_subsampling,
2192 SkYUVColorSpace::kIdentity_SkYUVColorSpace);
2193 if (yuv_info.numPlanes() != dest_format.NumberOfPlanes()) {
2194 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2195 "Planes mismatch between source texture format and the "
2196 "destination shared image format");
2197 return;
2198 }
2199
2200 if (gfx::Size(src_width, src_height) != dest_shared_image->size()) {
2201 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2202 "Unexpected size for multiplanar shared image");
2203 return;
2204 }
2205
Arthur Sonzogni6718b702025-01-09 10:49:102206 std::array<size_t, SkYUVAInfo::kMaxPlanes> row_bytes;
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072207 row_bytes[0] = src_row_bytes_plane1;
2208 row_bytes[1] = src_row_bytes_plane2;
2209 row_bytes[2] = src_row_bytes_plane3;
2210 row_bytes[3] = src_row_bytes_plane4;
2211
Arthur Sonzogni6718b702025-01-09 10:49:102212 std::array<size_t, SkYUVAInfo::kMaxPlanes> plane_offsets;
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072213 plane_offsets[0] = 0;
2214 plane_offsets[1] = plane2_offset;
2215 plane_offsets[2] = plane3_offset;
2216 plane_offsets[3] = plane4_offset;
2217
Saifuddin Hitawala67ecd0a2023-07-06 18:00:142218 std::array<SkPixmap, SkYUVAInfo::kMaxPlanes> pixmaps = {};
2219
Saifuddin Hitawala80dcaeb92024-01-08 18:38:072220 size_t prev_byte_size = 0;
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072221 for (int plane = 0; plane < yuv_info.numPlanes(); plane++) {
Vasiliy Telezhnikovf2408432024-12-12 14:59:382222 auto color_type = viz::ToClosestSkColorType(dest_format, plane);
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072223 auto plane_size =
2224 dest_format.GetPlaneSize(plane, gfx::Size(src_width, src_height));
2225 SkImageInfo src_info =
2226 SkImageInfo::Make(gfx::SizeToSkISize(plane_size), color_type,
2227 SkAlphaType::kPremul_SkAlphaType, nullptr);
2228
2229 if (row_bytes[plane] < src_info.minRowBytes()) {
2230 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glWritePixelsYUV",
2231 "row_bytes must be >= "
2232 "SkImageInfo::minRowBytes() for source image.");
2233 return;
2234 }
2235
2236 size_t byte_size = src_info.computeByteSize(row_bytes[plane]);
2237 if (byte_size > UINT32_MAX) {
2238 LOCAL_SET_GL_ERROR(
2239 GL_INVALID_VALUE, "glWritePixelsYUV",
2240 "Cannot request a memory chunk larger than UINT32_MAX bytes");
2241 return;
2242 }
2243 if (plane > 0 &&
Saifuddin Hitawala80dcaeb92024-01-08 18:38:072244 plane_offsets[plane] < plane_offsets[plane - 1] + prev_byte_size) {
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072245 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glWritePixelsYUV",
2246 "plane_offsets[plane] must be >= plane_offsets[plane "
Saifuddin Hitawala80dcaeb92024-01-08 18:38:072247 "- 1] + prev_byte_size");
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072248 return;
2249 }
2250
2251 // The pixel data for all planes is stored contiguously, one plane after
2252 // another, possibly with padding between planes.
2253 void* pixel_data = GetSharedMemoryAs<void*>(
2254 shm_id, shm_offset + plane_offsets[plane], byte_size);
2255 if (!pixel_data) {
2256 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2257 "Couldn't retrieve pixel data.");
2258 return;
2259 }
2260
Saifuddin Hitawala67ecd0a2023-07-06 18:00:142261 // Create an SkPixmap for the plane.
2262 pixmaps[plane] = SkPixmap(src_info, pixel_data, row_bytes[plane]);
Saifuddin Hitawala80dcaeb92024-01-08 18:38:072263 prev_byte_size = byte_size;
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072264 }
2265
Sunny Sachanandania2094212024-03-20 21:50:552266 std::vector<GrBackendSemaphore> begin_semaphores;
2267 std::vector<GrBackendSemaphore> end_semaphores;
2268
2269 // Allow uncleared access, as we manually handle clear tracking.
2270 std::unique_ptr<SkiaImageRepresentation::ScopedWriteAccess>
2271 dest_scoped_access = dest_shared_image->BeginScopedWriteAccess(
2272 &begin_semaphores, &end_semaphores,
2273 SharedImageRepresentation::AllowUnclearedAccess::kYes,
2274 /*use_sk_surface=*/false);
2275 if (!dest_scoped_access) {
2276 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glWritePixelsYUV",
2277 "Failed to begin scoped write access.");
2278 return;
2279 }
2280 if (!begin_semaphores.empty()) {
2281 // The Graphite SharedImage representation does not set semaphores.
2282 CHECK(gr_context());
2283 bool result =
2284 gr_context()->wait(begin_semaphores.size(), begin_semaphores.data(),
2285 /*deleteSemaphoresAfterWait=*/false);
2286 CHECK(result);
2287 }
2288
Saifuddin Hitawala67ecd0a2023-07-06 18:00:142289 // Try a direct texture upload without using SkSurface.
2290 CopySharedImageHelper helper(&shared_image_representation_factory_,
2291 shared_context_state_.get());
2292 auto helper_result = helper.WritePixelsYUV(
2293 src_width, src_height, pixmaps, std::move(end_semaphores),
2294 std::move(dest_shared_image), std::move(dest_scoped_access));
2295 if (!helper_result.has_value()) {
2296 LOCAL_SET_GL_ERROR(helper_result.error().gl_error,
2297 helper_result.error().function_name.c_str(),
2298 helper_result.error().msg.c_str());
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072299 }
Saifuddin Hitawala9d8a52dd2023-06-26 20:21:372300}
2301
Jonah Chin67897ae62020-12-08 21:13:202302bool RasterDecoderImpl::DoWritePixelsINTERNALDirectTextureUpload(
Saifuddin Hitawaladaed5972022-07-20 22:01:012303 SkiaImageRepresentation* dest_shared_image,
Jonah Chin67897ae62020-12-08 21:13:202304 const SkImageInfo& src_info,
2305 const void* pixel_data,
2306 size_t row_bytes) {
2307 std::vector<GrBackendSemaphore> begin_semaphores;
2308 std::vector<GrBackendSemaphore> end_semaphores;
2309
2310 // Allow uncleared access, as we manually handle clear tracking.
Saifuddin Hitawaladaed5972022-07-20 22:01:012311 std::unique_ptr<SkiaImageRepresentation::ScopedWriteAccess>
Jonah Chin67897ae62020-12-08 21:13:202312 dest_scoped_access = dest_shared_image->BeginScopedWriteAccess(
2313 &begin_semaphores, &end_semaphores,
2314 SharedImageRepresentation::AllowUnclearedAccess::kYes,
Saifuddin Hitawalac1eeaa9c2023-06-26 20:33:072315 /*use_sk_surface=*/false);
Jonah Chin67897ae62020-12-08 21:13:202316 if (!dest_scoped_access) {
2317 return false;
2318 }
2319 if (!begin_semaphores.empty()) {
Colin Blundelld7b7b472023-05-12 07:38:122320 // The Graphite SharedImage representation does not set semaphores.
2321 CHECK(gr_context());
2322 bool result =
2323 gr_context()->wait(begin_semaphores.size(), begin_semaphores.data(),
2324 /*deleteSemaphoresAfterWait=*/false);
Jonah Chin67897ae62020-12-08 21:13:202325 DCHECK(result);
2326 }
2327
2328 SkPixmap pixmap(src_info, pixel_data, row_bytes);
Colin Blundelld7b7b472023-05-12 07:38:122329 bool written = false;
2330 if (gr_context()) {
2331 written = gr_context()->updateBackendTexture(
Saifuddin Hitawala8c8af1e2024-04-09 19:45:322332 dest_scoped_access->promise_image_texture(/*plane_index=*/0)
Colin Blundelld7b7b472023-05-12 07:38:122333 ->backendTexture(),
Sunny Sachanandania2094212024-03-20 21:50:552334 &pixmap, /*numLevels=*/1, dest_shared_image->surface_origin(),
2335 /*finishedProc=*/nullptr, /*finishedContext=*/nullptr);
Colin Blundelld7b7b472023-05-12 07:38:122336 } else {
Maggie Chen5d35a0142025-04-17 18:35:222337 CHECK(graphite_shared_context());
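// Hand the Graphite texture holder to Skia as a raw pointer; |release_proc|
// drops the reference once Skia signals that it is done with the upload.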
Le Hoang Quyen9b068a82025-02-27 21:50:462338 auto graphite_texture_ref =
2339 dest_scoped_access->graphite_texture_holder(/*plane_index=*/0);
2340 auto* graphite_texture_ptr = graphite_texture_ref.release();
2341 using graphite_texture_ptr_type = decltype(graphite_texture_ptr);
2342 auto release_proc = [](void* context, skgpu::CallbackResult) {
2343 static_cast<graphite_texture_ptr_type>(context)->Release();
2344 };
Colin Blundelld7b7b472023-05-12 07:38:122345 written = graphite_recorder()->updateBackendTexture(
Le Hoang Quyen9b068a82025-02-27 21:50:462346 graphite_texture_ptr->texture(), &pixmap,
2347 /*numLevels=*/1, release_proc, graphite_texture_ptr);
Colin Blundelld7b7b472023-05-12 07:38:122348 }
Vasiliy Telezhnikov6414a252022-04-26 04:42:112349
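// Flush the write access and submit if needed, signaling the end-access
// semaphores so that other users of the shared image can proceed.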
Sunny Sachanandania2094212024-03-20 21:50:552350 shared_context_state_->FlushWriteAccess(dest_scoped_access.get());
Le Hoang Quyen9b068a82025-02-27 21:50:462351 shared_context_state_->SubmitIfNecessary(
2352 std::move(end_semaphores),
2353 dest_scoped_access->NeedGraphiteContextSubmit());
Sunny Sachanandania2094212024-03-20 21:50:552354
Jonah Chin67897ae62020-12-08 21:13:202355 return written;
2356}
2357
Nathan Zabriskief5f270e2021-04-30 20:53:062358void RasterDecoderImpl::DoReadbackARGBImagePixelsINTERNAL(
Jonah Chind9d7a732020-07-17 23:32:112359 GLint src_x,
2360 GLint src_y,
Saifuddin Hitawalaf32b28c2023-02-22 23:30:382361 GLint plane_index,
Jonah Chind9d7a732020-07-17 23:32:112362 GLuint dst_width,
2363 GLuint dst_height,
2364 GLuint row_bytes,
2365 GLuint dst_sk_color_type,
2366 GLuint dst_sk_alpha_type,
2367 GLint shm_id,
2368 GLuint shm_offset,
Nathan Zabriskie7febc8702021-04-29 18:52:402369 GLuint color_space_offset,
Jonah Chind9d7a732020-07-17 23:32:112370 GLuint pixels_offset,
2371 const volatile GLbyte* mailbox) {
Justin Novosad129aa3ea42021-12-10 20:06:312372 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoReadbackARGBImagePixelsINTERNAL");
Jonah Chind9d7a732020-07-17 23:32:112373 if (dst_sk_color_type > kLastEnum_SkColorType) {
2374 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "ReadbackImagePixels",
2375 "dst_sk_color_type must be a valid SkColorType");
2376 return;
2377 }
2378 if (dst_sk_alpha_type > kLastEnum_SkAlphaType) {
2379 LOCAL_SET_GL_ERROR(GL_INVALID_ENUM, "ReadbackImagePixels",
2380 "dst_sk_alpha_type must be a valid SkAlphaType");
2381 return;
2382 }
2383
2384 Mailbox source_mailbox = Mailbox::FromVolatile(
2385 *reinterpret_cast<const volatile Mailbox*>(mailbox));
2386 DLOG_IF(ERROR, !source_mailbox.Verify())
2387 << "ReadbackImagePixels was passed an invalid mailbox";
2388 auto source_shared_image = shared_image_representation_factory_.ProduceSkia(
2389 source_mailbox, shared_context_state_);
2390 if (!source_shared_image) {
2391 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2392 "Unknown mailbox");
2393 return;
2394 }
2395
Saifuddin Hitawala3db13d02023-02-23 13:27:192396 viz::SharedImageFormat source_format = source_shared_image->format();
2397
Nathan Zabriskie7febc8702021-04-29 18:52:402398 // If present, the color space is serialized into shared memory after the
2399 // result and before the pixel data.
2400 if (color_space_offset > pixels_offset) {
2401 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2402 "|pixels_offset| must be >= |color_space_offset|");
2403 return;
2404 }
2405 unsigned int color_space_size = pixels_offset - color_space_offset;
2406
Jonah Chind9d7a732020-07-17 23:32:112407 sk_sp<SkColorSpace> dst_color_space;
Nathan Zabriskie7febc8702021-04-29 18:52:402408 if (color_space_size) {
Saifuddin Hitawala3db13d02023-02-23 13:27:192409 // For multiplanar formats readback is per plane, and destination color
Saifuddin Hitawala6c8b8332023-03-03 18:21:302410 // space must be nullptr to avoid unexpected color conversions.
Saifuddin Hitawala3db13d02023-02-23 13:27:192411 if (source_format.is_multi_plane()) {
2412 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2413 "Unexpected color space for multiplanar shared image");
2414 return;
2415 }
Nathan Zabriskie7febc8702021-04-29 18:52:402416 void* color_space_bytes = GetSharedMemoryAs<void*>(
2417 shm_id, shm_offset + color_space_offset, color_space_size);
Jonah Chind9d7a732020-07-17 23:32:112418 if (!color_space_bytes) {
2419 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2420 "Failed to retrieve serialized SkColorSpace.");
2421 return;
2422 }
2423 dst_color_space =
Nathan Zabriskie7febc8702021-04-29 18:52:402424 SkColorSpace::Deserialize(color_space_bytes, color_space_size);
Jonah Chind9d7a732020-07-17 23:32:112425 if (!dst_color_space) {
2426 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2427 "Failed to deserialize expected SkColorSpace");
2428 return;
2429 }
2430 }
2431
2432 SkImageInfo dst_info = SkImageInfo::Make(
2433 dst_width, dst_height, static_cast<SkColorType>(dst_sk_color_type),
2434 static_cast<SkAlphaType>(dst_sk_alpha_type), std::move(dst_color_space));
2435
2436 if (row_bytes < dst_info.minRowBytes()) {
2437 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2438 "row_bytes be >= "
2439 "SkImageInfo::minRowBytes() for dest image.");
2440 return;
2441 }
2442
Jonah Chin76a780022020-08-13 20:45:262443 size_t byte_size = dst_info.computeByteSize(row_bytes);
2444 if (byte_size > UINT32_MAX) {
2445 LOCAL_SET_GL_ERROR(
2446 GL_INVALID_VALUE, "glReadbackImagePixels",
2447 "Cannot request a memory chunk larger than UINT32_MAX bytes");
2448 return;
2449 }
2450
Nathan Zabriskie7febc8702021-04-29 18:52:402451 void* pixel_address =
Jonah Chin76a780022020-08-13 20:45:262452 GetSharedMemoryAs<void*>(shm_id, shm_offset + pixels_offset, byte_size);
Nathan Zabriskie7febc8702021-04-29 18:52:402453 if (!pixel_address) {
Jonah Chind9d7a732020-07-17 23:32:112454 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
Nathan Zabriskie7febc8702021-04-29 18:52:402455 "Failed to retrieve memory for readPixels output");
Jonah Chind9d7a732020-07-17 23:32:112456 return;
2457 }
2458
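// The Result flag at the start of the shared memory block is only set to 1
// after the pixels have been successfully written.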
Nathan Zabriskief5f270e2021-04-30 20:53:062459 typedef cmds::ReadbackARGBImagePixelsINTERNALImmediate::Result Result;
Nathan Zabriskie7febc8702021-04-29 18:52:402460 Result* result =
2461 GetSharedMemoryAs<Result*>(shm_id, shm_offset, sizeof(Result));
2462 if (!result) {
2463 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2464 "Failed to retrieve memory for readPixels result");
2465 return;
Jonah Chinf16050a2021-02-24 03:27:442466 }
2467
Saifuddin Hitawala3db13d02023-02-23 13:27:192468 if (!source_format.IsValidPlaneIndex(plane_index)) {
2469 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2470 "Invalid plane_index");
2471 return;
2472 }
2473
Vasiliy Telezhnikov37588062024-01-12 13:58:372474 // Readback is potentially slow, so report progress here.
2475 gl::ScopedProgressReporter report_progress(
2476 shared_context_state_->progress_reporter());
2477
Saifuddin Hitawala6c8b8332023-03-03 18:21:302478 CopySharedImageHelper helper(&shared_image_representation_factory_,
2479 shared_context_state_.get());
2480 auto helper_result =
2481 helper.ReadPixels(src_x, src_y, plane_index, row_bytes, dst_info,
2482 pixel_address, std::move(source_shared_image));
2483 if (!helper_result.has_value()) {
2484 LOCAL_SET_GL_ERROR(helper_result.error().gl_error,
2485 helper_result.error().function_name.c_str(),
2486 helper_result.error().msg.c_str());
Saifuddin Hitawala3db13d02023-02-23 13:27:192487 } else {
Saifuddin Hitawala6c8b8332023-03-03 18:21:302488 *result = 1;
Saifuddin Hitawala3db13d02023-02-23 13:27:192489 }
Jonah Chind9d7a732020-07-17 23:32:112490}
2491
Nathan Zabriskief5f270e2021-04-30 20:53:062492namespace {
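// Context for Skia's asynchronous YUV readback callback. It lives on the
// caller's stack, so the callback must run before
// DoReadbackYUVImagePixelsINTERNAL returns; |finished| lets the caller verify
// that it did.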
2493struct YUVReadbackResult {
2494 std::unique_ptr<const SkImage::AsyncReadResult> async_result;
Vasiliy Telezhnikov081df1e2023-01-16 15:41:342495 bool finished = false;
Nathan Zabriskief5f270e2021-04-30 20:53:062496};
2497
2498void OnReadYUVImagePixelsDone(
2499 void* raw_ctx,
2500 std::unique_ptr<const SkImage::AsyncReadResult> async_result) {
2501 YUVReadbackResult* context = reinterpret_cast<YUVReadbackResult*>(raw_ctx);
2502 context->async_result = std::move(async_result);
Vasiliy Telezhnikov081df1e2023-01-16 15:41:342503 context->finished = true;
Nathan Zabriskief5f270e2021-04-30 20:53:062504}
2505} // namespace
2506
2507void RasterDecoderImpl::DoReadbackYUVImagePixelsINTERNAL(
2508 GLuint dst_width,
2509 GLuint dst_height,
2510 GLint shm_id,
2511 GLuint shm_offset,
2512 GLuint y_offset,
2513 GLuint y_stride,
2514 GLuint u_offset,
2515 GLuint u_stride,
2516 GLuint v_offset,
2517 GLuint v_stride,
2518 const volatile GLbyte* mailbox) {
Justin Novosad129aa3ea42021-12-10 20:06:312519 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoReadbackYUVImagePixelsINTERNAL");
Nathan Zabriskief5f270e2021-04-30 20:53:062520 if (dst_width % 2 != 0 || dst_height % 2 != 0) {
2521 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2522 "|dst_width| and |dst_height| must be divisible by 2");
2523 return;
2524 }
2525
Vasiliy Telezhnikovf3f0ffd2021-06-22 14:19:182526 if (y_stride < dst_width) {
2527 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2528 "|y_stride| must be >= the width of the y plane.");
2529 return;
2530 }
2531
2532 if (u_stride < ((dst_width + 1) / 2)) {
2533 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2534 "|u_stride| must be >= the width of the u plane.");
2535 return;
2536 }
2537 if (v_stride < ((dst_width + 1) / 2)) {
2538 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2539 "|v_stride| must be >= the width of the u plane.");
2540 return;
2541 }
2542
Nathan Zabriskief5f270e2021-04-30 20:53:062543 Mailbox source_mailbox = Mailbox::FromVolatile(
2544 *reinterpret_cast<const volatile Mailbox*>(mailbox));
2545 DLOG_IF(ERROR, !source_mailbox.Verify())
2546 << "ReadbackImagePixels was passed an invalid mailbox";
2547 auto source_shared_image = shared_image_representation_factory_.ProduceSkia(
2548 source_mailbox, shared_context_state_);
2549 if (!source_shared_image) {
2550 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2551 "Unknown mailbox");
2552 return;
2553 }
2554
Nathan Zabriskief5f270e2021-04-30 20:53:062555 auto* result = GetSharedMemoryAs<
2556 cmds::ReadbackARGBImagePixelsINTERNALImmediate::Result*>(
2557 shm_id, shm_offset,
2558 sizeof(cmds::ReadbackARGBImagePixelsINTERNALImmediate::Result));
2559 if (!result) {
2560 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2561 "Failed to retrieve memory for readPixels result");
2562 return;
2563 }
2564
Nathan Zabriskie49128932021-07-08 20:00:312565 // Large plane strides or heights could potentially overflow the unsigned int
2566 // parameters of GetSharedMemoryAs() below. We use base::CheckedNumeric to
2567 // prevent using any values that overflowed, which could cause us to request
2568 // incorrect shared memory regions.
2569 base::CheckedNumeric<unsigned int> checked_shm_offset(shm_offset);
2570 base::CheckedNumeric<unsigned int> checked_dst_height(dst_height);
2571
2572 base::CheckedNumeric<unsigned int> y_size = checked_dst_height * y_stride;
2573 base::CheckedNumeric<unsigned int> y_plane_offset =
2574 checked_shm_offset + y_offset;
2575 if (!y_size.IsValid() || !y_plane_offset.IsValid()) {
2576 LOCAL_SET_GL_ERROR(
2577 GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2578 "y plane size or offset too large. Both must fit in unsigned int.");
2579 return;
2580 }
2581
2582 // |y_plane_offset| and |y_size| are guaranteed valid by the checks above, so
2583 // the ValueOrDie() calls below won't crash. Same for the u and v planes.
2584 uint8_t* y_out = GetSharedMemoryAs<uint8_t*>(
2585 shm_id, y_plane_offset.ValueOrDie(), y_size.ValueOrDie());
Nathan Zabriskief5f270e2021-04-30 20:53:062586 if (!y_out) {
2587 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2588 "Failed to get memory for y plane output");
2589 return;
2590 }
2591
Nathan Zabriskie49128932021-07-08 20:00:312592 base::CheckedNumeric<unsigned int> checked_uv_plane_height =
2593 (checked_dst_height + 1) / 2;
2594
2595 base::CheckedNumeric<unsigned int> u_size =
2596 checked_uv_plane_height * u_stride;
2597 base::CheckedNumeric<unsigned int> u_plane_offset =
2598 checked_shm_offset + u_offset;
2599 if (!u_size.IsValid() || !u_plane_offset.IsValid()) {
2600 LOCAL_SET_GL_ERROR(
2601 GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2602 "u plane size or offset too large. Both must fit in unsigned int.");
2603 return;
2604 }
2605 uint8_t* u_out = GetSharedMemoryAs<uint8_t*>(
2606 shm_id, u_plane_offset.ValueOrDie(), u_size.ValueOrDie());
Nathan Zabriskief5f270e2021-04-30 20:53:062607 if (!u_out) {
2608 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2609 "Failed to get memory for u plane output");
2610 return;
2611 }
2612
Nathan Zabriskie49128932021-07-08 20:00:312613 base::CheckedNumeric<unsigned int> v_size =
2614 checked_uv_plane_height * v_stride;
2615 base::CheckedNumeric<unsigned int> v_plane_offset =
2616 checked_shm_offset + v_offset;
2617 if (!v_size.IsValid() || !v_plane_offset.IsValid()) {
2618 LOCAL_SET_GL_ERROR(
2619 GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2620 "v plane size or offset too large. Both must fit in unsigned int.");
2621 return;
2622 }
2623 uint8_t* v_out = GetSharedMemoryAs<uint8_t*>(
2624 shm_id, v_plane_offset.ValueOrDie(), v_size.ValueOrDie());
Nathan Zabriskief5f270e2021-04-30 20:53:062625 if (!v_out) {
2626 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2627 "Failed to get memory for v plane output");
2628 return;
2629 }
2630
Sunny Sachanandania2094212024-03-20 21:50:552631 std::vector<GrBackendSemaphore> begin_semaphores;
2632 std::vector<GrBackendSemaphore> end_semaphores;
2633
2634 // We don't use |end_semaphores| here because we're going to sync with the
2635 // CPU later regardless.
2636 std::unique_ptr<SkiaImageRepresentation::ScopedReadAccess>
2637 source_scoped_access = source_shared_image->BeginScopedReadAccess(
2638 &begin_semaphores, &end_semaphores);
2639
2640 if (!begin_semaphores.empty()) {
2641 CHECK(gr_context());
2642 bool wait_result =
2643 gr_context()->wait(begin_semaphores.size(), begin_semaphores.data(),
2644 /*deleteSemaphoresAfterWait=*/false);
2645 DCHECK(wait_result);
2646 }
2647
2648 if (!source_scoped_access) {
2649 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glReadbackImagePixels",
2650 "Source shared image is not accessible");
2651 return;
2652 }
2653
2654 auto sk_image =
2655 source_scoped_access->CreateSkImage(shared_context_state_.get());
2656 if (!sk_image) {
2657 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackImagePixels",
2658 "Couldn't create SkImage for reading.");
2659 // Perform ApplyBackendSurfaceEndState() on the ScopedReadAccess before
2660 // exiting.
2661 source_scoped_access->ApplyBackendSurfaceEndState();
Saifuddin Hitawala70652782024-08-21 14:13:322662 shared_context_state_->SubmitIfNecessary(
2663 std::move(end_semaphores),
2664 source_scoped_access->NeedGraphiteContextSubmit());
Sunny Sachanandania2094212024-03-20 21:50:552665 return;
2666 }
2667
Sunny Sachanandanibbeca842023-06-30 08:07:592668 const SkIRect src_rect = SkIRect::MakeSize(sk_image->dimensions());
2669 const SkISize dst_size = SkISize::Make(dst_width, dst_height);
Nathan Zabriskief5f270e2021-04-30 20:53:062670
Vasiliy Telezhnikov37588062024-01-12 13:58:372671 // Readback is potentially slow, so report progress here.
2672 gl::ScopedProgressReporter report_progress(
2673 shared_context_state_->progress_reporter());
2674
Sunny Sachanandanibbeca842023-06-30 08:07:592675 // While this function indicates it's asynchronous, the DoFinish() call below
2676 // ensures it completes synchronously.
Nathan Zabriskief5f270e2021-04-30 20:53:062677 YUVReadbackResult yuv_result;
Maggie Chen5d35a0142025-04-17 18:35:222678 if (graphite_shared_context()) {
Sunny Sachanandani1a163cf2023-07-05 22:00:292679 // SkImage/SkSurface asyncRescaleAndReadPixels methods won't be implemented
2680 // for Graphite. Instead the equivalent methods will be on Graphite Context.
Maggie Chen5d35a0142025-04-17 18:35:222681 graphite_shared_context()->asyncRescaleAndReadPixelsYUV420(
Sunny Sachanandanibbeca842023-06-30 08:07:592682 sk_image.get(), kJPEG_Full_SkYUVColorSpace, SkColorSpace::MakeSRGB(),
2683 src_rect, dst_size, SkImage::RescaleGamma::kSrc,
Maggie Chenfb6a98682025-05-12 19:59:502684 SkImage::RescaleMode::kRepeatedLinear,
2685 base::BindOnce(&OnReadYUVImagePixelsDone), &yuv_result);
Sunny Sachanandanibbeca842023-06-30 08:07:592686 } else {
Sunny Sachanandania2094212024-03-20 21:50:552687 CHECK(gr_context());
Sunny Sachanandanibbeca842023-06-30 08:07:592688 sk_image->asyncRescaleAndReadPixelsYUV420(
2689 kJPEG_Full_SkYUVColorSpace, SkColorSpace::MakeSRGB(), src_rect,
2690 dst_size, SkImage::RescaleGamma::kSrc,
2691 SkImage::RescaleMode::kRepeatedLinear, &OnReadYUVImagePixelsDone,
2692 &yuv_result);
Sunny Sachanandania2094212024-03-20 21:50:552693 source_scoped_access->ApplyBackendSurfaceEndState();
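// Signal the end-access semaphores as part of this flush so that other users
// of the shared image are not left waiting on them.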
2694 if (!end_semaphores.empty()) {
2695 GrFlushInfo flush_info = {
2696 .fNumSemaphores = end_semaphores.size(),
2697 .fSignalSemaphores = end_semaphores.data(),
2698 };
2699 AddVulkanCleanupTaskForSkiaFlush(
2700 shared_context_state_->vk_context_provider(), &flush_info);
2701 gr_context()->flush(flush_info);
2702 }
Vasiliy Telezhnikov6414a252022-04-26 04:42:112703 }
2704
Alison Gale47d1537d2024-04-19 21:31:462705 // TODO(crbug.com/40106956): Use COMMANDS_COMPLETED query for async readback.
Saifuddin Hitawalad9027662023-05-12 14:50:102706 DoFinish();
Vasiliy Telezhnikov081df1e2023-01-16 15:41:342707
2708 // The call above syncs the GPU and CPU, so the callback should have run
Sunny Sachanandanibbeca842023-06-30 08:07:592709 // during DoFinish(). To prevent a use-after-free, make sure it actually did.
Vasiliy Telezhnikov081df1e2023-01-16 15:41:342710 CHECK(yuv_result.finished);
Nathan Zabriskief5f270e2021-04-30 20:53:062711 if (!yuv_result.async_result) {
2712 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glReadbackYUVImagePixels",
2713 "Failed to read pixels from SkImage");
2714 return;
2715 }
2716
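// Copy the Y, U and V planes from Skia's async readback result into the
// client-provided shared memory, using the client-supplied strides.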
2717 auto& async_result = yuv_result.async_result;
2718 libyuv::I420Copy(static_cast<const uint8_t*>(async_result->data(0)),
2719 async_result->rowBytes(0),
2720 static_cast<const uint8_t*>(async_result->data(1)),
2721 async_result->rowBytes(1),
2722 static_cast<const uint8_t*>(async_result->data(2)),
2723 async_result->rowBytes(2), y_out, y_stride, u_out, u_stride,
2724 v_out, v_stride, dst_width, dst_height);
2725
2726 *result = 1;
2727}
2728
Jonah Chin053aa6c2021-01-22 20:26:142729void RasterDecoderImpl::DoLoseContextCHROMIUM(GLenum current, GLenum other) {
2730 MarkContextLost(gles2::GetContextLostReasonFromResetStatus(current));
2731}
2732
Nathan Zabriskie366b8932020-04-28 01:07:132733namespace {
Jonathan Backere19973e82018-04-18 20:08:092734
2735// Helper to read client data from transfer cache.
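// Entries are keyed on the owning raster decoder's id so that entry ids from
// different decoders sharing one ServiceTransferCache do not collide.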
Jonathan Backer4f9ee5fb2018-04-25 14:03:382736class TransferCacheDeserializeHelperImpl final
Jonathan Backere19973e82018-04-18 20:08:092737 : public cc::TransferCacheDeserializeHelper {
2738 public:
2739 explicit TransferCacheDeserializeHelperImpl(
Khushal996e9912018-07-13 08:31:002740 int raster_decoder_id,
Jonathan Backere19973e82018-04-18 20:08:092741 ServiceTransferCache* transfer_cache)
Khushal996e9912018-07-13 08:31:002742 : raster_decoder_id_(raster_decoder_id), transfer_cache_(transfer_cache) {
Jonathan Backere19973e82018-04-18 20:08:092743 DCHECK(transfer_cache_);
2744 }
Peter BostrÃļmdbacdc22021-09-23 22:11:462745
2746 TransferCacheDeserializeHelperImpl(
2747 const TransferCacheDeserializeHelperImpl&) = delete;
2748 TransferCacheDeserializeHelperImpl& operator=(
2749 const TransferCacheDeserializeHelperImpl&) = delete;
2750
Jonathan Backer4f9ee5fb2018-04-25 14:03:382751 ~TransferCacheDeserializeHelperImpl() override = default;
Jonathan Backere19973e82018-04-18 20:08:092752
Adrienne Walker90b79a22018-05-08 21:40:422753 void CreateLocalEntry(
2754 uint32_t id,
2755 std::unique_ptr<cc::ServiceTransferCacheEntry> entry) override {
Khushal996e9912018-07-13 08:31:002756 auto type = entry->Type();
2757 transfer_cache_->CreateLocalEntry(
2758 ServiceTransferCache::EntryKey(raster_decoder_id_, type, id),
2759 std::move(entry));
Adrienne Walker90b79a22018-05-08 21:40:422760 }
2761
Jonathan Backere19973e82018-04-18 20:08:092762 private:
2763 cc::ServiceTransferCacheEntry* GetEntryInternal(
2764 cc::TransferCacheEntryType entry_type,
Jonathan Backer4f9ee5fb2018-04-25 14:03:382765 uint32_t entry_id) override {
Khushal996e9912018-07-13 08:31:002766 return transfer_cache_->GetEntry(ServiceTransferCache::EntryKey(
2767 raster_decoder_id_, entry_type, entry_id));
Jonathan Backere19973e82018-04-18 20:08:092768 }
Khushal996e9912018-07-13 08:31:002769
2770 const int raster_decoder_id_;
Keishi Hattori0e45c022021-11-27 09:25:522771 const raw_ptr<ServiceTransferCache> transfer_cache_;
Jonathan Backere19973e82018-04-18 20:08:092772};
2773
2774} // namespace
2775
Khushal33205a72018-11-08 10:12:292776void RasterDecoderImpl::DeletePaintCachePathsINTERNALHelper(
2777 GLsizei n,
2778 const volatile GLuint* paint_cache_ids) {
kylecharee7338172022-02-02 16:59:102779 if (!use_gpu_raster_) {
Khushal33205a72018-11-08 10:12:292780 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION,
2781 "glDeletePaintCacheEntriesINTERNAL",
2782 "No chromium raster support");
2783 return;
2784 }
2785
Khushalcb653fb2018-11-15 08:56:282786 paint_cache_->Purge(cc::PaintCacheDataType::kPath, n, paint_cache_ids);
Khushal33205a72018-11-08 10:12:292787}
2788
William Liu88a5533872025-03-18 19:11:472789void RasterDecoderImpl::DeletePaintCacheEffectsINTERNALHelper(
2790 GLsizei n,
2791 const volatile GLuint* paint_cache_ids) {
2792 if (!use_gpu_raster_) {
2793 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION,
2794 "glDeletePaintCacheEntriesINTERNAL",
2795 "No chromium raster support");
2796 return;
2797 }
2798 paint_cache_->Purge(cc::PaintCacheDataType::kSkRuntimeEffect, n,
2799 paint_cache_ids);
2800}
2801
Khushal33205a72018-11-08 10:12:292802void RasterDecoderImpl::DoClearPaintCacheINTERNAL() {
kylecharee7338172022-02-02 16:59:102803 if (!use_gpu_raster_) {
Khushal33205a72018-11-08 10:12:292804 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glClearPaintCacheINTERNAL",
2805 "No chromium raster support");
2806 return;
2807 }
2808
2809 paint_cache_->PurgeAll();
2810}
2811
Aaron Krajeskid7d51a52022-05-25 13:29:092812void RasterDecoderImpl::DoBeginRasterCHROMIUM(GLfloat r,
2813 GLfloat g,
2814 GLfloat b,
2815 GLfloat a,
Sunny Sachanandanib461e212021-03-05 19:22:122816 GLboolean needs_clear,
Nathan Zabriskie31e4dc92020-04-15 23:10:272817 GLuint msaa_sample_count,
Justin Novosad7cc290af2021-07-20 17:13:072818 MsaaMode msaa_mode,
Nathan Zabriskie31e4dc92020-04-15 23:10:272819 GLboolean can_use_lcd_text,
Peng Huang5ff70dff2022-03-03 19:55:112820 GLboolean visible,
Christopher Cameron060d382e72023-10-04 23:12:422821 GLfloat hdr_headroom,
Nathan Zabriskie31e4dc92020-04-15 23:10:272822 const volatile GLbyte* key) {
Vikas Soni9db200c2023-11-20 19:26:202823 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoBeginRasterCHROMIUM");
Christopher Cameronbeb5f272019-02-04 22:49:132824 // Workaround for https://crbug.com/906453: Flush before BeginRaster (the
2825 // commands between BeginRaster and EndRaster will not flush).
2826 FlushToWorkAroundMacCrashes();
2827
kylecharee7338172022-02-02 16:59:102828 if (!use_gpu_raster_) {
Jonathan Backere19973e82018-04-18 20:08:092829 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginRasterCHROMIUM",
kylecharee7338172022-02-02 16:59:102830 "No chromium raster support");
Jonathan Backere19973e82018-04-18 20:08:092831 return;
2832 }
2833 if (sk_surface_) {
2834 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginRasterCHROMIUM",
2835 "BeginRasterCHROMIUM without EndRasterCHROMIUM");
2836 return;
2837 }
2838
Jonathan Backer7e7492522018-07-20 00:23:552839 Mailbox mailbox =
2840 Mailbox::FromVolatile(*reinterpret_cast<const volatile Mailbox*>(key));
2841 DLOG_IF(ERROR, !mailbox.Verify()) << "BeginRasterCHROMIUM was "
2842 "passed a mailbox that was not "
2843 "generated by ProduceTextureCHROMIUM.";
Eric Karl3750cfa2018-10-25 16:54:172844
2845 DCHECK(!shared_image_);
Peng Huangb447da42021-09-22 19:21:262846 DCHECK(!shared_image_raster_);
2847
2848 SharedImageRepresentation* shared_image = nullptr;
2849 if (is_raw_draw_enabled_) {
2850 shared_image_raster_ =
2851 shared_image_representation_factory_.ProduceRaster(mailbox);
2852 shared_image = shared_image_raster_.get();
2853 }
2854
2855 if (!shared_image) {
2856 shared_image_ = shared_image_representation_factory_.ProduceSkia(
2857 mailbox, shared_context_state_.get());
2858 shared_image = shared_image_.get();
2859 }
2860
2861 if (!shared_image) {
Jonathan Backer7e7492522018-07-20 00:23:552862 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glBeginRasterCHROMIUM",
2863 "passed invalid mailbox.");
2864 return;
2865 }
2866
Peng Huangb447da42021-09-22 19:21:262867 if (!needs_clear && !shared_image->IsCleared()) {
Sunny Sachanandanibb3dfad2021-06-04 19:04:222868 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginRasterCHROMIUM",
2869 "SharedImage not cleared before use.");
Peng Huangb447da42021-09-22 19:21:262870 shared_image_raster_.reset();
Sunny Sachanandani8f8b4eae2021-06-07 19:14:162871 shared_image_.reset();
Sunny Sachanandanibb3dfad2021-06-04 19:04:222872 return;
2873 }
2874
Khushal22204a42018-05-17 23:06:212875 DCHECK(locked_handles_.empty());
Jonathan Backere19973e82018-04-18 20:08:092876 DCHECK(!raster_canvas_);
Jonathan Backere19973e82018-04-18 20:08:092877
Vasiliy Telezhnikov1470c972025-01-13 15:49:442878 SkColorType sk_color_type = viz::ToClosestSkColorType(shared_image->format());
Justin Novosad7cc290af2021-07-20 17:13:072879
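// Map the requested MSAA mode onto a Skia sample count and surface flags;
// dynamic MSAA is expressed as a sample count of 1 plus kDynamicMSAA_Flag.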
2880 int final_msaa_count;
2881 uint32_t flags;
2882 switch (msaa_mode) {
2883 default:
2884 case kNoMSAA:
2885 final_msaa_count = 0;
2886 flags = 0;
Justin Novosad7cc290af2021-07-20 17:13:072887 break;
2888 case kMSAA:
Colin Blundelld7b7b472023-05-12 07:38:122889 // Graphite operates as in the kDMSAA case below.
Maggie Chen5d35a0142025-04-17 18:35:222890 if (graphite_shared_context()) {
Colin Blundelld7b7b472023-05-12 07:38:122891 final_msaa_count = 1;
2892 flags = SkSurfaceProps::kDynamicMSAA_Flag;
2893 break;
2894 }
2895
Justin Novosad7cc290af2021-07-20 17:13:072896 // If we can't match requested MSAA samples, don't use MSAA.
2897 final_msaa_count = std::max(static_cast<int>(msaa_sample_count), 0);
Colin Blundelld7b7b472023-05-12 07:38:122898 if (gr_context() &&
2899 final_msaa_count >
2900 gr_context()->maxSurfaceSampleCountForColorType(sk_color_type)) {
Justin Novosad7cc290af2021-07-20 17:13:072901 final_msaa_count = 0;
Colin Blundelld7b7b472023-05-12 07:38:122902 }
Justin Novosad7cc290af2021-07-20 17:13:072903 flags = 0;
Justin Novosad7cc290af2021-07-20 17:13:072904 break;
2905 case kDMSAA:
2906 final_msaa_count = 1;
2907 flags = SkSurfaceProps::kDynamicMSAA_Flag;
Justin Novosad7cc290af2021-07-20 17:13:072908 break;
2909 }
2910
Jonathan Backere19973e82018-04-18 20:08:092911 // Use unknown pixel geometry to disable LCD text.
2912 SkSurfaceProps surface_props(flags, kUnknown_SkPixelGeometry);
2913 if (can_use_lcd_text) {
Ian Preste598eece2020-10-19 23:31:022914 surface_props = skia::LegacyDisplayGlobals::GetSkSurfaceProps(flags);
Jonathan Backere19973e82018-04-18 20:08:092915 }
2916
Aaron Krajeskid7d51a52022-05-25 13:29:092917 SkColor4f sk_color_4f = {r, g, b, a};
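// Raw draw path: paint ops are recorded into the shared image's paint op
// buffer for later playback instead of being rasterized into an SkSurface
// here.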
Peng Huangb447da42021-09-22 19:21:262918 if (shared_image_raster_) {
Arthur Sonzogni59ac8222023-11-10 09:46:542919 std::optional<SkColor4f> clear_color;
Peng Huangb447da42021-09-22 19:21:262920 if (needs_clear)
Aaron Krajeskid7d51a52022-05-25 13:29:092921 clear_color.emplace(sk_color_4f);
Peng Huangb447da42021-09-22 19:21:262922 scoped_shared_image_raster_write_ =
2923 shared_image_raster_->BeginScopedWriteAccess(
Peng Huang5ff70dff2022-03-03 19:55:112924 shared_context_state_, final_msaa_count, surface_props, clear_color,
2925 visible);
Peng Huangb447da42021-09-22 19:21:262926 if (!scoped_shared_image_raster_write_) {
2927 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginRasterCHROMIUM",
2928 "failed to create surface");
2929 shared_image_raster_.reset();
2930 return;
2931 }
2932
2933 if (needs_clear)
2934 shared_image_raster_->SetCleared();
2935
2936 return;
2937 }
2938
Peng Huang5f9118b02019-04-24 20:18:212939 std::vector<GrBackendSemaphore> begin_semaphores;
2940 DCHECK(end_semaphores_.empty());
Peng Huange9b41cd2019-08-12 19:39:472941 DCHECK(!scoped_shared_image_write_);
Nathan Zabriskie31e4dc92020-04-15 23:10:272942 // Allow uncleared access, as raster specifically handles uncleared images
2943 // by clearing them before writing.
Eric Karl707c1b92019-12-10 06:50:082944 scoped_shared_image_write_ = shared_image_->BeginScopedWriteAccess(
Eric Karl14bfb992020-01-03 01:32:212945 final_msaa_count, surface_props, &begin_semaphores, &end_semaphores_,
2946 SharedImageRepresentation::AllowUnclearedAccess::kYes);
Eric Karl707c1b92019-12-10 06:50:082947 if (!scoped_shared_image_write_) {
2948 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glBeginRasterCHROMIUM",
2949 "failed to create surface");
2950 shared_image_.reset();
2951 return;
2952 }
2953
Peng Huange9b41cd2019-08-12 19:39:472954 sk_surface_ = scoped_shared_image_write_->surface();
Christopher Cameron578e3dd2023-10-06 09:34:072955 sk_surface_hdr_headroom_ = hdr_headroom;
Peng Huang5f9118b02019-04-24 20:18:212956
2957 if (!begin_semaphores.empty()) {
2958 bool result =
Peng Huang1c6d0582020-07-31 12:14:112959 sk_surface_->wait(begin_semaphores.size(), begin_semaphores.data(),
2960 /*deleteSemaphoresAfterWait=*/false);
Peng Huang5f9118b02019-04-24 20:18:212961 DCHECK(result);
2962 }
2963
Peng Huang38a6280642024-04-16 23:56:552964 if (no_draw_canvas_) {
2965 no_draw_canvas_->resetCanvas(sk_surface_->width(), sk_surface_->height());
2966 raster_canvas_ = no_draw_canvas_.get();
2967 } else {
2968 raster_canvas_ = sk_surface_->getCanvas();
2969 }
Peng Huang050dee52018-09-05 15:54:082970
Nathan Zabriskieffc210692020-07-09 07:36:472971 paint_op_shared_image_provider_ = std::make_unique<SharedImageProviderImpl>(
2972 &shared_image_representation_factory_, shared_context_state_, sk_surface_,
Vasiliy Telezhnikov9837f902022-08-17 15:03:072973 &end_semaphores_, error_state_.get());
Nathan Zabriskieffc210692020-07-09 07:36:472974
Jonathan Backere19973e82018-04-18 20:08:092975 // All-or-nothing clearing, as there is no way to validate the client's input
Sunny Sachanandanib461e212021-03-05 19:22:122976 // on what the "used" part of the texture is. A separate |needs_clear| flag is
2977 // needed because clear tracking on the shared image cannot serve this purpose
2978 // with passthrough decoder shared images, which are always considered cleared.
2979 //
2980 // TODO(enne): This doesn't handle the case where the background color changes
2981 // and so any extra pixels outside the raster area that get sampled may be
2982 // incorrect.
2983 if (needs_clear) {
Aaron Krajeskid7d51a52022-05-25 13:29:092984 raster_canvas_->drawColor(sk_color_4f, SkBlendMode::kSrc);
Sunny Sachanandanib461e212021-03-05 19:22:122985 shared_image_->SetCleared();
2986 }
2987 DCHECK(shared_image_->IsCleared());
Jonathan Backere19973e82018-04-18 20:08:092988}
2989
Khushala8d50642018-05-03 01:29:062990scoped_refptr<Buffer> RasterDecoderImpl::GetShmBuffer(uint32_t shm_id) {
2991 return GetSharedMemoryBuffer(shm_id);
2992}
2993
Khushal39641b92019-06-03 21:32:542994void RasterDecoderImpl::ReportProgress() {
2995 if (shared_context_state_->progress_reporter())
2996 shared_context_state_->progress_reporter()->ReportProgress();
2997}
2998
Scott Violet39a6a822024-08-09 23:44:002999error::Error RasterDecoderImpl::DoRasterCHROMIUM(GLuint raster_shm_id,
3000 GLuint raster_shm_offset,
3001 GLuint raster_shm_size,
3002 GLuint font_shm_id,
3003 GLuint font_shm_offset,
3004 GLuint font_shm_size) {
Khushalf9750702018-06-09 00:42:133005 TRACE_EVENT1("gpu", "RasterDecoderImpl::DoRasterCHROMIUM", "raster_id",
3006 ++raster_chromium_id_);
Khushala8d50642018-05-03 01:29:063007
kylecharee7338172022-02-02 16:59:103008 if (!use_gpu_raster_) {
3009 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glRasterCHROMIUM",
3010 "No chromium raster support");
Scott Violet39a6a822024-08-09 23:44:003011 return error::kNoError;
kylecharee7338172022-02-02 16:59:103012 }
3013
Peng Huangb447da42021-09-22 19:21:263014 if (!sk_surface_ && !scoped_shared_image_raster_write_) {
Jonathan Backere19973e82018-04-18 20:08:093015 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glRasterCHROMIUM",
3016 "RasterCHROMIUM without BeginRasterCHROMIUM");
Scott Violet39a6a822024-08-09 23:44:003017 return error::kNoError;
Jonathan Backere19973e82018-04-18 20:08:093018 }
Khushal996e9912018-07-13 08:31:003019 DCHECK(transfer_cache());
Jonathan Backere19973e82018-04-18 20:08:093020
Khushala8d50642018-05-03 01:29:063021 char* paint_buffer_memory = GetSharedMemoryAs<char*>(
3022 raster_shm_id, raster_shm_offset, raster_shm_size);
3023 if (!paint_buffer_memory) {
3024 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glRasterCHROMIUM",
3025 "Can not read paint buffer.");
Scott Violet39a6a822024-08-09 23:44:003026 return error::kNoError;
Khushala8d50642018-05-03 01:29:063027 }
3028
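// PaintOp deserialization requires the serialized buffer to be 16-byte
// aligned.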
Peng Huanga8bda892022-05-06 17:15:593029 if (paint_buffer_memory != base::bits::AlignUp(paint_buffer_memory, 16u)) {
3030 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glRasterCHROMIUM",
3031 "Buffer is not aligned with 16 bytes.");
Scott Violet39a6a822024-08-09 23:44:003032 return error::kNoError;
Peng Huanga8bda892022-05-06 17:15:593033 }
3034
Aaron Krajeskid55f49002020-12-09 17:27:513035 cc::PlaybackParams playback_params(nullptr, SkM44());
Christopher Cameron9c177af2025-07-02 11:50:463036 playback_params.destination_hdr_headroom = sk_surface_hdr_headroom_;
Khushal996e9912018-07-13 08:31:003037 TransferCacheDeserializeHelperImpl impl(raster_decoder_id_, transfer_cache());
Xianzhu Wang65210752024-03-25 18:57:323038 cc::PaintOp::DeserializeOptions options{
3039 .transfer_cache = &impl,
3040 .paint_cache = paint_cache_.get(),
3041 .strike_client = font_manager_->strike_client(),
3042 .scratch_buffer =
3043 *shared_context_state_->scratch_deserialization_buffer(),
3044 .crash_dump_on_failure = !gpu_preferences_.disable_oopr_debug_crash_dump,
3045 .is_privileged = is_privileged_,
Xianzhu Wang65210752024-03-25 18:57:323046 .shared_image_provider = paint_op_shared_image_provider_.get()};
Jonathan Backere19973e82018-04-18 20:08:093047
Xianzhu Wanga0711d72023-02-08 22:54:263048 alignas(cc::PaintOpBuffer::kPaintOpAlign) char
3049 data[cc::kLargestPaintOpAlignedSize];
Peng Huangb447da42021-09-22 19:21:263050
Khushala8d50642018-05-03 01:29:063051 size_t paint_buffer_size = raster_shm_size;
Adrienne Walkerd8a56462019-03-16 03:11:473052 gl::ScopedProgressReporter report_progress(
Khushal61977dfc2019-02-22 02:24:293053 shared_context_state_->progress_reporter());
Scott Violet39a6a822024-08-09 23:44:003054
3055 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoRasterCHROMIUM::Deserializing");
3056
3057 if (scoped_shared_image_raster_write_) {
3058 DCHECK(!deferred_raster_paint_buffer_offset_.has_value());
3059 auto* paint_op_buffer =
3060 scoped_shared_image_raster_write_->paint_op_buffer();
3061 paint_op_buffer->Deserialize(paint_buffer_memory, raster_shm_size, options);
3062 return error::kNoError;
3063 }
3064
3065 if (deferred_raster_paint_buffer_offset_.has_value()) {
3066 paint_buffer_size -= *deferred_raster_paint_buffer_offset_;
3067 paint_buffer_memory += *deferred_raster_paint_buffer_offset_;
3068 deferred_raster_paint_buffer_offset_.reset();
3069 } else {
3070 if (font_shm_size > 0) {
3071 // Deserialize fonts before raster.
3072 volatile uint8_t* font_buffer_memory = GetSharedMemoryAs<uint8_t*>(
3073 font_shm_id, font_shm_offset, font_shm_size);
3074 if (!font_buffer_memory) {
3075 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glRasterCHROMIUM",
3076 "Can not read font buffer.");
3077 return error::kNoError;
3078 }
3079
3080 std::vector<SkDiscardableHandleId> new_locked_handles;
3081 if (!font_manager_->Deserialize(font_buffer_memory, font_shm_size,
3082 &new_locked_handles)) {
3083 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glRasterCHROMIUM",
3084 "Invalid font buffer.");
3085 return error::kNoError;
3086 }
3087 locked_handles_.insert(locked_handles_.end(), new_locked_handles.begin(),
3088 new_locked_handles.end());
3089 }
3090 }
3091
3092 size_t processed_commands = 0;
3093
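// Deserialize and raster one paint op at a time so that long batches can
// yield part-way through and resume later via
// |deferred_raster_paint_buffer_offset_|.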
Khushala4156862018-06-08 22:46:023094 while (paint_buffer_size > 0) {
Jonathan Backere19973e82018-04-18 20:08:093095 size_t skip = 0;
Xianzhu Wanga0711d72023-02-08 22:54:263096 cc::PaintOp* deserialized_op =
3097 cc::PaintOp::Deserialize(paint_buffer_memory, paint_buffer_size, data,
3098 std::size(data), &skip, options);
Jonathan Backere19973e82018-04-18 20:08:093099 if (!deserialized_op) {
Adrienne Walkere62484a2018-11-27 18:41:303100 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glRasterCHROMIUM",
3101 "RasterCHROMIUM: serialization failure");
Scott Violet39a6a822024-08-09 23:44:003102 return error::kNoError;
Jonathan Backere19973e82018-04-18 20:08:093103 }
3104
Mike Reed421f75d2019-02-25 20:30:033105 deserialized_op->Raster(raster_canvas_, playback_params);
Christopher Camerond5698f682018-12-11 00:59:203106 deserialized_op->DestroyThis();
3107
Christopher Camerond5698f682018-12-11 00:59:203108 paint_buffer_size -= skip;
3109 paint_buffer_memory += skip;
Scott Violet39a6a822024-08-09 23:44:003110 processed_commands++;
3111
3112 if (check_for_yield_op_count_.has_value() &&
3113 processed_commands % check_for_yield_op_count_.value() == 0 &&
3114 paint_buffer_size && client()->ShouldYield()) {
3115 // The client asked us to yield; pause the command batch so it can resume later.
3116 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoRasterCHROMIUM::Yield");
3117 deferred_raster_paint_buffer_offset_ =
3118 raster_shm_size - paint_buffer_size;
3119 return error::kDeferCommandUntilLater;
3120 }
Jonathan Backere19973e82018-04-18 20:08:093121 }
Scott Violet39a6a822024-08-09 23:44:003122
3123 return error::kNoError;
3124}
3125
3126error::Error RasterDecoderImpl::HandleRasterCHROMIUM(
3127 uint32_t immediate_data_size,
3128 const volatile void* cmd_data) {
3129 const volatile raster::cmds::RasterCHROMIUM& c =
3130 *static_cast<const volatile raster::cmds::RasterCHROMIUM*>(cmd_data);
3131 GLuint raster_shm_id = static_cast<GLuint>(c.raster_shm_id);
3132 GLuint raster_shm_offset = static_cast<GLuint>(c.raster_shm_offset);
3133 GLuint raster_shm_size = static_cast<GLuint>(c.raster_shm_size);
3134 GLuint font_shm_id = static_cast<GLuint>(c.font_shm_id);
3135 GLuint font_shm_offset = static_cast<GLuint>(c.font_shm_offset);
3136 GLuint font_shm_size = static_cast<GLuint>(c.font_shm_size);
3137 return DoRasterCHROMIUM(raster_shm_id, raster_shm_offset, raster_shm_size,
3138 font_shm_id, font_shm_offset, font_shm_size);
Jonathan Backere19973e82018-04-18 20:08:093139}
3140
3141void RasterDecoderImpl::DoEndRasterCHROMIUM() {
Peng Huang050dee52018-09-05 15:54:083142 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoEndRasterCHROMIUM");
Peng Huangb447da42021-09-22 19:21:263143 if (!sk_surface_ && !scoped_shared_image_raster_write_) {
Jonathan Backera1f3d7c2018-10-16 14:46:323144 LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "glEndRasterCHROMIUM",
Jonathan Backere19973e82018-04-18 20:08:093145 "EndRasterCHROMIUM without BeginRasterCHROMIUM");
3146 return;
3147 }
3148
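// Raw draw path: keep the font handles locked until the write access callback
// runs, since the recorded paint ops may still need the glyph data when they
// are played back later.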
Peng Huangb447da42021-09-22 19:21:263149 if (scoped_shared_image_raster_write_) {
3150 scoped_shared_image_raster_write_->set_callback(base::BindOnce(
3151 [](scoped_refptr<ServiceFontManager> font_manager,
3152 std::vector<SkDiscardableHandleId> handles) {
Peng Huang410ae6b22022-04-15 03:31:013153 font_manager->Unlock(handles);
Peng Huangb447da42021-09-22 19:21:263154 },
3155 font_manager_, std::move(locked_handles_)));
3156 scoped_shared_image_raster_write_.reset();
3157 shared_image_raster_.reset();
3158 locked_handles_.clear();
3159 return;
3160 }
3161
Mike Reed421f75d2019-02-25 20:30:033162 raster_canvas_ = nullptr;
Peng Huang050dee52018-09-05 15:54:083163
Khushal61977dfc2019-02-22 02:24:293164 {
Peng Huangd9608432019-07-29 15:56:363165 TRACE_EVENT0("gpu", "RasterDecoderImpl::DoEndRasterCHROMIUM::Flush");
Khushal61977dfc2019-02-22 02:24:293166 // This is a slow operation since skia will execute the GPU work for the
3167 // complete tile. Make sure the progress reporter is notified to avoid
3168 // hangs.
Adrienne Walkerd8a56462019-03-16 03:11:473169 gl::ScopedProgressReporter report_progress(
Khushal61977dfc2019-02-22 02:24:293170 shared_context_state_->progress_reporter());
Vasiliy Telezhnikov6414a252022-04-26 04:42:113171
3172 // scoped_shared_image_write_ can be nullptr if sk_surface_ was set by
3173 // SetUpForRasterCHROMIUMForTest.
3174 if (scoped_shared_image_write_) {
Sunny Sachanandania2094212024-03-20 21:50:553175 shared_context_state_->FlushWriteAccess(scoped_shared_image_write_.get());
Vasiliy Telezhnikovf1562c4f2022-08-06 14:02:053176 // Flushing the surface records the Vulkan command buffer with any image
3177 // layout transitions that are necessary. Transitioning the layouts back to
3178 // their desired state must happen after that.
Vasiliy Telezhnikov6414a252022-04-26 04:42:113179 paint_op_shared_image_provider_->ApplyEndAccessState();
Saifuddin Hitawala70652782024-08-21 14:13:323180 bool need_graphite_submit =
3181 paint_op_shared_image_provider_->NeedGraphiteContextSubmit() ||
3182 scoped_shared_image_write_->NeedGraphiteContextSubmit();
3183 shared_context_state_->SubmitIfNecessary(std::move(end_semaphores_),
3184 need_graphite_submit);
Vasiliy Telezhnikov6414a252022-04-26 04:42:113185 } else {
3186 DCHECK(end_semaphores_.empty());
3187 }
Khushal61977dfc2019-02-22 02:24:293188 }
3189
Kramer Geddcac3532019-12-12 23:33:103190 shared_context_state_->UpdateSkiaOwnedMemorySize();
Peng Huange9b41cd2019-08-12 19:39:473191 sk_surface_ = nullptr;
Peng Huangb63db212020-06-01 14:37:393192 scoped_shared_image_write_.reset();
3193 shared_image_.reset();
Nathan Zabriskieffc210692020-07-09 07:36:473194 paint_op_shared_image_provider_.reset();
3195
Peng Huangb63db212020-06-01 14:37:393196 // Test only path for SetUpForRasterCHROMIUMForTest.
3197 sk_surface_for_testing_.reset();
Khushal22204a42018-05-17 23:06:213198
3199 // Unlock all font handles. This needs to be deferred until
Greg Daniel18362e9d2019-04-15 19:54:503200 // SkSurface::flush since that flushes batched Gr operations
Peng Huangc6a76072018-11-27 23:17:313201 // in skia that access the glyph data.
Khushal3d0b8902018-09-18 03:03:343202 if (!font_manager_->Unlock(locked_handles_)) {
Khushal22204a42018-05-17 23:06:213203 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glRasterCHROMIUM",
3204 "Invalid font discardable handle.");
3205 }
3206 locked_handles_.clear();
Khushal15b6abb2018-06-28 00:16:253207
Victor Miura6bb4ab5c2023-05-10 21:35:123208 // We just unlocked a number of handles. Ask Skia to attempt to purge any
3209 // unlocked handles from its font cache.
3210 SkGraphics::PurgePinnedFontCache();
3211
Khushal15b6abb2018-06-28 00:16:253212 // We just flushed a tile's worth of GPU work from the SkSurface in the
Khushal Sagarb3f53292019-06-05 04:24:233213 // flush above. Yield to the scheduler to allow pre-emption before
3214 // processing more commands.
3215 ExitCommandProcessingEarly();
Jonathan Backere19973e82018-04-18 20:08:093216}
3217
Jonathan Backer0cd1c4322018-04-17 16:57:103218void RasterDecoderImpl::DoCreateTransferCacheEntryINTERNAL(
3219 GLuint raw_entry_type,
3220 GLuint entry_id,
3221 GLuint handle_shm_id,
3222 GLuint handle_shm_offset,
3223 GLuint data_shm_id,
3224 GLuint data_shm_offset,
3225 GLuint data_size) {
kylecharee7338172022-02-02 16:59:103226 if (!use_gpu_raster_) {
Jonathan Backer0cd1c4322018-04-17 16:57:103227 LOCAL_SET_GL_ERROR(
3228 GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3229 "Attempt to use OOP transfer cache on a context without OOP raster.");
3230 return;
3231 }
Saifuddin Hitawalad9027662023-05-12 14:50:103232 CHECK(gr_context() || graphite_recorder());
3233 CHECK(transfer_cache());
Jonathan Backer0cd1c4322018-04-17 16:57:103234
3235 // Validate the type we are about to create.
3236 cc::TransferCacheEntryType entry_type;
3237 if (!cc::ServiceTransferCacheEntry::SafeConvertToType(raw_entry_type,
3238 &entry_type)) {
Peng Huangc6a76072018-11-27 23:17:313239 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3240 "Attempt to use OOP transfer cache with an invalid "
3241 "cache entry type.");
Jonathan Backer0cd1c4322018-04-17 16:57:103242 return;
3243 }
3244
Khushalc11f57b2020-03-20 16:56:153245 if (entry_type == cc::TransferCacheEntryType::kSkottie && !is_privileged_) {
3246 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3247 "Attempt to use skottie on a non privileged channel");
3248 return;
3249 }
3250
Jonathan Backer0cd1c4322018-04-17 16:57:103251 uint8_t* data_memory =
3252 GetSharedMemoryAs<uint8_t*>(data_shm_id, data_shm_offset, data_size);
3253 if (!data_memory) {
3254 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3255 "Can not read transfer cache entry data.");
3256 return;
3257 }
3258
3259 scoped_refptr<Buffer> handle_buffer = GetSharedMemoryBuffer(handle_shm_id);
3260 if (!DiscardableHandleBase::ValidateParameters(handle_buffer.get(),
3261 handle_shm_offset)) {
3262 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3263 "Invalid shm for discardable handle.");
3264 return;
3265 }
3266 ServiceDiscardableHandle handle(std::move(handle_buffer), handle_shm_offset,
3267 handle_shm_id);
3268
Peng Huangc6a76072018-11-27 23:17:313269 // If the entry is going to use skia during deserialization, make sure we
3270 // mark the context state dirty.
Sunny Sachanandani5fcc5722023-05-09 00:50:363271 bool use_gpu = cc::ServiceTransferCacheEntry::UsesGpuContext(entry_type);
Khushal996e9912018-07-13 08:31:003272 if (!transfer_cache()->CreateLockedEntry(
3273 ServiceTransferCache::EntryKey(raster_decoder_id_, entry_type,
3274 entry_id),
Sunny Sachanandani5fcc5722023-05-09 00:50:363275 handle, use_gpu ? gr_context() : nullptr,
3276 use_gpu ? graphite_recorder() : nullptr,
Peter Kasting24c7c992024-12-02 19:35:313277 base::span(data_memory, data_size))) {
Jonathan Backer0cd1c4322018-04-17 16:57:103278 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glCreateTransferCacheEntryINTERNAL",
3279 "Failure to deserialize transfer cache entry.");
3280 return;
3281 }
Khushal Sagarb3f53292019-06-05 04:24:233282
3283 // The only entries that use the GrContext are image transfer cache entries
3284 // for image uploads. Since this tends to be a slow operation, yield to allow
3285 // the decoder to be pre-empted.
Sunny Sachanandani5fcc5722023-05-09 00:50:363286 if (use_gpu) {
Khushal Sagarb3f53292019-06-05 04:24:233287 ExitCommandProcessingEarly();
Sunny Sachanandani5fcc5722023-05-09 00:50:363288 }
Jonathan Backer0cd1c4322018-04-17 16:57:103289}
3290
3291void RasterDecoderImpl::DoUnlockTransferCacheEntryINTERNAL(
3292 GLuint raw_entry_type,
3293 GLuint entry_id) {
kylecharee7338172022-02-02 16:59:103294 if (!use_gpu_raster_) {
Jonathan Backer0cd1c4322018-04-17 16:57:103295 LOCAL_SET_GL_ERROR(
3296 GL_INVALID_VALUE, "glUnlockTransferCacheEntryINTERNAL",
3297 "Attempt to use OOP transfer cache on a context without OOP raster.");
3298 return;
3299 }
Khushal996e9912018-07-13 08:31:003300 DCHECK(transfer_cache());
Jonathan Backer0cd1c4322018-04-17 16:57:103301 cc::TransferCacheEntryType entry_type;
3302 if (!cc::ServiceTransferCacheEntry::SafeConvertToType(raw_entry_type,
3303 &entry_type)) {
Peng Huangc6a76072018-11-27 23:17:313304 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glUnlockTransferCacheEntryINTERNAL",
3305 "Attempt to use OOP transfer cache with an invalid "
3306 "cache entry type.");
Jonathan Backer0cd1c4322018-04-17 16:57:103307 return;
3308 }
3309
Khushal996e9912018-07-13 08:31:003310 if (!transfer_cache()->UnlockEntry(ServiceTransferCache::EntryKey(
3311 raster_decoder_id_, entry_type, entry_id))) {
Jonathan Backer0cd1c4322018-04-17 16:57:103312 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glUnlockTransferCacheEntryINTERNAL",
3313 "Attempt to unlock an invalid ID");
3314 }
3315}
3316
3317void RasterDecoderImpl::DoDeleteTransferCacheEntryINTERNAL(
3318 GLuint raw_entry_type,
3319 GLuint entry_id) {
kylecharee7338172022-02-02 16:59:103320 if (!use_gpu_raster_) {
Jonathan Backer0cd1c4322018-04-17 16:57:103321 LOCAL_SET_GL_ERROR(
3322 GL_INVALID_VALUE, "glDeleteTransferCacheEntryINTERNAL",
3323 "Attempt to use OOP transfer cache on a context without OOP raster.");
3324 return;
3325 }
Khushal996e9912018-07-13 08:31:003326 DCHECK(transfer_cache());
Jonathan Backer0cd1c4322018-04-17 16:57:103327 cc::TransferCacheEntryType entry_type;
3328 if (!cc::ServiceTransferCacheEntry::SafeConvertToType(raw_entry_type,
3329 &entry_type)) {
Peng Huangc6a76072018-11-27 23:17:313330 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glDeleteTransferCacheEntryINTERNAL",
3331 "Attempt to use OOP transfer cache with an invalid "
3332 "cache entry type.");
Jonathan Backer0cd1c4322018-04-17 16:57:103333 return;
3334 }
3335
Khushal996e9912018-07-13 08:31:003336 if (!transfer_cache()->DeleteEntry(ServiceTransferCache::EntryKey(
3337 raster_decoder_id_, entry_type, entry_id))) {
Jonathan Backer0cd1c4322018-04-17 16:57:103338 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "glDeleteTransferCacheEntryINTERNAL",
3339 "Attempt to delete an invalid ID");
3340 }
3341}
3342
Jonathan Backer10821a82018-04-04 23:56:033343void RasterDecoderImpl::RestoreStateForAttrib(GLuint attrib_index,
3344 bool restore_array_binding) {
Peng Huang20361dad12019-01-23 14:48:103345 shared_context_state_->PessimisticallyResetGrContext();
Jonathan Backer10821a82018-04-04 23:56:033346}
3347
Jonathan Backer7471b2782018-01-25 18:14:193348// Include the auto-generated part of this file. We split this because it means
3349// we can easily edit the non-auto generated parts right here in this file
3350// instead of having to edit some template or the code generator.
Jonathan Backera4568da12018-01-31 16:25:043351#include "gpu/command_buffer/service/raster_decoder_autogen.h"
Jonathan Backer7471b2782018-01-25 18:14:193352
Jonathan Backer7f90dfb662017-12-18 16:52:043353} // namespace raster
3354} // namespace gpu