/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

7 #include "MediaData.h"
9 #include "ImageContainer.h"
10 #include "MediaInfo.h"
11 #include "MediaResult.h"
12 #include "PerformanceRecorder.h"
13 #include "VideoUtils.h"
14 #include "YCbCrUtils.h"
15 #include "mozilla/gfx/gfxVars.h"
16 #include "mozilla/layers/ImageBridgeChild.h"
17 #include "mozilla/layers/KnowsCompositor.h"
18 #include "mozilla/layers/SharedRGBImage.h"
20 #include <stdint.h>
#ifdef XP_WIN
#  include "mozilla/gfx/DeviceManagerDx.h"
#  include "mozilla/layers/D3D11ShareHandleImage.h"
#  include "mozilla/layers/D3D11YCbCrImage.h"
#elif XP_MACOSX
#  include "MacIOSurfaceImage.h"
#  include "mozilla/gfx/gfxVars.h"
#endif

namespace mozilla {

using namespace mozilla::gfx;
using layers::PlanarYCbCrData;
using layers::PlanarYCbCrImage;
using media::TimeUnit;

const char* AudioData::sTypeName = "audio";
const char* VideoData::sTypeName = "video";

AudioData::AudioData(int64_t aOffset, const media::TimeUnit& aTime,
                     AlignedAudioBuffer&& aData, uint32_t aChannels,
                     uint32_t aRate, uint32_t aChannelMap)
    // Passing TimeUnit::Zero() here because we can't pass the result of an
    // arithmetic operation to the CheckedInt ctor. We set the duration in the
    // ctor body below.
    : MediaData(sType, aOffset, aTime, TimeUnit::Zero()),
      mChannels(aChannels),
      mChannelMap(aChannelMap),
      mRate(aRate),
      mOriginalTime(aTime),
      mAudioData(std::move(aData)),
      mFrames(mAudioData.Length() / aChannels) {
  MOZ_RELEASE_ASSERT(aChannels != 0,
                     "Can't create an AudioData with 0 channels.");
  MOZ_RELEASE_ASSERT(aRate != 0,
                     "Can't create an AudioData with a sample-rate of 0.");
  mDuration = TimeUnit(mFrames, aRate);
}

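// Returns a view over the playable samples. GetAdjustedData() already accounts
// for any trim window set via SetTrimWindow(), so the span starts at
// mDataOffset and covers mFrames * mChannels interleaved values.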
Span<AudioDataValue> AudioData::Data() const {
  return Span{GetAdjustedData(), mFrames * mChannels};
}

nsCString AudioData::ToString() const {
  nsCString rv;
  rv.AppendPrintf("AudioData: %s %s %" PRIu32 " frames %" PRIu32 "Hz, %" PRIu32
                  "ch",
                  mTime.ToString().get(), mDuration.ToString().get(), mFrames,
                  mRate, mChannels);
  return rv;
}

void AudioData::SetOriginalStartTime(const media::TimeUnit& aStartTime) {
  MOZ_ASSERT(mTime == mOriginalTime,
             "Do not call this if data has been trimmed!");
  mTime = aStartTime;
  mOriginalTime = aStartTime;
}

bool AudioData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mOriginalTime -= aStartTime;
  mTime -= aStartTime;
  if (mTrimWindow) {
    *mTrimWindow -= aStartTime;
  }
  if (mTime.IsNegative()) {
    NS_WARNING("Negative audio start time after time-adjustment!");
  }
  return mTime.IsValid() && mOriginalTime.IsValid();
}

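// Restricts the playable portion of this AudioData to aTrim without copying
// any samples: the window is recorded in mTrimWindow and applied by adjusting
// mDataOffset, mFrames, mTime and mDuration. Fails once MoveableData() has
// been called, since the buffer is then gone.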
bool AudioData::SetTrimWindow(const media::TimeInterval& aTrim) {
  MOZ_DIAGNOSTIC_ASSERT(aTrim.mStart.IsValid() && aTrim.mEnd.IsValid(),
                        "An overflow occurred on the provided TimeInterval");
  if (!mAudioData) {
    // MoveableData got called. Can no longer work on it.
    return false;
  }
  if (aTrim.mStart < mOriginalTime || aTrim.mEnd > GetEndTime()) {
    return false;
  }

  auto trimBefore = aTrim.mStart - mOriginalTime;
  auto trimAfter = aTrim.mEnd - mOriginalTime;
  if (!trimBefore.IsValid() || !trimAfter.IsValid()) {
    // Overflow.
    return false;
  }
  if (!mTrimWindow && trimBefore.IsZero() && trimAfter == mDuration) {
    // Nothing to change, abort early to prevent rounding errors.
    return true;
  }

  size_t frameOffset = trimBefore.ToTicksAtRate(mRate);
  mTrimWindow = Some(aTrim);
  mDataOffset = frameOffset * mChannels;
  MOZ_DIAGNOSTIC_ASSERT(mDataOffset <= mAudioData.Length(),
                        "Data offset outside original buffer");
  int64_t frameCountAfterTrim = (trimAfter - trimBefore).ToTicksAtRate(mRate);
  if (frameCountAfterTrim >
      AssertedCast<int64_t>(mAudioData.Length() / mChannels)) {
    // Accept a rounding error caused by an imprecise time_base in the
    // container: it can produce this kind of mismatch, but no other kind of
    // unexpected frame count is acceptable.
    MOZ_RELEASE_ASSERT(!trimBefore.IsBase(mRate));
    mFrames = 0;
  } else {
    mFrames = frameCountAfterTrim;
  }
  mTime = mOriginalTime + trimBefore;
  mDuration = TimeUnit(mFrames, mRate);

  return true;
}

AudioDataValue* AudioData::GetAdjustedData() const {
  if (!mAudioData) {
    return nullptr;
  }
  return mAudioData.Data() + mDataOffset;
}

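// Lazily allocates mAudioBuffer as a SharedBuffer and copies the (possibly
// trimmed) samples into it, transposing from interleaved (frame-major) order
// into planar (channel-major) order.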
void AudioData::EnsureAudioBuffer() {
  if (mAudioBuffer || !mAudioData) {
    return;
  }
  const AudioDataValue* srcData = GetAdjustedData();
  CheckedInt<size_t> bufferSize(sizeof(AudioDataValue));
  bufferSize *= mFrames;
  bufferSize *= mChannels;
  mAudioBuffer = SharedBuffer::Create(bufferSize);

  AudioDataValue* destData = static_cast<AudioDataValue*>(mAudioBuffer->Data());
  for (uint32_t i = 0; i < mFrames; ++i) {
    for (uint32_t j = 0; j < mChannels; ++j) {
      destData[j * mFrames + i] = srcData[i * mChannels + j];
    }
  }
}

size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size =
      aMallocSizeOf(this) + mAudioData.SizeOfExcludingThis(aMallocSizeOf);
  if (mAudioBuffer) {
    size += mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf);
  }
  return size;
}

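// Transfers ownership of the sample buffer to the caller, applying any trim
// window first. This AudioData is left without data afterwards, so
// SetTrimWindow() and EnsureAudioBuffer() can no longer operate on it.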
AlignedAudioBuffer AudioData::MoveableData() {
  // Trim buffer according to trimming mask.
  mAudioData.PopFront(mDataOffset);
  mAudioData.SetLength(mFrames * mChannels);
  mDataOffset = 0;
  mFrames = 0;
  mTrimWindow.reset();
  return std::move(mAudioData);
}

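// Sanity checks for decoder output: reject planes or picture rects whose
// dimensions are implausible or would let a copy read past the buffer.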
static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane) {
  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
}

static MediaResult ValidateBufferAndPicture(
    const VideoData::YCbCrBuffer& aBuffer, const IntRect& aPicture) {
  // The following situation should never happen unless there is a bug
  // in the decoder.
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    return MediaResult(NS_ERROR_INVALID_ARG,
                       "Chroma planes with different sizes");
  }

  // The following situations could be triggered by invalid input.
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    return MediaResult(NS_ERROR_INVALID_ARG, "Empty picture rect");
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) ||
      !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    return MediaResult(NS_ERROR_INVALID_ARG, "Invalid plane size");
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight) {
    // The specified picture dimensions can't be contained inside the video
    // frame; we'll stomp memory if we try to copy it. Fail.
    return MediaResult(NS_ERROR_INVALID_ARG, "Overflowing picture rect");
  }
  return MediaResult(NS_OK);
}

VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
                     const TimeUnit& aDuration, bool aKeyframe,
                     const TimeUnit& aTimecode, IntSize aDisplay,
                     layers::ImageContainer::FrameID aFrameID)
    : MediaData(Type::VIDEO_DATA, aOffset, aTime, aDuration),
      mDisplay(aDisplay),
      mFrameID(aFrameID),
      mSentToCompositor(false),
      mNextKeyFrameTime(TimeUnit::Invalid()) {
  MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
  mKeyframe = aKeyframe;
  mTimecode = aTimecode;
}

VideoData::~VideoData() = default;

size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);

  // Currently only PLANAR_YCBCR has a well defined function for determining
  // its size, so reporting is limited to that type.
  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    const mozilla::layers::PlanarYCbCrImage* img =
        static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    size += img->SizeOfIncludingThis(aMallocSizeOf);
  }

  return size;
}

ColorDepth VideoData::GetColorDepth() const {
  if (!mImage) {
    return ColorDepth::COLOR_8;
  }

  return mImage->GetColorDepth();
}

void VideoData::UpdateDuration(const TimeUnit& aDuration) {
  MOZ_ASSERT(!aDuration.IsNegative());
  mDuration = aDuration;
}

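// Moves the start time to aTimestamp while keeping the end time fixed: the
// duration is recomputed as the span from aTimestamp to GetEndTime().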
void VideoData::UpdateTimestamp(const TimeUnit& aTimestamp) {
  MOZ_ASSERT(!aTimestamp.IsNegative());

  auto updatedDuration = GetEndTime() - aTimestamp;
  MOZ_ASSERT(!updatedDuration.IsNegative());

  mTime = aTimestamp;
  mDuration = updatedDuration;
}

bool VideoData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mTime -= aStartTime;
  if (mTime.IsNegative()) {
    NS_WARNING("Negative video start time after time-adjustment!");
  }
  return mTime.IsValid();
}

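// Builds a layers::PlanarYCbCrData descriptor for aBuffer's three planes. No
// pixel data is copied: the descriptor only points at the planes, so aBuffer
// must outlive any use of the returned value.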
PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
                                         const VideoData::YCbCrBuffer& aBuffer,
                                         const IntRect& aPicture) {
  const VideoData::YCbCrBuffer::Plane& Y = aBuffer.mPlanes[0];
  const VideoData::YCbCrBuffer::Plane& Cb = aBuffer.mPlanes[1];
  const VideoData::YCbCrBuffer::Plane& Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  data.mYChannel = Y.mData;
  data.mYStride = AssertedCast<int32_t>(Y.mStride);
  data.mYSkip = AssertedCast<int32_t>(Y.mSkip);
  data.mCbChannel = Cb.mData;
  data.mCrChannel = Cr.mData;
  data.mCbCrStride = AssertedCast<int32_t>(Cb.mStride);
  data.mCbSkip = AssertedCast<int32_t>(Cb.mSkip);
  data.mCrSkip = AssertedCast<int32_t>(Cr.mSkip);
  data.mPictureRect = aPicture;
  data.mStereoMode = aInfo.mStereoMode;
  data.mYUVColorSpace = aBuffer.mYUVColorSpace;
  data.mColorPrimaries = aBuffer.mColorPrimaries;
  data.mColorDepth = aBuffer.mColorDepth;
  if (aInfo.mTransferFunction) {
    data.mTransferFunction = *aInfo.mTransferFunction;
  }
  data.mColorRange = aBuffer.mColorRange;
  data.mChromaSubsampling = aBuffer.mChromaSubsampling;
  return data;
}

/* static */
MediaResult VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                           const VideoInfo& aInfo,
                                           const YCbCrBuffer& aBuffer,
                                           const IntRect& aPicture,
                                           bool aCopyData) {
  MOZ_ASSERT(aVideoImage);

  PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);

  if (aCopyData) {
    return MediaResult(aVideoImage->CopyData(data),
                       RESULT_DETAIL("Failed to copy image data"));
  }
  return MediaResult(aVideoImage->AdoptData(data),
                     RESULT_DETAIL("Failed to adopt image data"));
}

/* static */
Result<already_AddRefed<VideoData>, MediaResult> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, bool aKeyframe, const TimeUnit& aTimecode,
    const IntRect& aPicture, layers::KnowsCompositor* aAllocator) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (MediaResult r = ValidateBufferAndPicture(aBuffer, aPicture);
      NS_FAILED(r)) {
    return Err(r);
  }

  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDecodedVideo,
                                                  aInfo.mImage.height);
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
  // format.
#ifdef XP_MACOSX
  if (aAllocator && aAllocator->GetWebRenderCompositorType() !=
                        layers::WebRenderCompositor::SOFTWARE) {
    RefPtr<layers::MacIOSurfaceImage> ioImage =
        new layers::MacIOSurfaceImage(nullptr);
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    if (ioImage->SetData(aContainer, data)) {
      v->mImage = ioImage;
      perfRecorder.Record();
      return v.forget();
    }
  }
#endif
  if (!v->mImage) {
    v->mImage = aContainer->CreatePlanarYCbCrImage();
  }

  if (!v->mImage) {
    // TODO: Should another error like NS_ERROR_UNEXPECTED be used here to
    // distinguish this error from the NS_ERROR_OUT_OF_MEMORY below?
    return Err(MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "Failed to create a PlanarYCbCrImage"));
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(videoImage);

  if (MediaResult r = VideoData::SetVideoDataToImage(
          videoImage, aInfo, aBuffer, aPicture, true /* aCopyData */);
      NS_FAILED(r)) {
    return Err(r);
  }

  perfRecorder.Record();
  return v.forget();
}

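// Variant used for frames that carry an alpha plane: the YUVA data is
// converted on the software side into a BGRA SharedRGBImage rather than a
// planar YCbCr image.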
/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, const YCbCrBuffer::Plane& aAlphaPlane,
    bool aKeyframe, const TimeUnit& aTimecode, const IntRect& aPicture) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (MediaResult r = ValidateBufferAndPicture(aBuffer, aPicture);
      NS_FAILED(r)) {
    NS_ERROR(r.Message().get());
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Convert from YUVA to BGRA format on the software side.
  RefPtr<layers::SharedRGBImage> videoImage =
      aContainer->CreateSharedRGBImage();
  v->mImage = videoImage;

  if (!v->mImage) {
    return nullptr;
  }
  if (!videoImage->Allocate(
          IntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight),
          SurfaceFormat::B8G8R8A8)) {
    return nullptr;
  }

  RefPtr<layers::TextureClient> texture =
      videoImage->GetTextureClient(/* aKnowsCompositor */ nullptr);
  if (!texture) {
    NS_WARNING("Failed to allocate TextureClient");
    return nullptr;
  }

  layers::TextureClientAutoLock autoLock(texture,
                                         layers::OpenMode::OPEN_WRITE_ONLY);
  if (!autoLock.Succeeded()) {
    NS_WARNING("Failed to lock TextureClient");
    return nullptr;
  }

  layers::MappedTextureData buffer;
  if (!texture->BorrowMappedData(buffer)) {
    NS_WARNING("Failed to borrow mapped data");
    return nullptr;
  }

  // The naming convention for libyuv and associated utils is word-order.
  // The naming convention in the gfx stack is byte-order.
  ConvertI420AlphaToARGB(aBuffer.mPlanes[0].mData, aBuffer.mPlanes[1].mData,
                         aBuffer.mPlanes[2].mData, aAlphaPlane.mData,
                         AssertedCast<int>(aBuffer.mPlanes[0].mStride),
                         AssertedCast<int>(aBuffer.mPlanes[1].mStride),
                         buffer.data, buffer.stride, buffer.size.width,
                         buffer.size.height);

  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateFromImage(
    const IntSize& aDisplay, int64_t aOffset, const TimeUnit& aTime,
    const TimeUnit& aDuration, const RefPtr<Image>& aImage, bool aKeyframe,
    const TimeUnit& aTimecode) {
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aDisplay, 0));
  v->mImage = aImage;
  return v.forget();
}

nsCString VideoData::ToString() const {
  // Indexed by mImage->GetFormat() below, so the order must match the
  // ImageFormat enum.
  std::array ImageFormatStrings = {
      "PLANAR_YCBCR",
      "NV_IMAGE",
      "SHARED_RGB",
      "MOZ2D_SURFACE",
      "MAC_IOSURFACE",
      "SURFACE_TEXTURE",
      "D3D9_RGB32_TEXTURE",
      "OVERLAY_IMAGE",
      "D3D11_SHARE_HANDLE_TEXTURE",
      "D3D11_TEXTURE_IMF_SAMPLE",
      "TEXTURE_WRAPPER",
      "D3D11_YCBCR_IMAGE",
      "GPU_VIDEO",
      "DMABUF",
      "DCOMP_SURFACE",
  };

  nsCString rv;
  rv.AppendPrintf(
      "VideoFrame [%s,%s] [%dx%d] format: %s", mTime.ToString().get(),
      mDuration.ToString().get(), mDisplay.Width(), mDisplay.Height(),
      mImage ? ImageFormatStrings[static_cast<int>(mImage->GetFormat())]
             : "null");
  return rv;
}

MediaRawData::MediaRawData()
    : MediaData(Type::RAW_DATA), mCrypto(mCryptoInternal) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
                           const uint8_t* aAlphaData, size_t aAlphaSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize),
      mAlphaBuffer(aAlphaData, aAlphaSize) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData,
                           AlignedByteBuffer&& aAlphaData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)),
      mAlphaBuffer(std::move(aAlphaData)) {}

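// Deep copy: duplicates the sample and alpha buffers along with the timing,
// crypto and track metadata. Returns nullptr if either buffer copy fails.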
already_AddRefed<MediaRawData> MediaRawData::Clone() const {
  int32_t sampleHeight = 0;
  if (mTrackInfo && mTrackInfo->GetAsVideoInfo()) {
    sampleHeight = mTrackInfo->GetAsVideoInfo()->mImage.height;
  }
  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDemuxedData,
                                                  sampleHeight);
  RefPtr<MediaRawData> s = new MediaRawData;
  s->mTimecode = mTimecode;
  s->mTime = mTime;
  s->mDuration = mDuration;
  s->mOffset = mOffset;
  s->mKeyframe = mKeyframe;
  s->mExtraData = mExtraData;
  s->mCryptoInternal = mCryptoInternal;
  s->mTrackInfo = mTrackInfo;
  s->mEOS = mEOS;
  s->mOriginalPresentationWindow = mOriginalPresentationWindow;
  if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
    return nullptr;
  }
  if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
    return nullptr;
  }
  perfRecorder.Record();
  return s.forget();
}

MediaRawData::~MediaRawData() = default;

size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);
  size += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
  return size;
}

UniquePtr<MediaRawDataWriter> MediaRawData::CreateWriter() {
  UniquePtr<MediaRawDataWriter> p(new MediaRawDataWriter(this));
  return p;
}

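// MediaRawDataWriter provides mutable access to the buffer and crypto data of
// the MediaRawData it was created from (see MediaRawData::CreateWriter above);
// every method simply forwards to the target's internal buffer.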
MediaRawDataWriter::MediaRawDataWriter(MediaRawData* aMediaRawData)
    : mCrypto(aMediaRawData->mCryptoInternal), mTarget(aMediaRawData) {}

bool MediaRawDataWriter::SetSize(size_t aSize) {
  return mTarget->mBuffer.SetLength(aSize);
}

bool MediaRawDataWriter::Prepend(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Prepend(aData, aSize);
}

bool MediaRawDataWriter::Append(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Append(aData, aSize);
}

bool MediaRawDataWriter::Replace(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Replace(aData, aSize);
}

void MediaRawDataWriter::Clear() { mTarget->mBuffer.Clear(); }

uint8_t* MediaRawDataWriter::Data() { return mTarget->mBuffer.Data(); }

size_t MediaRawDataWriter::Size() { return mTarget->Size(); }

void MediaRawDataWriter::PopFront(size_t aSize) {
  mTarget->mBuffer.PopFront(aSize);
}

const char* CryptoSchemeToString(const CryptoScheme& aScheme) {
  switch (aScheme) {
    case CryptoScheme::None:
      return "None";
    case CryptoScheme::Cenc:
      return "Cenc";
    case CryptoScheme::Cbcs:
      return "Cbcs";
    default:
      MOZ_ASSERT_UNREACHABLE("Unhandled CryptoScheme");
      return "";
  }
}

} // namespace mozilla