/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaData.h"

#include "ImageContainer.h"
#include "MediaInfo.h"
#include "PerformanceRecorder.h"
#include "VideoUtils.h"
#include "YCbCrUtils.h"
#include "mozilla/gfx/gfxVars.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/layers/KnowsCompositor.h"
#include "mozilla/layers/SharedRGBImage.h"

#include <stdint.h>

#ifdef XP_WIN
#  include "mozilla/gfx/DeviceManagerDx.h"
#  include "mozilla/layers/D3D11ShareHandleImage.h"
#  include "mozilla/layers/D3D11YCbCrImage.h"
#elif XP_MACOSX
#  include "MacIOSurfaceImage.h"
#  include "mozilla/gfx/gfxVars.h"
#endif

namespace mozilla {

using namespace mozilla::gfx;
using layers::PlanarYCbCrData;
using layers::PlanarYCbCrImage;
using media::TimeUnit;

const char* AudioData::sTypeName = "audio";
const char* VideoData::sTypeName = "video";

AudioData::AudioData(int64_t aOffset, const media::TimeUnit& aTime,
                     AlignedAudioBuffer&& aData, uint32_t aChannels,
                     uint32_t aRate, uint32_t aChannelMap)
    // Passing TimeUnit::Zero() here because we can't pass the result of an
    // arithmetic operation to the CheckedInt ctor. We set the duration in the
    // ctor body below.
    : MediaData(sType, aOffset, aTime, TimeUnit::Zero()),
      mChannels(aChannels),
      mChannelMap(aChannelMap),
      mRate(aRate),
      mOriginalTime(aTime),
      mAudioData(std::move(aData)),
      mFrames(mAudioData.Length() / aChannels) {
  MOZ_RELEASE_ASSERT(aChannels != 0,
                     "Can't create an AudioData with 0 channels.");
  MOZ_RELEASE_ASSERT(aRate != 0,
                     "Can't create an AudioData with a sample-rate of 0.");
  mDuration = TimeUnit(mFrames, aRate);
}

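// Illustrative sketch (not from the original source): a decoder typically
// creates an AudioData from an interleaved sample buffer, e.g.
//
//   AlignedAudioBuffer samples(frameCount * channelCount);
//   // ... fill `samples` with interleaved PCM ...
//   RefPtr<AudioData> audio = new AudioData(
//       byteOffset, presentationTime, std::move(samples), channelCount,
//       sampleRate, channelMap);
//
// where the lowercase identifiers are hypothetical local variables.
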
Span<AudioDataValue> AudioData::Data() const {
  return Span{GetAdjustedData(), mFrames * mChannels};
}

void AudioData::SetOriginalStartTime(const media::TimeUnit& aStartTime) {
  MOZ_ASSERT(mTime == mOriginalTime,
             "Do not call this if data has been trimmed!");
  mTime = aStartTime;
  mOriginalTime = aStartTime;
}

bool AudioData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mOriginalTime -= aStartTime;
  mTime -= aStartTime;
  if (mTrimWindow) {
    *mTrimWindow -= aStartTime;
  }
  if (mTime.IsNegative()) {
    NS_WARNING("Negative audio start time after time-adjustment!");
  }
  return mTime.IsValid() && mOriginalTime.IsValid();
}

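// Illustrative sketch (not from the original source): SetTrimWindow() lets a
// caller logically drop samples (e.g. encoder padding) without reallocating.
// Keeping only the last 10 ms of a packet could look like:
//
//   aAudio->SetTrimWindow(media::TimeInterval(
//       aAudio->GetEndTime() - media::TimeUnit::FromMicroseconds(10000),
//       aAudio->GetEndTime()));
//
// The interval must lie within [mOriginalTime, GetEndTime()], otherwise the
// call returns false.
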
bool AudioData::SetTrimWindow(const media::TimeInterval& aTrim) {
  MOZ_DIAGNOSTIC_ASSERT(aTrim.mStart.IsValid() && aTrim.mEnd.IsValid(),
                        "An overflow occurred on the provided TimeInterval");
  if (!mAudioData) {
    // MoveableData got called. Can no longer work on it.
    return false;
  }
  if (aTrim.mStart < mOriginalTime || aTrim.mEnd > GetEndTime()) {
    return false;
  }

  auto trimBefore = aTrim.mStart - mOriginalTime;
  auto trimAfter = aTrim.mEnd - mOriginalTime;
  if (!trimBefore.IsValid() || !trimAfter.IsValid()) {
    // Overflow.
    return false;
  }
  if (!mTrimWindow && trimBefore.IsZero() && trimAfter == mDuration) {
    // Nothing to change, abort early to prevent rounding errors.
    return true;
  }

  size_t frameOffset = trimBefore.ToTicksAtRate(mRate);
  mTrimWindow = Some(aTrim);
  mDataOffset = frameOffset * mChannels;
  MOZ_DIAGNOSTIC_ASSERT(mDataOffset <= mAudioData.Length(),
                        "Data offset outside original buffer");
  int64_t frameCountAfterTrim = (trimAfter - trimBefore).ToTicksAtRate(mRate);
  if (frameCountAfterTrim >
      AssertedCast<int64_t>(mAudioData.Length() / mChannels)) {
    // Accept rounding error caused by an imprecise time_base in the container,
    // that can cause a mismatch but not other kind of unexpected frame count.
    MOZ_RELEASE_ASSERT(!trimBefore.IsBase(mRate));
    mFrames = 0;
  } else {
    mFrames = frameCountAfterTrim;
  }
  mTime = mOriginalTime + trimBefore;
  mDuration = TimeUnit(mFrames, mRate);

  return true;
}

AudioDataValue* AudioData::GetAdjustedData() const {
  if (!mAudioData) {
    return nullptr;
  }
  return mAudioData.Data() + mDataOffset;
}

void AudioData::EnsureAudioBuffer() {
  if (mAudioBuffer || !mAudioData) {
    return;
  }
  const AudioDataValue* srcData = GetAdjustedData();
  CheckedInt<size_t> bufferSize(sizeof(AudioDataValue));
  bufferSize *= mFrames;
  bufferSize *= mChannels;
  mAudioBuffer = SharedBuffer::Create(bufferSize);

  AudioDataValue* destData = static_cast<AudioDataValue*>(mAudioBuffer->Data());
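  // The loop below de-interleaves the samples: the source buffer is
  // interleaved (frame-major, srcData[frame * mChannels + channel]), while the
  // shared buffer is written out channel by channel (planar,
  // destData[channel * mFrames + frame]).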
  for (uint32_t i = 0; i < mFrames; ++i) {
    for (uint32_t j = 0; j < mChannels; ++j) {
      destData[j * mFrames + i] = srcData[i * mChannels + j];
    }
  }
}

size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size =
      aMallocSizeOf(this) + mAudioData.SizeOfExcludingThis(aMallocSizeOf);
  if (mAudioBuffer) {
    size += mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf);
  }
  return size;
}

AlignedAudioBuffer AudioData::MoveableData() {
  // Trim buffer according to trimming mask.
  mAudioData.PopFront(mDataOffset);
  mAudioData.SetLength(mFrames * mChannels);
  mDataOffset = 0;
  mFrames = 0;
  mTrimWindow.reset();
  return std::move(mAudioData);
}

static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane) {
  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
}

static bool ValidateBufferAndPicture(const VideoData::YCbCrBuffer& aBuffer,
                                     const IntRect& aPicture) {
  // The following situation should never happen unless there is a bug
  // in the decoder
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return false;
  }

  // The following situations could be triggered by invalid input
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return false;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) ||
      !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return false;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight) {
    // The specified picture dimensions can't be contained inside the video
    // frame, we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return false;
  }
  return true;
}

VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
                     const TimeUnit& aDuration, bool aKeyframe,
                     const TimeUnit& aTimecode, IntSize aDisplay,
                     layers::ImageContainer::FrameID aFrameID)
    : MediaData(Type::VIDEO_DATA, aOffset, aTime, aDuration),
      mDisplay(aDisplay),
      mFrameID(aFrameID),
      mSentToCompositor(false),
      mNextKeyFrameTime(TimeUnit::Invalid()) {
  MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
  mKeyframe = aKeyframe;
  mTimecode = aTimecode;
}

VideoData::~VideoData() = default;

size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);

  // Currently only PLANAR_YCBCR has a well defined function for determining
  // its size, so reporting is limited to that type.
  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    const mozilla::layers::PlanarYCbCrImage* img =
        static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    size += img->SizeOfIncludingThis(aMallocSizeOf);
  }

  return size;
}

ColorDepth VideoData::GetColorDepth() const {
  if (!mImage) {
    return ColorDepth::COLOR_8;
  }

  return mImage->GetColorDepth();
}

void VideoData::UpdateDuration(const TimeUnit& aDuration) {
  MOZ_ASSERT(!aDuration.IsNegative());
  mDuration = aDuration;
}

void VideoData::UpdateTimestamp(const TimeUnit& aTimestamp) {
  MOZ_ASSERT(!aTimestamp.IsNegative());

  auto updatedDuration = GetEndTime() - aTimestamp;
  MOZ_ASSERT(!updatedDuration.IsNegative());

  mTime = aTimestamp;
  mDuration = updatedDuration;
}

bool VideoData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mTime -= aStartTime;
  if (mTime.IsNegative()) {
    NS_WARNING("Negative video start time after time-adjustment!");
  }
  return mTime.IsValid();
}

PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
                                         const VideoData::YCbCrBuffer& aBuffer,
                                         const IntRect& aPicture) {
  const VideoData::YCbCrBuffer::Plane& Y = aBuffer.mPlanes[0];
  const VideoData::YCbCrBuffer::Plane& Cb = aBuffer.mPlanes[1];
  const VideoData::YCbCrBuffer::Plane& Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  data.mYChannel = Y.mData;
  data.mYStride = AssertedCast<int32_t>(Y.mStride);
  data.mYSkip = AssertedCast<int32_t>(Y.mSkip);
  data.mCbChannel = Cb.mData;
  data.mCrChannel = Cr.mData;
  data.mCbCrStride = AssertedCast<int32_t>(Cb.mStride);
  data.mCbSkip = AssertedCast<int32_t>(Cb.mSkip);
  data.mCrSkip = AssertedCast<int32_t>(Cr.mSkip);
  data.mPictureRect = aPicture;
  data.mStereoMode = aInfo.mStereoMode;
  data.mYUVColorSpace = aBuffer.mYUVColorSpace;
  data.mColorPrimaries = aBuffer.mColorPrimaries;
  data.mColorDepth = aBuffer.mColorDepth;
  if (aInfo.mTransferFunction) {
    data.mTransferFunction = *aInfo.mTransferFunction;
  }
  data.mColorRange = aBuffer.mColorRange;
  data.mChromaSubsampling = aBuffer.mChromaSubsampling;
  return data;
}

/* static */
bool VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                    const VideoInfo& aInfo,
                                    const YCbCrBuffer& aBuffer,
                                    const IntRect& aPicture, bool aCopyData) {
  if (!aVideoImage) {
    return false;
  }

  PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);

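  // NB: CopyData() copies the pixels into the image's own buffers, whereas
  // AdoptData() only records the caller's plane pointers, so with
  // aCopyData == false the YCbCrBuffer must outlive aVideoImage.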
  if (aCopyData) {
    return aVideoImage->CopyData(data);
  }
  return aVideoImage->AdoptData(data);
}

/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, bool aKeyframe, const TimeUnit& aTimecode,
    const IntRect& aPicture, layers::KnowsCompositor* aAllocator) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDecodedVideo,
                                                  aInfo.mImage.height);
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
  // format.
#if XP_MACOSX
  if (aAllocator && aAllocator->GetWebRenderCompositorType() !=
                        layers::WebRenderCompositor::SOFTWARE) {
    RefPtr<layers::MacIOSurfaceImage> ioImage =
        new layers::MacIOSurfaceImage(nullptr);
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    if (ioImage->SetData(aContainer, data)) {
      v->mImage = ioImage;
      perfRecorder.Record();
      return v.forget();
    }
  }
#endif
  if (!v->mImage) {
    v->mImage = aContainer->CreatePlanarYCbCrImage();
  }

  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(videoImage);

  if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                      true /* aCopyData */)) {
    return nullptr;
  }

  perfRecorder.Record();
  return v.forget();
}

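// Illustrative sketch (not from the original source): a software video decoder
// would typically wrap its decoded planes in a YCbCrBuffer and call, e.g.
//
//   RefPtr<VideoData> frame = VideoData::CreateAndCopyData(
//       info, imageContainer, byteOffset, pts, duration, buffer,
//       /* aKeyframe */ true, timecode, info.ImageRect(), knowsCompositor);
//
// where the lowercase identifiers are hypothetical locals and info.ImageRect()
// is assumed to describe the visible picture region.
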
/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, const YCbCrBuffer::Plane& aAlphaPlane,
    bool aKeyframe, const TimeUnit& aTimecode, const IntRect& aPicture) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Convert from YUVA to BGRA format on the software side.
  RefPtr<layers::SharedRGBImage> videoImage =
      aContainer->CreateSharedRGBImage();
  v->mImage = videoImage;

  if (!v->mImage) {
    return nullptr;
  }
  if (!videoImage->Allocate(
          IntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight),
          SurfaceFormat::B8G8R8A8)) {
    return nullptr;
  }

  RefPtr<layers::TextureClient> texture =
      videoImage->GetTextureClient(/* aKnowsCompositor */ nullptr);
  if (!texture) {
    NS_WARNING("Failed to allocate TextureClient");
    return nullptr;
  }

  layers::TextureClientAutoLock autoLock(texture,
                                         layers::OpenMode::OPEN_WRITE_ONLY);
  if (!autoLock.Succeeded()) {
    NS_WARNING("Failed to lock TextureClient");
    return nullptr;
  }

  layers::MappedTextureData buffer;
  if (!texture->BorrowMappedData(buffer)) {
    NS_WARNING("Failed to borrow mapped data");
    return nullptr;
  }

  // The naming convention for libyuv and associated utils is word-order.
  // The naming convention in the gfx stack is byte-order.
  ConvertI420AlphaToARGB(aBuffer.mPlanes[0].mData, aBuffer.mPlanes[1].mData,
                         aBuffer.mPlanes[2].mData, aAlphaPlane.mData,
                         AssertedCast<int>(aBuffer.mPlanes[0].mStride),
                         AssertedCast<int>(aBuffer.mPlanes[1].mStride),
                         buffer.data, buffer.stride, buffer.size.width,
                         buffer.size.height);

  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateFromImage(
    const IntSize& aDisplay, int64_t aOffset, const TimeUnit& aTime,
    const TimeUnit& aDuration, const RefPtr<Image>& aImage, bool aKeyframe,
    const TimeUnit& aTimecode) {
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aDisplay, 0));
  v->mImage = aImage;
  return v.forget();
}

nsCString VideoData::ToString() const {
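  // This table is assumed to mirror the order of the ImageFormat enum
  // (ImageTypes.h); GetFormat() is used below as a direct index into it.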
  std::array ImageFormatStrings = {
      "PLANAR_YCBCR",
      "NV_IMAGE",
      "SHARED_RGB",
      "MOZ2D_SURFACE",
      "MAC_IOSURFACE",
      "SURFACE_TEXTURE",
      "D3D9_RGB32_TEXTURE",
      "OVERLAY_IMAGE",
      "D3D11_SHARE_HANDLE_TEXTURE",
      "D3D11_TEXTURE_IMF_SAMPLE",
      "TEXTURE_WRAPPER",
      "D3D11_YCBCR_IMAGE",
      "GPU_VIDEO",
      "DMABUF",
      "DCOMP_SURFACE",
  };

  nsCString rv;
  rv.AppendPrintf(
      "VideoFrame [%s,%s] [%dx%d] format: %s", mTime.ToString().get(),
      mDuration.ToString().get(), mDisplay.Width(), mDisplay.Height(),
      mImage ? ImageFormatStrings[static_cast<int>(mImage->GetFormat())]
             : "null");
  return rv;
}

MediaRawData::MediaRawData()
    : MediaData(Type::RAW_DATA), mCrypto(mCryptoInternal) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
                           const uint8_t* aAlphaData, size_t aAlphaSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize),
      mAlphaBuffer(aAlphaData, aAlphaSize) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData,
                           AlignedByteBuffer&& aAlphaData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)),
      mAlphaBuffer(std::move(aAlphaData)) {}

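// Illustrative sketch (not from the original source): a demuxer typically
// wraps a compressed sample and then fills in the timing fields, e.g.
//
//   RefPtr<MediaRawData> sample = new MediaRawData(packetData, packetSize);
//   sample->mTime = pts;
//   sample->mDuration = duration;
//   sample->mKeyframe = isKeyframe;
//
// where packetData, packetSize, pts, duration and isKeyframe are hypothetical
// locals supplied by the container parser.
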
already_AddRefed<MediaRawData> MediaRawData::Clone() const {
  int32_t sampleHeight = 0;
  if (mTrackInfo && mTrackInfo->GetAsVideoInfo()) {
    sampleHeight = mTrackInfo->GetAsVideoInfo()->mImage.height;
  }
  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDemuxedData,
                                                  sampleHeight);
  RefPtr<MediaRawData> s = new MediaRawData;
  s->mTimecode = mTimecode;
  s->mTime = mTime;
  s->mDuration = mDuration;
  s->mOffset = mOffset;
  s->mKeyframe = mKeyframe;
  s->mExtraData = mExtraData;
  s->mCryptoInternal = mCryptoInternal;
  s->mTrackInfo = mTrackInfo;
  s->mEOS = mEOS;
  s->mOriginalPresentationWindow = mOriginalPresentationWindow;
  if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
    return nullptr;
  }
  if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
    return nullptr;
  }
  perfRecorder.Record();
  return s.forget();
}

MediaRawData::~MediaRawData() = default;

size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);
  size += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
  return size;
}

UniquePtr<MediaRawDataWriter> MediaRawData::CreateWriter() {
  UniquePtr<MediaRawDataWriter> p(new MediaRawDataWriter(this));
  return p;
}

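// MediaRawDataWriter gives mutable access to a MediaRawData's buffer and
// crypto data, which the MediaRawData itself is assumed to expose only
// read-only once constructed.
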
MediaRawDataWriter::MediaRawDataWriter(MediaRawData* aMediaRawData)
    : mCrypto(aMediaRawData->mCryptoInternal), mTarget(aMediaRawData) {}

bool MediaRawDataWriter::SetSize(size_t aSize) {
  return mTarget->mBuffer.SetLength(aSize);
}

bool MediaRawDataWriter::Prepend(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Prepend(aData, aSize);
}

bool MediaRawDataWriter::Append(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Append(aData, aSize);
}

bool MediaRawDataWriter::Replace(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Replace(aData, aSize);
}

void MediaRawDataWriter::Clear() { mTarget->mBuffer.Clear(); }

uint8_t* MediaRawDataWriter::Data() { return mTarget->mBuffer.Data(); }

size_t MediaRawDataWriter::Size() { return mTarget->Size(); }

void MediaRawDataWriter::PopFront(size_t aSize) {
  mTarget->mBuffer.PopFront(aSize);
}

const char* CryptoSchemeToString(const CryptoScheme& aScheme) {
  switch (aScheme) {
    case CryptoScheme::None:
      return "None";
    case CryptoScheme::Cenc:
      return "Cenc";
    case CryptoScheme::Cbcs:
      return "Cbcs";
    default:
      MOZ_ASSERT_UNREACHABLE();
      return "";
  }
}

}  // namespace mozilla