gecko.git / dom/media/MediaData.cpp
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaData.h"

#include "ImageContainer.h"
#include "MediaInfo.h"
#include "PerformanceRecorder.h"
#include "VideoUtils.h"
#include "YCbCrUtils.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/layers/KnowsCompositor.h"
#include "mozilla/layers/SharedRGBImage.h"

#include <stdint.h>

#ifdef XP_WIN
#  include "mozilla/WindowsVersion.h"
#  include "mozilla/layers/D3D11YCbCrImage.h"
#elif XP_MACOSX
#  include "MacIOSurfaceImage.h"
#  include "mozilla/gfx/gfxVars.h"
#endif

namespace mozilla {

using namespace mozilla::gfx;
using layers::ImageContainer;
using layers::PlanarYCbCrData;
using layers::PlanarYCbCrImage;
using media::TimeUnit;

const char* AudioData::sTypeName = "audio";
const char* VideoData::sTypeName = "video";

AudioData::AudioData(int64_t aOffset, const media::TimeUnit& aTime,
                     AlignedAudioBuffer&& aData, uint32_t aChannels,
                     uint32_t aRate, uint32_t aChannelMap)
    : MediaData(sType, aOffset, aTime,
                FramesToTimeUnit(aData.Length() / aChannels, aRate)),
      mChannels(aChannels),
      mChannelMap(aChannelMap),
      mRate(aRate),
      mOriginalTime(aTime),
      mAudioData(std::move(aData)),
      mFrames(mAudioData.Length() / aChannels) {}

Span<AudioDataValue> AudioData::Data() const {
  return Span{GetAdjustedData(), mFrames * mChannels};
}

void AudioData::SetOriginalStartTime(const media::TimeUnit& aStartTime) {
  MOZ_ASSERT(mTime == mOriginalTime,
             "Do not call this if data has been trimmed!");
  mTime = aStartTime;
  mOriginalTime = aStartTime;
}

bool AudioData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mOriginalTime -= aStartTime;
  mTime -= aStartTime;
  if (mTrimWindow) {
    *mTrimWindow -= aStartTime;
  }
  if (mTime.IsNegative()) {
    NS_WARNING("Negative audio start time after time-adjustment!");
  }
  return mTime.IsValid() && mOriginalTime.IsValid();
}

bool AudioData::SetTrimWindow(const media::TimeInterval& aTrim) {
  MOZ_DIAGNOSTIC_ASSERT(aTrim.mStart.IsValid() && aTrim.mEnd.IsValid(),
                        "An overflow occurred on the provided TimeInterval");
  if (!mAudioData) {
    // MoveableData got called. Can no longer work on it.
    return false;
  }
  const size_t originalFrames = mAudioData.Length() / mChannels;
  const TimeUnit originalDuration = FramesToTimeUnit(originalFrames, mRate);
  if (aTrim.mStart < mOriginalTime ||
      aTrim.mEnd > mOriginalTime + originalDuration) {
    return false;
  }

  auto trimBefore = TimeUnitToFrames(aTrim.mStart - mOriginalTime, mRate);
  auto trimAfter = aTrim.mEnd == GetEndTime()
                       ? originalFrames
                       : TimeUnitToFrames(aTrim.mEnd - mOriginalTime, mRate);
  if (!trimBefore.isValid() || !trimAfter.isValid()) {
    // Overflow.
    return false;
  }
  MOZ_DIAGNOSTIC_ASSERT(trimAfter.value() >= trimBefore.value(),
                        "Something went wrong with trimming value");
  if (!mTrimWindow && trimBefore == 0 && trimAfter == originalFrames) {
    // Nothing to change, abort early to prevent rounding errors.
    return true;
  }

  mTrimWindow = Some(aTrim);
  mDataOffset = trimBefore.value() * mChannels;
  MOZ_DIAGNOSTIC_ASSERT(mDataOffset <= mAudioData.Length(),
                        "Data offset outside original buffer");
  mFrames = (trimAfter - trimBefore).value();
  MOZ_DIAGNOSTIC_ASSERT(mFrames <= originalFrames,
                        "More frames than found in container");
  mTime = mOriginalTime + FramesToTimeUnit(trimBefore.value(), mRate);
  mDuration = FramesToTimeUnit(mFrames, mRate);

  return true;
}
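
// Returns the start of the audio payload, skipping any leading samples that
// have been trimmed away via SetTrimWindow().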
AudioDataValue* AudioData::GetAdjustedData() const {
  if (!mAudioData) {
    return nullptr;
  }
  return mAudioData.Data() + mDataOffset;
}

void AudioData::EnsureAudioBuffer() {
  if (mAudioBuffer || !mAudioData) {
    return;
  }
  const AudioDataValue* srcData = GetAdjustedData();
  CheckedInt<size_t> bufferSize(sizeof(AudioDataValue));
  bufferSize *= mFrames;
  bufferSize *= mChannels;
  mAudioBuffer = SharedBuffer::Create(bufferSize);
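
  // Copy the samples, converting from the interleaved (frame-major) layout of
  // mAudioData to the planar (channel-major) layout of the shared buffer.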
  AudioDataValue* destData = static_cast<AudioDataValue*>(mAudioBuffer->Data());
  for (uint32_t i = 0; i < mFrames; ++i) {
    for (uint32_t j = 0; j < mChannels; ++j) {
      destData[j * mFrames + i] = srcData[i * mChannels + j];
    }
  }
}

size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size =
      aMallocSizeOf(this) + mAudioData.SizeOfExcludingThis(aMallocSizeOf);
  if (mAudioBuffer) {
    size += mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf);
  }
  return size;
}

AlignedAudioBuffer AudioData::MoveableData() {
  // Trim buffer according to trimming mask.
  mAudioData.PopFront(mDataOffset);
  mAudioData.SetLength(mFrames * mChannels);
  mDataOffset = 0;
  mFrames = 0;
  mTrimWindow.reset();
  return std::move(mAudioData);
}
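
// Sanity-checks a single YCbCr plane: its dimensions must stay within the
// compositor limits, the total pixel count must be bounded, and the stride
// must be at least as wide as the plane.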
static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane) {
  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
         aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
}

static bool ValidateBufferAndPicture(const VideoData::YCbCrBuffer& aBuffer,
                                     const IntRect& aPicture) {
  // The following situation should never happen unless there is a bug
  // in the decoder.
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    NS_ERROR("C planes with different sizes");
    return false;
  }

  // The following situations could be triggered by invalid input.
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return false;
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) ||
      !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    NS_WARNING("Invalid plane size");
    return false;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight) {
    // The specified picture dimensions can't be contained inside the video
    // frame; we'll stomp memory if we try to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return false;
  }
  return true;
}

VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
                     const TimeUnit& aDuration, bool aKeyframe,
                     const TimeUnit& aTimecode, IntSize aDisplay,
                     layers::ImageContainer::FrameID aFrameID)
    : MediaData(Type::VIDEO_DATA, aOffset, aTime, aDuration),
      mDisplay(aDisplay),
      mFrameID(aFrameID),
      mSentToCompositor(false),
      mNextKeyFrameTime(TimeUnit::Invalid()) {
  MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
  mKeyframe = aKeyframe;
  mTimecode = aTimecode;
}

VideoData::~VideoData() = default;

size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);

  // Currently only PLANAR_YCBCR has a well defined function for determining
  // its size, so reporting is limited to that type.
  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    const mozilla::layers::PlanarYCbCrImage* img =
        static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    size += img->SizeOfIncludingThis(aMallocSizeOf);
  }

  return size;
}

void VideoData::UpdateDuration(const TimeUnit& aDuration) {
  MOZ_ASSERT(!aDuration.IsNegative());
  mDuration = aDuration;
}

void VideoData::UpdateTimestamp(const TimeUnit& aTimestamp) {
  MOZ_ASSERT(!aTimestamp.IsNegative());

  auto updatedDuration = GetEndTime() - aTimestamp;
  MOZ_ASSERT(!updatedDuration.IsNegative());

  mTime = aTimestamp;
  mDuration = updatedDuration;
}

bool VideoData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mTime -= aStartTime;
  if (mTime.IsNegative()) {
    NS_WARNING("Negative video start time after time-adjustment!");
  }
  return mTime.IsValid();
}
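
// Builds a PlanarYCbCrData descriptor from a decoded YCbCrBuffer. Only plane
// pointers, strides and colorimetry metadata are recorded; no pixel data is
// copied.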
PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
                                         const VideoData::YCbCrBuffer& aBuffer,
                                         const IntRect& aPicture) {
  const VideoData::YCbCrBuffer::Plane& Y = aBuffer.mPlanes[0];
  const VideoData::YCbCrBuffer::Plane& Cb = aBuffer.mPlanes[1];
  const VideoData::YCbCrBuffer::Plane& Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  data.mYChannel = Y.mData;
  data.mYStride = Y.mStride;
  data.mYSkip = Y.mSkip;
  data.mCbChannel = Cb.mData;
  data.mCrChannel = Cr.mData;
  data.mCbCrStride = Cb.mStride;
  data.mCbSkip = Cb.mSkip;
  data.mCrSkip = Cr.mSkip;
  data.mPictureRect = aPicture;
  data.mStereoMode = aInfo.mStereoMode;
  data.mYUVColorSpace = aBuffer.mYUVColorSpace;
  data.mColorDepth = aBuffer.mColorDepth;
  if (aInfo.mTransferFunction) {
    data.mTransferFunction = *aInfo.mTransferFunction;
  }
  data.mColorRange = aBuffer.mColorRange;
  data.mChromaSubsampling = aBuffer.mChromaSubsampling;
  return data;
}

/* static */
bool VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
                                    const VideoInfo& aInfo,
                                    const YCbCrBuffer& aBuffer,
                                    const IntRect& aPicture, bool aCopyData) {
  if (!aVideoImage) {
    return false;
  }

  PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);

  aVideoImage->SetDelayedConversion(true);
  if (aCopyData) {
    return aVideoImage->CopyData(data);
  } else {
    return aVideoImage->AdoptData(data);
  }
}

/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, bool aKeyframe, const TimeUnit& aTimecode,
    const IntRect& aPicture, layers::KnowsCompositor* aAllocator) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  PerformanceRecorder perfRecorder(PerformanceRecorder::Stage::CopyDecodedVideo,
                                   aInfo.mImage.height);
  perfRecorder.Start();
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
  // format.
#if XP_WIN
  // We disable this code path on Windows versions earlier than Windows 8 due
  // to intermittent crashes with old drivers. See bug 1405110.
  // D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved
  // planes; a non-zero mSkip value indicates that one of the planes would be
  // interleaved.
  if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator &&
      aAllocator->SupportsD3D11() && aBuffer.mPlanes[0].mSkip == 0 &&
      aBuffer.mPlanes[1].mSkip == 0 && aBuffer.mPlanes[2].mSkip == 0) {
    RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage();
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton()
                                ? layers::ImageBridgeChild::GetSingleton().get()
                                : aAllocator,
                            aContainer, data)) {
      v->mImage = d3d11Image;
      perfRecorder.End();
      return v.forget();
    }
  }
#elif XP_MACOSX
  if (aAllocator && aAllocator->GetWebRenderCompositorType() !=
                        layers::WebRenderCompositor::SOFTWARE) {
    RefPtr<layers::MacIOSurfaceImage> ioImage =
        new layers::MacIOSurfaceImage(nullptr);
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    if (ioImage->SetData(aContainer, data)) {
      v->mImage = ioImage;
      perfRecorder.End();
      return v.forget();
    }
  }
#endif
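
  // No zero-copy path was taken above; allocate a regular PlanarYCbCrImage
  // from the ImageContainer and copy the decoded planes into it.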
  if (!v->mImage) {
    v->mImage = aContainer->CreatePlanarYCbCrImage();
  }

  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(videoImage);

  if (!VideoData::SetVideoDataToImage(videoImage, aInfo, aBuffer, aPicture,
                                      true /* aCopyData */)) {
    return nullptr;
  }

  perfRecorder.End();
  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, const YCbCrBuffer::Plane& aAlphaPlane,
    bool aKeyframe, const TimeUnit& aTimecode, const IntRect& aPicture) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Convert from YUVA to BGRA format on the software side.
  RefPtr<layers::SharedRGBImage> videoImage =
      aContainer->CreateSharedRGBImage();
  v->mImage = videoImage;

  if (!v->mImage) {
    return nullptr;
  }
  if (!videoImage->Allocate(
          IntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight),
          SurfaceFormat::B8G8R8A8)) {
    return nullptr;
  }

  RefPtr<layers::TextureClient> texture =
      videoImage->GetTextureClient(/* aKnowsCompositor */ nullptr);
  if (!texture) {
    NS_WARNING("Failed to allocate TextureClient");
    return nullptr;
  }

  layers::TextureClientAutoLock autoLock(texture,
                                         layers::OpenMode::OPEN_WRITE_ONLY);
  if (!autoLock.Succeeded()) {
    NS_WARNING("Failed to lock TextureClient");
    return nullptr;
  }

  layers::MappedTextureData buffer;
  if (!texture->BorrowMappedData(buffer)) {
    NS_WARNING("Failed to borrow mapped data");
    return nullptr;
  }

  // The naming convention for libyuv and associated utils is word-order.
  // The naming convention in the gfx stack is byte-order.
  ConvertI420AlphaToARGB(aBuffer.mPlanes[0].mData, aBuffer.mPlanes[1].mData,
                         aBuffer.mPlanes[2].mData, aAlphaPlane.mData,
                         aBuffer.mPlanes[0].mStride, aBuffer.mPlanes[1].mStride,
                         buffer.data, buffer.stride, buffer.size.width,
                         buffer.size.height);

  return v.forget();
}

/* static */
already_AddRefed<VideoData> VideoData::CreateFromImage(
    const IntSize& aDisplay, int64_t aOffset, const TimeUnit& aTime,
    const TimeUnit& aDuration, const RefPtr<Image>& aImage, bool aKeyframe,
    const TimeUnit& aTimecode) {
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aDisplay, 0));
  v->mImage = aImage;
  return v.forget();
}

MediaRawData::MediaRawData()
    : MediaData(Type::RAW_DATA), mCrypto(mCryptoInternal) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize) {}

MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
                           const uint8_t* aAlphaData, size_t aAlphaSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize),
      mAlphaBuffer(aAlphaData, aAlphaSize) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)) {}

MediaRawData::MediaRawData(AlignedByteBuffer&& aData,
                           AlignedByteBuffer&& aAlphaData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)),
      mAlphaBuffer(std::move(aAlphaData)) {}

already_AddRefed<MediaRawData> MediaRawData::Clone() const {
  int32_t sampleHeight = 0;
  if (mTrackInfo && mTrackInfo->GetAsVideoInfo()) {
    sampleHeight = mTrackInfo->GetAsVideoInfo()->mImage.height;
  }
  PerformanceRecorder perfRecorder(PerformanceRecorder::Stage::CopyDemuxedData,
                                   sampleHeight);
  perfRecorder.Start();
  RefPtr<MediaRawData> s = new MediaRawData;
  s->mTimecode = mTimecode;
  s->mTime = mTime;
  s->mDuration = mDuration;
  s->mOffset = mOffset;
  s->mKeyframe = mKeyframe;
  s->mExtraData = mExtraData;
  s->mCryptoInternal = mCryptoInternal;
  s->mTrackInfo = mTrackInfo;
  s->mEOS = mEOS;
  s->mOriginalPresentationWindow = mOriginalPresentationWindow;
  if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
    return nullptr;
  }
  if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
    return nullptr;
  }
  perfRecorder.End();
  return s.forget();
}

MediaRawData::~MediaRawData() = default;

size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
  size_t size = aMallocSizeOf(this);
  size += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
  return size;
}

UniquePtr<MediaRawDataWriter> MediaRawData::CreateWriter() {
  UniquePtr<MediaRawDataWriter> p(new MediaRawDataWriter(this));
  return p;
}

MediaRawDataWriter::MediaRawDataWriter(MediaRawData* aMediaRawData)
    : mCrypto(aMediaRawData->mCryptoInternal), mTarget(aMediaRawData) {}

bool MediaRawDataWriter::SetSize(size_t aSize) {
  return mTarget->mBuffer.SetLength(aSize);
}

bool MediaRawDataWriter::Prepend(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Prepend(aData, aSize);
}

bool MediaRawDataWriter::Append(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Append(aData, aSize);
}

bool MediaRawDataWriter::Replace(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Replace(aData, aSize);
}

void MediaRawDataWriter::Clear() { mTarget->mBuffer.Clear(); }

uint8_t* MediaRawDataWriter::Data() { return mTarget->mBuffer.Data(); }

size_t MediaRawDataWriter::Size() { return mTarget->Size(); }

void MediaRawDataWriter::PopFront(size_t aSize) {
  mTarget->mBuffer.PopFront(aSize);
}

}  // namespace mozilla