1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaData.h"

#include "ImageContainer.h"
#include "MediaInfo.h"
#include "VideoUtils.h"
#include "YCbCrUtils.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/layers/KnowsCompositor.h"
#include "mozilla/layers/SharedRGBImage.h"

#ifdef XP_WIN
#  include "mozilla/WindowsVersion.h"
#  include "mozilla/layers/D3D11YCbCrImage.h"
#elif defined(XP_MACOSX)
#  include "MacIOSurfaceImage.h"
#  include "mozilla/gfx/gfxVars.h"
#endif
29 using namespace mozilla::gfx
;
30 using layers::ImageContainer
;
31 using layers::PlanarYCbCrData
;
32 using layers::PlanarYCbCrImage
;
33 using media::TimeUnit
;
35 const char* AudioData::sTypeName
= "audio";
36 const char* VideoData::sTypeName
= "video";
38 AudioData::AudioData(int64_t aOffset
, const media::TimeUnit
& aTime
,
39 AlignedAudioBuffer
&& aData
, uint32_t aChannels
,
40 uint32_t aRate
, uint32_t aChannelMap
)
41 : MediaData(sType
, aOffset
, aTime
,
42 FramesToTimeUnit(aData
.Length() / aChannels
, aRate
)),
44 mChannelMap(aChannelMap
),
47 mAudioData(std::move(aData
)),
48 mFrames(mAudioData
.Length() / aChannels
) {}
50 Span
<AudioDataValue
> AudioData::Data() const {
51 return Span
{GetAdjustedData(), mFrames
* mChannels
};
54 void AudioData::SetOriginalStartTime(const media::TimeUnit
& aStartTime
) {
55 MOZ_ASSERT(mTime
== mOriginalTime
,
56 "Do not call this if data has been trimmed!");
58 mOriginalTime
= aStartTime
;
61 bool AudioData::AdjustForStartTime(const media::TimeUnit
& aStartTime
) {
62 mOriginalTime
-= aStartTime
;
65 *mTrimWindow
-= aStartTime
;
67 if (mTime
.IsNegative()) {
68 NS_WARNING("Negative audio start time after time-adjustment!");
70 return mTime
.IsValid() && mOriginalTime
.IsValid();
73 bool AudioData::SetTrimWindow(const media::TimeInterval
& aTrim
) {
74 MOZ_DIAGNOSTIC_ASSERT(aTrim
.mStart
.IsValid() && aTrim
.mEnd
.IsValid(),
75 "An overflow occurred on the provided TimeInterval");
77 // MoveableData got called. Can no longer work on it.
80 const size_t originalFrames
= mAudioData
.Length() / mChannels
;
81 const TimeUnit originalDuration
= FramesToTimeUnit(originalFrames
, mRate
);
82 if (aTrim
.mStart
< mOriginalTime
||
83 aTrim
.mEnd
> mOriginalTime
+ originalDuration
) {
87 auto trimBefore
= TimeUnitToFrames(aTrim
.mStart
- mOriginalTime
, mRate
);
88 auto trimAfter
= aTrim
.mEnd
== GetEndTime()
90 : TimeUnitToFrames(aTrim
.mEnd
- mOriginalTime
, mRate
);
91 if (!trimBefore
.isValid() || !trimAfter
.isValid()) {
95 MOZ_DIAGNOSTIC_ASSERT(trimAfter
.value() >= trimBefore
.value(),
96 "Something went wrong with trimming value");
97 if (!mTrimWindow
&& trimBefore
== 0 && trimAfter
== originalFrames
) {
98 // Nothing to change, abort early to prevent rounding errors.
102 mTrimWindow
= Some(aTrim
);
103 mDataOffset
= trimBefore
.value() * mChannels
;
104 MOZ_DIAGNOSTIC_ASSERT(mDataOffset
<= mAudioData
.Length(),
105 "Data offset outside original buffer");
106 mFrames
= (trimAfter
- trimBefore
).value();
107 MOZ_DIAGNOSTIC_ASSERT(mFrames
<= originalFrames
,
108 "More frames than found in container");
109 mTime
= mOriginalTime
+ FramesToTimeUnit(trimBefore
.value(), mRate
);
110 mDuration
= FramesToTimeUnit(mFrames
, mRate
);
115 AudioDataValue
* AudioData::GetAdjustedData() const {
119 return mAudioData
.Data() + mDataOffset
;
122 void AudioData::EnsureAudioBuffer() {
123 if (mAudioBuffer
|| !mAudioData
) {
126 const AudioDataValue
* srcData
= GetAdjustedData();
127 CheckedInt
<size_t> bufferSize(sizeof(AudioDataValue
));
128 bufferSize
*= mFrames
;
129 bufferSize
*= mChannels
;
130 mAudioBuffer
= SharedBuffer::Create(bufferSize
);
132 AudioDataValue
* destData
= static_cast<AudioDataValue
*>(mAudioBuffer
->Data());
133 for (uint32_t i
= 0; i
< mFrames
; ++i
) {
134 for (uint32_t j
= 0; j
< mChannels
; ++j
) {
135 destData
[j
* mFrames
+ i
] = srcData
[i
* mChannels
+ j
];
140 size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf
) const {
142 aMallocSizeOf(this) + mAudioData
.SizeOfExcludingThis(aMallocSizeOf
);
144 size
+= mAudioBuffer
->SizeOfIncludingThis(aMallocSizeOf
);
149 AlignedAudioBuffer
AudioData::MoveableData() {
150 // Trim buffer according to trimming mask.
151 mAudioData
.PopFront(mDataOffset
);
152 mAudioData
.SetLength(mFrames
* mChannels
);
156 return std::move(mAudioData
);
159 static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane
& aPlane
) {
160 return aPlane
.mWidth
<= PlanarYCbCrImage::MAX_DIMENSION
&&
161 aPlane
.mHeight
<= PlanarYCbCrImage::MAX_DIMENSION
&&
162 aPlane
.mWidth
* aPlane
.mHeight
< MAX_VIDEO_WIDTH
* MAX_VIDEO_HEIGHT
&&
163 aPlane
.mStride
> 0 && aPlane
.mWidth
<= aPlane
.mStride
;
166 static bool ValidateBufferAndPicture(const VideoData::YCbCrBuffer
& aBuffer
,
167 const IntRect
& aPicture
) {
168 // The following situation should never happen unless there is a bug
170 if (aBuffer
.mPlanes
[1].mWidth
!= aBuffer
.mPlanes
[2].mWidth
||
171 aBuffer
.mPlanes
[1].mHeight
!= aBuffer
.mPlanes
[2].mHeight
) {
172 NS_ERROR("C planes with different sizes");
176 // The following situations could be triggered by invalid input
177 if (aPicture
.width
<= 0 || aPicture
.height
<= 0) {
178 NS_WARNING("Empty picture rect");
181 if (!ValidatePlane(aBuffer
.mPlanes
[0]) ||
182 !ValidatePlane(aBuffer
.mPlanes
[1]) ||
183 !ValidatePlane(aBuffer
.mPlanes
[2])) {
184 NS_WARNING("Invalid plane size");
188 // Ensure the picture size specified in the headers can be extracted out of
189 // the frame we've been supplied without indexing out of bounds.
190 CheckedUint32 xLimit
= aPicture
.x
+ CheckedUint32(aPicture
.width
);
191 CheckedUint32 yLimit
= aPicture
.y
+ CheckedUint32(aPicture
.height
);
192 if (!xLimit
.isValid() || xLimit
.value() > aBuffer
.mPlanes
[0].mStride
||
193 !yLimit
.isValid() || yLimit
.value() > aBuffer
.mPlanes
[0].mHeight
) {
194 // The specified picture dimensions can't be contained inside the video
195 // frame, we'll stomp memory if we try to copy it. Fail.
196 NS_WARNING("Overflowing picture rect");
202 VideoData::VideoData(int64_t aOffset
, const TimeUnit
& aTime
,
203 const TimeUnit
& aDuration
, bool aKeyframe
,
204 const TimeUnit
& aTimecode
, IntSize aDisplay
,
205 layers::ImageContainer::FrameID aFrameID
)
206 : MediaData(Type::VIDEO_DATA
, aOffset
, aTime
, aDuration
),
209 mSentToCompositor(false),
210 mNextKeyFrameTime(TimeUnit::Invalid()) {
211 MOZ_ASSERT(!mDuration
.IsNegative(), "Frame must have non-negative duration.");
212 mKeyframe
= aKeyframe
;
213 mTimecode
= aTimecode
;
216 VideoData::~VideoData() = default;
218 size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf
) const {
219 size_t size
= aMallocSizeOf(this);
221 // Currently only PLANAR_YCBCR has a well defined function for determining
222 // it's size, so reporting is limited to that type.
223 if (mImage
&& mImage
->GetFormat() == ImageFormat::PLANAR_YCBCR
) {
224 const mozilla::layers::PlanarYCbCrImage
* img
=
225 static_cast<const mozilla::layers::PlanarYCbCrImage
*>(mImage
.get());
226 size
+= img
->SizeOfIncludingThis(aMallocSizeOf
);
232 void VideoData::UpdateDuration(const TimeUnit
& aDuration
) {
233 MOZ_ASSERT(!aDuration
.IsNegative());
234 mDuration
= aDuration
;
237 void VideoData::UpdateTimestamp(const TimeUnit
& aTimestamp
) {
238 MOZ_ASSERT(!aTimestamp
.IsNegative());
240 auto updatedDuration
= GetEndTime() - aTimestamp
;
241 MOZ_ASSERT(!updatedDuration
.IsNegative());
244 mDuration
= updatedDuration
;
247 bool VideoData::AdjustForStartTime(const media::TimeUnit
& aStartTime
) {
249 if (mTime
.IsNegative()) {
250 NS_WARNING("Negative video start time after time-adjustment!");
252 return mTime
.IsValid();
255 PlanarYCbCrData
ConstructPlanarYCbCrData(const VideoInfo
& aInfo
,
256 const VideoData::YCbCrBuffer
& aBuffer
,
257 const IntRect
& aPicture
) {
258 const VideoData::YCbCrBuffer::Plane
& Y
= aBuffer
.mPlanes
[0];
259 const VideoData::YCbCrBuffer::Plane
& Cb
= aBuffer
.mPlanes
[1];
260 const VideoData::YCbCrBuffer::Plane
& Cr
= aBuffer
.mPlanes
[2];
262 PlanarYCbCrData data
;
263 data
.mYChannel
= Y
.mData
;
264 data
.mYSize
= IntSize(Y
.mWidth
, Y
.mHeight
);
265 data
.mYStride
= Y
.mStride
;
266 data
.mYSkip
= Y
.mSkip
;
267 data
.mCbChannel
= Cb
.mData
;
268 data
.mCrChannel
= Cr
.mData
;
269 data
.mCbCrSize
= IntSize(Cb
.mWidth
, Cb
.mHeight
);
270 data
.mCbCrStride
= Cb
.mStride
;
271 data
.mCbSkip
= Cb
.mSkip
;
272 data
.mCrSkip
= Cr
.mSkip
;
273 data
.mPicX
= aPicture
.x
;
274 data
.mPicY
= aPicture
.y
;
275 data
.mPicSize
= aPicture
.Size();
276 data
.mStereoMode
= aInfo
.mStereoMode
;
277 data
.mYUVColorSpace
= aBuffer
.mYUVColorSpace
;
278 data
.mColorDepth
= aBuffer
.mColorDepth
;
279 data
.mColorRange
= aBuffer
.mColorRange
;
284 bool VideoData::SetVideoDataToImage(PlanarYCbCrImage
* aVideoImage
,
285 const VideoInfo
& aInfo
,
286 const YCbCrBuffer
& aBuffer
,
287 const IntRect
& aPicture
, bool aCopyData
) {
292 MOZ_ASSERT(aBuffer
.mYUVColorSpace
!= gfx::YUVColorSpace::UNKNOWN
,
293 "We must know the colorframe at this point");
295 PlanarYCbCrData data
= ConstructPlanarYCbCrData(aInfo
, aBuffer
, aPicture
);
297 aVideoImage
->SetDelayedConversion(true);
299 return aVideoImage
->CopyData(data
);
301 return aVideoImage
->AdoptData(data
);
306 already_AddRefed
<VideoData
> VideoData::CreateAndCopyData(
307 const VideoInfo
& aInfo
, ImageContainer
* aContainer
, int64_t aOffset
,
308 const TimeUnit
& aTime
, const TimeUnit
& aDuration
,
309 const YCbCrBuffer
& aBuffer
, bool aKeyframe
, const TimeUnit
& aTimecode
,
310 const IntRect
& aPicture
, layers::KnowsCompositor
* aAllocator
) {
312 // Create a dummy VideoData with no image. This gives us something to
313 // send to media streams if necessary.
314 RefPtr
<VideoData
> v(new VideoData(aOffset
, aTime
, aDuration
, aKeyframe
,
315 aTimecode
, aInfo
.mDisplay
, 0));
319 if (!ValidateBufferAndPicture(aBuffer
, aPicture
)) {
323 MOZ_ASSERT(aBuffer
.mYUVColorSpace
!= gfx::YUVColorSpace::UNKNOWN
,
324 "We must know the colorframe at this point");
326 RefPtr
<VideoData
> v(new VideoData(aOffset
, aTime
, aDuration
, aKeyframe
,
327 aTimecode
, aInfo
.mDisplay
, 0));
329 // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
332 // We disable this code path on Windows version earlier of Windows 8 due to
333 // intermittent crashes with old drivers. See bug 1405110.
334 // D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved planes
335 // non-zero mSkip value indicates that one of the plane would be interleaved.
336 if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator
&&
337 aAllocator
->SupportsD3D11() && aBuffer
.mPlanes
[0].mSkip
== 0 &&
338 aBuffer
.mPlanes
[1].mSkip
== 0 && aBuffer
.mPlanes
[2].mSkip
== 0) {
339 RefPtr
<layers::D3D11YCbCrImage
> d3d11Image
= new layers::D3D11YCbCrImage();
340 PlanarYCbCrData data
= ConstructPlanarYCbCrData(aInfo
, aBuffer
, aPicture
);
341 if (d3d11Image
->SetData(layers::ImageBridgeChild::GetSingleton()
342 ? layers::ImageBridgeChild::GetSingleton().get()
345 v
->mImage
= d3d11Image
;
350 if (aAllocator
&& aAllocator
->GetCompositorBackendType() ==
351 layers::LayersBackend::LAYERS_WR
) {
352 RefPtr
<layers::MacIOSurfaceImage
> ioImage
=
353 new layers::MacIOSurfaceImage(nullptr);
354 PlanarYCbCrData data
= ConstructPlanarYCbCrData(aInfo
, aBuffer
, aPicture
);
355 if (ioImage
->SetData(aContainer
, data
)) {
362 v
->mImage
= aContainer
->CreatePlanarYCbCrImage();
368 NS_ASSERTION(v
->mImage
->GetFormat() == ImageFormat::PLANAR_YCBCR
,
370 PlanarYCbCrImage
* videoImage
= v
->mImage
->AsPlanarYCbCrImage();
371 MOZ_ASSERT(videoImage
);
373 if (!VideoData::SetVideoDataToImage(videoImage
, aInfo
, aBuffer
, aPicture
,
374 true /* aCopyData */)) {
382 already_AddRefed
<VideoData
> VideoData::CreateAndCopyData(
383 const VideoInfo
& aInfo
, ImageContainer
* aContainer
, int64_t aOffset
,
384 const TimeUnit
& aTime
, const TimeUnit
& aDuration
,
385 const YCbCrBuffer
& aBuffer
, const YCbCrBuffer::Plane
& aAlphaPlane
,
386 bool aKeyframe
, const TimeUnit
& aTimecode
, const IntRect
& aPicture
) {
388 // Create a dummy VideoData with no image. This gives us something to
389 // send to media streams if necessary.
390 RefPtr
<VideoData
> v(new VideoData(aOffset
, aTime
, aDuration
, aKeyframe
,
391 aTimecode
, aInfo
.mDisplay
, 0));
395 if (!ValidateBufferAndPicture(aBuffer
, aPicture
)) {
399 RefPtr
<VideoData
> v(new VideoData(aOffset
, aTime
, aDuration
, aKeyframe
,
400 aTimecode
, aInfo
.mDisplay
, 0));
402 // Convert from YUVA to BGRA format on the software side.
403 RefPtr
<layers::SharedRGBImage
> videoImage
=
404 aContainer
->CreateSharedRGBImage();
405 v
->mImage
= videoImage
;
410 if (!videoImage
->Allocate(
411 IntSize(aBuffer
.mPlanes
[0].mWidth
, aBuffer
.mPlanes
[0].mHeight
),
412 SurfaceFormat::B8G8R8A8
)) {
416 RefPtr
<layers::TextureClient
> texture
=
417 videoImage
->GetTextureClient(/* aKnowsCompositor */ nullptr);
419 NS_WARNING("Failed to allocate TextureClient");
423 layers::TextureClientAutoLock
autoLock(texture
,
424 layers::OpenMode::OPEN_WRITE_ONLY
);
425 if (!autoLock
.Succeeded()) {
426 NS_WARNING("Failed to lock TextureClient");
430 layers::MappedTextureData buffer
;
431 if (!texture
->BorrowMappedData(buffer
)) {
432 NS_WARNING("Failed to borrow mapped data");
436 // The naming convention for libyuv and associated utils is word-order.
437 // The naming convention in the gfx stack is byte-order.
438 ConvertI420AlphaToARGB(aBuffer
.mPlanes
[0].mData
, aBuffer
.mPlanes
[1].mData
,
439 aBuffer
.mPlanes
[2].mData
, aAlphaPlane
.mData
,
440 aBuffer
.mPlanes
[0].mStride
, aBuffer
.mPlanes
[1].mStride
,
441 buffer
.data
, buffer
.stride
, buffer
.size
.width
,
448 already_AddRefed
<VideoData
> VideoData::CreateFromImage(
449 const IntSize
& aDisplay
, int64_t aOffset
, const TimeUnit
& aTime
,
450 const TimeUnit
& aDuration
, const RefPtr
<Image
>& aImage
, bool aKeyframe
,
451 const TimeUnit
& aTimecode
) {
452 RefPtr
<VideoData
> v(new VideoData(aOffset
, aTime
, aDuration
, aKeyframe
,
453 aTimecode
, aDisplay
, 0));
458 MediaRawData::MediaRawData()
459 : MediaData(Type::RAW_DATA
), mCrypto(mCryptoInternal
) {}
461 MediaRawData::MediaRawData(const uint8_t* aData
, size_t aSize
)
462 : MediaData(Type::RAW_DATA
),
463 mCrypto(mCryptoInternal
),
464 mBuffer(aData
, aSize
) {}
466 MediaRawData::MediaRawData(const uint8_t* aData
, size_t aSize
,
467 const uint8_t* aAlphaData
, size_t aAlphaSize
)
468 : MediaData(Type::RAW_DATA
),
469 mCrypto(mCryptoInternal
),
470 mBuffer(aData
, aSize
),
471 mAlphaBuffer(aAlphaData
, aAlphaSize
) {}
473 MediaRawData::MediaRawData(AlignedByteBuffer
&& aData
)
474 : MediaData(Type::RAW_DATA
),
475 mCrypto(mCryptoInternal
),
476 mBuffer(std::move(aData
)) {}
478 MediaRawData::MediaRawData(AlignedByteBuffer
&& aData
,
479 AlignedByteBuffer
&& aAlphaData
)
480 : MediaData(Type::RAW_DATA
),
481 mCrypto(mCryptoInternal
),
482 mBuffer(std::move(aData
)),
483 mAlphaBuffer(std::move(aAlphaData
)) {}
485 already_AddRefed
<MediaRawData
> MediaRawData::Clone() const {
486 RefPtr
<MediaRawData
> s
= new MediaRawData
;
487 s
->mTimecode
= mTimecode
;
489 s
->mDuration
= mDuration
;
490 s
->mOffset
= mOffset
;
491 s
->mKeyframe
= mKeyframe
;
492 s
->mExtraData
= mExtraData
;
493 s
->mCryptoInternal
= mCryptoInternal
;
494 s
->mTrackInfo
= mTrackInfo
;
496 s
->mOriginalPresentationWindow
= mOriginalPresentationWindow
;
497 if (!s
->mBuffer
.Append(mBuffer
.Data(), mBuffer
.Length())) {
500 if (!s
->mAlphaBuffer
.Append(mAlphaBuffer
.Data(), mAlphaBuffer
.Length())) {
506 MediaRawData::~MediaRawData() = default;
508 size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf
) const {
509 size_t size
= aMallocSizeOf(this);
510 size
+= mBuffer
.SizeOfExcludingThis(aMallocSizeOf
);
514 UniquePtr
<MediaRawDataWriter
> MediaRawData::CreateWriter() {
515 UniquePtr
<MediaRawDataWriter
> p(new MediaRawDataWriter(this));
519 MediaRawDataWriter::MediaRawDataWriter(MediaRawData
* aMediaRawData
)
520 : mCrypto(aMediaRawData
->mCryptoInternal
), mTarget(aMediaRawData
) {}
522 bool MediaRawDataWriter::SetSize(size_t aSize
) {
523 return mTarget
->mBuffer
.SetLength(aSize
);
526 bool MediaRawDataWriter::Prepend(const uint8_t* aData
, size_t aSize
) {
527 return mTarget
->mBuffer
.Prepend(aData
, aSize
);
530 bool MediaRawDataWriter::Append(const uint8_t* aData
, size_t aSize
) {
531 return mTarget
->mBuffer
.Append(aData
, aSize
);
534 bool MediaRawDataWriter::Replace(const uint8_t* aData
, size_t aSize
) {
535 return mTarget
->mBuffer
.Replace(aData
, aSize
);
538 void MediaRawDataWriter::Clear() { mTarget
->mBuffer
.Clear(); }
540 uint8_t* MediaRawDataWriter::Data() { return mTarget
->mBuffer
.Data(); }
542 size_t MediaRawDataWriter::Size() { return mTarget
->Size(); }
544 void MediaRawDataWriter::PopFront(size_t aSize
) {
545 mTarget
->mBuffer
.PopFront(aSize
);
548 } // namespace mozilla